pax_global_header00006660000000000000000000000064143241620120014505gustar00rootroot0000000000000052 comment=4bc8f24d3e899462e43621aab981f6383a370365 cargo-0.66.0/000077500000000000000000000000001432416201200126715ustar00rootroot00000000000000cargo-0.66.0/.github/000077500000000000000000000000001432416201200142315ustar00rootroot00000000000000cargo-0.66.0/.github/ISSUE_TEMPLATE/000077500000000000000000000000001432416201200164145ustar00rootroot00000000000000cargo-0.66.0/.github/ISSUE_TEMPLATE/bug_report.yml000066400000000000000000000022501432416201200213060ustar00rootroot00000000000000name: Bug Report description: Create a report to help us improve labels: ["C-bug"] body: - type: markdown attributes: value: Thanks for filing a πŸ› bug report πŸ˜„! - type: textarea id: problem attributes: label: Problem description: > Please provide a clear and concise description of what the bug is, including what currently happens and what you expected to happen. validations: required: true - type: textarea id: steps attributes: label: Steps description: Please list the steps to reproduce the bug. placeholder: | 1. 2. 3. - type: textarea id: possible-solutions attributes: label: Possible Solution(s) description: > Not obligatory, but suggest a fix/reason for the bug, or ideas how to implement the addition or change. - type: textarea id: notes attributes: label: Notes description: Provide any additional notes that might be helpful. - type: textarea id: version attributes: label: Version description: Please paste the output of running `cargo version --verbose`. render: text cargo-0.66.0/.github/ISSUE_TEMPLATE/config.yml000066400000000000000000000005671432416201200204140ustar00rootroot00000000000000contact_links: - name: Question url: https://users.rust-lang.org about: > Got a question about Cargo? Ask the community on the user forum. - name: Inspiring Idea url: https://internals.rust-lang.org/c/tools-and-infrastructure/cargo about: > Need more discussions with your next big idea? 
Reach out the coummunity on the internals forum. cargo-0.66.0/.github/ISSUE_TEMPLATE/feature_request.yml000066400000000000000000000025121432416201200223420ustar00rootroot00000000000000name: Feature Request description: Suggest an idea for enhancing Cargo labels: ["C-feature-request"] body: - type: markdown attributes: value: | Thanks for filing a πŸ™‹ feature request πŸ˜„! If the feature request is relatively small and already with a possible solution, this might be the place for you. If you are brewing a big feature that needs feedback from the community, [the internal forum] is the best fit, especially for pre-RFC. You can also talk the idea over with other developers in [#t-cargo Zulip stream]. [the internal forum]: https://internals.rust-lang.org/c/tools-and-infrastructure/cargo/15 [#t-cargo Zulip stream]: https://rust-lang.zulipchat.com/#narrow/stream/246057-t-cargo - type: textarea id: problem attributes: label: Problem description: > Please provide a clear description of your use case and the problem this feature request is trying to solve. validations: required: true - type: textarea id: solution attributes: label: Proposed Solution description: > Please provide a clear and concise description of what you want to happen. - type: textarea id: notes attributes: label: Notes description: Provide any additional context or information that might be helpful. cargo-0.66.0/.github/ISSUE_TEMPLATE/tracking_issue.yml000066400000000000000000000042041432416201200221510ustar00rootroot00000000000000name: Tracking Issue description: A tracking issue for an accepted feature or RFC in Cargo. title: "Tracking Issue for _FEATURE_NAME_" labels: ["C-tracking-issue"] body: - type: markdown attributes: value: > Thank you for creating a tracking issue! Tracking issues are for tracking an accepted feature or RFC from implementation to stabilization. Please do not file a tracking issue until the feature or RFC has been approved. 
- type: textarea id: summary attributes: label: Summary description: Please provide a very brief summary of the feature. value: | RFC: [#NNNN](https://github.com/rust-lang/rfcs/pull/NNNN) Original issue: #NNNN Implementation: #NNNN Documentation: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#my-feature Please enter a short, one-sentence description here. validations: required: true - type: textarea id: unresolved attributes: label: Unresolved Issues description: List issues that have not yet been resolved. placeholder: | * [ ] Make a list of any known implementation or design issues. - type: textarea id: future attributes: label: Future Extensions description: > An optional section where you can mention where the feature may be extended in the future, but is explicitly not intended to address. - type: textarea id: about attributes: label: About tracking issues description: Please include this notice in the issue. value: | Tracking issues are used to record the overall progress of implementation. They are also used as hubs connecting to other relevant issues, e.g., bugs or open design questions. A tracking issue is however *not* meant for large scale discussion, questions, or bug reports about a feature. Instead, open a dedicated issue for the specific matter and add the relevant feature gate label. 
cargo-0.66.0/.github/PULL_REQUEST_TEMPLATE.md000066400000000000000000000035161432416201200200370ustar00rootroot00000000000000 cargo-0.66.0/.github/workflows/000077500000000000000000000000001432416201200162665ustar00rootroot00000000000000cargo-0.66.0/.github/workflows/contrib.yml000066400000000000000000000022521432416201200204520ustar00rootroot00000000000000name: Contrib Deploy on: push: branches: - master permissions: contents: read jobs: deploy: permissions: contents: write # for Git to git push runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 with: fetch-depth: 0 - name: Install mdbook run: | mkdir mdbook curl -Lf https://github.com/rust-lang/mdBook/releases/download/v0.4.9/mdbook-v0.4.9-x86_64-unknown-linux-gnu.tar.gz | tar -xz --directory=./mdbook echo `pwd`/mdbook >> $GITHUB_PATH - name: Build API doc run: | cargo doc --document-private-items --no-deps - name: Deploy docs run: | cd src/doc/contrib mdbook build git worktree add gh-pages gh-pages git config user.name "Deploy from CI" git config user.email "" cd gh-pages # Delete the ref to avoid keeping history. 
git update-ref -d refs/heads/gh-pages rm -rf contrib mv ../book contrib # Move rustdoc under contrib/ mv ../../../../target/doc contrib/apidoc git add contrib git commit -m "Deploy $GITHUB_SHA to gh-pages" git push --force cargo-0.66.0/.github/workflows/main.yml000066400000000000000000000142501432416201200177370ustar00rootroot00000000000000name: CI on: push: branches-ignore: [master] pull_request: branches: ['*'] defaults: run: shell: bash permissions: contents: read jobs: # Check Code style quickly by running `rustfmt` over all code rustfmt: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - run: rustup update stable && rustup default stable - run: rustup component add rustfmt - run: cargo fmt --all --check - run: | for manifest in `find crates benches/benchsuite benches/capture -name Cargo.toml` do echo check fmt for $manifest cargo fmt --all --manifest-path $manifest --check done test: runs-on: ${{ matrix.os }} env: CARGO_PROFILE_DEV_DEBUG: 1 CARGO_PROFILE_TEST_DEBUG: 1 CARGO_INCREMENTAL: 0 strategy: matrix: include: - name: Linux x86_64 stable os: ubuntu-latest rust: stable other: i686-unknown-linux-gnu - name: Linux x86_64 beta os: ubuntu-latest rust: beta other: i686-unknown-linux-gnu - name: Linux x86_64 nightly os: ubuntu-latest rust: nightly other: i686-unknown-linux-gnu - name: macOS x86_64 stable os: macos-latest rust: stable other: x86_64-apple-ios - name: Windows x86_64 MSVC stable os: windows-latest rust: stable-msvc other: i686-pc-windows-msvc - name: Windows x86_64 gnu nightly os: windows-latest rust: nightly-gnu other: i686-pc-windows-gnu name: Tests ${{ matrix.name }} steps: - uses: actions/checkout@v3 - name: Dump Environment run: ci/dump-environment.sh - name: Update Rustup (temporary workaround) run: rustup self update shell: bash if: startsWith(matrix.os, 'windows') - run: rustup update --no-self-update ${{ matrix.rust }} && rustup default ${{ matrix.rust }} - run: rustup target add ${{ matrix.other }} - run: rustup component add 
rustc-dev llvm-tools-preview rust-docs if: startsWith(matrix.rust, 'nightly') - run: sudo apt update -y && sudo apt install gcc-multilib libsecret-1-0 libsecret-1-dev -y if: matrix.os == 'ubuntu-latest' - run: rustup component add rustfmt || echo "rustfmt not available" # Deny warnings on CI to avoid warnings getting into the codebase. - run: cargo test --features 'deny-warnings' - name: Check operability of rustc invocation with argfile env: __CARGO_TEST_FORCE_ARGFILE: 1 run: | # This only tests `cargo fix` because fix-proxy-mode is one of the most # complicated subprocess management in Cargo. cargo test --test testsuite --features 'deny-warnings' -- fix:: - run: cargo test --features 'deny-warnings' --manifest-path crates/cargo-test-support/Cargo.toml env: CARGO_TARGET_DIR: target - run: cargo test -p cargo-platform - run: cargo test -p cargo-util - run: cargo test --manifest-path crates/mdman/Cargo.toml - run: cargo build --manifest-path crates/credential/cargo-credential-1password/Cargo.toml - run: cargo build --manifest-path crates/credential/cargo-credential-gnome-secret/Cargo.toml if: matrix.os == 'ubuntu-latest' - run: cargo build --manifest-path crates/credential/cargo-credential-macos-keychain/Cargo.toml if: matrix.os == 'macos-latest' - run: cargo build --manifest-path crates/credential/cargo-credential-wincred/Cargo.toml if: matrix.os == 'windows-latest' - name: Check benchmarks env: # Share the target dir to try to cache a few build-time deps. CARGO_TARGET_DIR: target run: | # This only tests one benchmark since it can take over 10 minutes to # download all workspaces. cargo test --manifest-path benches/benchsuite/Cargo.toml --all-targets -- cargo cargo check --manifest-path benches/capture/Cargo.toml # The testsuite generates a huge amount of data, and fetch-smoke-test was # running out of disk space. 
- name: Clear test output run: | df -h rm -rf target/tmp df -h - name: Fetch smoke test run: ci/fetch-smoke-test.sh resolver: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - run: rustup update stable && rustup default stable - run: cargo test --manifest-path crates/resolver-tests/Cargo.toml build_std: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - run: rustup update nightly && rustup default nightly - run: rustup component add rust-src - run: cargo build - run: cargo test --test build-std env: CARGO_RUN_BUILD_STD_TESTS: 1 docs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - run: rustup update nightly && rustup default nightly - run: rustup update stable - run: rustup component add rust-docs - run: ci/validate-man.sh # This requires rustfmt, use stable. - run: cd src/doc/semver-check && cargo +stable run - run: | mkdir mdbook curl -Lf https://github.com/rust-lang/mdBook/releases/download/v0.4.5/mdbook-v0.4.5-x86_64-unknown-linux-gnu.tar.gz | tar -xz --directory=./mdbook echo `pwd`/mdbook >> $GITHUB_PATH - run: cargo doc --document-private-items --no-deps env: RUSTDOCFLAGS: -D warnings - run: cd src/doc && mdbook build --dest-dir ../../target/doc - run: | cd src/doc curl -sSLo linkcheck.sh \ https://raw.githubusercontent.com/rust-lang/rust/master/src/tools/linkchecker/linkcheck.sh sh linkcheck.sh --all cargo success: permissions: contents: none name: bors build finished needs: [docs, rustfmt, test, resolver, build_std] runs-on: ubuntu-latest if: "success() && github.event_name == 'push' && github.ref == 'refs/heads/auto-cargo'" steps: - run: echo ok failure: permissions: contents: none name: bors build finished needs: [docs, rustfmt, test, resolver, build_std] runs-on: ubuntu-latest if: "!success() && github.event_name == 'push' && github.ref == 'refs/heads/auto-cargo'" steps: - run: exit 1 cargo-0.66.0/.gitignore000066400000000000000000000002321432416201200146560ustar00rootroot00000000000000target Cargo.lock .cargo /config.stamp 
/Makefile /config.mk src/doc/build src/etc/*.pyc src/registry/target rustc __pycache__ .idea/ .vscode/ *.iml *.swp cargo-0.66.0/.ignore000066400000000000000000000002321432416201200141520ustar00rootroot00000000000000# Output generated from src/doc/man # # The goal is to help people find the right file to edit src/doc/man/generated_txt src/doc/src/commands src/etc/man cargo-0.66.0/CHANGELOG.md000066400000000000000000004471651432416201200145230ustar00rootroot00000000000000# Changelog ## Cargo 1.65 (2022-11-03) [4fd148c4...HEAD](https://github.com/rust-lang/cargo/compare/4fd148c4...HEAD) ### Added ### Changed - Cargo now uses the standard library's `available_parallelism` instead of the `num_cpus` crate for determining the default parallelism. [#10969](https://github.com/rust-lang/cargo/pull/10969) ### Fixed ### Nightly only ## Cargo 1.64 (2022-09-22) [a5e08c47...rust-1.64.0](https://github.com/rust-lang/cargo/compare/a5e08c47...rust-1.64.0) ### Added - πŸŽ‰ Packages can now inherit settings from the workspace so that the settings can be centralized in one place. See [`workspace.package`](https://doc.rust-lang.org/nightly/cargo/reference/workspaces.html#the-workspacepackage-table) and [`workspace.dependencies`](https://doc.rust-lang.org/nightly/cargo/reference/workspaces.html#the-workspacedependencies-table) for more details on how to define these common settings. [#10859](https://github.com/rust-lang/cargo/pull/10859) - Added the [`--crate-type`](https://doc.rust-lang.org/nightly/cargo/commands/cargo-rustc.html#option-cargo-rustc---crate-type) flag to `cargo rustc` to override the crate type. [#10838](https://github.com/rust-lang/cargo/pull/10838) - Cargo commands can now accept multiple `--target` flags to build for multiple targets at once, and the [`build.target`](https://doc.rust-lang.org/nightly/cargo/reference/config.html#buildtarget) config option may now take an array of multiple targets. 
[#10766](https://github.com/rust-lang/cargo/pull/10766) - The `--jobs` argument can now take a negative number to count backwards from the max CPUs. [#10844](https://github.com/rust-lang/cargo/pull/10844) ### Changed - Bash completion of `cargo install --path` now supports path completion. [#10798](https://github.com/rust-lang/cargo/pull/10798) - Significantly improved the performance fetching git dependencies from GitHub when using a hash in the `rev` field. [#10079](https://github.com/rust-lang/cargo/pull/10079) - Published packages will now include the resolver setting from the workspace to ensure that they use the same resolver when used in isolation. [#10911](https://github.com/rust-lang/cargo/pull/10911) [#10961](https://github.com/rust-lang/cargo/pull/10961) [#10970](https://github.com/rust-lang/cargo/pull/10970) - `cargo add` will now update `Cargo.lock`. [#10902](https://github.com/rust-lang/cargo/pull/10902) - The path in the config output of `cargo vendor` now translates backslashes to forward slashes so that the settings should work across platforms. [#10668](https://github.com/rust-lang/cargo/pull/10668) - The [`workspace.default-members`](https://doc.rust-lang.org/nightly/cargo/reference/workspaces.html#package-selection) setting now allows a value of `"."` in a non-virtual workspace to refer to the root package. [#10784](https://github.com/rust-lang/cargo/pull/10784) ### Fixed - The `os` output in `cargo --version --verbose` now supports more platforms. [#10802](https://github.com/rust-lang/cargo/pull/10802) - Cached git checkouts will now be rebuilt if they are corrupted. This may happen when using `net.git-fetch-with-cli` and interrupting the clone process. [#10829](https://github.com/rust-lang/cargo/pull/10829) - Fixed panic in `cargo add --offline`. [#10817](https://github.com/rust-lang/cargo/pull/10817) ### Nightly only - Fixed deserialization of unstable `check-cfg` in `config.toml`. 
[#10799](https://github.com/rust-lang/cargo/pull/10799) ## Cargo 1.63 (2022-08-11) [3f052d8e...rust-1.63.0](https://github.com/rust-lang/cargo/compare/3f052d8e...rust-1.63.0) ### Added - πŸŽ‰ Added the `--config` CLI option to pass config options directly on the CLI. [#10755](https://github.com/rust-lang/cargo/pull/10755) - The `CARGO_PKG_RUST_VERSION` environment variable is now set when compiling a crate if the manifest has the `rust-version` field set. [#10713](https://github.com/rust-lang/cargo/pull/10713) ### Changed - A warning is emitted when encountering multiple packages with the same name in a git dependency. This will ignore packages with `publish=false`. [#10701](https://github.com/rust-lang/cargo/pull/10701) [#10767](https://github.com/rust-lang/cargo/pull/10767) - Change tracking now uses the contents of a `.json` target spec file instead of its path. This should help avoid rebuilds if the path changes. [#10746](https://github.com/rust-lang/cargo/pull/10746) - Git dependencies with a submodule configured with the `update=none` strategy in `.gitmodules` is now honored, and the submodule will not be fetched. [#10717](https://github.com/rust-lang/cargo/pull/10717) - Crate files now use a more recent date (Jul 23, 2006 instead of Nov 29, 1973) for deterministic behavior. [#10720](https://github.com/rust-lang/cargo/pull/10720) - The initial template used for `cargo new` now includes a slightly more realistic test structure that has `use super::*;` in the test module. [#10706](https://github.com/rust-lang/cargo/pull/10706) - Updated the internal HTTP library libcurl with various small fixes and updates. [#10696](https://github.com/rust-lang/cargo/pull/10696) ### Fixed - Fix zsh completions for `cargo add` and `cargo locate-project` [#10810](https://github.com/rust-lang/cargo/pull/10810) [#10811](https://github.com/rust-lang/cargo/pull/10811) - Fixed `-p` being ignored with `cargo publish` in the root of a virtual workspace. 
Some additional checks were also added to generate an error if multiple packages were selected (previously it would pick the first one). [#10677](https://github.com/rust-lang/cargo/pull/10677) - The human-readable executable name is no longer displayed for `cargo test` when using JSON output. [#10691](https://github.com/rust-lang/cargo/pull/10691) ### Nightly only - Added `-Zcheck-cfg=output` to support build-scripts declaring their supported set of `cfg` values with `cargo:rustc-check-cfg`. [#10539](https://github.com/rust-lang/cargo/pull/10539) - `-Z http-registry` now uses https://index.crates.io/ when accessing crates-io. [#10725](https://github.com/rust-lang/cargo/pull/10725) - Fixed formatting of `.workspace` key in `cargo add` for workspace inheritance. [#10705](https://github.com/rust-lang/cargo/pull/10705) - Sparse HTTP registry URLs must now end with a `/`. [#10698](https://github.com/rust-lang/cargo/pull/10698) - Fixed issue with `cargo add` and workspace inheritance of the `default-features` key. [#10685](https://github.com/rust-lang/cargo/pull/10685) ## Cargo 1.62 (2022-06-30) [1ef1e0a1...rust-1.62.0](https://github.com/rust-lang/cargo/compare/1ef1e0a1...rust-1.62.0) ### Added - πŸŽ‰ Added the `cargo add` command for adding dependencies to `Cargo.toml` from the command-line. [docs](https://doc.rust-lang.org/nightly/cargo/commands/cargo-add.html) [#10472](https://github.com/rust-lang/cargo/pull/10472) [#10577](https://github.com/rust-lang/cargo/pull/10577) [#10578](https://github.com/rust-lang/cargo/pull/10578) - Package ID specs now support `name@version` syntax in addition to the previous `name:version` to align with the behavior in `cargo add` and other tools. `cargo install` and `cargo yank` also now support this syntax so the version does not need to passed as a separate flag. 
[#10582](https://github.com/rust-lang/cargo/pull/10582) [#10650](https://github.com/rust-lang/cargo/pull/10650) [#10597](https://github.com/rust-lang/cargo/pull/10597) - Added the CLI option `-F` as an alias of `--features`. [#10576](https://github.com/rust-lang/cargo/pull/10576) - The `git` and `registry` directories in Cargo's home directory (usually `~/.cargo`) are now marked as cache directories so that they are not included in backups or content indexing (on Windows). [#10553](https://github.com/rust-lang/cargo/pull/10553) - Added the `--version` flag to `cargo yank` to replace the `--vers` flag to be consistent with `cargo install`. [#10575](https://github.com/rust-lang/cargo/pull/10575) - Added automatic `@` argfile support, which will use "response files" if the command-line to `rustc` exceeds the operating system's limit. [#10546](https://github.com/rust-lang/cargo/pull/10546) - `cargo clean` now has a progress bar (if it takes longer than half a second). [#10236](https://github.com/rust-lang/cargo/pull/10236) ### Changed - `cargo install` no longer generates an error if no binaries were found to install (such as missing required features). [#10508](https://github.com/rust-lang/cargo/pull/10508) - `cargo test` now passes `--target` to `rustdoc` if the specified target is the same as the host target. [#10594](https://github.com/rust-lang/cargo/pull/10594) - `cargo doc` now automatically passes `-Arustdoc::private-intra-doc-links` when documenting a binary (which automatically includes `--document-private-items`). The [`private-intra-doc-links`](https://doc.rust-lang.org/rustdoc/lints.html#private_intra_doc_links) lint is only relevant when *not* documenting private items, which doesn't apply to binaries. [#10142](https://github.com/rust-lang/cargo/pull/10142) - The length of the short git hash in the `cargo --version` output is now fixed to 9 characters. Previously the length was inconsistent between different platforms. 
[#10579](https://github.com/rust-lang/cargo/pull/10579) - Attempting to publish a package with a `Cargo.toml.orig` file will now result in an error. The filename would otherwise conflict with the automatically-generated file. [#10551](https://github.com/rust-lang/cargo/pull/10551) ### Fixed - The `build.dep-info-basedir` configuration setting now properly supports the use of `..` in the path to refer to a parent directory. [#10281](https://github.com/rust-lang/cargo/pull/10281) - Fixed regression in automatic detection of the default number of CPUs to use on systems using cgroups v1. [#10737](https://github.com/rust-lang/cargo/pull/10737) [#10739](https://github.com/rust-lang/cargo/pull/10739) ### Nightly only - `cargo fetch` now works with `-Zbuild-std` to fetch the standard library's dependencies. [#10129](https://github.com/rust-lang/cargo/pull/10129) - Added support for workspace inheritance. [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#workspace-inheritance) [#10584](https://github.com/rust-lang/cargo/pull/10584) [#10568](https://github.com/rust-lang/cargo/pull/10568) [#10565](https://github.com/rust-lang/cargo/pull/10565) [#10564](https://github.com/rust-lang/cargo/pull/10564) [#10563](https://github.com/rust-lang/cargo/pull/10563) [#10606](https://github.com/rust-lang/cargo/pull/10606) [#10548](https://github.com/rust-lang/cargo/pull/10548) [#10538](https://github.com/rust-lang/cargo/pull/10538) - Added `-Zcheck-cfg` which adds various forms of validating `cfg` expressions for unknown names and values. [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#check-cfg) [#10486](https://github.com/rust-lang/cargo/pull/10486) [#10566](https://github.com/rust-lang/cargo/pull/10566) - The `--config` CLI option no longer allows setting a registry token. [#10580](https://github.com/rust-lang/cargo/pull/10580) - Fixed issues with proc-macros and `-Z rustdoc-scrape-examples`. 
[#10549](https://github.com/rust-lang/cargo/pull/10549) [#10533](https://github.com/rust-lang/cargo/pull/10533) ## Cargo 1.61 (2022-05-19) [ea2a21c9...rust-1.61.0](https://github.com/rust-lang/cargo/compare/ea2a21c9...rust-1.61.0) ### Added ### Changed - `cargo test --no-run` will now display the path to the test executables. [#10346](https://github.com/rust-lang/cargo/pull/10346) - `cargo tree --duplicates` no longer reports dependencies that are shared between the host and the target as duplicates. [#10466](https://github.com/rust-lang/cargo/pull/10466) - Updated to the 1.4.2 release of libgit2 which brings in several fixes [#10442](https://github.com/rust-lang/cargo/pull/10442) [#10479](https://github.com/rust-lang/cargo/pull/10479) - `cargo vendor` no longer allows multiple values for `--sync`, you must pass multiple `--sync` flags instead. [#10448](https://github.com/rust-lang/cargo/pull/10448) - Warnings are now issued for manifest keys that have mixed both underscore and dash variants (such as specifying both `proc_macro` and `proc-macro`) [#10316](https://github.com/rust-lang/cargo/pull/10316) - Cargo now uses the standard library's `available_parallelism` instead of the `num_cpus` crate for determining the default parallelism. [#10427](https://github.com/rust-lang/cargo/pull/10427) - `cargo search` terms are now highlighted. [#10425](https://github.com/rust-lang/cargo/pull/10425) ### Fixed - Paths passed to VCS tools like `hg` are now added after `--` to avoid conflict with VCS flags. [#10483](https://github.com/rust-lang/cargo/pull/10483) - Fixed the `http.timeout` configuration value to actually work. [#10456](https://github.com/rust-lang/cargo/pull/10456) - Fixed issues with `cargo rustc --crate-type` not working in some situations. 
[#10388](https://github.com/rust-lang/cargo/pull/10388) ### Nightly only - Added `-Z check-cfg-features` to enable compile-time checking of features [#10408](https://github.com/rust-lang/cargo/pull/10408) - Added `-Z bindeps` to support binary artifact dependencies (RFC-3028) [#9992](https://github.com/rust-lang/cargo/pull/9992) - `-Z multitarget` is now supported in the `build.target` config value with an array. [#10473](https://github.com/rust-lang/cargo/pull/10473) - Added `--keep-going` flag which will continue compilation even if one crate fails to compile. [#10383](https://github.com/rust-lang/cargo/pull/10383) - Start work on inheriting manifest values in a workspace. [#10497](https://github.com/rust-lang/cargo/pull/10497) [#10517](https://github.com/rust-lang/cargo/pull/10517) - Added support for sparse HTTP registries. [#10470](https://github.com/rust-lang/cargo/pull/10470) [#10064](https://github.com/rust-lang/cargo/pull/10064) - Fixed panic when artifact target is used for `[target.'cfg()'.dependencies]` [#10433](https://github.com/rust-lang/cargo/pull/10433) - Fixed host flags to pass to build scripts (`-Z target-applies-to-host`) [#10395](https://github.com/rust-lang/cargo/pull/10395) - Added `-Z check-cfg-features` support for rustdoc [#10428](https://github.com/rust-lang/cargo/pull/10428) ## Cargo 1.60 (2022-04-07) [358e79fe...rust-1.60.0](https://github.com/rust-lang/cargo/compare/358e79fe...rust-1.60.0) ### Added - πŸŽ‰ Added the `dep:` prefix in the `[features]` table to refer to an optional dependency. This allows creating feature names with the same name as a dependency, and allows for "hiding" optional dependencies so that they do not implicitly expose a feature name. 
[docs](https://doc.rust-lang.org/nightly/cargo/reference/features.html#optional-dependencies) [#10269](https://github.com/rust-lang/cargo/pull/10269) - πŸŽ‰ Added the `dep-name?/feature-name` syntax to the `[features]` table to only enable the feature `feature-name` if the optional dependency `dep-name` is already enabled by some other feature. [docs](https://doc.rust-lang.org/nightly/cargo/reference/features.html#dependency-features) [#10269](https://github.com/rust-lang/cargo/pull/10269) - πŸŽ‰ Added `--timings` option to generate an HTML report about build timing, concurrency, and CPU use. [docs](https://doc.rust-lang.org/nightly/cargo/reference/timings.html) [#10245](https://github.com/rust-lang/cargo/pull/10245) - Added the `"v"` and `"features2"` fields to the registry index. The `"v"` field provides a method for compatibility with future changes to the index. [docs](https://doc.rust-lang.org/nightly/cargo/reference/registries.html#index-format) [#10269](https://github.com/rust-lang/cargo/pull/10269) - Added bash completion for `cargo clippy` [#10347](https://github.com/rust-lang/cargo/pull/10347) - Added bash completion for `cargo report` [#10295](https://github.com/rust-lang/cargo/pull/10295) - Added support to build scripts for `rustc-link-arg-tests`, `rustc-link-arg-examples`, and `rustc-link-arg-benches`. [docs](https://doc.rust-lang.org/nightly/cargo/reference/build-scripts.html#outputs-of-the-build-script) [#10274](https://github.com/rust-lang/cargo/pull/10274) ### Changed - Cargo now uses the clap 3 library for command-line argument parsing. [#10265](https://github.com/rust-lang/cargo/pull/10265) - The `build.pipelining` config option is now deprecated, pipelining will now always be enabled. [#10258](https://github.com/rust-lang/cargo/pull/10258) - `cargo new` will now generate a `.gitignore` which only ignores `Cargo.lock` in the root of the repo, instead of any directory. 
[#10379](https://github.com/rust-lang/cargo/pull/10379) - Improved startup time of bash completion. [#10365](https://github.com/rust-lang/cargo/pull/10365) - The `--features` flag is now honored when used with the `--all-features` flag, which allows enabling features from other packages. [#10337](https://github.com/rust-lang/cargo/pull/10337) - Cargo now uses a different TOML parser. This should not introduce any user-visible changes. This paves the way to support format-preserving programmatic modification of TOML files for supporting `cargo add` and other future enhancements. [#10086](https://github.com/rust-lang/cargo/pull/10086) - Setting a library to emit both a `dylib` and `cdylib` is now an error, as this combination is not supported. [#10243](https://github.com/rust-lang/cargo/pull/10243) - `cargo --list` now includes the `help` command. [#10300](https://github.com/rust-lang/cargo/pull/10300) ### Fixed - Fixed running `cargo doc` on examples with dev-dependencies. [#10341](https://github.com/rust-lang/cargo/pull/10341) - Fixed `cargo install --path` for a path that is relative to a directory outside of the workspace in the current directory. [#10335](https://github.com/rust-lang/cargo/pull/10335) - `cargo test TEST_FILTER` should no longer build binaries that are explicitly disabled with `test = false`. [#10305](https://github.com/rust-lang/cargo/pull/10305) - Fixed regression with `term.verbose` without `term.quiet`, and vice versa. [#10429](https://github.com/rust-lang/cargo/pull/10429) [#10436](https://github.com/rust-lang/cargo/pull/10436) ### Nightly only - Added `rustflags` option to a profile definition. [#10217](https://github.com/rust-lang/cargo/pull/10217) - Changed `--config` to only support dotted keys. [#10176](https://github.com/rust-lang/cargo/pull/10176) - Fixed profile `rustflags` not being gated in profile overrides. 
[#10411](https://github.com/rust-lang/cargo/pull/10411) [#10413](https://github.com/rust-lang/cargo/pull/10413) ## Cargo 1.59 (2022-02-24) [7f08ace4...rust-1.59.0](https://github.com/rust-lang/cargo/compare/7f08ace4...rust-1.59.0) ### Added - πŸŽ‰ The `strip` option can now be specified in a profile to specify the behavior for removing symbols and debug information from binaries. [docs](https://doc.rust-lang.org/nightly/cargo/reference/profiles.html#strip) [#10088](https://github.com/rust-lang/cargo/pull/10088) [#10376](https://github.com/rust-lang/cargo/pull/10376) - πŸŽ‰ Added future incompatible reporting. This provides reporting for when a future change in `rustc` may cause a package or any of its dependencies to stop building. [docs](https://doc.rust-lang.org/nightly/cargo/reference/future-incompat-report.html) [#10165](https://github.com/rust-lang/cargo/pull/10165) - SSH authentication on Windows now supports ssh-agent. [docs](https://doc.rust-lang.org/nightly/cargo/appendix/git-authentication.html#ssh-authentication) [#10248](https://github.com/rust-lang/cargo/pull/10248) - Added `term.quiet` configuration option to enable the `--quiet` behavior from a config file. [docs](https://doc.rust-lang.org/nightly/cargo/reference/config.html#termquiet) [#10152](https://github.com/rust-lang/cargo/pull/10152) - Added `-r` CLI option as an alias for `--release`. [#10133](https://github.com/rust-lang/cargo/pull/10133) ### Changed - Scanning the package directory should now be resilient to errors, such as filesystem loops or access issues. [#10188](https://github.com/rust-lang/cargo/pull/10188) [#10214](https://github.com/rust-lang/cargo/pull/10214) [#10286](https://github.com/rust-lang/cargo/pull/10286) - `cargo help ` will now show the target of the alias. [#10193](https://github.com/rust-lang/cargo/pull/10193) - Removed the deprecated `--host` CLI option. 
[#10145](https://github.com/rust-lang/cargo/pull/10145) [#10327](https://github.com/rust-lang/cargo/pull/10327) - Cargo should now report its version to always be in sync with `rustc`. [#10178](https://github.com/rust-lang/cargo/pull/10178) - Added EOPNOTSUPP to ignored file locking errors, which is relevant to BSD operating systems. [#10157](https://github.com/rust-lang/cargo/pull/10157) ### Fixed - macOS: Fixed an issue where running an executable would sporadically be killed by the kernel (likely starting in macOS 12). [#10196](https://github.com/rust-lang/cargo/pull/10196) - Fixed so that the `doc=false` setting is honored in the `[lib]` definition of a dependency. [#10201](https://github.com/rust-lang/cargo/pull/10201) [#10324](https://github.com/rust-lang/cargo/pull/10324) - The `"executable"` field in the JSON option was incorrectly including the path to `index.html` when documenting a binary. It is now null. [#10171](https://github.com/rust-lang/cargo/pull/10171) - Documenting a binary now waits for the package library to finish documenting before starting. This fixes some race conditions if the binary has intra-doc links to the library. [#10172](https://github.com/rust-lang/cargo/pull/10172) - Fixed panic when displaying help text to a closed pipe. [#10164](https://github.com/rust-lang/cargo/pull/10164) ### Nightly only - Added the `--crate-type` flag to `cargo rustc`. [#10093](https://github.com/rust-lang/cargo/pull/10093) ## Cargo 1.58 (2022-01-13) [b2e52d7c...rust-1.58.0](https://github.com/rust-lang/cargo/compare/b2e52d7c...rust-1.58.0) ### Added - Added `rust_version` field to package data in `cargo metadata`. [#9967](https://github.com/rust-lang/cargo/pull/9967) - Added `--message-format` option to `cargo install`. [#10107](https://github.com/rust-lang/cargo/pull/10107) ### Changed - A warning is now shown when an alias shadows an external command. [#10082](https://github.com/rust-lang/cargo/pull/10082) - Updated curl to 7.80.0. 
[#10040](https://github.com/rust-lang/cargo/pull/10040) [#10106](https://github.com/rust-lang/cargo/pull/10106) ### Fixed - Doctests now include rustc-link-args from build scripts. [#9916](https://github.com/rust-lang/cargo/pull/9916) - Fixed `cargo tree` entering an infinite loop with cyclical dev-dependencies. Fixed an edge case where the resolver would fail to handle a cyclical dev-dependency with a feature. [#10103](https://github.com/rust-lang/cargo/pull/10103) - Fixed `cargo clean -p` when the directory path contains glob characters. [#10072](https://github.com/rust-lang/cargo/pull/10072) - Fixed debug builds of `cargo` which could panic when downloading a crate when the server has a redirect with a non-empty body. [#10048](https://github.com/rust-lang/cargo/pull/10048) ### Nightly only - Make future-incompat-report output more user-friendly. [#9953](https://github.com/rust-lang/cargo/pull/9953) - Added support to scrape code examples from the `examples` directory to be included in the documentation. [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#scrape-examples) [#9525](https://github.com/rust-lang/cargo/pull/9525) [#10037](https://github.com/rust-lang/cargo/pull/10037) [#10017](https://github.com/rust-lang/cargo/pull/10017) - Fixed `cargo report future-incompatibilities` to check stdout if it supports color. [#10024](https://github.com/rust-lang/cargo/pull/10024) ## Cargo 1.57 (2021-12-02) [18751dd3...rust-1.57.0](https://github.com/rust-lang/cargo/compare/18751dd3...rust-1.57.0) ### Added - πŸŽ‰ Added custom named profiles. This also changes the `test` and `bench` profiles to inherit their settings from `dev` and `release`, and Cargo will now only use a single profile during a given command instead of using different profiles for dependencies and cargo-targets. 
[docs](https://doc.rust-lang.org/nightly/cargo/reference/profiles.html#custom-profiles) [#9943](https://github.com/rust-lang/cargo/pull/9943) - The `rev` option for a git dependency now supports git references that start with `refs/`. An example where this can be used is to depend on a pull request from a service like GitHub before it is merged. [#9859](https://github.com/rust-lang/cargo/pull/9859) - Added `path_in_vcs` field to the `.cargo_vcs_info.json` file. [docs](https://doc.rust-lang.org/nightly/cargo/commands/cargo-package.html#cargo_vcs_infojson-format) [#9866](https://github.com/rust-lang/cargo/pull/9866) ### Changed - ❗ `RUSTFLAGS` is no longer set for build scripts. This change was made in 1.55, but the release notes did not highlight this change. Build scripts should use `CARGO_ENCODED_RUSTFLAGS` instead. See the [documentation](https://doc.rust-lang.org/nightly/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-build-scripts) for more details. - The `cargo version` command now includes some extra information. [#9968](https://github.com/rust-lang/cargo/pull/9968) - Updated libgit2 to 1.3 which brings in a number of fixes and changes to git handling. [#9963](https://github.com/rust-lang/cargo/pull/9963) [#9988](https://github.com/rust-lang/cargo/pull/9988) - Shell completions now include shorthand b/r/c/d subcommands. [#9951](https://github.com/rust-lang/cargo/pull/9951) - `cargo update --precise` now allows specifying a version without semver metadata (stuff after `+` in the version number). [#9945](https://github.com/rust-lang/cargo/pull/9945) - zsh completions now complete `--example` names. [#9939](https://github.com/rust-lang/cargo/pull/9939) - The progress bar now differentiates when building unittests. [#9934](https://github.com/rust-lang/cargo/pull/9934) - Some backwards-compatibility support for invalid TOML syntax has been removed. 
[#9932](https://github.com/rust-lang/cargo/pull/9932) - Reverted the change from 1.55 that triggered an error for dependency specifications that did not include any fields. [#9911](https://github.com/rust-lang/cargo/pull/9911) ### Fixed - Removed a log message (from `CARGO_LOG`) that may leak tokens. [#9873](https://github.com/rust-lang/cargo/pull/9873) - `cargo fix` will now avoid writing fixes to the global registry cache. [#9938](https://github.com/rust-lang/cargo/pull/9938) - Fixed `-Z help` CLI option when used with a shorthand alias (b/c/r/d). [#9933](https://github.com/rust-lang/cargo/pull/9933) ### Nightly only ## Cargo 1.56 (2021-10-21) [cebef295...rust-1.56.0](https://github.com/rust-lang/cargo/compare/cebef295...rust-1.56.0) ### Added - πŸŽ‰ Cargo now supports the 2021 edition. More information may be found in the [edition guide](https://doc.rust-lang.org/nightly/edition-guide/rust-2021/index.html). [#9800](https://github.com/rust-lang/cargo/pull/9800) - πŸŽ‰ Added the [`rust-version`](https://doc.rust-lang.org/nightly/cargo/reference/manifest.html#the-rust-version-field) field to `Cargo.toml` to specify the minimum supported Rust version, and the `--ignore-rust-version` command line option to override it. [#9732](https://github.com/rust-lang/cargo/pull/9732) - Added the `[env]` table to config files to specify environment variables to set. [docs](https://doc.rust-lang.org/nightly/cargo/reference/config.html#env) [#9411](https://github.com/rust-lang/cargo/pull/9411) - `[patch]` tables may now be specified in config files. [docs](https://doc.rust-lang.org/nightly/cargo/reference/config.html#patch) [#9839](https://github.com/rust-lang/cargo/pull/9839) - `cargo doc` now supports the `--example` and `--examples` flags. [#9808](https://github.com/rust-lang/cargo/pull/9808) - πŸŽ‰ Build scripts can now pass additional linker arguments for binaries or all linkable targets. 
[docs](https://doc.rust-lang.org/nightly/cargo/reference/build-scripts.html#outputs-of-the-build-script) [#9557](https://github.com/rust-lang/cargo/pull/9557) - Added support for the `-p` flag for `cargo publish` to publish a specific package in a workspace. `cargo package` also now supports `-p` and `--workspace`. [#9559](https://github.com/rust-lang/cargo/pull/9559) - Added documentation about third-party registries. [#9830](https://github.com/rust-lang/cargo/pull/9830) - Added the `{sha256-checksum}` placeholder for URLs in a registry `config.json`. [docs](https://doc.rust-lang.org/nightly/cargo/reference/registries.html#index-format) [#9801](https://github.com/rust-lang/cargo/pull/9801) - Added a warning when a dependency does not have a library. [#9771](https://github.com/rust-lang/cargo/pull/9771) ### Changed - Doc tests now support the `-q` flag to show terse test output. [#9730](https://github.com/rust-lang/cargo/pull/9730) - `features` used in a `[replace]` table now issues a warning, as they are ignored. [#9681](https://github.com/rust-lang/cargo/pull/9681) - Changed so that only `wasm32-unknown-emscripten` executables are built without a hash in the filename. Previously it was all `wasm32` targets. Additionally, all `apple` binaries are now built with a hash in the filename. This allows multiple copies to be cached at once, and matches the behavior on other platforms (except `msvc`). [#9653](https://github.com/rust-lang/cargo/pull/9653) - `cargo new` now generates an example that doesn't generate a warning with clippy. [#9796](https://github.com/rust-lang/cargo/pull/9796) - `cargo fix --edition` now only applies edition-specific lints. [#9846](https://github.com/rust-lang/cargo/pull/9846) - Improve resolver message to include dependency requirements. [#9827](https://github.com/rust-lang/cargo/pull/9827) - `cargo fix` now has more debug logging available with the `CARGO_LOG` environment variable. 
[#9831](https://github.com/rust-lang/cargo/pull/9831) - Changed `cargo fix --edition` to emit a warning when on the latest stable edition when running on stable instead of generating an error. [#9792](https://github.com/rust-lang/cargo/pull/9792) - `cargo install` will now determine all of the packages to install before starting the installation, which should help with reporting errors without partially installing. [#9793](https://github.com/rust-lang/cargo/pull/9793) - The resolver report for `cargo fix --edition` now includes differences for dev-dependencies. [#9803](https://github.com/rust-lang/cargo/pull/9803) - `cargo fix` will now show better diagnostics for abnormal errors from `rustc`. [#9799](https://github.com/rust-lang/cargo/pull/9799) - Entries in `cargo --list` are now deduplicated. [#9773](https://github.com/rust-lang/cargo/pull/9773) - Aliases are now included in `cargo --list`. [#9764](https://github.com/rust-lang/cargo/pull/9764) ### Fixed - Fixed panic with build-std of a proc-macro. [#9834](https://github.com/rust-lang/cargo/pull/9834) - Fixed running `cargo` recursively from proc-macros while running `cargo fix`. [#9818](https://github.com/rust-lang/cargo/pull/9818) - Return an error instead of a stack overflow for command alias loops. [#9791](https://github.com/rust-lang/cargo/pull/9791) - Updated to curl 7.79.1, which will hopefully fix intermittent http2 errors. [#9937](https://github.com/rust-lang/cargo/pull/9937) ### Nightly only - Added `[future-incompat-report]` config section. [#9774](https://github.com/rust-lang/cargo/pull/9774) - Fixed value-after-table error with custom named profiles. [#9789](https://github.com/rust-lang/cargo/pull/9789) - Added the `different-binary-name` feature to support specifying a non-rust-identifier for a binary name. 
[docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#different-binary-name) [#9627](https://github.com/rust-lang/cargo/pull/9627) - Added a profile option to select the codegen backend. [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#codegen-backend) [#9118](https://github.com/rust-lang/cargo/pull/9118) ## Cargo 1.55 (2021-09-09) [aa8b0929...rust-1.55.0](https://github.com/rust-lang/cargo/compare/aa8b0929...rust-1.55.0) ### Added - The package definition in `cargo metadata` now includes the `"default_run"` field from the manifest. [#9550](https://github.com/rust-lang/cargo/pull/9550) - ❗ Build scripts now have access to the following environment variables: `RUSTC_WRAPPER`, `RUSTC_WORKSPACE_WRAPPER`, `CARGO_ENCODED_RUSTFLAGS`. `RUSTFLAGS` is no longer set for build scripts; they should use `CARGO_ENCODED_RUSTFLAGS` instead. [docs](https://doc.rust-lang.org/nightly/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-build-scripts) [#9601](https://github.com/rust-lang/cargo/pull/9601) - Added `cargo d` as an alias for `cargo doc`. [#9680](https://github.com/rust-lang/cargo/pull/9680) - Added `{lib}` to the `cargo tree --format` option to display the library name of a package. [#9663](https://github.com/rust-lang/cargo/pull/9663) - Added `members_mut` method to the `Workspace` API. [#9547](https://github.com/rust-lang/cargo/pull/9547) ### Changed - If a build command does not match any targets when using the `--all-targets`, `--bins`, `--tests`, `--examples`, or `--benches` flags, a warning is now displayed to inform you that there were no matching targets. [#9549](https://github.com/rust-lang/cargo/pull/9549) - The way `cargo init` detects whether or not existing source files represent a binary or library has been changed to respect the command-line flags instead of trying to guess which type it is. 
[#9522](https://github.com/rust-lang/cargo/pull/9522) - Registry names are now displayed instead of registry URLs when possible. [#9632](https://github.com/rust-lang/cargo/pull/9632) - Duplicate compiler diagnostics are no longer shown. This can often happen with `cargo test` which builds multiple copies of the same code in parallel. This also updates the warning summary to provide more context. [#9675](https://github.com/rust-lang/cargo/pull/9675) - The output for warnings or errors is now improved to be leaner, cleaner, and show more context. [#9655](https://github.com/rust-lang/cargo/pull/9655) - Network send errors are now treated as "spurious" which means they will be retried. [#9695](https://github.com/rust-lang/cargo/pull/9695) - Git keys (`branch`, `tag`, `rev`) on a non-git dependency are now an error. Additionally, specifying both `git` and `path` is now an error. [#9689](https://github.com/rust-lang/cargo/pull/9689) - Specifying a dependency without any keys is now an error. [#9686](https://github.com/rust-lang/cargo/pull/9686) - The resolver now prefers to use `[patch]` table entries of dependencies when possible. [#9639](https://github.com/rust-lang/cargo/pull/9639) - Package name typo errors in dependencies are now displayed aligned with the original to help make it easier to see the difference. [#9665](https://github.com/rust-lang/cargo/pull/9665) - Windows platforms may now warn on environment variables that have the wrong case. [#9654](https://github.com/rust-lang/cargo/pull/9654) - `features` used in a `[patch]` table now issues a warning, as they are ignored. [#9666](https://github.com/rust-lang/cargo/pull/9666) - The `target` directory is now excluded from content indexing on Windows. [#9635](https://github.com/rust-lang/cargo/pull/9635) - When `Cargo.toml` is not found, the error message now detects if it was misnamed with a lowercase `c` to suggest the correct form. 
[#9607](https://github.com/rust-lang/cargo/pull/9607) - Building `diesel` with the new resolver displays a compatibility notice. [#9602](https://github.com/rust-lang/cargo/pull/9602) - Updated the `opener` dependency, which handles opening a web browser, which includes several changes, such as new behavior when run on WSL, and using the system `xdg-open` on Linux. [#9583](https://github.com/rust-lang/cargo/pull/9583) - Updated to libcurl 7.78. [#9809](https://github.com/rust-lang/cargo/pull/9809) [#9810](https://github.com/rust-lang/cargo/pull/9810) ### Fixed - Fixed dep-info files including non-local build script paths. [#9596](https://github.com/rust-lang/cargo/pull/9596) - Handle "jobs = 0" case in cargo config files [#9584](https://github.com/rust-lang/cargo/pull/9584) - Implement warning for ignored trailing arguments after `--` [#9561](https://github.com/rust-lang/cargo/pull/9561) - Fixed rustc/rustdoc config values to be config-relative. [#9566](https://github.com/rust-lang/cargo/pull/9566) - `cargo fix` now supports rustc's suggestions with multiple spans. [#9567](https://github.com/rust-lang/cargo/pull/9567) - `cargo fix` now fixes each target serially instead of in parallel to avoid problems with fixing the same file concurrently. [#9677](https://github.com/rust-lang/cargo/pull/9677) - Changes to the target `linker` config value now trigger a rebuild. [#9647](https://github.com/rust-lang/cargo/pull/9647) - Git unstaged deleted files are now ignored when using the `--allow-dirty` flag with `cargo publish` or `cargo package`. [#9645](https://github.com/rust-lang/cargo/pull/9645) ### Nightly only - Enabled support for `cargo fix --edition` for 2021. [#9588](https://github.com/rust-lang/cargo/pull/9588) - Several changes to named profiles. [#9685](https://github.com/rust-lang/cargo/pull/9685) - Extended instructions on what to do when running `cargo fix --edition` on the 2021 edition. 
[#9694](https://github.com/rust-lang/cargo/pull/9694) - Multiple updates to error messages using nightly features to help better explain the situation. [#9657](https://github.com/rust-lang/cargo/pull/9657) - Adjusted the edition 2021 resolver diff report. [#9649](https://github.com/rust-lang/cargo/pull/9649) - Fixed error using `cargo doc --open` with `doc.extern-map`. [#9531](https://github.com/rust-lang/cargo/pull/9531) - Unified weak and namespaced features. [#9574](https://github.com/rust-lang/cargo/pull/9574) - Various updates to future-incompatible reporting. [#9606](https://github.com/rust-lang/cargo/pull/9606) - `[env]` environment variables are not allowed to set vars set by Cargo. [#9579](https://github.com/rust-lang/cargo/pull/9579) ## Cargo 1.54 (2021-07-29) [4369396c...rust-1.54.0](https://github.com/rust-lang/cargo/compare/4369396c...rust-1.54.0) ### Added - Fetching from a git repository (such as the crates.io index) now displays the network transfer rate. [#9395](https://github.com/rust-lang/cargo/pull/9395) - Added `--prune` option for `cargo tree` to limit what is displayed. [#9520](https://github.com/rust-lang/cargo/pull/9520) - Added `--depth` option for `cargo tree` to limit what is displayed. [#9499](https://github.com/rust-lang/cargo/pull/9499) - Added `cargo tree -e no-proc-macro` to hide procedural macro dependencies. [#9488](https://github.com/rust-lang/cargo/pull/9488) - Added `doc.browser` config option to set which browser to open with `cargo doc --open`. [#9473](https://github.com/rust-lang/cargo/pull/9473) - Added `CARGO_TARGET_TMPDIR` environment variable set for integration tests & benches. This provides a temporary or "scratch" directory in the `target` directory for tests and benches to use. [#9375](https://github.com/rust-lang/cargo/pull/9375) ### Changed - `--features` CLI flags now provide typo suggestions with the new feature resolver. 
[#9420](https://github.com/rust-lang/cargo/pull/9420) - Cargo now uses a new parser for SemVer versions. This should behave mostly the same as before with some minor exceptions where invalid syntax for version requirements is now rejected. [#9508](https://github.com/rust-lang/cargo/pull/9508) - Mtime handling of `.crate` published packages has changed slightly to avoid mtime values of 0. This was causing problems with lldb which refused to read those files. [#9517](https://github.com/rust-lang/cargo/pull/9517) - Improved performance of git status check in `cargo package`. [#9478](https://github.com/rust-lang/cargo/pull/9478) - `cargo new` with fossil now places the ignore settings in the new repository instead of using `fossil settings` to set them globally. This also includes several other cleanups to make it more consistent with other VCS configurations. [#9469](https://github.com/rust-lang/cargo/pull/9469) - `rustc-cdylib-link-arg` applying transitively displays a warning that this was not intended, and may be an error in the future. [#9563](https://github.com/rust-lang/cargo/pull/9563) ### Fixed - Fixed `package.exclude` in `Cargo.toml` using inverted exclusions (`!somefile`) when not in a git repository or when vendoring a dependency. [#9186](https://github.com/rust-lang/cargo/pull/9186) - Dep-info files now adjust build script `rerun-if-changed` paths to be absolute paths. [#9421](https://github.com/rust-lang/cargo/pull/9421) - Fixed a bug when with resolver = "1" non-virtual package was allowing unknown features. [#9437](https://github.com/rust-lang/cargo/pull/9437) - Fixed an issue with the index cache mishandling versions that only differed in build metadata (such as `110.0.0` and `110.0.0+1.1.0f`). [#9476](https://github.com/rust-lang/cargo/pull/9476) - Fixed `cargo install` with a semver metadata version. 
[#9467](https://github.com/rust-lang/cargo/pull/9467) ### Nightly only - Added `report` subcommand, and changed `cargo describe-future-incompatibilities` to `cargo report future-incompatibilities`. [#9438](https://github.com/rust-lang/cargo/pull/9438) - Added a `[host]` table to the config files to be able to set build flags for host target. Also added `target-applies-to-host` to control how the `[target]` tables behave. [#9322](https://github.com/rust-lang/cargo/pull/9322) - Added some validation to build script `rustc-link-arg-*` instructions to return an error if the target doesn't exist. [#9523](https://github.com/rust-lang/cargo/pull/9523) - Added `cargo:rustc-link-arg-bin` instruction for build scripts. [#9486](https://github.com/rust-lang/cargo/pull/9486) ## Cargo 1.53 (2021-06-17) [90691f2b...rust-1.53.0](https://github.com/rust-lang/cargo/compare/90691f2b...rust-1.53.0) ### Added ### Changed - πŸ”₯ Cargo now supports git repositories where the default `HEAD` branch is not "master". This also includes a switch to the version 3 `Cargo.lock` format which can handle default branches correctly. [#9133](https://github.com/rust-lang/cargo/pull/9133) [#9397](https://github.com/rust-lang/cargo/pull/9397) [#9384](https://github.com/rust-lang/cargo/pull/9384) [#9392](https://github.com/rust-lang/cargo/pull/9392) - πŸ”₯ macOS targets now default to `unpacked` split-debuginfo. [#9298](https://github.com/rust-lang/cargo/pull/9298) - ❗ The `authors` field is no longer included in `Cargo.toml` for new projects. [#9282](https://github.com/rust-lang/cargo/pull/9282) - `cargo update` may now work with the `--offline` flag. [#9279](https://github.com/rust-lang/cargo/pull/9279) - `cargo doc` will now erase the `doc` directory when switching between different toolchain versions. There are shared, unversioned files (such as the search index) that can become broken when using different versions. 
[#8640](https://github.com/rust-lang/cargo/pull/8640) [#9404](https://github.com/rust-lang/cargo/pull/9404) - Improved error messages when path dependency/workspace member is missing. [#9368](https://github.com/rust-lang/cargo/pull/9368) ### Fixed - Fixed `cargo doc` detecting if the documentation needs to be rebuilt when changing some settings such as features. [#9419](https://github.com/rust-lang/cargo/pull/9419) - `cargo doc` now deletes the output directory for the package before running rustdoc to clear out any stale files. [#9419](https://github.com/rust-lang/cargo/pull/9419) - Fixed the `-C metadata` value to always include all information for all builds. Previously, in some situations, the hash only included the package name and version. This fixes some issues, such as incremental builds with split-debuginfo on macOS corrupting the incremental cache in some cases. [#9418](https://github.com/rust-lang/cargo/pull/9418) - Fixed man pages not working on Windows if `man` is in `PATH`. [#9378](https://github.com/rust-lang/cargo/pull/9378) - The `rustc` cache is now aware of `RUSTC_WRAPPER` and `RUSTC_WORKSPACE_WRAPPER`. [#9348](https://github.com/rust-lang/cargo/pull/9348) - Track the `CARGO` environment variable in the rebuild fingerprint if the code uses `env!("CARGO")`. [#9363](https://github.com/rust-lang/cargo/pull/9363) ### Nightly only - Fixed config includes not working. [#9299](https://github.com/rust-lang/cargo/pull/9299) - Emit note when `--future-incompat-report` had nothing to report. [#9263](https://github.com/rust-lang/cargo/pull/9263) - Error messages for nightly features flags (like `-Z` and `cargo-features`) now provides more information. [#9290](https://github.com/rust-lang/cargo/pull/9290) - Added the ability to set the target for an individual package in `Cargo.toml`. 
[docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#per-package-target) [#9030](https://github.com/rust-lang/cargo/pull/9030) - Fixed build-std updating the index on every build. [#9393](https://github.com/rust-lang/cargo/pull/9393) - `-Z help` now displays all the `-Z` options. [#9369](https://github.com/rust-lang/cargo/pull/9369) - Added `-Zallow-features` to specify which nightly features are allowed to be used. [#9283](https://github.com/rust-lang/cargo/pull/9283) - Added `cargo config` subcommand. [#9302](https://github.com/rust-lang/cargo/pull/9302) ## Cargo 1.52 (2021-05-06) [34170fcd...rust-1.52.0](https://github.com/rust-lang/cargo/compare/34170fcd...rust-1.52.0) ### Added - Added the `"manifest_path"` field to JSON messages for a package. [#9022](https://github.com/rust-lang/cargo/pull/9022) [#9247](https://github.com/rust-lang/cargo/pull/9247) ### Changed - Build scripts are now forbidden from setting `RUSTC_BOOTSTRAP` on stable. [#9181](https://github.com/rust-lang/cargo/pull/9181) [#9385](https://github.com/rust-lang/cargo/pull/9385) - crates.io now supports SPDX 3.11 licenses. [#9209](https://github.com/rust-lang/cargo/pull/9209) - An error is now reported if `CARGO_TARGET_DIR` is an empty string. [#8939](https://github.com/rust-lang/cargo/pull/8939) - Doc tests now pass the `--message-format` flag into the test so that the "short" format can now be used for doc tests. [#9128](https://github.com/rust-lang/cargo/pull/9128) - `cargo test` now prints a clearer indicator of which target is currently running. [#9195](https://github.com/rust-lang/cargo/pull/9195) - The `CARGO_TARGET_<TRIPLE>_<KEY>` environment variable will now issue a warning if it is using lowercase letters. [#9169](https://github.com/rust-lang/cargo/pull/9169) ### Fixed - Fixed publication of packages with metadata and resolver fields in `Cargo.toml`. 
[#9300](https://github.com/rust-lang/cargo/pull/9300) [#9304](https://github.com/rust-lang/cargo/pull/9304) - Fixed logic for determining prefer-dynamic for a dylib which differed in a workspace vs a single package. [#9252](https://github.com/rust-lang/cargo/pull/9252) - Fixed an issue where exclusive target-specific dependencies that overlapped across dependency kinds (like regular and build-dependencies) would incorrectly include the dependencies in both. [#9255](https://github.com/rust-lang/cargo/pull/9255) - Fixed panic with certain styles of Package IDs when passed to the `-p` flag. [#9188](https://github.com/rust-lang/cargo/pull/9188) - When running cargo with output not going to a TTY, and with the progress bar and color force-enabled, the output will now correctly clear the progress line. [#9231](https://github.com/rust-lang/cargo/pull/9231) - Error instead of panic when JSON may contain non-utf8 paths. [#9226](https://github.com/rust-lang/cargo/pull/9226) - Fixed a hang that can happen on broken stderr. [#9201](https://github.com/rust-lang/cargo/pull/9201) - Fixed thin-local LTO not being disabled correctly when `lto=off` is set. [#9182](https://github.com/rust-lang/cargo/pull/9182) ### Nightly only - The `strip` profile option now supports `true` and `false` values. [#9153](https://github.com/rust-lang/cargo/pull/9153) - `cargo fix --edition` now displays a report when switching to 2021 if the new resolver changes features. [#9268](https://github.com/rust-lang/cargo/pull/9268) - Added `[patch]` table support in `.cargo/config` files. [#9204](https://github.com/rust-lang/cargo/pull/9204) - Added `cargo describe-future-incompatibilities` for generating a report on dependencies that contain future-incompatible warnings. [#8825](https://github.com/rust-lang/cargo/pull/8825) - Added easier support for testing the 2021 edition. [#9184](https://github.com/rust-lang/cargo/pull/9184) - Switch the default resolver to "2" in the 2021 edition. 
[#9184](https://github.com/rust-lang/cargo/pull/9184) - `cargo fix --edition` now supports 2021. [#9184](https://github.com/rust-lang/cargo/pull/9184) - Added `--print` flag to `cargo rustc` to pass along to `rustc` to display information from rustc. [#9002](https://github.com/rust-lang/cargo/pull/9002) - Added `-Zdoctest-in-workspace` for changing the directory where doctests are *run* versus where they are *compiled*. [#9105](https://github.com/rust-lang/cargo/pull/9105) - Added support for an `[env]` section in `.cargo/config.toml` to set environment variables when running cargo. [#9175](https://github.com/rust-lang/cargo/pull/9175) - Added a schema field and `features2` field to the index. [#9161](https://github.com/rust-lang/cargo/pull/9161) - Changes to JSON spec targets will now trigger a rebuild. [#9223](https://github.com/rust-lang/cargo/pull/9223) ## Cargo 1.51 (2021-03-25) [75d5d8cf...rust-1.51.0](https://github.com/rust-lang/cargo/compare/75d5d8cf...rust-1.51.0) ### Added - πŸ”₯ Added the `split-debuginfo` profile option. [docs](https://doc.rust-lang.org/nightly/cargo/reference/profiles.html#split-debuginfo) [#9112](https://github.com/rust-lang/cargo/pull/9112) - Added the `path` field to `cargo metadata` for the package dependencies list to show the path for "path" dependencies. [#8994](https://github.com/rust-lang/cargo/pull/8994) - πŸ”₯ Added a new feature resolver, and new CLI feature flag behavior. See the new [features](https://doc.rust-lang.org/nightly/cargo/reference/features.html#feature-resolver-version-2) and [resolver](https://doc.rust-lang.org/nightly/cargo/reference/resolver.html#feature-resolver-version-2) documentation for the `resolver = "2"` option. See the [CLI](https://doc.rust-lang.org/nightly/cargo/reference/features.html#command-line-feature-options) and [resolver 2 CLI](https://doc.rust-lang.org/nightly/cargo/reference/features.html#resolver-version-2-command-line-flags) options for the new CLI behavior. 
And, finally, see [RFC 2957](https://github.com/rust-lang/rfcs/blob/master/text/2957-cargo-features2.md) for a detailed look at what has changed. [#8997](https://github.com/rust-lang/cargo/pull/8997) ### Changed - `cargo install --locked` now emits a warning if `Cargo.lock` is not found. [#9108](https://github.com/rust-lang/cargo/pull/9108) - Unknown or ambiguous package IDs passed on the command-line now display suggestions for the correct package ID. [#9095](https://github.com/rust-lang/cargo/pull/9095) - Slightly optimize `cargo vendor` [#8937](https://github.com/rust-lang/cargo/pull/8937) [#9131](https://github.com/rust-lang/cargo/pull/9131) [#9132](https://github.com/rust-lang/cargo/pull/9132) ### Fixed - Fixed environment variables and cfg settings emitted by a build script that are set for `cargo test` and `cargo run` when the build script runs multiple times during the same build session. [#9122](https://github.com/rust-lang/cargo/pull/9122) - Fixed a panic with `cargo doc` and the new feature resolver. This also introduces some heuristics to try to avoid path collisions with `rustdoc` by only documenting one variant of a package if there are multiple (such as multiple versions, or the same package shared for host and target platforms). [#9077](https://github.com/rust-lang/cargo/pull/9077) - Fixed a bug in Cargo's cyclic dep graph detection that caused a stack overflow. [#9075](https://github.com/rust-lang/cargo/pull/9075) - Fixed build script `links` environment variables (`DEP_*`) not showing up for testing packages in some cases. [#9065](https://github.com/rust-lang/cargo/pull/9065) - Fixed features being selected in a nondeterministic way for a specific scenario when building an entire workspace with all targets with a proc-macro in the workspace with `resolver="2"`. [#9059](https://github.com/rust-lang/cargo/pull/9059) - Fixed to use `http.proxy` setting in `~/.gitconfig`. 
[#8986](https://github.com/rust-lang/cargo/pull/8986) - Fixed --feature pkg/feat for V1 resolver for non-member. [#9275](https://github.com/rust-lang/cargo/pull/9275) [#9277](https://github.com/rust-lang/cargo/pull/9277) - Fixed panic in `cargo doc` when there are colliding output filenames in a workspace. [#9276](https://github.com/rust-lang/cargo/pull/9276) [#9277](https://github.com/rust-lang/cargo/pull/9277) - Fixed `cargo install` from exiting with success if one of several packages did not install successfully. [#9185](https://github.com/rust-lang/cargo/pull/9185) [#9196](https://github.com/rust-lang/cargo/pull/9196) - Fix panic with doc collision orphan. [#9142](https://github.com/rust-lang/cargo/pull/9142) [#9196](https://github.com/rust-lang/cargo/pull/9196) ### Nightly only - Removed the `publish-lockfile` unstable feature, it was stabilized without the need for an explicit flag 1.5 years ago. [#9092](https://github.com/rust-lang/cargo/pull/9092) - Added better diagnostics, help messages, and documentation for nightly features (such as those passed with the `-Z` flag, or specified with `cargo-features` in `Cargo.toml`). [#9092](https://github.com/rust-lang/cargo/pull/9092) - Added support for Rust edition 2021. [#8922](https://github.com/rust-lang/cargo/pull/8922) - Added support for the `rust-version` field in project metadata. [#8037](https://github.com/rust-lang/cargo/pull/8037) - Added a schema field to the index. [#9161](https://github.com/rust-lang/cargo/pull/9161) [#9196](https://github.com/rust-lang/cargo/pull/9196) ## Cargo 1.50 (2021-02-11) [8662ab42...rust-1.50.0](https://github.com/rust-lang/cargo/compare/8662ab42...rust-1.50.0) ### Added - Added the `doc` field to `cargo metadata`, which indicates if a target is documented. 
[#8869](https://github.com/rust-lang/cargo/pull/8869) - Added `RUSTC_WORKSPACE_WRAPPER`, an alternate RUSTC wrapper that only runs for the local workspace packages, and caches its artifacts independently of non-wrapped builds. [#8976](https://github.com/rust-lang/cargo/pull/8976) - Added `--workspace` to `cargo update` to update only the workspace members, and not their dependencies. This is particularly useful if you update the version in `Cargo.toml` and want to update `Cargo.lock` without running any other commands. [#8725](https://github.com/rust-lang/cargo/pull/8725) ### Changed - `.crate` files uploaded to a registry are now built with reproducible settings, so that the same `.crate` file created on different machines should be identical. [#8864](https://github.com/rust-lang/cargo/pull/8864) - Git dependencies that specify more than one of `branch`, `tag`, or `rev` are now rejected. [#8984](https://github.com/rust-lang/cargo/pull/8984) - The `rerun-if-changed` build script directive can now point to a directory, in which case Cargo will check if any file in that directory changes. [#8973](https://github.com/rust-lang/cargo/pull/8973) - If Cargo cannot determine the username or email address, `cargo new` will no longer fail, and instead create an empty authors list. [#8912](https://github.com/rust-lang/cargo/pull/8912) - The progress bar width has been reduced to provide more room to display the crates currently being built. [#8892](https://github.com/rust-lang/cargo/pull/8892) - `cargo new` will now support `includeIf` directives in `.gitconfig` to match the correct directory when determining the username and email address. [#8886](https://github.com/rust-lang/cargo/pull/8886) ### Fixed - Fixed `cargo metadata` and `cargo tree` to only download packages for the requested target. [#8987](https://github.com/rust-lang/cargo/pull/8987) - Updated libgit2, which brings in many fixes, particularly fixing a zlib error that occasionally appeared on 32-bit systems. 
[#8998](https://github.com/rust-lang/cargo/pull/8998) - Fixed stack overflow with a circular dev-dependency that uses the `links` field. [#8969](https://github.com/rust-lang/cargo/pull/8969) - Fixed `cargo publish` failing on some filesystems, particularly 9p on WSL2. [#8950](https://github.com/rust-lang/cargo/pull/8950) ### Nightly only - Allow `resolver="1"` to specify the original feature resolution behavior. [#8857](https://github.com/rust-lang/cargo/pull/8857) - Added `-Z extra-link-arg` which adds the `cargo:rustc-link-arg-bins` and `cargo:rustc-link-arg` build script options. [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#extra-link-arg) [#8441](https://github.com/rust-lang/cargo/pull/8441) - Implemented external credential process support, and added `cargo logout`. ([RFC 2730](https://github.com/rust-lang/rfcs/blob/master/text/2730-cargo-token-from-process.md)) ([docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#credential-process)) [#8934](https://github.com/rust-lang/cargo/pull/8934) - Fix panic with `-Zbuild-std` and no roots. [#8942](https://github.com/rust-lang/cargo/pull/8942) - Set docs.rs as the default extern-map for crates.io [#8877](https://github.com/rust-lang/cargo/pull/8877) ## Cargo 1.49 (2020-12-31) [75615f8e...rust-1.49.0](https://github.com/rust-lang/cargo/compare/75615f8e...rust-1.49.0) ### Added - Added `homepage` and `documentation` fields to `cargo metadata`. [#8744](https://github.com/rust-lang/cargo/pull/8744) - Added the `CARGO_PRIMARY_PACKAGE` environment variable which is set when running `rustc` if the package is one of the "root" packages selected on the command line. [#8758](https://github.com/rust-lang/cargo/pull/8758) - Added support for Unix-style glob patterns for package and target selection flags on the command-line (such as `-p 'serde*'` or `--test '*'`). 
[#8752](https://github.com/rust-lang/cargo/pull/8752) ### Changed - Computed LTO flags are now included in the filename metadata hash so that changes in LTO settings will independently cache build artifacts instead of overwriting previous ones. This prevents rebuilds in some situations such as switching between `cargo build` and `cargo test` in some circumstances. [#8755](https://github.com/rust-lang/cargo/pull/8755) - `cargo tree` now displays `(proc-macro)` next to proc-macro packages. [#8765](https://github.com/rust-lang/cargo/pull/8765) - Added a warning that the allowed characters for a feature name have been restricted to letters, digits, `_`, `-`, and `+` to accommodate future syntax changes. This is still a superset of the allowed syntax on crates.io, which requires ASCII. This is intended to be changed to an error in the future. [#8814](https://github.com/rust-lang/cargo/pull/8814) - `-p` without a value will now print a list of workspace package names. [#8808](https://github.com/rust-lang/cargo/pull/8808) - Add period to allowed feature name characters. [#8932](https://github.com/rust-lang/cargo/pull/8932) [#8943](https://github.com/rust-lang/cargo/pull/8943) ### Fixed - Fixed building a library with both "dylib" and "rlib" crate types with LTO enabled. [#8754](https://github.com/rust-lang/cargo/pull/8754) - Fixed paths in Cargo's dep-info files. [#8819](https://github.com/rust-lang/cargo/pull/8819) - Fixed inconsistent source IDs in `cargo metadata` for git dependencies that explicitly specify `branch="master"`. [#8824](https://github.com/rust-lang/cargo/pull/8824) - Fixed re-extracting dependencies which contained a `.cargo-ok` file. [#8835](https://github.com/rust-lang/cargo/pull/8835) ### Nightly only - Fixed a panic with `cargo doc -Zfeatures=itarget` in some situations. [#8777](https://github.com/rust-lang/cargo/pull/8777) - New implementation for namespaced features, using the syntax `dep:serde`. 
[docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#namespaced-features) [#8799](https://github.com/rust-lang/cargo/pull/8799) - Added support for "weak" dependency features, using the syntax `dep_name?/feat_name`, which will enable a feature for a dependency without also enabling the dependency. [#8818](https://github.com/rust-lang/cargo/pull/8818) - Fixed the new feature resolver downloading extra dependencies that weren't strictly necessary. [#8823](https://github.com/rust-lang/cargo/pull/8823) ## Cargo 1.48 (2020-11-19) [51b66125...rust-1.48.0](https://github.com/rust-lang/cargo/compare/51b66125...rust-1.48.0) ### Added - Added `term.progress` configuration option to control when and how the progress bar is displayed. [docs](https://doc.rust-lang.org/nightly/cargo/reference/config.html#termprogresswhen) [#8165](https://github.com/rust-lang/cargo/pull/8165) - Added `--message-format plain` option to `cargo locate-project` to display the project location without JSON to make it easier to use in a script. [#8707](https://github.com/rust-lang/cargo/pull/8707) - Added `--workspace` option to `cargo locate-project` to display the path to the workspace manifest. [#8712](https://github.com/rust-lang/cargo/pull/8712) - A new contributor guide has been added for contributing to Cargo itself. This is published at . [#8715](https://github.com/rust-lang/cargo/pull/8715) - Zsh `--target` completion will now complete with the built-in rustc targets. [#8740](https://github.com/rust-lang/cargo/pull/8740) ### Changed ### Fixed - Fixed `cargo new` creating a fossil repository to properly ignore the `target` directory. [#8671](https://github.com/rust-lang/cargo/pull/8671) - Don't show warnings about the workspace in the current directory when using `cargo install` of a remote package. [#8681](https://github.com/rust-lang/cargo/pull/8681) - Automatically reinitialize the index when an "Object not found" error is encountered in the git repository. 
[#8735](https://github.com/rust-lang/cargo/pull/8735) - Updated libgit2, which brings in several fixes for git repository handling. [#8778](https://github.com/rust-lang/cargo/pull/8778) [#8780](https://github.com/rust-lang/cargo/pull/8780) ### Nightly only - Fixed `cargo install` so that it will ignore the `[unstable]` table in local config files. [#8656](https://github.com/rust-lang/cargo/pull/8656) - Fixed nondeterministic behavior of the new feature resolver. [#8701](https://github.com/rust-lang/cargo/pull/8701) - Fixed running `cargo test` on a proc-macro with the new feature resolver under a specific combination of circumstances. [#8742](https://github.com/rust-lang/cargo/pull/8742) ## Cargo 1.47 (2020-10-08) [4f74d9b2...rust-1.47.0](https://github.com/rust-lang/cargo/compare/4f74d9b2...rust-1.47.0) ### Added - `cargo doc` will now include the package's version in the left sidebar. [#8509](https://github.com/rust-lang/cargo/pull/8509) - Added the `test` field to `cargo metadata` targets. [#8478](https://github.com/rust-lang/cargo/pull/8478) - Cargo's man pages are now displayed via the `cargo help` command (such as `cargo help build`). [#8456](https://github.com/rust-lang/cargo/pull/8456) [#8577](https://github.com/rust-lang/cargo/pull/8577) - Added new documentation chapters on [how dependency resolution works](https://doc.rust-lang.org/nightly/cargo/reference/resolver.html) and [SemVer compatibility](https://doc.rust-lang.org/nightly/cargo/reference/semver.html), along with suggestions on how to version your project and work with dependencies. [#8609](https://github.com/rust-lang/cargo/pull/8609) ### Changed - The comments added to `.gitignore` when it is modified have been tweaked to add some spacing. [#8476](https://github.com/rust-lang/cargo/pull/8476) - `cargo metadata` output should now be sorted to be deterministic. 
[#8489](https://github.com/rust-lang/cargo/pull/8489) - By default, build scripts and proc-macros are now built with `opt-level=0` and the default codegen units, even in release mode. [#8500](https://github.com/rust-lang/cargo/pull/8500) - `workspace.default-members` is now filtered by `workspace.exclude`. [#8485](https://github.com/rust-lang/cargo/pull/8485) - `workspace.members` globs now ignore non-directory paths. [#8511](https://github.com/rust-lang/cargo/pull/8511) - git zlib errors now trigger a retry. [#8520](https://github.com/rust-lang/cargo/pull/8520) - "http" class git errors now trigger a retry. [#8553](https://github.com/rust-lang/cargo/pull/8553) - git dependencies now override the `core.autocrlf` git configuration value to ensure they behave consistently across platforms, particularly when vendoring git dependencies on Windows. [#8523](https://github.com/rust-lang/cargo/pull/8523) - If `Cargo.lock` needs to be updated, then it will be automatically transitioned to the new V2 format. This format removes the `[metadata]` table, and should be easier to merge changes in source control systems. This format was introduced in 1.38, and made the default for new projects in 1.41. [#8554](https://github.com/rust-lang/cargo/pull/8554) - Added preparation for support of git repositories with a non-"master" default branch. Actual support will arrive in a future version. This introduces some warnings: - Warn if a git dependency does not specify a branch, and the default branch on the repository is not "master". In the future, Cargo will fetch the default branch. In this scenario, the branch should be explicitly specified. - Warn if a workspace has multiple dependencies to the same git repository, one without a `branch` and one with `branch="master"`. Dependencies should all use one form or the other. [#8522](https://github.com/rust-lang/cargo/pull/8522) - Warnings are now issued if a `required-features` entry lists a feature that does not exist. 
[#7950](https://github.com/rust-lang/cargo/pull/7950) - Built-in aliases are now included in `cargo --list`. [#8542](https://github.com/rust-lang/cargo/pull/8542) - `cargo install` with a specific version that has been yanked will now display an error message that it has been yanked, instead of "could not find". [#8565](https://github.com/rust-lang/cargo/pull/8565) - `cargo publish` with a package that has the `publish` field set to a single registry, and no `--registry` flag has been given, will now publish to that registry instead of generating an error. [#8571](https://github.com/rust-lang/cargo/pull/8571) ### Fixed - Fixed issue where if a project directory was moved, and one of the build scripts did not use the `rerun-if-changed` directive, then that build script was being rebuilt when it shouldn't. [#8497](https://github.com/rust-lang/cargo/pull/8497) - Console colors should now work on Windows 7 and 8. [#8540](https://github.com/rust-lang/cargo/pull/8540) - The `CARGO_TARGET_{triplet}_RUNNER` environment variable will now correctly override the config file instead of trying to merge the commands. [#8629](https://github.com/rust-lang/cargo/pull/8629) - Fixed LTO with doctests. [#8657](https://github.com/rust-lang/cargo/pull/8657) [#8658](https://github.com/rust-lang/cargo/pull/8658) ### Nightly only - Added support for `-Z terminal-width` which tells `rustc` the width of the terminal so that it can format diagnostics better. [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#terminal-width) [#8427](https://github.com/rust-lang/cargo/pull/8427) - Added ability to configure `-Z` unstable flags in config files via the `[unstable]` table. [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html) [#8393](https://github.com/rust-lang/cargo/pull/8393) - Added `-Z build-std-features` flag to set features for the standard library. 
[docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#build-std-features) [#8490](https://github.com/rust-lang/cargo/pull/8490) ## Cargo 1.46 (2020-08-27) [9fcb8c1d...rust-1.46.0](https://github.com/rust-lang/cargo/compare/9fcb8c1d...rust-1.46.0) ### Added - The `dl` key in `config.json` of a registry index now supports the replacement markers `{prefix}` and `{lowerprefix}` to allow spreading crates across directories similar to how the index itself is structured. [docs](https://doc.rust-lang.org/nightly/cargo/reference/registries.html#index-format) [#8267](https://github.com/rust-lang/cargo/pull/8267) - Added new environment variables that are set during compilation: - `CARGO_CRATE_NAME`: The name of the crate being built. - `CARGO_BIN_NAME`: The name of the executable binary (if this is a binary crate). - `CARGO_PKG_LICENSE`: The `license` field from the manifest. - `CARGO_PKG_LICENSE_FILE`: The `license-file` field from the manifest. [#8270](https://github.com/rust-lang/cargo/pull/8270) [#8325](https://github.com/rust-lang/cargo/pull/8325) [#8387](https://github.com/rust-lang/cargo/pull/8387) - If the value for `readme` is not specified in `Cargo.toml`, it is now automatically inferred from the existence of a file named `README`, `README.md`, or `README.txt`. This can be suppressed by setting `readme = false`. [#8277](https://github.com/rust-lang/cargo/pull/8277) - `cargo install` now supports the `--index` flag to install directly from an index. [#8344](https://github.com/rust-lang/cargo/pull/8344) - Added the `metadata` table to the `workspace` definition in `Cargo.toml`. This can be used for arbitrary data similar to the `package.metadata` table. [#8323](https://github.com/rust-lang/cargo/pull/8323) - Added the `--target-dir` flag to `cargo install` to set the target directory. 
[#8391](https://github.com/rust-lang/cargo/pull/8391) - Changes to environment variables used by the [`env!`](https://doc.rust-lang.org/std/macro.env.html) or [`option_env!`](https://doc.rust-lang.org/std/macro.option_env.html) macros are now automatically detected to trigger a rebuild. [#8421](https://github.com/rust-lang/cargo/pull/8421) - The `target` directory now includes the `CACHEDIR.TAG` file which is used by some tools to exclude the directory from backups. [#8378](https://github.com/rust-lang/cargo/pull/8378) - Added docs about rustup's `+toolchain` syntax. [#8455](https://github.com/rust-lang/cargo/pull/8455) ### Changed - A warning is now displayed if a git dependency includes a `#` fragment in the URL. This was potentially confusing because Cargo itself displays git URLs with this syntax, but it does not have any meaning outside of the `Cargo.lock` file, and would not work properly. [#8297](https://github.com/rust-lang/cargo/pull/8297) - Various optimizations and fixes for bitcode embedding and LTO. [#8349](https://github.com/rust-lang/cargo/pull/8349) - Reduced the amount of data fetched for git dependencies. If Cargo knows the branch or tag to fetch, it will now only fetch that branch or tag instead of all branches and tags. [#8363](https://github.com/rust-lang/cargo/pull/8363) - Enhanced git fetch error messages. [#8409](https://github.com/rust-lang/cargo/pull/8409) - `.crate` files are now generated with GNU tar format instead of UStar, which supports longer file names. [#8453](https://github.com/rust-lang/cargo/pull/8453) ### Fixed - Fixed a rare situation where an update to `Cargo.lock` failed once, but then subsequent runs allowed it proceed. [#8274](https://github.com/rust-lang/cargo/pull/8274) - Removed assertion that Windows dylibs must have a `.dll` extension. Some custom JSON spec targets may change the extension. 
[#8310](https://github.com/rust-lang/cargo/pull/8310) - Updated libgit2, which brings in a fix for zlib errors for some remote git servers like googlesource.com. [#8320](https://github.com/rust-lang/cargo/pull/8320) - Fixed the GitHub fast-path check for up-to-date git dependencies on non-master branches. [#8363](https://github.com/rust-lang/cargo/pull/8363) - Fixed issue when enabling a feature with `pkg/feature` syntax, and `pkg` is an optional dependency, but also a dev-dependency, and the dev-dependency appears before the optional normal dependency in the registry summary, then the optional dependency would not get activated. [#8395](https://github.com/rust-lang/cargo/pull/8395) - Fixed `clean -p` deleting the build directory if there is a test named `build`. [#8398](https://github.com/rust-lang/cargo/pull/8398) - Fixed indentation of multi-line Cargo error messages. [#8409](https://github.com/rust-lang/cargo/pull/8409) - Fixed issue where the automatic inclusion of the `--document-private-items` flag for rustdoc would override any flags passed to the `cargo rustdoc` command. [#8449](https://github.com/rust-lang/cargo/pull/8449) - Cargo will now include a version in the hash of the fingerprint directories to support backwards-incompatible changes to the fingerprint structure. [#8473](https://github.com/rust-lang/cargo/pull/8473) [#8488](https://github.com/rust-lang/cargo/pull/8488) ### Nightly only - Added `-Zrustdoc-map` feature which provides external mappings for rustdoc (such as https://docs.rs/ links). [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#rustdoc-map) [#8287](https://github.com/rust-lang/cargo/pull/8287) - Fixed feature calculation when a proc-macro is declared in `Cargo.toml` with an underscore (like `proc_macro = true`). [#8319](https://github.com/rust-lang/cargo/pull/8319) - Added support for setting `-Clinker` with `-Zdoctest-xcompile`. 
[#8359](https://github.com/rust-lang/cargo/pull/8359) - Fixed setting the `strip` profile field in config files. [#8454](https://github.com/rust-lang/cargo/pull/8454) ## Cargo 1.45 (2020-07-16) [ebda5065e...rust-1.45.0](https://github.com/rust-lang/cargo/compare/ebda5065...rust-1.45.0) ### Added ### Changed - Changed official documentation to recommend `.cargo/config.toml` filenames (with the `.toml` extension). `.toml` extension support was added in 1.39. [#8121](https://github.com/rust-lang/cargo/pull/8121) - The `registry.index` config value is no longer allowed (it has been deprecated for 4 years). [#7973](https://github.com/rust-lang/cargo/pull/7973) - An error is generated if both `--index` and `--registry` are passed (previously `--index` was silently ignored). [#7973](https://github.com/rust-lang/cargo/pull/7973) - The `registry.token` config value is no longer used with the `--index` flag. This is intended to avoid potentially leaking the crates.io token to another registry. [#7973](https://github.com/rust-lang/cargo/pull/7973) - Added a warning if `registry.token` is used with source replacement. It is intended this will be an error in future versions. [#7973](https://github.com/rust-lang/cargo/pull/7973) - Windows GNU targets now copy `.dll.a` import library files for DLL crate types to the output directory. [#8141](https://github.com/rust-lang/cargo/pull/8141) - Dylibs for all dependencies are now unconditionally copied to the output directory. Some obscure scenarios can cause an old dylib to be referenced between builds, and this ensures that all the latest copies are used. [#8139](https://github.com/rust-lang/cargo/pull/8139) - `package.exclude` can now match directory names. If a directory is specified, the entire directory will be excluded, and Cargo will not attempt to inspect it further. Previously Cargo would try to check every file in the directory which could cause problems if the directory contained unreadable files. 
[#8095](https://github.com/rust-lang/cargo/pull/8095) - When packaging with `cargo publish` or `cargo package`, Cargo can use git to guide its decision on which files to include. Previously this git-based logic required a `Cargo.toml` file to exist at the root of the repository. This is no longer required, so Cargo will now use git-based guidance even if there is not a `Cargo.toml` in the root of the repository. [#8095](https://github.com/rust-lang/cargo/pull/8095) - While unpacking a crate on Windows, if it fails to write a file because the file is a reserved Windows filename (like "aux.rs"), Cargo will display an extra message to explain why it failed. [#8136](https://github.com/rust-lang/cargo/pull/8136) - Failures to set mtime on files are now ignored. Some filesystems did not support this. [#8185](https://github.com/rust-lang/cargo/pull/8185) - Certain classes of git errors will now recommend enabling `net.git-fetch-with-cli`. [#8166](https://github.com/rust-lang/cargo/pull/8166) - When doing an LTO build, Cargo will now instruct rustc not to perform codegen when possible. This may result in a faster build and use less disk space. Additionally, for non-LTO builds, Cargo will instruct rustc to not embed LLVM bitcode in libraries, which should decrease their size. [#8192](https://github.com/rust-lang/cargo/pull/8192) [#8226](https://github.com/rust-lang/cargo/pull/8226) [#8254](https://github.com/rust-lang/cargo/pull/8254) - The implementation for `cargo clean -p` has been rewritten so that it can more accurately remove the files for a specific package. [#8210](https://github.com/rust-lang/cargo/pull/8210) - The way Cargo computes the outputs from a build has been rewritten to be more complete and accurate. Newly tracked files will be displayed in JSON messages, and may be uplifted to the output directory in some cases. Some of the changes from this are: - `.exp` export files on Windows MSVC dynamic libraries are now tracked. 
- Proc-macros on Windows track import/export files. - All targets (like tests, etc.) that generate separate debug files (pdb/dSYM) are tracked. - Added .map files for wasm32-unknown-emscripten. - macOS dSYM directories are tracked for all dynamic libraries (dylib/cdylib/proc-macro) and for build scripts. There are a variety of other changes as a consequence of this: - Binary examples on Windows MSVC with a hyphen will now show up twice in the examples directory (`foo_bar.exe` and `foo-bar.exe`). Previously Cargo just renamed the file instead of hard-linking it. - Example libraries now follow the same rules for hyphen/underscore translation as normal libs (they will now use underscores). [#8210](https://github.com/rust-lang/cargo/pull/8210) - Cargo attempts to scrub any secrets from the debug log for HTTP debugging. [#8222](https://github.com/rust-lang/cargo/pull/8222) - Context has been added to many of Cargo's filesystem operations, so that error messages now provide more information, such as the path that caused the problem. [#8232](https://github.com/rust-lang/cargo/pull/8232) - Several commands now ignore the error if stdout or stderr is closed while it is running. For example `cargo install --list | grep -q cargo-fuzz` would previously sometimes panic because `grep -q` may close stdout before the command finishes. Regular builds continue to fail if stdout or stderr is closed, matching the behavior of many other build systems. [#8236](https://github.com/rust-lang/cargo/pull/8236) - If `cargo install` is given an exact version, like `--version=1.2.3`, it will now avoid updating the index if that version is already installed, and exit quickly indicating it is already installed. [#8022](https://github.com/rust-lang/cargo/pull/8022) - Changes to the `[patch]` section will now attempt to automatically update `Cargo.lock` to the new version. It should now also provide better error messages for the rare cases where it is unable to automatically update. 
[#8248](https://github.com/rust-lang/cargo/pull/8248) ### Fixed - Fixed copying Windows `.pdb` files to the output directory when the filename contained dashes. [#8123](https://github.com/rust-lang/cargo/pull/8123) - Fixed error where Cargo would fail when scanning if a package is inside a git repository when any of its ancestor paths is a symlink. [#8186](https://github.com/rust-lang/cargo/pull/8186) - Fixed `cargo update` with an unused `[patch]` so that it does not get stuck and refuse to update. [#8243](https://github.com/rust-lang/cargo/pull/8243) - Fixed a situation where Cargo would hang if stderr is closed, and the compiler generated a large number of messages. [#8247](https://github.com/rust-lang/cargo/pull/8247) - Fixed backtraces on macOS not showing filenames or line numbers. As a consequence of this, binary executables on apple targets do not include a hash in the filename in Cargo's cache. This means Cargo can only track one copy, so if you switch features or rustc versions, Cargo will need to rebuild the executable. [#8329](https://github.com/rust-lang/cargo/pull/8329) [#8335](https://github.com/rust-lang/cargo/pull/8335) - Fixed fingerprinting when using lld on Windows with a dylib. Cargo was erroneously thinking the dylib was never fresh. [#8290](https://github.com/rust-lang/cargo/pull/8290) [#8335](https://github.com/rust-lang/cargo/pull/8335) ### Nightly only - Fixed passing the full path for `--target` to `rustdoc` when using JSON spec targets. [#8094](https://github.com/rust-lang/cargo/pull/8094) - `-Cembed-bitcode=no` renamed to `-Cbitcode-in-rlib=no` [#8134](https://github.com/rust-lang/cargo/pull/8134) - Added new `resolver` field to `Cargo.toml` to opt-in to the new feature resolver. [#8129](https://github.com/rust-lang/cargo/pull/8129) - `-Zbuild-std` no longer treats std dependencies as "local". This means that it won't use incremental compilation for those dependencies, removes them from dep-info files, and caps lints at "allow". 
[#8177](https://github.com/rust-lang/cargo/pull/8177) - Added `-Zmultitarget` which allows multiple `--target` flags to build the same thing for multiple targets at once. [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#multitarget) [#8167](https://github.com/rust-lang/cargo/pull/8167) - Added `strip` option to the profile to remove symbols and debug information. [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#profile-strip-option) [#8246](https://github.com/rust-lang/cargo/pull/8246) - Fixed panic with `cargo tree --target=all -Zfeatures=all`. [#8269](https://github.com/rust-lang/cargo/pull/8269) ## Cargo 1.44 (2020-06-04) [bda50510...rust-1.44.0](https://github.com/rust-lang/cargo/compare/bda50510...rust-1.44.0) ### Added - πŸ”₯ Added the `cargo tree` command. [docs](https://doc.rust-lang.org/nightly/cargo/commands/cargo-tree.html) [#8062](https://github.com/rust-lang/cargo/pull/8062) - Added warnings if a package has Windows-restricted filenames (like `nul`, `con`, `aux`, `prn`, etc.). [#7959](https://github.com/rust-lang/cargo/pull/7959) - Added a `"build-finished"` JSON message when compilation is complete so that tools can detect when they can stop listening for JSON messages with commands like `cargo run` or `cargo test`. [#8069](https://github.com/rust-lang/cargo/pull/8069) ### Changed - Valid package names are now restricted to Unicode XID identifiers. This is mostly the same as before, except package names cannot start with a number or `-`. [#7959](https://github.com/rust-lang/cargo/pull/7959) - `cargo new` and `init` will warn or reject additional package names (reserved Windows names, reserved Cargo directories, non-ASCII names, conflicting std names like `core`, etc.). [#7959](https://github.com/rust-lang/cargo/pull/7959) - Tests are no longer hard-linked into the output directory (`target/debug/`). This ensures tools will have access to debug symbols and execute tests in the same way as Cargo. 
Tools should use JSON messages to discover the path to the executable. [#7965](https://github.com/rust-lang/cargo/pull/7965) - Updating git submodules now displays an "Updating" message for each submodule. [#7989](https://github.com/rust-lang/cargo/pull/7989) - File modification times are now preserved when extracting a `.crate` file. This reverses the change made in 1.40 where the mtime was not preserved. [#7935](https://github.com/rust-lang/cargo/pull/7935) - Build script warnings are now displayed separately when the build script fails. [#8017](https://github.com/rust-lang/cargo/pull/8017) - Removed the `git-checkout` subcommand. [#8040](https://github.com/rust-lang/cargo/pull/8040) - The progress bar is now enabled for all unix platforms. Previously it was only Linux, macOS, and FreeBSD. [#8054](https://github.com/rust-lang/cargo/pull/8054) - Artifacts generated by pre-release versions of `rustc` now share the same filenames. This means that changing nightly versions will not leave stale files in the build directory. [#8073](https://github.com/rust-lang/cargo/pull/8073) - Invalid package names are rejected when using renamed dependencies. [#8090](https://github.com/rust-lang/cargo/pull/8090) - Added a certain class of HTTP2 errors as "spurious" that will get retried. [#8102](https://github.com/rust-lang/cargo/pull/8102) - Allow `cargo package --list` to succeed, even if there are other validation errors (such as `Cargo.lock` generation problem, or missing dependencies). [#8175](https://github.com/rust-lang/cargo/pull/8175) [#8215](https://github.com/rust-lang/cargo/pull/8215) ### Fixed - Cargo no longer buffers excessive amounts of compiler output in memory. [#7838](https://github.com/rust-lang/cargo/pull/7838) - Symbolic links in git repositories now work on Windows. 
[#7996](https://github.com/rust-lang/cargo/pull/7996) - Fixed an issue where `profile.dev` was not loaded from a config file with `cargo test` when the `dev` profile was not defined in `Cargo.toml`. [#8012](https://github.com/rust-lang/cargo/pull/8012) - When a binary is built as an implicit dependency of an integration test, it now checks `dep_name/feature_name` syntax in `required-features` correctly. [#8020](https://github.com/rust-lang/cargo/pull/8020) - Fixed an issue where Cargo would not detect that an executable (such as an integration test) needs to be rebuilt when the previous build was interrupted with Ctrl-C. [#8087](https://github.com/rust-lang/cargo/pull/8087) - Protect against some (unknown) situations where Cargo could panic when the system monotonic clock doesn't appear to be monotonic. [#8114](https://github.com/rust-lang/cargo/pull/8114) - Fixed panic with `cargo clean -p` if the package has a build script. [#8216](https://github.com/rust-lang/cargo/pull/8216) ### Nightly only - Fixed panic with new feature resolver and required-features. [#7962](https://github.com/rust-lang/cargo/pull/7962) - Added `RUSTC_WORKSPACE_WRAPPER` environment variable, which provides a way to wrap `rustc` for workspace members only, and affects the filename hash so that artifacts produced by the wrapper are cached separately. This usage can be seen on nightly clippy with `cargo clippy -Zunstable-options`. [#7533](https://github.com/rust-lang/cargo/pull/7533) - Added `--unit-graph` CLI option to display Cargo's internal dependency graph as JSON. [#7977](https://github.com/rust-lang/cargo/pull/7977) - Changed `-Zbuild_dep` to `-Zhost_dep`, and added proc-macros to the feature decoupling logic. [#8003](https://github.com/rust-lang/cargo/pull/8003) [#8028](https://github.com/rust-lang/cargo/pull/8028) - Fixed so that `--crate-version` is not automatically passed when the flag is found in `RUSTDOCFLAGS`. 
[#8014](https://github.com/rust-lang/cargo/pull/8014) - Fixed panic with `-Zfeatures=dev_dep` and `check --profile=test`. [#8027](https://github.com/rust-lang/cargo/pull/8027) - Fixed panic with `-Zfeatures=itarget` with certain host dependencies. [#8048](https://github.com/rust-lang/cargo/pull/8048) - Added support for `-Cembed-bitcode=no`, which provides a performance boost and disk-space usage reduction for non-LTO builds. [#8066](https://github.com/rust-lang/cargo/pull/8066) - `-Zpackage-features` has been extended with several changes intended to make it easier to select features on the command-line in a workspace. [#8074](https://github.com/rust-lang/cargo/pull/8074) ## Cargo 1.43 (2020-04-23) [9d32b7b0...rust-1.43.0](https://github.com/rust-lang/cargo/compare/9d32b7b0...rust-1.43.0) ### Added - πŸ”₯ Profiles may now be specified in config files (and environment variables). [docs](https://doc.rust-lang.org/nightly/cargo/reference/config.html#profile) [#7823](https://github.com/rust-lang/cargo/pull/7823) - ❗ Added `CARGO_BIN_EXE_` environment variable when building integration tests. This variable contains the path to any `[[bin]]` targets in the package. Integration tests should use the `env!` macro to determine the path to a binary to execute. [docs](https://doc.rust-lang.org/nightly/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-crates) [#7697](https://github.com/rust-lang/cargo/pull/7697) ### Changed - `cargo install --git` now honors workspaces in a git repository. This allows workspace settings, like `[patch]`, `[replace]`, or `[profile]` to be used. [#7768](https://github.com/rust-lang/cargo/pull/7768) - `cargo new` will now run `rustfmt` on the new files to pick up rustfmt settings like `tab_spaces` so that the new file matches the user's preferred indentation settings. [#7827](https://github.com/rust-lang/cargo/pull/7827) - Environment variables printed with "very verbose" output (`-vv`) are now consistently sorted. 
[#7877](https://github.com/rust-lang/cargo/pull/7877) - Debug logging for fingerprint rebuild-detection now includes more information. [#7888](https://github.com/rust-lang/cargo/pull/7888) [#7890](https://github.com/rust-lang/cargo/pull/7890) [#7952](https://github.com/rust-lang/cargo/pull/7952) - Added warning during publish if the license-file doesn't exist. [#7905](https://github.com/rust-lang/cargo/pull/7905) - The `license-file` file is automatically included during publish, even if it is not explicitly listed in the `include` list or is in a location outside of the root of the package. [#7905](https://github.com/rust-lang/cargo/pull/7905) - `CARGO_CFG_DEBUG_ASSERTIONS` and `CARGO_CFG_PROC_MACRO` are no longer set when running a build script. These were inadvertently set in the past, but had no meaning as they were always true. Additionally, `cfg(proc-macro)` is no longer supported in a `target` expression. [#7943](https://github.com/rust-lang/cargo/pull/7943) [#7970](https://github.com/rust-lang/cargo/pull/7970) ### Fixed - Global command-line flags now work with aliases (like `cargo -v b`). [#7837](https://github.com/rust-lang/cargo/pull/7837) - Required-features using dependency syntax (like `renamed_dep/feat_name`) now handle renamed dependencies correctly. [#7855](https://github.com/rust-lang/cargo/pull/7855) - If a build script is run multiple times during the same build, Cargo will now keep the results separate instead of losing the output of the first execution. [#7857](https://github.com/rust-lang/cargo/pull/7857) - Fixed incorrect interpretation of environment variable `CARGO_TARGET_*_RUNNER=true` as a boolean. Also improved related env var error messages. [#7891](https://github.com/rust-lang/cargo/pull/7891) - Updated internal libgit2 library, bringing various fixes to git support. 
[#7939](https://github.com/rust-lang/cargo/pull/7939) - `cargo package` / `cargo publish` should no longer buffer the entire contents of each file in memory. [#7946](https://github.com/rust-lang/cargo/pull/7946) - Ignore more invalid `Cargo.toml` files in a git dependency. Cargo currently walks the entire repo to find the requested package. Certain invalid manifests were already skipped, and now it should skip all of them. [#7947](https://github.com/rust-lang/cargo/pull/7947) ### Nightly only - Added `build.out-dir` config variable to set the output directory. [#7810](https://github.com/rust-lang/cargo/pull/7810) - Added `-Zjobserver-per-rustc` feature to support improved performance for parallel rustc. [#7731](https://github.com/rust-lang/cargo/pull/7731) - Fixed filename collision with `build-std` and crates like `cc`. [#7860](https://github.com/rust-lang/cargo/pull/7860) - `-Ztimings` will now save its report even if there is an error. [#7872](https://github.com/rust-lang/cargo/pull/7872) - Updated `--config` command-line flag to support taking a path to a config file to load. [#7901](https://github.com/rust-lang/cargo/pull/7901) - Added new feature resolver. [#7820](https://github.com/rust-lang/cargo/pull/7820) - Rustdoc docs now automatically include the version of the package in the side bar (requires `-Z crate-versions` flag). [#7903](https://github.com/rust-lang/cargo/pull/7903) ## Cargo 1.42 (2020-03-12) [0bf7aafe...rust-1.42.0](https://github.com/rust-lang/cargo/compare/0bf7aafe...rust-1.42.0) ### Added - Added documentation on git authentication. [#7658](https://github.com/rust-lang/cargo/pull/7658) - Bitbucket Pipeline badges are now supported on crates.io. [#7663](https://github.com/rust-lang/cargo/pull/7663) - `cargo vendor` now accepts the `--versioned-dirs` option to force it to always include the version number in each package's directory name. 
[#7631](https://github.com/rust-lang/cargo/pull/7631) - The `proc_macro` crate is now automatically added to the extern prelude for proc-macro packages. This means that `extern crate proc_macro;` is no longer necessary for proc-macros. [#7700](https://github.com/rust-lang/cargo/pull/7700) ### Changed - Emit a warning if `debug_assertions`, `test`, `proc_macro`, or `feature=` is used in a `cfg()` expression. [#7660](https://github.com/rust-lang/cargo/pull/7660) - Large update to the Cargo documentation, adding new chapters on Cargo targets, workspaces, and features. [#7733](https://github.com/rust-lang/cargo/pull/7733) - Windows: `.lib` DLL import libraries are now copied next to the dll for all Windows MSVC targets. Previously it was only supported for `pc-windows-msvc`. This adds DLL support for `uwp-windows-msvc` targets. [#7758](https://github.com/rust-lang/cargo/pull/7758) - The `ar` field in the `[target]` configuration is no longer read. It has been ignored for over 4 years. [#7763](https://github.com/rust-lang/cargo/pull/7763) - Bash completion file simplified and updated for latest changes. [#7789](https://github.com/rust-lang/cargo/pull/7789) - Credentials are only loaded when needed, instead of every Cargo command. [#7774](https://github.com/rust-lang/cargo/pull/7774) ### Fixed - Removed `--offline` empty index check, which was a false positive in some cases. [#7655](https://github.com/rust-lang/cargo/pull/7655) - Files and directories starting with a `.` can now be included in a package by adding it to the `include` list. [#7680](https://github.com/rust-lang/cargo/pull/7680) - Fixed `cargo login` removing alternative registry tokens when previous entries existed in the credentials file. [#7708](https://github.com/rust-lang/cargo/pull/7708) - Fixed `cargo vendor` from panicking when used with alternative registries. [#7718](https://github.com/rust-lang/cargo/pull/7718) - Fixed incorrect explanation in the fingerprint debug log message. 
[#7749](https://github.com/rust-lang/cargo/pull/7749) - A `[source]` that is defined multiple times will now result in an error. Previously it was randomly picking a source, which could cause non-deterministic behavior. [#7751](https://github.com/rust-lang/cargo/pull/7751) - `dep_kinds` in `cargo metadata` are now de-duplicated. [#7756](https://github.com/rust-lang/cargo/pull/7756) - Fixed packaging where `Cargo.lock` was listed in `.gitignore` in a subdirectory inside a git repository. Previously it was assuming `Cargo.lock` was at the root of the repo. [#7779](https://github.com/rust-lang/cargo/pull/7779) - Partial file transfer errors will now cause an automatic retry. [#7788](https://github.com/rust-lang/cargo/pull/7788) - Linux: Fixed panic if CPU iowait stat decreases. [#7803](https://github.com/rust-lang/cargo/pull/7803) - Fixed using the wrong sysroot for detecting host compiler settings when `--sysroot` is passed in via `RUSTFLAGS`. [#7798](https://github.com/rust-lang/cargo/pull/7798) ### Nightly only - `build-std` now uses `--extern` instead of `--sysroot` to find sysroot packages. [#7699](https://github.com/rust-lang/cargo/pull/7699) - Added `--config` command-line option to set config settings. [#7649](https://github.com/rust-lang/cargo/pull/7649) - Added `include` config setting which allows including another config file. [#7649](https://github.com/rust-lang/cargo/pull/7649) - Profiles in config files now support any named profile. Previously it was limited to dev/release. [#7750](https://github.com/rust-lang/cargo/pull/7750) ## Cargo 1.41 (2020-01-30) [5da4b4d4...rust-1.41.0](https://github.com/rust-lang/cargo/compare/5da4b4d4...rust-1.41.0) ### Added - πŸ”₯ Cargo now uses a new `Cargo.lock` file format. This new format should support easier merges in source control systems. Projects using the old format will continue to use the old format, only new `Cargo.lock` files will use the new format. 
[#7579](https://github.com/rust-lang/cargo/pull/7579) - πŸ”₯ `cargo install` will now upgrade already installed packages instead of failing. [#7560](https://github.com/rust-lang/cargo/pull/7560) - πŸ”₯ Profile overrides have been added. This allows overriding profiles for individual dependencies or build scripts. See [the documentation](https://doc.rust-lang.org/nightly/cargo/reference/profiles.html#overrides) for more. [#7591](https://github.com/rust-lang/cargo/pull/7591) - Added new documentation for build scripts. [#7565](https://github.com/rust-lang/cargo/pull/7565) - Added documentation for Cargo's JSON output. [#7595](https://github.com/rust-lang/cargo/pull/7595) - Significant expansion of config and environment variable documentation. [#7650](https://github.com/rust-lang/cargo/pull/7650) - Add back support for `BROWSER` environment variable for `cargo doc --open`. [#7576](https://github.com/rust-lang/cargo/pull/7576) - Added `kind` and `platform` for dependencies in `cargo metadata`. [#7132](https://github.com/rust-lang/cargo/pull/7132) - The `OUT_DIR` value is now included in the `build-script-executed` JSON message. [#7622](https://github.com/rust-lang/cargo/pull/7622) ### Changed - `cargo doc` will now document private items in binaries by default. [#7593](https://github.com/rust-lang/cargo/pull/7593) - Subcommand typo suggestions now include aliases. [#7486](https://github.com/rust-lang/cargo/pull/7486) - Tweak how the "already existing..." comment is added to `.gitignore`. [#7570](https://github.com/rust-lang/cargo/pull/7570) - Ignore `cargo login` text from copy/paste in token. [#7588](https://github.com/rust-lang/cargo/pull/7588) - Windows: Ignore errors for locking files when not supported by the filesystem. [#7602](https://github.com/rust-lang/cargo/pull/7602) - Remove `**/*.rs.bk` from `.gitignore`. [#7647](https://github.com/rust-lang/cargo/pull/7647) ### Fixed - Fix unused warnings for some keys in the `build` config section. 
[#7575](https://github.com/rust-lang/cargo/pull/7575) - Linux: Don't panic when parsing `/proc/stat`. [#7580](https://github.com/rust-lang/cargo/pull/7580) - Don't show canonical path in `cargo vendor`. [#7629](https://github.com/rust-lang/cargo/pull/7629) ### Nightly only ## Cargo 1.40 (2019-12-19) [1c6ec66d...5da4b4d4](https://github.com/rust-lang/cargo/compare/1c6ec66d...5da4b4d4) ### Added - Added `http.ssl-version` config option to control the version of TLS, along with min/max versions. [#7308](https://github.com/rust-lang/cargo/pull/7308) - πŸ”₯ Compiler warnings are now cached on disk. If a build generates warnings, re-running the build will now re-display the warnings. [#7450](https://github.com/rust-lang/cargo/pull/7450) - Added `--filter-platform` option to `cargo metadata` to narrow the nodes shown in the resolver graph to only packages included for the given target triple. [#7376](https://github.com/rust-lang/cargo/pull/7376) ### Changed - Cargo's "platform" `cfg` parsing has been extracted into a separate crate named `cargo-platform`. [#7375](https://github.com/rust-lang/cargo/pull/7375) - Dependencies extracted into Cargo's cache no longer preserve mtimes to reduce syscall overhead. [#7465](https://github.com/rust-lang/cargo/pull/7465) - Windows: EXE files no longer include a metadata hash in the filename. This helps with debuggers correlating the filename with the PDB file. [#7400](https://github.com/rust-lang/cargo/pull/7400) - Wasm32: `.wasm` files are no longer treated as an "executable", allowing `cargo test` and `cargo run` to work properly with the generated `.js` file. [#7476](https://github.com/rust-lang/cargo/pull/7476) - crates.io now supports SPDX 3.6 licenses. [#7481](https://github.com/rust-lang/cargo/pull/7481) - Improved cyclic dependency error message. [#7470](https://github.com/rust-lang/cargo/pull/7470) - Bare `cargo clean` no longer locks the package cache. 
[#7502](https://github.com/rust-lang/cargo/pull/7502) - `cargo publish` now allows dev-dependencies without a version key to be published. A git or path-only dev-dependency will be removed from the package manifest before uploading. [#7333](https://github.com/rust-lang/cargo/pull/7333) - `--features` and `--no-default-features` in the root of a virtual workspace will now generate an error instead of being ignored. [#7507](https://github.com/rust-lang/cargo/pull/7507) - Generated files (like `Cargo.toml` and `Cargo.lock`) in a package archive now have their timestamp set to the current time instead of the epoch. [#7523](https://github.com/rust-lang/cargo/pull/7523) - The `-Z` flag parser is now more strict, rejecting more invalid syntax. [#7531](https://github.com/rust-lang/cargo/pull/7531) ### Fixed - Fixed an issue where if a package had an `include` field, and `Cargo.lock` in `.gitignore`, and a binary or example target, and the `Cargo.lock` exists in the current project, it would fail to publish complaining the `Cargo.lock` was dirty. [#7448](https://github.com/rust-lang/cargo/pull/7448) - Fixed a panic in a particular combination of `[patch]` entries. [#7452](https://github.com/rust-lang/cargo/pull/7452) - Windows: Better error message when `cargo test` or `rustc` crashes in an abnormal way, such as a signal or seg fault. [#7535](https://github.com/rust-lang/cargo/pull/7535) ### Nightly only - The `mtime-on-use` feature may now be enabled via the `unstable.mtime_on_use` config option. [#7411](https://github.com/rust-lang/cargo/pull/7411) - Added support for named profiles. [#6989](https://github.com/rust-lang/cargo/pull/6989) - Added `-Zpanic-abort-tests` to allow building and running tests with the "abort" panic strategy. [#7460](https://github.com/rust-lang/cargo/pull/7460) - Changed `build-std` to use `--sysroot`. [#7421](https://github.com/rust-lang/cargo/pull/7421) - Various fixes and enhancements to `-Ztimings`. 
[#7395](https://github.com/rust-lang/cargo/pull/7395) [#7398](https://github.com/rust-lang/cargo/pull/7398) [#7397](https://github.com/rust-lang/cargo/pull/7397) [#7403](https://github.com/rust-lang/cargo/pull/7403) [#7428](https://github.com/rust-lang/cargo/pull/7428) [#7429](https://github.com/rust-lang/cargo/pull/7429) - Profile overrides have renamed the syntax to be `[profile.dev.package.NAME]`. [#7504](https://github.com/rust-lang/cargo/pull/7504) - Fixed warnings for unused profile overrides in a workspace. [#7536](https://github.com/rust-lang/cargo/pull/7536) ## Cargo 1.39 (2019-11-07) [e853aa97...1c6ec66d](https://github.com/rust-lang/cargo/compare/e853aa97...1c6ec66d) ### Added - Config files may now use the `.toml` filename extension. [#7295](https://github.com/rust-lang/cargo/pull/7295) - The `--workspace` flag has been added as an alias for `--all` to help avoid confusion about the meaning of "all". [#7241](https://github.com/rust-lang/cargo/pull/7241) - The `publish` field has been added to `cargo metadata`. [#7354](https://github.com/rust-lang/cargo/pull/7354) ### Changed - Display more information if parsing the output from `rustc` fails. [#7236](https://github.com/rust-lang/cargo/pull/7236) - TOML errors now show the column number. [#7248](https://github.com/rust-lang/cargo/pull/7248) - `cargo vendor` no longer deletes files in the `vendor` directory that start with a `.`. [#7242](https://github.com/rust-lang/cargo/pull/7242) - `cargo fetch` will now show manifest warnings. [#7243](https://github.com/rust-lang/cargo/pull/7243) - `cargo publish` will now check whether git submodules contain any uncommitted changes. [#7245](https://github.com/rust-lang/cargo/pull/7245) - In a build script, `cargo:rustc-flags` now allows `-l` and `-L` flags without spaces. 
[#7257](https://github.com/rust-lang/cargo/pull/7257) - When `cargo install` replaces an older version of a package it will now delete any installed binaries that are no longer present in the newly installed version. [#7246](https://github.com/rust-lang/cargo/pull/7246) - A git dependency may now also specify a `version` key when published. The `git` value will be stripped from the uploaded crate, matching the behavior of `path` dependencies. [#7237](https://github.com/rust-lang/cargo/pull/7237) - The behavior of workspace default-members has changed. The default-members now only applies when running Cargo in the root of the workspace. Previously it would always apply regardless of which directory Cargo is running in. [#7270](https://github.com/rust-lang/cargo/pull/7270) - libgit2 updated pulling in all upstream changes. [#7275](https://github.com/rust-lang/cargo/pull/7275) - Bump `home` dependency for locating home directories. [#7277](https://github.com/rust-lang/cargo/pull/7277) - zsh completions have been updated. [#7296](https://github.com/rust-lang/cargo/pull/7296) - SSL connect errors are now retried. [#7318](https://github.com/rust-lang/cargo/pull/7318) - The jobserver has been changed to acquire N tokens (instead of N-1), and then immediately acquires the extra token. This was changed to accommodate the `cc` crate on Windows to allow it to release its implicit token. [#7344](https://github.com/rust-lang/cargo/pull/7344) - The scheduling algorithm for choosing which crate to build next has been changed. It now chooses the crate with the greatest number of transitive crates waiting on it. Previously it used a maximum topological depth. [#7390](https://github.com/rust-lang/cargo/pull/7390) - RUSTFLAGS are no longer incorporated in the metadata and filename hash, reversing the change from 1.33 that added it. This means that any change to RUSTFLAGS will cause a recompile, and will not affect symbol munging. 
[#7459](https://github.com/rust-lang/cargo/pull/7459) ### Fixed - Git dependencies with submodules with shorthand SSH URLs (like `git@github.com:user/repo.git`) should now work. [#7238](https://github.com/rust-lang/cargo/pull/7238) - Handle broken symlinks when creating `.dSYM` symlinks on macOS. [#7268](https://github.com/rust-lang/cargo/pull/7268) - Fixed issues with multiple versions of the same crate in a `[patch]` table. [#7303](https://github.com/rust-lang/cargo/pull/7303) - Fixed issue with custom target `.json` files where a substring of the name matches an unsupported crate type (like "bin"). [#7363](https://github.com/rust-lang/cargo/issues/7363) - Fixed issues with generating documentation for proc-macro crate types. [#7159](https://github.com/rust-lang/cargo/pull/7159) - Fixed hang if Cargo panics within a build thread. [#7366](https://github.com/rust-lang/cargo/pull/7366) - Fixed rebuild detection if a `build.rs` script issues different `rerun-if` directives between builds. Cargo was erroneously causing a rebuild after the change. [#7373](https://github.com/rust-lang/cargo/pull/7373) - Properly handle canonical URLs for `[patch]` table entries, preventing the patch from working after the first time it is used. [#7368](https://github.com/rust-lang/cargo/pull/7368) - Fixed an issue where integration tests were waiting for the package binary to finish building before starting their own build. They now may build concurrently. [#7394](https://github.com/rust-lang/cargo/pull/7394) - Fixed accidental change in the previous release on how `--features a b` flag is interpreted, restoring the original behavior where this is interpreted as `--features a` along with the argument `b` passed to the command. To pass multiple features, use quotes around the features to pass multiple features like `--features "a b"`, or use commas, or use multiple `--features` flags. 
[#7419](https://github.com/rust-lang/cargo/pull/7419) ### Nightly only - Basic support for building the standard library directly from Cargo has been added. ([docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#build-std)) [#7216](https://github.com/rust-lang/cargo/pull/7216) - Added `-Ztimings` feature to generate an HTML report on the time spent on individual compilation steps. This also may output completion steps on the console and JSON data. ([docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#timings)) [#7311](https://github.com/rust-lang/cargo/pull/7311) - Added ability to cross-compile doctests. ([docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#doctest-xcompile)) [#6892](https://github.com/rust-lang/cargo/pull/6892) ## Cargo 1.38 (2019-09-26) [4c1fa54d...23ef9a4e](https://github.com/rust-lang/cargo/compare/4c1fa54d...23ef9a4e) ### Added - πŸ”₯ Cargo build pipelining has been enabled by default to leverage more idle CPU parallelism during builds. [#7143](https://github.com/rust-lang/cargo/pull/7143) - The `--message-format` option to Cargo can now be specified multiple times and accepts a comma-separated list of values. In addition to the previous values it also now accepts `json-diagnostic-short` and `json-diagnostic-rendered-ansi` which configures the output coming from rustc in `json` message mode. [#7214](https://github.com/rust-lang/cargo/pull/7214) - Cirrus CI badges are now supported on crates.io. [#7119](https://github.com/rust-lang/cargo/pull/7119) - A new format for `Cargo.lock` has been introduced. This new format is intended to avoid source-control merge conflicts more often, and to generally make it safer to merge changes. This new format is *not* enabled at this time, though Cargo will use it if it sees it. At some point in the future, it is intended that this will become the default. [#7070](https://github.com/rust-lang/cargo/pull/7070) - Progress bar support added for FreeBSD. 
[#7222](https://github.com/rust-lang/cargo/pull/7222) ### Changed - The `-q` flag will no longer suppress the root error message for an error from Cargo itself. [#7116](https://github.com/rust-lang/cargo/pull/7116) - The Cargo Book is now published with mdbook 0.3 providing a number of formatting fixes and improvements. [#7140](https://github.com/rust-lang/cargo/pull/7140) - The `--features` command-line flag can now be specified multiple times. The list of features from all the flags are joined together. [#7084](https://github.com/rust-lang/cargo/pull/7084) - Package include/exclude glob-vs-gitignore warnings have been removed. Packages may now use gitignore-style matching without producing any warnings. [#7170](https://github.com/rust-lang/cargo/pull/7170) - Cargo now shows the command and output when parsing `rustc` output fails when querying `rustc` for information like `cfg` values. [#7185](https://github.com/rust-lang/cargo/pull/7185) - `cargo package`/`cargo publish` now allows a symbolic link to a git submodule to include that submodule. [#6817](https://github.com/rust-lang/cargo/pull/6817) - Improved the error message when a version requirement does not match any versions, but there are pre-release versions available. [#7191](https://github.com/rust-lang/cargo/pull/7191) ### Fixed - Fixed using the wrong directory when updating git repositories when using the `git-fetch-with-cli` config option, and the `GIT_DIR` environment variable is set. This may happen when running cargo from git callbacks. [#7082](https://github.com/rust-lang/cargo/pull/7082) - Fixed dep-info files being overwritten for targets that have separate debug outputs. For example, binaries on `-apple-` targets with `.dSYM` directories would overwrite the `.d` file. [#7057](https://github.com/rust-lang/cargo/pull/7057) - Fix `[patch]` table not preserving "one major version per source" rule. 
[#7118](https://github.com/rust-lang/cargo/pull/7118) - Ignore `--remap-path-prefix` flags for the metadata hash in the `cargo rustc` command. This was causing the remap settings to inadvertently affect symbol names. [#7134](https://github.com/rust-lang/cargo/pull/7134) - Fixed cycle detection in `[patch]` dependencies. [#7174](https://github.com/rust-lang/cargo/pull/7174) - Fixed `cargo new` leaving behind a symlink on Windows when `core.symlinks` git config is true. Also adds a number of fixes and updates from upstream libgit2. [#7176](https://github.com/rust-lang/cargo/pull/7176) - macOS: Fixed setting the flag to mark the `target` directory to be excluded from backups. [#7192](https://github.com/rust-lang/cargo/pull/7192) - Fixed `cargo fix` panicking under some situations involving multi-byte characters. [#7221](https://github.com/rust-lang/cargo/pull/7221) ### Nightly only - Added `cargo fix --clippy` which will apply machine-applicable fixes from Clippy. [#7069](https://github.com/rust-lang/cargo/pull/7069) - Added `-Z binary-dep-depinfo` flag to add change tracking for binary dependencies like the standard library. [#7137](https://github.com/rust-lang/cargo/pull/7137) [#7219](https://github.com/rust-lang/cargo/pull/7219) - `cargo clippy-preview` will always run, even if no changes have been made. [#7157](https://github.com/rust-lang/cargo/pull/7157) - Fixed exponential blowup when using `CARGO_BUILD_PIPELINING`. [#7062](https://github.com/rust-lang/cargo/pull/7062) - Fixed passing args to clippy in `cargo clippy-preview`. [#7162](https://github.com/rust-lang/cargo/pull/7162) ## Cargo 1.37 (2019-08-15) [c4fcfb72...9edd0891](https://github.com/rust-lang/cargo/compare/c4fcfb72...9edd0891) ### Added - Added `doctest` field to `cargo metadata` to determine if a target's documentation is tested. 
[#6953](https://github.com/rust-lang/cargo/pull/6953) [#6965](https://github.com/rust-lang/cargo/pull/6965) - πŸ”₯ The [`cargo vendor`](https://doc.rust-lang.org/nightly/cargo/commands/cargo-vendor.html) command is now built-in to Cargo. This command may be used to create a local copy of the sources of all dependencies. [#6869](https://github.com/rust-lang/cargo/pull/6869) - πŸ”₯ The "publish lockfile" feature is now stable. This feature will automatically include the `Cargo.lock` file when a package is published if it contains a binary executable target. By default, Cargo will ignore `Cargo.lock` when installing a package. To force Cargo to use the `Cargo.lock` file included in the published package, use `cargo install --locked`. This may be useful to ensure that `cargo install` consistently reproduces the same result. It may also be useful when a semver-incompatible change is accidentally published to a dependency, providing a way to fall back to a version that is known to work. [#7026](https://github.com/rust-lang/cargo/pull/7026) - πŸ”₯ The `default-run` feature has been stabilized. This feature allows you to specify which binary executable to run by default with `cargo run` when a package includes multiple binaries. Set the `default-run` key in the `[package]` table in `Cargo.toml` to the name of the binary to use by default. [#7056](https://github.com/rust-lang/cargo/pull/7056) ### Changed - `cargo package` now verifies that build scripts do not create empty directories. [#6973](https://github.com/rust-lang/cargo/pull/6973) - A warning is now issued if `cargo doc` generates duplicate outputs, which causes files to be randomly stomped on. This may happen for a variety of reasons (renamed dependencies, multiple versions of the same package, packages with renamed libraries, etc.). This is a known bug, which needs more work to handle correctly. 
[#6998](https://github.com/rust-lang/cargo/pull/6998) - Enabling a dependency's feature with `--features foo/bar` will no longer compile the current crate with the `foo` feature if `foo` is not an optional dependency. [#7010](https://github.com/rust-lang/cargo/pull/7010) - If `--remap-path-prefix` is passed via RUSTFLAGS, it will no longer affect the filename metadata hash. [#6966](https://github.com/rust-lang/cargo/pull/6966) - libgit2 has been updated to 0.28.2, which Cargo uses to access git repositories. This brings in hundreds of changes and fixes since it was last updated in November. [#7018](https://github.com/rust-lang/cargo/pull/7018) - Cargo now supports absolute paths in the dep-info files generated by rustc. This is laying the groundwork for [tracking binaries](https://github.com/rust-lang/rust/pull/61727), such as libstd, for rebuild detection. (Note: this contains a known bug.) [#7030](https://github.com/rust-lang/cargo/pull/7030) ### Fixed - Fixed how zsh completions fetch the list of commands. [#6956](https://github.com/rust-lang/cargo/pull/6956) - "+ debuginfo" is no longer printed in the build summary when `debug` is set to 0. [#6971](https://github.com/rust-lang/cargo/pull/6971) - Fixed `cargo doc` with an example configured with `doc = true` to document correctly. [#7023](https://github.com/rust-lang/cargo/pull/7023) - Don't fail if a read-only lock cannot be acquired in CARGO_HOME. This helps when CARGO_HOME doesn't exist, but `--locked` is used which means CARGO_HOME is not needed. [#7149](https://github.com/rust-lang/cargo/pull/7149) - Reverted a change in 1.35 which released jobserver tokens when Cargo blocked on a lock file. It caused a deadlock in some situations. [#7204](https://github.com/rust-lang/cargo/pull/7204) ### Nightly only - Added [compiler message caching](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#cache-messages). 
The `-Z cache-messages` flag makes cargo cache the compiler output so that future runs can redisplay previous warnings. [#6933](https://github.com/rust-lang/cargo/pull/6933) - `-Z mtime-on-use` no longer touches intermediate artifacts. [#7050](https://github.com/rust-lang/cargo/pull/7050) ## Cargo 1.36 (2019-07-04) [6f3e9c36...c4fcfb72](https://github.com/rust-lang/cargo/compare/6f3e9c36...c4fcfb72) ### Added - Added more detailed documentation on target auto-discovery. [#6898](https://github.com/rust-lang/cargo/pull/6898) - πŸ”₯ Stabilize the `--offline` flag which allows using cargo without a network connection. [#6934](https://github.com/rust-lang/cargo/pull/6934) [#6871](https://github.com/rust-lang/cargo/pull/6871) ### Changed - `publish = ["crates-io"]` may be added to the manifest to restrict publishing to crates.io only. [#6838](https://github.com/rust-lang/cargo/pull/6838) - macOS: Only include the default paths if `DYLD_FALLBACK_LIBRARY_PATH` is not set. Also, remove `/lib` from the default set. [#6856](https://github.com/rust-lang/cargo/pull/6856) - `cargo publish` will now exit early if the login token is not available. [#6854](https://github.com/rust-lang/cargo/pull/6854) - HTTP/2 stream errors are now considered "spurious" and will cause a retry. [#6861](https://github.com/rust-lang/cargo/pull/6861) - Setting a feature on a dependency where that feature points to a *required* dependency is now an error. Previously it was a warning. [#6860](https://github.com/rust-lang/cargo/pull/6860) - The `registry.index` config value now supports relative `file:` URLs. [#6873](https://github.com/rust-lang/cargo/pull/6873) - macOS: The `.dSYM` directory is now symbolically linked next to example binaries without the metadata hash so that debuggers can find it. [#6891](https://github.com/rust-lang/cargo/pull/6891) - The default `Cargo.toml` template for new projects now includes a comment providing a link to the documentation. 
[#6881](https://github.com/rust-lang/cargo/pull/6881) - Some improvements to the wording of the crate download summary. [#6916](https://github.com/rust-lang/cargo/pull/6916) [#6920](https://github.com/rust-lang/cargo/pull/6920) - ✨ Changed `RUST_LOG` environment variable to `CARGO_LOG` so that user code that uses the `log` crate will not display cargo's debug output. [#6918](https://github.com/rust-lang/cargo/pull/6918) - `Cargo.toml` is now always included when packaging, even if it is not listed in `package.include`. [#6925](https://github.com/rust-lang/cargo/pull/6925) - Package include/exclude values now use gitignore patterns instead of glob patterns. [#6924](https://github.com/rust-lang/cargo/pull/6924) - Provide a better error message when crates.io times out. Also improve error messages with other HTTP response codes. [#6936](https://github.com/rust-lang/cargo/pull/6936) ### Performance - Resolver performance improvements for some cases. [#6853](https://github.com/rust-lang/cargo/pull/6853) - Optimized how cargo reads the index JSON files by caching the results. [#6880](https://github.com/rust-lang/cargo/pull/6880) [#6912](https://github.com/rust-lang/cargo/pull/6912) [#6940](https://github.com/rust-lang/cargo/pull/6940) - Various performance improvements. [#6867](https://github.com/rust-lang/cargo/pull/6867) ### Fixed - More carefully track the on-disk fingerprint information for dependencies. This can help in some rare cases where the build is interrupted and restarted. [#6832](https://github.com/rust-lang/cargo/pull/6832) - `cargo run` now correctly passes non-UTF8 arguments to the child process. [#6849](https://github.com/rust-lang/cargo/pull/6849) - Fixed bash completion to run on bash 3.2, the stock version in macOS. [#6905](https://github.com/rust-lang/cargo/pull/6905) - Various fixes and improvements to zsh completion. 
[#6926](https://github.com/rust-lang/cargo/pull/6926) [#6929](https://github.com/rust-lang/cargo/pull/6929) - Fix `cargo update` ignoring `-p` arguments if the `Cargo.lock` file was missing. [#6904](https://github.com/rust-lang/cargo/pull/6904) ### Nightly only - Added [`-Z install-upgrade` feature](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#install-upgrade) to track details about installed crates and to update them if they are out-of-date. [#6798](https://github.com/rust-lang/cargo/pull/6798) - Added the [`public-dependency` feature](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#public-dependency) which allows tracking public versus private dependencies. [#6772](https://github.com/rust-lang/cargo/pull/6772) - Added build pipelining via the `build.pipelining` config option (`CARGO_BUILD_PIPELINING` env var). [#6883](https://github.com/rust-lang/cargo/pull/6883) - The `publish-lockfile` feature has had some significant changes. The default is now `true`, the `Cargo.lock` will always be published for binary crates. The `Cargo.lock` is now regenerated during publishing. `cargo install` now ignores the `Cargo.lock` file by default, and requires `--locked` to use the lock file. Warnings have been added if yanked dependencies are detected. [#6840](https://github.com/rust-lang/cargo/pull/6840) ## Cargo 1.35 (2019-05-23) [6789d8a0...6f3e9c36](https://github.com/rust-lang/cargo/compare/6789d8a0...6f3e9c36) ### Added - Added the `rustc-cdylib-link-arg` key for build scripts to specify linker arguments for cdylib crates. [#6298](https://github.com/rust-lang/cargo/pull/6298) ### Changed - When passing a test filter, such as `cargo test foo`, don't build examples (unless they set `test = true`). [#6683](https://github.com/rust-lang/cargo/pull/6683) - Forward the `--quiet` flag from `cargo test` to the libtest harness so that tests are actually quiet. 
[#6358](https://github.com/rust-lang/cargo/pull/6358) - The verification step in `cargo package` that checks if any files are modified is now stricter. It uses a hash of the contents instead of checking filesystem mtimes. It also checks *all* files in the package. [#6740](https://github.com/rust-lang/cargo/pull/6740) - Jobserver tokens are now released whenever Cargo blocks on a file lock. [#6748](https://github.com/rust-lang/cargo/pull/6748) - Issue a warning for a previous bug in the TOML parser that allowed multiple table headers with the same name. [#6761](https://github.com/rust-lang/cargo/pull/6761) - Removed the `CARGO_PKG_*` environment variables from the metadata hash and added them to the fingerprint instead. This means that when these values change, stale artifacts are not left behind. Also added the "repository" value to the fingerprint. [#6785](https://github.com/rust-lang/cargo/pull/6785) - `cargo metadata` no longer shows a `null` field for a dependency without a library in `resolve.nodes.deps`. The dependency is no longer shown. [#6534](https://github.com/rust-lang/cargo/pull/6534) - `cargo new` will no longer include an email address in the `authors` field if it is set to the empty string. [#6802](https://github.com/rust-lang/cargo/pull/6802) - `cargo doc --open` now works when documenting multiple packages. [#6803](https://github.com/rust-lang/cargo/pull/6803) - `cargo install --path P` now loads the `.cargo/config` file from the directory P. [#6805](https://github.com/rust-lang/cargo/pull/6805) - Using semver metadata in a version requirement (such as `1.0.0+1234`) now issues a warning that it is ignored. [#6806](https://github.com/rust-lang/cargo/pull/6806) - `cargo install` now rejects certain combinations of flags where some flags would have been ignored. [#6801](https://github.com/rust-lang/cargo/pull/6801) - Resolver performance improvements for some cases. 
[#6776](https://github.com/rust-lang/cargo/pull/6776) ### Fixed - Fixed running separate commands (such as `cargo build` then `cargo test`) where the second command could use stale results from a build script. [#6720](https://github.com/rust-lang/cargo/pull/6720) - Fixed `cargo fix` not working properly if a `.gitignore` file matched the root package directory. [#6767](https://github.com/rust-lang/cargo/pull/6767) - Fixed accidentally compiling a lib multiple times if `panic=unwind` was set in a profile. [#6781](https://github.com/rust-lang/cargo/pull/6781) - Paths to JSON files in `build.target` config value are now canonicalized to fix building dependencies. [#6778](https://github.com/rust-lang/cargo/pull/6778) - Fixed re-running a build script if its compilation was interrupted (such as if it is killed). [#6782](https://github.com/rust-lang/cargo/pull/6782) - Fixed `cargo new` initializing a fossil repo. [#6792](https://github.com/rust-lang/cargo/pull/6792) - Fixed supporting updating a git repo that has a force push when using the `git-fetch-with-cli` feature. `git-fetch-with-cli` also shows more error information now when it fails. [#6800](https://github.com/rust-lang/cargo/pull/6800) - `--example` binaries built for the WASM target are fixed to no longer include a metadata hash in the filename, and are correctly emitted in the `compiler-artifact` JSON message. [#6812](https://github.com/rust-lang/cargo/pull/6812) ### Nightly only - `cargo clippy-preview` is now a built-in cargo command. [#6759](https://github.com/rust-lang/cargo/pull/6759) - The `build-override` profile setting now includes proc-macros and their dependencies. [#6811](https://github.com/rust-lang/cargo/pull/6811) - Optional and target dependencies now work better with `-Z offline`. 
[#6814](https://github.com/rust-lang/cargo/pull/6814) ## Cargo 1.34 (2019-04-11) [f099fe94...6789d8a0](https://github.com/rust-lang/cargo/compare/f099fe94...6789d8a0) ### Added - πŸ”₯ Stabilized support for [alternate registries](https://doc.rust-lang.org/1.34.0/cargo/reference/registries.html). [#6654](https://github.com/rust-lang/cargo/pull/6654) - Added documentation on using builds.sr.ht Continuous Integration with Cargo. [#6565](https://github.com/rust-lang/cargo/pull/6565) - `Cargo.lock` now includes a comment at the top that it is `@generated`. [#6548](https://github.com/rust-lang/cargo/pull/6548) - Azure DevOps badges are now supported. [#6264](https://github.com/rust-lang/cargo/pull/6264) - Added a warning if `--exclude` flag specifies an unknown package. [#6679](https://github.com/rust-lang/cargo/pull/6679) ### Changed - `cargo test --doc --no-run` doesn't do anything, so it now displays an error to that effect. [#6628](https://github.com/rust-lang/cargo/pull/6628) - Various updates to bash completion: add missing options and commands, support libtest completions, use rustup for `--target` completion, fallback to filename completion, fix editing the command line. [#6644](https://github.com/rust-lang/cargo/pull/6644) - Publishing a crate with a `[patch]` section no longer generates an error. The `[patch]` section is removed from the manifest before publishing. [#6535](https://github.com/rust-lang/cargo/pull/6535) - `build.incremental = true` config value is now treated the same as `CARGO_INCREMENTAL=1`, previously it was ignored. [#6688](https://github.com/rust-lang/cargo/pull/6688) - Errors from a registry are now always displayed regardless of the HTTP response code. [#6771](https://github.com/rust-lang/cargo/pull/6771) ### Fixed - Fixed bash completion for `cargo run --example`. 
[#6578](https://github.com/rust-lang/cargo/pull/6578) - Fixed a race condition when using a *local* registry and running multiple cargo commands at the same time that build the same crate. [#6591](https://github.com/rust-lang/cargo/pull/6591) - Fixed some flickering and excessive updates of the progress bar. [#6615](https://github.com/rust-lang/cargo/pull/6615) - Fixed a hang when using a git credential helper that returns incorrect credentials. [#6681](https://github.com/rust-lang/cargo/pull/6681) - Fixed resolving yanked crates with a local registry. [#6750](https://github.com/rust-lang/cargo/pull/6750) ### Nightly only - Added `-Z mtime-on-use` flag to cause the mtime to be updated on the filesystem when a crate is used. This is intended to be able to track stale artifacts in the future for cleaning up unused files. [#6477](https://github.com/rust-lang/cargo/pull/6477) [#6573](https://github.com/rust-lang/cargo/pull/6573) - Added experimental `-Z dual-proc-macros` to build proc macros for both the host and the target. [#6547](https://github.com/rust-lang/cargo/pull/6547) ## Cargo 1.33 (2019-02-28) [8610973a...f099fe94](https://github.com/rust-lang/cargo/compare/8610973a...f099fe94) ### Added - `compiler-artifact` JSON messages now include an `"executable"` key which includes the path to the executable that was built. [#6363](https://github.com/rust-lang/cargo/pull/6363) - The man pages have been rewritten, and are now published with the web documentation. [#6405](https://github.com/rust-lang/cargo/pull/6405) - `cargo login` now displays a confirmation after saving the token. [#6466](https://github.com/rust-lang/cargo/pull/6466) - A warning is now emitted if a `[patch]` entry does not match any package. [#6470](https://github.com/rust-lang/cargo/pull/6470) - `cargo metadata` now includes the `links` key for a package. 
[#6480](https://github.com/rust-lang/cargo/pull/6480) - "Very verbose" output with `-vv` now displays the environment variables that cargo sets when it runs a process. [#6492](https://github.com/rust-lang/cargo/pull/6492) - `--example`, `--bin`, `--bench`, or `--test` without an argument now lists the available targets for those options. [#6505](https://github.com/rust-lang/cargo/pull/6505) - Windows: If a process fails with an extended status exit code, a human-readable name for the code is now displayed. [#6532](https://github.com/rust-lang/cargo/pull/6532) - Added `--features`, `--no-default-features`, and `--all-features` flags to the `cargo package` and `cargo publish` commands to use the given features when verifying the package. [#6453](https://github.com/rust-lang/cargo/pull/6453) ### Changed - If `cargo fix` fails to compile the fixed code, the rustc errors are now displayed on the console. [#6419](https://github.com/rust-lang/cargo/pull/6419) - Hide the `--host` flag from `cargo login`, it is unused. [#6466](https://github.com/rust-lang/cargo/pull/6466) - Build script fingerprints now include the rustc version. [#6473](https://github.com/rust-lang/cargo/pull/6473) - macOS: Switched to setting `DYLD_FALLBACK_LIBRARY_PATH` instead of `DYLD_LIBRARY_PATH`. [#6355](https://github.com/rust-lang/cargo/pull/6355) - `RUSTFLAGS` is now included in the metadata hash, meaning that changing the flags will not overwrite previously built files. [#6503](https://github.com/rust-lang/cargo/pull/6503) - When updating the crate graph, unrelated yanked crates were erroneously removed. They are now kept at their original version if possible. This was causing unrelated packages to be downgraded during `cargo update -p somecrate`. [#5702](https://github.com/rust-lang/cargo/issues/5702) - TOML files now support the [0.5 TOML syntax](https://github.com/toml-lang/toml/blob/master/CHANGELOG.md#050--2018-07-11). 
### Fixed - `cargo fix` will now ignore suggestions that modify multiple files. [#6402](https://github.com/rust-lang/cargo/pull/6402) - `cargo fix` will now only fix one target at a time, to deal with targets which share the same source files. [#6434](https://github.com/rust-lang/cargo/pull/6434) - Fixed bash completion showing the list of cargo commands. [#6461](https://github.com/rust-lang/cargo/issues/6461) - `cargo init` will now avoid creating duplicate entries in `.gitignore` files. [#6521](https://github.com/rust-lang/cargo/pull/6521) - Builds now attempt to detect if a file is modified in the middle of a compilation, allowing you to build again and pick up the new changes. This is done by keeping track of when the compilation *starts* not when it finishes. Also, [#5919](https://github.com/rust-lang/cargo/pull/5919) was reverted, meaning that cargo does *not* treat equal filesystem mtimes as requiring a rebuild. [#6484](https://github.com/rust-lang/cargo/pull/6484) ### Nightly only - Allow using registry *names* in `[patch]` tables instead of just URLs. [#6456](https://github.com/rust-lang/cargo/pull/6456) - `cargo metadata` added the `registry` key for dependencies. [#6500](https://github.com/rust-lang/cargo/pull/6500) - Registry names are now restricted to the same style as package names (alphanumeric, `-` and `_` characters). [#6469](https://github.com/rust-lang/cargo/pull/6469) - `cargo login` now displays the `/me` URL from the registry config. [#6466](https://github.com/rust-lang/cargo/pull/6466) - `cargo login --registry=NAME` now supports interactive input for the token. [#6466](https://github.com/rust-lang/cargo/pull/6466) - Registries may now elide the `api` key from `config.json` to indicate they do not support API access. [#6466](https://github.com/rust-lang/cargo/pull/6466) - Fixed panic when using `--message-format=json` with metabuild. 
[#6432](https://github.com/rust-lang/cargo/pull/6432) - Fixed detection of publishing to crates.io when using alternate registries. [#6525](https://github.com/rust-lang/cargo/pull/6525) ## Cargo 1.32 (2019-01-17) [339d9f9c...8610973a](https://github.com/rust-lang/cargo/compare/339d9f9c...8610973a) ### Added - Registries may now display warnings after a successful publish. [#6303](https://github.com/rust-lang/cargo/pull/6303) - Added a [glossary](https://doc.rust-lang.org/cargo/appendix/glossary.html) to the documentation. [#6321](https://github.com/rust-lang/cargo/pull/6321) - Added the alias `c` for `cargo check`. [#6218](https://github.com/rust-lang/cargo/pull/6218) ### Changed - πŸ”₯ HTTP/2 multiplexing is now enabled by default. The `http.multiplexing` config value may be used to disable it. [#6271](https://github.com/rust-lang/cargo/pull/6271) - Use ANSI escape sequences to clear lines instead of spaces. [#6233](https://github.com/rust-lang/cargo/pull/6233) - Disable git templates when checking out git dependencies, which can cause problems. [#6252](https://github.com/rust-lang/cargo/pull/6252) - Include the `--update-head-ok` git flag when using the `net.git-fetch-with-cli` option. This can help prevent failures when fetching some repositories. [#6250](https://github.com/rust-lang/cargo/pull/6250) - When extracting a crate during the verification step of `cargo package`, the filesystem mtimes are no longer set, which was failing on some rare filesystems. [#6257](https://github.com/rust-lang/cargo/pull/6257) - `crate-type = ["proc-macro"]` is now treated the same as `proc-macro = true` in `Cargo.toml`. [#6256](https://github.com/rust-lang/cargo/pull/6256) - An error is raised if `dependencies`, `features`, `target`, or `badges` is set in a virtual workspace. Warnings are displayed if `replace` or `patch` is used in a workspace member. [#6276](https://github.com/rust-lang/cargo/pull/6276) - Improved performance of the resolver in some cases. 
[#6283](https://github.com/rust-lang/cargo/pull/6283) [#6366](https://github.com/rust-lang/cargo/pull/6366) - `.rmeta` files are no longer hard-linked into the base target directory (`target/debug`). [#6292](https://github.com/rust-lang/cargo/pull/6292) - A warning is issued if multiple targets are built with the same output filenames. [#6308](https://github.com/rust-lang/cargo/pull/6308) - When using `cargo build` (without `--release`) benchmarks are now built using the "test" profile instead of "bench". This makes it easier to debug benchmarks, and avoids confusing behavior. [#6309](https://github.com/rust-lang/cargo/pull/6309) - User aliases may now override built-in aliases (`b`, `r`, `t`, and `c`). [#6259](https://github.com/rust-lang/cargo/pull/6259) - Setting `autobins=false` now disables auto-discovery of inferred targets. [#6329](https://github.com/rust-lang/cargo/pull/6329) - `cargo verify-project` will now fail on stable if the project uses unstable features. [#6326](https://github.com/rust-lang/cargo/pull/6326) - Platform targets with an internal `.` within the name are now allowed. [#6255](https://github.com/rust-lang/cargo/pull/6255) - `cargo clean --release` now only deletes the release directory. [#6349](https://github.com/rust-lang/cargo/pull/6349) ### Fixed - Avoid adding extra angle brackets in email address for `cargo new`. [#6243](https://github.com/rust-lang/cargo/pull/6243) - The progress bar is disabled if the CI environment variable is set. [#6281](https://github.com/rust-lang/cargo/pull/6281) - Avoid retaining all rustc output in memory. [#6289](https://github.com/rust-lang/cargo/pull/6289) - If JSON parsing fails, and rustc exits nonzero, don't lose the parse failure message. [#6290](https://github.com/rust-lang/cargo/pull/6290) - Fixed renaming a project directory with build scripts. [#6328](https://github.com/rust-lang/cargo/pull/6328) - Fixed `cargo run --example NAME` to work correctly if the example sets `crate_type = ["bin"]`. 
[#6330](https://github.com/rust-lang/cargo/pull/6330) - Fixed issue with `cargo package` git discovery being too aggressive. The `--allow-dirty` now completely disables the git repo checks. [#6280](https://github.com/rust-lang/cargo/pull/6280) - Fixed build change tracking for `[patch]` deps which resulted in `cargo build` rebuilding when it shouldn't. [#6493](https://github.com/rust-lang/cargo/pull/6493) ### Nightly only - Allow usernames in registry URLs. [#6242](https://github.com/rust-lang/cargo/pull/6242) - Added `"compile_mode"` key to the build-plan JSON structure to be able to distinguish running a custom build script versus compiling the build script. [#6331](https://github.com/rust-lang/cargo/pull/6331) - `--out-dir` no longer copies over build scripts. [#6300](https://github.com/rust-lang/cargo/pull/6300) ## Cargo 1.31 (2018-12-06) [36d96825...339d9f9c](https://github.com/rust-lang/cargo/compare/36d96825...339d9f9c) ### Added - πŸ”₯ Stabilized support for the 2018 edition. [#5984](https://github.com/rust-lang/cargo/pull/5984) [#5989](https://github.com/rust-lang/cargo/pull/5989) - πŸ”₯ Added the ability to [rename dependencies](https://doc.rust-lang.org/1.31.0/cargo/reference/specifying-dependencies.html#renaming-dependencies-in-cargotoml) in Cargo.toml. [#6319](https://github.com/rust-lang/cargo/pull/6319) - πŸ”₯ Added support for HTTP/2 pipelining and multiplexing. Set the `http.multiplexing` config value to enable. [#6005](https://github.com/rust-lang/cargo/pull/6005) - Added `http.debug` configuration value to debug HTTP connections. Use `CARGO_HTTP_DEBUG=true RUST_LOG=cargo::ops::registry cargo build` to display the debug information. [#6166](https://github.com/rust-lang/cargo/pull/6166) - `CARGO_PKG_REPOSITORY` environment variable is set with the repository value from `Cargo.toml` when building. [#6096](https://github.com/rust-lang/cargo/pull/6096) ### Changed - `cargo test --doc` now rejects other flags instead of ignoring them. 
[#6037](https://github.com/rust-lang/cargo/pull/6037) - `cargo install` ignores `~/.cargo/config`. [#6026](https://github.com/rust-lang/cargo/pull/6026) - `cargo version --verbose` is now the same as `cargo -vV`. [#6076](https://github.com/rust-lang/cargo/pull/6076) - Comments at the top of `Cargo.lock` are now preserved. [#6181](https://github.com/rust-lang/cargo/pull/6181) - When building in "very verbose" mode (`cargo build -vv`), build script output is prefixed with the package name and version, such as `[foo 0.0.1]`. [#6164](https://github.com/rust-lang/cargo/pull/6164) - If `cargo fix --broken-code` fails to compile after fixes have been applied, the files are no longer reverted and are left in their broken state. [#6316](https://github.com/rust-lang/cargo/pull/6316) ### Fixed - Windows: Pass Ctrl-C to the process with `cargo run`. [#6004](https://github.com/rust-lang/cargo/pull/6004) - macOS: Fix bash completion. [#6038](https://github.com/rust-lang/cargo/pull/6038) - Support arbitrary toolchain names when completing `+toolchain` in bash completion. [#6038](https://github.com/rust-lang/cargo/pull/6038) - Fixed edge cases in the resolver, when backtracking on failed dependencies. [#5988](https://github.com/rust-lang/cargo/pull/5988) - Fixed `cargo test --all-targets` running lib tests three times. [#6039](https://github.com/rust-lang/cargo/pull/6039) - Fixed publishing renamed dependencies to crates.io. [#5993](https://github.com/rust-lang/cargo/pull/5993) - Fixed `cargo install` on a git repo with multiple binaries. [#6060](https://github.com/rust-lang/cargo/pull/6060) - Fixed deeply nested JSON emitted by rustc being lost. [#6081](https://github.com/rust-lang/cargo/pull/6081) - Windows: Fix locking msys terminals to 60 characters. [#6122](https://github.com/rust-lang/cargo/pull/6122) - Fixed renamed dependencies with dashes. 
[#6140](https://github.com/rust-lang/cargo/pull/6140) - Fixed linking against the wrong dylib when the dylib existed in both `target/debug` and `target/debug/deps`. [#6167](https://github.com/rust-lang/cargo/pull/6167) - Fixed some unnecessary recompiles when `panic=abort` is used. [#6170](https://github.com/rust-lang/cargo/pull/6170) ### Nightly only - Added `--registry` flag to `cargo install`. [#6128](https://github.com/rust-lang/cargo/pull/6128) - Added `registry.default` configuration value to specify the default registry to use if `--registry` flag is not passed. [#6135](https://github.com/rust-lang/cargo/pull/6135) - Added `--registry` flag to `cargo new` and `cargo init`. [#6135](https://github.com/rust-lang/cargo/pull/6135) ## Cargo 1.30 (2018-10-25) [524a578d...36d96825](https://github.com/rust-lang/cargo/compare/524a578d...36d96825) ### Added - πŸ”₯ Added an animated progress bar that shows progress during building. [#5995](https://github.com/rust-lang/cargo/pull/5995/) - Added `resolve.nodes.deps` key to `cargo metadata`, which includes more information about resolved dependencies, and properly handles renamed dependencies. [#5871](https://github.com/rust-lang/cargo/pull/5871) - When creating a package, provide more detail with `-v` when failing to discover if files are dirty in a git repository. Also fix a problem with discovery on Windows. [#5858](https://github.com/rust-lang/cargo/pull/5858) - Filters like `--bin`, `--test`, `--example`, `--bench`, or `--lib` can be used in a workspace without selecting a specific package. [#5873](https://github.com/rust-lang/cargo/pull/5873) - `cargo run` can be used in a workspace without selecting a specific package. [#5877](https://github.com/rust-lang/cargo/pull/5877) - `cargo doc --message-format=json` now outputs JSON messages from rustdoc. [#5878](https://github.com/rust-lang/cargo/pull/5878) - Added `--message-format=short` to show one-line messages. 
[#5879](https://github.com/rust-lang/cargo/pull/5879) - Added `.cargo_vcs_info.json` file to `.crate` packages that captures the current git hash. [#5886](https://github.com/rust-lang/cargo/pull/5886) - Added `net.git-fetch-with-cli` configuration option to use the `git` executable to fetch repositories instead of using the built-in libgit2 library. [#5914](https://github.com/rust-lang/cargo/pull/5914) - Added `required-features` to `cargo metadata`. [#5902](https://github.com/rust-lang/cargo/pull/5902) - `cargo uninstall` within a package will now uninstall that package. [#5927](https://github.com/rust-lang/cargo/pull/5927) - Added `--allow-staged` flag to `cargo fix` to allow it to run if files are staged in git. [#5943](https://github.com/rust-lang/cargo/pull/5943) - Added `net.low-speed-limit` config value, and also honor `net.timeout` for http operations. [#5957](https://github.com/rust-lang/cargo/pull/5957) - Added `--edition` flag to `cargo new`. [#5984](https://github.com/rust-lang/cargo/pull/5984) - Temporarily stabilized 2018 edition support for the duration of the beta. [#5984](https://github.com/rust-lang/cargo/pull/5984) [#5989](https://github.com/rust-lang/cargo/pull/5989) - Added support for `target.'cfg(…)'.runner` config value to specify the run/test/bench runner for targets that use config expressions. [#5959](https://github.com/rust-lang/cargo/pull/5959) ### Changed - Windows: `cargo run` will not kill child processes when the main process exits. [#5887](https://github.com/rust-lang/cargo/pull/5887) - Switched to the `opener` crate to open a web browser with `cargo doc --open`. This should more reliably select the system-preferred browser on all platforms. [#5888](https://github.com/rust-lang/cargo/pull/5888) - Equal file mtimes now cause a target to be rebuilt. Previously only if files were strictly *newer* than the last build would it cause a rebuild. 
[#5919](https://github.com/rust-lang/cargo/pull/5919) - Ignore `build.target` config value when running `cargo install`. [#5874](https://github.com/rust-lang/cargo/pull/5874) - Ignore `RUSTC_WRAPPER` for `cargo fix`. [#5983](https://github.com/rust-lang/cargo/pull/5983) - Ignore empty `RUSTC_WRAPPER`. [#5985](https://github.com/rust-lang/cargo/pull/5985) ### Fixed - Fixed error when creating a package with an edition field in `Cargo.toml`. [#5908](https://github.com/rust-lang/cargo/pull/5908) - More consistently use relative paths for path dependencies in a workspace. [#5935](https://github.com/rust-lang/cargo/pull/5935) - `cargo fix` now always runs, even if it was run previously. [#5944](https://github.com/rust-lang/cargo/pull/5944) - Windows: Attempt to more reliably detect terminal width. msys-based terminals are forced to 60 characters wide. [#6010](https://github.com/rust-lang/cargo/pull/6010) - Allow multiple target flags with `cargo doc --document-private-items`. [6022](https://github.com/rust-lang/cargo/pull/6022) ### Nightly only - Added [metabuild](https://doc.rust-lang.org/1.30.0/cargo/reference/unstable.html#metabuild). [#5628](https://github.com/rust-lang/cargo/pull/5628) cargo-0.66.0/CONTRIBUTING.md000066400000000000000000000016131432416201200151230ustar00rootroot00000000000000# Contributing to Cargo Contributing documentation has moved to the **[Cargo Contributor Guide]**. [Cargo Contributor Guide]: https://rust-lang.github.io/cargo/contrib/ ## Before hacking on Cargo We encourage people to discuss their design before hacking on code. Typically, you [file an issue] or start a thread on the [internals forum] before submitting a pull request. Please read [the process] of how features and bugs are managed in Cargo. **NOTICE: Due to limited review capacity, the Cargo team is not accepting new features or major changes at this time. Please consult with the team before opening a new PR. 
Only issues that have been explicitly marked as accepted will be reviewed.** [internals forum]: https://internals.rust-lang.org/c/tools-and-infrastructure/cargo [file an issue]: https://github.com/rust-lang/cargo/issues [the process]: https://doc.crates.io/contrib/process/index.html cargo-0.66.0/Cargo.toml000066400000000000000000000054611432416201200146270ustar00rootroot00000000000000[package] name = "cargo" version = "0.66.0" edition = "2021" license = "MIT OR Apache-2.0" homepage = "https://crates.io" repository = "https://github.com/rust-lang/cargo" documentation = "https://docs.rs/cargo" readme = "README.md" description = """ Cargo, a package manager for Rust. """ [lib] name = "cargo" path = "src/cargo/lib.rs" [dependencies] atty = "0.2" bytesize = "1.0" cargo-platform = { path = "crates/cargo-platform", version = "0.1.2" } cargo-util = { path = "crates/cargo-util", version = "0.2.1" } crates-io = { path = "crates/crates-io", version = "0.34.0" } curl = { version = "0.4.43", features = ["http2"] } curl-sys = "0.4.55" env_logger = "0.9.0" pretty_env_logger = { version = "0.4", optional = true } anyhow = "1.0" filetime = "0.2.9" flate2 = { version = "1.0.3", default-features = false, features = ["zlib"] } git2 = "0.15.0" git2-curl = "0.16.0" glob = "0.3.0" hex = "0.4" home = "0.5" humantime = "2.0.0" indexmap = "1" ignore = "0.4.7" lazy_static = "1.2.0" jobserver = "0.1.24" lazycell = "1.2.0" libc = "0.2" log = "0.4.6" libgit2-sys = "0.14.0" memchr = "2.1.3" opener = "0.5" os_info = "3.5.0" pathdiff = "0.2" percent-encoding = "2.0" rustfix = "0.6.0" semver = { version = "1.0.3", features = ["serde"] } serde = { version = "1.0.123", features = ["derive"] } serde_ignored = "0.1.0" serde_json = { version = "1.0.30", features = ["raw_value"] } shell-escape = "0.1.4" strip-ansi-escapes = "0.1.0" tar = { version = "0.4.38", default-features = false } tempfile = "3.0" termcolor = "1.1" toml_edit = { version = "0.14.3", features = ["serde", "easy", "perf"] } unicode-xid 
= "0.2.0" url = "2.2.2" walkdir = "2.2" clap = "3.2.18" unicode-width = "0.1.5" openssl = { version = '0.10.11', optional = true } im-rc = "15.0.0" itertools = "0.10.0" # A noop dependency that changes in the Rust repository, it's a bit of a hack. # See the `src/tools/rustc-workspace-hack/README.md` file in `rust-lang/rust` # for more information. rustc-workspace-hack = "1.0.0" [target.'cfg(windows)'.dependencies] fwdansi = "1.1.0" [target.'cfg(windows)'.dependencies.winapi] version = "0.3" features = [ "basetsd", "handleapi", "jobapi", "jobapi2", "memoryapi", "minwindef", "ntdef", "ntstatus", "processenv", "processthreadsapi", "psapi", "synchapi", "winerror", "winbase", "wincon", "winnt", ] [dev-dependencies] cargo-test-macro = { path = "crates/cargo-test-macro" } cargo-test-support = { path = "crates/cargo-test-support" } snapbox = { version = "0.3.0", features = ["diff", "path"] } [build-dependencies] flate2 = { version = "1.0.3", default-features = false, features = ["zlib"] } tar = { version = "0.4.38", default-features = false } [[bin]] name = "cargo" test = false doc = false [features] deny-warnings = [] vendored-openssl = ["openssl/vendored"] pretty-env-logger = ["pretty_env_logger"] cargo-0.66.0/LICENSE-APACHE000066400000000000000000000251541432416201200146240ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 https://www.apache.org/licenses/LICENSE-2.0 TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. 
For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. 
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. 
If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 
You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. 
Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. cargo-0.66.0/LICENSE-MIT000066400000000000000000000017771432416201200143410ustar00rootroot00000000000000Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. cargo-0.66.0/LICENSE-THIRD-PARTY000066400000000000000000002055161432416201200154540ustar00rootroot00000000000000The Cargo source code itself does not bundle any third party libraries, but it depends on a number of libraries which carry their own copyright notices and license terms. 
These libraries are normally all linked static into the binary distributions of Cargo: * OpenSSL - https://www.openssl.org/source/license.html Copyright (c) 1998-2011 The OpenSSL Project. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. All advertising materials mentioning features or use of this software must display the following acknowledgment: "This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit. (https://www.openssl.org/)" 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to endorse or promote products derived from this software without prior written permission. For written permission, please contact openssl-core@openssl.org. 5. Products derived from this software may not be called "OpenSSL" nor may "OpenSSL" appear in their names without prior written permission of the OpenSSL Project. 6. Redistributions of any form whatsoever must retain the following acknowledgment: "This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (https://www.openssl.org/)" THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE OpenSSL PROJECT OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ==================================================================== This product includes cryptographic software written by Eric Young (eay@cryptsoft.com). This product includes software written by Tim Hudson (tjh@cryptsoft.com). --- Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) All rights reserved. This package is an SSL implementation written by Eric Young (eay@cryptsoft.com). The implementation was written so as to conform with Netscapes SSL. This library is free for commercial and non-commercial use as long as the following conditions are aheared to. The following conditions apply to all code found in this distribution, be it the RC4, RSA, lhash, DES, etc., code; not just the SSL code. The SSL documentation included with this distribution is covered by the same copyright terms except that the holder is Tim Hudson (tjh@cryptsoft.com). Copyright remains Eric Young's, and as such any Copyright notices in the code are not to be removed. If this package is used in a product, Eric Young should be given attribution as the author of the parts of the library used. This can be in the form of a textual message at program startup or in documentation (online or textual) provided with the package. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the copyright notice, this list of conditions and the following disclaimer. 
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. All advertising materials mentioning features or use of this software must display the following acknowledgement: "This product includes cryptographic software written by Eric Young (eay@cryptsoft.com)" The word 'cryptographic' can be left out if the rouines from the library being used are not cryptographic related :-). 4. If you include any Windows specific code (or a derivative thereof) from the apps directory (application code) you must include an acknowledgement: "This product includes software written by Tim Hudson (tjh@cryptsoft.com)" THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. The licence and distribution terms for any publically available version or derivative of this code cannot be changed. i.e. this code cannot simply be copied and put under another distribution licence [including the GNU Public Licence.] * libgit2 - https://github.com/libgit2/libgit2/blob/master/COPYING libgit2 is Copyright (C) the libgit2 contributors, unless otherwise stated. See the AUTHORS file for details. 
Note that the only valid version of the GPL as far as this project is concerned is _this_ particular version of the license (ie v2, not v2.2 or v3.x or whatever), unless explicitly otherwise stated. ---------------------------------------------------------------------- LINKING EXCEPTION In addition to the permissions in the GNU General Public License, the authors give you unlimited permission to link the compiled version of this library into combinations with other programs, and to distribute those combinations without any restriction coming from the use of this file. (The General Public License restrictions do apply in other respects; for example, they cover modification of the file, and distribution when not linked into a combined executable.) ---------------------------------------------------------------------- GNU GENERAL PUBLIC LICENSE Version 2, June 1991 Copyright (C) 1989, 1991 Free Software Foundation, Inc. 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Library General Public License instead.) You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. 
Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things. To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it. For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software. Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations. Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all. The precise terms and conditions for copying, distribution and modification follow. GNU GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. 
This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you". Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does. 1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change. 
b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License. c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program. In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. 
You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following: a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.) The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. 
If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code. 4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it. 6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License. 7. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. 
If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 9. 
The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation. 10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 12. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA Also add information on how to contact you by electronic and paper mail. 
If the program is interactive, make it output a short notice like this when it starts in an interactive mode: Gnomovision version 69, Copyright (C) year name of author Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program. You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here is a sample; alter the names: Yoyodyne, Inc., hereby disclaims all copyright interest in the program `Gnomovision' (which makes passes at compilers) written by James Hacker. , 1 April 1989 Ty Coon, President of Vice This General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Library General Public License instead of this License. ---------------------------------------------------------------------- The bundled ZLib code is licensed under the ZLib license: Copyright (C) 1995-2010 Jean-loup Gailly and Mark Adler This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software. Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions: 1. 
The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required. 2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software. 3. This notice may not be removed or altered from any source distribution. Jean-loup Gailly Mark Adler jloup@gzip.org madler@alumni.caltech.edu ---------------------------------------------------------------------- The Clar framework is licensed under the MIT license: Copyright (C) 2011 by Vicent Marti Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ---------------------------------------------------------------------- The regex library (deps/regex/) is licensed under the GNU LGPL GNU LESSER GENERAL PUBLIC LICENSE Version 2.1, February 1999 Copyright (C) 1991, 1999 Free Software Foundation, Inc. 
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. [This is the first released version of the Lesser GPL. It also counts as the successor of the GNU Library Public License, version 2, hence the version number 2.1.] Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This license, the Lesser General Public License, applies to some specially designated software packages--typically libraries--of the Free Software Foundation and other authors who decide to use it. You can use it too, but we suggest you first think carefully about whether this license or the ordinary General Public License is the better strategy to use in any particular case, based on the explanations below. When we speak of free software, we are referring to freedom of use, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish); that you receive source code or can get it if you want it; that you can change the software and use pieces of it in new free programs; and that you are informed that you can do these things. To protect your rights, we need to make restrictions that forbid distributors to deny you these rights or to ask you to surrender these rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library or if you modify it. For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. 
If you link other code with the library, you must provide complete object files to the recipients, so that they can relink them with the library after making changes to the library and recompiling it. And you must show them these terms so they know their rights. We protect your rights with a two-step method: (1) we copyright the library, and (2) we offer you this license, which gives you legal permission to copy, distribute and/or modify the library. To protect each distributor, we want to make it very clear that there is no warranty for the free library. Also, if the library is modified by someone else and passed on, the recipients should know that what they have is not the original version, so that the original author's reputation will not be affected by problems that might be introduced by others. Finally, software patents pose a constant threat to the existence of any free program. We wish to make sure that a company cannot effectively restrict the users of a free program by obtaining a restrictive license from a patent holder. Therefore, we insist that any patent license obtained for a version of the library must be consistent with the full freedom of use specified in this license. Most GNU software, including some libraries, is covered by the ordinary GNU General Public License. This license, the GNU Lesser General Public License, applies to certain designated libraries, and is quite different from the ordinary General Public License. We use this license for certain libraries in order to permit linking those libraries into non-free programs. When a program is linked with a library, whether statically or using a shared library, the combination of the two is legally speaking a combined work, a derivative of the original library. The ordinary General Public License therefore permits such linking only if the entire combination fits its criteria of freedom. The Lesser General Public License permits more lax criteria for linking other code with the library. 
We call this license the "Lesser" General Public License because it does Less to protect the user's freedom than the ordinary General Public License. It also provides other free software developers Less of an advantage over competing non-free programs. These disadvantages are the reason we use the ordinary General Public License for many libraries. However, the Lesser license provides advantages in certain special circumstances. For example, on rare occasions, there may be a special need to encourage the widest possible use of a certain library, so that it becomes a de-facto standard. To achieve this, non-free programs must be allowed to use the library. A more frequent case is that a free library does the same job as widely used non-free libraries. In this case, there is little to gain by limiting the free library to free software only, so we use the Lesser General Public License. In other cases, permission to use a particular library in non-free programs enables a greater number of people to use a large body of free software. For example, permission to use the GNU C Library in non-free programs enables many more people to use the whole GNU operating system, as well as its variant, the GNU/Linux operating system. Although the Lesser General Public License is Less protective of the users' freedom, it does ensure that the user of a program that is linked with the Library has the freedom and the wherewithal to run that program using a modified version of the Library. The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a "work based on the library" and a "work that uses the library". The former contains code derived from the library, whereas the latter must be combined with the library in order to run. GNU LESSER GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. 
This License Agreement applies to any software library or other program which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Lesser General Public License (also called "this License"). Each licensee is addressed as "you". A "library" means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables. The "Library", below, refers to any such software library or work which has been distributed under these terms. A "work based on the Library" means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term "modification".) "Source code" for a work means the preferred form of the work for making modifications to it. For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library. Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does. 1. 
You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) The modified work must itself be a software library. b) You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change. c) You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License. d) If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful. (For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.) These requirements apply to the modified work as a whole. 
If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library. In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. (If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices. Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy. This option is useful when you wish to copy part of the code of the Library into a program that is not a library. 4. 
You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange. If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code. 5. A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a "work that uses the Library". Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License. However, linking a "work that uses the Library" with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a "work that uses the library". The executable is therefore covered by this License. Section 6 states terms for distribution of such executables. When a "work that uses the Library" uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. The threshold for this to be true is not precisely defined by law. 
If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.) Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself. 6. As an exception to the Sections above, you may also combine or link a "work that uses the Library" with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications. You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. Also, you must do one of these things: a) Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable "work that uses the Library", as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. 
(It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.) b) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (1) uses at run time a copy of the library already present on the user's computer system, rather than copying library functions into the executable, and (2) will operate properly with a modified version of the library, if the user installs one, as long as the modified version is interface-compatible with the version that the work was made with. c) Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution. d) If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place. e) Verify that the user has already received a copy of these materials or that you have already sent this user a copy. For an executable, the required form of the "work that uses the Library" must include any data and utility programs needed for reproducing the executable from it. However, as a special exception, the materials to be distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute. 7. 
You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things: a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above. b) Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work. 8. You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 9. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it. 10. Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. 
You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties with this License. 11. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 12. 
If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 13. The Free Software Foundation may publish revised and/or new versions of the Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Library specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation. 14. If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. 
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Libraries If you develop a new library, and you want it to be of the greatest possible use to the public, we recommend making it free software that everyone can redistribute and change. You can do so by permitting redistribution under these terms (or, alternatively, under the terms of the ordinary General Public License). To apply these terms, attach the following notices to the library. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. 
Copyright (C) This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA Also add information on how to contact you by electronic and paper mail. You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the library, if necessary. Here is a sample; alter the names: Yoyodyne, Inc., hereby disclaims all copyright interest in the library `Frob' (a library for tweaking knobs) written by James Random Hacker. , 1 April 1990 Ty Coon, President of Vice That's all there is to it! ---------------------------------------------------------------------- * libssh2 - https://www.libssh2.org/license.html Copyright (c) 2004-2007 Sara Golemon Copyright (c) 2005,2006 Mikhail Gusarov Copyright (c) 2006-2007 The Written Word, Inc. Copyright (c) 2007 Eli Fant Copyright (c) 2009 Daniel Stenberg Copyright (C) 2008, 2009 Simon Josefsson All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. Neither the name of the copyright holder nor the names of any other contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * libcurl - https://curl.haxx.se/docs/copyright.html COPYRIGHT AND PERMISSION NOTICE Copyright (c) 1996 - 2014, Daniel Stenberg, daniel@haxx.se. All rights reserved. Permission to use, copy, modify, and distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. Except as contained in this notice, the name of a copyright holder shall not be used in advertising or otherwise to promote the sale, use or other dealings in this Software without prior written authorization of the copyright holder. * flate2-rs - https://github.com/alexcrichton/flate2-rs/blob/master/LICENSE-MIT * link-config - https://github.com/alexcrichton/link-config/blob/master/LICENSE-MIT * openssl-static-sys - https://github.com/alexcrichton/openssl-static-sys/blob/master/LICENSE-MIT * toml-rs - https://github.com/alexcrichton/toml-rs/blob/master/LICENSE-MIT * libssh2-static-sys - https://github.com/alexcrichton/libssh2-static-sys/blob/master/LICENSE-MIT * git2-rs - https://github.com/alexcrichton/git2-rs/blob/master/LICENSE-MIT * tar-rs - https://github.com/alexcrichton/tar-rs/blob/master/LICENSE-MIT Copyright (c) 2014 Alex Crichton Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * glob - https://github.com/rust-lang/glob/blob/master/LICENSE-MIT * semver - https://github.com/rust-lang/semver/blob/master/LICENSE-MIT Copyright (c) 2014 The Rust Project Developers Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
* rust-url - https://github.com/servo/rust-url/blob/master/LICENSE-MIT Copyright (c) 2006-2009 Graydon Hoare Copyright (c) 2009-2013 Mozilla Foundation Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * rust-encoding - https://github.com/lifthrasiir/rust-encoding/blob/master/LICENSE.txt The MIT License (MIT) Copyright (c) 2013, Kang Seonghoon. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * curl-rust - https://github.com/carllerche/curl-rust/blob/master/LICENSE Copyright (c) 2014 Carl Lerche Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * docopt.rs - https://github.com/docopt/docopt.rs/blob/master/UNLICENSE This is free and unencumbered software released into the public domain. Anyone is free to copy, modify, publish, use, compile, sell, or distribute this software, either in source code form or as a compiled binary, for any purpose, commercial or non-commercial, and by any means. 
In jurisdictions that recognize copyright laws, the author or authors of this software dedicate any and all copyright interest in the software to the public domain. We make this dedication for the benefit of the public at large and to the detriment of our heirs and successors. We intend this dedication to be an overt act of relinquishment in perpetuity of all present and future rights to this software under copyright law. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. For more information, please refer to cargo-0.66.0/README.md000066400000000000000000000105651432416201200141570ustar00rootroot00000000000000# Cargo Cargo downloads your Rust project’s dependencies and compiles your project. **To start using Cargo**, learn more at [The Cargo Book]. **To start developing Cargo itself**, read the [Cargo Contributor Guide]. [The Cargo Book]: https://doc.rust-lang.org/cargo/ [Cargo Contributor Guide]: https://rust-lang.github.io/cargo/contrib/ ## Code Status [![CI](https://github.com/rust-lang/cargo/actions/workflows/main.yml/badge.svg?branch=auto-cargo)](https://github.com/rust-lang/cargo/actions/workflows/main.yml) Code documentation: https://docs.rs/cargo/ ## Installing Cargo Cargo is distributed by default with Rust, so if you've got `rustc` installed locally you probably also have `cargo` installed locally. 
## Compiling from Source ### Requirements Cargo requires the following tools and packages to build: * `cargo` and `rustc` * A C compiler [for your platform](https://github.com/rust-lang/cc-rs#compile-time-requirements) * `git` (to clone this repository) **Other requirements:** The following are optional based on your platform and needs. * `pkg-config` β€” This is used to help locate system packages, such as `libssl` headers/libraries. This may not be required in all cases, such as using vendored OpenSSL, or on Windows. * OpenSSL β€” Only needed on Unix-like systems and only if the `vendored-openssl` Cargo feature is not used. This requires the development headers, which can be obtained from the `libssl-dev` package on Ubuntu or `openssl-devel` with apk or yum or the `openssl` package from Homebrew on macOS. If using the `vendored-openssl` Cargo feature, then a static copy of OpenSSL will be built from source instead of using the system OpenSSL. This may require additional tools such as `perl` and `make`. On macOS, common installation directories from Homebrew, MacPorts, or pkgsrc will be checked. Otherwise it will fall back to `pkg-config`. On Windows, the system-provided Schannel will be used instead. LibreSSL is also supported. **Optional system libraries:** The build will automatically use vendored versions of the following libraries. However, if they are provided by the system and can be found with `pkg-config`, then the system libraries will be used instead: * [`libcurl`](https://curl.se/libcurl/) β€” Used for network transfers. * [`libgit2`](https://libgit2.org/) β€” Used for fetching git dependencies. * [`libssh2`](https://www.libssh2.org/) β€” Used for SSH access to git repositories. * [`libz`](https://zlib.net/) (aka zlib) β€” Used for data compression. It is recommended to use the vendored versions as they are the versions that are tested to work with Cargo. 
### Compiling First, you'll want to check out this repository ``` git clone https://github.com/rust-lang/cargo cd cargo ``` With `cargo` already installed, you can simply run: ``` cargo build --release ``` ## Adding new subcommands to Cargo Cargo is designed to be extensible with new subcommands without having to modify Cargo itself. See [the Wiki page][third-party-subcommands] for more details and a list of known community-developed subcommands. [third-party-subcommands]: https://github.com/rust-lang/cargo/wiki/Third-party-cargo-subcommands ## Releases Cargo releases coincide with Rust releases. High level release notes are available as part of [Rust's release notes][rel]. Detailed release notes are available in this repo at [CHANGELOG.md]. [rel]: https://github.com/rust-lang/rust/blob/master/RELEASES.md [CHANGELOG.md]: CHANGELOG.md ## Reporting issues Found a bug? We'd love to know about it! Please report all issues on the GitHub [issue tracker][issues]. [issues]: https://github.com/rust-lang/cargo/issues ## Contributing See the **[Cargo Contributor Guide]** for a complete introduction to contributing to Cargo. ## License Cargo is primarily distributed under the terms of both the MIT license and the Apache License (Version 2.0). See [LICENSE-APACHE](LICENSE-APACHE) and [LICENSE-MIT](LICENSE-MIT) for details. ### Third party software This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (https://www.openssl.org/). In binary form, this product includes software that is licensed under the terms of the GNU General Public License, version 2, with a linking exception, which can be obtained from the [upstream repository][1]. See [LICENSE-THIRD-PARTY](LICENSE-THIRD-PARTY) for details. 
[1]: https://github.com/libgit2/libgit2 cargo-0.66.0/benches/000077500000000000000000000000001432416201200143005ustar00rootroot00000000000000cargo-0.66.0/benches/README.md000066400000000000000000000116011432416201200155560ustar00rootroot00000000000000# Cargo Benchmarking This directory contains some benchmarks for cargo itself. This uses [Criterion] for running benchmarks. It is recommended to read the Criterion book to get familiar with how to use it. A basic usage would be: ```sh cd benches/benchsuite cargo bench ``` The tests involve downloading the index and benchmarking against some real-world and artificial workspaces located in the [`workspaces`](workspaces) directory. **Beware** that the initial download can take a fairly long amount of time (10 minutes minimum on an extremely fast network) and require significant disk space (around 4.5GB). The benchsuite will cache the index and downloaded crates in the `target/tmp/bench` directory, so subsequent runs should be faster. You can (and probably should) specify individual benchmarks to run to narrow it down to a more reasonable set, for example: ```sh cargo bench -- resolve_ws/rust ``` This will only download what's necessary for the rust-lang/rust workspace (which is about 330MB) and run the benchmarks against it (which should take about a minute). To get a list of all the benchmarks, run: ```sh cargo bench -- --list ``` ## Viewing reports The benchmarks display some basic information on the command-line while they run. A more complete HTML report can be found at `target/criterion/report/index.html` which contains links to all the benchmarks and summaries. Check out the Criterion book for more information on the extensive reporting capabilities. ## Comparing implementations Knowing the raw numbers can be useful, but what you're probably most interested in is checking if your changes help or hurt performance. To do that, you need to run the benchmarks multiple times. 
First, run the benchmarks from the master branch of cargo without any changes. To make it easier to compare, Criterion supports naming the baseline so that you can iterate on your code and compare against it multiple times. ```sh cargo bench -- --save-baseline master ``` Now you can switch to your branch with your changes. Re-run the benchmarks compared against the baseline: ```sh cargo bench -- --baseline master ``` You can repeat the last command as you make changes to re-compare against the master baseline. Without the baseline arguments, it will compare against the last run, which can be helpful for comparing incremental changes. ## Capturing workspaces The [`workspaces`](workspaces) directory contains several workspaces that provide a variety of different workspaces intended to provide good exercises for benchmarks. Some of these are shadow copies of real-world workspaces. This is done with the tool in the [`capture`](capture) directory. The tool will copy `Cargo.lock` and all of the `Cargo.toml` files of the workspace members. It also adds an empty `lib.rs` so Cargo won't error, and sanitizes the `Cargo.toml` to some degree, removing unwanted elements. Finally, it compresses everything into a `tgz`. To run it, do: ```sh cd benches/capture cargo run -- /path/to/workspace/foo ``` The resolver benchmarks also support the `CARGO_BENCH_WORKSPACES` environment variable, which you can point to a Cargo workspace if you want to try different workspaces. For example: ```sh CARGO_BENCH_WORKSPACES=/path/to/some/workspace cargo bench ``` ## TODO This is just a start for establishing a benchmarking suite for Cargo. There's a lot that can be added. Some ideas: * Fix the benchmarks so that the resolver setup doesn't run every iteration. * Benchmark [this section of code](https://github.com/rust-lang/cargo/blob/a821e2cb24d7b6013433f069ab3bad53d160e100/src/cargo/ops/cargo_compile.rs#L470-L549) which builds the unit graph. 
The performance there isn't great, and it would be good to keep an eye on it. Unfortunately that would mean doing a bit of work to make `generate_targets` publicly visible, and there is a bunch of setup code that may need to be duplicated. * Benchmark the fingerprinting code. * Benchmark running the `cargo` executable. Running something like `cargo build` or `cargo check` with everything "Fresh" would be a good end-to-end exercise to measure the overall overhead of Cargo. * Benchmark pathological resolver scenarios. There might be some cases where the resolver can spend a significant amount of time. It would be good to identify if these exist, and create benchmarks for them. This may require creating an artificial index, similar to the `resolver-tests`. This should also consider scenarios where the resolver ultimately fails. * Benchmark without `Cargo.lock`. I'm not sure if this is particularly valuable, since we are mostly concerned with incremental builds which will always have a lock file. * Benchmark just [`resolve::resolve`](https://github.com/rust-lang/cargo/blob/a821e2cb24d7b6013433f069ab3bad53d160e100/src/cargo/core/resolver/mod.rs#L122) without anything else. This can help focus on just the resolver. [Criterion]: https://bheisler.github.io/criterion.rs/book/ cargo-0.66.0/benches/benchsuite/000077500000000000000000000000001432416201200164315ustar00rootroot00000000000000cargo-0.66.0/benches/benchsuite/Cargo.toml000066400000000000000000000013471432416201200203660ustar00rootroot00000000000000[package] name = "benchsuite" version = "0.1.0" edition = "2021" license = "MIT OR Apache-2.0" homepage = "https://github.com/rust-lang/cargo" repository = "https://github.com/rust-lang/cargo" documentation = "https://docs.rs/cargo-platform" description = "Benchmarking suite for Cargo." [dependencies] cargo = { path = "../.." } # Consider removing html_reports in 0.4 and switching to `cargo criterion`. 
criterion = { version = "0.3.5", features = ["html_reports"] } flate2 = { version = "1.0.3", default-features = false, features = ["zlib"] } tar = { version = "0.4.38", default-features = false } url = "2.2.2" [lib] bench = false [[bench]] name = "resolve" harness = false [[bench]] name = "workspace_initialization" harness = false cargo-0.66.0/benches/benchsuite/benches/000077500000000000000000000000001432416201200200405ustar00rootroot00000000000000cargo-0.66.0/benches/benchsuite/benches/resolve.rs000066400000000000000000000124351432416201200220720ustar00rootroot00000000000000use benchsuite::fixtures; use cargo::core::compiler::{CompileKind, RustcTargetData}; use cargo::core::resolver::features::{FeatureOpts, FeatureResolver}; use cargo::core::resolver::{CliFeatures, ForceAllTargets, HasDevUnits, ResolveBehavior}; use cargo::core::{PackageIdSpec, Workspace}; use cargo::ops::WorkspaceResolve; use cargo::Config; use criterion::{criterion_group, criterion_main, Criterion}; use std::path::Path; struct ResolveInfo<'cfg> { ws: Workspace<'cfg>, requested_kinds: [CompileKind; 1], target_data: RustcTargetData<'cfg>, cli_features: CliFeatures, specs: Vec, has_dev_units: HasDevUnits, force_all_targets: ForceAllTargets, ws_resolve: WorkspaceResolve<'cfg>, } /// Helper for resolving a workspace. This will run the resolver once to /// download everything, and returns all the data structures that are used /// during resolution. 
fn do_resolve<'cfg>(config: &'cfg Config, ws_root: &Path) -> ResolveInfo<'cfg> { let requested_kinds = [CompileKind::Host]; let ws = Workspace::new(&ws_root.join("Cargo.toml"), config).unwrap(); let target_data = RustcTargetData::new(&ws, &requested_kinds).unwrap(); let cli_features = CliFeatures::from_command_line(&[], false, true).unwrap(); let pkgs = cargo::ops::Packages::Default; let specs = pkgs.to_package_id_specs(&ws).unwrap(); let has_dev_units = HasDevUnits::Yes; let force_all_targets = ForceAllTargets::No; // Do an initial run to download anything necessary so that it does // not confuse criterion's warmup. let ws_resolve = cargo::ops::resolve_ws_with_opts( &ws, &target_data, &requested_kinds, &cli_features, &specs, has_dev_units, force_all_targets, ) .unwrap(); ResolveInfo { ws, requested_kinds, target_data, cli_features, specs, has_dev_units, force_all_targets, ws_resolve, } } /// Benchmark of the full `resolve_ws_with_opts` which runs the resolver /// twice, the feature resolver, and more. This is a major component of a /// regular cargo build. fn resolve_ws(c: &mut Criterion) { let fixtures = fixtures!(); let mut group = c.benchmark_group("resolve_ws"); for (ws_name, ws_root) in fixtures.workspaces() { let config = fixtures.make_config(&ws_root); // The resolver info is initialized only once in a lazy fashion. This // allows criterion to skip this workspace if the user passes a filter // on the command-line (like `cargo bench -- resolve_ws/tikv`). // // Due to the way criterion works, it tends to only run the inner // iterator once, and we don't want to call `do_resolve` in every // "step", since that would just be some useless work. let mut lazy_info = None; group.bench_function(&ws_name, |b| { let ResolveInfo { ws, requested_kinds, target_data, cli_features, specs, has_dev_units, force_all_targets, .. 
} = lazy_info.get_or_insert_with(|| do_resolve(&config, &ws_root)); b.iter(|| { cargo::ops::resolve_ws_with_opts( ws, target_data, requested_kinds, cli_features, specs, *has_dev_units, *force_all_targets, ) .unwrap(); }) }); } group.finish(); } /// Benchmark of the feature resolver. fn feature_resolver(c: &mut Criterion) { let fixtures = fixtures!(); let mut group = c.benchmark_group("feature_resolver"); for (ws_name, ws_root) in fixtures.workspaces() { let config = fixtures.make_config(&ws_root); let mut lazy_info = None; group.bench_function(&ws_name, |b| { let ResolveInfo { ws, requested_kinds, target_data, cli_features, specs, has_dev_units, ws_resolve, .. } = lazy_info.get_or_insert_with(|| do_resolve(&config, &ws_root)); b.iter(|| { let feature_opts = FeatureOpts::new_behavior(ResolveBehavior::V2, *has_dev_units); FeatureResolver::resolve( ws, target_data, &ws_resolve.targeted_resolve, &ws_resolve.pkg_set, cli_features, specs, requested_kinds, feature_opts, ) .unwrap(); }) }); } group.finish(); } // Criterion complains about the measurement time being too small, but the // measurement time doesn't seem important to me, what is more important is // the number of iterations which defaults to 100, which seems like a // reasonable default. Otherwise, the measurement time would need to be // changed per workspace. We wouldn't want to spend 60s on every workspace, // that would take too long and isn't necessary for the smaller workspaces. 
criterion_group!(benches, resolve_ws, feature_resolver); criterion_main!(benches); cargo-0.66.0/benches/benchsuite/benches/workspace_initialization.rs000066400000000000000000000024341432416201200255160ustar00rootroot00000000000000use benchsuite::fixtures; use cargo::core::Workspace; use criterion::{criterion_group, criterion_main, Criterion}; fn workspace_initialization(c: &mut Criterion) { let fixtures = fixtures!(); let mut group = c.benchmark_group("workspace_initialization"); for (ws_name, ws_root) in fixtures.workspaces() { let config = fixtures.make_config(&ws_root); // The resolver info is initialized only once in a lazy fashion. This // allows criterion to skip this workspace if the user passes a filter // on the command-line (like `cargo bench -- workspace_initialization/tikv`). group.bench_function(ws_name, |b| { b.iter(|| Workspace::new(&ws_root.join("Cargo.toml"), &config).unwrap()) }); } group.finish(); } // Criterion complains about the measurement time being too small, but the // measurement time doesn't seem important to me, what is more important is // the number of iterations which defaults to 100, which seems like a // reasonable default. Otherwise, the measurement time would need to be // changed per workspace. We wouldn't want to spend 60s on every workspace, // that would take too long and isn't necessary for the smaller workspaces. criterion_group!(benches, workspace_initialization); criterion_main!(benches); cargo-0.66.0/benches/benchsuite/src/000077500000000000000000000000001432416201200172205ustar00rootroot00000000000000cargo-0.66.0/benches/benchsuite/src/lib.rs000066400000000000000000000137621432416201200203450ustar00rootroot00000000000000use cargo::Config; use std::fs; use std::path::{Path, PathBuf}; use std::process::Command; use url::Url; #[macro_export] macro_rules! fixtures { () => { $crate::Fixtures::new(env!("CARGO_TARGET_TMPDIR")) }; } // This is an arbitrary commit that existed when I started. 
This helps // ensure consistent results. It can be updated if needed, but that can // make it harder to compare results with older versions of cargo. const CRATES_IO_COMMIT: &str = "85f7bfd61ea4fee08ec68c468762e886b2aebec6"; pub struct Fixtures { cargo_target_tmpdir: PathBuf, } impl Fixtures { pub fn new(cargo_target_tmpdir: &str) -> Self { let bench = Self { cargo_target_tmpdir: PathBuf::from(cargo_target_tmpdir), }; bench.create_home(); bench.create_target_dir(); bench.clone_index(); bench.unpack_workspaces(); bench } fn root(&self) -> PathBuf { self.cargo_target_tmpdir.join("bench") } fn target_dir(&self) -> PathBuf { let mut p = self.root(); p.push("target"); p } fn cargo_home(&self) -> PathBuf { let mut p = self.root(); p.push("chome"); p } fn index(&self) -> PathBuf { let mut p = self.root(); p.push("index"); p } fn workspaces_path(&self) -> PathBuf { let mut p = self.root(); p.push("workspaces"); p } fn registry_url(&self) -> Url { Url::from_file_path(self.index()).unwrap() } fn create_home(&self) { let home = self.cargo_home(); if !home.exists() { fs::create_dir_all(&home).unwrap(); } fs::write( home.join("config.toml"), format!( r#" [source.crates-io] replace-with = 'local-snapshot' [source.local-snapshot] registry = '{}' "#, self.registry_url() ), ) .unwrap(); } fn create_target_dir(&self) { // This is necessary to ensure the .rustc_info.json file is written. // Otherwise it won't be written, and it is very expensive to create. if !self.target_dir().exists() { fs::create_dir_all(self.target_dir()).unwrap(); } } /// This clones crates.io at a specific point in time into tmp/index. 
fn clone_index(&self) { let index = self.index(); let maybe_git = |command: &str| { let status = Command::new("git") .current_dir(&index) .args(command.split_whitespace().collect::>()) .status() .expect("git should be installed"); status.success() }; let git = |command: &str| { if !maybe_git(command) { panic!("failed to run git command: {}", command); } }; if index.exists() { if maybe_git(&format!( "rev-parse -q --verify {}^{{commit}}", CRATES_IO_COMMIT )) { // Already fetched. return; } } else { fs::create_dir_all(&index).unwrap(); git("init --bare"); git("remote add origin https://github.com/rust-lang/crates.io-index"); } git(&format!("fetch origin {}", CRATES_IO_COMMIT)); git("branch -f master FETCH_HEAD"); } /// This unpacks the compressed workspace skeletons into tmp/workspaces. fn unpack_workspaces(&self) { let ws_dir = Path::new(env!("CARGO_MANIFEST_DIR")) .parent() .unwrap() .join("workspaces"); let archives = fs::read_dir(ws_dir) .unwrap() .map(|e| e.unwrap().path()) .filter(|p| p.extension() == Some(std::ffi::OsStr::new("tgz"))); for archive in archives { let name = archive.file_stem().unwrap(); let f = fs::File::open(&archive).unwrap(); let f = flate2::read::GzDecoder::new(f); let dest = self.workspaces_path().join(&name); if dest.exists() { fs::remove_dir_all(&dest).unwrap(); } let mut archive = tar::Archive::new(f); archive.unpack(self.workspaces_path()).unwrap(); } } /// Vec of `(ws_name, ws_root)`. pub fn workspaces(&self) -> Vec<(String, PathBuf)> { // CARGO_BENCH_WORKSPACES can be used to override, otherwise it just uses // the workspaces in the workspaces directory. let mut ps: Vec<_> = match std::env::var_os("CARGO_BENCH_WORKSPACES") { Some(s) => std::env::split_paths(&s).collect(), None => fs::read_dir(self.workspaces_path()) .unwrap() .map(|e| e.unwrap().path()) // These currently fail in most cases on Windows due to long // filenames in the git checkouts. 
.filter(|p| { !(cfg!(windows) && matches!(p.file_name().unwrap().to_str().unwrap(), "servo" | "tikv")) }) .collect(), }; // Sort so it is consistent. ps.sort(); ps.into_iter() .map(|p| (p.file_name().unwrap().to_str().unwrap().to_owned(), p)) .collect() } /// Creates a new Config. pub fn make_config(&self, ws_root: &Path) -> Config { let shell = cargo::core::Shell::new(); let mut config = Config::new(shell, ws_root.to_path_buf(), self.cargo_home()); // Configure is needed to set the target_dir which is needed to write // the .rustc_info.json file which is very expensive. config .configure( 0, false, None, false, false, false, &Some(self.target_dir()), &[], &[], ) .unwrap(); config } } cargo-0.66.0/benches/capture/000077500000000000000000000000001432416201200157435ustar00rootroot00000000000000cargo-0.66.0/benches/capture/Cargo.toml000066400000000000000000000006401432416201200176730ustar00rootroot00000000000000[package] name = "capture" version = "0.1.0" edition = "2021" license = "MIT OR Apache-2.0" description = "Tool for capturing a real-world workspace for benchmarking." [dependencies] cargo_metadata = "0.14.0" flate2 = { version = "1.0.3", default-features = false, features = ["zlib"] } tar = { version = "0.4.38", default-features = false } toml_edit = { version = "0.14.3", features = ["serde", "easy", "perf"] } cargo-0.66.0/benches/capture/src/000077500000000000000000000000001432416201200165325ustar00rootroot00000000000000cargo-0.66.0/benches/capture/src/main.rs000066400000000000000000000136511432416201200200320ustar00rootroot00000000000000//! This tool helps to capture the `Cargo.toml` files of a workspace. //! //! Run it by passing a list of workspaces to capture. //! Use the `-f` flag to allow it to overwrite existing captures. //! The workspace will be saved in a `.tgz` file in the `../workspaces` directory. 
use flate2::{Compression, GzBuilder}; use std::fs; use std::path::{Path, PathBuf}; use std::process::Command; use toml_edit::easy as toml; fn main() { let force = std::env::args().any(|arg| arg == "-f"); let dest = Path::new(env!("CARGO_MANIFEST_DIR")) .parent() .unwrap() .join("workspaces"); if !dest.exists() { panic!("expected {} to exist", dest.display()); } for arg in std::env::args().skip(1).filter(|arg| !arg.starts_with("-")) { let source_root = fs::canonicalize(arg).unwrap(); capture(&source_root, &dest, force); } } fn capture(source_root: &Path, dest: &Path, force: bool) { let name = Path::new(source_root.file_name().unwrap()); let mut dest_gz = PathBuf::from(dest); dest_gz.push(name); dest_gz.set_extension("tgz"); if dest_gz.exists() { if !force { panic!( "dest {:?} already exists, use -f to force overwriting", dest_gz ); } fs::remove_file(&dest_gz).unwrap(); } let vcs_info = capture_vcs_info(source_root, force); let dst = fs::File::create(&dest_gz).unwrap(); let encoder = GzBuilder::new() .filename(format!("{}.tar", name.to_str().unwrap())) .write(dst, Compression::best()); let mut ar = tar::Builder::new(encoder); ar.mode(tar::HeaderMode::Deterministic); if let Some(info) = &vcs_info { add_ar_file(&mut ar, &name.join(".cargo_vcs_info.json"), info); } // Gather all local packages. let metadata = cargo_metadata::MetadataCommand::new() .manifest_path(source_root.join("Cargo.toml")) .features(cargo_metadata::CargoOpt::AllFeatures) .exec() .expect("cargo_metadata failed"); let mut found_root = false; for package in &metadata.packages { if package.source.is_some() { continue; } let manifest_path = package.manifest_path.as_std_path(); copy_manifest(&manifest_path, &mut ar, name, &source_root); found_root |= manifest_path == source_root.join("Cargo.toml"); } if !found_root { // A virtual workspace. 
let contents = fs::read_to_string(source_root.join("Cargo.toml")).unwrap(); assert!(!contents.contains("[package]")); add_ar_file(&mut ar, &name.join("Cargo.toml"), &contents); } let lock = fs::read_to_string(source_root.join("Cargo.lock")).unwrap(); add_ar_file(&mut ar, &name.join("Cargo.lock"), &lock); let encoder = ar.into_inner().unwrap(); encoder.finish().unwrap(); eprintln!("created {}", dest_gz.display()); } fn copy_manifest( manifest_path: &Path, ar: &mut tar::Builder, name: &Path, source_root: &Path, ) { let relative_path = manifest_path .parent() .unwrap() .strip_prefix(source_root) .expect("workspace member should be under workspace root"); let relative_path = name.join(relative_path); let contents = fs::read_to_string(&manifest_path).unwrap(); let mut manifest: toml::Value = toml::from_str(&contents).unwrap(); let remove = |obj: &mut toml::Value, name| { let table = obj.as_table_mut().unwrap(); if table.contains_key(name) { table.remove(name); } }; remove(&mut manifest, "lib"); remove(&mut manifest, "bin"); remove(&mut manifest, "example"); remove(&mut manifest, "test"); remove(&mut manifest, "bench"); remove(&mut manifest, "profile"); if let Some(package) = manifest.get_mut("package") { remove(package, "default-run"); } let contents = toml::to_string(&manifest).unwrap(); add_ar_file(ar, &relative_path.join("Cargo.toml"), &contents); add_ar_file(ar, &relative_path.join("src").join("lib.rs"), ""); } fn add_ar_file(ar: &mut tar::Builder, path: &Path, contents: &str) { let mut header = tar::Header::new_gnu(); header.set_entry_type(tar::EntryType::file()); header.set_mode(0o644); header.set_size(contents.len() as u64); header.set_mtime(123456789); header.set_cksum(); ar.append_data(&mut header, path, contents.as_bytes()) .unwrap(); } fn capture_vcs_info(ws_root: &Path, force: bool) -> Option { let maybe_git = |command: &str| { Command::new("git") .current_dir(ws_root) .args(command.split_whitespace().collect::>()) .output() .expect("git should be 
installed") }; assert!(ws_root.join("Cargo.toml").exists()); let relative = maybe_git("ls-files --full-name Cargo.toml"); if !relative.status.success() { if !force { panic!("git repository not detected, use -f to force"); } return None; } let p = Path::new(std::str::from_utf8(&relative.stdout).unwrap().trim()); let relative = p.parent().unwrap(); if !force { let has_changes = !maybe_git("diff-index --quiet HEAD .").status.success(); if has_changes { panic!("git repo appears to have changes, use -f to force, or clean the repo"); } } let commit = maybe_git("rev-parse HEAD"); assert!(commit.status.success()); let commit = std::str::from_utf8(&commit.stdout).unwrap().trim(); let remote = maybe_git("remote get-url origin"); assert!(remote.status.success()); let remote = std::str::from_utf8(&remote.stdout).unwrap().trim(); let info = format!( "{{\n \"git\": {{\n \"sha1\": \"{}\",\n \"remote\": \"{}\"\n }},\ \n \"path_in_vcs\": \"{}\"\n}}\n", commit, remote, relative.display() ); eprintln!("recording vcs info:\n{}", info); Some(info) } cargo-0.66.0/benches/workspaces/000077500000000000000000000000001432416201200164615ustar00rootroot00000000000000cargo-0.66.0/benches/workspaces/cargo.tgz000066400000000000000000000233571432416201200203140ustar00rootroot00000000000000‹cargo.tarν}ko#K’έ|ξ_!h?ΨΖ½ΤΝχc16fΌ»€Χ끍Α|1fB>"%NS€–€ϊqύο{’ψ_Eͺϋ–ϊΞΒβL_U%‹ΕŠΜȈs""“%Νοf?έώsϋ±,nΗΣ6»ωλb6ύΝ`/—3ζd;Ώ”8φΚy㌲8–΄ύζ;ΌžΛ4ΏΊϊΝ§―χξκκϊnΌΌώϋ+>ΔΙβ>Iœ]›$«bkΩ‡”p5)EV4“m6…¬nΩ₯λ»Ο]]ΟιaΆ$ώθύrωΈψϋŸ~Β}οŸςM™=ό4G?&izχS§j7ό•ψάΏρ‡―ΣςŠΗϊΗΏ~χoο~σφϊ.―n4~ϊ‡nL–³‡Ιk|Η…ωo₯6σ_xkάΫό―?η§ρ€Ž*=΄΄ŒiqΣ&iIκύ»J-=M–£Fiω4§ΕΥ½ji² w; Ύώy2ΞΧοί}€ωb<›’ιZވ}ύξέ©{£³Οάxη&βΖά(ΗwΩύόϋwiϊε~φiύ5ΧοrωeuΉΊ~—Ώ,i1ώ™Άο–Izδu£%_Pζ³Ε"Sz=-Η“ΕκƒνOσΙhρe±ωbƒ6š~ΌΜξξhΎj|Γ6žΠrό@λ―Ό‰Χο`ΙΤκTκιV #ΎγΊΥάH4Nfyuω>χτyύ]8žmξgqςτ¦›oPθH\<Ύ›Ξζ΄y6†‡ΡΌtBΪυKš/g³D²kόλ,/hώqσόθ|Ω$ύόε*Ώ―nΔͺ΅Πd²Ϋ4ΞeΫ±8ι€Ϊv‘T7}„ώΩ< ‡r?_=Έd 
˜>=ά–Η§Εv\i>ώxβΣRυ|ώ–ΡΑMζιΣνΗ4y:υΗ£ϊUF^c’ΰγG'τŠ΄=$˜νm}Ύ`υύwY<=>ΞζΛsχY_ΒwΪ<4‹1ύ2ϊ”ζSΨ§/p‡n”a΅>ŽΆβΟΧGcίuΥ΄²E­•ͺ»p}όΣζM\χξΟ0sΖ.=-οgσU―_Ίͺικ_ςη«ί~ϊRrρ»»‡4ž0€ώoΧ?^]ΈMώΜθ=]ύφ~i&“ξt{Ιο'π50—ϋ%:ύ· §Ώγ”uΣκBt¦N[Ή{:HψγUΊZ?ΩόRbYahώ|σ—ιυ»:+O0αiσ± τΗ‹›ωΊ{αLλxs…μ9Ψι=ςmw>³‘Ά“1Γ’{χό§«ύΗ«ίγ9ξiΤYΠiZωΗυΝη”κͺατϋόΓ?έ-ώΛυΝ>"iŸ*{’΅sXΉΛŸ»AszΜ³œ΄\T·ϋ4­Β%|_Ύw€ψώβ­ΫΖSώ"j|2]nΨΧ?-ψψq>+΄X@AwΞ–χά{‹υM7‹/ΣrΏ>Ζmi>ŸΝΧΗόˆλΓ2›¦ΛC‹ γςΖώ£πΏΕΌόχˆ ϋ+π?~σ?aήψίw“hr°¨ΐ₯ρwΪΖ΄7φmόΏfό:rq·ό h€Sž«3Ÿύ•Κςκι#M|ζ‹€ρŸWπΨψΞ-˜˜5€•½Ϊ¨δΝKΗ%zτ8‡ Ύ‹μ|ν/Δwϋ•ηP^αλν?ζΏPoσΧgN5Έ4ώ֍Ώ‘^Ώχ°' ψ$π‚ΩΥ'ϊΪwυΫΏvηΏƒUX¦ιψseό4…Ώbf±ŸŽ+nΜ;«ΥβiΌ€sFόœ}ό†|7°π‹MΉψΫεBηη0ΰμΏιrBoσWu,lpaό₯tζhό|Λwό]»Ρζ(γu*ίυœΪκΙD%8‡C³t[ΗσΫ4™lSBΟθ핇ι~ˆΓΜ^JεdΚγT(ωζ¦'Œ|!a²ύά*Y2|Zβ—EΤeθef/ ωΪ―φΫ`ϊ_ψfνΰ[όΏ5ώΝώ:γΟFmЊ Kώί¨#―ΕϋΫπeώεq9έ§Εύ¦~Ež¬ƒy.ia|\|"χjJnΒQωΘNd΄)Όΰ―w½υϋ%SΩ«£Π«Š…‚“/Έ0&Miφ΄ΈZ[Ε+κΈzZP½Κ_V7»ωΞόt…<~13•9ΞΥρ팻κ/Χ@D³Ε_3—e6Η@͞¦΅γορb|ŸΏ•βΦίn@ΐAn0Ύ8·ϊ0^i#«ϋ%‰Υ2›.fΫtκs²τ0kɚόf'ƒfΏ%ώ+τ›ύώγΏ)έΆ ψbώΟΘΓρW^Ύί†ί²š3›'hϋ–“‹_R¦φ‹™μΙυ’‹L§Kš§²dA>Αg^m Έ΄ι[)πsωγ/τΞ«‚Ι_gώZς φ_9ϋ6ΏŸύ_™ϋΙ¬|xο8?ώήx{jύ‡|οπϊ»«?ݏa™Lα/,μμγ΄i2ωrυ»;.Χ‡MΨα9οώξκŸ—|ιtΆμ,(¬zνΜιCš>₯ΙUg7§w7;ΖLÈo­ψϋ­©LuBσλΓΊΗΕμ ~†ζt7^,η_~8o=7v{Δ ώ3œΦ=•‹§ΎESφΫ S‚Ο’„ΜUΧZ¨ΆΖZΙ9HK‘PΝFΆVJ¦lr³Q'm]χ<όύl"”γςαΐpϋ.;¨’΄/­ZςΞϊT“±ΝUΕX’υ!Ψ ²-VΕZ|h©II…NL©kΧοv0;Πw]ύhΉŸ_ψξ}ˆΣΕψ–£ο‡΄q΅΄bHωˆLΜ©Ή’τ9XR¨Qηf”¨Yι [ΚΖU- ͺ$„ΞFTΫ*EσIωVDπœ| vŽИ…s¬}Π΄ΰΙj‹V*ΆZ1N! 
U\¨₯yL1Y@ §]•J‘RRFΩ§σyϊς‘Κaεςΰ3HιμT+ΞV dΘ7«¬6-[•EΣT…αNϋme$j*€ ‚.δΎη_.Ώ<»Ί‘Cw~YΗL2x/ͺ”FΙR³Μ1YT«nΪSΤ"ͺ’q蝄‚…€e„r…“šuI1^ŽRfνΊΊζΈVwpQγ`^K»;R99°ΠQhΙ*F*x_­+%΄FUQ;Μ!Wth€*y!¬Ρδ”o!Α§žΛγe›€»ΕΑσλΑ5.SΣ‘+.ͺAΡ$_[ΥΊ˜f[’ŠΒB ΠL«E†*\ΛR`e8σόιqρΥΝπΦ }/ŒΖΌ1bjΤ€α0FΉ%£LVΉ„\]¬Qee£S΅TRΈ@Η”Ρϋκ€Ξ-Ώ ιι‘_Ή2žϊx>ω‘‡NΡE ¦Π€Α˜Ψ$δδάή”Ϊgγ½YΦGΤdLM― Ÿ>)ΫN‚Έ›G[ΟΤ=2}­qI:#ύzνηn?ΈΨ@E»” ¬›ƒXIB3­Μθ‘|lΦy‘Ό’B5λ-μŠ'_‚u©6ψγέμ_„r’ΏΦž‹ϋ§³’|°ν>9¨F}nΪΙt4nb΅ΟΝ]ŒΉ;€ΗΥΑ–άΞφVΣΪ@ϋ·Ό^΄;ΩYK»Šώrγ&‡°:ι’ζ«ΓΥzšξ˜‹ Ά£νΝΉδ ;ΰ€C·ˆlu6[ίνyνjzsΫjqθκΧώ6ε έΩ6i±²κJΉY$»oςw–ΖΞgw‡*ΌY۝¬–ΐn‹Υύ#+«Ζ£%e݌8±8vϋFξŽWλΧ‡Όzp{΄Y%»ΣΒq›ΥιNΊeΥp΄PΆkeZύ]ηaΦ'λ:ΥΩμa%ΩήβΨ½–ΟγΥClFuΒy‘'½ΌπυΔ΄ΩτΔΩ{ξΦ‚Χ.\ϊΰ™²‡σσψΜτάΞΓΎσΩΪέh܎δƒγ ΰ=2ZG@qB.Uθ‚©‘ Šrφ&[0@jΉω(Α«+†μ}S”]Ψ>ξӚ Φ}xP ΈυΊαƒX«T5ˆ–πW–bZΔƒ+<Έ‰Axe­ϊ†lBυL ίχχ0ΎΆΣ‹ρΓZy?/?ΝΧϋΨψ‚0ή>tοφσμαa6]MΊ#89Έ2 ΐ’`ƒΛIΩ$ΙT'Kd"€ΎBoηtΫΪ†7RV-Ș.ρ';lW‚΅_~¬έ•―-―lTAσ ŸΙλŠIi}@Τ5;V@_ΐ2„;S*E₯ρΝ ξΣ ²Άs½2ξ›ΤΓTόΠάS€I%2ΚQ ZZ[AΩ€(˜.Kγ45 ζV²1…Hd₯ fnΐ»F$§z†tOˆg΄υ5’Ÿα08u₯€”ΜVεΙ‘uΖ„wi¨o0T.‰Z(ΊVWͺς):·e_°δόœκ`™‡πp‹Ξ;ω9Ψ\K‹εCPƒλSΊF…Π£πy±©Τ*˜~,€YNVY*tΈ‘T©Ζ˜«χΝa6ΑCΘtZŸVΞμŒxϋΤζPkμΠv0¨Š”…|SE·DΝ‡’ƒTΖw‘N‘ktAρgͺ€R±su(¦ζσB"ΐ^‘ŸΦaYΜΠaΉΌ—Ž£œ­F―L¨2&λ ώYΚΦι€›*λΰΘH€—¬»rΎΦZEΓΏƒ9>uΘ·."9Φώ£­Jβΐ½R9Ώ5­ ‡Ήš„7pUY!…C[5ΐz& ˜ΣΔ†ΒR(ο}…%‰9žξ…]ζ½ΕΐkαGσY¦½–Ν₯‹rŸ¦SZσYω@kVρ’Ύ:a[͍‰?tοω?ΌŸ!ь‰]“ΰ@Πkθ (|AX$ηJ.7 αYLu\&υMΪ/ΟXmQ|;™•ΤW'΄)‘ιλΩήkΟΨW(]Π §Ρzς² “”€ξH2L_°π„ΪͺπR*%ΐi«ΛA³«ι ΊljV»—Ήε€ζΫgωάφyΆΈ―k†Έ°ŽΚύΣτΓΊXk[8ΚΩ n»Η>ΣaΫς‘γ]ο‡Ζ<’q¨N„Ό‘ƒB&y£Λ\{ ΄E†ΐ<ΐ.R‚{£tΊΣΦΡτ3ςΝ«γΘ쁂/,Δ`­€¦,μ ΐΎΐ\ LY`A‹ΗEΣ[^ `―jŸ±|.Υ9*z:I—š² Œ=Ζδ4τŽc«Ϊ&Η…Μ€ρΑ)œ/ͺΊβU1]f²XW‡ζΏνΪ‰}m†_t’€Ό-£²/mͺ)Ο%Ν )ΐK`$ΰ$¦Ιͺ6 “f¬…ΠΒ⌻ξ‘«}mΔ4ϘB²ΚΚλ-\ΦΒΫ0NœB5ΚVA>aέ1}‡ DPπΣ™€hΦφ=?ΠQ^ ν+ )?%ι”Jί΄’Τx?Πκt Θeε©5§Bj¦Κμ\†}T›λϊmΌΰ`‚(̐^ϊΥ\ΐΓγιω‘―+>ΧE₯ ¨mM%ΐΪΪΐ …―°πΏUI@Γ—¦"‹ϋoΙCΎον©έάζ‘5ρθ*cο+N7b4s†Μ:€’4‘r”^€Μΐ τTˆX2ΡKUβ«SΡύσσQΟΘΥΟ8 ;WΘm1L,U^7R2uΝ d}ihmŒ7¦I&»JTtI¬ η^Τ1_)ϋμξuνK-xQΞδ ˜‘•­h’@8Πξθ,hR6Εiΰ‘‚‡Ch„—ATν°ίTΖ΄‘'‡s{π: `“p’šϊ—m`~pIIfςΞ FΎΥgθ>ϋY1ΔβB ]Ίρτγ―ΘΑ>«>@«αD /ή0Ž>(8Qn«k VΑ«“ΐ–WΩͺ„ͺ0U³ΌοιwςŸG•1C@δτ-Αe ί•Σci„βtaI°<π?ΊΊm‘ΦS[˜\Ί ™Συc«υΩ]ΡρzέbΏ’rάnπ₯g±Α d‘l©)xל£ft’ ΄QR.±™R¨iΓUοBHΐ 
”3&•!%Ώ)€΄]Όs`‡G&16PηΌ,\ϋΞ P#Wv­k­qώ£(Ό‡Η„±tΎ6ιdσΐEϊ+VͺφΘ:›Ί=]Γπ‹θ’j%ε9“₯·A¨ˆΡŠŽΘip  Μ"TΩΓVxΰΦ N 2MΆ9`۞)·^\uΙϊι ΑΰεR=&Ό·Εӌ»ζ^rU'π‘ζ1Œ4:Ϊ"2&„φt‰σ6!pI#7eŒGԜ‹›†%5ΆTjφ\+r·¦ΑΑ+Ϋ †` Wd¨o7.—ΰ* ₯5썬3ιršo'ΓΉ_N±_»ΈΥΛc sΎ“ΦιΑC7΄ωβ‘θϊJD(ΉTΠcΆœπwž—†jϊΦΘIQ’ςΝj κ€Λ―Ο ±8Œ^I w#~`€&'CΗSVλζΐ)]išŠTVΤΎ”’¦ƒgŒό!Θ―΅ύπΟ…up!πωQf‘©˜o«b€Tpͺ^[Ρr<lΨ[NξׁπUF‘om“δDρe§b,Ο€[*΄ΆΐΫY’‚ΣΞ1•’π 9AP―%˜n*€}–υ΄Μϋ‹¦{ΔάMΛšΘ'6p5(ώ”­Θ$Ισ πΨ9}TΙΚ Β›\l2ڛ欨ϊRΡϊ4[Ρ =D4/Gol-μۚΎ£WΕΦ)WŠlΌi™’6₯~TΈr€„«πf 0„8ο©ΉrξάTŒS ηχ›4Τ}Rn½»k»/ηϋas“C0x0ΞΨE§“hI+eU&aV ¨,v2§Έ ±p ΄8₯ 9‘έAœpΊ;vάΑιN8+ψq¦Χ.vΉ’UΡ΅ΒΖV€K~"€:‘©…8Z³@z"΅+ŒŽjr5ΡW ΰ†0K^j!/όπQ²j`ϋ9s0ΐdπeP,ΈHri₯՜“~C§gιsMθω!Ί/―—ΥJ‚―ΰxŒ¨˜ŒΙΑΰCdH2#DΥ\2{ΕUX nΏ6}|_§ˆ›Μρkγ€Π—’ΰ6xK§¨#―jσ d]t)Μ’]ςπ,p(-'/ΑE„0E]εΓϋDέ[¨ρκθ-π~u::ΐ οPW9Ρ₯UΩ…"ŒΧΪ«ˆYgxύ¦ ΉVΝuR-΄v‰7χJywP‚&Ρ-ƒo¨&| NEm#g1UΛΑ¨fw©ΰλU >'¬qI¬j•u^Γ8ω²ςΘ£ _¦Λτω‚πΟϋDΥFΏ ”’‚&g_QDΦHΟr?€6k_mi²dga]=:γ|}NŒ΅°‡φU ½RŠΝψP\~Rΐν”i­ΐ `JΎe`@RjφGΛδ€lsj.› 1χ ±·ΏΜkB―ͺ„‚9 ‹‘4W3Ux9ΕΑ{ͺ©ŠfE »"1.d4L’₯zΜθ₯(άΙ]˜^{s”V<BKI‚wR+ Δ «-i" 3jƒ„cԊ“; ώ…“I&hβuΏ,ΌqΤ‘Ύ >g8θ_Ξ£λM3`<±ḍs ŠΩ„βP%WfŒ±θζ]τΰλιF›’²ž- ϊFςΛΣΡΐΩΑkξ`ΰe52‚xπΊ2°£ζNPR―0TΡ‹λFDΑεΦgζΨ…β½ν²τW/wM€;1‘ 9θ*/’5ΎQ4ఞb]ΧU–θάΑ6ΌQcττφE[^²+λΫŽ6₯:Ν™WrΚ†sY€ΦπTAΆ΄I•ΣσΕ5_’qΌ>-‚> Qh s7ωMϋ«­ή>Zi§·’RUπΫnΛd₯q <¨$NO§[ldš`ω€΄­χ ΜΒVηr…£“Ί_ΆΫυ―œqsΥλKŠyVΨ΄gΘi(πVΞKS² ”TΦͺ"•R&*γ,680q‰A~Ac‘ι¬ι—ι%Ρ7› žΨoΨE:E_u¬pΡ₯ ΞΘK`œŸβvd*Πk_8$μjί¬Ώ+grdn‡.ΝE¬Ž ·0΄6W–*+LΕψzC|WΡ”Ξ– ΩQΑΉ^γέΑιΚ^;H —χγ ήέοpD‡v‘Ζζμΐύ`c’TΞ•βƒπcηT‹σf8@=-U_»’`ΓΡ-`πl„ ¦―†d―ϊΰ -ƒ؊JΑk“O\5ͺmη2<@Ώ |ΦVN’¨½ˆLξ‹²΅Βƒͺ ΏB+ΐ/nΌΩ·εΟ=ΈO4rΕg’`ΆZEƒͺYήΎœ˜Ξ{ΡDWAΨn.|΄•ξ|„ρ’“<ή–τΥΧ‘KΈΜHΥVΙΐU} 4Θι³ΰ₯œœ/4 χŽD—T2EΌs=ςγ’Ξ Ι{ϊƊV“ΌΘ«ΗŠ…w°ώ ΞJπWžLπ`ΈΠS`"ΜJŸ€`jeβDw_ξš}ΕΡΖC[ɏW‡wΑεN3―VQ1c`dΩγ‹**MŒ‚ό_ζ}ΓUΒυμ…Ωο^”ƒα­S–ΕY+yνu­f?ηΚ;Yδ ‹zκrςΩ[Sk‚wΰεͺκΰŒΐ8'{ΊβxocΥσ•BΫRw%Υ―±Ad*²€K)Z‰8ƒΖ™'€κο¬-³ΔόT1at„?¨Α ΚJ½|/Ξ˜ήΤεΔvΉ— Τσή‡υ΄ƒ8C €΅‚7K& Ε%‘ΐ1sHžν/xнΆN‚z§Ϊ¬!“ŠmζΫ‰Ωv£ΡWuͺ0φέ^‰Χ4+ ΰ½ΦΌK(¨&ΠDα ͺ8ξc`œ©πD‘7Ωgͺ›­Qϋ€ά]}φΪΕс5W€ψΫZ‚@@Ό–7ψ o[L YΣΒ&Ρ€u“8£ΆŒΡμYzΎ[=Υ'δxzτK"ς ηZΠ]I-oweΠQΟωαΪ•8gιyρ―Ξx?0šUς+kšŽ²ŽΤ­ώΆ3Ω‹‹Bn;±CχΐΏΑ‘ΌΑ?ψI^ήη#π­l`έΠM.£ƒ KBY΅Š$§DλZπ`c}Z 
ήƒϋ0’:Έ_ΥόK)‘7δυx\c$ΈΥ~I—ζψΪi*&!ΑΝΕ€6λTΐ΄Σ·°« ΄=˜cΓ§’²< I9ΛΫ9ήa“ͺ‘Ρ-Zϋ耴>’w¦ΰO…λ€vR*^φL}³·όχ0sθ_Kοšι'ΆˆΊΐ>ΊΦ:kQ…σΥH 8Η{ˆ¬p‘ΆžŒ Ό£¬εφW,Ώzΰ­‚EΏ‚Sqˆ°φMŽm—§g¦ΏŠ`Ι$m‘m:j.·¬Ι-ž3 ύ ·ο^{‰„¦ΚΏΨΐͺr•;RΪI―Έμ$¦Ÿl‚ΓXCΝ*r™¨εͺθγ½Ύ0b I5 νJΙ x@Aΐ¦ij$9ώ₯Λ\ JZΓ&ΈT΄³πΚN—+|†§κα`υ žΈΐG³χΟ.0eœœΌηe“κdαZk²$ ‡ΰ€X5ΧͺϋιΫ=mo±ξ‡x+ƒΏyo§e iΎ Wί‰Υf{<Όeΐ‹ΐ„―“ςAςΛ'τ½£+ψs3z=ΝΧ8yI/Μ³λ½Π“ΐ € γhU?Ž™ ΏάθP7:š‘?∞PtœhΒ˜ΩΫ==ξ< Xαέόα!Όύ€;οa₯ 9‚_>€›]€–ζoGψτnΰα—Oπ1¦~ΛgΩωšάΜϊ*£gTd&ΰ'™ε«hYΦΓ§£›ΉH3~‡«χd Όœ|² γš7ύcEˆTo¬νXŸσšωAύIή7£ΰςΎ@IίΤkvŽςώίγχi{Ύ›‘πίbxŽ4Ύ&ψXιΐόοπ£ŸεΘ^[πν|ˆoθχpκ30ρ]Enˆ·ψπιr=~Šθδ)¨>hτhΰˆ+ΣΔ[ώα1IGΓπ‡•φZΛ©Φ?Νβ-ΏλyΧπΈπOz_V;‡^Νf»ΡλΏύϊ‰λ›nΕ–­)Ωi­‘Bΰik/y»d1­ΘΦόώ@ε«N X ‡  ¬» σΛ'ψ/Ό;7€3dΙΠΊtΝΧ€ΕψΚa „ D1|~άp•^ν1Υ4ωιϊΫoP?΄>½bΤ/ŸŽΰδ^tƒ“‘ρxβΎ3*g\η(6γ'ΌΊŸy¨Ž…ΠΒKόK‘gρΈŠ`y`•π9Αžm)LΗbΖv]šΦΣί€>άυV›g–>| Dώ+ΠϋτfcΩZώO½ώψα_/ω?¦ψjP@Λϊ;Ϋ5£₯νEθ7}‹<3ƒ +D£cΣndΘΗΠ0MΘH)>΄--MΡ¨› =‡½φ&|-άp€Ÿ$‹γ ˜kΣθ>y—k:6h[yΰΙA@6ξΡ\ό.^©‹·Œ.Nΐ39c~‰Ψ˜~BήξbΜZΜ)Τ^•gιθ@¦oόβΗΠ"N€–Οmςηφρ³ΕΡΙζz-Ў. JGωT~Pt@μ*Ήßγοπ龝 *5CΌ°3Ρϋω³ρσ3λυΉ_ΩΟ›Ύ.&\ztŽ,ρ½”39jU=J.AxA£ΕSΡœ6γΣ[Θ9Ύ4΅σCFυx‘ΉŽz8ΩΚ ΎΒg ηldG«β αhβΕ9ƒΰα‹ͺ%¦ΝΠpΑ‚π&δQ~}ΕΘ?GΠf‰τXƒζδΣ―;Ω°ΗΜV㣫Ž'κ~ΘP"žΰκ·ΙΐLΕc\}Ÿ9VuΘγFM_:xτ#Ψ=:ΆΡ«›\ ©ΚwζΨ>Β›Oέx}Wƒe%#py‹ ΎΏΎ’οŽ>R8'ϊϊ€,ΌκGIΝ#ψ΄ϊ}JΤάύ^΅»υY58Ι-(΄$ΥŠwT«νDiό―Δ μγ΅·Ϊώ›yύsA;άlYϋzλλΏ΅Χ[½ώΛ‹Κum%ωažΉΖ*M !†‡ΨΨΘ7Ϋέν„9ΐ~Q…Πw(U5ΪλjŽ„ξh3δ©Ω 9±χ’»ο‘γBdΌgˆV"| Σ– cΐψok9:ώ;oώΗ!ŽŸ}`’—J«!(°-ώ—^½Χλ??ώΫ ύE Y5q¬9­ŒΉ*§0Ί†ΕŸ"Χ²μ­:E/·?Ό·˜Υζ2Φ=RωΖρΣ~δ&oζ)NB7㦠֎й<ΔΪΡ)ͺ4 βx[‘¬ΖΏbΙjδ’υ‘sVb]2ΛN”–P@ύΏ³m^β$‰_ΝηΔ½œΥeΆ­?YσzώΗv§υ ϊΏsΆ_Ο$ΌiΣΈ«―u‹ΈΔ7ͺΏτύ+Λλ“΅΅υχ?ΩϊηŸι!ΎΒϋhαCU mύχ;6ώί½ώΣΛŽ‘ΦQκ·X3< Ž,LI—οœΛΏγž£ΨJ}ƒc;AΏE Oόw­ύ?sϋrηΖψρίν~ΗΖŸAώσάz>vΐY[’Β₯Θg禨$κ^@†Ag‘Kk‰Όt΄[X™κβκW_›ΜΊ;i½†ξκΰ/ΎψΏauΓ‰ΨY£:ύΔΜb“λΐ$s\ό~ουΓ΄Rθ†L€{4_ψ!˜)Θekθ YՏ\ƒ’Šn6oC\Ε­’ ›?Cš!\ΰ>J²k·*ΌCο%ijάί^Ε¨¬œχ£…€%τ<ρί½₯υ¬λΣvPl u΄ςΏμοn­σύάΤΥlUΰ·œjPt΅„f•W”IlJ\`dvRƒwέB3ΉF™ξΘ[ΈΚwƒXΖΕ'Η8/δ;!‘“Γ @ύΏίμ΄ό_Μϊη)Χ=a@+˞YύΖΪλυ_‚ώOΟMDΦœ;uv7ş¬χΙ ΨΫ­υΕ<Ίx‘/<βœi,΄±φ¦&2O>¬ν₯ή₯5<8φΌβτ9‚ϊ]Υ¦kΈ%.»Λ>ͺ]ψ)Η€žΡ νyΊξœ§"3θ;κ”μΏ|Ίnlώlp‡x’κN8ΗΞΜΡt1υλX‚Ξ6 Πψqωήɞ0 ‡ώίΫ[­³ώs§Whμυ‚mϊίήΠυ6ͺ Τλ?ώWμKζ 
֚I’κ¬!&:Οxt˜ΡcΞzόΜ$άuΌΨCύ˜eΨΔw9ς-©Ψ%ϋ@€>ϊΏΣώίιΧΏ —_/‘fθ^ϊΪυ?ΓΆΫmtώΗ"μzύs4ΰ[π΅ϋIΓ`?išΎ…‡80/P?σ5ώ}Ώ’‚}MμlXΰ.M4xwςΏw OύΧVηΞ°ώL›’€VύΟƝ­ΖΛЍaœ΄MΩΝΖμLeόξ-°a f`kͺϋΡΝ ό£^σΗwŸ€Ω5’Ζζ–2δ»qΟp'k’B«]ή™όWQ§ώckλόΏ%¬?³…ψ‚:α‚6ύΏΩΣύmk£νΉό₯Ξ‘ςmςόsk*&ρυ7¨[yΰh\ΎwΒ R,o"4ΑGtπBπΙάΨӍάΕ… –£Γ3?Λ‘]pAwύoC›Pχ[Βϊg‰₯GXό{θ~™υί1ώ{­νμΥκΈO–7ϋΒ‘4υˆ-ΠD» CνNw%6cδϋ.2ُh)ώο>ίW›°‡ό_kώ— εώA{g~1Κ֟RφŸe3φΏmο΄ώ_„W’FΠtͺ­Οl½‘_½σŒˆχ»₯Νθgς―R΅•UN«­ͺoήώ£δΏΒ֟}ύΏΆmλϊŸΙ֞ާ¬ηg‡υw¬Γc[{m-BSω_η$Žβ’λη έΓ3Ί†„+<dc> PόΣx~θeHyCΆΏ‘μk΄ρ3ρ5sKΘ_έΰεθγN36&~’ΜBλz&Ϋ…™Έι’ˆθΘ^Λκ1‚6"&O-=Ύ¬Θž.ύB»7 •h“ η Ψ$|DυΓZf17eΡ•`ΊΥ~όσͺΣ΅—©d#S~' ώm€dzA˜H²¦n>xmxH’>oH·¦T»š•`τέ’7W?8²W†^MΞa:5΅9‚λ*_CeγxωΟ ‰›σΌρ?©UυΞy›υΜω¦Ξ‚sAP,ΒΜu ™’ΚlE@Žν°J%‚Ε}O{υš“_ͺΕh‹Ω'ΘγήjΩ8Υ(\q θΑU€qŠ+6γ9GΠjE±₯¨;f~Sψ_΅ωΧΛώ³΄ύ7ΟϊΓη‘y•‚-λΏΫΪ;–w§λΏa ϊ?΅™…ΌΖ 5K‘Λφr=δ1Β­•3‰=y—k‘sΌΡΙ ŸiœΕOP―Ζ©ηe•’³Γέ‘Θ^”bΓٜnx {–lεSΣ’­on—bνΆ+Όχ°D7ΫΗΗv… 9J―)f—»Wԁ,’‡λ³ΈGgŽ’©£ή»9;‚!Ь„Τ‡O5™’φEθFπŒ!|@«|Α‚ψΰθyA‰e©Θ\‘[œέ\ήnίΗ$²ΗCnαΏ Ώξρ―"5α>τϊψlν?»VⱂsqΈ•Λό!Ω'nYς^U&y$²Δ.gΎ—?~ߜO^p,πΰΥ›y;QΙΫ!ƒŠ€‚‰_έ$¬ Όoδ+ φCP΄?MP9dRK7v―Κ)p3`ΛςaaOοθ‰ς Ψ¬‡β'Ου玷(wξΌ|NΡ7Ή_φ^nΦ“΅εψFΨ›(T l@ςfΔ~ΐZ&Sy|ΧςεΊ¨‘Ž9mτψ|³θpΡKœο^‰^^ά}³#w”g'ϊUΑ¨qL_8šmŸό‰Α;Ε9RΉg,Eis&†ή7‹m¬^μ\Վξ_« ΉzsT#πy€άbŽ„ΒF΅[λnκ~»ώ­&ϋ_•#¨WKΫΣη–υJsΪςvLύ—eο5γΟGŠχ £Δ]―q B:uμioΧ£ ύŠϊι k £ͺ{Ž’Φ9ύ™pΠΰπB¨–Γί³AγŒrή%b όWκΩλώίSζξdεΉRρŸ-Γb9ΊώŐKK{φαΣΙυƒJ N0ˆΕ჋μμΓIœ”#‡‹ β;'δxΞ„<ύΉ¦“κ™Ρ aš(Ξ μπ8EηkX>ΎΔςΫ †·Gƒ„7YvΏi B3γ2AA$’B*w4 xΙ~2™~΄Γ»Ίμnm“ZΞCΝ―†uΎΥ]ηυ Ψ-μŒ”%{'άΰ+ήο»ΰ e§ΗMŠ‘ΧβΤxYϊ Κ#Vάc3Rn?zŽ――ΨΑYv΄ΰ{lϊF„_HB—υΣYΌ|@†ΰKV† 0 ί²Π±| Mβ•½ GlίSΞeg!Iι 3*‘©%{ξϜ;HέΘήΜδT_ŸκVv43Bχx Ύ(,kVΕNE$’υ) ±•™gπ =\ΐρkγK$$”7@Εz§I…μ½πR,ΖH>EBϋδ“:χΝ£-ΘG•Wη-Α>–η%ΏšΎ=xXΌ™JΥΪ>UMγRsέΕ!Κq>ƒΰy«θ¬˝΅ώ!΅DŸΩο‹šνVf}πΥ'IίΏ%šs1ρ€κަTcˆ€/1ός \έ·=ˆs‘©wƒΙGΞƒYΜf~φrΎ–-YΝί՟κκώΠΗqΨδw¨ήcSNόαΓB”ύ―<χ·§ΗΪ8:w²υοn’*εsΆ,“£ωŸfρπ½=ŽŒ[¦!SΞyΐΝs’ί$m½*€Κ9ΐ”“^ΰϊa‘M±ξΓΨ’€Tώ|FU%R7ΠΨ 2°L1Πλd δμρ•5ν/ΠKcΚ±ωœφΟ€h«AI<¨eύΩ/‡_γΏζp«v”q}₯_°a|~ΜHΛΫd–4x’»8“Υzt8FGυ76Ί%|uόhγμεγoMέ-3} 'ΖΥ¨Ϋ7} ϊ_‰?¨W¦\–Ha*PΫϊο·{zύ7ΞZχ˜‘2­,Av|ξκΩξ>‘w Vθ!•§Ρ” ΡΕsxaΝ‘τ‘%χ—b‡FXld7†ψϋW θ!7ΊΛ„ςnYζ!Ž3ˆ£έΛE•hσo7,Ηv­ωhΛTzLMϋ0¬Τ»£™Oε©H§“v ‰¨”ŸJ ΩΫ%«-ξšŸΣ–’£^Σώα_Ν­Θ£GΥΚ–’wί^Τ 
EώΟf9ŽΆ¦Ά( ©ψΏ,ΥzPψo·ΧυΏΛΠTiΗ«›\ εΏVb ŽΩBΔ‘ϋcŸš3.{IΜή0ΖFE1+V™4W˜΄Pω?#ΓfηθώO“­έ„ΙiΙO¦ͺf­ύ· ƒ_½ώοΨώoηbμLΑȚή,ni£›€>γ#ΐ_I±·+W—`‘KσD₯°αϋ?ύΡ8%1όΡΡ8Ε‰‘ρΌΔ’ηπύυγ_‘7πyώ½XΊRŽ0N¦ζ_ˆόWϊΨΝ0έϊ»ζ©υ·χ{fύQ@―τ+‘χ*°$—³Ξ£Σ”XE«hψήέΥ#Φgec˜rΧΓ]sΤqtͺŸ\ ί€Y;›Ί Q5‰mέ„ΪςςΠV**ο@\‰TL‚ΎQΪ}³αZόΒψΌ(zyQŽ` ζ•'ηMή•€Ό…}œ›γ(MGΑ-Ε2Αˆ"Zνi•Ό(— ¦X‚ΏA½Σν/φΣΟΖοό£r,œ Γ±Πp©G©όμΰιϊΏIρŸΒzώŸν~ΟΦZ-ΥΑž<Ά›£~’…b“A[Rt›ƒιΦ»΅ƒŠέ$PZG„$¨DςœΥλ_¨zŸΙγG‹κz«aŒ,—&Γ`;Ϊ3]όχoZΉi ²ΤΌ$q|R‰Zω6lώ‡­ωιΝ]k!&ηy(Σ/:χλJ*ZT‘ƒ>ϊ\`χ„Pκ•ρH‚˜πΏίc‰b|§‘μβŠΐα"΅ς_!θ“aλϊ ןˆ†ƒŸέ€§ήНɲ-½ώsθ’ίK7NqάͺΞΒΌ3 p΄fθΜ4PLYςF4ΜΐΟ\νΥ±‹ ΤΐGO·ί:VΣ”·Λ»½;ϋp `Ο@έΦ ‚qx%€Ad‹α‡—G#\IsP@Ξ€α@oωΏˆώολ½ΞœlύοφΤ(€–υί@°Ηι‘λ?hΛD@ΩrŽΪ 0)©ΝŸ ]™\<Ύ)^μ- pΉ9›\­3δI3­0Δb‡ϊ—4s-Ω ωήވΦ[α·XΝ«aoOΠfuXcΤ<3D~5i~hα­βχ>…'1³kτœf ς³6<Ιi'―αWY?~˜‡EΑεΙ‚΄ν’·ŽbNϊΎ$WόχG‘wκŽb17-ι(MήIτΜπRΑ‹+|…x?zψh'ώ–jμ[x΄³Zάw{Οχ εΎΰad+}ΖόΧUu|ΕSYNP?[.ΚKΎ]#αξόιΏώlώεΗοΒU:Ψ.¦ΎƒνΏ!ψ šxYDΗΰ«τ7¨šŸAφψΰžρՏŽρkϊOυWnw/~!QΏθDοAψoŒ`/ώχΖΪ8Wp¬πΆόοݚ‰­wΪ7ώ―£}†H΄JJ%Ε[ΰΝ-¨Β Ϊc‰D+ΟϋΔ7@ δπMˆ²*v^ΡϋT6©Λ»b™ξρAjΒ}θbύj?ίJφψπχܚnpj=„Oώι =ιώ_[ŽWŠΎψ…¦’Φο”NŸx’©τm‰:?εί•Dη'ϊs“`―mjT'²ύεSž₯>^ηJΘ8X7ŸϊC9–«ϊ¬π>ωί۽Κnύ«Ÿώδρ_.=γ·5ο,ώ_hΈ?{aα |jσΣ΅RG9χ?­Ώ  ¨jΑ—γ©Ϊ; bςšDWagΑβnr:/D±Σ³k†+:£χ„²^’κ©±ΣL·Buφvk}1n^d@šjτDζΙGi¬θKμΫκιǞ7ƒΪώε`©rW΅ιdoi@³α)θΥzBΞqY ΉΓΑνGoζ π<χ…€gwSŸΓζγWοδWΩΕcR H Ÿτω©Λζμ•ΦλΌeπ‘άWΗρ]SkZY:ώ@x΄vλβƒΆ„Ι©π±UΚ’·ΓΥŽΕ!Ž5ͺΛΙσ/uXzoΈ’ιqrNΏ‘ζ_ωŸ΅#ΙΎͺο―|+ΰ?ΔΣJ ŽΪσtέ9OˆΗή ϊ4kί!χˆ―ˆϋUpRIψwηΈnlώ%Γβ«₯v~νƒσοΑόMO£ιAԟΑxKnoa°δSγœΧΎσŠ‘cιώ_3­.Ν§λa­Χέc―ΧϋͺtΟ ]/‰νͺεσλ5Ξ@±α ‹yί"IΔ»δ-°Vζβ‹\<ψζĈmέiŠiΣώOEνώŸUziΣiΝΖ’σφq4λ”ώΒ‹Έubυ@­όολλέiοώ_Fs}°ΰ™8u­J0f¨B:¨τύ.3§xYΕέc‹€K‰;–Ρ€C.+²‡3O5d‰₯.~&φi©oF†5” Χ²fιꚏ,U;ϊΨΆ­υdλΟγ60‘s7…ϋΈoPA[ώίzΛΤμtΏ…θ ώχW7x9ϊI^›Β©§@ />挟—d, LΉ$π“Uε°vωfRωΖ5©*‡Νΰ¦`€/ΎV³όD™Ϊ ’j7|χ/Ή_Ϊpš΄6ώΌ?„/Gΐ‰χπέB3ΉF¨Ζ‡;ςςέΒαβ †ƒΛΛU ά~o ρjdg73€KΡ•b‚}"iSGA=Δ‹hωCLΗOΊς―αňOΖΏΩfδkΪ΅“;~iιϋΉkύί’ϋ£Βψogλϊοyןˆ‰4ϋRσ2(Fzwnνc3ό―ϋ΅Y"ώkνΠ™ ŽAmδn\₯=„[VxρK‘ί]“ΐ ~:”Ρσа0EΫ5€υ/^efάkOFq*ΥE‹v(½ωίτΠϋ΅ξ3‘Τώ’zχΙσνυzΛζXšuϊΏμιPΎs¨OΏ`ΉJ8Τ°γρΛ.”ΤΚΞ€k¨ͺ‡΄Ά–翝ŒXEΎεQΙ³pZ8Q]œx£ό>9¨” XΣϋγδύ W4™qΝόΐΟή ψnŒCDŸ•h΄€FΣΛΰΫjό7ώΏw&S„ΫψlΆ£³ΦύŸ—ˆΌΐ½δqΎ 
‚|U‰ψ›owΪ²5[…BνœWŒY»G™x’{π!ΞͺιϋjψμG!α1ώΙ§'³wΎΦΚπκ1φšžΩΝց&Iθ^š–Gt(Κ‡ΓΪfΰG䏘s1œƒ:'₯"“H4²£ΠVώρδ ‰žLš‘Ύeǐν‚1oα!Μ ξΙ Όομ:ν7e‚¦uΫ‚MT+²©vœ#Wε‘]»€CbύΣ©’tΨΑ‘‚ϋMm,ρέ_#?3Iνrυ†ξmβΛΊ«Π,*£tηοΩφΚ=C›,ο)²ϊτ_λόΏωΧΏΪ™xΌό?ΛΩμΩό?K―ρΏyΑυžrxŽ(0Φ|Ψ/ž‡AθαΑ_…νEBάZφθ²(€ΩΎ!q­υˆβί bό₯ll>&Ί0ούΣ5ΜIώOœεθϊοΙΦηω]ύ¬x ΅ν.τ`Ϋϊο7LύŸ³ΣώΏ9τ?όΐΙκ›)& zψ—άp–UρdtUσ¨NΙR―­Θ5γΖQSxb΅Š©|!Θ\δω哇ΎU|Ά‚ςlψχ―Βμcν,-g–Κς€ZΦλμ˜όŸΝn―λΏhΥσΪψ97CF»žw ―qKkε„ζ«ΚHn“ΚΆ\v8dα¨lJύ«lYyΌx¦NUiε¨5ΎΩμ–›»έ;\ρ·Τef’—t\zxsO/ΙΙ‹Γω“… d"%ΘΊ&›ωcJvΓ3. I!8ϊ°α5‡M‡ψ…-Rηεžzμ…g1βˆJΘ_2ˆC~…—.υ“Γαw»* gμ΄μΌ‚o­½.³΄³‡hJ]t½α[ΗΞgh £ήΒ“ γŠyO ΙI󣻡#ζh)ςa9a. 1D$x­^†Μ‚šξΒi’k |θ¨yLuφ$ˆsΜ;_άcΗ/ώ>ΣχΒ5-Ω;ςΘiœΒΑ&κ»fή,ρ­ηB‡½Βbτͺ6η4ψVHIϋ Dα½ͺο(ΧP(ΏΏr• έ~ϊδA\°9?~ΣώΝτΦΥEύμ?ey }ψίvΆζšlύσΪ…J·)”s“ϊ©@kηΓβ8–^Ϊ2?R½3h»—γνfγςŠG:F‘%L_6βΘ™G&œΦΉžYέύ;nχΓΛt‘»cgέηxfi‡t5k΄k'ΊŒ>ΜzŽΜξSXΠμ“΅–™‘–qSe[cIΫ8½οΪ°u'x΄—|kΐΟΜxh»ζκ°ιέ ‰žχcΪΚάd ¦z¦(ΫyθνizμΜEqΞ `”7:nΟηΦd“3γ?%6@ŸψίΖΩjό7ΥϊS>bς!M³μ½ΕΪk9ώqΛvqV„.IόυM<ο†DΊTΖOδž;@³7±BzjSΛρPηkϋ‡Τψ Δ§k`ό‡ŸύΟλΑψ>§ZC‰c9bdNZ‹Ώ/ω?«o«ϋ?LŽhοΒDυίλ­ΓΤ8–˜F‘bΘ‹d/’[ύ3¦±%s Ϋ,'Δ”sεqPΗϊ‘Ozρb†\ˆ"#„jΧΏ€L3’½Ι8ΦΎmΥψύΟYηhώΟ ΧŸbJRΛΧ*m–e―ω_¦·ztΦ©+n•΄kq_ώΫ6KίΏR¨>όO{[σ?Ng 2s©6ώΟ½ΓΖ7k­gπ΅ΕΛτωJr|θ'oΊQ“a)Ωg­QjYt σœΘ"E`C6fpP–Om Ι£€=½y™jM/Η—]WyΣt€‘δ₯¨g‰ο‰²Μσ½Λ‘–ς3ΰ§ldΥjJ›ύεSŒA€•kέ3nf$ΪΊΡ±eΨgNρG/’UΪΩ[\}εώ¨ͺ\7•t# ,,σΰΝΩ«nA‚Ϋ¨ΈYδ+G·‡ώ₯@‘€›ί׊Κݝ„©TˆFQͺ2eλa&„€Νϊ_ μcΫŽΞ› ΕSκ©ί%ΦίZΫp΅ώ–eλϊΟβΏ“λEθZ’Μ ιœ)Θr%£\$ˆIΛΝ‚£·ŠΎ$[Σ,nX\{ypvžΣ D~Ω¨Ψρuv4D¨¨½Άf5œ΄Τ*‘ΉŠž”Ϊ?Uj%m!Uξψ«η»ή«νΈŸ‘ »rzH4 ΰΥ6ςΠΈ g‹F^žn€e2lϋŒ]ΣbϋΩ έςY›>"ΩΜΦ†9zTvšΝl(nπΛφ­lΈu‘l·J ·—ΝŒI3ςAr’ν$^‰=έ*γI²UΖ½ W£aΗm‘'ΓfyσΑ+%d¨<_κΠΖΚH‰7Ώ©r¬΄¦ζ$λ.ιΥ‡R«e^{6ɚšu›alsόδpΉkβj€Θ―ϊw ―ΰmƐΡέW6|`M/4ΥΩζw4’ΰ S¦θy“WS©GΘ-& ‹OQͺαvBM;σζj€ghΤ¬ΌΩθNBe!S„W—ΒU½άΡ,Ε5σ"pŽQࣩ( ΞύΚ[?υυZλφM—Aϊεn«±vLύ½Ωλψ ώ_ŽC—}”λεέκν²g‹<&Έ&―ΦDsυ:ό?ϊ―?εΗο oΌͺςι꺝oUώ+†}ς6ŽΞ›‘œ ΥQΰ6ύΏ[3υ?6 ΦλΏ΄ψo=ΊΫ!Ήo`a±V¨)›΅ˆ‘ΎΝΦδ‚“r‚Τ%x§wPμeμΊςBξ/o…:ςσ•GΆL3†‘χΏΐ›ρΑ₯£˜xάϋΝy»Eψ_₯ Ψ‡imiό?ΩϊΧu Iβ)Ύ ¨Ύ‡ƒmώίυ–ιΉΕκυŸΪώ«ϋwιœ³»)ϊXκΉΥ{Πu/€‹θχ€A·t ~φ†ˆ’‹,Cγߎφvk}1ΰχd\\žρ^Zs$Οƒ·ϊGόzθνFχž0ώ_υ­HDO‘Έmύw[9–^τ%‰=hxIlW½ΉΏ^cR.‹6|飋ۼ_oe[ƒ―YβζdΦΩ΅s†+ρ΄.ό‚J–˜χ‘yΕίΏ: °ύ·ΡόsΙε$mφŸ³aϊΫ–ΞšCώ{η$ŽβΒ΄ 
¨‡L Ž&O”!Δμ§Y!ϋπ6~pPχ“Έq6’ΐβάd¦Χ²­m„ύh±-yPπ£΄/Ÿ)ΨμDnΠΖjpρχaMΔ`Ψ4ιΚμdgp•ηΨ²y`»σUDκYD ΗQΌΟΊΟ§ω―Ό¨OώΟZΧLΉώtΠvJώg{»ΩpψŸuύΧ ϊΏ[ώ/Qξon^bk? )¨ χfʌΉ<™¦™j;fi—)G €σpFlhΩΉεΠή‘Kj‰ΗύPκΓO —τ’ΌBΛ’Pυ!WvMΘβ<s|‡}στΎIτ(ΠσσΏ[ΪώŸlύ FŒqθ?[σΏ{ΗΦ;Ϊ·@όΧΞYƒWiΏ)β%(AχνNg ½J|ήOΪw_αοG“›™cϊ6Ÿύ oΌ±ωω@‡ΡvφeΨ\aζ€4—ΰΥ¦ ίoδ{δ§2ŒΫTϊy+œg ί(s#€k9CΩͺ@"‘ϋ€;˜G‰Κ y€‘«|ί\D“}*ι) ŒE/ΜW$xλ[;`4²Δβˆ’!‡¬QMΐ(ΗΕΕ–€θ>}ψMdήΉΠΫ‡N4ˆ²ζLMHR3•ο`ϋ,΅W³=]\ž »π’6Ν\f„ ¬|φhΞκ Ώ}Um)βγ˜~%•γΦ[1·vό+ΐγΠυ±Φϋύ^γ‰γ?9P›œΓ±YώΝFΫ‹°8&ž|œ_Ύ0,L›Vw¨½γΆ­ͺθi1Nά~`Α\Λ.Jφδ^ΰ}}Ώζ67ξ(ίϊ7σBΣz€ΈiŽςΉ5P&Α‘ωζZΒ*€΄ζ³ρ­σ?§Χσe¬«yrŒ0žϊΙOπuπϋW¨ϊδΫΦFσΕΕΩ+=C›ύ·έΣύμ΅­ωΏη°n vN.„°UPI§q΅ρΙ%Z‰»*ŠguεE$eΙζeMYΧ<§C €T[)m Έ°‰ υ'ͺ—Ξ…’‘ ₯‘›χ>ž‘Ω]ώwέυ?”ΊώsΊυ'ο}|γd΅¬vk1ώ_ΤB― ϊP·_š/„›.—€ΥΙυΒMH.bδUύH6Q (ς―tfJΎU||ΏT Απ|˜rd˜ΰp}$#δP‡πq†{ρkcαί±±βΦ†γ΄I­“Ύ³&ΘM\ }λ†1ΘW0tI£HωŒ£5TξΠ@yάά΄NύUτz¬Μρw+φHώRνΛΎ$ρΙςl—ktIΐΙ ŽOdΐS=₯ΧΛ%N2ΈνΈ΄0Β”mφjΟ”‹'WW@u^ΑM‘Ώ'ͺ²Ή_¨y³D·P;H˜ΡΖkiKeG (—GΛ­?—ξžβE‚­ζ%ρoy[Q ΰ=Γuw‘π―ίMΨ:β‘sβX‰>¨νΕGDm¦θCΩ}e§ΜΪΪϋPΟOΓ‘UωC&ώΖγΖ”ύ7JXŸώΦ^ηΜθί')ΑsT΅¬f»Άώ7[σΏ-#«-"ΠκΰўnφΨ(εεj:Hτm"‘ž5u9–»ψh―ή—© ΏγWEEτλθ₯­Πφ*x%…𓄐Ϊ_ αKΑ+("υ)+ͺœͺjΏa qs".`jd Ίmg‡’Η#ΰγ^ϋΠη8ρΌ ”Α@*Ϊ Ά ϊπښ}Ίυ§=—«‹›@©‚²zΠڝείYŽΞYώ‡bψŠCΤAΫβ4=74½‚k•Υ„z—kQߏν^Ή§H΄¬κw”]7‘l9u*ΡOx;»ι9t/υ I±uœΜΥβγ3*πΠ(>Δ"!fUΙ|φZWjωOδΠCﬡφMˆΈΌͺ²zπ;–C—š½Π·λŽΊzpζϊ΄Έώ’.f ΥΙpΌ?ώζϋW κ!KηM.Q«*3πCψ'¬³μ₯λ–!9ќ‹!ιφwΰ•`«šQΏ! @5 ΐτ`υΪΟ/φι‘‘ΘŠ[είω(Κ°*H΄ ω>εμυŸΪ;“ύ_ι—χhΎ" ™‚ή]ZυšΡ»ύNγΏEψ…£Ωχ}³€;ωoγΐεμόK.w Όώϊ3Dτψ;©£\‡NΏΒΣpzb)ΐ«X‘γI+^βqL[ϊ΅>VεOLΠΗ»ίθώ?Σ―mι‰p2o>xζhξ˜ϊΟέf§ύI«Tθ?rL_ΊW§ΧŽP j_Fm8΅o“Ύ8‰Qœd₯O݈ΦϊίΆόΰθ£7šazς’ŽΠΈmύχΆΕαΥω_“λ^ώx΅ξuT’-p―OγΗί@Š:•9°c™Υψk›Y³ςΎEπΏξ5ίdςΥ’ŒΤϋKΖώΫYLώοΪΪκυŸΑώ»Χφ‘΄Ϋ'dJ»Ή€NΨΚ¦,“«I«΅pΎVςΉyšλ°ψΕW’3‹XBZ‡7™ž% g ΠΦi;χx4διCNr―9"Λ\)9"Λ^£1x(7*Vθ?Ÿρ_§“‡Δ1ώΣώΧ(ŠJΛͺ†·„§©ιΗπͺz+oΌ¬3Κ‘zΏτΤ¨€–“­?©Š@6 h: »εθϊŸ9τ?―VGΊΆžΟ΄+O³{Ή¦gnzWuΗ;·΄Ί£/¬ΰΠSnάώCjό„ν[γΟ9mξOΧCš!U5ŽV$kιρ»–cΐ€>ό?ΞZΫS―UΜN™½ήo™όΏΝZλεψ;υ€6y¬œ#γirψ2*«Θιb΅‹ †…fw€΄bߎ•šοeˆ©λΨIkfύ#'ηΞv΄ύ?υϊ$©ΗνΝΦfγΏΆΦsψ[ΨώΌsGqΑξg}yΆ—bψ“§νγ2δΙΠγaUΖ°“Ν]Ήπu’P"ζΉaδη]ψΟo1ΡO o]ΊγEͺM‘ΐ9“Ώ]T‘_Α”Φϊ‰ηΜ δηȏiγo#C¦γoϋ‡Τψ=ζ4ƒοΒwΖOXG?Μ‰γ!Φ’œQƒΥyτ2ϊZZOΆώ%;>ͺE 
ŸJ1`«gΓ࿍eιϊπŸ\ώo4ΖΝ·ο]5 ‘’Po @λT|ζŠJΓΙƒΏ)œ©?ŽΞ,?Y­/υ:ω―τρ¬w:ώ3ωϊϋQzήPΊΧΞϊίq8λ―ωŸ–‘ων@x9ωΈ©Ρά""~kKzPˆ,ZzjΨߟ3 άik'h 'Υw«ήok<ώW™K.‚”Θ₯.€^ϊ­νΙΦΉΣ«Ÿε²1}u“P!h‹XˆλZg§νEθzόGͺΓ³DPΗΛ’ΐ“kn²gώά©rΗ&ΘJ‘σώ7€—11HΧΖέ»Ο΅š"β„j…·pˆ~}…ΟΆG³°:ްHΕΞΎ%$¨Ηq+‘™5–ϊvυΏ:Ψ9ΊΟtλOIjΥ @mλΏέ³ό[=ώ«·ξδ€O,ŒΫΏ@FΡ„χΙ?€2δ~ŠϊΔGΫ“―%=lΚϊέ–­Πω³ΘQ6§t’fΟύI„ςŽ,ύDMδ’VΤΠJgƒΖαό–<φ’ΠE tP9I’™!4}3;#U΄)§6jqlλ¬(ΓΉ’ϋ5Z υTΝ Uε…wΧ7Z/σΈjϋjн½™/ΐσάΩ.=»›ϊϋfߟ٠\€<“/ΣΚέτάOh8C²$=rŸN“*ςΖM>ΰ15O™“@’JΙύŒΊΙΜ:γΰCβΏΡ:€φιιθώίσΖE‘ΗfBkύηžΖ6ϊS―ςόΏMύΏYGκρ₯lύ-OΩB²4W€ώ#ΛS€­½σΕ=6=»ς ŽŒ‘τ(珀Α8ωΔzεRγ²W"#;L aD1d ›Zrύ£‡d`ο R\“qό“ά”%:&»Λξ0‘‡ώ§γΏ“ιͺnχ3!γΖή±όO[]3ƒώgϋ±Ώξ2ΙVϋq/ˆΡσK.:\Ϊζg\ θd}K"ύMl­“Ž(xΟ:υέj­T»ͺ”Kδnbί‡nλΟ›³oόŸίδ«κYβ^rΉƒL4VΟ―ΏΐSžρWμϋ>‡ο£@€{«9΄]žώŸ™Ρω3ωn‘βPp[όwΝςΩ[Kϋθ9Ή~P`Αu―πξΫ]­―…1q=’Iξn¬f!l­EKβ`o’μΌ­Ά`dٟ½›Ηk»kd𕇕|† 7ά,•Ν—fG¨ΐΨqd{§ ις ώ•ήΛ*E™@α– Žεύι}DαV(b.±eω4πΟΥ}[N½/TuoS³/•½±]‰Pσβ‰!rγjϊƒQΎOclMΰϊα?΅‘ΰ^όΏ[7]όΏδ‡ζ8y ^έ Šœΐ­ρ_‡©vœ΅ΖΛΐoˆ!HI*ΉcυΟsθzeΦ Υb\/Η“Mίs8ζ1 ͺΑτμζΗAtvΛϊηdΪ :έ°ΝnVK‹;ƒ£½έZ_Μ£ωlΔ 3ž ΆyΊξœ§œN‰ŸCθBj3›θXtXVSf"έFA]ΕΤοy΅Θ%@ Λf£σΏ&Σ\’Ξ ΨΈώͺeτf»Χψoύ?‚p-^#ƒh’1„,₯μΤ ΫΏΑ7βd_?οžvŽι^.€κ-yυ£GLUχ€ό1:Έk*Γν‡Υ gΰήή^|ΖξooOΘiη{˜l₯ζsΊΖa`ΣνZ~£.©mδΛ )κχ«Bꝉη:λξg-O@Ψ=ΥμR'~φΓΐ7Σ·πώo„ΆΟόߞβ―ώŠnr―·¦ΨυΈZ?»ΎΤ ˆρbΈPΰ‘eizRp}Ύžύ ŠΜΓ±(uκ+h^ΰΓδ4όςΨg½¬αe彝&”qθfn}IΠ/Φύ73ŠsȘs|/>ϊ’νG«Ο³+,ρ₯ϋ–ύYπf;€ƒUχS¦βΣ‰ίΝΌλω N Α•(n‚ΰZΐ^Γnίγ"ς9N%\ύ«€ΐΏ!qpΔNLϊ‰MCΰΏπTξΕUuΌΎΐ½Π_o”ϊOHBςgoθ_/β„ωαιρQιυωU;Cϋύ %ΒSθ^θΪ«Ν¦Οkƒήy¨ž0qNœ Šji‹«ςiω.ŽXύrtCŸ—+‰Στ\hŸ‘uθ>²χύ„=ͺUn‡]ΕόzΒk€OΞ;γeμzYŸ‡]Ό(ζ²JνΠ|ζΝ°3η΄^’r™{η=^VBι">ν5OωΆλs―e³&ξELχΥυ3κaƒ―ΐƒΞω$fu1μήW(ψDxοA1"υ#, ΛΉ—΄pIq½p^¦MԘ'COoŽ›DߞδkYωΣΔ³°”"ο‹ΰέ¨μ½?Υκ_ε<τΫD A΅r^tκ‘£‰ιSίAŽ΅·0pJ<Ήlγ°‹‘Α=ΎΜσΫ…Syφ ύΠό€Ο6ώ_hόYΕ/xKs%Z6‡PΠΤ}™c(1zρ.Œκ…˜₯‡ΰσαo_C„[z>4”?ΣιW ›δ{ωͺs…}αΨο~ςΨν$:œΗp‘PρYΐWψΊ>ΑχIξχmό…qPΗ»e«‡™OδAσ³­¦MR·Ίž2Ί†ΈθYήpZcΘΟβ8ύ,e›=NzΩsβ^ΞβSβέΘx@’ΪΝλήD.±pε«ΐΟ— Ξ2œΑ.·.fOȚ ½ΑΝϋŠήΩO³^VH,ΉT*Ξa¦7œ)©Δ~xΤο‘ΤXΆψ©ήbΏ™MΏ“’*‰†ΣΥ¨ΈπΊΗi½Ζθs—`χ@σH7Kΰ§R‹Wνx΄ε Bψ_ωη™Ψ!œδ)!ySwή’Β!gΖψVς­.ξ©v#ͺlTμς’Ÿβ9FάM}ΖRAg 9šr^—Onω»NQόg€΄΄Tέ&ΈλκY­ϊΡξsνΟTςΥΏ=7­ν†·ζfΥ-πΤ΅Δ‘_ΫžC―Txκ^Dρχ@Žƒ;sΏ uΗ—Ύη'σ6`‘εncˆt’γη¨ 0 ϋλ! 
@οΣφΔP–˜›ν~ <Λή}>¬λ³Ε:Ν#`ϊα…q[ήίl ˜w₯°^/.Ό@ς^ΌΛίKΌΒΰ»―ύΤΟκy9πϊθΰζΆκF7z<œƒϋl\πε‹…,f½Ρβ3ΌΆά5z›χ=nΊΖέ&4³ΔlmΟΖiƎcO΄k£φδ „Νρό 9°ͺ`ξήΛύOBβMŠž ZSOή劏)kuςA*’"b]ά^Π>υŸ£ΑΰίΜs£ƒghe$8ƒ{Ί#ω\΄§¨†‡ˆΊΈ_2/\dυ‹u’©9ξ=€z=' /n&πά‘'ζsΠωΎΟ΅„ΰmb`ληΣ“²)ZAeΠ"=\Ω—)ˆ«p>lVgp—½λ}Q<οu§Ψ†υhυx$―nrŸΏΕπm'ΐΘ.ςΎ €Qύ;»FΟi"ŸH‘Wp C%°β7Δώo2ΐ"?ς‰_Œo·%―,#meκ„•χ’ͺ2O’Κάι)ο-1e””ι’Q–›²δ܏₯e}̝ﱸL9r<ζΛξ˜&―cΙ‹Κε˜>‹c’όι37tΞFΏG5MΆΖ;ΝӘ>Cc¦άŒ±³2&Οǘ)cŒy³/&Ξ»XLΖΕϋΞ΅˜<αα½ε,&λ@η(Μ7˜/ΣΰΘ1ψ˜Ωσε,)£`ΉsdL?0UlYG•Gˆ*/#ž―ΚH΄ΤI'αF»’™Γγr9{€\ξ2g šΛ]Ϊ$ρσŽ—2i(½γ΅©Žͺw<ύ8v©‹Pk—:αόaχ.—9eΎΫu)Ζw:Ή’Έ|§3ͺΡw:ΉŽΦwyj£ξ₯ΞΎθΎΤŒΞ—;ω˜‘}Ι+Pδ—:ΥHρ~Ήsϊ—Ί‚²€Ξ;EB€τ…(Ο :σΔirΧτ2δnd ŠΩ3‘Ά unΕd Rη\fΦ„τ₯O˜@!wM:—’k.…όs1­’ΓE|€ Ι»}ΏΙr78rή…ΤELŸ‚!YΣdcΘ]Οh‰r§%GCξΤ…θAϊn?ۃԝ«MΛ‘;₯ΞΠι›‘#|'JΦ‘Ό Ay;rη˜#…§Λ•”Ν#u ‹Hμ‘Ίis|”έ3K^Ο»Κθ™1—gY<ο2gϊ̝‰svž#³ψμ˜EζΕ,"#f™Ή0³eΑ̜2aζΛβs^–—ν2SžΛ”.3εΆθ¬–ΟkΒ|–χœΙ2S˜Ω+“δ­Μ“±2gΚτY* ΘO™#3eY9) ežlw™“±¬l ‡‘:cζ Œo%χβg]̜o±ΈL‹εδXΜ–]1K^Ε€‘vc+ΖΎ θϊΈqυ%FΤg₯Ώ‡(ϊxρσί}ԟ£ΒόLΓ8Z½B`^\`α*MΌ”xI:πkψ³sξφόώΎ·w{gηΨ[ψ»΅ΩΪΦοΦS<ψFΊ‰aόξύΑλΈΡσΒτϊΛΔΎΏΥk?ΚdΘ…₯‡‚ˆ›C++‹“ΥχπSŒ±ΨΌώ{³§ΦίΆœυ^―?«ΚΜ_>‘t›Θn f8xi©₯>!L ΐΑMπΖ Τ¦Πΐ*Nαs†ή‘u‰ŠΈ5K^_²B¬ΫΦΓ§ „—“2£MD=“»€ν<(˜ύ“\ŠVœΡ€ΗΗό9€™ΏΩ^ςvΙbz0όμΘi›»šΪ¦(9Όrn+…;’ χƒψ“T₯D.ϋV›Œέ-˜ΪΝ)η2πvώ›ψG$“°Ώ99α΄­Κ’ƒ³…άηy ω‡ΧU&ο΅ΰ©Vξ\(ΝZψψΙΜΈ0Ύσ}‚ΝϊΠΝθ9Ψ#ψ“ ΙϊεnΑ"ΰύHƒp’ό!·ΠΜcj܁·pU„άDΓKs'Νp?B<‰πžΡxξη~ ‘ΙL{&lGΘΣΦ‘Œ½fη8!pτp]ί§υϊ8MΗψo1ΌNx±™Ί<%v}GΏm―ζ?Η!Έδ‰τ(Μ™ώσjUœςαSΰ{πBπΞί_ΦΔ’9‚rm#oJ\ππιr=~z.Κ_J}Έλ­zžg?;_θLψ”ψθ"†»ί;ώλbtΗΆ΅q6M΅ώu-Σ Ϋ+° ΩzuϋΟΆ·Ž^%ΰ7ρΜτΥ½Tœ!u“ LeDΆΚ’2©Žk˜γβύoΘF€―Fέ…EM§fgnLβ€ΨJm½~ΐσα5pq1CEΙWΆ π~|Ӑ­]¬ ӏ ͺ„Β4μι0zΌΐŸΰοπI½ Φ5CΌPΣΠ;ωSρσ3kWεη';ωC%Ύ— Fζ{ωC/ ΄B;ƒΰ‚Σb p\΄μΠRό9ιaKZ•hΈδ€ΕαΜ’›€Ω,³ Z`y©³Ϋπ9ϊΒnnβ»Q&Z₯κ!Μψβq³’ΒΉkαΠjn˜‘tςΦ‘6ž‘K™FΕFφh’Η† ³—ŒΏ™uX\tεΆ¨ϋ¨\.J›¨elΠσ‰>КSϊtύν7R ΣϊέςfgΏ£ί oΒϋ·#ρ­p' olϋMV?£βθ²BΓΰpΒτ’?ςIπ„Εύδq΄κ a?οΡ”;Κ{(6δrΊΈΚb3½ ΜψςWχ›Slθαq…υ―ή„C³j‹ν[·(ό―,πΣ?ώcΫ–φΟ΄ώζŠςsθ&/Š,Α–υ'k]_ΗΆΦzύ—`ωΡΡχ Κ:•Τ"kAgf+ ω¦R?Εξ+ z ΐƒδ:Vw/ι*Ÿd43««!Α†#κΎΊ~q‰ήA A’(¦£ΟCΆC|°*ΛνDCοB§}’ϋ±Μt•Ϊχ|αšnGΐk―Np0*h ΜX]?°ώWƒ{ΰ?Ηr4ώ›eύ‘©­:Π²ώΦf½aπΏ³·υϊ/±nχ©=JήNΨΥS]Ιgς z@‰±όGΖΡΝ\ΓυPΒΉ@Ξ #7xƒΈΧ€ΞWˆ‡ œ|>x@ΟMc†…ΛΕ >ώg³Σςςό’ϊqτS7MAx@’ 
΄ιgΛβΏύN―"τ?fΐΗ’ώJωS&δ eb F_|τ†qŽ-XQΎ¦ΚΌΛͺΫ#*b! ZDn&sά(‰œόŸ£ΚΰΪώ8?i8©Ÿ΄Fω†Β©΄b‘»ŒcΙ·2ZεΏ,ΠΗώίΫΪώŸ~ύρΧzIΠ—Ύ:Ζή3ˆ¦Π‡Ιsv{Yˆώ'D”UΪH–œ²Ί—›όΔΟUώ’'tΰ?Χόt?L7Q#Λΰ‚šΎ)6Ž―ϋCx–€«λπQΙU1Λ+7-λΨΜ$dͺγdΒTΦο K¨¨ΤœΚΧ]½τΆcEuυβŠ‰Υ7Nί²βžJώΟ€wνwύA­–‹ Π¦mΈZ­cιϊEθz²?₯ΖΗV³K³ŽgQϋΝhK Ί!€iυw)\΄ _Ύό zθνf«ε„ρT܏X€V8‰Dθ%ŽPBΈΑ5£8σO8 0ŽΞx -soΡρ{½έλϊΏ9τ?ζ(Ηy%σ.Tϋ9eW•_¦ψ$Βa€‚ίΌcYœ|e\> EψEQ&b/ͺ4Λ·DŠeΣ«χ3mόBaG/pύ° a[‹‹‡Ί°΄t©^κRΐΔζέ So9Γ₯©<¨±e‚kS5?%Ά΅°«ώΆVRΣέΚ„kΙ2―κ H˜α>…ς¬—ŸΟΐ¨κ,ΡvBDjΔ¨U8‡7+=Ι;œG¨Ϋ‚Xγ₯ͺs_8zty0}τW<Ψƒa½ΥώŸιπ-OΥόώΰeπϋL‡:€ΪψίΰΚ3ψΏΡψoώ)Τe…Χ΄Π4BτΒTγq+j†ωK‹‚ψΰ茈§™WFλˆθΓFb{θMՐΒoκ0Ω*dΟ ‘7Μ!Œ`tΎ·Žaϊ…9«¦Έ΅PνεO< ˆ;/σθ께‘€υ…œΙς™˜Uh±>MΜΫFΩ*y7§ΞfOƒΝΓΒ]y™&†[DΧ*Κŝμό₯δM¦ΎV’kˆU˜ΓpƒcVj₯Η—η°žΦ‘πΫΈ'1yω}Uh¨%QX¬ΧΩKΞCcj=ζRψo Έw·Ϋjό7ώ₯%`mόo;†ΩΪ؎ζ^ ώ?Ή~Π’ΈΕ/k³†™³1―ΝA›6Ζ ™ž₯ižί!ΥψξUΈ `KQŸ)ο|qΒ/P“Ό K³΅Σ―U&Β§_ЎΌ»Aμv’φϊ—ΠΔΨ>#„Μ§ΦΰGW”„VδC)œ[Κ*λτΎΧ",‚‡G‘@Ε}‰ασK„ξΫJ6»[XέZ7—DvQ ƒsp•·y#3ιέ1ŠΓ;εΌο¬ΆY„U–χιcιϊ9ν?₯p­ω?¦ώgkνtόoϋΟC¬I…’Χa@VβΎΈοζ‚›ΟrЍ>%]{“œΎ™y¬ΖΩƒ‚©ς šΆ‰ςΓΊPβ±xy89.JGGe8 rlΧΓ‘`Ώekό7'ώ«Α#η؎΅eγ?ŽζY„ŸBνΝ?Φ–»2\λΦZa™(“Σlfp·D₯Β?8"ZΒΞωq:`ͺ{g5“ΰϊŽό΅Ξ˜uύ±™5Ikλ0ωίΞf―ωŸήiώχ08Χ?»a~`φQ‹ϋa‰΄‡f˜ς|IΗΒdφoX4Λωψ7ΊώkΊυΟe]°˜˜½έrψ7šaύOλχΚ“Υ—γΠΩ›ΥψšZUpŸχPάοόο›½Ξžlύ‘±Ÿ^ύ ¬Nqς<΅όίX;Vώom];ƒό?Έ)Ψ9ΉΠΒŽ9Τ/gˆ Dυυlώ‚@ΘΦ¬ }ΕΉ!\*Ψ—λ!§)ΫZœ$^τηJ‡&Ϋv˜ –Κ9‘Ιa“kgqe”³Γ‡ε›o0σέ€³Ό€œ­iVΫΪ‘ε '@BˆiτΙ`lH_“€PκΫθ/Ίi=ΆΒ)ωκΡ,A–}Ν*ipΈ>‹mΒ\άSG© δ¨L|nŽυ κτΞΏIPm†pdΎWΰΫ8Ύpb?c•\«πoΰš‰ΤAύ„_ΆσΘ:'^>§&‚uξΕ—-˜½YOΦ–™œχŽ'ΰΧW\QH‚Ψ{Ι}θsεLe+/tΘ9‘ZRΙ“€;RΨGόχυPΝ―5γ{ΐKθ€σg[”U¨hεήΪlύ―ξ4ώηv"ηbίqAmW₯ΰο‘64W‰4€_Η`έr뷐Ѱα]Λ… Oύ—­ϋ?M·ώχ„1’0‰œλ‰»:]#œγt ήι ­ώΏ5£χΆ­ύΏ3θχ#*7υ½"{χΈΡ›"ξ;pτ1ΙjδJΣΘ3ρΧ­‰ΏŽ 3B?q}Ӎ’8#D£Ή»l&ε1 ΏΪSκJ Βχ΅Ι$­¨j0Ω‰¦«WJ΄\MιU£‡Χχφuαuφά½€·gq§)š–Gu%/Κ_5Sz@‚΄4π,D² l@€o=[ΉΨϊm΄&Εώ~Ybš1ΝHN4œζu’ηQO“>}Χ²FΎ’Š9₯Εu΄ώι‰zΪ=πήώŸ9μ?$ι€B‰bPγl«³˜ώ/–γθώ/³ΔkΩήTdΗΙ $<ΐΛ*%ιί›ΕρΦ^Ž’dϊ₯#un;§  *^u…rWӎμΙ>“(ΗηnjZG¨ΒΙΚ€C©‹’ζΟ‹@Ζ3„ςΝ*’ΑκAα(9ξ”pΧ@P΄ι%^ΰ>ωŽ£ω&[’E±ψ/ΰmΒψοzΏηψ-ν_ώλY緁U‘'μP―¨‹oΖ€ίΜhϊΟ=޲~3Η¬­Άfπ`g‘e‹άθ@A+€Φϊo›ε±·ZΟ.₯©GAΣ Kΰό-¬ΝΗ;`iύ_UΖaυ¬Ώ…„Μγšω†±Θ²ρ‘±Γ"ωδF±ΊκUλ]Δ}Γ–°„όŒϊπΏΨkνŸ~ύ™$‡•έάΐ?šα5λΩύ½]―‹ιjΫΊώ{ύΟι±ρρt€€φd 
-–>Fωω¨VdΜχ©½ΊϊgTωίτι­ϋ?Μ±ώ>!ώ#±eΰΆψzΝΔ7šo2ύOχΩκ’λΛι[ԜI+H£νΡA«WΧ¬6Ž8ޘή!nj2:Š)g^qηCkl΄6[ΡΤlU?†;M !0ή,ΖuSπΖϋ‰h΄Ÿ΄MίΒC˜š€ήω>“8«‚Y#<ύ3…ώWκΑœpZOΌώ½1 Άωv»=Λ¬ύ?sψXμ‡Φ›ΘDJ&Ι/,aRψ/ˆŸ‹².T&•e|ˆ“8-x‘ΊV_ε!Π½πQγ}¨Ž©ϊλαC”Ή_ﳑΏΏ10γc3Y\&€fSϊζ#‹;6VΨͺΘ{‘˜MNώƒ}β?Ϋ­ζYŽώ'o€ϊoΛΪ[lύΟFη-BΧ ·{ιάώ…ΜTƒ΄Υ`…΅4e@dLΥe'Ρ^‡o]ώOYΉΦρŸΙΦ1ešΙΕ[!2Ψη‹₯a”/τΏm±ύvk»ύ_Ο­£>-2#oG17€bΞΟφ–Gf*δXb"Ϊ\ω?5³SΘ2ΤΓjΊN΄xŒxΠo]ώ«τ©Ϋ::ώ?ύϊ“O—$Ξb…€νό―ŒύΏq4ί4ϊ76™,Π4#mˆ‹ς?&4?ͺ²_ !6#‰₯U€τkο§ήgπŸ?ύןςγχƏθ³6~ƒY ψ‘šX~hhπΞεΏΊΠ>ρέiΟŠsθB±6%ϋnη°υ{aCu’€ΒφδTΆ™ήεjf~*Ž]Sk{YI―¬ΡΡȈΊ§CI…€Η’Ο‘…Ά "Ω jNΦ#ή.¬ MZ:vά-\εϋΊV”J”“–3πŸ \<₯Pθ8»Q:γ.θTC‘’Έ΅zŠε†N„ς_σΏƒϊΏž…^’ ›·ΩlΩόΏ­£λθŽxlΡίΠ"ς¨ΤΕ¦œ€°@λ6Φ‘"DΙΡξx{ΪF•̍iΩ—Δ¬^.Ε¨mΔϊΘ1™ΪαkΊΈ¨‚x’Ψ%'α ηΘα”—Kδ„'s!읂ΜΜٚβ€aιΛƒ«Ο¨ψ %° ΰnΔ+ΰk ΖώόZU+fξG³|½,ύχΧ=δ?<\ΧO&‰Ρ—ΎE€ AžψK([… DefgΠ:χzq·εm6 ―eY:ώ;Gό½Θpς³Bμ8PpΤ‚ ­Λ»LΐM‘Ε/~L΅}ϋΜ'>όε“Έ~HžΡΆHqγ Κ§Ώ+Υ~?ŸQύŒ ό­Ÿΰγ3 { 2° @U垁‚πωΗ―\UςdŠ6aί‘όο†zπZλΞžlύσV˜h±‘Ty™Κμo ώ‹­Zkώ—Yβ?oQσX!r?/Ώ‹§ΔύŠq($ p±ίPκ§°±δŽ_ςG‘Ι0OKω% ;‹‰΅ΎΤ?cΙ…ΩίύόΏπo-'χ“heθCϋΟD€ο§ ~5έΘ ήRΏx«ώgϋνΧνA{ι-Oμ~ψD^7‚ΠŸΐώ|j žIjˆ% ‚aέΖSXϊ2=ΫΥύρ/Ώ£ρ‡όk1Š―ΕΘΖ°œω_¦Ζϊgωίτροχš{†υ―e…ρρ€"O Šˆž M;,Λ~£γ?sθζΪ«ζLNUuΪ=YΪEaά8KlΤϋ!IΦΙc3ˆ(1ξ’D92’e•ΖD#Θ~ λf­ετλΟJ:‘€Vώ— ‡UσΏΜ‘Ωϊo‘‡x±€œLΟ½—‰μ 4uߍ²'/ΎFY₯p|T—žiλE•Λ/ŸβKήΡν_Œ,Ή2ιkαΠΌ€½q†ΌΦ—FΠ©X^\ΣΦ ―½δBΎur%ΈςP\+"Έ₯m’cΌ„|Lψς―~ό{ŽΪTcΎ7ΠΡ*DϊπΏξ΄w>~ SΣ8 ‚­ώ=›m[Z/B_@φœΈ—s‘ΧfjΟdŒ+U<ς (Κן7ρΞtίΉ¨©ƒ5uk’CCDΉ)ή§¨‡Vΐ}Ό˜ Ω£<Οƒι Πή DoτEζ;£πλΥπζ5w"¨½oyRΑ?3₯‘GAΗ¬v‚kδg& ζΡw_ξZ_lά#!3SPΏζϊ.ΞΘά<"`ZΫ§¬Jš[’|Qλƒ υ”A,εώΥοτGγ”ΔQOŽ]¬Ωja5ββεΏώ0ŠC⒏δEmΔŠͺ€ϊΤμw:ώ3ύϊ‹τη0K •iΟφΨοtόwψŸ‹ι[ψTυY ιΩMΐΡΘ_l½ΨXE`»ΛΌqTˆ.LuH~ƒ4Aώ|Oώv"Pkώη–υΨ–^ϊκŒΰέsCλε%œTQβ*i(°giې€)4[†goφ™Ί’Φ9‚‰Υ­R[(St λk`Ѝς<Πο‘83°8ΓΖ"#iϋΒ›TUς{πd›~ /m@ d{;±Yώ«1ϋδκόΉνά‡©΄¬Ώ½vXϋo½ΥρΏEΖ‚™RΝαNΞ¨ ‹°κΌa "ήΕ„Pί όίH₯!I±¬w”$»¬JΏΠH?x5a,vBόυgϋήƒC£€brθ«”θᰏsBjύ?ίϊƒ―—ΐυ£)κ­υ†Α{έaόWG{2 M9°™“L άCœdΰ/.*Ψ°Gω'­±ώ‘—sΥnuύΗLρŸΒΚͺ«emΆΧ^ϋα©G|dΙΏΥ‚A>…!Y„xE ·‚Ϊ–΅°…ρΗΏŒ‚ ―‹ZώWε¬ύ7ΊγLλώRΪ€-ώco8όοYdό§sύO;€ΰ„‹˜δe₯Aq’Δ―&>_½}eGηΘ”κrεΑό§ŠΘO;—χT#m‘‰8aΆκƒo ΒIΉ…”ψ„ϊ©΄ςQh—ς >Ω[QŒΓœͺ,Ή™‘²›*5–sγμ1ΜζΥ*ΐτΐφ~―σ?η\Σ5ςH–#i?&‹r½ώ™ χέΠZ‹SO"Τc£Ξ•₯–‚$q 
ά·η$ΎζΔ•DΖYl\STγcf$σΞPξ¦hΧϘγ2‰`\χ:XλΊώŸΌΟZΗ§_JύW.'¨΅m^_Ν2?ώsΦLώοx/ι5Μslsͺ1–†Η3ˆσ½€ΕdŸ%ς|έƒίBOŽx_Ν‰b―ε¦Θͺ{ |\Β™„ϊΨ=Β“{«f,B›š™½—S―l< έzο@ΟU­ω_ζςζκ@]Έ­ŽυSώ_ΗΦυ‹πΉι[δa30+³ ›eoΉλt½ΐ½δhlƒ|έΓΖ§k‘ή0΄Άan\Ή=|I@y­ΟMΙ'ΖMύπ€2lύ…B[ §`Ζρ³Α”―pγύπϊv!.­ u•°‡„ΡύW£Ι•#²)€*KΕ(KΓxtl›$θ࢚ΰ6‹v n―€ve|Ώψ15ώt zτg~x» ι¦g(Π«}L+μΩ€γθ³Ξ‹&]b럞ψO]@Ÿψf―ύσ]† 5Zσ­-ΓΉέjϋoψŸΐgΔμPΑΩ-Ι™Kγ„œM›U °H],υύT}ψ·šuυ'όΣωlkΗΣΊγ ςŸJΣOΟ]ψ?6ψΟMρ'²©B7nΐ#Š`‡6Ϊ°-Φ;¦$ φώ|]}œ¦Π­‘”ϋwQζ΄PfΌ~bήΛ9‘‘Tl"SŸίHΊ-©΅“ΎsHη5ŠγqH\>#d„SλL°oSΟlotύί\λΟEPΫψΦs6ΊχψO Ώ³‚λ-`•σ=tnjΨMΨδιχPΓήwΖsάώ™ΰDn„(ι›Ηλ4]K [¨’J@@ŸόŸΝFΧMΏώ’œ*aA›gΏ₯ωl ΠλΏΏ\ΗΟΔ}+Tο– X_z—ύ5g²°)ΜMΉ-EΦΆ(›ϋ΄μ, [Ί_wIΤxΨ²Κj @NQεGΙβ.Θ‘kθ3όν’6Ymzw“›¬‹§ŒOηPυ˜Υ’R$έm|[e(|v€ψαζƒWK~¨ƒΊ&οpyεU·Z©*;£pά•…ΊγφŽo`/ΝχΣέ`awόυΏ₯ρί ψγΡξjΓΞ–ρlmKη,υΒz#»w‡ΣδαΩ·Z-§œ}Llυ Υ†*dj$U Δj?¨ύ?ΨΨ'ΛΪjό7“Š#€nŸό±oΡsΆLώοΞΆuόwψ―έI·Lό5ŸKI <ι‰Iz‘ς9(U8ζωΧŒ?ή…‰ρ3α@AρΓc€–ΖΚδδόO;mΟ·ώΉπQ”ή¦χh­©ψ―½Ρυ?3θ“λ•:ŸΠO\ίt£(Ξ°0Θkg­΅¨_ΧΕM^ ΅χΔY΅ΆŸJΟΧ»W?Ϊ»ŽŒμͺHΩί,iVί2ύΧκœ —$Ύ _I‹ζ€YΙ…NešŽ10J …ήΓK ΥP^\έ·A¬qΊώφ[žYύP{2«|Οƒς,―οαڐD.πxWΌ9ˆŸ}o,όθx-§^,Žαϊ‡·³›žCχ’΄­žνν8{5ƒώα“wΉŠ~#&Ug4L±Ι$J«ˆ.Xp§><ξ―?₯λW±*Ύ«Εθ Άο_…θγs6ZώO&σΖ’θ_ŒmΥQ?JΩϋ½C―Ώ½ΥωΏ³Ψo( ςωM9.~„ͺ²ώΌRϊo;4lνέ­U!ϊ/€cU+°Š£xάΧ7Α©ρ>‘έ–UfΕ~c<|ΚϊΠcq¬†š%η@—κŽ±{dOM‚ΪQ6κιΒ£[Β«ΔΟΟάT τˆΘNΎwχΎ2’•0QΛγKkΪͺχ·­b£VΈ+£ξUdc ‡\€ͺ† ε΄κAWώ«£ώλ«mg«εδϊ_±ή—\ν~Ητ΅Φšyύ/έ«ξ(ζΐ„|ΦL]zz‰‹Ή¨ƒπ• §oE½²³w$qRlDš˜¨ΟdΙ[Εb%[Σ,nXή5›€μg7ΰ!u†υξs…4ΩώPUsΉz,φp²ΣφήV9&£yΞnQ5Φωις]|΄εΕΡΙ¦†‘M‚Γ“·KΧΗ›ςšFε.ρΰόώ~•TGΰέγ}/4p° =*„/„ο₯Όaω.ώΈ rΑ\# ½ΓΚΪώΜ‰›3πC?γίMuΏŽϋ!0αkuƒ`ƒzΩπGώΐ›Ά:Hεχƒψκπ}ΌΟNεΔ«\Ρ‡^Cσζ&Ύeάχ ΊΏ!Δ± πΥvŽQώ΅ΖS.Ζh:υ¬K‘{J0 Μiλ‚Ζ?‘ΗΧxξ‡5V•”BŒŽ>’―H0σV€vΐ€F΄AP1Š'΄Άθ΄ΰwZY-x\θ^ΏΤn6ιS‰Ÿwε±Βηθ»ΑSα{Ν{«‹l5VɊήlω—N=ΑΒή’)HατηJ ΙΑ©yΈϊΑ±&ϋVω&ξ°φ/Ε ¦)Ώ:ώWFσγΫ1πΟ3–žυλζμ–4 €`lί\™6›ΔΕVfΑΛΓ+²Ύ4”…s±6υw₯₯Z=T¦ž&l/9e²W0 {-‚ mΌ€Ρ-τ–Ž`ω“iΗ Τμ?ΕvϋίZΫΊΣδφ.O'φۻ͎υ[:ώ3{ώΗ¬§zgKΨ’ύΝΡξ©œQ*0(Ω”ύΝJIΛ²€Aξρ—$5α>t‹>K —+ΝЍ Φ A”­ΔcωηJΧ’θ8ΑZ²>?Ω»Κχͺ°5‡™›γ•ΐ >E3Š3δ{$’y((ΟέD·FΈ  ί%ŽΠΣηŒ–°K S²šυ+eύΤCŠυ VΉuα,υ`μΏ vύΘψCq—ƏIœΕb_Ÿ™₯%ςήR Ήϊ ρg§υΤψ―” j`Λϊo,›ΙΫ¬χΊώkρŸVDΘ­ΐ˜77 ¬hνί)XΤX―ΦΣ°NΎš€žwίPVΒ…|«;$ν)ΝYN;Έ9v"Ο₯\|ΘRLΟΥο›λŸα½„ωn‘—™πvοv—«‡&Kjό@rΗ―cΑ֊ 
kb4ώ»γ?₯@ŸϊŸ΅Ξnύ9m•@Ϋϊ;ŽΓφ]kο ψ’ΔžΊ^ΫΥ,©_―qŠ ¨ΕΗΥg΅@S]ΓΣ• ‘žΉ±4F†…| 5@ωo;šo²υO/«ψtΒNΣ’€Ϋκ6έۚaŽψŸ›‚“Λ>h·€wήΨ…!·¦Σw)+NXβfβZΕVha$ω™ΙΉΧ$(κQ}Θα›[U-{Œ³ΝλΥ?Ά'ζ’Ϋ·Εvpύ‘$ι=yΛ₯ ρq¨7J Ω`ΐΗm„ρρ(/q)N‘ ΡΉεΏϊ$ >ω?»˜lύλ~ΘΌε^ΰ&βί9[–y―γ‹ˆxη$ŽβBη[ΠψσχBΤςfƒ΄>Κ|†(2‘€N²;±?ζωk«χA:ΰVzπ‹wR?Ό ˆŸ z)xδoΤH$x"h#PӝΕΖQΙ¨ΰ•ΚΰψD…ɧۈˆ.Aœe8{\—ηŠΞ9ΈΤ:ŸσIΏEΟ3ϋύ/ΒkγmϊϋŸΨώΓ0|•χhΎ"†3ΩψφίΦaψ·ΦV―"μΏwAΨ_vƒe=g(=δΊΪfe?U­Ρ’Eκnr¦ΛαΉΠb­½τροΪ_Xύš7ΌRΥΤ>εjύ/πzWψz x½†@/ άϋ·Š”?².•·Λ«?²o HΘYμ?k­γ YΑΝ€ΪτΏ΅fςw›΅ŽΏ_ύ―(iΫΓWβΚ{ΨGpς#’™φJ–¦ΪtύΣSώ@}ϊΩΊχŒλ_Cόƒmϊίή±φΏ£ϋΏ.CΧλλ‘[^ι7/^<ΈhŸ¦ΑŸz/ϊ3ˆ@κ§Fa«Ζ”Η@Íw-‡:ϊ؎φO_s”^!ΚsO {3“k0]ί \y&o­υϊŸ§Ρ4Τ²h֟β]γ¨Pj;6qΣ3ΔΡ1RςԎ™=θ9ˆn€Ξ_3šHώΞίCΆ4ήΚNm Ίv6Θˆ" Ψ‡"‘uδε W\@·”OQ=d4€.τ3Cωn˜Ρ |eΝ»Β‡ŽΗ’4 ε5iH C5Ι§›νΛU)ΓΖΟιpŠVπδ“,ΓΝ£-(έd\;£WξIΣ%q§” KjlΠ§?ύ I‘:±_– >υΐX>|"V,τ||ύ|ύ†2ΘwM ­Β2΅y‘MψoΞώίϋ½φM†‹<…/^θŠs@[ωφŒΟήιϊeψd}|έAυ̚*O{/ιρΠ€;jŠ/ψΝ‘Λ!¬=;ώ»15Ή…½¬$’€Q½S±ͺέ‰χy΅q"ω―6ΈOώοVϋζXΔΝΌσΕ=*Emύ_ΦlώΟΖr4ώ›C'ž™Ύ’ϊN‹˜ΈάςΜΔ}+μκ-―Ζ2οŒΪΨ&Πϊςθ,­³Λ4Τ]( fΐ»ϊL%ύ’ΝnŒΖΞΉŒ11φΙ₯¨Β™½uGξ”Wi€W³θyςΔmλ*ΓΖA…Uƒξ3k €ΖΥ*`΅ΦυŸ3?ιMΊΙ‹$Ψ†μ-§ώKΗ—ααy$ΪΟ:L!xρ2¨f ψa[σbIΖ“‰l§vΥή*ω$K‹ΞΞZΡDΘVλ)‡6 ŽΌ ΘϋNΆGΛWΰ]‘xβί$Ω‹H*κΗ1Σ”FŒlΈΡβΕ+ος±v!«*4£‡t‡½κAiρpŒΚs œVΧRΓΤ ‡#Α>ωšcΚυ?Ύ ±“π?›Άίz·΅9ύ_4ΟΌψNύ₯ŠΣs€Ϊηc:W¬/fνR¨ώΦγ©?Αχ?w'{ΏΣρŸΙδnι€Ν Ά– ˆ%Š -cΟαuvΊώφCύ%ώω]_Ήή™LqbπηrΉ‡ φ^ΈY†HΒα©d=Ki#;iVq/=¬2l$Θύ%R₯€QJ;εV1+P—f·Ι@ ·£ωΏ§ΣuώΧZύμ ±ϋΊ[Ξ™Ϋ§*¬M―™ώπΫΫkϋ"ύOηp2ΚdyR―L‘:wο]ΥμfζΡ6SeίνZ₯Mͺœ—ό>Γ8š¦ψ„ ό ©W3υOT;^υΟxςΏ;"θmΤDΛιμ,‹S(ξP=ΤWΪl ]ΐ­ϊΟτΪ8–ŽΟ`‡~βϊ¦E1!O °pΞmσžάΚ„κN\)kΠwβ₯Ϊ³ξΓΰVW¨ΗΈU J"p+μpjœaΔ'N”η­TΏi τ΄όWθ“·q΄w²υOΟΧΣ)ΘC:BϋΆυw,‹©°Ά{οη…„­ΊΈA€Έˆlα™^έΰεθ'yχ‘G6gΠ=ψΟ ’} άζEδΠΩ³O TΓΦ/Ί~XΛ,fবK_0έκ?ώyUŒ‘Ι‹”‰ΦάL.?κPμm ?Dt[ΆφŸ…{jό?νϊ#U1}GΛ²ΨυΧω3Ψ*–[Ψ<όއ›‡Β"Όοω™IκςΫ^yπβ’λ-GΗG­ϋυ@ώ/’§΅Χω_Σ―n©†mόίpΉ™όGχY’7ˆŸ ―ε° ΰƒ²xι 7A™OAœ–υΘ© ΞZΪϊΡ¨FΨί/‹_ό˜©/ D™ω!ύ,b圠Цc MΏ§3MEζδΠυhTd‘Λ t€ψ§Ι /+dm°–«ZεΏ,Π§ώsΏΡύ&[\Ψsυ3°JΓψΰ@Ζtυ»υŽι·ΩΪΪ·@ύΟΡj­ώ=Δ,γ ΁βU~=pQ–š'Aɟ4;ΥH 3μ½zώ¦σœU”Τ‘ϋ&σ€ΩQΜμ§8‘²κπ–©=Z$‡λ³€IσώΦRG54Σ“uσ«η“εžeHϋPή„Ijy“>Q, }€|,7Λ x3ΙSLέ+Gˆ§‘¦δα Ύ―bίΥΟ]VΒ5τ~U&½k£`ςW7 Ζ_²Ή‘γ4΄αu%7ί7OTVedΓό-iΗ9a”0ϋΈξHΆFήΔ5π(ή 
jB΄ΒΫt’j·X)J&qΩ1d2ΤΎ$™D§ςTΚQΥekLιξS™\ΙSμζ(9,?]νy띭φOΆώΘ°u Ψ  ˜ϋkΡiβ?{–ΩΩoΆzύ§χpμD Žη S―ήU“Œ„δ“ͺ /·2ΗΓο_ E xθ7Ε›xͺΈΞΖ‡~’yΈHYX”ΧΥ…A5―™O>„’]9Κ`χh_ʎ΄‘αΥ"Ν3 "•C5VςNƒΩZεLρŸζ˜0ώ—! '}‹ΌΚ―ζΝRjYύq}RλoYšeύ_φœ\?(|Δ|'²„Ϋ˜Λ Fd|š%ΐ ‹²εεήٍ"”oΎ—o‚‚6J―©Ε™ς=Β¬U›ϋLpΏ†8Bž2Α˜ΩαΞ|MΕ JΉp’ΗσΝ’1Yβ{©ΰlω^ώΠ27ζ ‚ Τ]l3\ξœτ°dˆΟYψXd–7?»–Lsmψvπ ₯Β Ώ$ΞhaS·φvn ύΰ:Ό,Ό6qM/Ν’šΞε_\aΖΡdœ4?½JΨ-ό­ wΨΰ>Ω ¨#k”%r<ΑΧI‘-vjXω.Ή]•™W·2xΩΆπT‘σ}  κό–΄|„ŸΌΐυΓ‚ίk-`Ϊd|ςtΎς ΚΌŒω‚Ζ₯ζ–ŠσΤ:o:V3wΠΔ$Ί'W­E ¨ͺͺ‰ΞζεΊ•Δ’Z]203ίΗΤζ±’…<RΡβwΩ΄h…-_³fIΫτNUε₯ΤΥ(6ιΤΘΊΔPΰηSΓ?Wχmυ•α.><τ;‚;*vΛͺ²:UyΑlΗ«ΞleΎsΈ£|ίΖn€€`§`η9‰#άPαfxί ό'εΉͺGδΓR›ύ?jb€}όΊλόλoOγ[oν-λ[kο ώ?―*­©€Ί]—κφ¨Rέώ†ΣcšΏεΏ£λ'\:&‰‚Κ*€Ϊς?p­7•³w΄ώŸAώ·Υ`GZ“›μ½†;ς,θˆ5ͺσDΨdΛI,iv%rύ‡SvV%η7І»ͺ5wUHιΜΦρδΏͺ Π>ωŸ{KηΞΆώ₯ΰPΪτΞαυΥωŸ ΤeH³°”δψxTϊ+mbζά|πJiΞbσ‚’H1›@=νfήωβλ·\nV5yΈ+lŽ{y\$RABγ€‘Κ ήi‘J @ϋ³Χς6ύŸ‡ΓT9ZΦίήΩlύ‡΅ΡωŸ ԜΠχZ2 Yρ}9 8 ‚ΜΏV(ι·qΏ|¨nq6&$Ρvp—ˆZ >4–ΚyꞀ*ΘQiΟOhΘW‘Χ’ρΩK Όjb9m5™½0#£Ύ2«cΜWNž=Pϊ_‘¨WόGγΏ™βΤ+   Έ Y» ½ώ;ψ§^ιρί@ŸΝΠ(| ‡ €†`)ωΨ‹‚\ςΆLπNΙΰΤ'Λy4ΒόΝvΐO'qwΛ»–O½–XΟni¬]3Y―‰Rσσ<,cό#ΘfκΑ?Œl&-DΦ?KΦƒ`ό·[λώ_‹Xβ₯ζl‹AH―voιόŸΙρίph71,cSUΥφγΖg6πΕριp4oF&ΠΞcλγό)ju¬¦•ΓA=τv·Σς²υ/ς½^ύ45_"€Ά xώ?k­γ3ψšΊύΠΝ>1ύ7βl.Ό;GΠTiΐohyο‡Κι!Ύ¨ZΨ‡4’ΓV₯#J­›"Žγ'FN“λGΟί!0仁γ@ψ§οϊ§οŒ³›žΡŸθ JύηύŽΟYY₯τΡψύ‘ΐ'7ήΎ3Ό8Κ\?J UC§)ͺ‰U㬻ΰϋΦΠ•Hώ«lΨ'w­λ?¦[{Ε:ωΎ‡>qρ_ƒ6b(θ‹ Ϊβ?φ–‰μΧ:wύο%qš€ υ3aZ©ΈώŸ’ΦΗ8Ψ,Bΰ¦@„~βϊ&œ,ΞQHζˆ‰α“wΉΫp― N9.Κ€Wη‘ά‚_vε&3½ή:€€Έv€3(eŠψk±¨ ₯ΈςYXΑ0jœ)ŒΧTΎΏ:‰,½—1~δ&o&ԝ‘Λ$+±Gt*mψοG…;νθ{§©εήw Νδ!–ξΠ[ΈΚw ‡c‘‡¨gSΡ χ#Δ“Ο6V$_‚;πu% γ$ΖΰL€κ+£FQϊB;ͺ>˜ώο‹ ϋΔφ{έq²υΏχ@ΏyˆΔL₯¨Νƒ}=t]7ώcΣA$θϋ]wθU¨Βν"ΫY°c.°REMHξ±ͺNΗPΠ‘(TΨεX«ζ(:€ϊψ6[νŸq€—-ΤΖςX›Γψ,KγΏEθ:"δ½”>AKΨΒdωΈ/HK’= 8ΐΛJε±)9e˜>²|ψš½&ξ₯lmλ,-;ΈΓ„ΞbA₯>'?ΐΟή7n²}φ=<ƒr²'ζsα†#φΥލڑ°ω?‘ύΏ_ouώη λO1OΒ²Y[;Άώ§ρίυΏDlf€¨ Ώ€ΌWUΉ\žIIξO―οm,μ$ΠΪ@@’²†Li!°iOžQ’?Σ΅EΞ€Ξ8έ ™’€XωΚ²‘"ΞώQ‚j B]RQ*rJΪKNx’{T φu¦Ν}%ϊQ !’ϊ꽏Μ=t ’χ_-XvΠΘXŒˆζγ²uώΧdλόζώq—―?ώ·¦ώΛΪοuόoψŸκΪ‰LQ“6Υ«·@ΗΑ­%kώά‰Ε6p(ΚΠ‰ŠόiθΕ±΄šjρΫΩ€d‹σyΤD²ΌD­6GΧf\Ε Σ~{βjωw©7ΣR‡a΅q’&˜»‹υj-Φ`šτ’Sΰ5ΰκΫ}‹w-cυΨj耕4Τζ »ΥhφD©€G°(*sφήl·‘,Ϋ|Ο―pψ}¬dΔ™‡~jt‘Έ@£ϋ₯^Βέy]•$ኯƒŒ6Ρ(#«ΠUy]43Qv¦½ΧžΦώΫ¦ψ3ώ[²ςγΓψŸ[ωwόχ/[Hέƒύyτ?§ώ³WΑ•ύΫώϋŸ§π΄~ޜ―Π쐇§²α(θΫ ιί΅9 
άγΚζ0!―ςΊΦVΥH.€ΉψZ’7§φόϋΓ!]°rΏ=nβμOίj ι°]ŸΜΧ¬ρώ?ŸYόΣπΤ?ηMj½4Ύφό ŠBo§Ύ¦΄e0a^m+•Σϊ5%ί^_^ή:hqχϊτ„”έe:!;–ΔτšdlPΫφυω™€3Τξ5-Ό:gΨοτ΄³.dΙKΒΈb•7Ε‘˜‘ΒטΣΖιΰΛ¦ΏsΥk ~Έ4πδγΊΓ:zΈτυ.hŠf;iΆσ—}ΨύςΣ^ΑM?ϋ`ι1Τ•ΛkιΏϋό[Φ`Φ- ς?©^O‘ΔίόŸύ?’™5ΦVRΊΟim;”4)½‰›νζ˜\ΰιΪ΄*=‘.%ύΔϋΊ¨Ÿ‡p@ sU±ΠΔn²…Ž£ω₯Α\ ΌΔλξ@IυσΚ먈Χ`·Έό_|@ iώŽό;֟χ_¬™α²―ξϋοΠ=kΏ―ω»ΦχvŸΦδw5*UΉ7NΓ—ςόmύ\ώ‘iΩƒϋ?©–<ΘΕώΞώΛΦNίΩλΟ­]ωΟ¬ό»ώ―‘ηΜ‡ίώYΌyόύΰ}₯£§ςΛA‚ώγ ώϋ^Φ_yώρύ=^8rˆ…έμw”ίΉ©[5Ό¦²ο\$λjϋ’:—ΫŒl§;‡|ƒΈή^Rχβζ•œy«sΔόςφ‘Λhψκ―Oαωmυ£ΌνV°« ώμ‘iΡε³Ϋ°ϋΎΒ/δΗύφ\βλ·VκFη.Ε†7έ>7·ΈΌάκzqyγXE?tρΧ£κΌΌwκ_6xυ ,/o=ΏRp» ΏΌΥKΑνέώωΦΉFεΠ%OΔΥ½EοΠ»Ίƒ¦?π“‘sοœbΉΌ~zΟΈΌΥIη9ίl*.>Κ› ν€ξ­ΗM Tyq1-Νmμ'jˆ5vγΧSVΒθΟeί” „œ›½ˆCιDŸ3ϊ;ΐ.F–πΨyhWp^ΚEράϋ;ΰ”Χ]οΒ―ηŸV—'ϊόΐ.Τ²[m_[w/R‡†..7ζ;Θθ_ΉΨ4rΎL‘ηM>?χžί’γ*n6{μœπςςΎ΅Ί 0”\„?΄-iUٍ=ΆΩŽίYE,Πχ§°ύ1ρΜΠYz¬=Ω]9xϊό+Ι•o‡ε^H!ΊO ~Ο‘Έgσ}uΞI3ηAμϊ·0?›?Vψ‹/ίϋ7{₯:WŸθ_ΉΕΔoŸ›€„ΥΣλ‘8nΖο΄ςΘpr‡1l¬^ώtγύ{Βκ)ΌŒ?ΌΖˆ6璏ΉΟύΊ{{ή‡Ÿ½Η;ΕEΣ·/«†Ÿ=œ€\©uίδτcωβe·Βa‡³½ϊ8$&ΔΜΥΗΆ%δΥΫ5QŒΏλΡΗΟ™(}•:τ;E”Γ:γοΌ<†·©gaͺB½RB" ΊΙρ΅σ―o38Œν"Υ±';ΏΩΞx?§\Δ‘λΏ>ύNάΊCττΔKΨbΠε±ŽΞOž§(@zwΧΫ‰›SΧωυΔ<* ކOPz™”ΐαΓζyμΑs*βΰύυvκnΓB8zηΧϊϊ<-Nޝ#|š§Ÿ ‡oA`>°5[χ~ q=tμڏ@~[cΦ~Κsž~tLp΅€½φ'ΛKϋrν©ζHΏlΛώΚ[Οu Ζ΅Η―mΦγo<†8y€t&jqA>vδͺΎχφ7«αptGuοΙS±ίΨύkoJΜέ½ηςΐξέKφΐ”;~)ωΆΎF~ϋ,“ή§M~ΎKiyειώhi;ύίk«Η#Κ  ‰‰GwdZ?ΕΗΡGŽΆΖτχΈF)ŠXσ`T<Ύ΅ ”ξΣΥ¬sžœ~…Ffΐ‘φm:_Έ6υLω ½Ό~žzδθ žzδυy½Ώ°ψ;]A/Cγ£s–»•Πή{`Š£γττLζ–d9Z™Ώž*-:—±Χšωλήΐ·ξ6P)ϋξ όΫΆΰN—ΰΝ©™ΰρβω{ρΘ@q₯αΑ[ΗυγzΦΎ±©5}o­ϋXωΘιφ‘#;°κκ`Bwο<Ώηxεεuχ}Υq oK†―“ρFnώN•η{ί_k},ΏΆŽΑιRgέO—i³v―M ϋ½₯O£ @½lžΙ}πnξ?oφ0›₯u5ρ›}pσoχβΥί{qυ;Ώr_ ήj]8γΑ‹γzZλΣλcϋPn¬z}9ώΣ–dΓt\TΊ Α«}7Cϋώξυ©{yύΌ{)iΰ]«<>ΎΥu³‘·?{o±ƒ₯χϊοΡ*i½3ώ–­’Ώή‹Nς#w{–ίXΙΠρφ;ak5..tνd ν‡xݝ°cξΑθ~ύσΟχέ4x³)ybχGΨ>υοψG‡/―ΗrφW^š[ομž6?ΚΘHή @O·Ϊ[’ θŸψνψ/Οϋ?ώΟνΛ©Ωρ)"°ή5=πθŒc>Λ—ΗυneSΏ4fήΊΠέ°§ˆιSσάWΰεόzΰ}kB±"ήγλ/_ώΗwXΞ_N_ΌϋΎy}Μψ»Ο…λg¬LΖ’Ώu[ώυΊή–/§j­sωΨζ|ατέ7o{ψ’?Κ—?Βσώ Μύ—? 
²ΏΟ²X֏ς{:4οκπ›c΄ΜAυ½HEγάΉ°eΆhJ[·b§όεθx¦€™ψJ ΏϋνkiέoΠAχξ|ω?ht_yE‰TNCώ°/»ΝAΎόrtόνΙ]iζ_ήΏ6—ί9Ϊu§ς|ΗκkM9GςοlΥζ<ΒZϋλσ?΄΅ZεόσWόχI@s έ,ώ ―ϋ Pεšβ$o_ώχ hRΜ»'ψ?χτ(ΜΉ/„Σ›**‚;8³―@AM‚άσ·_ZnR᜘χΫ95ξ?Ÿ6`*9ΏΜ†γΔλ{p“Π•mω|Ή}ϋoEo[¬ίΑϋ~€wΏ¬7«¦ΚKϊyF_Q‹’.©΄³Μ&•υJ «d ‘˜δœΠFyWŒ’ά2WMI „Ρ\gI5‹οˆLαύηΫ™:λ&lϋ΅©"< Ɂαύφ™ί}(ΔlΎ6Ό_£―_ιΚξ%4Zk³mΊ#eςπ5XοwL'ω:ΐ…\§yj~xά~y*ός΅Cρ~νΥψځFνΟ«KOτ‘€£ωιΔz<1-“όζYlQ7Ό~ΑΨάτΊ—5WO1‡e'V–C΅κψ«ηΌ4“έoRhέκAλκDŽw$[3œ ηLUάψ˜ KΪy•­.Φ—m\·Ea«λ’t΅ƒωmύΤΰ…ΡαΞμζξŠ₯±0‚qΓTr6ςΒxΜ2{W]ΦΩWSŒŽλβRΙQρZSŠ%ͺX΅—AκΪ€½|!ޞ‹e‘‹Ώ»­ΙλŒ7ސ1ψ/Š(Έ•Μ•R"σUdc…Λœ©Xd‘άs“’U™k%˜”|xUH<―Σ*l·ανK³›ΤΔ"UΛΕ0 ›E‡ιœ’ž—Κ™1!… x±,G{H_½)^¦"UυAΙb³ΐ±ΐ° ™6ΓΓΔ‹―v›z8mψπΌ>HΎυΛԏwυ-u9IάβcΦΒΊ¨kˆ1β|)*¬GMΤ\‚Ι{ζ\aΑkό/β€ag\ΠΦu9ŒŒωΰykVνb΄ψρ!χυ…ώRτ5ξ‘ΐEΓΡ59!ΝΏψ±2$›$uψ0V[œwΨwFŠL.\λˆ'2 V¨Ί€‘πN©ζλmΰ|yΑQ”‘š„Sa£Σ2`Λy+τ~ŽB: Ν’L–\)ΐΊ “Q±¬‘]ΌυqpώX?‡—u#'όνΛΔΕβ 4kέmI,UεTš;‰-—а1c¨@ΒΑK²ͺ²ιg³ψΤ(  NΥβ[ΡE‘²9κ³U§‡ ‡ )lΞ6Ι<Γζ,ͺŠΐ½ΌK.Χh|‚I*‡’œuΤvΚ*ΪΖ5ΩΎέ] bd"_Jι βΈΝڊXΖ –‰₯,#ηΐΌb'xΞΎΚh΄Μ"*mόΨaΫ& ½t–H,~Τ2¬žl€$‡£Ζƒ/Ϊ²˜’ ΗB#3œΑ •–•g6Te ²>gœI+Gίθϋ|Ό_h–…ΧΥ₯¨³γͺD£LpF&l'€‘j…•ΗμoBv)‘X…ΘsV ΟΆ!ŸΑΆΤf0KΫ¦8ψ‚4O‘6U«!‹«u!˜N(5<€”΄ΤΙ;€ I2ΓrΦJM½?U]ΎΏ^܊ΨΞ’&£3N·*Άj‘aΚD-"«²d ¬JΖjξ1œτ°v3.޽nWΆϋ‡πB₯4ε_}΄Ό‘‰ D%Χ€±hΞ•±6JY#t(ν(X(8ΎŽ;nEΦ>x OΦͺZσθ8(9ƒͺ¨ΒSo/->cY–₯H§VψꐢJΞYœ(“tc»+s˜’.H-±½NΓfWk«υΣΛΑŸU_›€ς©Jε·λ£?όξ½§ H­\t₯&_]q@@ŒπKYNi8 ξ)υΑzΟL_.BχϋΜ°Σκ)€νF|iΌ8™ƒ½nφ₯Ήΰ&ηΫsσΙͺk3BιφϋžΓNΉΕχ¬Mc•+œAβ%#K%₯ΑΰΊ•t­Rg Α —ΡKm!0λέ'cΏyZ§Cδπξ;#+BWΙ@ξ‹LYh‚βLζ.c…ΖΈSP€&U©–dΠ/ΠηΓ`δyύ³λ© Βφϋ.> 7Π²£φQϊXΈ³ h‚§ γΖ{eƒ9KXkΕ‡  ZΓUγΰpΊ78κοDˆ΄_…xπ:=cΊeΰ―ϋMͺίzΘ`i€HΦqYW>Ο°`‘&Wb`ΕVi`;˜$]-"˘V²a« Κ¨0"ΡίρgwΗκ₯±ΗŠYΓXUEn‘ T¬’θ‹φ°»-ΰMuBBΘ•¨½„R- !D©`4ŒΘσ³ί½qœΥo«uύrθZs±˜OληυŸ›Ÿλc0b«€£{―ŸVΉiΗϋΝaŠΆΘόη?ΞίΥΚw»ˆmΆΗœΟζS+”ρ^λ:ωΈ¬4>\ΚόΪκπύώy¨"ψp理°κ¦Π~½ΰ‘ ΐ Fƒ1ο)“_ΫΕL_G+/Ώφ‹-OΚόω:H4Κ₯j’4ΎiNθβσλΣCz9Μ~=³œβ?λgšƒσ½Gι§ύqzθ=ξυΞ5zψ8”ωυzτΘ;ΪΘajSώώΣ x½_ 0ΫργΣα‡?ΒφεκΖ=σ±&RyΓ^μξΫχΝτ^ΉXρ·—“GυbEGΦfp1ήηύΪtξΚκgOνΉΕ*-8¬™‘€…Ε5Ήb±Di2ΙΗμΐ;L©d8L’κmΖοΐfŒ_ΗߞXo(j}}И‡`7Y”ΘJvx¦•”—δt00k-Ζ¨Š\Βζ^Ζt1Ζ {ΞΫ²ρ·[Ζ·<#Οx-!iXˆ F:,ΘͺmΝ)ηhuUΎdΓ₯2@–<,¬’5@ŒcbΤΦ:Ό~ΪχQΥδθ<%Η!lΡPΓ^Ό)9€¬΄“<±L‘Νˆg‚&‹Θΐ»0ŒΉXIVtΜζzp&Dςbs€)ΌŽ"S«V@DdΚ›B'E— 
€n¬ΑDŸUJ™kΟΓ0€he,|_γΓv3΅ΙJϊ.ΕέCXXΒήΔ$4ΆVΡ)zΓͺŒέ€ N»Oΐό![1DT΅–k–₯u2ωUβϊΉ) ½―rq·VδUQδŒ[ΐV!l²„(Κςd3 φI š’Š°adY­*U˜39'ŠήO¬Σo£cΛύtν??JKΨ’–EH·hdŒ2WCq`nΈsΚ$‡%‰Ά† a³²lMρ8C G`βz_Γ·chΈό|9F†ιuV»·Γυn~}~‡0₯ό μσ}ψQώψ~2ηfΌ-PΣΐγχHτχΗςσΛΑg51χϋ¦˜ξήξ:S`οsΓ$N8ώkΧ{ QBΗjͺ‘Β f… "d ήEúꐡΟelκW’‰Ρυ‘fρS#•'Λ=pKϊƒYΙ΄†…”ƒώW!³ x(°l­w¬½€`X4^{WGυώikuέρ‹›¬•η"jq8Τ1q₯”Ύ΅0ΩΌζ•β­Π4QY―3ΉU GΠ Η%ύ—γο^.__άΑ‡ŠΉd ˜KQΠ/EήF•C…QBAΣ$ρ¦Ω oD&<:I…ααΪηΧ§ΙΝΥί[ά²tάΨy%’ƒι-`‚;ι’Ο9°ΘNJ„1n ώ«RΑΈ²XΛ( >E%Εp"O}}>ΤrΒFΚλΧ£©rΰψϊΗ۟Γ~„˜κλΡ₯·€W‰’XbS&]Ng‘‘3λŠeΐp֐‚rBψˆ=P™€0w^J6ŠEu} G‚ŒύBv΄VΎQιaLωb*Ήπ09ρa·~Κ=™ΚΟ/¬œ¬8!ΛXœ€B+V8N›ϋZU©1UgνBυΧΨ#:Υ‘§Τ?Ρ ηόσ”žχΔo@±‘‰iΨ€«ψ τΌν%V,-|A ¨“‚%™ Dl|α &€,§lx–z€άe’1κhσΖΒ3΍D{›Ό@Ra!b}Ά§[;θρΣKSI€“΄|v†β°LΉ©x&‡@5€IMΌ$©­†ΪΚN%αΩ[ηYUQ©(žc‹š5IβxbnK†Όψ–ž!²΄4Ε‘œΘΐPz»P–L€:Ηβ!j θBΥ€)ڝγ5κP­¬ϊΊ\ωΐ0—ΧθϊΞ$ i5Kž$Ε ¬ ψ=ΜΰH‘EXΎΞd/9¬ΌO"…±Œ©›Νη}ͺ-Κ›sώόd2ύˆ©νXt!^$Τχh’>g€π…s>{.Κmٟ•νˆCrΤ ‰±„ WΧΨφj³Ε]Kγ(X—[νGj‡νsގ˜³ŠΧΖKδ”½S€wθQΐ.ΰtO‘EXLβ.`h„\ΥR y lͺ QγΤ)\)Aσ™g«œsψŽbu,ύ ³"―υa[ςλΟ"[<•Ψk 0­QβΈKj#yΌf¬Fθ” T €&¬Ÿ!,₯*P»²†Ω:Ώˆ†Zπqj¨O/αqs1P kniΩIyI†\mšBVUsR0€mζ˜5B(QaΉRΉυΩλΟͺU‚“οJ’Α[zEΰ PH~SΖυ,iα΄RΊV­ηΞϋyΔΘrΖβ¨F”νΔz^¨yαˆ—ε• ͺ₯Κ©Φ»¦Ο ωoΡ΅W2ςiuBeΈGΗΡΎ™Ρ/‡ή@¦?j½Λˆ‚ωmz₯Wm–ΈΫW½=γC_َ?-±άC‹:½˜ŸY―ߟV-²·Ξš΅#W½€α ³~š’›'?½n/BkξW9<–«Ί>}lϊJ||χ=ˆΣςΖ—gόπζYuˆ oΪ׏μΌσΨ£CΈ6 -ΊΪΗno„ΐ·nτυΆ{₯C„xzτdψηυΒθΡb ) ½a"ztξP‡σ<¬μnκ'u‘1„,”3αύ­'tŠ­qβ}ž—KηΈZάΏΔ ?¬e™™w^i9Χ`KJΨ¦*3QJ)’δ"訴ΣΩe eΨάlLν-c©(i S<§LyάΆfΖΌMŠ3―­’I%ͺ²FΓ~©‘rΞX$‡ϊGβyι’φGΙΔEΥK ^<¨ηάΕ,£ ŽSa‡€"λ”ΛΑ[―*Ε‘αfΈ ΎxV«”œFm…‹½Ϊ/υΦG/^±š­†e+lGU„`.eœβQEWYΆ΅&ζ”Μ8i£¨š3)muTΉό‘υ!φˆΥΛcΨ“ ιιΕ³J„ ‘#3Uκγ€zεrΆIϋ"p=:JOΎΈˆΑ*&m.LΙjcŒ|pˆηΞΨύJ₯έ«Ix‹Σ·j~-ͺΗzεΈc0˜½S%Κ& VY&9€Λ΅Ίd4ΒαΒγγl ΐε’”σρ²5T@ψ)μv›?ϊ΅JwH€ΖN΅,ΊdaOSΪΏ—`>jΘΥ ι±ήΩο™2ͺΗa€Y]³cQ™ εθέυ3n––ύ*z%ρ‚ΖR*Νj²ϊ9ή‚J!F!χΆvζ!*œUY|₯ꍇΕg§8½ω•ˆRJ=dμβξ‚d|dπΕh…WY¦BžqΓ• "@ωΰ‚ΘZŠg§“AaΔ‚hΨΛυ_Τ,ώHϋ96Ά&[’&cΛΧΚ1KB/ m…°¦d–³Γ»'γ¬.I›,™σΥ AyIJ$ΙRβ՝œNΐŸ*Цδ恑-Υ)8O,z#tζ5IεξFEΛ­S™Š‰«SΖ—D±`Εm†n7ΞVΫa·πξ)<>žb~{žB”^?ΧiRœ4ωά}]²xΛ6E€Φω >ϊZΞΫϊnξwQΩ&€ώΪkτy,Y:ΣΥΟ+iΊ ξ\Z΅;Ά£‘«UQDFΛ₯†E £¬3Gώ(κκςŒŸλθϊ§°U'ΪΡQέέμ•vA]«6‹vϋC.iύΟήΞž“°ΛΚ­kx©έΰσ)GCΫΧuσλ@+ΩOΝvbσޟθpu Ωι 
›σ’κiÞ{Π™«'{^²[>΅˜θ;―Ϋκ"{,<Ν?zU¨­n-_/»0Œ”NOΩŸ#εgεΣΟύ.,—Ρσ4[ώαv41ΟΗnΜ_ΌΖr¬ο8‰―.šg FkZ N?_Œl’φuVy+NεfΏy …ΏKαρE<^>υ :}ήΙ‹Og0?Ψl`’tΆωψŽ΅½~2#2κϋOyo­ŠΪζ[Ζ„Γ)e€GΆ4N‘° Όe_©Ό€!‘ΜφŠE<ŒˆΰR¦:ϊ…ᙃ <=Β+8γ½ζήξΦύ‡―₯–e)S2°51/ΧU―9–‰l4ΑΊΖ‡d %¦§ΣΦSUπaνΣζρΈ±Ίdtπ %w Ϋό6pS-p7p)ΕdˆαΕΕΌ,ΜYV-ȍ™Ζώ¨°&CT.Ψ'Γ:κΞΊω€3 δΓΨWy]kΟΨZΊ|KcψΒ°P“g>F–™6Kμ1nΟ£VS Η‰μ40Ά¬ΜΪRŠW0Ός|ΘJΓ)[<ΧΟΣ²TA11GμψyVοΛ^Γ·†οϋΙf‹ζ“ΝLΫhΣΜΝ?zoLΎΚcΖ8Ή,/αW―/Ω04ϋ(«FΗw`Q؏‘—ΟgŒ΄oϋ:Ϊ'η3,αH=q1!Wζ‡sk.Π Μ†χΖς·X—Έ₯Ϋ₯θk»ΪΖψΥ‡Α$Ά)Žˆ6GΔh·ΗeΗǁ^\“g§³ͺ3ORηψΏΨ―³·ΰΔ¬mΛύΣlJδ˜6Q*Η*‘n‰ΚmΙ<Έp«e–'›4ΥJjž5³…SžΎHN›0μ( σΐΓ’ —³Ÿœlο&HsΠ`,>μΦ–Ώ\ Σ"’ξ—vΞUνπ‰’‡ΰXdFH"—’dMΉ9"TUΌ#štr°Ji΅‘H]’ΊΚ₯}(ιοTψŸΆνŒ)8ώώέ'Βp] ζ2"hŽδ<·UTb:5ž’8¬«Aβ©Μ‚f’sM,XΨq$sμ[ΩFρε€R‡6SV(†τό“<₯pm–κs/²¦—ηφΩδb‰/FˆT½ ™Ϋ&sΙ”TŒ'l)*5ρpU²¨BC“¬j”‡₯W_{οZHΓΝΫβƒR—Β©έ…—Ό¦š²ΦΑzƒΧNFΙβ΄7΅R%Ψ?>:‰ηmd[κ‰ωϊœέτΊY‹'˜Xα"% ŒoŠδε”ΘeΔ"Γ•‘…‚&5EŒ¦…ΐz0l_£X0#=.ρΞδquΏΝz/~B G,ίο@ΐζƒ’ξ1LΚHl¦J])σ,1<‚u% Ν΄P&ea)ŒΫ|4ςεu©­γγέɁ\(1Ξ…Lζdr’ΐ*τVΆΒPL š«@aUj—Γ%7–vˆ„‘κΦ܁Ο:u,^騘ΨIΧ.Ήβ=‰ιΰ³g"ΕQΧΡΝ]osoܟμΠmx;bδρηΫnδ‡μοΦ@Cϋ“GΞϋ™!£σ8!šPΒΠιYΈj4ΰBlKm#ŒΧ₯58ίAωΜ*‡φ—€β1{KŽ'μXωΐ3v4%– οΧσNœ‘{.ΧΆ ‚–Ζ‡… QKΘ%Ζ!ΒΈQ>Uθ_νŒt<¨©ΗLΔΠ|ˆ€Κ 'ΪE¦sΖΆφy$εωΈcΫ»ΡOŸΥΝnϋ,ηΛ3»U؁hκ2-tΔκVQRz‚‘ž…˜s¬HGH X#; @U£΅Ω±9©5žΥ…‹μ|5—½–Ξ΅ς²Iί;ΧπTοΉSΔχΪT† υςΘΝ ‘Β!·9₯¬²$D\βb2 ©`ΓΩ€ΪEJ¬‡!›~ΚQέ:₯³'ΰ0Λχζ82JλΔ€||eYPΆ§ζΡ3—uΒ ^uŠ …ΙΫS+aρ’2Ξ³˜3ΰϊ­ΓΪ'³§δπΛ],Ώ|2žSDWβ1”f`TΉŽΒΫTW}ΔF Œν©΅…κΤRΚT±IΔGwΔ +Γ¦Φέ1b—6/εΫkΨζ9su8|χςgΤͺHΒ*Χ‘ˆ5c’”;,ƒ¦¨)ΗΜΉΟ‰K!dήτ†cŸQΘΞXmξvzvOΟβ –+P_ ²Α5μ|Π­0ψTΚ%)’K°ΐ,sα;9DΌ¦žWςβ†8Κ/ΆΘθ4Ό>§οowΙYX”ώo]*Q`6’τΒZ$J_‘DΨξ Φ‡±"Β Oͺ Sν¨ΙqβDλw-žΛ―€‚Ήš+ηδqΣΪΐvaΖε\aϊ)‡‘θΉ«Εa[9“cΡσDD‹z^§8•G3^…ώ#ΓΏC£Bκ [³Γ%I]R$$œ³:ZXœκί(?ϊqJ‘³σ";ξE„ύ$%K0·d°ΞmLIΦ@·bsιφ°Ω80SNJGI> ΐεΒ5»‘”kΣPC„- ³š\Θ’NZš2œΉOA Œ`uΞjYο7νΫλ¬ΔŠΣ/ί½ED‘¨_£§Έz%~.…Eg±\&grԐΰΪj¬—q‚9“”kdLž}¨`κq&-ίwΆmΛZU‹ψχTz ₯λcIm)Hn˜‘žƒRΖ2[Fh‚5˜ΕΐS‡ˆ/Nοž8iΪ$±(Zξ\β ²¨šΰa§›\¨Ε4‚ωX±NΡ(λ-Ή&²Bx²•ΛΜ`Θ «Η½m[E%KΡX4X!K9P22.ΘΟu0οΙŚ‹ΐλJ%r`V'gΉw•]%†oΡnϊ£²₯΅#„δihΥΎΠC2νΛΧ?Λvsˆ7Μ_»ο]RΩ€K-NJK»ˆšœf€ͺβŒqg cΜx}©¦§Ždx/ˆ$a{Ewž»σ—CΩD9J¨NΊΰθ\²3ά»ώ;°Ϊπ²ŠTa0+l‡9ˆ«JuΣ…‡ ˜ΆNΚHIE‡K/%-―X&α{6ΩUwQŽδ&ήγ«ΒΛΛgRˆf₯¦ά«χΚμ~%c³ΠIyώeΟω±΅3Wη6χΘεlξήΑeΑ“ 
ΒgŽQ‡>η₯φ΄‚ΞΣYˆˆs-«Ά<©ŠςPΤ$)Ž”΅*ύ)ωϋdt~{}yywΞv2lήY_ΜQΎ;aηΊχs9…ωον>„Tt–ΪoGβHA%―'«XXλ¦[^δ—Β+1¨ …MΤ?Α°lό«J«O£ŒRΧL₯ρ_9―‡ό›Ηυx2Ξό#J‚²Ν?{6ρΧ2κn/\Έ—`κυή=hhF{™ρύϊœΫ>ΝF—³;‰΅[’}=»ι’Μg-ΤM“0g;Θ©¨mκg5f·¦ζζ|·ι·gΔ6n˜θ,όzEΑ]$–›·uc_­·>ά©7<Ξ_w³‰OeγΡt­Ά/©;ϋ·%žλg^žΛeΩX‘Ω/r;«³Σ~mϊόm·/ΟP›_މvΝΝmH§ ΌΰΏn°»Ϊςaf¦γxεWKΝ)ΰϊpςς`αώa]θΟν.ΉRΣ'»ΎU|/ κεπΞθδΤžΡ™[ΎμVίAω %ώψdΎϊE φ ·ζΘκjΌŽΥ;ΒΑyΦz?r˜φ‰œλ!QΣΥ’φvύΜ ZΗ­Θ€¨Θ“½€=T„­ΣΓ‘έφωύ;ΉΘIt|ߚ¸ζΓρyΊΎ_N£ΏYRNˆσ₯Ί7"o+ΥnΗφCΩWμ£οΪm³8’—qͺρk_Ϊ–ϊXYΌτΚϋMπ³€žπΉ@’jΩΓΫε;=ΒΨ‘2πΫΐιTIηΗι…;Βην)Ϋ`ŽC£™Z*2Ω­jΩ§οŸwjt ΪΗPΐ…jŸ* ŸW ―©μvΜ©Ί(lΎ₯ΏgŽΜκ>;}¨n4IήχΜy‹kκΙχΟ+gν’Ύύ”f6jE_φWνΓν‘ά±ω°I§„Κx /§Owaq§Σί%gŠδ]hΣGΟ=ύέz﷞B½ k»ΨύJωNυφ<)2ΰψΌˆξWRήnς­ŸΣγ+ε ·^οφόNNΒλŸήΫ„žΏ’q‹ sxκœZ~ΛΛΜ—η €βυρΌθ#«ςŽyί‹@Gλ/ηžξρ.ζ'Ϋr’sΌ±yWh„ΧΗ}O?}¨gωω‹O‚{!₯q¦Ϋ%”ŸμDΈB/πc¬Ž‡=} y”―xcz¨Q’†°ί=@ΐl­ яζυΤ6Ύπ!~‚$ƒΒ“ξΝQ;|ˆ]εΆψ]¦=(2n‘ίΠεΎS*z„υ²“+ήD †½Uγε 'ͺEyf5Ε~Ύ]q] `Ξνn„—ν¦όL_¬©μ‰ΨOιζv8ώsυΉ ½₯ ύ£•η3„κMMΕ/•ΩΒNπΕ‰₯†jΜΛ{;WΒΈΒΎ°›>I‰φ)΅ώN…0‡ςi:ΔτaΕ>΅i(X3 Ξo`°<ζDβυΦϊ ωO5W]κΣ\ ³,ΣσΌI/̊-ΒΈX°Y#Žύ}.σΙSr0μRMŠΣYΪtjZpŽΈιN¨…Ψ oeάλ*=§ΠΩ½Ό6:x.Μ§Ιν>ˆ ocŽi‚IζΉkhr‚ie΅Κi‘ƒηαp³GaΔ]7έχμ΄)†C ·v@]:žpά­)ndNyHη„Ή _w%―ή·ΙχΝnυt%ŽΡχςzMϊ΄‚ΉΗŸW»Χ%ΉŽεΪnj™qχΝυ—, [χ`Λό>RΈ{%π3d%N.ΖZ~άtιv!ξ2Œ-Θ6τΞ`(’\έΌΖΥύ±,oŒl·ΫK‡ΉψŽQ>―s±Δρλ@’α™ΊNWΫϋΈ T[ΐτνqZ·πΪQ€ϊ!Όwαα˜22΅5ήkOfΕΕΞ&Ζ€μ:ϊ:’pFΜμβν>­’ήίmκoNHΈϊ~.dοηx’GeλΥsϋi‡ΑσόΚ€’__.NDΟσyD±ΗΨ­OLΖΔ_ΔΣ8¬(? 
'‰oτDœτν•§Ζι~Ά’Η₯ϊ5φθξ^πρdnːš/œ―0»w$φΤ‘θΡ‹ί¦…ΕΝ†βMΒε/ |’ψ|Œϋs’ί| \Iο§Ÿ/7•’ΟΕ‘/aK ίqΰνM³/ΣόΨ4F~,-»BΪ~{VήΫTγ΅a:oΰοά)ŸΘXžz―Ζ­ϊΈ~ZΟoq(~j½¬Ÿižώ«€ύΤN›N yυm‘Υξ# HP5Γ‰ό―qν/Ι‹Ότ纏χ}ΫΣ8΄—ΈaϋΜΰ;›ƒ™)l1Ο‡{[1Γη$ωXΉK―6vΊ½­~žΛm^χŽκΌΙ#z5/υv8=τχ>±[fr3βΗύώY³§λ’›[VpwΏΙJš  g'ατsΑΔμνΠ`\’-v·:ާ_ζ„₯w/Π<› ΰϘb½5½%€zKzΘΌŒοC矏‚ͺ‡ιMŸdœq Cξsƒ‘©τίkxj&Οoυ9‡Ξ`ΨcςοΆEΐ'aέ,1‰γ&ν‡#pM[γ₯βρJ³!τ₯ΑΠ£ΌaÌ$ΆπχμaaΕη"ΠΓ"7Z;s`J{hŸ­ωHOΐΡφ~-U3žzτP ͺω~¦N[œYεΏη9νcΉθ—­g¨ΜN l--y~6l›'ΛΧπΟh 9gE―΄vΨ:ξ‘eͺ0a`:±¨Οƒ1†O.ά-).WΚ/[]>„o'tΪ I… ³ΡΤ±’ξ+ͺ³ΪŒŠ™ϋ¦#Ξ8)Σ"mδt³άπητD΄ ‘Ϊ[Χφαg’ΎΟΨrtR―ξ°ε˜ZΈSύ τͺοξc›ιD™8Π7Tit«Έn¬ΞκΙ―FΕίX¦–ξΨψC|/WΉ?bΤ\«O'βΜ‰JρGβŠΥ*m›kuήrήLύηsSОίCΒySΉΌΖ•pϊ νLnό΄".aΠΊ΄{{ή‡Α½½όŽαΜnόΩxξΩΡe?€¬@Μ’ώE©~–ΰŸΒ•XΫpŽέycS»η.6Η”lγ]Ω·ŠPŠ:Θx£ΤO}«τΞFΰ!ƒ#†νΗτΫ2φޘ·ώΖςυ‰­‚Ρ~ΊΆ5l9_–8} )pΌ¬υ3ι‚PΖΓΖΝΝ ώτρχ°]ŸΣ>Y@5½σjfψδ–xwΝέΙƒ>μ_λ…σρί‡wΤ’MšO_ηO³}βv―OQŒδ2¦p[ε-c:{Ιξέ\ •b5ΞΫ¨…^ηZΉ΄©$ζKͺ20]UΝΈšm)sν΄bR)_¬βή}AΏςς―/«U¨`eΤ<{Ζ|R ƒ©‘:›%ΦJ₯“ΗS±ͺœdΰƒ΄LŠ€n<5°O'ͺί"aqΦΔkUΘ*΅Γ ΄«ΞE‘Όf–Wα2—&Ι35m«Α)ΙpKŠ`Y¬u€AΖEΧr1=H"χΊ{™*3K.9«…δ’zΛe©Yb-+QΗG_Ζζ XHn½p’XΑŠ΅Ly­rΎ©Β-#υΛwΩ“9–9s,h‹"VK6C)L{ΈΞ"Jpθ˜pR›«LΤQzΕά0at€G šns±xQΓ³ΆˆΒ˜ŠQpα4g!ϋβ²1$¦ΖtJΨ«ΦSΓ€lytQΗ:ƒρ₯“¨κ+3žεv΄‚$Ζ¨krBq§BrRŽτΐθ5¬iΖ³{;ŽizΨηΗξένS•˜W*©Z˜ΘΌΪx°ΩΕ£©X|σφ6TQ[»P?γ„Ž*ί²ό'Λxξςο¨ήγ)>~¨Rυd”ƒ…''ΗW±Σ—ό‰œΊE‰(«NIΖΝ•υφƒΤ»D}°Ίbbιޘw™Um5€–±Ζ{i«Œ§ΔŠΑU)G­RDΒy(^ˆR|Ι±¨U²i”™γΐφαε½πάMΉρFÐs-Ή "³o`—μπΉk½Γ²k‚λό3μsrΞuΉανάπJpSͺ ¬  mΠ“N α]qN³¨‘E”} L:K-Ψ΅7XφT[ylπξΊΛΕ΅$΅ ΐ'Ψ‹Μ9Gͺ›§j™+ΖHm£ …'*Ζe…²2œz:Ήj³»"ή.Χp³{xYΏœύζ'ιϋ!udύŽώϋήα]šή–1AKœέ¨ N$­uvΒθ}ΦIˆΔxΖ<@ΦK@5mΜ>‚ΛMnϊΡ²Χߞω―Žτ―l“γ1萸%ŸΊDύͺ žJ-.d&АΎΘl’GVy4nΐr ²-Œdr ŸΦrFe+ή| ―\υΪƒ—;Ε,nΎλ²ΒiHΠυ„±bqϊuΦAc†bνV2™΅ΧΞp!]₯&wΕ+=ΦΗs‰ω0άΞB.ήΣ,ΰhgαJV‰ ΛΞUνpU:²~y°Lzη„ Ήh?*L.eΜ©±ωΌ jχέύcK;λ,ν…(…e*{ΌρAkn\εΥΪ’)_BΥd©ν,9'΅υF„PWLά›,œϊ‡^rΏCτXb%ƒg.be…–.ΰΥdΡ>˜’ŠΠΜa‘aΉfμνlͺM\›(SLΦ£dΐΟ’γzί'7ΛΑ†(aWslR&rQN•™LVr“Φ’€¨Ά*Χ¬‚2v8·2ž Sωζ!άAάxW‚ͺ!Ε’,;§₯bAοyP*Ή`”6B1«N£Ι‚WηŒsEDγΗΒTυSτφXϊυS–‘P„’pV‹7ΩfGΉ]%r[E2ΞƈcΝU’2κΘ…CcfσR6IQÁžγXOληυŸ›Ÿλ)ŸA}ώ½7 vρd_ ”…Λ²b₯ŒΟΨw>TBΆΙ9VDB\Ε‡tŽ`t"ρ±E(_{ΤιΛ' US₯τ%ΊΣ"e‘‚uλšƒƒt+5H—BŠFΉ‚iœ6YΙΩ-γp6ΙΕV‡πβΔZ 
=έδ‹'ψ±Θ„pŠsΓ„‘:¨ό`kΒdΛ°ή3E«eq>š€…†ΦKŠΥͺŠγ‹χ­ί#ς t!CΕ‰&iΈι˜d6=iσΣκ½ΕΚασ©ˆqΠ–ωαvDλχ|’”όρη·=>#ν^w›ύζα\‹rϊŒMσhΫWˏ]ϋγnΏϋϊΩΞ{-wvΫχΊ}œ›`Dήq^™)2Ιςμs°£Ϋό=zΡί₯ρr†ρ¬E T‘TvY‡ΰcΥ€ΗIΤL¬όMJZe’/Ξ±εŽ%XΓ§ν¦jκ χγάλ}Ψ Φ\69ηκMλβ~…E‡œ«δ‰ΩΗδ™J^“+BΘ.£Πx"M+`Ώύ±dhχβψ±ωΎυ}oKƒ@—¬›JΉ§PθΊΪBrˆ@'Š4Ξ“8VCj#qœ΄§$β(›Ω¨§mλ₯(ίRκ§Ω.žg[qφ!Bk`[)Όf'”‹CW"hr#ικ’D¦R€$ —"zΝJͺiμ4{Ž-Š™DeW:Ώά˜3Bς1ΪΟων!`6άKλΩM"~»2‰ΤΈυΆœΙ‡u­ωΟΤ‹JŠ»o&/”«ΐœΎJUΉRV)™₯lIΙ$δ2”˜sΚήxš€‘ ’qΩm³ΓG΄²Ο―O€Z|šqeLIΙΙ2Ο2Z ¨ίΘ…ˆΨ/ΩŸqIVΞ₯Λ*†[ΐŒ;n― Φυa͝6yγoj§0L>{ Σμ’ FΈ~>Lϊ­μΦυέtw½Έ+4`3G­ƒσ.Š Θgؘ…γάͺ`\‘Jαθ˜γ₯ ΪF1ΞLIZ‡7Bzx„ /# ν1Qmτeψςμ1ΏΈyσ|Ηέγa™Qυx΄°Tͺ4@D–\•vhΜΕ' ύα™E Hμ€:D5 ›ΎμŽƒλυ­?H³ΈN†š*S†Œ ZGΤFL6ͺ14>‰P₯ΏD.΅Υ@Œ~Ί¦2/pύΎΦa·>ώύ7ϊq΅ΓοΎoφs…Ž)Ώ}RΔςEM^;‹ΦΛΐ “:gΐŠ~²ΔΘ¨Ά»±)k‘’R…Eaμˆδ>6'DLsϋ€P£€Ύ ±x-¦dͺ“ƒ„-Q-6A*Y:Ο’–ŒCgh©B&’ [ƒ_‹& ΄ΓθtσώυZVMX·’GΪΐΝγΫοaΒqρmύΤ ϋ ΉΌ[Φh&kαJhRP°η² …š@«Ύη °6S \"Χ¬VKH‘¬όήz-^;=χ^Κ ½Φ§¦δ;Z `BrESΈΡ!;ζ‹vλFSCi«&ή}(hιΪIŽ$³―•QώΊŠXΙ!~ͺ†A CfΩΓΕ(V—$ƒϊΚ1Ε:‚Αzρ›3Ω(eΖ UZύ6> Δύrw."/‹ Υc³1JtͺΖγ„Ωθ4Eν$μ>GΆ‘·6UcΈΠΉŠκ „w* uΈ¬*χυ1|;¦|{~' ΌΒfσmσˆoj¬Θ{GDdU¦@ϋdΛ(§†q°,K“² •xˆFΫœ^B­Β3LOQŽ+]ΕH:εELk†IόϊςςΦ'[z•‰Œƒ%  ·EΤZP*6©Š# ;–?ΧY©,πOP?Ž95R|šΒΣϊDΆί6Oeς‰>΅•ΧΧb΄hΖ»Β[=…νϊ”W•`ΆεFιKΐψkς7;²ζψηΪ€ΞO˜δ/Ϊ“ιd’WΚ$8]χXΩ―¨Π•El ͺ ¨g²s›Βͺ¦ΐ #ρc QJcN*’@WΐQ@Φ,XŠ λKΒθrΕZεύΥΥμ³5wWζqΊαγwΡΣHKTΨdR8Ώ, dΕ+°yΚΙίK$:f(j(Ϊΰ#te Ž·΄V g“Ώ1œDψ΅ψΦΓ2©ΏΧ.Α)^ίkŒγίΓ칞²\Bauˆ“bG—ηB^ŒlΨ›έž~Ύ»Ώ³)œ#&L†ΡBi–,uͺ…Œ΅¨qC%‰K5`%Ο“Β`ΤU Ej§£ρMfόΓ)yb^Ύx#{»Zcρ° .—<䈲Xo­„‘ŠNΝ5·tXW:Pπ'eh“Β£–A[“ΈΐœΟ4Ž=i&FΊŠ›όvw1$½O°Χ£€Ω©K†ΕœΰΑpοK":χDœ¨)ϋΜ,ΛVcfD2λ― χŒrU&ΖK‘λaSw 9‹Ζ±{-Nw’FξcΚ0 ά£π5–…Μj*ΦΚ\δ]1c„ϋτBφξ+T )%ΖΜγΨμ΅£ΖΡ0(όΘΉWΔΎ πhρMe!Š0Τ5@jβίjΗονβā1Β†T“’Β6Ξ:y%:”ižΌΔώΣ&•D›΄P©Rs"—vrΞ³«Ψη<7·μž‘YYηηpχœ˜3ζh¦T`{ε$WΑ’mk‘=’™η)\εb–ππ"+—ΔυΨ“—!σϊβΒ3ε—=Όβw8Ζj{Gei‡(œ­Qi‚ΓΡ XNΖΥΚ¨–!z"m΅.³), Ψε0ƒ#ΊΟκ¬K#V_Ώζ)›ΰŸΌ@­ŒοΕ>fdυ^>ώ} iφπΈIΗBυχHφΙΈΊR ³ΎΜ(βϊ$3œσΔ±]*πsŒΐNδ*tΚ9€R|ΤΚdŠηλ!¨’.PœRρŠΐPf,–^vγ=4ιΪΟΝξϋϊ˜bOΠ6CUΏ>«ψ?³Ή~N―™ΈyΊθΕ,_Ό,³¦\»(DδIΟ9ŒO εˆ₯uœκဆylŸ‘©M‘w†x€‘†Sπ(ηπ;δaύτς8R5p}Ώ}οΙ #φΈeΪ*Œ< •α`‘%GΥp†Sn€ηΐίΡRο1¨›:Α’°bΔΣ*bΎsυΡ9ΌΪ δ-nZx5L+­()¦$qJ+‘Φ“ΕΉ‹XέG) +ΩHU” 
(ΉτΥr—s$ηΛ1z35ζcηΑΞ_^‘ΫBΤ”T`³;C)h.e™uZs!…wΠV!’SΗdJRΆ‰x œ—2Ψ’jˆΝQΞvE4ΔF,Ϋ‡—m©λŸsς¦π›tχ^E«K©,U@nˆR‹}Pc”*:Η ₯.Βτ±”YDπ†P–CUΣ•ύΤΰ^^χ‡†ΦwηΚͺήzκQ¦©kO₯vO9R°#&Ε‰t‰yμκ…艡’@f@·δ(‰ ON‚·Ρ6,Βχ―„Š₯0)Δ|¦ξ•αΤz…%QUW FΰCŽ 2^έ$`;ιDD-ž?Ρ8‘αz¦Πέρ7Ι#ž5fc TuΎ„ƒ…ΓɍΦζ‰RΜiλ „g ͺΦωX{ˆ ΑtJ`i )Δν­Ρ)hΪ+―ž^χ7ρŠ/PHΡbΧ{Ή—ην&·ΌgKΑB•«d¬£Ÿ c#S!rnyŽΊq“ˆTLeΑ¬]r9+J% 89ΖΈ>πΐξ^Χϋς‰~hσF¦ΚbWΏτR΄„χί¬.wέZ%sΡ#­Ή²{{Š›ΗΥˁλ·ρ™kύεΕzɍρ΅»xvχω‹.K΅˜»6Y8Ξ―ΫMάΐ²šK :<‚LΠ‰&fρυ?εφϋ,_σ\SLiWΌ <ڈΣ«Ρ‚—`“–*ΌΛ(ΓuˆDkhŠSY™+Ÿcο§›Η·|ΟWαTεΒXΠΌΞ‹#vΝ( M•H@Gd&<˜™0Ω%s_*ΉωyΦ*.:Ό;D¬}2€―žrqXcˆ’‰Ό‰Ι:—,ΰSΠ‰»Tɘ h1λκΗχtρΛYh«FπȈ%°k%: Lΐ΅‘ +j2‹Ψ—ΥιΌt%Ηθsπ³kίύΏΚSx|„Χ/¨[>y‡e=Ε·’§ΆΒU]²€œΣ€r70Τ4μΕqšbtX<†UΔI,Ή¨‘|‚ƒ5rζS»^\wsβ€YΌ?« ΕH:AJΣΌιOλŒ3‡c_P`+OFNΤοGμ<%}΅€ζΤχHδT’4ͺρ0WM^φ9Jl0 ‹m‚ύμHεΖ•Ρν7―ZΊ()'_!>=p NΕbΡ²aΠPQˆΰΉ1η/Χa›(eΉd*D‘-£,„έ!†| pβζ™ͺ0=Gψβl!€°’$bή“Š‰ha;G‰E…9 Σ$–g)<©‚λ3Υ―ΰ΄ΪT ¬οaτ@uνgΥχ†]_mΛΪ(κΆΤaΊ»}I /μ·mc³ ϊ±Ήι{WτΕΈ<Ÿ8τϋΥκ-#:Β*κ‘Έ³)#¬KΔ06λfώτ)η›~:τ‹ψΐ΅Ήk{ΉΟΛ{―ήJ ΅}1Yͺˆ n~―%pAP•άPŒURotM%8ŒΕd‚š›<|u₯οΫΝ±2’ΛΝSϊυ†i*)…wο«jl\[Ρ΄«Sδϋ¬‰ξaμR>T΅iŠk™4l+Δ9ρ]Τ¨ Ξέ`CHάρš,ςψ‰IΊΆΫƒε©ˆΎRHΧ₯‰!·ξυkίΎ“uŽ΅F‰ϋ_VGmNŒ“LΗgςΠsO£έRΎ‘ΛGηO«τςΪL’ό4Ž–U»Ο§Υ;±ƒl_ŸΟt„1δ|ωzχ©k³ωNΕΊLW²Ow λRԞΖrΡ±kœΌvΩ¦E«C][Ω―Ž€GΗήͺpξΉ3–ςκŒm…9ΗΞ“ΦͺΎe‡šC5Σί/78'Oeδτυ·Ο'ΆK―9αΏo“œvΕνρνΔAΣϋαb́ν8]7xρ2#ͺ—$ͺ˜˜”ˆ2VE­ΚΙIς+g*žŒ"WΟ]₯ζ°ͺp—’Qc^ BsξΔκ$Ή‰¨ Ο^ρb"LR₯+¬Dm”d\RcλDEΘ>O>sΒπ©Ν‹sυ7kΧNσKΑ^vBfΔS  Ρ"dT‘ͺ"“Ψg‰εKsUˆE#yΙ„ Αζ­™xωϊϊηŸ”Δχψ„\ΆάY―’Γ»S³iO{%‹€΄·™(xJ6Dž¨Η4uN₯œͺ©ΗάΖ5ΔΑφMš&œԏkš–]ίςβΈ»:­€K)›D=rTr5† XDτΙ`Χ °j9ΰ­ΞdGΜlΩ„y€BσͺI0μνo–co‘Ν/bω |’ΜΚX± ’²6ϊ σ.U"όεZe²ΦŒ3.ZMάηFj›EΔ”‘σHφΣΙmrφžS{¦]Fz²¦θ|ΥΝH‘`―ZΌή-ϊ(]¨°K˜£b;¨B8θhN|υΐGjjύŠϋεγf•yoVcΦ“Ši2p«€{XΞV;_ͺ—s*ΑWGάVθ)•U(Λjp[6ι`+Jœ{:ύ65ΚΎ©κ—Β5ιŒα”ρXY0Δ,Œ―Α• 5\…†Β‡ΊΙΔ(e}Ι,°±š·§¬οŽ”gD1θ˜’Te6vœ•R»lD¦΄)ΚγΜ«bΡDJ5γΨΨ±ΎzΗμκΛ„šε­„ΘM€6 ΎHα•ΤΨTTx‰Up΅ž'‘qȁW‘5˜ΧŠk’‚ΒpΝiόν7΅φωθΜβΗƒk?^\ϋΰͺ/Φe­‰kΝP)™ιXp–"“ήϊDTΑŒα˜δμ6:>Κ‹X=oφ°«ΣMιXaχφœΘ$^ ΗτΦOdΝ3R‚ZϊγΔgΠC8>žCΦ\ΓΩν:Ϊ|όΚ‹ΞΘw{Z?ύMD€Ά²©hΚ?ώΨ{³εƚ#Iψ~žβ3Νεˆ²ά—‹±y-23² ]άD΅θιΗγ`Ξ@ς@f£n“X,Vr‹pόpΧΨ"”k€H;Ϋ&_šμ°'ap THtd‚Γ‡ΏφΈpt†ηGπ&H‰K5 V–ΑοΣπ*(©JL,eθ±ΔZŠO:DΝ Θ©ˆΧοψΓξVPEZahyν9œήν–B'9‘i 
υΐœ2bZIϊΙ‹eI–†Ζ„:"Cθ&Ά»γh—Ϊζ2μςΉ?qd‰Π³λ±USZ V;Z mύgۚ,t΄$ί‘Ηί"Α1I ώΛb*ΐΊηΆq‹υ­ςΣΫζωβ:‚:όΑγc9{§SξΊϊ’€ ‡R =ξ"h‘‘e5—𻫕»-C0W¬˜€Kp<ΡςuνψŽέюΞη—ͺYΤ ΐY4© ”ΐθluδrΚIۘk \acΜ…μΕFjΆMbˆa$RˆΪžaHτπG.―€.˟ηίaF]δφZΑ>`;:΅Ονύ7·Ός^t}z©Θ½ͺxτ&Υ³ΧΩ)Œό«³€žρό;λίwω•ƒϋ]₯±KπΏώφ“^ττv_ŸίΟ5h¬Ζ½Y³Υ«Ϋ|@•ϋN uΥ¦wΥηύAΒ₯ΟΣ/Ky~,›lυιΗ―ƒέΊw`ΗMr_7΅oο‹ “rjϊϋgΝ©ΧͺρJπ©ςϊΛ–™' ˜ŒΙίάWξ7Q^šζ£Mϋω·ΗŠΪ:VO„”Γ‡μυ μ}γUώŽι—ξγˆ4|θξνn{Ύ?σ *Ύ«Ή»MΗπ˜ΉΗq68gƒsdB}QώzrΚg "G;θΪO΄]ηγK"σ‘z·sυό£aύkΥ}:Ÿο8ΰ5OŸ‡»όλ1β(hžλ_<`AΪ’ΕWΊDŽ–δ8ΣLοπώώύς@‹§Ο8τL[^]?Τ~Ϊν€όηZ[Nzs–βε6ςŽ\τ^ΉgŸ__Ώ‡Β&ž/n=£? |Ώή&u!']<%ΡΚ‡σ†¬νέg}ΗχvηΕvΏ/wω]^„-B8œΗΛ~ »™Ψ+Υωΰd|dCήι?%ΐγ‰/­Xα‰ΙσπφU©c†#x:χš§ύΫ3Ν‹Gΐrΐ‘£ΗΗυιύqΟ€bΚψύš‹?KΓ­ψΣ§ΘΤ¦4ςa±w•ώ°ΨLοh•δΉ2·ύέΆ/βlΑώΣ&~Nψ+^>ρβd:LιF°α“½/ίώΘ_ϋΞΝτ†7\9Εe±αE@c›»­ …:€^94Οuσ―r΄YΔαφ_E—Ι-Ή}”ΪlΛUkŠχ•₯;~}’··?k”ςόmIΕΏŒΏι`yΧz\/―όΆ‰”G=Ϋ³…Μ©Υ?”Λϋύί,~m ρpz.rΚάεΌΑμή•κwόa™ΓΆε’zΔ›αmΉ]―ΐ'6ίώ@ζ½£›ZˆΥtμ KΞ+ΩμΓΜ―Ιc`FξΆκτ^ΔΌήoμ&n,=νgωΜζ‹υΞ’ΥŸ\?‹’χΓδΧ¦}œ}ΰ’aV~2žΔΌ½„vXNyVφ©{Ζ5ά™cc]u_8O»ΜΞth‡G/ Ώ€svηβ6ΛώŸ^ιyQΝL]P_XΘskχσρbΟπζEθ† dWnγ½Ώ~ζ»“φŸ± Ϊ¨AŸ½hώΪTžk’ΧΐŒχΟo#­6›¬uΡηmd5ώ?ΫFεηύο ³]Ό™ξΏΊΏΞΌy쁈-πΩᝩAΎ?`­N:μΦΪx―bαΌd‹˜ 98φΤ”Ά.‡DIλ”œλDΡϋœ<+ρ•΅:yeνTΒπωqŒNςίόφ|Myρ•i5eFχVΚtو³±`ΦzaGl₯ΑΗ’]«Ϊυ©φΪD_{ΏG}}*;Ώ+aΪέ]±έί'¬ Ά\*nΈu~\χf€_ΟΏθΟ™ρσϋZΛοk@πΎΎΌςΕξΟ9ΌΧ}άκ6{‘Vj9ΓΊ¦FbfΦ|J•»V¦†–«θ06s‰\ED_¬Š\ΑO΄2±φ'’ΣΗΕkϋ φλ―…>ψΞF‚gΉί…±δϊώΊxϋst1ϊννΏœ©ΧF»'ϊ›³»©uξΑ¦.2”Ι*ŠχL:‹ Έφ=¨,“l=G₯#’iΥρΏ6׊ό΅'jˆΤΒÞ·S_Ϊ½ό“ŸήŽ:9ŽwυΘv<ؚm±j[.άΪ^θΒ8ύΫ>φΡεΊνœ>ˆέp±ΖΨΑΤίK.άήsΞΙ9ΎΈžΏ/\?ξΰΠΐ2yώ Ακη>Ο+όί)&jV³XΒ&Ž` ,nΡ*X“{i Έ yΚό?―ύΑ οδg™GZ.Φ³ΏŸ7Oϋξγ; ¦©ΘΑN|_Ηγ3ΨwήKο& ʏξβ›pΧοΟΟ?Ξ²χwgΦkQξ%_ίΊ+5Fς-ΩΦs·‘ƒ+u%"Œ©† W”Ι©ίu‹©n"³Υ6κξŠ4ϋŒΛΔ_&΅ςΊ|ΔζχSEΧΑƒt©AWΕδR V{SPΨ|΅AΏƒc­’}ίUNΝe1fΥ‰¦όTvŽΠkI“FξσOϏGr σΫ{γˆI‡Šx—ki¬t^:οχnE>ΰΠ…R}tΕؚ,‰ec3μΌνΆy6Scί`Θώώ΄>w{φ;WΊΓ¬}Ž;wζξ§³[8+iΗΘ[ ¬l­¬E6ΤC0FZυ[kΒ|ΚΡ‰jΆήQsξ ;β–skϋΩT"%Ξ…½t“²ι=…άΊΛΪγΈVT556ZG‘3΄μλZSώ丌± Ω²ψΆxZG¬-Δ•»ΪώύΧj$›οwγ{@x‹βε«;sz8ϊ› ³όΘάΝίl쬨7̌jΦŞI―΅S1φΨj•†E]²‹©ŠH²sY‘uΖ›(’Ό*\ž»ΝƒϊαάνχΪΉΫ3WΜέϊ#ά\“'6L‰ }”†σ$RB6œ+’θ,ρκ±z„οZ€ŽΤAψΩΉΠE±9ϊτψo°…X5ε&”ŒΧk1Τq*UTʎ½ρΊγ+γsR˜—Ϊ5~Jp³ΘQ˜›½Ϋn’‹ ΰ­ͺκ}*)ΆB"BΙtΡ—BPŠF”] κ‹‘γ0‰‰¨οˆ»“gθσƒœ•ΕΗ†… v²N¦9²Xu›Ϊ*x‘'§©GΣΘ[«SorύεlΥ^₯― 
r}+pΌ³wωΖ€ή{Οih:·‚ΐ‚ D^€έΣw$“±žΥΈ”{wbρ Hθ2ΞωΈξŒž`ϋα »ΉϋΊ›©5δD‰ήeŠͺT%φXΆ πΑΔΪtE‘£Œˆ'T©&)έΚUΗϊšΕΑΙpΝμ>ŽΚhΐ? ΰš4ӚΟέbπOˆμVΧβ3 T. Y$Θtpπƒ7‹b›xσ<¦ U'NR΅θΨΕ±'Z­°”VΕfΧ+ΰ²θš^ πTv¦₯WΝΐ(€šqZζ|Ν™΄(A·ξ½Q@Θ$μt­5‰ϋd―=ЍcΝΥρ;ρEEΖlQ MΛ9\tΝ΄¬ζψ‰iv°LΑl Ρ0ŒΒ$ ‚"θ…ΒFκΫ£΄*ς|¦tΠ+Ύ’Mεl²ϊœΖΛΑσמϊζ Λ«›“#Q‚ω8"qξ6z$bΌš’οΜYYη\@–Bκ•ΔΑj!–¬ΩΗ[Χ«BΏ’ψ.νΔowTΧφΓγ4» ¬ΈΉφͺ‘©]±ΎX°”Tw‰Εˆ±tπw#φ~FwNΐX₯ci M/goj§&47ΠΦO=•Φ­Ε dŸ|V1°¬Ρ˜c"œ#N| !puͺiςZq¬ͺΛά'>ύsήPŽŽψό“OHϊT| .i|ΧΩ ΑŽšσάB·Nπ’r€PnΓ(Τ€°«oS*mΟαχ4GžκΛsύnΝυ[c‰ΛοI―Λ±•1-ψW~{Z½xβίgtw·«‡#Ν―’šq|δžO’uτΝi 熃eΗΰi`¬>ΕnέUΒB»`<˜‰ˆ«N­α³ΜήΡݝFΘ›’+œ Jq]mց5‚tX`/Ή’δJM¦ζ–1L͈qΑV’Šeπωγ”jοσ ύλΑ+Κϋ·“d9»T²™c'ψΈαs%Γ`ΕF€’|ˆ΄ΆŠ=zNθ³F‘}K₯;Α˜Ή~| σƒ`ς­SUτx βoC*R) πGT ‡ν„l”Δ\Βcθ’CpXmΫτ:¬ŠFN¬Ξ¬™_”΅9›“oΊ΅ŠΘ–³RΑͺ–I°*>°xΩ1b―aΉώΜ­ιH€π‡„ύ\q~|υέ\ߞφžΌ&‹hΛcώyv²ΦU5'ζTσ?‡J4 b—­Σ˜A. ΕXtoDbψ—’“5`έ\*&NΜ>‹5Ο―ψˆZvώG˜{Χ (¨Q§dMˆή¦μ‚λή‹άy+Ο•š1”•{ιψ\@πH!VvΛα"v―GλψςγΫώ³ξϊoœYPd’Wnwύα™ŽZη/Uμ xG7˜Τ’d‰0ΰ»UIω_PͺŠMv>ΆΏ‘Π)ˆΕZ0:}ϊηyy²xα“εž{ΗvΔΫΕΉŒ•uχ•,pξI±+/…ΙΒAvη˜’[c쉦0φΌHΫuο;“ΡγΡΞ.^1BŒT•€Aά™Ή„Šε3 ζΒή°ωˆΑΣ²)!α$`£ΰQ°° ϋνΗΏ¨ΡΖνFž`e«φϋMΩΥ₯α―ό­'Αι†P΄£ŒͺE§&dӈθ^+(b˜wA€ΩŒΌζβ#&8(*Κϋ+ ŽvDρn‡σfdϋeυ'v_³;~ΆEQ8°ucΪƒM₯œu₯e•|8G“Y[D‚'Iϋ iςΧωͺt-²~Τ1pΏ.υΈ< χ#…iφΣ€ΒΔχ3:°dΫ9€ŸuD?JAΖΠt ΩV8½G*‘ ”€$:ϋ)™o)-ZpΙΥμž4ψŒ’­ΫXϋͺΐskυ¦Δκ£q@u‘4`ŒA?ΰψVoƒΨD`Ob›N}ώς£‰Ώκ‘°μάaH€½³Aπͺ$SΟE,³ Ϊδn―ΉŽ­†¨νm―™ΐΎΐS#œ@/o·uρ2rΥͺΘwjΎγΧλ½Ίφϊq¦6%σ¨ίθίϊ.%ψ·&":‘Ϋ bq΅=ρ+pΝX¨xŽ:"Τ`™LEπΡ₯«dΕA$ΈVCTβ#41’ΗΫg™VqH½Ž˜Xϊ:• ŠξΉž"±Τœ@ύΑAΑ©rΏ[ΰ+oΔΖU^&Wγ{z“bΡη'sƒkPl`Lšj:‘±M*°:vΈΙ©wWΌ§^km=¦ riœ΄REΉΫΆ†'ΧoyΚ™ζ^@-Νш§v‹%+bό;ΑN½9C ¨ΌΫ¨ŠΒΘ\²ήi|ΤjœRΌΧΞ΄8‡0Άϋ‘Gο[,O²ΆibοΞ‹œ)(‚3Z#“§–]©JnΫ”‹b™@­šΚ)!ZŒ_ŒΌl ϋe$ϋ–pηFΌϋΉ[―kΞ%΅"°Εjφ`Pρ$Η$₯M©=ιb]TZWΉš­Ε Ν0©Ϊ'xΓα ωfη ΫΏψΦδ]αΐNΉξqUΜΏΗ7w)ύj `ΧRς'Ήj` *U†ˆl†΄/…Ψk-yΩkΎΣ­š8ό{₯πsb=57όs!6…ΖιU•«k*λλ©X˜*―΄i^+ΐ’ζ8w€œ¨·2žϋβ7·²x[ςΫ_+΄βψΧWκLu˜Ώ‚‹΄%Σ)ΛρΆR”d¬IΙ€θp:Œ”²u ’εz/f«|εφ¬ΙkcΖΊbΤξϊQου ƒίΉ=npΦ4aŒͺ€Ώ Δ8sΖΗ"ξ>ΥN%°ΠHII˜±»QU³ΐ½¬Τ*€Jαšαξ~ϊΦγœ:'Δω««Š‘V/`υ±ΛΉQΑ²Κό#7Tdζ”o]θ΄?)§«σ·Μ¦fuΣBu(€γ‰™Sq¦΄ͺͺU,,W ³³WS‘ͺEΠk:6Paη¦Ύe“3Χ@§(oΪk +όkY:M­v`α©+ί”abδX½λ8ήΒΣ{¬˜l’νξ³Oͺ(η6θ΄ 
ΫΚνιOhΉ‚™β#†€’ε\–IΌΜcͺΐ^Αm,θ€›z&{yx~{;u›rΔy©Žψ€€#Ε»QΣd›Μ‘@sk¦€₯XnΐQ r`U€Λχλ»ΑhΒmΏΉόωm­΅”’’ίγ/.ηŸ–Nώή[OVQ)¬j±£——[0 Φ•ΚΖΘ³b•Š°£Kk!Δy©:?™Š›Α&Β‘’tΕ›2&©`‰«γ.ecΘ)Wi»πήT• %|8αψmΨΘ O­Ωσߟ#psί1—Z‘qfBκ+Rͺ‚A!vTuˆ'”•σT-E’ΊQlz\qΏ*}y_΄»ςόό°‰φ_΄Z­ΨΪξH†(νd«KŠΙ yδ‡η“PλgGΓρuΉ#{–RCͺ ϋ6ΚŠ5b% $αΆͺmNQΖΚW1ΘιΝHάψ²ŸύN4/ pυC·₯Ο±J5¬ty)ˆ Τθίs•K ΔfpΌ$Ԏ+F^Nτ¦θ›C„—Ÿ4Hα΄ΨqφJ¨] Ζ΅Β+ΟGFΌ _Κλ2 c,©w-(ΙΥ$έcίΥβQλγT°Z‰JžΤ>Μώρ[Ξrv΅e°šΦ@ι L³Ή5$cCš‚Σ(/ƒή)p ’κ――τg«Nψη…Ϋ½ςΣγΚ=ΡΘ[ΧμΪR3Kν;vλrτX˜θz)ŽLbΓΘPΐ^rΨΟښ†ί•C~ž*ζ~Z.ξ₯eυ/yΙT{B˜Γιέλ©ψ>φbΊ4!Ϋ?||9{ρhoJͺΐ”«'ΣΛ;_to•΅ΥΔTθSΝs`υ‘Eaγ“²ν9^[S ׊|/u™Ϊγͺ™Γ­h‘ύZ΄·sqύψφ˜Νήϊ°*‘>lVŽ΄Θu8 „P)[³ @ ={-ΧΚP΄δ+ΫT/ΔΎΥ(ξ0uŸLΎΊΓtόŸ½υ ’. 2) _†«”κ΅€ xFΆP·Rp©C.‰@|6p2d”_ιήόJφψψΔ ₯’'WKsσ›Vΐ³ΑjΐŠ£'1­-iΆΡ˜&Ω£ν‚,(ΐIΜ2Ζv/Ζ―μvqšG³λKΥ”4h0`Β@T ±.ŒS,}``7ˆΑ’’ζΐ¬kρͺoJΜΓŁ˜“agΙΖ4JcΚ}•§NPdpϋΨ]― lΣϊlμ3ΨsΥ¦Šξ[#d|U(NdˆM<ϋ½Bc=δˆ~l°λ}=3–ιo©$@Σ”Ψ&·Ψ¨g<Ώ₯‚T™Lx A βo@Ήa’Cσp°ζμυν+€λΫw~?Ή™=$;‰Η³MJeΰ΅ξU—¦V@ΕK.!Zέ{sΕψβ5P» %TΛΚ—œλu…ύιηhjΫS8–Λά/Ÿž§—yάΥ?:σ³ «ΰΰ’MKΔ)Τl ΰjcιr¬Ι«FφIE_Ε¦έ+–Ηκ2„’ρbΨ­έΨAa¬¨2 ₯Ž£SvtΓ±/hΆσ=Z»°ύφxβπέίΟ―Λο‹Ύη”u‡ΔωFΏwž "[ωγJΉcaͺ[ί)dU\0ΩK[HA‡PΑ‘»ΰ*ιΈ ¦Ή8Ο^jCΊeέΉZ άΦκΕΔΉŽ¦»Δ98€Ϋ-Βΐ‘τLznFNœη«=•Jςk*Ωΰz³@ ₯‘’G&yœ2eq‘Ν±[ώβ«tž$e.ιδ ƒΨ‰κJœλζΆήχpδ 0‚hœτΉ|£βΘψ/ξZνΪuΉ F-xg Κ^π ]2eΨ¬NΨj¬Φ~7£ΙύνΧχEύ~iZGΤ :`@TΗι Ržjœξ±·td\‘1Ήζ5x`Αt°žŽWεΈO©3μD—FηsΞ{޽‰Ύύ\YyΑ΅YžΎ›ΆLθ‰ω\cγ XD0EˆHUϋ “6˜Xη”oΤεšCwv€bfρ@εσ²Ϊ'šWΗF(_π>ΉήΦdΤnδcφ#ΫW‹ *[ϋž"Ά"ΎΝ―§ψρύν#Φ͟™η7ιϋ“ώϋΟ»•/O|ζθό qβ‡g/ΕkNΤ8ψή;Θ¨PwIιl΄`±Dˆš5&Ξ&Έšˆ†K.κΨ³ά…:ž(›9*H›ζAδω›φHκΥ²Σ-™N4QΦ3‡¨t#ΆX24rTdΡυΰ₯0vκjjζ(ίR•N-­εR[εb8hQ―¨ΰ΅‘₯X²Εδͺ«h ορg*λΠ€―μVj€ΏΣOά‹Y{₯(›Aύ~|Έ΄ϋδGŽnPΤ,ΘWば(‘ˆ¬„‘ Όg@ @ά₯ =€ζb*d‰ Π―m`σ6Œ'β-7ίσI₯Ε@ fU^LƒΤ+0Π­λIχ’ΘΫ°{Γΰ •Πpμ}0!FχΔϊz†sυ8Wˆlζ &ΫTΙΌΉšΖ₯{ε©_+ί’1Aϊ·Tp(ΎuD ’Z€ž @©F½ώtb˜―Τ'r~ώJJ§ ε₯›ΠτXΩEΔCνYpu‘_Yk’5Μ&“¨&α—F0Υ? 
ŸώχύP%{ςΌ7χJNVs―­2±€%$«|ν*ω.šψ=F€Œ`Ω(‹ΣικAΎFW Ώ^Πͺυwν!ΉΥ1=³b'…σw κssΘ€ͺ*.X):εΒ¬ε‚©-Ά('̌Q{ηΪΟόΖo+A€αΪS‡£†§ƒΫ³ΡZΨλƒΩZ ΛχΎΧmIΐυ5νΎβžY†‹QΡλ`GΤA;©R΄Κέ ΅Ύbϊ αW-ςωn37XǚΫ9±—ζdύžœ˜Ωm‘™Έ%©Ν5G‘· ₯GœτnCΆCh„!o K?~«ιlΏOή{Πίψό·sŒ61ΛΘνϊ{γ9ŽΩ¦8γŽ^Ώ=?™ΟΜΙόXΓγTZΣmKΊWsγn¨΅,ιΩtμ؞£Ξ»>xb/(ά•tΰγΫ’ίU)šέ7D)Œ6 ‘W μG.ο$n)΄…“Θ\€™Ιg!Nι˜q‰qφMΥ§ΏΠλpψc·©«6±΄ν θιHΚαMΚεl)6β˜WmΫ•A0 ΞRŠb™-{/pτΦέ+M‡ΓYp³_y΅lΌCbΠCΖιV‰₯©Π[ο³MYTM”Ί9―B’RsH˜ )s.A'ŽU{5r'υYoί.ά?a©6dξΐχ X Y‘ŽEFo TΙs£¦Ί#ΞE©¨IyœtΠLΟ=M\ι\'ΞυΚϊΕΛU·Ω½ :ψ‘έ€μLj₯ϋκ³re­@;d+%XΦ)O™KjŸ£Θ?eφ|*ΌΌ­δΩ<ΥίΏ:ŒnύόN-G1ΈνwεΉ­ΊFΎΏξΎμ‘χλώϊΫ-·¦Ώ'κ©ΗžΣ{Λ/,i-ΏBμωύυaΥΦ²2«V£V GψF ϊ΄wοwϋ‰»&έaƒa_œΫZ‡μλΣ¦”—κn>ΰφ~hAͺK΄™δ©!°hD΅Ω―•XyΉ C!Ό ­W1…#€ΊΪE~I#€‰jXwΪτ (΅ΨD’¦>Q~,η{8pμΉuΏΣ›|Jncλ}M@YΌπ#2Œ:Ρ՜ϋΚEtSך0]ήS.=ΪΚΡH΅©(7k P”νSQΆ‚δEπΨV+@ΔxDqΘ»ςή;ΏξυΠ΅Ε7DΙ½oŒΆOM ώΚe;©Œσ»CRIρ²GL• mR°ΊpΛ]9 $yο(½°uψΤU’ Z³οω;υEYδ|ŸώZ ώζ-£ΚιR” U`>•"ΚjRYΡ ιχRΩPcsΗ$© Ή9ΧMVΤ―λ’Έ Λgχ›!―ŒŸΞοΛη·η{ΑŠBNcŒ›_Nηΐω*juͺ2]‰%!XR"Ή―tͺƒa–.Τ0 Ώ°A#u[=»]Ο·Ιx?‡¬‡»ΝΓ#ω+¬ίΪ?>¬ZVΟΟάΘ»Ϋ ¦LΙγ‘QYτίTs"ι(’Jμϋΰ-—Ε`+ι„ΨWA΄Ω WΥΧ6^νΧMΩDS_+˜>­«Ϋχ§t‡\&°Κ,ΩL Θ=PΘ‚φΏ;ψγΚ?2ΊΓΝ‰Mυ.فk—qχ)nΎ˜IƒS*qλ™\οΑz“±₯εZk[)tΘ%³ OR‡ΟW-ζ^!¨ΔςNŒOΧn»dΣοϋʐR―ΕnfΝ―Ο‹σΕ©εΛΫΟ)0IdρσAΈΥ8-Ή%%š8½*/ύϋYy‹MeΧ•by qΎ)K,%e7•Ϋ.š£Ή=?kK{ϋI£RmχA*)kΟΊ§$ &iKyyς“V΅χHwΥ»Vπ₯x{‘nMλ‰Ί…ΟMΪ•Ηtn>9`聇Z0KΕ2˜ΌάrΤβœΓν,ΰΟI#HN"NΪΪp @±œέ“ΖƒƒΉ¦[Ϊ‘(»₯w͐ΞMFήKAv%}‹ FΉΊžυטΚoον?LΟV)ΟͺZM§ΘWΌͺ$6‘žϋ P^JM7œύ„H *Σ]2\όGχτ^˜½ΞΨήϊίΊ¦ΟL’Q-;W¬ΘMEy³@ΑQu‘{±Ύ>Ψ B^1˜`%}‰,`LύZ=ής@?ΨœζΗΆn„Y ΑίΛΖΊηMΌ+Ÿ™!DΩΊx<‘§œΉΡ«ˆ–“:€T„L›BOΦ­7©-° ό˜ $Zkq³u"d&O^Vτ0Θυ+ΤbŽ:~/έ Θ±|Δ'Q›Φσk#“4ΕlCBβͺε–;ˆRN¨OYΔsH ώ,ΑΡ$±ΘŒ*ΩjΔ·θΜ0N4μυ ΄•8u)Ύ”e'2β¦636?v³u&΄L]΄D: +k1€β³#ΨΔΟ››fiyaELKθ πdƎt"½"˜qH€i νΚΈ»e^t°\‹ΰβςςcq~Φ²7·Π(ΆUL‚σNτΥJIHΩΩ{ͺH™†XΪέΕl3¦΄ ά&Ε‹οZΪ]ΛQμ‹'θ-ΐ9μΝFά½“4τόώvaBO‡ρ6έ I €/Η^›f,Ώh¬!­dλ<‘ΞΑdF ΓΠΙq<ΰQ\Τ?αΛvΔ—ζπςv‹;wο”O‹…δΆ+e΅½ž_ψΫ;½ΆS–<Κ’Ώ=„ϊmΗD―𝷞#—\yΏΉ5Hλg Ύ‰¬―¬&QΰΜ"lρ…Ρ:$Κ>ׁ±ƒΙX_Zκ ydIOΜζ¬Rόkq’°»¬E†θ"N&βJXΕgƒIΜbƒ¨›·$†Ω‰ x!±ŠζΜΗG°x}ΈΊŽMΰ?-ί—{o#ψθΛοΐOνaΝwͺΤΘ½nΆΙπ0³g%·ϊυ`ζ±ϋυ·‡ηBwxΉχ€³xκ،‹ƒφjl썝τπλ­ΆΙw~xΩ΄=―,UΉκίmήΰφΏχςŽύΘ8υτ,ߞ_1[»οKθ+˜½οlg`ψεOzx»« ~z;ϋ uψ€²UΚΉΨuϊ"m?6^!Ρ³ΔήυcΫ“[”€dΫ«n`p".Ϊ₯Ύ•ς"S£RDD ηΠ 
¨tw  ½’πΓ‰&›UpΩ<ΛNqS{‚΅η¬]κ"ΥeΎƒ*ΜͺyαΉ97ŠβOQ‘jμ3wόFΛ5•ΗuˆŽοc― ¨4(Fϋ€φΔ’cτάŸΌ“σszN>xΦΟ`g†χ,ΗhοβR«$οlk]‰K΄σΖ]‚b++„]δ{ΡΠ‘} Β9›oδ\ͺ“qw—loM šΙTT GP\L•!ΣrFJQL‰.5›hΐ#Ε8°d°e£ QL’π;1ό£υϋ ΅k7Ωk½[ώZw¨Σl°ΕX¦8Κ cρ(τŽΙLLˆΩœ‰΅6χθ«ΠΏς0Ξ’s{q3wγύώH'Έ{žcw’.ω‰— da ρc©ςςΤΙχ‡ΈωrolCΙΜo€HΜΡή_5:ΫΣΌΫ=Sσ}(>kaτΉEx»Ή―”3 ˜{₯T­¨ϋG’PqžΌ΅ΐŒ$π™­TŠœ•VΤD ΅©F³ ΉΫ―¦˜dϋMρˆΘΊ(‹‡Ε‰Ψ²ϊ`‘Μ%«ω##ύ₯œϊ„œB>_ρ_Η Τά °½\Uίmžh½(dVΰukυ{V’@`KπJ^F@ΗToΡW[σΤ&*½—υηχ§Ά£=GίΫumhΧιLœ/…˜ψω[»(7f$©&ϊtΝxUT“φ€SNj„§mor‰ŠDKŠ,Υ ’™PΓ??9“3€ŒώΒόz7nœυρB°c~) ž„ΙKpL₯ΩQΰ[ατ?τψ0ΨΓλΓEΨI½Τμ‚š-‚\r7U§ ;¦Vhc=ΞSυβ“NϋΕΟ2HιLQ^"Θ)7‘ͺ°ϊμwΓΥΐλΆςπ#Υσc£Ž0A]+Ρ‹­KΏ·hcw嘁˜€Κ–έXkj8(Μ)IE(αͺ‘"š υΦ>$J8½>Y+5Ξ>‰XMKց($οjϋ’7*#q!t"uQUƒ5#=ϋΨq0'Z ³ΧB¦DΊuγ-WRΙ‹ΖU5›ΑΆˊa!κ;ό7!KsWiaΙΗ2OΉΔ°²3_.#γsϋ;Rš΄%y+-λAΧ^''†ηJ,ΐ₯\*―bH τDδ½tηΌό&—IVρxκΩUͺ»6ςκΖ6ΦξEŒΊ—†₯*‡QηΞǍaTτ1’Gli!ˆΤvΦvz ή_>~j“Xψτ‰`ξΌ*=ΌI™”¬·έΫThdοΞΕΚ*ζ‚³gΩ‡JIšΐAžk½τ=ΧρgMr+‹§mομN υ©>Ό·‘auί»s'<Ί.Ί{εΑ~qPΎ{$Ώ*Wρο·_―τ²z―p—&^~us³k‘ϋ1@δΔ•›ΌΜ$d¦΅ΈΦJΓu―Ϊ–»³δΖρ@U'•­&•–ΞFΈ+ΣσΡτ_ Ο,BφU,Z΅ΖI$KR1UJϋSφ*v…IANΦsPΕ½€₯wD|Ο“ŽέΞ5δŸτπΞ7π ΉΛi£5–Vd[k™δΎΕH{£’A[ΨŠ1χ†¨!vbd”Μ8H7gx¦·k^πχΛ >™ϋΡBb[3ΙΝ†Π΅G0 ΦŠκ&*ΔζOWΙw”M΄ΖuQp²!•’?—΅e€΅<ΰ/=»t»τΰu2ΰjœJ΄Ύ|t*©!j6‘ά΅`ΛLQ’‚ΙL ΩΩ7pν`ΖQΙwzθΧ―δˆrσm_‹₯jμΰ’‘ΜEψœΔ’²ε‡κQu”΄κ6ΖΈl²Όwω°z•Κ­»2χήN^ζV A7αΧ–Uˆ ^Ρ°₯»Τ‘γ›ά "UvىϏq©ͺΎf?Ξ$0oΟ+Γby[Όz3ΌςΛ)λœϋ-Δ 9•˜Α£œ”0¨‰H€4ްΤΨ³3ε‹κlΐ· ~g\Χώ3aWώvͺ’9χγΛβG6ΝΥh£Ž qO˜‡ Q5_ΕΆΗ%ƒ{9Dg*‰g€ΐΦΤ<ŽyΫflwΒ.±­ΪΩ>7όωσ·N>ŠsUΛUcί+±©ca§•…qVJ ΐΙ,?·μŸ=ψ™}}ςήΆ?ˆΟŸ‰ Χ2κΉ#PšwX_ΠM(.2^Θ9‘ΈΧˆYˆzŠI‰ˆC!‘«μT’cΟΕΣ$y)X»ΫDˆέvήΙη<; zΈιWš_0‰U©΅ͺš²MHΩ9²€v,Ά€ΤΕΏ/šVlidkυ‡Aξj|»κρΨΠeo€Χ°©γŸ½υœC΅;vn­8Ν%¨R\Hβ§κA₯ݐ`++Φ”VtZu«CΏ5ˆ™˜’ΕΏΧ’μΛ/*€O4ͺ<^›ΉL61₯?9Α³ΛhΖ–]S…gc €ΆEž­χT[GŒGθR½€žsNHξŒ•SN¬»ϋ5M–q}Ίm²Lλw¦N?ψn[ |ΤriΞ>YŒLNžίƒ―s|e»–1MslZ{₯ΉSA«Ι ׍8Α!Ω‚ε ’EY}HκI…؁;­^א|©O“¦oώήf|Δ&ρ]I“Vσ—bφW’šΖΤZ Ήρ4„Μ&␂Κ:tR“οΦqΌή³—ΑR2‘΄4Ο’΄Έ“˜%y,10φ~u—Ε±)H yͺ–sϊoόίo½νΝ»΄{:g'.τμAύΊψ{rŠr;ΰ˜ΐ•LuΪz€ƒ†&χ ώ’Κ_κώΑ΅ΟL @D»[ŽωΌΜ=A1Χ¨3ώŸEu½η(žI‘Uχš3{² R€Kš]k&fgŒ¨°vl―ΐg†Ι’K|κ5ΏvW(œq*h|Ρ.YκX8G~ΐl3@O ₯Ή$²4ΉΧΪες΄‰Ÿ…§σ½ιWΦO}η‡IΜνφFΡΚηaΉ¬E|«5 c1’"-&"[SmN€6ŒXhΧ€ΦX”L αί~O‚뒁ɒ±RΦά"†Cψ”ƒ‘WΖο‡&TΞΧED_΅Α”IŒJ3Wψ[TF9[δŸJeΏΖH»”±¦² 
Κ1Ο,^«lΑ΅ uΔ€Μ`zš~1ϊώό*e¨Ώο–o―_‚[ε’Ε+-ξθιιyU|½<Εƒ{ΧόΓqΎ§%Ύ΅ϋαknsΏ#PŽ΄%MΌ»—ΕFfς ώΒ“ϋNδτ[w―όΐ΄άp/y’+«»Χ—zόΕ(―σ)`΅8ψ‰ΉLdRn‘§œ—ο'z³vΠζƒα@HΟuQC$α(―± 59λΐίR’/Bη,<ΰLΚBNΌ±πΊτΜΒlZIƒΗό‰Δό$u+–΅ΰ=e[£kD˜M©κœφ֊²hΰ˜³<%˜08zN}ώ_§ymφ"x³ΘKEQ%―±₯ZΦHm%Šv1i+z…"T\tν΅šΒŠ»½Β‡Ÿd£Ο?ψnLhοŠrjόmο/[ζ2’ R75ς/‹MληpsvΤΚqŠεŸότΆΌλόVΏοbΟ‚n?Φ]ΊΑΒ—Ρ1 λ`*”ŽŸ΄Θ ?πxPŽώό"hˁOuψQ φT3κς½>žEβ‹GiΨ~ύΉΨ_ƒ)jwς›―ός@Ξ΄Ί>,š€ε±…ΎΊvυ}i7CώόO΄zΞSο³”a’'Ÿν ΄ΚW_­Gj$w2k—YΓγ›―%ž_eΝGΰ‰dTυ™ͺ”ήuΞ@ΡΒ²σΦΕΜAί$DK£šm•M’/’ρΊ‚υ ¬Ό¦ν¦«ξωΰκ£M[ΟυV}θ,6*θΆμΓνYΥώ>{δΗΥη[!i£…―·±hύυΑI:Ϊ@ΗΚΚΊ•‡Ӝΐ˜Ωοmƒο*#±³3Ά™β}rbfH*fνΕΒ{Ž-¨ΰc+ΪF νs”ΓP©ΝrΙ.JΒΗ…³—Θr°ΔΟ(bS}ΜΚ…˜ ΕZ(”"μy΄MΘ²,Šι.1G`u"ό¨™|EΜή₯ϊϋύJjμΨΓ{vύΏΤΕΩd/΅μπα’ξΩS’Ψpu¬‘$C±IΛ>§\+pΰι‘<5Iτ§N;sS2’Σt Jj| SΕφ„¦‹|*£”τθ¬XAΧΊŽE‹hL―Π^θϋΊ.…ηδ0Ε³I”α~κ+ΓΜΆΰ~O@nΟ]끏[A΄•"ΖΣσΫ’/κήύΗοF3Χ΄ω"„½ό‘“Fύi͌yΰΔdxΌφ^p‚#“Ήi;>κA>ΑHwr§΅t~ŒmY^ΟΪΘΏ~˜Ζά>’&‡ ϋ’Žϋ/Kη§Wίέ_ρϋΌMVΒίΗ΄ZŸŸΫ=wέi2c§iςOΎKέϊ…•—Kp"’hBІ΅S, ΘΘhοZΘIυ^€8Iϊ«ŠήΩ~f K1όx'ƒO³!¨šƒΌ;"ω搻Ε7L―ωL.D°²ΛβY£qVZή€ι˜ΫτZ΄_|z]2»CBΊqY”YMΦ₯βt«!ηΜJS΄-ΪXLwΎΩΤΉ9n=枭AR˜(άk‹eέθTM ‘άΏΡ^g8*BΊ†ΚΏεθo½Ύ?½m|8Žο&Κ!η/.$Q§z­5ΉΒ@ΝXη‹ό‡v‚ς₯Ke$'uξΥEΰ©ŽτŠϋ—Κ!Ο(&_S)9Ύ ΗΰcnqŸ:ωή5ΥrjΊwJ΅hν“θ£rΘ-Ęs1XΖf%]₯ͺΑθV B†žν»ϋύυkε¦Χ5vŸΤΝώ=±€œ»FO˜.d… 8ΧjΣ€MF€"r`ΕΆςUEΎωθY΅ΰ‹aγβ:œVωk}ƒπE€+ΔdŽ‘ӘdΨeώΈ_q½~ΜɏΞ;άQυ³!}‹ΫΞ—υtΖ`ΠImω‘ΨΛ‡  Μ'ηƒwSp‡γkΰιYlxxR¦!α₯άΓiώΨuΜΘlοΘь‚Nc{νό™;ΨξO~|σ}Θξwtά—Ζ8ω9π>ž”πΟouA^λ\‘χͺχΕϋζ/KρΪ’r&ΫδR‹сQθ0:ηƚ²§@“Ρ|}#xzÐf„n₯ηZΰ;>©њΫ.^*±ΉT,χ;*r« Iͺ₯ΐmΌ“ {ωεπjs σώZαάΊŽ|ΈΫέ5Fͺ-2;Ήςb/‚¦IEλ*@ mC€ΨRŒ†L‹Α·n‰Ϋ7“κω‘wΜ„ϋ‡vWΜΔ8BΎΑVπVie‚&ΐc+}=c›Ν˜―L–=±Ήsχ‘”Ξ)fγs%$­Ζ·ΒVnaξΙΨ­Ϊƒ{CδΞΥ 1ΒΓ}~r†™yr eiΘ‘zc^ΕΝΐnΗ½e†ιTτΙ(k8Eόž('Δ ‘Υ΅“s·»q˜³Ωκ½Όh–Ξή€J@f^£k1:©!7u΅  Ώr,Υw6’ΐb-ΉIπ”xς5ωτΣ›T iω&:8š‰ΦHF4Χ¦F%ΥβΊκ =Ήυ„XΠ­ 8Β0β΄pε»8— Ϋίϊυ ¬»₯²œΘͺ@Ÿ‘•LŒŒ’ΓmŽ-Z β=r‡³^8'©0ŸϊόΏΛεݏ§Eηޏ'τά^2¦.π쒜ΉΰΤ7ΩκN.ή,θ‚OX=Ÿ3ۚl"eΐ,ˆ“a_C,G±Ν©yόΡσΓXΎp½9ͺΉ4 H혰 ±+Τ$ ’9©½+ОZ±‘@2²s&KlΔnΫσ€g(~96ρ;ZžιΈι΄ΨΌ½ΑS³8α"$[i+ΔΠ•¨15e§₯λŽJO>(…ΦΥ ͺ6+=‰ˆ^&dyNϊT·ε'%7νωχύςXœ>¬8–ZƁΩϊυ•oόJGg7Ξο2Δ1Βτ’'Ÿ"eΜZ=`μρœ²wVb:&+Pλn΄τςιŒεK…Ώ«/οήώ½ͺƒzx.Rl΄‚!+{Η©‡Q«ΉmνβΓ™ϊ§i"΅«›`Ί±9»7/ϋJ`Fk]эU««wŠ@ ƒΦ35©E¨A Ιρ³;fί2’ίΑΗ"ݯˏνΑΗϋan­ §ΩΙΌ 
•·$/k*±·‘P§ζΨDL¦I…ύϊΔζ₯H₯–η¨νΩU§?ρšΩYZΗ sξˆ<ΚvUN‘¨΄‚˜]YΗ…ΰƒξUQλή± H Υwχyg #~ξalU1Ήe£RΩqnΡΛ­NCkpqη΄IfC]+­Ωk5~Γη†HlθΝ8ζZΦΕ‰@˜Ÿ]"Kaƒ&ΟFD s΅=YpxΐšU¬ ™K{ΊΉα2“Έ34τ "ŸλΣϋγV i:ΧΘo¬Ζ»<7!Λmέκ©βγ$₯D `'~zΡ[˜…β~™ΡubPύόΈΕ­;:9ώR3ΪBόψάήxϋXiκHΗκqχδΟΗƒυΝχ†ΉΫΈEξ}{WNσώx__ή——Τθ§{όκ&έ­ΏΎ[Ύ—e}]~=Ώ¦u(Βϊ°Ύώψcβύ]‘σDιθδη\‰°ή^&»Y` S© $φΊΪX­U,Υ*yjvΘ΄ΉYgRδZmJ*‹$ψ΄[‡}p”vώ֞¦!€2m()œο₯cΙ‹ Q¨Υx«)¨αˆ•θJ—$†μkόβbΥ;ςχŒwϋ,uL6Νό¦L2VΑB€ϋ…CY|-š%o™”2bΒ€ΕkSΠΌ§q΄ύόwrwΕ8W?wσΑb‚?tŽQ꩜αΜ¦‹•ΆgψŸ’} LΛ3”eб:Έθn φφύ˜τώṞϊβΞ^?”„¬ΗΚΥ쩦˜Uԁ@S€“’«λ©˜h\ξTCάm%Ζ~Υτq)έ½=5Θ±’”Ωo ΕVΤQIExΊh*:bΔ SΖ¬I4(ΞκŒ :9ΚUέ€DΛΕ*―ͺ%_ί-œ–ίŸίξ^^Yj*τGΥι„ >έ33-β‰.Vc΄Έu"”e›=x<$€£M–TˆΞε°³)’MjH;3!*?tRέχ§#γ“m9ωnλBΎ_rw\·ύ‘/›_έ&ϋΘΉ‘f-‘©Φb―1%¦¨‚ΨΨFτ \ξjjΝΉΰπ ΄\Ί¨άA€½A] ›Η?yλ9`_mμn ŽA™@ώ”uν«UΣ‡B}κΡ™ˆπ – DEn);]=ΧΖΓ vMd]<ύΉ[ |ΎΊΝoί`Δl]žq²iΨ˞WζΚαα7Ω·>0b¨UP‘°qςBβA ukΰš]’Z΄*jΐΆιA,–‘S,ΐLgΪ„”υθT]βΖϋvkΝο\DŽ ‘ΓδήT,/HΎ‰©Χ-**Ύl‹ή5ΣΌRnθ­ΤκζΤb˜Œ'z[όδΣΤ[ΨΒ‹pφ Gv) ςόW΅‘jΫK‰Ζ#²ΰ[­*L9@βˆ:Ž[μ}ώ«7ΐ+ ςάbΈ±'$ZυŠ ($ 0Θ%#ϊ΄Υ8 7i€Θ)Δ²θ[%4ή/νϊΟά ­†Ο²άfώχΒR±Όb©c[Θ )ε]‘χ.KΉ#Κc―Kί˜¨Dšˆ8›b=a£ΆώπGύυ\^~,.{έΝvk=•*¦^ό+:iͺT9ˆL=₯œ]―ΉV-–WFT£€v,0"cσ1Έ0!άΏ*ΏΫ6~>όˆ~ΜίIή}•ΞΐΩ5W@“ςμ‚Ο`Jς2h€ 00’±1@Η=vƒ‘&ΫΓ8β₯‘΅p―)q—Oζg²•ρόάΌ?}[ΎρΣβΤlώLεq ό •ͺ€όiπŠE\4³κƒS91”Β”K7S\c°(]Ξn”υ#ΥNκ8rμMΠnΠ …κ Σt‹’Ε bν‘ ˜Š<Φ Q$QΚ΅3xΧ–ςiH5YUœt@¦¬ŠZdΆΜ¦Ι^ž¦α/?Vؚέ"£Ά†ͺ©eGΚeg*’ύœͺ(ΰ!‹J ]Lh M“³Γω Εϊ<ύ¦=q°6ίXΟΚζ—ΛΕӏ©‰ό@€:1ρσ·ΞZ­ΙQ%’΅­#kξΦcͺœβžS 1cΒ\‹ΥβΆdD›¬ˆή8&ϋ3ξYoΟΏNϊfάμο£=jϝ•©*!η˜ΐΕΛ¬45›šΐΚlΑJ#ΒqαakδΙ[ϋ¦ψέu™ηδθœ92ϋΪ&λYΐ8ώπλή―ΪΡ·sη&rύWάZzΛ:[Δϊ4 ;q(ͺη€να<ώΗ« η’*έυ†.uZΝ[G>£`ώvnγk7DP­7 >n+ρMήp ‘Uδ«’,~½Φˆ΅& +9χD:[UΓΤ Φ u ¦ζ ΙR’“j–KhœάZ›I …«Ρκ,¦D’DH1;|μsÐŒΝ€ 'οΣ’³³AkύJLoo―‹ςΎ‰Œ›o―Ζ?ΟNΤώ>Ή›ϋbΏ" Α΄ΘkΩaΪ\«iΠ7¨%laŸ‹Α΄²QLε˜ΑΞυΔ ήœΜsζNfcφxŸ{ΚςθγŠH.γ€ΤΤV] 0ΊΛ92ΗΉ³twH‰΅cΧ Υ›Ο$lΖΉιε<~Ρ™{έ³}Š­RΌσψμX}NΞλB Π–Y³ξ!q. 
ΩKΥ©)tΐ8‹ΉŠq^ΡλQŸΚͺΞ_ζE!€³ΙNW½5³Μ=2’Ε"s‰Fl‡šDηΤ{ Hτ6„τUζ{Ϊͺ:π§ώΙ·˜“sΊ#jc“ƒ:ΉΤ›Λ‘‚Q!:ΦLέ)›o Ξ‡±θήsbœ…rΙλυcCή•ΡάΌ‘Θ{oKW)YΡνΚ΅Π‘š“ ΊmΔD‹‰t―VχMνΙYPT« "R.;Tw­xή«°­‘}€A“ryuMμΎ1ΜίǞκͺυxο­ͺTiGpχŽΩA-Σ$zVβόk}τΧt6iΪ>šΐ1χ“κK¦ΤϋF'ΥύνΧχE=WyσϊηNμ‚NΆτάΥ^^ŽJε¨[bnρΏ€h;6δtδσˆπζ•ς»ͺVlb=0›ͺ1ΈIΈωϊgXη“Z7ϋ5vQ“Qψ,Ύς:9CY§`΅ο€S §±Γ«ΡYψo@θXήl(Ψί]uΘί~!b¬«ͺΩ§—χ·ϋ•WΨ8.6ΛΐΏV6;7Yόο[ΏKŸšΰyΑ†V€α·O”― #p±Ρσδ'o=^£bΪ Ζ(/βέ‰©¦Vέ2 O 9v Hδ΄D‹P,qβ¬ΓN½pΘΆΑβο£gooΐλq^15›ŸΌυΤδPšιF|ΨyΥ%=i|ΗwΗ8‰Βυ˜°%iΌΞΕf©―ξΜΤl\Ξ Έρ(WΟoνΕϊEаbΜ.(3lf¬zaœS/ΎΚΒ=‘$”<‹†ˆέoδ©΄D¨―ˆ‹o(VY΄ΕΙ;ΟάU'.w1Uƒ…/cλVŒΉŒŽ΅ƒm1’o]^šαxόMά‡­ξ·–—‡ψ$έ€‹·;κ8{#(€Je “#PcρΦΊ ΄bSΛ-pMΥ(Ά©#r!ώΪ( σΪeœκ ‘ΫMωπΕΑpι[λ”’šaΩΒΔ–|—Κ[Δο^h[—G0γ£E8ΟR)–ζ˜Β.―ˆ9όνόXVτΦt([ŝ;[Ή€ΤD’›ΚU<λ€χ\dΗ΄bdΣ.».'Β" ±/Γ5ύΩAό^΄›BχA?π 3ιΔΦγ5Pfš‘+utTŒED[`dO•”t*@…Ψ뇇0ΏώJͺ΅$9-φΰ°₯³[+ΣR3ΊfΌZ jƒ/*›A lŠ@ύύΜ*ΘI:δϋΤw ΐ%Χ`Y`ΦϋœŠ Ζ'²]EΜΊN»«ΰXI˜z , FΔ¬ΖιΟ/\*¬Ή³Ί[Br)¦="Ν°EΒN€¦Uκ­#L·ˆύ₯[ =€nIέ:‰Π΄ €Κ‘vιυ•ώό5άb.IV²r­€δΞDΆ7ωτ|Όηg`3AΚBq<Λ ώΟƒΗUΆΘ·uFp«XcdεκρΫ QNξ‹CqSb’ο―ό―ωΉS¬‘Š„άJ•2І“‘ΊΞ¨ƒJR‰V[Χ(ΎI)γδ()MΘΤ δrενΩΰΜΘν~x©9Ύ;*w>μ&ψΈy/šΌ_]”Mm„Χ‡#H6"―b΅ΙŠϋŒ-ΎWλΕό™ ΚυB²Mϋ„ί$Ο’Θ‚ΰ„oίχΠ›š€"GpO?¨ΩπJQzўhο™n9%{35ΓMβρ˜ϋΝ^ͺb N½Nϋ\‰vEQ Τ‹C ^γ|τŒ#“S©˜‰ŠsΠδM ;“;΅ΏM~ψ΄Ίb=Ξ*³sΨ— !riλE?5QϋΣDH4HΝŽ άRΘς*βX'ΧΊ·qκΩκύύ$#¦Ω·"ζ΄‚ ·$½ΎΞI/BrIiπA6!b7vD₯€7CΨμ’υ™lu%~¦π'½.θινΎ>Ώ?½έήκ„Ψt’„_]°ΞΘΣ8•.·Θ,Vpc…J™?κJX§±έœ EO<©|¦ζηg}ωρν΄,`vΈ¬I•':+ Ÿ3HZβ ?§”HύZpƒœU•Z7,oΜ½LμΖUΗέΛΝ7€4ΆuΐŸϋsυ½怬EΡG¬h \³Θˆ sψBΪΫ‚g6Jg=ωρχ λρ­Βμ§Κγ φa°Ο(’xVEUκm₯ePQ*J;μ6%²ͺ-w€ΚΧ*nΝSζj{DŸ.S—Ψω˜Χu]Σ&Z£6 σfιΩH_gLΌgJžgΚIώ’sόΑkή?'—xλ ϊρE7Ί›νSk†Ϋ–]gLΆu!»FοχΘ4γj£―kό»~Ρβm΄)τ&Έ‘Λ ”Θ™r‰^Ί€‡Jsΐ~%Q`Ρ¦—wͺ’‡œ `νrκΆ‡OψΥolXξόR{I₯Ϊ `BΕ`B ˆoψΐ.•˜u ΤͺΪϋ”jŒi‰ΈU›Bο€½γC[bwΫB¦£Nί+΅ι~ΡΣΫΝϋue ₯ >Jΐͺς$l퇀₯š¨Ά― ¬m°5ho]Η)δ²’£w_Ψm•ΫΊλΜ__n>FΫ¨7ET  #eGhVIζVJΤcB»o‰;›mΒώnͺxAπ’uΤuM7λ’9j›H±+ @|8$†Ϋ@/ξΏ½σrΛίή`ogD?*|—υYΣ7sΊšqψεŽtύ%όHΆ€wmξ7ρ΄”ν“αΡ’ρŽ‘Σzλ3{dΉ8}\™ν™Ή8ΏƒvΉ­΅ψ„ψg€1°Ι€βΒ‘%ρα`&iΨO^O‰9 ιœΡΜGž΄`μϊPcΜ–{£[ošΣœ#ƒ0%3œ΄yβ€(·‘ςάι1ν<φŽσQœέό˜Y›€ŒQXt €TΤV[VΘ8N€K” ΎkgΎΒ%ƒ˜>.ύ'ρΊN“ΙΣ΄?ΨUŸΕΩύ»ϋYQ½Α?{ϋ 
ς[°ˆ\TI₯XΚEΙν}Ήδ$&6ι;1N^”Ωη¦rˆΑ‰Žhά¨·Ό?ΎΠΓxyυΆ+ηS"ξ“$•ΧΓ΅Π53:ή―αζ·ηIRγ“h'κJ#…dε|Φ’;V & ׎”HnPt© ιΐ™RŽTΫr΅Ό[ώYŽΉVσλίΌfbVΫςζ Kz”(q:>uΥm¨J,dδΚ$y»f¨ y˜ƒ/Τ{‘¦VΝ–«.-Nv·|yy~}ϋΘdlΜΝ'₯Vε-Χθšσ&ΕΫή΅νΆ‘ΓΠύ!Ί_>¦0$ŠΚMμΒIZ7_Ώ‡c;q23n‹Ž}ΘΌ‰ΙCŠδ©ΩF)A)PFAސZΝEŠ *WD]mg£Έ:₯yAz„Iσσw*wϊεΫyΐΟΔδ\ςnQwߍΡΠA{…'(`€ =i£uͺE#Υƒ€¬η,g5½ς/ύ’Κ”ͺ&c«ΙN&?CŒ Ά†© ΊξάƒέΩ«U0₯{[‚<ͺ˜i՘7_f·+σ6>œ–Ύ%ΫD8„Γ'ΛTη`"FΔ‰\UvΑ^˜Ec1&P¨]yδT„£az‚Θ.Ύ½zΝotθ Ήά§»ΑxΈI;و†cΕν„KυΝ3+sΚb ao q]ς€dpP2G΄Φ3ε ―©~ƒΡπ<Ž΄oyΎΨHΈ₯\xH>Uym’η¬5ΩξPqr†t%†n-α,†Κ*ΕΉΗΏΙΥΫεΉ‚)Ω -sšΠ8d/ h–‹ φ«œmΞ&Ίcka^šE―*ž―”ί ^}£~l»«»νσeTHaάψηδ½υλ6wΕu€Ηϊ‘5`Γ¬ΩΓ1[“xη+Q…ΛrέJY|ο’(±q¬ακO ΤXž½8*Yβ$“ξ|^Sa YJ±Λ!…0€ΘUi=‘'+„l‰}c ΅+ηό«ΙlWϋΦG5‚O“ΣM:‡5Β!j¦†φ[Φώ[Ύ–ύζΤ/ι<œ(HE?ή‹[> φύσv{Κ‘νΛΟSύ;=Ί“M€»S‹ια­χ°Ίί\+t>LΟ7˜π…TGbš”Β+©ΓHοίep wuwyΡΩ}΅/ς΄·?^ϋδRί/h΄„ ‘Ξ­·]ηU+χόuΥ7ειζΥ~¦[όΗK'NWΙtŸu™{ Ή°δ…Œ”RrΩS`“ΌP±3TWΟϋο<ΪΚω‘g-2H ¬/ΌίΉ›ζ€ςpΏΪ?Žž€νβ/κ±U`J q<ƒ\7;Τj–]8½huπAφ]:q`†e2Χh“¦ν!-<ςk~ρv!qQXš0ϋjΚ₯"ΰΣ\k@B²Y%ΙkEsδL5β }ΚΖZ/Ζssί₯Fk¨\mΡΜ)½XΙJ&",OMηcυΈΆΝ!θS½— L  š€^jAtΘέ₯“”2i낁χ¨©Ϋ”U ¬fx*žΜPF΄Ώb_œοςΗ!Τ‹{I]rt•Z³.”T!5‡¨Cn°u6¬7€G™›i HΥιΘT$ΣCμτΓΫiρolhΧ7Ήž`kΎ)$β%²Μτ’.₯ΤEˆ ΊΥ]3J¨ƒiŒ`*l]IKΑ5Μ½γ ωόσω}~Ÿίηχϊώψή)v cargo-0.66.0/benches/workspaces/empty.tgz000066400000000000000000000004371432416201200203510ustar00rootroot00000000000000‹empty.tarν–±nΓ †=σ'g·1%φ©R§ξέ"ΔΎΈ(,ΐ•όφ§ͺͺΆkVε[Ž;Ψώ»ϋΑqςKω μ` oF•ύ4Psώm=RU<œV7ΌζlλMMYF³ ˜ ϋ§{œPχ¨;‰%δ8‰ξ"l φK£α9£¬Κ‰#Ζ cΛδδ­{»§EUМd‰?Η*fιlW*y*¬»ΕόΎΜž7iώ7ΣΊ•ι.·ΠŸ­šΏNϋ vπτ,œ₯BQΜα Όμ„R ά¨Ρ =œΈ6 ΩΑ£O΅ Aϋh=œ…QθY(X}CΕƒΈ Ύςn,m2’D"‘ψΌΥ”d/cargo-0.66.0/benches/workspaces/gecko-dev.tgz000066400000000000000000001401651432416201200210620ustar00rootroot00000000000000‹gecko-dev.tarμ½i“γΖ•(:ŸϋW θ^Hb_τz桬͚‘l‡$‡_Œ­‘™ ] @`U³oΜηd$Vd±ΘV R7$Ήœ<ϋ²δτ!™2ώ8ŸQ’.“Ε#Νa$³·YΫUώ¨πΗ6ΝΞϋψGΧmΈvtΫ1mS·ΰZΣ ξύΫ ώl³œ€ŠςoΏ?χ•’L–a>ωTΑKψ‘­ˆΏ&Žxwˆλ{„hžjšazΞ˜iΎ£Yžθ„{“OΔ{Κ$ελ$ηψκ*Ο7Ω§σ9τ»Ϊϊ3š¬ηλδ}EdΎ,αmoύ/Ύ:ِ| ‡‡/O^ύο«άθΟ~?ζOCΚ³9lΧ:‰ηd³Y”χQ²\ςtώ9’ˆYž¬£kžG΅šηί1-c<7ψσΖ7Ν“$Κ>ύΤί†›nσ0Κ =γMΓM‰υ>P[“<€ 
œ$©ς=식oˆ§JΐIΎMy6yΕίΡhΛΈψΚΌ@£σ?όaς‰2™ϋ0\§πΎ{JR&‡ƒΰL±‰^ΨNβZF€ΰ€šˆaΐδW|ͺΟTε/ί+ί}σcΛΓ0y•rΒδ­οΏόμ‹οΎœ­ήά$Y˜'ιt°ΏΕ’ΝχέT8 π™6yυΑžΛώσπΏ₯#wΫύ›‰~"€kJb…Aώ|δz5έ1›ϋošζˆo‚%^1Α~3ΰ[KdεΣύΆ H”ρ: jΩM68Ϊψ±ΰeΰi³5‰’GN ”Χb½χ07₯ γg|ί1φ†ύΣ€7`ΈGΎXΉτ„Gœd|±&ο’:‘1›4ΎbΞΔΒ”o‘,Η΄ ιΪΠEW­YΟΕ³I£5H½­m³Φ:F*έέ8 3ϊXkόΞ΅{ΪΒ“Ι~=zΪd9›τς ?ΈςΓ&d<iΘ±Bb¦|^φ‘,,Ψ1Θ'KΙVW{Ζ?ρ³‰eτί4œQ{Ϋύ―¨8–9‡Ώ}@Œ+8λQΔ)žΞμRfΰ€ώίΦZςΏf9γώί^ώ βΗ=}ή­ρΪ@”P*ˆΎ!ΏύAeM‡Lo$ “m¦T`gR•³Ύϋλ·(dUΠ|Τ¦G€a ‰Α%ϊ?MωΛI=„9ͺ7ΐBΖy6χΓO•Χώ.ηYψž³sΙMyhO+P‡ŸΛ-Aθl{Τάo"‚‘~’< r!\wΟ§§τšΩβUα2ξ­ρ?} ι"εAιλ-xςš[8K–œ€₯ƒz„lSΡE—zπ’6ž6ͺˆν³ρ•Ώo•Χi⿁ŸΆ³8B²YRΙ“τAΛσύπΏ9βΫξΖι6 σέ|Mb"‚ύ²hNyš/JFτR ΰ)ωΟV[ώΞh» ώGoαkŽ πj ’”I7~ p§Σ―μFΗ„.P“΅H‚Β­a/[ωzM6zα#1L‚@D/ύψxΓP„.+ϋ/|„’gϊπ84]ώξΖφ"ͺq^oΩκψ1$lKό-ΓΌ'n»L['" )Aθ΄LЁ¬Šˆ–p‰ε؁ΟOσΌ€ΈΆmΘeι&€rΰ˜ά§Τ †ΉͺΛ°"1‹€μΓC$6β'/~ΤΌRN§π+/.1έHΛ΅kό–UμLΉŽ•›“cΑ}ίΙ΅QΎ‰αEΡόoFrόœ‘ς:”7§[q³iΏl„|υΥ7JžΘ˜ŒΑ+@WΏ S$ο~›)e’Ÿ&O01E ₯Ω€­•>f­"΄©t;Ή_B@ηΫΏ|mχ9ψ¦τίγoΝ₯IUϋ#ρώ’HτŒD§φί²ZρΆiώŸw ›4‘Σ5_/ά"ή&9/³`υH&«η΅©<#i$dہβiGΘ.‰O#ΦiΈήD"¦cC g—ϊvŠu{ε―iς–Σ\ω’•~œ˜TΗ ·Ξ» yΧ8ΟΏΔΗΤΗσΫύγ›Γ Lœ Ε2Ϊς«$°ŽΡΚhͺcώ―{ΰύώ§₯^‘ή»uΔ8-݁:τx= Ρg¨B{” •ΰΉpΝ¦„RI©ι0πωαQμܟxœ†Κα#";ευ*“WoΚ‹Yφ*Α6–αΠBT)ΔƒO?ύ²€’―H”ΗΏ…6@P7iˆr8Ω ξ+ΠMαN6Q™mΕ“υ”*Υu™ Ϋ«Φ …‘£ηΙ?.Γ¦ŠŽηžτ8ΨόθωdΰΤώΆΡΚfj£όwύί‰$]˜d%~έιq·Ž’βδ)^d»,ηλΒrƒΈχ§`ΗKXύsβξω.Έ$£aώ_wΰ#M%ΧηhΘ}iό―ιj;―jψψΏζΓQ‚ΰπuα’±ΏΏΙψ–%en @¬ΑΆTΜHcBΤtΝΧIψΎΜ ooγN#/Γ"ΜΧkQω†]€φΥ¦iC³ΫΦ&ε”/C˜Ρnˆ‰§Ϊ~š¬;SuΝ05€ <Η嚑kšI}β3BUΛ‘άπνΌ<^΅ρ—ˆύPωcJβΎ*―ίΓο7~ω{σόhˆΐ•ŒρΏDόϊ―£ώηϋŸPρ ͺŽωςœΐΙό_v»ώ‹ζŒώw ƒ‰s ?5ν1¬εv  ε5Hke±π•Ϊ`Ο>$²|TŽnω‘dψoWGΣΗψΫξGόΟ2β A=SpRώ7[ϊ_{Τί…ώϋ!€Ξt•&qRΖϊ£―†€ Όaj’„ρŠcΜFIΜ£…γτΞθΑ’š7³}^™/“ΦHϋ>›f=…θΛpې5ή4›/Ίύϊ™λ°gΪ9Φjq1³bΎύS˜―bjb²#LwZl―²ώk?|ρ_J.>\LρΊιμΩ―@Σ£ϊϊ‘Ϋψ@π³T—ΘΞθχ‘μ?&ƒΟŠœπρ§λ?8-ώOψ„Žϋ[ϊ? 
’–Ha}4 η·έf~Dξ2ωŠ+_ύεkιsBM–cˆνό_D.‘,uδ?ύιϊ―šitΤ΅Ζόowjφ_‰ήA䩊{§d»j,ΰ«cΤ₯€,υβψΙ†δ”><βτωΏώύnMy–£δ'ξ0δ+)€NμΏn˜mύξŒϊΏ;ΰZώg½ι όj·)cωΠ―Ώ]ΒsraIykGΦQr§Ω Ν •mwe"M‡Hˆ˜’ΝbΕ£Mρ>ΣΌ«₯©$)<εyΞλbIy·+―%œ; 5 wqφ¨e“— VύθΫE›ΧΣm7ˆwσmΩ€λεb-ŽΎ]΄ikυj[§—Vlν°ϋ s»lgΑδΎΔw΅ιμοΆΪΏΓ!΄ŠΠΣε(ΏΡπ–IςέΘƒ‰ eώΞύ/Δ@Y01Γ„1aœσ”€Ό‰QεkΕqυY™#3λJωΐwOIΚ ]κ~‚xB *Rζω†‰4£’αΑΛ@¦α<‘‘B6ΪN)'L6ϋώΛΟΎψξΛٚυ₯ή\-Λ„―³$]ΞΙ_ηΝσ<άt‘«we¨£§Π3ιuΐ—θucΤήveώM’’αοΒΖ_‡<εγ8-OΣΤFωό_OύΗVXH–aͺΝ’Τ½fυͺΑ0ή§€»Γ<Ϋί:•f¨νP0τ(+―«h°wΗκTTœ&N”¨8Ο½hޑЎ8’™ΒΎM—:ό1Y”}»hΣαΫ΄‹xW\ΜδXwψRwW §^.š΅#u’EΆZσυ‘χΛ&uέΎΤo0ξo—_™WξVC`jχΛwρ€$Ϋ8Jdšέ4Ά\ΨΎ6/‘K|J|Ή³τΘ όΰϊ/t#ώΏ±ύύΏ`»S’ξθtb έΡΫττΉ=ύŸ-%yJΩ­Κί]zLZTCθtv}/σˆŠέ₯Οξ1¨΄Gφ΄Œ:ή- IαΣI;ZR₯d!βzŒ-)ƒ#κδΕbΕ’§Έt|Τ–u^} Θ›¦$ζQδ@Φ†ήρB£Eσ‘―;ϊŽmΦή‰8}ΤήxηΪG_€ηΨ~λsšςu’—$±ρB­AωΖ‚&)'[EΔ =ο5šνίήl#`玾yh2iϊ ]ΔΎd3{›ονψΝ7‚K^π(8ωξ‘ιq&£«FBΈξ.ΪNΨΝχ;šνίnΈkw½Ysί†σ ]σ:šοŸA;¬«‡iα„βη“:™žMeκŽ>Ρ‹?-ήΑ>ΫΫάΥΗΠ:y2`ΠΩΒ˜zΨtM«Ν€•uΊώB-οtΆN‘k΅°Ε&d<]'ρίυΑo« Ό—―PΆΘHζaχj7›ΐ[OάOsΪΡV>˜ήωΜίv-υώY?Ϊ°”&ι²(*όIΥ›°%'¬§’jŒ!€wαξΖ›£Οχ_|^C 8±¦‘Άτ–εŒϊΏΫσ>ό»š$BύOeφΙkz‹Ÿ”Φ† lS°ͺΗFΕ0(‘³τΜ¬Τ­5³ώΛna―)οu…5ž5€Ž(ΗQΤE½QΤλυ>ι ͺΐ_―ψ£t2¨Ι€₯ˆwZ σΑά‡¦x‰―5ζώ δyϊŸ'ίτυiΧrΖϊOwρLWA=6i˜€aΎ+ά6 τŠ@„2fIJν €Βx/΄R&°―‚ΘEA%Ζ•ˆ)…:•ή“ƒς¨”―θ“–TYV‘άuά Π¬ΐδσ‰Ζ-ί2W7m•ΆjΎΙ:bΪχ}γΈqžχNUB BW„τšlΣκ6IΦ$}Xd»˜žΧ―dθζ΅ΪΎΰ$£„ρ…%Ιz\uή 2•n»ήhwΛΣ|,’`nΊ!nρLΜΧ$&"<&‹ζΥΧΪ}& +Σf’‚ŽS E¦½θ32―ˆ²5=brnψΊkhξιvΰΧχ Ωͺ‘k^θΜwT¦¬gXBjΈώˆ{S†n(ρdψpεkG†Κ=nYΔ΄tΘ#žΗzXΙ§„^ΰ8– BΟP³]Φ™»7Œ§yΚσίψnmkΌŽΘ΄JQΜ#ϊœF=Ξ—(jt¨Κ2H―T­ΌσeΪ⏍κhjω.Ua™i€²π|Β3œ«αp§UE΅pΏž„A?}kΊτ((Ϋ<Œ²ΣΓ©4nWs„£ΊΘ‚Ηξ^bž?ρτa.‚:°ιšΆϊx›‰t«Kεo3!Aӝ>< ΑόψN>ΌJ_MΝο ³Ω|©7•NBƒ'‹EfρΒkΤŽβα ίΛΤpƒGΨxιE½k7Ξ«έfulίωЬωΌ~Οχ[„cΊξ˜E€Ψι†γš,P©…7‰oΪΆ­ι]ΎL,ΞJVr˜)α γ– ”yž’8Γ³;―vΦ₯‡ί¦Ρ€#'φκίpU.Β C†¦›π]8έπtζ9gƒhσ^­/•1εzκά7,‹1ˆejΜα–Γ|μ€­rίt©νυŒωΟΙzYN4KθΟηϋΦύˆ†"x―ϋΦVM`qΩϊ,;$R³fΗ;’ΕdHeRͺ–us/\œΚR7yΦΔ¦y‘ ϋhPŠSφΟFσωP“gs]q4Ο·‡VDΚω0;hυ~ϋgΛJYΧ]u˜&O"ΰτ– ²¦v¨Ϊ a©κ°ίgŸl΄:nŸ¬7φIΔRFDϋN»€ά¬γφΘΏά΄Cvͺ€―eόAΜBΉΤ/uZгm‹δ?€Ή{6‘Αςwq’Nώ^$8ΈNHΜ$dœόώχ“Ωέσ}_:VΨCώn―ϊΩς„νaQΡ;lΟtφ2Ϊt? 
ϋίσ ΐ—Ψν±ώύχΏGixƒό†Υήmτ½‡ύ7γλΗ2ͺί¦θΥu ΟΖΣIΎ$”|EbεΏPΚα…ΧoςfΕ‡:=.υά£ƒΠυΞ}ςiƘοΆϋ_ψJ/£,šζΙ”Ύ{w«ϊΊές1mΜ}ό»/ΌdR–K²^ƒΤωEΈ&(?Ÿ°Ÿ1υ‚#*ΏηωΏ_ύΓΆΖσΫύ/¬τU_†^E[žΑI^½¨'&±;δΏ‘ώίΧw=‘l= o9x³ιΘϊ€κ :^ ©±p„¬|ΎJΓ,αF¦­ΌFJƒHΐ9’ˆ—?wΠhγωΏνώο}«Ρh1_&ρΕ’δόΩbΐ©όBΧΧΨc¬~όί“±žά±RΆ‘Θ»EDšβt»Ι§Ωvƒώ χ+cZFOΌΨ>}ߚ²2ςHR ]αΚ₯)₯hΡξΞ΄fMΛ|‹χΪƒ|ΖΣGN;ύ³Ρ£ι:Σα9sN9Ϊ^ωυ+™ΣZωΦ₯"Ό*―q‘:KΛ6 ‡Ÿ)L% †O!n›²έΘώxΌ c˜ZΈήD|Να0%Œ…‘qr€$Τγސ₯‚Ÿ+^B }Τߞώ'UKνŠd«e”ψό…σΏ¨ΆΡ!!LŒϋWω―!ρ§W)‰Y! E°Β£’[;ψ‰±6ΐΧIΚ—$y€Υ½·²5 £7Kό»λ~•€J(YΞ>ύτOŸί‘¬€»θGέJa p!’0 ς5›Όb έ"&e?%αƒθ5zυΊbς›ηŸΑ WΌŸ¬΄>£°A…ΘΚΰ$ΡͺΩ=δΨAη.ρί#ώΏ-ώ/½θEΨ?­!έŸo:%Nλ£ώχ6ψ’Ίυ˜Ÿmœ‡λV›Ξr―ΥςRjU”Dl·‹JSΆ(β ?§όˆ,’n`ΰŽ˜Y ™Κ`π!R 6ŸΌΚ‰ψκ#tΨQ{@v™ξ6y2tJ―Π+@ΐ«φψσFδSΎbϋ€§gτΪΙ@|²,‰‰ςYΏ%qb‰ςšΙ{3&οΥΩ„^‘ͺ‚ΗZτUυΔƒπ³­€—Π]ι}φΏξ{ΣυvώΧΡώwωΆΌπΛλBϊwT~šCŸ€ŽG¦Όή‘hΔ(ΟΞ C >+OkΈ€\”YύΏo»Υ, Wrό(ιbΟkϋ―[cύ·Ϋγήd󗬃n&eD| ;:n‡ΓΙΙ[N%†χ±F§ξ]0:Ϊ<`Ξʨڈ|V„ΰ±re•ΰA…U«••4Qφ9Ίο|€ψJŽŸ—Σέ΄Ζϊοwάk€ύLMΥjϋΫ£ώχςί>–Ό°z¨s=άD*RΚWΉ$ΛXπS&βH];ΑYIΨNυΣε«ψruΞ-T7ΈV ¦'ηš&q/•Δχ$óߍΜNOϋ>F§«y/“ΣΥX28}Ή hΉ£±θΥι7ώΏcό‡£ωŸo»eό;αβ ϋh†¦·ύμΡητΏΟ7M²Μηd=1ΑϋΠνΊ_0yΏ[H Z8ΧΌ‰ͺnΓ9_o0=ή0ΰη*”Zγn'β_°ηnϊπ8Ԉ'SJ€‚†"ι&ŸβχZ™=π ώ Tλίν”οxΔί+―Χ»‡7π>kO5ΜŸv$β7Δwπ²Gίο‰τΔΟ`NΡKmΡ[ύξBeό†‰tτUτΆ%εΐŠ ϋίSΊ"qΜ£ύύ^– ’³/ύΜKfΠ?’)ξιŏπ?Οfn•ψψ%™ŒsψŒΞ€€½fϋvŽφοŽ/g. 
ΆκŒώ?wέYών*€Sϋo:-ϊXrΤߜώ/dq²`ΕEtκY₯$3~)RΣ–)³•&²' γιΘ.KEq/Lν3-|’NTοΦ;βV©πœΝ§eνφι&Q43֊T†§Šƒ›]έ fηTν:Μe¦έͺdjΆίuΦꬓ½ϋ΅…˜η©—Ϋtυοό„€lšC/ΩΙej ΟxΔi$Ά6Ιύݎφ§Ζ)`Ζι~qαος“Γ옦ΐ‚ ’Ζ(‹»ξW©pWΖ,Q'ΎζΆωp}r'Ί|ΎDύΰ#―u‚Ύj,gωPθR”Xνg“%9Ω7Ÿ΄>ΤδΫΦ‘ΕOƒ΅]χ|Ÿ‘uβΪΈBPμ>όβQσΰβMqHφς΄ΰΟ:όγύ£ ΐ‰Ÿΐί"9ό‹ΕœρΨό·Ά’’¬a±'άέΕ„;2«ύ4$uY¦O^mΆ~Š?ι‘X[kuL‘2ς{ώοΆόΏeρ·έ΄χνkΔ2Μޟ€€ xώΌ$ΰ'λ?hmώίλ?έƒoδσ|ΌΈ•ςMΊΧvsjCκ?τϋK]Λ1=ΛGχο۟gX.Α"&x<7Ά­απ‘Ή¬Μ<ί§χ›Wλ3_@NϊYMϋ|{Œ»ώG!&l.#±fb{ΨόiU$RƒU]·›δ ‘<Χ!εی#45•ψF‡ θ™₯CΏ ±ς]'©ςš­ρί¦'B?©V"5P™σΟω„ΰ|ό›’υ_nLWaΚpΠσ’φχ:Lnβ«›VGώGcΤίΟh°œ†AӺށ ΓD˜iΫu›6νdωΣ)νw,ˆEC‹o΄MϊYDόŸ:Τρ­*³BχΫ 3jWEƒ0Κ%j{u»mž0¨™…iλ»f]cY|D*,]€€œ’œ3 L;ΨͺW‡3RΎε)]qευšΏ‘π;?K:‡},“4,h¨πξX₯Iœl³Ά?ή·αr•?qό[‰“xκG@Θ±Ν7PB±fξΦ q9y΅JΦ|cξΡηΙC˜Lε‹“WaL£-γε΄ œƒZΡoΏωόΛ?π%^J,ΐΟτٟΏώςΫΏ|]όF²τ‡?Μ0ͺΠΎ{>FN> Δδρ’:·–Φ΄Β ΰΈ.«οPŸ[ pdη,y>›ΐϋΌ^$8ΨN‚-]e!ωηδχ“Yέ£E>˜ΎSŠΩΞP&Μ=ΟξώΊΊυΌΓwΝnΠI΄uˆ-³ρζS³δ)kΎ ΛτΤόjs:=―Βmaˆ¨ρŠaχ2žγnm0©ιαA‘?ά‡KιΤ"―γ6—ϊ‘jNΠ»ωφ¨»νώwΈˆ])άΙόίzΛΣ1γώίZώ―'t©:ώΧs΄t˜KOι„ΨFζ'} Ος+μυ!lϋ=+ρHƒGe­žR²ΥIšlc¦όύό/ί)ω ™δ—‚mLρ…μx¨ΩΑσŽz†γη: ΐ.ΑΊ>β»νΏŒύ»8ikι5kΜ~ωΏΟ‰όά€]DΈημ ΨSψ#˜Šdϊιfs^πn9̏=hχΖη.υ_ΗόΟ·– ϋ/l»(Rrπμ2³ίΠύΧ4ΛjΫ΅qοΐ'1ε‹2υ£&Ud¨ύY”δA Υ'=°Pz·Έξh³ΙI₯pΚ—όέΠΤ‰ΐŒσ²Ρ³6Δ²Έχγ<ΗΔxπWiPΉΟ£©ΰΛ”€;AξςW>‹YŠξ_p»Tώ(k‡Ž0χ4¦½,h*0UΛρRœΫǐ? 
Υ¦μ\\2Ώ‘Σμ ’ςHχ)2WΛY%ςiήΘ{=GP˜ηαζ˜t(ΝGrϋΛΕ7φ1¬1ώλΆϋί‘Fϊ?CνЍωŸnCΫυzEΛΞtQ·κ΄«ϊ’4%»"«({0wΦ+Υl™'d»Ιͺ”pεΊ›{κFύmχΏ«VΙMό ³Χ4ΖϊχΒW­ήΡ½thD ˆ•2FΙe|Ρ!(κ=}ώΌMrήίι&MθtMhštF$ttΨΣ°ο°o€«”BwΫ’θLΩφ±oϋ&΄ Eq€E/6œs>²‹}Σ(‰ω4\o’Lφί1=ώ„>™F4 ’HμO°•α•θŒf8±Θaœο―χ ΏΔξ ;qsθ`hx‡κ 2Φδλ Κ*μΓ‘ΚM]μΔŞ'δ1 ˆTž‰ΑϊΕΚαΏϊΒ*/ΧΩR†§±mJ“8K’ς3˜¦ν/‡ΛŒ=Dd‡AŸΕ}Ρ~”­“­π>·NβÏ σςΩSΚ=’WΕΧδ΅!ί-Cm‘.+wΝΓ₯uΈ΄ΛK†JςG€k’—ΏPΝ/ΰ,MW$fΐO1{Τ3—ΛmΘ ŸΡ¨ΌΏ H–Χϋ›Ϋ φ―Ά§Έ:Š\𒳇8yŠΌ‹Ε“LkρπˆB|zS*ΞΛyΖιώ&²U[ΡqBB&ΞNβΏ-{ΔMίζIytx–ρψ±ςK—³’7ΈYYMy;έИ₯εΫMq7[Eπ‘ jN€εΓœή )~‘Ά?£+VF-—·ας= Šβη6~ˆŸͺžS•K* ―έ&/ώVU'+ψU6ˆ%’žWς"εΛβ γ τβΘ‚|=ΣΛN3=άs [!χqΪςJ ²εΪfΪE‡όίέόΏtkτ0τ²yΉό?ͺ ΟZωΖψ{ΨΆžsίV¬ˆIΠ =Ϊ•β ₯¦lvΎeΏxυ₯¬ϋξλ%Θ?φD³Γύσ?ŒφdAΌΆν%σ?8V›£ο=π5¬w…Y^Ρι7=Ε ΔPσλG€η“οyb6'ε+iφV YHY“zΑh€ Ȉη—’"eοu•‚Ρ)ΤƒsΦ„&Y+4$k4яΎ]θ':ή—bΤ~ ŸuώoAωΏΫβΖŸxϊ0›ϋΎM£Ω-ΣnΛζh½ώ‡MοK xΟ2ΊΓσA\·*νŸΓ’|KvοΡ#ξu Ώήδ+ΙYe[‘.˜γο•DοψωΏŸύWsFόaπ4Ώ ώΗΆ;τ?ζsό›> γ°`\Eϊ·υc™ωΗ{I7έ󄃿εaζRΣD(Ѝk*%…²ο5Ο #9ΉΐΗφ€mρ—’$φέ7άhυλR“αηΆϊ[ωΏγ²ώ{J²U*Ruaΐδ=pΣgyƒžΔV+§£«£Ο=τ?‡oDwmr ½ϊΒΊ{Ztpβ]FΗηζνp­Α„¦'›2fύΔκkβ§!ΈςΓ#ςDy½ΜΔΕπμsβ°-²zΈžsώ/₯ΰG3Gύοmρ!ίK?ΪMoΕϊ(έ/€΅ΗRS€£¬?κσέΟ…ψίΠΝ1Γ}τ2}/όό9Yˆ€—¬Ymϋ―₯›#ώΏΏ·"Ήώ³•·σηd*ЇΤΛΔζ“W9=>ͺ3«·KτM†*•O:μΓΈj?o„ψsΕχΙ™―ΦλΝ;/Yχs°­η‹”,IL”/Θϊ-‰“ǐ*―+EΪ7Λ5 £“άkώRΨ’ψ.υΏυQw§ύί+§+mžηω5lΝΆώΟ¬ΡώssϊK$Έό?y(βλΝ*DΓύΫ|U\7UqΌ-€!A~ς멃7ΰόίkΊ:ž›ξϋ„_Kx2‡έΞ‘υίξ‚O€Ϊ}†—TW&έvΘ8ďεHΜ”?°ψςϋοςύBy$ΐ‰ž›rO¨F εeηJ*ΐKόΏΰηxώο·ςΔί*“cvδχϊ??Μƒˆ,³½$ˆ’2‘¨ώVMπ$ h!BόV«’"Έ‡(HhΌ(%πxDΰόσΗόO£ηύφ_―FNβ£-™Ζ(} όΰ*ωίλh~V]nqώο’έγΏo»§Ϋ4Μws^ _Ν“ŒŠ|Φ”§yv)!8e±Mί±ώΧ]ψFύg@Γδύn!s‚‘΅ρšiΪώgΆ"z_0ήω…6έηϋu1N«p^λ§φδXΕi’όηǚΣβ’ikhΓΏF$Η\hΣlΓiΐςfΫ Zτ‰”UππoHeέM6%€ •0.γ]Ξ”‰j‡zŸTχdUK˜ŽœX+΄¨―yA¬;| εΣΛi‰™6l=ήρβ–=}ή?SOMyQw)–?Ϊ]]šί―ory;χΏΗ%NOεΉωζΗωg0©―ϊύ  ξ0ΎΗdώΔύT|±Ή'£Lύ«ΓW ΈΔί°F;ο±ΎXό―e΄σ?X£=δCήv½φEξφβ:ΫΕ‡« ¨:E]Θ}Ί( GΖ§οBV!εΧ§Ά_Hκ/Ζ)θεTŸK(χΙ³ιε/ΔeπœσΓψ_ΫRGύοmχ°mΔ3άαMΚƒΉˆρΌŠΰΙψ_§ΝQGύοπ―‡]%€œ•?σ|ΕSŠ$αuοτΉU# e IΏ"§κιό_A ΈΔۏη–ϋΏΟ‘¬7IΜγ<›—ΕGΰIφ’ωΫψίvŒqo―{VόΟσ#0Ι#?ςHZΡrqΡOZ°y²„Φ¬neGδ:Tρι§zς#Liύk:7Ξfςνχ_l9¦&Le@ϋ\HΩΩKζΧ £]YρψƒΆqθAίΣƒ±\ρ}i—Ώ¦Ι[Nsε ώΘ£dƒUԎΚ—p&•MΩθΒχ‘ŸηύΏ>„ύΏN€“φ»…-ΛνwΐkΎ^“Ύχ;ΰ<Φ$^F₯¦ς Seφωθ-? 
†χB/Qs’G‘Ά¨) Θg%oα•Ό·αςΞ†Λί$₯+iχ?κΆΠN“ψH°†Ž΄¨ž‰u8O~ΚΎVvƒƒ„PΗ,`aΔ{rθ’6ͺXξ~·†οHϊ°Ν”rΨAΌ^gβtΧ;Δ*qώ»3!ŒtωW‰Ÿ©ΌΔίFϋχ¦ uυωN*kžiϋD;κκφ(έ^Gβέ*yjΧf¨5 sž dq²ώNΕ›ΰDŸBξ<Ρ¦]ΐΎVœ~`E t<ψγ6}€9§ƒ7πΏ/χϊφh›'4‰ΰEα€§`]qQ€NΊε£ TE„/ τu ŸM^±„n­*~πcjGμΈηB§ΧBνmdLδύοΏόμ‹οΎœ­Ω7†Γμε`ί­t#ώŽοΗΕςXοΖϋώ??ΣuvΕΔοCιΏΦΚ§«£ύοτMb§Όλb)]μγΈ‘Ίb@Aιzχ “T½χCJΝ)±¨«₯±΄έ—―Ε\|―?›ˆΘπχέ6|Ο#BRμμώ‡σ?ΦΈΉό‡ν•θ „xšΓ|iOCoε΄lsŒΎ ώζ‡i# Δ*X`"δ€»];:2ΕΣ,“!£Απ&E–¦Ε:)“Dxp3x·³7&U%Œ5ΙιŠg{MΗώϋ›”#lnsΞ¦‡0ŚDΡ#§G*ΐ?& ’ΪߝœςΝ“EΆZσzd[y³·υ’LƒΥρ’tΩ¨|θ`uΤδ…Α?ΰ°»C-ΔFβ—φ₯¦]λσ~€ŠC­ˆΕΖlT轂ž+L[”0i«² …«ύσ†έͺ™ςΠξB·‰¬Δί“μd£bοšψώŸΆ:κξΎŠ|aϊΟΪρΆ>;Θ°~Δ‘@ρ`!+ ²ΪMX?κ-’u׊Ύi²s‘) zά€wv<Ή”ΆΌ7Ύ=(I d9ϋτΣlΣO?ύ,₯ΚP%K€φ7R%ΨΖTΞ$Μw"i#ΆHΆΉςΔΙƒƒ†E‰i³4AOF© ρ b¦½Έϋε σΡώ{wόΎη‹ΠƒxAUoι0ώ{Δ7Α— Ψ£¬}j@jΫϊQ(„΅Žόώc ¨_ΞωΏΉχXεΖϋVΚyAšΔ90yσl$ΩκωΊόΏM{ΤήK7{‹Š<²ά"Γ.€ξj ’KΆ AZX'ρίΝχo"ΧϋˆorJMΓπ KΣ\b3Γ fΐ "±˜κΎεh ΖνzC"”&ΖΜ<αŒpβ2χšv/ήw;0·Κ±DκΡ6ώŠο•qΜzγύoζ γΨ>Qj!ςΈΎXώw]kΧSΝ1ΛτWΚΫλ›Q…§‘ ύΒΞ σΏ;ͺ:ϊέXώΟ`6ΉτΙΊj πIϋΡΚ«;φθyό_Λ‰;Μα{€ŸwŸ`‡`ΫΚ$ΑrρHΔy­‹βή°LΔƒΣΖDωœ¬ύ4d˜2+‚ίovδ![‘Gα,γ·€#OΘχ΅ͺΦθUTρ9όΊ$š(šά‡φœ«Ζ€^ιhcώ‡ϋΰβP]‘œŒΡvύΗ1οˆo†Λ?Q&ίν”οxΔί+―Χ»‡7πΏ”hA rτ‹—aϊΟυHΐEυμΡώsγύoΩς]Δo`±:πΏ5ζΈώοςŽΘϋέBŠώE2½'=°PΈvλ²Δϋ«.ΛrΏK~,ωοΡάB–έΣo”,jgγmδμTΚ—όέΠ,ΏžΒ“9?‘ε·πmλψž•KWΐ”΅ιAY›Νb‡yαοL^‘4%»Βη]Τ 'πѐ Ο:E{Wφ–“~#ΏΓnϋ²|rιcέrγΗ~š<σ€φi"χφΆ0OΘ~«Ϊ0Vˆ’„.J„}Ήg9­fœ@̟ ‰Ώ]Β2₯œΠ:1ξgoΧΣj†ΌΖ9FzθξΙT’ϋIΑt€IΐUέ ΦκοEI^Ό©υΖ&`―°OϋŒ”"RΆ€Ψ8}fˆ" Νψ‘*:Cc^…©ΘΓu%ύv΄M·²½v¨Ίι‡,άΟ°Ό™ρeΝ£_tφ˜ΘDΏN½Γ– oŠyg'r΅m¨βΐ3DυΈ‡ςf;UΈ >eΚαŽ(Αžš­ŽpΡ—QβΧΏ»ΏΫnŸ―#‹?β7';k[šΓρwk²9…ΊBQκP]kύΡΊ·¬ϊ©ΘΚzΫυ‚nΆYcΘέρ3E@#~fΑΡν\[}eφΣυΌΨξφ€ ³£+%ZτΌM“8/΄lσώ6ŽΎ Ο;”\5{Η18©šοθχ}AΡΰμC,Ώ¦κƒž·$νx«Θ{§θ*i(ξM„Žζ‚Ϊ†8mρn‰·ζ1P‚EΆΛrθ^žT‘ΉOr;²*πxΔR:Ί š$½ΫΟρ|oHz†²w*ΐ»Yγ'θR`5‘[±€λσ}§Δ.+*TQΞα+%s1ίfωŽ$>| W ο^rΦrν[CEηqq«˜ε4zΈΝ§:ˆx•Ιnλκ•cΰŽzΙ”Ξ«Q?d·}Ήϊ4%9―ͺiwHΒ>2Φ“ςߝό?œΡώAμβεσ«Ά₯·ύΗύΏ‡ώ·’±ΏZαD=8ΐό[ŠD΅FΪβ#i$8Υa~˜Z›λ+δOΕuΧh|νIw€ab>Θ§˜Ζ_Β‚eΐ’B@3‘;XMΰ95fκ^deθωΏ½hϋ πΑƒΎdύΛθΐcύΧ{ΰΆͺqHž¦n°­Ϋ;ΟΚυ•&h.nmΈD[π’2sν]ΜˆΏΩ„ΗΒ"$ςΨ7Ÿ΄¦Τ)Œ7DίͺlΫ!ΎΦΣI% Ο;~š^—jŒE ŠoM±ώΛ]φΣΎ*0Ψξ9Ζ~Ύxώ/ΛΠΪρζθyϊίmGμ ω$ΑC―ͺΫP‡!νTŽk„ο ˜@ά‘A―ƒuλέΆ}9.d|±&ο’ˆEI§Ιy(·"« Ψ‹Ψ—V/Gψ™Jχ‰>jAϋ]ŠύΊ΅“J…ιP3AχϋgΩ 
Z]T™’—ΰ±bΝH–_’xώΏGώ—1׍χλ<»ύ΄Œ\ξ€―Σͺ’[ΪΈwσ’%‘ž™}YΛ(‹¦y2₯οήΥPpε~Ο›1N<¬½έxΦv…ZU^l‰1;—ύNΖ$^š-σ‡$ȟHΚ•ΏΐΎώΆ«ζλίΉ½ξlrœTΐ™ϊ@=…»ΟuK€]aŽωο-οϊΟ₯'νZ«ώ·eZ#ώΏƒόΧ!θ΅Ό΅Ξ¬Λ^‹ΟU°žιΕtBΕΨ‘S¬9²Ό„Φπ |˜Γ!ηΉΤΰώίωΏeŸνrAό· 0ξνρ―Εγ-cΫ‡@Ξ8“Kμ?Ž5ζΈ-ώγ<šG %Q‘CH¦‰—!›AψrψίiΧ±M{τ»9ώ?^ΣρήΓΓ½…rE—½‘°iΡ;*ΆT·ΡM>πh—mύœ,³fWή°Ψσ3#Π;τCε)‰C˜B¨Ό~ΏίψεοYΜσ8•Λ±IHσYš₯3ΞMρΘί£©cŞΈΟή§W³œ²;ω?œ1ώόΏO2n›ήγ…³}”1:~uξJb \yα1ΠrθΏ*>•›o³Jπ_=Ζ΅7€Ύ½M#©‹j 4IB>XϊhΛ/Ο?͘ΓΘ’¦ŸA>₯Iz"4²+Jχ‰€›K£ΜυΊ²¬θdο†ΗΣ"\K¨1γxπΝ~-Yaoi'Ζϊ6z—ξŒ|ΛAz AŽ‘3Oh ‹ ήύ»ρ9ZVΎHHΙ6œ†AHΜΤ(Ϋ㯎دZΝ²CCŒεK“§L^œ’΅θ^Θ‡ΕπΔό zBΖ«|όŒΒf«ε¬L ™€Λ}ŽiψLž’hŽΑΟσ<ά΄Qtƒτ›ζΩV₯cψZV KςΏš0β[²ώλAΐίει^Ω>;@ά]”id^₯œΒ‰ύΧU³)ι&ΐΈ·§½ώgΏ~bί’κm'½$‚ΛΕ’δΌ[*μA€bζv1Wήh;Ϋ%ο9Ιz»ͺHieΫ‘iΝ'φMtωs4ΝΆtΐP€`N6›¨ ŠΣrΕφε\b9vΰsΗΣuΏ59’8G€šγ―€α ’wΚΩΕ±ςA}šs²nfn°RΨ πF_–ˆYωW±<¨ό¦ͺ§-Ύv$υh'6σrτLϊœSΌ ώ»i£ώχΆϋ_ψν]πηzbMΛjΕ–6ς7αΊό?ε=,>EΌδ‘υCοΞdSf>“σηΊxΆ­žνRσ˜Έζ :ͺ‡€κέYν* kw>»πWyCG%V-£Ž£εlϊœœuQ1›ˆΔ|šΟR¨Σ‘0t²‚ŸΛE°Ξχ¬ιΛ oΊ›¦Ye·|Œα,X“‚ψWβ)»χH<©νέΏ#‚/›)ъP̎ςμ'ƒfƒLΝ0™κ@ΫOOΣlΣ•E­ΞρΨm₯ :"}ΙξXρThUeθ ў–Ηγ…₯«qoοΕηρ±"€M%ρ2*S*bΚΑk&Β’ϊEJ6…ΗΘ?&P(ΰ…$‚ ™³φι₯ Όζ+i«δIέΏ&Usu΅eržο6|ŠΡ]π(„SEv‹(Μrq`³ύΗۏ }υυZσύ“l—Mj~…€t72y#N ά°(_’χΛτ…R—ZΣ9E₯νς!τrΨa όJλ΄ο?ΆLJ«€kΐΌΥ»xZΆP¨.ελg½ͺΰ―#ΗΚίaq^/Ÿή„qΎ2H$uΑ₯xsΪ 3ελΏώM!”rX%’s¦θ_(rΐ°€(ζΐ*Š–ε“σ ©•xυEΊ+ot`Ε+±δωlBƒεοHΌϋό½HpUώ9σ‘&!ϋ'¬:μέοΆqψξ%NςF³5‘IφΟΙοαΟdv4=ϋΛ~nV‚ρPcŠΣXξο4η„ζilcΆWι [ΌΈπ·Y…4;δ–Hό·}K0δ{3Ρm2Μ°YiήΡI=Όϊ_WΎ²PκυA ӊz΄,9 {r°―ͺ³‘Ώ|=·Ξ,51΄ΞΔ³ Y Υ]SθΦΦ]Ε~ΫZ«G³ω’Ϋ―(œGWͺφ ŠίΠΑΝ}.ύši\‡γ΄9Θ&‡Υκ­Ν‚]š&b³P"Λ.ΛωΛ“`xxΈŒ§¨ͺΙJ}¬ΪΠ”\E‡-αTμǩΦσ_ύΏSώΫύ?﬑怗ηTΫ±;φδξΐ΅΄=m“χKΨ8ϊ‡cΖͺKσs4tŸ#;›­HΚ™βσό‰σX)*Έ…‘€ΈΏ+ΝQΏWΠ¨‰Q 0Ί\I‚+¨ ;λ3• WŠ?yώο…υρόί/7Ϋ«Šώƒτšν΄γ?Μ±ώλ]πAΪ―‹ο…/«M)mI[)uQ‘,Κ…¬_‘§’ ηŠ^o€Mτ™@πœΤTρpCΖΟ•^ή†£».±;†³Ίψ+ θ?ΏΈ!Έu}‘3D―— ~ρ~ζ@MΎ#ΡΘΫVyύπ›Vχ(‡EΟ$[m‰ςuš<ΒΞΌ~ ?ίΰ_Kq£T°ŸGάͺxγDb“ΡŸό—‚ο§ŽφίΫξ鏞}›M6/P ™8—ͺΝ§0Ά:΁,œΕœ”Μ¦ό§k–:ζΉ ύΏL/Š1€'ZασΘŐNͺL νdΫΒ›<…IKXχΫ3©ΘlZΫΣt&/+ Μ¬Γ;ΰ"Δ?Ξ‹‹„„L„ƒ'ΗuŒΜ.ΓϊΕ{g˜Ρξ•aŒIŠK?(―γΌΈHΉ(ίϊ$ω€†’ή@#Μ‡wώΟ‘ Δ Πxώο’kOž/žΪSmΙ–>ΦΉ‡όΧΆ6k»]EχMœσ5‚Ϋ<ŒΒ|§δ‰²ζ$Γ(5_£Nl›‘ˆΖ΅$λG©Μ7?Ξ?Ϋ`"ί±β5ςύηΩbΐ%ω΅±ώγχΏΘ~³ψOUΨϊšρŸ£ύοψΏΏͺWΎΒ„F>z%τΉθte8+eΖ‹f…@½Ή+…§ͺw;RΘ‹L(0η\”'ΓΌUB'c‘ 
•€%Σ£zΉ?‡Dω–μήcΚ”Χ1όz“―x$oτλΫJϊUd•x!Ϊ‘σΧψkδξ‡—˜ μzTΰ$oλmΟ1ϋνρ_.€sσυβρΞT:ΗΣζ¬CΊ"/±λσRΊ φJ‡†PΕΙ”’8‰C*όž§ψ½¦> ƒ ΜJ:8iœψ Ϋ½©$?οTbƒ‘ κπσ„ΐKθΏͺςίmχΊ5ίΟΨΗnϋ«Ζθqϊ”€ ―~£όΈβ€ύ±κ9ΖΌL$Ι' ΕΌkΩ'ΚΣ*€+ε‰+2k›ˆŒΑ¨‚JΆΝ4BƒΏ"½―²™μ,L•<%qΒέ#WͺLΌήΜ+(ΎΖ4Ϊ2( qaŒvПx#ΑfJΜαyž(>W01 όXρ”ΟŠObήbα^Πκϋ~BcΠ[Γϋ$e‹ Iσ šRός/zVE[‘MόΧμ՚―}.Iη+E™Ό•"QŠΔslž­“$[M>©<Γk7žHΆžΓbΖ< ƒβQŒH部Ύ1GΗq£¨ΐ"žfœnΣ†Ά&1&χ›gY4O2…€Ÿ)OσLΆ+kwˆΣ\5J€ž’l…Gi^Δ·/Φa²νz³x‚/πt…ρφέ±·€S€‘NΣ*›©Y ΅asΑ”Τυ:›A3_Ύ+€A‘²&;άt„@xΥώnηΚS6Η64ΈΓ4L)6ϊKYy¦„ϊ2Ύ  ¦ŒϋžJˆœ½βr8T†ηsJΆptΒό·R Mœ#]‘0V=ΕYˈ*XŽ(•€Β-πU΅E€γT€£"1J€•ΰΪ¨ 3“0’>&Εͺ6€ΉΡϋa”Ή8ζΕ’Θ#/– yŠA ”)Y‚Oa<–CτWYτΠΑ= ΰΔΈ€?Ι±!μΔ‘„ ^ŠvννΔ“ήΒ,Ÿˆώ`P€ˆ˜Ψ‘Ά{7cj‡ν‹Ε[s’ωzcŠαS)[ΏN·XPαϋ‰ΝͺŠτqΤ„,uό @ΐ\œ”†²9θΝfWmlίΏΗ]= “Λσ}:,Τ#“K%Α€l¨³XφΤ7Ι ΧΣ²’’R_ΔΣa¦ΐ)y"¬!όώΆ Lσ2ζ·―φ_”y@Δ1ΕΚ !&„’ŸΩ΅ Ή’lS₯H«‘αNΟDΡJ²Ι‹MΝx.ΚA!ϊ†1 –‡yδ€»a€¨ό ‰œ64έnpVίύεΏίν‡_|ωΥgϋφΗΕWί~φυ2(~γIo}γ ΞΒ.ΐΑUΓ€θkX$ΫLIΰ½±F.ΡMΒΞ@z’γŸD‰XΦ’ΧW΄Wi!~I±.Κ“ύ5γώv9%œNœEvHΥKΥ*ρ« α¬θ>εŒP„ήψDG·ςA«ίί(ΩδαnIrέΐ”Ε1Δ3HW ATτ*Rί§Μ•,ίEΈŽΥΈΨœs˜«λžm«Δ1=ζsSσˆεp’j,π·LΛΐ/vς:'Ύ½ΞΒu;+8'σΞfΠZŽFhBtP•Y¦―N ΓΑΰ˜Ι-Ν§jΰƒ˜Γ-ξZ:ŽGπΖ€°"`ˆKPΟ”M€O|m½Γ#‰Ξ΄p`BύY"jι•ή²Ϋh ² ˆ™} _θ˜’Θk„]η‰ό<Μ²-P[sΌWo³ύφ―‰(œ‹‘Mawq*Y”δk²9E䆧%ϊ9mξ_6ή֝ν9ς)r\0šcœ˜–›H@/‚©G€r¨3€›±=Η²-έtίfpbt/ΰΊΕΈgj†κkfš“£CΑno6Žί(ف‹KΏ@£}(Ππ|³ΝgΚ߁%$©pDRΫ1(Ί} 7 )ύ”πe¨ν™M Μ»Q΄€ανΗ·EΩ8>€]E:"ufςOŸπΗ'Κ² ψp€ΐοΫι…T*X Α].Q‹R’ΘV•šMx«ψΨ'β"†}Žmγία3Σ΄Y{RkΔψ½‚§]{GN₯L–qδνj³+κ[›ϊŸ(‘7ΥŽc™j[£φ[ό)Έa”k*ε Γ"ιΎΏ“‚!ž oςςψ„BωQH koo–όx9«X ϋ½β§½Aα›8ˆ€Έ%ΝΤ&pΆ+e ;PΡ.εΛ0ΛΣέ9NΔ‰š…ΙCφήM^ΙΪ [‘0ΰ¦αRΫ0-ΧQm¦γ™¦ξ˜†O·©λκ–mz.·MCsT7°Y@-]%Ίmi3&έ3 ,j$fΡfΒ*vέΑ뢎*q“ΊŽ―qUσ™Α<7p™Ε<βΤχ] x Κ™ojA@©Ο9 ,Ο †πΎΑcφΖfY ωΉκΰΓ Lβ2˜4Λs5Ψ $Ι βZΎ ,·τΐpˆα1ΓdDZeωY'šέ?ψ?,€9rv―<  ˜ΕΫr#¦8¦ΞlξyψMΧ΅\Υ‡yιp}n@Mγ”ΫπC²Μƒ]Q, νΡdΝΧt•JαΎsв~zž ΞΉιβ^y’εΩ–oΓ†Σ™BυΣ —;kΟ%J ΞU¦η`ψΎPZε2@ΨzοN¦±le1dΖοkNƒyœ©ž―Ή†mq¦{stΑφpfygF·ƒ@g:Η€m₯”ΐΡ|=°νΞ½jξΕ'pΗε€πgΕuJώN–Ηφv·JžZ˜ΓΌφbhAμψΜυT`ύlΥ#†οi>›ΐYT-Mwaσ,Οt½ΐ£X,ŠMl­oO7›EY`³gcEΨφB’#Xžj]ΧΓr)“ /― σ:ώΨ;ˆ_ΆέΏΎec7\9Γ‘ 3A"χu‹XΔγuM—«–a›ͺahΤAΔ¬ΩpŒΑ9 B€0ΰά%N7j8 “-jυτN6υΓ₯Β<]{²ˆ 5ΔΗΰJ%œΑΡ—ph“₯κ€Φ9ΠSW5YΨά΅= P‡gڞΦ9Ω’DΐaG&š’]Κ›œζοΏκ<‰ ΈάΡ,]ΰ}pΎΐ ·ˆŠ5dyΐ@ 
§”Z†E=W%†Ίœͺ­2fIΩoόœ6Ζo]cΠ ίΦjƒ€ηr“;₯[†ψ–ψΡV6Μv,ΝƒΩ"4@$δκZ*@ηyγΏώ‘ςΉΙ¨κhp3 Γϊ.H X ΔΓI€Μ›fPΖUέ2-€<Υ°€Kΰ‡X'œ:E-fΘ0fκ.ά«˜™z˜Φ(2ˆκΣf횁 œœŒžε‡45πMΗ ͺιGΗAhοΖΒ°DΠ€!DTυθ\ϋθBΩ‡YΒ€Ί&ψΖ<βχsΩΙ4Νώ_ΤΗqΣ LG%–©y†es€ͺ–Γ)pz ˜ΐafhΓΞ)† Ν£=FG΄c“E•Άfj^ϋĹ̐;7LΓ±ˆγΑ„pίWMώ4€©ŽΜ0Χ’u΅¨#šΡ ±Οk1_αw-)Β.>2qQΔ½ΕΩ_†m30m`5F‰ιNψRažiΤ₯†mΆaΩ(™X ­θ.£ΐ]8ΐ]τφς‹΅τ­»Ά|b»šο9šn€T8Ά―«%πŸ w|ΧΡ\κΑρη°³.³ΚΧgVs- ƒήYδ»Η™Wηu}ΓσΉζΒΊ2 PŠFΣπ=Οt3@°βž‘z:υα±5314ζόG'Τ­xΊσ)ρΓκ!™j€³μΆ FΉίu0ƒΧ>n Hοϊ*ΥMǁΙλF€eέA†7@,ζ¦ΚAš΄cDwpˆ d:°QΗΔΛδ·'¦Έ(’7i˜ sFs›«k/(§€74jyΜUυ`,uΝƒά;H˜ΜΤlͺ‚ CΉ(…ƒhώαδ9[ίΝ‰Λi(Έ;š*6˜ωΫ¬Ύε%SŽώ0Γa άΠθ[HKαέ0/;™κ‚²pA¨†Ω&טO€2ϊH-tΣΤV- L33΄a·„'I"N²{—?"cΚkQ  «&±3•kF·>χ₯€ˆ‚ΜTΈ€ˆίΑVzύβ’šΈ&ΘNu―9_―ΙFjkCή “ςί).8›nBΜUήΫ²ύυSMnPκ™ρFž<ύΆ†y<χ7¦Εδ˜ΏΠΝ³ρSι›Τ">κΏ½›Ύ‡ιύξN}4GΖ¬oW‹[SΊΩn’$ͺmkcαO-gα ΦlΧ `7Bkzmυ'θΨή –ΗUVΫ:²ΗpbΣέT8m RPϋβœ8*+|>lX ¦rSw›λžΛ`[)7`aσΈΟ¨pΞρ\Ν&°ρ '«ε9ύ[^pZuόb\b} ΐέ!ζ:ΊξP` αlκ·΅uΜM 4Ή&u,O₯ͺG‚ΐ€Ν4rοEš¦·5΄€φΥαYgΔpΠ-Κ§ͺΫΨ%ΆΛͺ9ΜO 45†ΞUΟuΈ”Jsu•ΫΔ·tΓ@xψ»DΆ‡sΠΜ4!π{O¬±„)¦’GoΒΕΣ*Œ$ροΧvΐRπw‰Κr:-,τ*z:*qu«Ν5kΆjXœΐ&¬©η€dn.ρ–έs Νu‡θ„‘yΣGba–Ηxί:O­’A£dΪ u/π|›h"?Υ1T ΤωxΎa¦S#œYγΉj@<π«α۞幁ί{Β 8ͺqύκD—ααzΐ]8Α@vL‹ΑuxΙ΄H»0Σρ,†jq›X&AŽΝlORubό )[e«pέbΣ―Χ·¨ΑlέTέU95` bΤ’6ΥTہ“μ›ΤΤ‘ΞM•q¦y±mΣσαλτ>‰Ίrδθy›iΎΆΞΓΣWs5β0ΣTt9!qmΧŠ Ι·T‰γOΓΥέvaή–§> σ£:dTΥ£“μ΄Άwu˜€.εt‘AΌΕS!Ό,·!Ϋ8"ŠŸb΅q8‹N±ZοežσR.Ψ―6š&z>ˆ{L[†jΛe φρQ²δ)F Δ`0±Ζh›qθκ̝Y=z’2ώϋ¨υψ„½Ψ€ιΎt0n’ οΪ<‡kψŽOˆ―qΫΡ x% Ί&ˆ~ͺ2ΫΧ ΐ&@ΏK¨b §sn3― ,H·ΐ]ΪF?yu0Τ)’ΪΙΕΕZάqΎ0Κ>ΆIΛ§F»Ίυ@eΐkΐ’x*Wf ‚έe†ξZšƒ‡˜Sj»&U]ώsTΩA ΩˆΪ<Γφ<292ψ©ΏEχ–™ςΪ”ΈAΐ±… x§ΫΧTΨ4 šͺκΔ 03Β4ˆ’j¨ͺiΨ6Θ}NΐM@cέ8JΞ`δ©„]π¦",h³”χ°Βj ¨„NΕ–ŸΨΤ}―ΝΝ΅νA€νδΜχU΅ΰ°@·  Œ^k> κΐ‚K8HSx0·EΗ¬^9wΏ }ΣL’‡5Ihϋˆ†§Oͺ^*,Yr’>ρ`=Œ±z½ΐˆ#R ½m:Θ‘E†Δ€XG@ŠŒ^\ͺσ]β8ˆŽ©j λj6·tcΰ8lΝUM`:T‹†ΈϋΞΊ“Οq˜ξχPί^€ώ6ρa―“²Rτ±ΥXoH”Τ–Α5₯―Λ„PΛγ T DV Sǟη–C9,Θ€Μ΅\fϊ–¦{Ίa«z,€κϊͺκΩF―2μΙ/­{δ†oQβ£δz.RfjYΨNΛ4­ °|ΗΣ\Οσ}ΝW Px+8ΏšiΜ92‰ƒκά»yu ‚7ŒŽZΥ3-Σσ(ό‚τΑ°l‡†λΪΐλkh:±|Σr-†φx›ΨšzdYΛJ‘]aWν€ ˆΞ‡ϊ\³lUγh8Α€‰Ε} {6σmί²©́«!@A zRαw0!ύ4|’Φυ©97)‚¨F`θPΎn:2•«Lh©ΔιHΎΛ}ξ »,Ae PJ#π‰αφμιό‘jqΦ€4νκGΞ+α–ζΉΤσΉΕ W₯ΊfqΝgŽAA΄α`8ƒω¦ψΐχ=ΰ]lΨ=;‡Œ‚Δ΄ œ¬ΐR烝%{‰J ,3‰φ‘Eρ’W§₯UρU‘΄₯ΐ·―ν;mG§Ί Η8LFΡσΜα@λ `Ξ€»ΣA”e 
Τ @ΐu4MtώlKΖΈaφƒEI\;*:t‘FV Ν~›•κ§O{~ΰ`―–H¨Js³­1SΟΰͺ[ϊπΨ½΅ΩŠθ½ŒDS`; ’f«Ζ*Ύ£φάkpŒ&~`Αс;t=‚°BQ;γ iΊDL`9Π_ |‡ζ˜pN58ŸDLwΊΟΟΡy ]@λ$jמΰA =ι©gϋ:qMψΠ ₯ͺ΁nεb~ ™άφΨ4Q‹n™ΤΡhˆ>²δΚ°αK΅Ζ•A‚Ϊ3ΐu0Θ,T΅5.48ΈάrA 6Ρ· ίδη 9>ΣaΓΤ^“Œ~A’d;©ηΥ=΄…qΒΡ<˜5-€0€ P½ζ©¦iΈ ]y9Π*•Δyv‰PUj‰}~j‡@νζΌΖωvό΅0Ξ φώΝΰ¨πsΑΡΆ‚ψΒ<|*₯#~Tς<Ω•N£#Ν38$½Sξ8„Τ·&ί;γ4‰“6xmUν#dΗ‘Ί.Υ Θ „PˆYž χ#πLΫ’ΐθΜ€@ζ \Έ0aξ}~ΐ΄€¦’·<V]aU qBIWΫψaΧavЏy|ΐ"œ˜6PΥΞΣ<ΗwuΣP}OΧ@"DPΔMJΗΧ,MΥ ί DU]Uλεφvͺ¦έαϊρTgΒr¨»πΏξ:ιΊŠ:3pΓ L•Έ†ψ›{πΝ†°A«ŽΣΝϋ/£ΔoΉt τϊ†©oj³ΦΡΕύκζ" d¨Έt-ΰ™MΥQA<Ά,]T†ζ!ψW£@ε< :P  γͺ£[Hv Ύ©ͺ>ΐF‘Γ ΦΣ“3Sόz—?₯Dz°lγ-ΘΣ§εGΜζΣ†΄ύΓ―­γΎ­kͺo¨πe σ˜°εšΞ-Η#A˜Ž‘·f€ΰΑ@`h‰Υ­£Gδ>œ{Ά!ρTζοκΠϊ]Ÿ–ακŽΚ „sΐ_–JM_υ ΧtM n»*てRΓ w©c»άœF]ΥrwΫAq‹i<σΠ]•¦„—™tLSw 7'P-Ψ) Ε¦Λ½ΐΤl4š₯<Π5]ΨYΈx6ΣΖύ  ΎΩΝ΅žΔΑMM~x―}’]d&tƒpΖ5ua:WAŒ75y€j1Ϋj`λN΄3¦U&Σ|Λ$ύ T’<„Όν”tu<μΊΆjͺЬΆaΨL'Ί‰Sx‹©Œ‡YΩΊ«Ξg‘Δu@―ΟΊ^A1{·h³‹x–½Έ’ΥJ¬½Ε\ΫσuΤFΊγςμάΗ`m}M·A¦πΣ!ΊνκρsŽέξݚΊ?a}ήυύβπΩ>&π±Δ54Λb”"¦‚aϋši<°™…gΖ€œ«Ύ› ^ΈπH¨­χΨfk“5ΤυV}ϊξΥ΅νœθ:œ`ˆιpSU ΓG— ;0€ΓwMNUhβzVXͺ₯ƒœ2/²‹πXσνβθα*€-ƒφ 8'ΨŽΰΐΞwA6Ρqk du•rW₯:U \ρ}šλ =!sνΛά\k“œΚΚΒχΓe\Ή3`ίύΌ΄djΫΕ``όAΤ·Έ= X'Ηυ ΜΤTΰ}`Š|‹ΨΔ‘Π€‹θΤQ^ϋα©»he΅ͺsΜή ΠSΟcŽI€Η‘ αDs—ΐ"ΗΆ|Σ΅8 s ,žμ“ξPΈι›€ΗΏε$Τ\Ό*2β©+7Γ΅•¨@WΉa˜ŽΟm’λl£sͺξ2ΰΖp1gCΰ;ͺΟ`1l H4ζ«Ύ<$9αc=dŽR)zQΜh‰pπφ—½‘£&η†―£‡œξι γΝlΥΐ §@g05¦μ7CžEκyd-²1αΪό€’MQ–eC˜O`Μv˜p†FGtk8Ψ­°λ†¦{jχΔQLύcΎ;8·)}qfX`έV5 ΈŠΗΔέπΐ«IςΡlΰ-β8N@,ˆ,“ΐˆ<β™=ۚcΔφA>π–Σ΅3,2©7ۚ€ί‘™Φ~3<%[§υΌœ Fδ»!“.γ%>‚I—ϋΧ2]σœτ<ΚV$ε¬ρpΏ„•0ΎΆ‘fΘπϊ"ρπ4βο@φŒ•BHΎόrtΑ [ΑΛRtσΛX˜s*fQΜ₯Ljl_<Ψ3·ˆ:ϋΘV¨Av†π;Ιy*|˜ͺυξ%x+œ¬H™~Œ!I©¬Pƒ\ΏDp€«i–pLA ΅‰θ$`Ύ¦z”f9 ŽλΎ¬Άij¨ς<Ÿwbλΐ§ΎFžαf~p7 +Η-―>λϊj›3έαL@³™mjςΈNtΝ€›ΤφlBΉΕΠ°¦™Ίεϊ–`h₯pΕ|sΘdϋέ(N.γ?oyΛiχκΙ€ttO5tΥ€–¦Ϊ˜½ΚՈα F‚nΐ:ΑΩΈME²'Ν3tΜ™ζλg'\ω&‘+Ej‰ΊWΓ)ž¬ΙΞηΣmΖa~ιςΈΧχ‚2-ΤΤ¨P–ςΗ|Ou™Eαόά½ΩrcΙ±,ϊΎΏB¦~Ό«œ‡‡kηω|Δ6Zdf$ *’`d ύυΧca ¦€Υ ΫΤ[’©HWNξΎ–<+[₯`]κ5<&MθεJk¬ω³{d;_y[jσ«»gψ »_Υ)Ÿδ ΉJΤ1­+½^ΎW1W­β°σŽ:Ξ'Ψ ινΦv„¦8]#ΝςλfκίΟrŒο γ6Ž£5$φ½―ψνλ’ΏΑ4ΧΕ+?ΎΣ²ύκ¬ζι#C–r•5Q°ŽM΅B²I6›œ€βΗjB΄άU_’1Φ8+-ΪΗώ[ Σ™‰ά›· τωyΓQ}η»—iǚc fŠ£±Φ§|τU²/)5ιΠƒϋRMξa#©’‘χΠfεΎΜέZ¦†­suΜη.ˆΒτ₯¦.GΫ)–ΑfΈ)ς²sp•₯8ΘαK‘€ 
ΊNΑ˜’°·L«ΕαF2œΓ-ρKcΣΣYΨ—hΙ4Ν&'₯@*•l9'.ΑY΄±ͺyΈ,γΩ)ΰγ„―ŠMΠ”~ΑϊL0ir{Ρ’©0ΏRΣa,:1VΌ% €i.‡PΆε§^Ή0Œ†Τ!dƒέΚmφβΆ‘―V4{tœ§oη&αLΞ.wŒ§»LŽ5•œ‹―z&*0˜šHΒΕΩPρΣ€uΨ΄>BΘΊyώΩFc{h{y[ΒsQ’YCύmΜhύžΧ/ύJΫφAp‘±ξδ!ŽΘΤSΩ°e"Β$ύS 'Ο)†T-S’ͺEνsMpڌ¨ΚSc²p;T¨&ΐ@=R©p#=ίPL|0jΗΘ–Α1D­%‚9¨ΓΡ!· ΟΠ£VR‹„ρΫh΅caFiŸ[걁LXΗΨaκ₯΄Ά&W5| 5£SΓω =T‚0θ1ώΚΚg& Ϋ kbsΐRγ9R2ΈΟKtix;*¨{“mIπΪ…ZΰάSdΫ9τΦ’Œn*IΥg‘ž¬&εΨρ­π΄ΐΥ₯Ώ8Μz*ό;ύ"BlŠ(»4e$<κΑ;!Hή* nœΊ’RBπΊ€kl’β[ψ*†Α\Nτ&©ώΎ ― θŽskCΝψϋΉρq–o„κμiAo³ϊόzžγc[‘Έc ”ζ†ςΎvoΛωλ?l:s―ΜώλϋΣIΆΝώΒΜŸ³c¨Νμ=b3“ƒ3rRtRKΉ@δ±ΙΌΥΏέϊΖ GjΦΦƒΩ}5λ}~2;+~ώv}ZN3Νw°3Δ­5Χ ³#oͺσ,LΎγπ!š`­;ΫΪ²φ’Ρškš|τ’GΰυPd9LΑΧΗΩZbo|°Ν6iΔ;½žž‡{“¦s`ιΐŠr @Jμ.€’ΰ"U.‚δ*PΉ-sq'ά€ YΥλΖζLeξυκΎFΛ§3ŚӫЍ&J(A ŽΔϋ ΝΎΐΛΉx6Β' ηaΰT]NmΤ³Οΐ»"žHώΌCΩ<ύÎΡoϋΒ€ŽϊαŒ›ΉΓΠ…Ž>(bιQiœΰ,u1-U IBaιm…νuήJA~LΖ΄dUι\.ςtΝρςΠΆuΥθ"H<œ·»Ο<Πβ8ͺ.ΐ cŒΤh*d!.τΐT’g’\oθΒkαrσ:ŒΨκΆΝpσΨW_žJΥέtu8Frβ_1ΖΤ(DΊ,=-ΆB«NΒ*εuNΑBŠΔΰΔ6ΧΫ’ΘλέμήhΆmK9.П|Τ–ΩΨl«Γw&7ΑŒ‘;…k1$ΐq’hMh–κ|«Ό”°w&4?ΦW1‡m“ί¦"I D-šΧg₯‹βP€’Br„₯²9Xλ °―’¦Aδ.ΦG s’Ξ—Ϋeπ…Œβ2<άWͺžmΠΟmέΌ{Όϋ"Ξ³Τςμ ώΪ5Ά υ ^m ΩΆτίζΤήλΫ‘‹Ύδoψ;ΫΏύ}~Ι qά[NΔΓΕ$¬€v§/₯D/ωΛ ©_a#ψ\xh‘ˆ΄2Qj-Vε " )Iσ7ƒΗΣ?Ÿ<5o}υ-v~ψσ…EzΏnmžγωLΌ•…Σvβ%φŽƒΖŒ· YΊ±Ι†BQw„š™-UOrΌDφ$(Δf*5B¨]j·-ΥiW`MX{oJ ΰZ_k‰ΐς &’—l]Œ!0΅ƒ/ k>™ρ>'˜ϋξ] Vΰώ~X_ιΉ»Τsά揧5hΣηπ°4 Φ;EopŠzŽΪκZ‘Ϋ XQœ1‘PσΤKΗ3°νΡ(xCε²wm€γαF’‘6?ZBsΟ­-±Τ•ϋŠπ?ˆ:™°±Δ"ε“62\Z Ω5_±_΅dŒΔ΅χΘΫ¬g{+!>ΖrΕlίqΌi§φ| m±ϋgΜVόr`VWιjb—ΎΗlˆ3l& •ζUtΟ"]ή΄Kn‹οπ-«Ϋ‚”—Υμμ­wš<­NήEljΈqœΖ$%°°P‘²Υ‘*†›τ₯γŒΪj@Aαξ…Ÿœι•?ήηυλμ Ώέ† κή<&$ΫΪcΘ"ι†Ψ†Ή’Α°ο}-cΣB—ΰ[`!!Ξ]WΕ5_DΓ…<œϋEΒΉϋ?ζΤΧΣ>€Œ-,‚q‡ρ#š#Sΰ–chπρ\ay­‰KEZ+`pœς2Ζκ³—·8>“7V֚L¬a:37‘ζ*ΒUΩ*’DMVˆ.”(¦©δ] I‰΄eΦ^ΨμΞŠu’eleΎ/Ε‘ž΄qN~ k³Ψ“B>hΰΛ-ZQ τΒΣ'ΔεΑCυ{‘›f \iUzT(ψσαρθmα-̘W€+cˆ'TTaz.­˜šλ½L ›TzΥ1‡ožζr0©(z =·UkθJuŠ$5:o§―<;Γ+=}5 d2,‘“Glοs5"§ξ3ζ ΫdS)Z˜slM°_H_c\]]ϊΚΣ­,{"Ξ ,5|£φΞφγgPpFiϊ.cηΰͺβPμ_u0!ΔΝ έ $;₯u‚;$Jͺ%₯žHSΕIαΈ_™§ΩJ8ςŸα…Η†€'§]Φ•E—h§ηRlυ‘κΚ5xuK^ )œΣBsτ$o<]ÞxΝIlh&'Œl’»€0ί;Ϋ B^ψ)»LC¨Ψ+βΒLd]²=V‡aθXRklƒ“[ΌΡ]Y–‹οξ{Ÿ8œιYmC‡d[ΕΔΆLk™%]Ρ£χžl2ΚxΚλμB2 NΊλ„%Xͺήb]R ?+Έχ‰+ΪΘUΌOξΚ΅˜½©!UΙ:γ;vkwΥΉ2c]¦jβeόλγ5ν…kχU}ώƒ›Θ πΫέop`\ρψ ;UrIή:E†rΦδ\Mœ€Ρc§ΤΊ'‘΅jΓQΆœώ„ΏiNR2fςϋ6i<ΊeΦ 
'>³%ΟEK­mHχήKΨΟB¦X<Φv¨”ρ&`ΜκΦRόmƒΧaεΠόeώηΓβΗό’CέU!έ½J”‚ΑΥ% δOBThο$G‚ :ϊπ›½νŒ™L½y]”οo AΗv=ŒρύTνCO?ΐ {Θ6bxΞy)ΑΒβ²Ε-¨heEίKΔ œ€„ΝΥ½'ιόΉ\?ό¬ΌΏ΄ νζ•ΧΏ·ΕŽΖkxΎy»6 Ϋ:šŒι»γ:)ηρIŠ@N­t`}1Ϊ¦ΚηΰaŒDα;ΕDMυb\ρB·DJ]œ ωλ/όΈ?«Ÿ/o΄Žιζ/oO3ι9™Ή%υΖ+―OοKzZΎ?m*1ΨEYlk΄₯°φ‹­?3½βγ₯\ν€ϋpr²΄„Έ"QŒΞ;ΝΧnH΅Ηζ‹%κ Ήx‚TΪJtν³λlΉ%ΐviΪy ΛAεοηχβNRΤb‘ύΤάυωW<;ιΝΡή{Έέœ?*gψtΏΪΐwΧ`„Α₯κ8z©«PkSRΗΝ‡ Ψ°Ίσ©Ξv €θS¬Š~Ξ#$!ŸYΣΝH?±ΆgΞψθ ήΕ₯Ψϋ~W%zΫςlφΛ½Sη½q @ί^kWˆ”V"\‹… n5ƒQ;{hΒZ•cχΕΧ tη₯£ΦœΏ!Ωήm]αΖ\ήύ>«* I?!ΌΘxxe’Ψ-Ξ’³<$/[ΉττŽI*[π¦ΗΦ9θJ1ύκ%”ƒ»©―mΟlešνΐŸ!7ΈE{ο*΅XSRRi šύ)86ε­¦’+AΣΕγνͺΎ;ύτ Ες aτΔτ²±l_xΉ΅ΫΛΟƒt€ϋόAa­’.V“)%ΣΊB|! νVεNY…8ᬻ˜K•fj©-”~DD―]«Α&:49ωΧφςΉV¬{ŒBΙΙ₯P[—D‰ΒΦ1ς,UyΚTœΨ@:“z_ΐB#p‹\²ΎΚρρQžΡpς‹%ϋνώ3γεβǚ{GΨέ§. «Ά€ϊβ/ŸŸΩ ―ή6䨫'*Ϗαςώ8χͺ'η %΄θ5c]k‘½S¦YΗR•a4†˜ζ›Cƒe³Ϊ”š‚ρ$,τW θG†χˆGXΜϊϋŸΞΦ›«[ϋδΖ*…Ÿž[™νΔ7ξέϋ\|¬Όmω 7U<ι§·ύΨ}φœ+AΓσœ/V•¨~}}ΨΦms­eφmΫO Ρ€Φϊ£¨±IΩ hD–G—xƒΊΤPΑψσυβσAΔŒί6.ζyρgcΫφ»Ν€nΏ]ΎΓ,·ίmυ¨,ςφŽΙυ·?ιωιπvπ;—ΝlΘwΞ_―L¬Ηβƚ¨3γW‘;©±Ρ£{^„όTΟσΰ.π€VMΚxΛέFύΆx εΞλΥΫΟ'ώψκaο3ή«/RΔ}a~φ[ΕN~άδ%¨ ώΤ:!»Q›ΰ\C5%Yι+­ HΘ*TΒΩ» ό›ΨΗΛω4½ά˜aς. 
sςΟ§χ‚“ΛμqkΙyΐ=ThIεBΦpφ%h-do΅ m¦Uxαͺ’2FyeΉάJy·έ\ίiu‘˜ζρ«<φ–kύζ}Ζpa)ŸνΣ-ήЍ]ΈxΫωΝΦύώψtν?u›_πέ—Ω^Ϋζ·77ς{ߝqlΏŸι~RVnοχ>c« |ψΙΌΌΊΑ.μ½k{Έχ^ΪLΕζχƒΛS3r₯ύ9χ3°=Ό}Y•?ΌΒ¦/·œΡΓOζ―uVŸζΫtΞξ5ω¨έ'Ό}ͺιJΫ+%©j‹ο/œ²~a±ψ ‡ρυA2ύkyNQ{ψΑΎ&τοu(\ϊϋν„.~―Sš§Χ>ψzφΩ’Ž;;nΟεoσ6γ₯ ϋ)ηΝ~Ϋ%FΑˆΨ½°φI»ρα΅ŠυyXυ5hϊχjψιή^ύϊMζΟoδ4φ‘«΅¨τ4s]iœ¬zΖβφl°.Wyn/«νžΪnmbj»νκΆϋλ…X|L;ΓwΖ†ψΒO[€1š ^{ήU:γΒ\~€ΰφ?ξϊΌ)²Y5ω]ϊxLά|(tKΡZ{~}ψΎ”zί‡§ωΛϋσ©·ύ½yKς€Υ Ψmœ·Ν'α'K9²ΛAͺω¦‰χρυύπ•m–_aΏ]€};Ψ@·ΚΥοMόŽΡj±ΖώΠΣΓ€ΞΥIO_©){Ή ·dŠdΉ…F’8έ4bΨTz#[©pu†:bξά…UOr‘†TΥΖΗ _ΌzΨξ‘ν¦ρό4»Τ+&yέηΣ’=Ήͺ–©Nω”›MΖz—}θ©UΦJ©Μ¨ΥVŸ#Zο[1š\5½pΧ’―E#B΄{Λvq„‡i9§ο œ•)eΔφͺ‰*|1ޚδCMR:V*«Vͺ`›ZeγΊω,ΡYaρί‹mN¨ϊ?ς»λˆ³΅ cε GAΙ‘DΒύλ#8ωBos{ΟrχΩ65zŒοQ€Z.YΡΚ½BΰbΧμτ5Χ›‹lkΗ7ŠΘ©ΦΝ§JCφ‹ςΧs(ƒŸβν½ΐψTŸLξŸ Μ!χόtm‚‹Βξ2ΏΎε€Ϋs­­j«΄±Rn|RΖgμiCˆtˆB£žxIθ€,ιFΨ _Ϊr1onާϊh{Ÿ„ž‡σΎXνXΖfώšX”Λ”SŠ"=|ΦJw‰ΉZ‘Υ+1x²­ ‹6:̈°Ψ'©U(56‘ͺt>ŽiηΰΩΏτεwε‚ΛΨ .K·ΉKΆΒH»F0rΞ7§JiΔxψb`Γ|œπΎU«PΈ?w^νVπνys‰#*-³ύ‹―οΧxSŸVO'”mvςβ€?eƒό:F¬ ‹ƒ¨žHΈπ£.F©$ΓΥ”s ]ΙΉ8§τHoΠEΚΦμm1«?~άςΚD\όΌΕλΫύ;ΒΩξ+ζ.Έ;€¨Τ{ζ =»ͺ‡θkͺ₯Š"dΒpH©κŸxΎzΖsήTξhƎοžgλΟS‹ςtΪΫ<}±‹’Œ’³95@@Wš4ΐI(Μ1φWΠ’ωjΡz$jχ&JcpU.r±Ό>I{φZXqή₯³xΒG<<Ϋ­|gw—ŠσΔ(ΛχΧ·[ΏΗMχ°KŽŒUΗ7ͺ›δ-η΄Α?¬kΏυxP“y₯ή‘Λl-Ζw濾κ’v>WXεΰ=;7cq Ό§ΒΤ²¨Πi1Eέ₯…˜›¨nXEήpΌrz7–΄μ΅AŸ-³œΎ)Ο»€\S-”šek˜&ššV!gψJν»K±Yƒ@ΘIC¬ζΞ!’Ώ6τ ΓlŒM?=Χμ;uπΣ’P‚αΔκLγQ `ς5ͺ‡”)I³G/t©$t3.sj°˜εΆώО€π =§gχξ²ΣΑ² 1 ΛuΣRݎi¨=7Ά"Ί£Υ@Ύθ„α'αT »Ίi"3§MψΥ…όϋι6m)vW8aϊXrΒΆnDJ*—Ίs.o²M„x±jNˆΠ„“-΅Ϋτ‹Ή;ΏM]'ΦΘ‘•m­½`‹8|) _υ€lΞZHn…°Δφ>šΤ]ν)Em―²Τ­o7ξΟΎ•»a—TΧωbH΄λτρΒ•’’cuL—tΫΡΩ‘3J€bΞ0{J mYΥ]ͺ]k’FeΔΒέρpېρΆω xpή9oZφ.ŒnMŸpcΆόb}΄|ΪΣiqΞZe36^L¦+a‚‰Fͺ](\mƒQrρΒρj«i€εΐG%ΑžXΕ)»>’LtƒύΒΤxyžn±‘T(]γ¨Q3pœΪ;˜HU-Pu0 Γuvx2ͺθ&4Ρ/»ΤoΜ§8–γσΈλΗ9ܝ²ηνΖκ Ντώ5Ηεi;Wd:½=ζ(ExI#Β`m†QΦ0Ζ.yΖ„_½'/ΥCRΉ©ήλΉ‘5Α™σk3φΡρ-BΜ¨œφπι©kO±Δ0cφA\ΐ^ \ς™TΧqΧ€Αhw²%υ^*$0—ΠΥ_9Φό£― 8:…„½mI*ξTΘ]+Ο’αžuΑΉΖ`Zv΅{ͺηL!rή+cEtκόdωŽŸ}κC›œ6YδqHˆΛκBͺwδduΦιΆη€7°αα˜JQ}τN$:#^r~‘»Q1+‹φσώτΖp>£UJθR…Šς1dλƒDωͺyƒͺΒJ‰ˆ_禝ΙςZ(?˜’Λ§?΅›,ο0{Ρn•„ŠΆΡι&O‹gό±εj—œmˆ»?Ύ_ώ€ώψ~x¦oxέfi‡o₯”c΅‰†/ jH ήš  žϋ·¬‰r?Λœœy}i”n)Εt`9Γe›^΄΄>ΨΊ2ψqχL8VΖ‡¨„‘ΒόI7„EΠΖΊd>¨νω‘ο"KΎΘC{°Μΐχ‘ŠΕŸiœς 
Σΰά³CdλlvAιh#ήQΘσΨ5οΊ²ν$_75i™΄ψοKΉB#νπŒ&w[°Ίw”pœ•‘΅Ϊ(:)χε-₯cΡxVζm~πΒΛbωΌ«sΉ°·ο1+ρδ ΗΈδΰΘKOQΏrFˆώ\.ΚT]ͺΡY*υm”DwΕ ΐHDL bΥ€‰»½Z‹CΆK}ϋΖ0ΉgΚΡ•ξ)(Ή’ν€ΑEΝBΞ8fΙΙl`ΔΖ›° €#²Ϋ…V,Ϋ)υ*ζ/―OTωa~²ρ¬„fΚͺD1w-rΑ!κΤ]‚Λ₯ΎUω…;ށΡ₯£€€ΡK₯Η½ίόeυF/χΟE­cΈα€@“ΞΚϋΤΊ m=ςZL…€ΣfΩKdƒˆW8 ΈIhit‘ŽΣ.Ω±αμ1'GξΠ:£ΨN.……6Dˆω³ΡB@ ΕUλ„OJͺzκΖva‡Γ©CΨN°1£…Υ³νΰMΔ<'dXϋγŽχpŒ:υœΔŠΒLjL^¨Υvπ…0œ]Θή\I’Ο…rΤ ήΘ€f‹–ν:2ξωΛΟmξΆa/*ϋ΄«?N>^ΰΰ¦JΆz*ˆ>¦†(?+–θή²H+<ΜΡ£Ά…u8Γ°«cxΎΐζψΌM]_L±L‘c²F𱠁Ώ`ϋ$χ―μ’ΓΩ’)’ΝˆεF/–šUτό Χ/RζΉ. =αXŸΪίω@kSSžΡΑc"šqRŸΨ¬\—5‘Ζ7X‘<ާEΔΐr\…¨π^έ”υr›ν•ΝόXΚ>fۘώγε³ο]ΥΕ+Ώφ&Οχtiμ?ρίzςΛϋσ+=-Ž3‘WΎ]˜5ƒ?ά:H _Ϊ qœμδ5σy:i›^Zaœkžφ&hw–>seDzh›tnkΧW—ΧώWlχΥ½±>{λ‰ΌέΞάnSΆoύ_ FGύ‰NίӞΒ3­Rέ»·3ΏΣΥΏώπ©NΥBOmΩΆœΨ{Κj‡ί>¬ΫΧχ‹–Bkςσbq©’gΣ6γΗγϊψι_ !‘7₯i‰Ή€(Ψ½9QWYΙ€­(ά΄θR(!o“Ί5MΐσzLu_ [Ϊr1…XσG^σΠ·?f{‡1³iΕ'-ŠΪ^ŒΊt„’ΪkŠ’^¨³mˆ²'„Λ=v‘sΙΞ‡’1­Žψ”mGμ―—\οšΖ†&Κ!ιλ‹±Η©N>U½λy:Sˆ=Ξ‹{Ψsv%χ|Π|±KηΚκA/ρrmτzC˜lZD[sL¬­ΡΪU!‰§ŠΏ²-0z·Ύρ&ͺψQ‚Γ&ξSϊυ LAK^ηΎoMΚKM'νΦ7nΑc*όΟQΞοšΟO|.W|Ϊ$ώk{χ―pӏ,άX8y©}ο™Ηώζή‘=Ø޲Q!©Ί.Υ™b£‚ο&J’«Υd4zΓ’Ω*¦υ¬SwήWΗ:ΥάΨ’ a°R‡Ο?}·]²r©Wa™uΛNs(Vη₯€’‹Ξ*m[²‡6ΖLΎW£’gŸρc)ς'.ZΤ#ο,—'Ι{J±›B˜ψΠ΅ ½υΪthE _ NRΤ’ΤWي/ΌU$¨M3cI·αœ?{ž\ιΥ}6΅α»φΨ-β+ΐΓ*šΚEΉhZ¬Ψ.”¬Γ’ςT€1 §^ΒψΓ·ς~ξ–U²₯ζGξδr΄Gn$-lΡ)'OνZ –(±―{—_QΫ¬Ÿ?υ―_guρηb ˜s’a›ό~ί­q’ΛHΧi©ΥOTCs™Ι5Ο.φhzσl)貦ؒ·γ]zΧΫF‡9=38φ”k>USήBWEYΑ:°‘f˜c9ΧXV³I‘αηΩ΄@·‰Jέ0ΰ5w‡=·οάpΝJ ±β4Ζ ΅XΔβΛ…L‰,°•rV2ΰ(D"Ji`½΄ΈϋψόVρΒΕ‘Ώ7ώvJo &4[ZwŠtυΔ€c΅9| »ž£ͺΔ©%%„Υ5[lCΑ&§T†Gλ|5όQ\nCήΣ™ΡΫωG?…gί—τzχ6-X1˜8λE:ΞλFή!TσΆ°θM5NΧΖήωVκ@aΛΑz§₯EKΗJε|‰ΣŠ:_δn“nnb=ŸŒOξec/ΉΨD=› ’vͺHhš…~½δήkυΙHώV²™0ΨL1{`Χb:§4ͺΌγŠΌ;£Œs‘L=RΕΏ~UΌκŽ…_Δ!ΜΤfΌο{‰Fϋ{ͺͺZ€`;ΐΊλ †‡—η't˜WΞΓώ»ξNQPLtQQ5^‡XΰΡ°QTX…ͺ±GE€XuX7‚QˆΦΖTU!bγFzJλQp|“?&£ϊ{ΔPWb’{ΕQ‹ϊυδͺF’βSve“IIh [οA˜z²‹‘" o:…¬pB’3R΄”/%΄ΰ']ι*ŽՏﴼRŸδ8άτ4^Γ”a‹WΤΦ½ZΦ” L@>»PDίT-JŠΩT―Rς€Β…j’V‘Μν™κ—»χΐ•x–l1Τ; lrfNΥdx Ds ώ‰αΣ›Œ%έMR@v’GνM€–₯ΛήvCoZήϋ ΣκΤ₯₯Α”’U―"Κr©e„₯ˆ&je*ΞθΔΝτPb³>ˆƒν t\ΛCΡb-Ώ>Ί-yλ\²――8ίV…CκΨ=1φ#ξkΌ―p8;κΧ}ΨΑ¬¬žΆ΅χί—.Η³εΰ½·μy.Αθ•‘T:-x8Ψ¨-€‚TβS€¨sWΚΡΆάΞΡ‹DW;xD”ηΈ&x] <ό‹Η}―bΜ/MΔΫαE)ι‘Λχ―o_πYΪϋ ?nK£ ΡYK™ξμΠξ ‹ Β(Ϋ#Fη”F،±‰F©υBk#ŽͺοNWUЍΞ% ΅BνAsκέΪΠ½ŸjΚϋΙ± vΤ,+Ϊ’|ηŽ’£Rˆ{ͺnX»β`ιbbE5iGR¨Ÿ΄&ΊγσΙρ…ΙγŸ©"ϊ0―ΚR.FΖΪΐ_[[•βκΦΩ% 
tFM#rh=κψή踐ΓLߐΝΚkΧΐˆ«c¨,6:θX#*Φ¦‰b7-H«vc‡±8¦FΤp^H·!Έ^ςμqI―_ζu΅GEt(ΰyΐβXώ}i‹wλνΤΫΫPP5VφJΧ¨YYΚ5WFΎ€ 0ψgγK¦›Ι±δ•‚Βq ’šΪ?ΗώαρW«Ώ:Ή0d²rUτ![ν¦`+JΙ%wμkΚΙZŠO:€‚ˆΘ–S!αT:ߎΈc*E8O«Kxσ,υ/•(­ζKΉSψB﫝ύΨU§V!V*B°p]#’&Η^Δ/»ˆA³ηδΝo·Ύρœ°aώ;Έ:₯{†k―ίωeώγl=Δ5]f™ƒ??ζο~=BYθt9Kό’| pTrKŒ(αVr5MΨ(Λ ΑΈΧΨR7‰CΟ‡ Τž>Θφ―Δ•Άƒx£zΔ‘먘ιΆ/θ|Νΐ|ρ}ΛρnΉ’ewφH!ŽOϋΤφJzυlgιΣF¨buΑ„˜qΨ3Π†qd ’Z΄ΪIr»F`@“£3]ηΡ4Δvθ˜ρβ²’” ]Ύn³Χω9ΉpKχΔS>ϊς翎>m8λΘVβlx&'(ΚW—™ͺ+δKusRœίn}γEnΜ=žο7―ύ{[lήΙe&{-! |hΞχ΅ΒΪ»l€*΅t$αG]₯†8Λy£'θe«0–’>sΦ0>ΨοχϊΝΨY(σl‹­'›}-Ω²Κ)j4ΰše©¨2Μ'ψ9'p̌Pημ‡sW—tρηC-‹εέ― '˜±ˆpu‰MWΐ7 2F@›Ί%U9±:ΔdZ-Žœν€yŒawkYυχc‰Ίέ¦Έtυ{!ί‘ςΜmOw½,κCΓλΨIͺ‹GύˆΌ}§ηuঁ~<Ό¦€ξƒŸžΎΆωςβCνΧσήΚK|R}ΏI@ ΅|ϊcγυRΙπΝϊsτGΫ°GLpAξlsSΉΉͺ½ι€m׎λ@Ξϋώλ’{’m'd9燋e\ξ«ΐ½>Ν7EϋΝ’/ώiΡ‘Ήν.}τHm₯hv{νϊ©Ϊξ‹γύΦ~μτΑLdͺ¦Œοβ«Ο‘CήTΖ~τ—9όoΨUŸΣœ³D΅πNKžn}ϋmΈˆJIc’’έ’¨Έφ½©*‘&Υ~»υcαυr H<“€?–M '©€‚ϋΠ;ΨΚΗ|·7jιYσ‹ΏΆ-ϊ{ΞΑŒ_βέf;™ο±™]΅ΕκαντvczΚΡKo…iX σeΑΧUwCδE—š„Šρ­/TsV[Œž;—GΨ/w|YWαϋ“d =±‰“λ3›œ¬¦Rw.)οœJ6†XΊiˆϊΉS—Œjw™Škέ’*±Χ¬bΆdCc]{‘GΊΜ_;²‰ϋΩrυ/ωύuopiνφ·ΝΏ] _₯*=c"ΫlRΜΐ³,μ "5ffr%zΒΦ*δP0©/³% ΟΜΦ“lT~Χό­;ΦΞ φα•ί† Όc;ό:_~»υbŸρ"ƒώ•γ/Ÿσήθρϋ6όζoN΅TcPM ½©Vj™4Άs₯Φ›«"EHμ―ι)έ€[ Ό³ύβΪ.ίg|όο9ΛcͺΚ 8ξmρόteΔkN˘ ς,ηΞ½…_“eζ 'θ^eΒ₯iYτΞ΄Œμil^nGσ3-‡ΜπΏ8GrΣΦXkHވ?³L·³Λ^:vƒω )μpQz’7Ž“_i‘―Τ¦„€”!Φl<§ž£“–i_ή+jΙ4ͺ!₯ΠKΚZU}HğV Ύjπ$@b΄·7ί£ο;lsy|xGψIυ •§S‚Ξ©σμΘΈhƒΡ«ΥAΉHΊ™xΓTTŠ’Β4Վnc [+]Kg€)~PΝOrCΣ λΨbXΧ­5Š[4ΒΨT2yͺΙJ©'%Œ¦Ζb’odLΤΊ·Φ»λ‰΄3ζaΝ­Μ…]tΉZνεHΊΫίαr±χ"ΤiMT6υuU$βs©³)E{!S3Ή&£j€<‡"©ό&wŒAj&ΟίΖ­k]†aψ›†ΊΩωŸΦqύΌΥCyŸ?΅‡³ σoFͺΈο­Ws{›pΗ)pIηυH΄{²Ux‰ΎΗω ƒ§™Ύ%«{Ξ91«ήƒGτZ€šHZw:N±Ζް<Ε3IΑ v5ά0/Ω1%”=’Y‹punC‚[‚xyΟΩΣιΣ!)ςΆχŽ ς(DWŒx·’8()ΞΑ·ύ—rEƒ3;q‘mJήΩύ©:!WœΪ5Sk[€%ͺRa·U΄Ν`h΅ΒΥ¦aΗœP‚₯S –έ ρpRΊ•›ώ–…ήΌη€ιΤMN\Ρ\}…[ς)°I.¬ih ΐ§Š–G+­J«’)½ϊΘΑWXφ¦r6YύZ)FψP_ΈζτζΎ|Ϊ“ŒeΜ >„¨«φIŠΙ8K«™ ΨΞΨγ›i[P’Σ Σ[Χγ‘œΟμΙ­\ο *ŽŽΧqjΐš΅/Ί`ε„»zλ¬QM.θš›Υ\_HsO62Β•jΌƒΓθŽb„)+z„|׍°-zΰ•_/S½οή Mj9*ΰο°!F]ΘUm 5₯M'φ–8i£βɘJΕfxΜBΕTrŸVώΔψ„Σh\ΣkL‘­AGŸ‰d΅”ΑHjΔyM­zD ‡™MοˆR,•eŸ €V·+;Bn³WQ΄βDD@ΛGΖ3ρyňδ BέP…ό‘Ν<<ΗiϊBξl€Α±VQt‘3+);ζ`YΑ2sͺ>»δ…ΐ G£Rυ`mΎK£υZΌο<B»7ψ{˜ΩRx5λŒΝΑώ0ž™ˆb·Ia€Ψ’LBrSΜς>/ΫZΈq~ ’7ΰ6λO 
z;6¨Σ‹)ZεJθάnrw‘ΧΦ΄Εr`œ7£ϋlR±ΉHA²3­YΝ6–xϋέΰ«ϊ4η—·ΚΛ·ΥXŠΫݐ]jFϋβύ₯m% _ΠξAm³δ{©ΫkΉΖΧ―u₯υ&ε"UΈΓ—ϋ#ΪΚ™έV-²VΏ…LΒgβlεμ}Χ‰αχSUF!δV ·έy‹Υ ΙΫμI(‰ Sπp?!GYόKέ­BZv¨“86Τχε’,–‹Υ‰όαԎ€&cυ.Œ&šR‰ΞU‘œξΝΆŠolχ±Βƒ¨ZJlNχ\cb„?΅η‘Πξρδ»½Ν”μžΫώ°mΎ½6όΝοί{l°ψ© ‹] gÊ#?mm"ΚrΦT•Άu“ μ\j+jύΏ/}ݏ·> »8`φqAυ‹ΑΑβϋˍr?νwœΤ£{€¨ΦΈg•"ΥS*;Γ’A€`ΐ Υ kƒύ@ή€ΰ\I,΄-ήΤσw‚ŸΩ ς ·‡ΥόΉ€ξ† ˆ/«ω·Ε ΏόkοƒΦE„<[2&‡ (Ίά ‹€0ΐΎۊTΗaνΦ7ήxύycΡ+-ΏΚ"?-ήN³ϋ“―r‹8κK—E±£U ˆˆ{žSN9λf[•|*Τ,€3‚`ΠDΒ(#wkα‘΅±Ϋ4Ÿ»ώchͺλ“πpFΕ2έA Ά†–KrσΝμHZΏ|α„•GεaςΓΘιX«δό)₯ξI…˜bε¦nγ°9˜™ν=&ΠΜβΗΓκηͺŸτθ^sŽ―΄:Σ‰9Ή¦ClT8ͺpHάy εjΐ¦N…E7"Y N"― <Χ1W#|Mςμ³ωσλΣ±©ά€‰Kc^ήέήΰνc+F©’#wqˆ‰Cc₯R2θ›UCX›άίψΨΐ¨[U―9•pεrθΪH™ΏΞ^_Ο긞SGψθίύκl—wΎφGΞ_Έ™‘ŒΛ5'vOύ!³υγRœ%#‡ΎςΓχ/§₯‹ΣGΥ:#Ξ²/~’θ,5ΙϊFΐ<ΟQGλR͞ ΄*©j›ΰZ Qε±2αW^ Nžmο‘ο.Ψ\o>8έ” 8–Œ•–Β ν &§ή]%λ΅ΦΦcΔ^\νCϋ!³5<:ŽMΥΚ½…¦iΐ‹Nƒb "|ψ$+ΚΛ):ΝήXΡ6λ·8‹"―Υ!+xg9€X4γΞgTϊό·‚pkSΨs«υK?ρ8“§Π„k›Χ¬j38ι!ΞjŒW ,z!ρΎ!KρGS1ΐχ6uήοs7Ϊ€/τtΣx?ή}χΫ"Ϊ¦JΑG#’0iV£P]7QO䈸#`f1VΑ$ΰΖ{­EtDΡμW7υΧυ)šžΌΛΈ¬:%`η€uT0oAΐˆIZΤ±ri\Jφ‘h8Dœ°„"kŠ<Ζώ@¦O™·ͺXwγsΝ’―VIoΓ—ΖnM‘\ ΧΚπ[ n §Ί“ˆnκΛ8>ωϋίv–₯ΐO{ xhΊ΄%_ڍ`Nω¦ 7­kδX½λ@aHG3€εXυΡζξχL›ΜΔΡhΤΖDΣ, bEΚ¨{(ΜZθΨ’kV2"Ϊ†Φ" (ΩĊˆ•χυ±~šΣ{ο½2ŒρcτAYxΒ¦0΅Ϊ*ΒkνΌ•ΐ\Ή8D˜-Χ(L.9ˆj^ˆ–E8\‘nΥΗTd &7ΖQσϊDσ—»3ΤaΛΓ«¨Š…α($όƒΗ•ΊBεΊϊ­½ Ζ‚ύΧ†ΑΙkRΥΖώ…g«Χ§±^ο@άΪ£(KV”"¬U§ L°[‘›Eΰ―Φ©tψ6„–Vk•t!³cΥδ΄ό9['R^iω6HΦ|Dτg{ ΖφδΠSw\sI-#\‘”J\#ŠQliv€y‘€μ.ΐ*ͺNBΫcrΞ¬šK#2/+ξIΚί›  ‹wt½‡εφ?žŸf—JΆ^/NLlœ^hΆpηDkκQ‘|6,—Ιž#‹­ψ»Πδ~1ˆΰ5―ΣΒ› ‡1 œψΧoƒKατžfς€ξCͺρ3τœ&Χες²υ³g †ύ4u•( ν£°ƒ2’'ΔΪN˜Ί¨ΆάζUнHΘP½Œβθda·Μ84z Ύ.γGbuΚ&8΅+―μ2VΑ?kη€§š°ušXc,Ν$/JCπεQΰ§1 vKJ[8E Χ>FΖ&5S«=>±a(Χσfϋο:3ξs΅Β/?ΏlhvBμXkΛ•ΛΙχ§%αΆϊ^ΈΜ†ΟΩκΆqfο1Ώ&g±I@qΕδ£·ήκίn}γυ~‹υί_Sγ_κηώp/OQŸW7&z―|Π9JόΙ―„vU%\M YEbΊb _;κ$…ͺŠρƒb["䲦γλ €ΕMdΡϋά7ηΨ£7Z|cσπ>―_ΟΒμι/7H²ΰZ2 t¬9b{ˆΔUΚ­ΕΪ$λ!ۘ Ιs8]^€»Ζψ>Φ&ΰΨ%L χ*pmΙ YM㽊59=Vˆπ„R‘κŒ‡¨/΄ŽΗ%8„Τs ΄ˆn@½#K4€¨ΞPM[ηD‘θssΉW ¦»&eΙD:Ιf52|‘ V[Œ”SξΥϊPΌsM—³΄ώΆΕσQ=^œ—6½ο› ˆα»/{ο|­—'gϋYΗvaςnF„οά’&ζš#`Ύ ˜Σ—nCΆΎ₯Φ=…\Ό)bεG"ΑκΤHBϊγ²βx..ω΄ζΛO) Jc1㽦¬ АΜ,wkϋ=ϋ㠇ᝠρ‚΅©ΓΔαΈ†6BτΈΏ).πK½»^W%«±Α{,ϋuΤΪ4 £&Ισ€Θ©»`—’Š’% ΣlfŠds AΫσq­+(Ϋϊή-p(C”VϋBƒDξ½›,9§±» /΅ΰ’-ΝšsΥ5KςΜλ˜₯­·t8QAκ‘x‘hΨ%£N-ιηQG–Ύƒ©*P•†χί_jJIΈH 
Γ.΅Φbˆ\‚³Ϊ•Μrυ&Εε^§D*΄«=…ϋRӍxη¬λ_‡άvΣΔ1Œuvmίξήs8)ωυ΄I#˜45y%šΆV™šυ’τ΅ͺ:ΐ#ΛRZΙ„€`D―(κΰCK#”»‰ΨΧΩŸžqξγ–—]‡ζψάTιŸz?9iΊ³ΉΫm‡%†‡->ΒS5J΅%•"έοV*QEC―YJη Cψ±ζΡτμzο€Ξ«» JW£RN”%rͺY8œnc ©α]‰LQ΅t%&OV5«3Ω’¬ˆμωxζ:-ž–‹—K“ΟoLΒδMpQχΨm–šκ"-ωΪ0 ފ+RΦD˜: κ† Iϋ¬²")f“—™˜ŽΤΗ―“δ¬i)ναδιε@ΓkeсξšM—cmΦ„&±!)­ Ι;Ρ‚PΑ7o|ˆ–ͺ #χ&_„¦gI«ω¦Œk&cΦlυσε~\όφ]'<θ~r0-υ,>%Χ%έ±ΐ½WH†y‹΅TςZ7ξͺΒ6Β-"’ς8κ0ŠΆ–ΡσϋΌψ† xΎ|85C~zžhͺMce‚¬Ε+ΉΓΓ%ύ€±q.%„U–‡ψΏ{©U;„―54,ε– ‘㩨εωΪ&Σρ‘p\9€ˆX‰…Hδ4ε‹Βα¬X4ΓmτΩ‹s‹‘G3œ›ΖΗ{KΗΘ^ιl€?ͺ ‡ΛEΧ0"’`%WεƒλΎ-5Ο«ΉT†ΫEΠ °ΧΚ³΄e|¦kkwο]|{Κ@»Q:Δ$m7]8ΛXˆ[M@ΥsL΅tWR'|ν¨“1ˆWΫ€•tK~}’ΊΉŽΊwJ‹αοƒvŒ£ζC† ¦ΐΖTΕD© L/v²ά:‘G( Τ9p5.B…Ηnš„ζT‹ΡL_4’R7€bœΦ°¦h―•π%™pΣαc€μΦ„·)Dζά½uōΠΝΰ|‘™Φ.UxŸΡ ž^έ$6,BM]λ}νΥυ‚έX Bšž,!4‹@Ϊ+œK¨ΣΩΉ$Δή„“ΈΨWζPΒ§4₯‡½ίσ(Δ/tΎΛ{«‘| tΠ]ϋΆEλ*›εGύ½φΨRœe„ι-„WΚ#Ό°AK«ˆ%—k·ΐb&)δX;‡` ¬Ή©³OƒΈ©†ΨZΈRž΄M,ΑvšΖ†}Π+jz»φϊΡΏΠ€"…ΞD_ΛΩξJΜφs·\–šψΨ τύεi±x½{Ω*|o.εδ:Χ蔍”­"b˜˜₯‚Η±‚ μ‘ ΒΚ­τ’€¦»ΚΚ˜˜ώϊηꆧ―\mΎΫΔRn3²$ΉŠ€6\SθΙp₯hle4„3 ΒؘmΆ"»­Νυ$Ϊ–O{& Ζ]gΔξΥ•pŸ? _ΩΑΟ%Œuψν]φz>μNΦgoηaΗ{AΩ½/u’IΌρRUš†ΊI.υP^J:HAžJ”“&nέu@?UΉDΑ΅¦φ+Λ}eSΦνιηοKΎ4%ΒΗt`¦/KΨC©acΑΘε\8ΥΩU€\  α ₯60ΎΉjΩ…‡•RπΥF#υͺ£€…AδέψξmO—qΔ lFάα²τ5" AH’S„O 0ο@|V8ιρδ"γ£L‘PAζ-,τmώΘŠ}Vυ=ž’ + _XηΟtL €§7@»p+Σ«ͺžTœX·la·"%ρr£Š|UGίL՚C/΅ή*Šr,kpΕ©Ιά,SwΉ,bO&ιάηύ~yβQ%ΞIyιΤιSA¨‘€-jΘpήΔκ[φ.j@βNdγά’‡ωψSͺ7ΞεT:§Uε^ΖI…¬ΎƒcU©!‚’ά^‘/3Ό”]NŠ9!s΅Ξ„V²Λ@Υ„Κπ\™δώ.T6—Fπp^=eϊ<"_Ά,’U‘ζ^ŒOn ΔZE΄`«R’6Ιλ€<#~ΧΦ1Έ.ύeŠF*.«Ά,ΎŸ€η§²Q7«tsš²ƒκ=²Εΰ ‘wΫYΤa½Ž’7aaφSOal­¨3ϋέs?ŒMnE@Μbή.¦KIΧΆ]χ!*¬Nr"DζŒ2:‡€p>šΡΗζΩ‰΄‘Ύƒ& πcΥ­Z"Κ8΄8Ϊp/,ΩpB[HVBΕTTiZ§¨„$’ˈ@Frγλ‹°/ŒοU]Ό" {{Z rj€E !Ήd[&!δ–,]οL B‹Eκ΅IyΏŽμ"ΥΨ›o0y©Ž’Œΰρ–νξF¬™LEΥ€Π» I€‘πŽMΛP1‹Τ|l«‰ )- vB!zˆ©ΆΡ ԁ'Pmς ŒF@bN:TIr^Δ& mρ­§θ­ΦΕ"R0’ κc’_|>Ÿwξλ‡ΈVΛxψΆ“‘N&ƒΐχΩT Ζ䜫½HΟKΜΤU©Ω8 S/,–³rk=Ο%βkμG5εέ݊Χt]'₯wζŽθΊZνiΫ―ηοαy[ξ³'³cΠ£7ΜĎ aΧ·Ύ%\Xׁ5Θl@Ψ·Ε-λ)IΠΫβaυE,ωώ7ΧΧ|νَIΦ&':ΜΜ(4… ³2΅:˜‹•-ˆKLT›N'ρsΡWƒ WξΰΙ)7R|΅~φΩnκ/qvΆEgϊ›?›ι֍'•Ό”φPΡ#œο,mΒY Δ›Ό €&©—τŽΐ&*‹΅ΦθϋΨόk3΅—γ½ς„(ΆΠΗΗ.ΖΤrΛΦ$ ςM9 NΚͺ“―ΑŒ­ΧœC]¨ƒΛΒΎ0Δ‡‘ ϋЦΩ4uC,̎¦c½:v œ$ Ϊ1²7&*B’²„π‰ΡΧΔ™0XωbακΕž1―χYΚ¬°V՜†‹ŠΰD˜\·Ž$ΫtΒ’muλka K†OO˜‡B;­}έݝഩ“ρΏdπ2|)[ΈŒšJ‚ε!†γ0€½α_εSυ‘²eeΨή/g)MΙΧΕlζo‹M]'pο·.ό’_—wΟ 
εΤ”—‹nό?πL.«?‘‚P!JΨŽ]2H©λXSζζ§_τχεΣΐ}uη5}_Hf„ά1²πRFuL8ζ]¨ V4X+I1Β$lv«œΓž†ƒH­©ω|Ίm{·κYušγβπO²ϊeψrΛ%Yιΰ4l)5Šτ^hXgΨ4α 3d…Y9Β-„YT1ΒkŸάέMeψ2’f©«ƒ>G+tΐοΦ«W²(!–§&υχ„¨,'_’«Ϋ ;ι‚eI}η ΤIΎλ Βτ;ώ'=?²UN}΅ή±4'M•C‘‰ͺŠŸΘΰ|g_”26EΔˆίŠTŠ6ΔrΐΠΕΨΛ›]ςΰ9{>i—Š‘§bƒTΟ4vE^ΨUvj.ΛΨG8@Αα:†ί2ΊψjSk`¦δ ‡֐YτĝΦΡΦͺGh ½Ρΰ —ΑΑ=ς'F?}ν&•Šυπ( χ³άωuΡ EΥηˆΈΤ Iϋ@­Π’Kށ%Ψ r*wύšι“ΥŸ< —!7@χΖΩΨb+SΠ’(ځΥkƒσŒΕ)ΜIds0C¬j—€€m#)$a•χήOΎςl—I:`ιΏ4ζξ3“Ί LˆΠ”δ‹zWJED N²ιYx‡ϊP1ER&ζ%σcΔβ‘^ψιgβ‰ܝA"ΒzjΈQAΘΆΕΠ‰šί6 Hό<4Ή?sΎΆHNrkrεœαiKi£—c;ZΆγ;X?9ΟbeΡ`’zvΒ)84‰Ϋΐ^*LV΅DέΙ*#Qi $α{ιέΔΡόΙ XνSν«q ŒΥ•“φD;yeŸ”Ϊ¦)²-&)e₯>Œ¬8ΗΞ&jΥ9TΫaΦε*š 1Άn ~|ΐO‹7±ZGΠF©›PΏ~£εΧ΅χη石ο\~σΡj,@n ΨEs‘5Ž\jBE£γ―7z3 —@eώ&χ@‡ά²ΣkώΕ%΅ή…2€₯ΊΙA>}ΐIΘ=;EQgZτΡεv„l )ΎΈ4„γηΧw:+‘QΜrν(­—°WΤ‚L°­RDu Ή―Ξiomι^s’v&(ΗΏυ?/gθmFߟ_ι鬆ςΏεŽ‘ίWΧ$V°§ͺ‘w`"ΣΖ°—’RΫL‚C΄*iƒΩ %„θ­Π`eν£ΗζFXŠWΩJςΎ§ΤίF™~³hθZBϋ€§ξΉG‰»‘θƒΠ£mRΩΧUWς$Σ2EΗTΨhm’S΅*–F =P΅:Ϊn•o‘ψZύρ4[½ΏžώύΟ•hλάΦω₯L š]ΰΝη=ΐ#Ο·’ WΈiŽeͺGΉϊgρWžου&–¦”\qEϊΒϋ欑RΌΆ€œΙ6Ή…1΄50k’€W3Β>LaΤΑΏύ<)Αq†)zxΩV©K’·Ε3v6Ξ ΔΆ)†nΫΌΩ…bθΛyΊ=–Εm₯δqώξ3΅˜m„vE“eΉψώ²ϋξρiQψ4Ό=Ηυ²»R3mAΒ Β>σ~ φ~VρΌžυΙΫSX½*zΠ"y Dt$μ2žΎ\wγξΆ6·JΫΤλI…ωqͺsƒrORŸΆdύΒΟ§Cސᅽ‘ €L+ψΌMυϊό™oΙ’ΚkΟ›φ§χεϋ’E™·ωΑ +~|–f˜‘ηF―H†žΎΆω₯€εώn;8–OΫuέμν=UϋO+u\|°³b­·θ ™σ}žΣ£ρ©ƒpΫ–ϊΕ„9ΖΏBέ›‡j―‡]WΪΔ#Δ?X›α™aπγpR³y0ϋqϊ™Cτσiμ°|ΜΤΨ¬|{|θΟow—$K½ikߝ6Μ†3`·^IxΦ)dοΖUη,PxK3:Ή[p^–(Ύ?>έθΙ7j~OLΟ›―VO³·Ε¬ώψ±k•ΪΧκζμηi24N~υ Vm…mΎbŸΉ΄”‘ό@z4nESςR‹§t­Α:γΧγ'³#7 bΰuφ(Aγε£@jXχ„6β‘λ·˜ νΟ‹_­ζc―|ϋ8šΫΪ«γ„βGϋΘnC\Oͺξ;©£ι2“ω%YΧM“#b]ͺo­-[GΤ»ϊS–)»Π3Eλ…Ω:υ€$”“€Θ/jfέtŠŒο^ΑΊ«Z'α’šΦ.ͺΈ™΅Φ½#.Vτ3ΩWz‘vvHtψAΰ\c£Έ8 =}X±-6›€Ωνͺ[š¨\6­‹ž―hXjiξΉηlΕΙ΅Νθ0ψωυ€ Ωή!—ZγšΠP‘"/‡ΥgΛ¦“FόεΖ!Ϊ •()Όά`Δc2UŠ!¬ΑN½Q¨ς€ΙlŒgγˆΰ– :„@Ο^Ή;σVW ;c¨€Έ₯ξ|SΉHi³ …„|ΗΖ¨1E­©ΐ^ͺšϊfی4?δΛQό-㬋§ΖΑιΉ‘L“f6˜kΕšbŠR Ψ„MΣš‰ΐd…ΰΞͺ"Ή‘gZJ0εξΧ«ίψΗΫΐ»R«7yI7 J¦s*UΙ?”Π=`^¬Φ*Ζψ°`^€I:ψΰ±"ΧjSRΉΞ㸏­ΈoqΏΟΫ%.Ι½¨ρήΙΖ4ΘG+²«>%ΉW‡εh΅}τA †ͺ{λ›ΞΒ`aΘΫ=ΰ=;FWΏ##Ύ7• …T‘ō`J‹eφ{΅Λ4`}»ΡΡy κΥ«€₯ε£xU„ρη_§ΜŸžΏ…ΰφ6HΑŒΧFζβ€Ÿqά΄ΝΎSQπ|M7§’±„₯‘(CGI=_X΄{ώ-·Ζ_η)Ζ]+E°QτΞ{麈Έu%­š”fH‘ή(υΕj{€ΉΒη ¬NmΚ*ͺ·/K¦VD°ώξάz¦ψb –4ΖθkAΤ# λΙtο œ‚%ΰ4΅ˆRΐ ¬«ΟNK)©υ―ŒmSΉO:~vg'gwdW܍Eτ[šΖU\› Qς ‘€Ξζo«o5kxCέφ.γF8αowrσ—Ÿ«·εeeξ)–(Gi—oΨu 
ž™)ΕLeΈŒN#ΰWπ)7/>n&Σ‹,y¬c»KΫnq―l>q9ypΣψί—Ÿε?ρΒοmρ·wmˍδFΦΟώЉΨΗ ΪΈ_>Ζ‘ΐ%‘¦[-2H©[ν―ί“Ε[U±ŠdkH{Ό1zЈ[S™y€ΜsΎNεȻːίΑ‡ͺΰXœLIΎ*+Uƒ,¦˜ŠθWί!‹ΥΒΫ …΄Ηh„‹νENwΓ*v4}ηΑn»TWΛ}ρjΘ…q.8 (ύΧοζ3Zμ|RοτAεύΐœ°{«m{/•Oΰ‡Ÿ}+λώ«ξ――W«—ώ›π!ύς^ΧƒWΫ_[3u]lψΓ&YΚK!kv΅„l’ΕžΝ[¨"4ΐ«ro*3IAd£)ω’—ϊ&ώφϊ½Ώt‹#ςruΨbg‡Ε'’ŠΓκŽΥΫKvϋ€K‘.Οεή.ΘT ΠbωU!óמ―Χ° X35Η¦,5ΐΎE!&Γr \„ZC)ωΣ› g³—§aΈΖσqw‰EιΈ{!aσΛ ³D τ¬‹ΡY”ΰe£f|9βΪ\nΩ†‚ΡΞ©dfZXf‡ίΫ'αψΉρποΞȞ\mL1₯@;Θΐ‚ͺΑΪͺ¨ V+χQΓ€fY«*>λn&s‡5_Ϋ^aP²”LLΜΕY˜ΈΆΏ»V χ%ΜBΗ•#Έ‰Α92„h― 9>αo(λΈςί5εM‰ ΤφΧ–ΰ±ωβ<όTδ’-ΡιNΛtU •CNI’4ΦΑ ¨ jhLsU…΅ PĘ ˜ΡΕΕ§έΒΕ*³ah{΄-/4ŸO«ΦXΔΣœg”WΕ›!5„{„4π Ν |ΔU€e$‡Μu«-/Χφζ°Βώu―ΙΑώΡη6›’«~2t!>ϋ1Ά0HΥ[™™°ΐx‰< vAΝ%|–Œ6¬&oέησ§SfR3 Πε‰:νΊ³ωΊΏ ζ«5J&(} δ%@ξU5otζ(,h©;ΨKU7…€,ΙE AΞ8Ληz ’φΐCCr(ΌN £¦ s¦DdP”1o­—η½K5ΟςΕ»—ί*`κΐB ₯±Ι0Ε €t,Μΐ‘ Ή΄š`©(;{“6πό‚~o<ϋτόpκύθ l4§›ΤW'ϋ `“ev±NΤ&™9šσ€*ΐ|CΊΓά§ΜmΚ΄B’ˆίιδ―‘ΟlgΖ(fΪ·3I‚€J"‚ *² θD sf©‡Ζά`‰& {jψΏ¦ŒΉ‡ί;/ΜΨOΦ·#N»λ¬ς±όYiχ½S/ Š‹d²CΖGΆͺzb•Z©uae8S*φ&M –ώεφNΎMŒ·A»1€;Ό±=²ξgτ|λfε·=ΐ½0wgM™ζώbΉ\ηξLCN’J’ΠΑρy…ŒοΞx–™uEσ]0^σί,SUΆdψ.υ3 o«΄9\ω?ά04{£*ΉυO°­& Β*ΐύ‹H‘9<΄ςΦ{œΤΤΜuΞ!j`•2'FςΆIεηb³}ψo!'<Ο€ gΫtR|~SvMD<,ϋΦ α4B€[>²Φ•ΰπjφω—λzΪ·΄υaΐEXͺρ1}ς‚9a>‰o£Οx;Hm\υ)ŸΒ²0EΣ1«Θ$“ώτΙxπwο=œΘΝFk|.€~i@ΘIYΑPΰ„cHl5FΓΞ\؎E_‹ίֈ⁠ζκηšίΞ-οί“Ÿ±Cΰ ‹₯ͺR•NhD«”ρδ΅VΝGšΩ«dSŸLφΪ ΕΌ#fϊϊh χπω")_=λˆE`1w―’Gp•6ÍΎ.Γ ΙΈœΩ °Xγ"4!β›…Cs`WX;Ύ8Ώ{€sD±„£‘‚ ΫΔsGFγ;‘ΰ±#r0έ΄L5RV•/»ŒΜ©akΎEsf‰ψΊΉ{Ζ³ΈϋξxΝΜ=₯2ζšeδdΨLžμ™νQ–TΥ§‚ΉΔΔ &;+Όΐ•‹Μώ'g$;'ώώl›ΔPL·77τPœ=τΓY―’I(5Α)’S³Λ*WLΑ΄Žΐ“JcEΧvΪPλLη˜HrΛΝτ₯καΪνβ0ΣH V=€‘Χr/%ŒFzA:Ζ b‚@2MΩX ~ZΚΘ©‘'†xQ+Wω’΅M;Έ΅Π%χΎ€4@ ,2ηŒ xH£f₯"Φ.ΐπ usJ#κxJΆ:‘[|4ρΜpꜚ5 ρu΅ω–^–Ϊuœ©Nέ{Q%`R‚‹cζ έ<ϋΊ(‰e>ƒΖϊΊh™²­—¨„ˆ₯μr?o–\‰Ήn―Ɏˆq ήέI,«·"Uε”΅Έš±S-)­ ·-‡`bp1{eιYfΕΦژ*―ŒeW+τθ³ίΒ|£Υj λBmˆ²‚ψU›TR†Ε ˜KrZsFD¦γt‘Μ0›ΥΕA|œE ϋΧ\εZ5,Gc² ‰)Dbρ•ω‹πˆQJp00n„"„Qg]ΜφΫβΐqπΕ€Ό«{’έκm.‘€€%KρIΎ3fPX8B¦ )FwͺBΘ SγCCδ"Ar|GΎOHό§£^ΰTΈhFίΫo皘ώ΅όH72ό4β‘aζ₯Ίλ}bUΝG»Hw•Šxa&ΕZŒ  πunVt1/ί6iσσœΡγ8τΡ:^œ‹iϊϋσ6Z%ad)ά<―3 6^΅ ’Q-ω΄J¨ΤɞΑωfG“ͺUšΒ0Ti@sρ9&ο‚±ΒxΈ6€‘nLkFŠ„ /ͺ妇ζTΆ6WBbŠ$T8]]ψ€₯ΎεN:ήjgUψό ΑgHΌΙHžωˆ,Ιε«φ„Vz;ŸΈγ7ζςtξe~\”'d”υΘαQœςŽw€1sŒxg}φγx\Vˆx4Œ½ΣΩwŸtfVΘtœ}Ÿ˜aFqu–εε ~Ϊ₯<—–…>ή(­ώ[ 
ΟO­~~ŸΫλv»γΞθ·³L’*\_Ύ>Aή9Υΐ}αΑΦnŸ„`y³¬7S“MrΒμY`žW/ψπΣsΪ«ότ–Ά_/σ‡ -}>eΟΗΪΰŽmd¦7\³ PΖ Ν,ι嬔1 ·€—z™"τZm»ΈQπ΄ϋΊχž7iύeYΆ§wΈ]y8Οcώ―ϊcs萒7 i‚lCΔ·λg΄`2˜ΥzΖΌ/σχ<xΈzFΉΚάύΛ5 vϋۘO« &Θ}φό<έΟLv3tm‡β›!ΝÞ1Ϊΰ½ΝΣύΠ5{x:#ž΅‰ œQŸ2c‘hω’^_χΖtn~mΦD_λΥΧiO΄“―™VΘ>Lχ§ζŽa φϊ­zΡγω;ν°Ϋ&¬.χζΠί½€°|~]πnΨήΐ’wΪ!='? ‘λΛ¬ύdέ'“ν»¦ΩΎ|ϊΙύŒb>5νΕί֏9°ρξΏrέΨηiαώ1Ω!vš%1ΌςΔ½y9e­₯ιΉ™ΫΏeόΎiΑΒ1I›h“bŽcΏ¬—#ζσϊ}Wμy&.wKΊσά>°ΪηΏe7Ϊ«’ϋŸύ­o‚‹τ­M3-₯’μJΝ|κcΚχI―dΗ„ uνω)½,‘…‚ΙϊηΛαΰfBΪν5=§BDή‹Ώβ―‡iH7o_ΛιΕaί]šθ/iB―Pώζω0u˜o€­ož~€8rP=―Su=Th_τH/{j*ž—Ξ_UΪ–Νr}hιYΎ"Ζ"“ΪΨ―_°P«ν‚φώ‡YAW‰ρo;ρΈAa§_λΨο}ό³‡+Ύρ΄‹žΫ/Λo7yO<Ρ―FΘ~L<–™Vι΄;OλsςKz;Ηΰ¨nV¦5n”ΔwΐΡ–"ηŠKΙ…/₯RΒ y%η\΅kΑ­ŒJήΩz‰ςpŽ—}nΘ;cG³{¨ΨtLΞ.qL¬u§ΘF%#DΚuŒΚη$έ±l [UΘV)γ%€₯ n±.ϋΉ]<ΏΎχ/u>‚{rζό'dς>Ί9ƒ/N;ΕT™΅Ζ£$[K©±Hπ= Bl]ΨNk|”2Ks湏ώεΦ£ ΣΌ –Ιi‚΄²ΐŒ1‚ΰ;RuVόSEƒe•J香€Θ†;ͺ$}Έ°ΞW—lz‘½h^*RΚ#«°Μž@²TΥ²R-yVβ4N& ¬ί‡m|€bTiEFln–xC οτϊΖ>οFςς½w\k.Ξ†?ϊγφώUYΚ'­Ξ€―ίbl©ΙΑ·ΑxCs&ς7ς\vŠΝ;@*‹d”ρΒ€OZΔκρ·˜Θi"\RtcRQšŒu:Ψ’[$§³e&=(&(‘kaZ4΅Oφs;CGŸ!w>BΔ™ΛλΖmΟκ‰ξN |ΝΒa”ζXόaΠ™Wηπ fΘp ^K%)†ΩΦD#„M„‘0Wkƒ§―T^ή JUgBy’–Λ$Ό›ˆΞϋ¦€rͺΙͺ°φ–Kn¬ΆŽυΏUͺNΆ‚›iM>άψ>m.δ)»£½™iΧΕ †°½Gοup'ο©·#ωδ*Ν£"Νήπκ’(VωζQ±X0ΰ'\JΣ¦V}z„;΅A‚έψw“Ώl/Ο3ŽW³ό†όΪόί4Gώϊ#Εƒ4-ς"c«w’¦e·θoZ#ύίγ#…§;βύ%ΝΎδΐΥe΅(XΒεϟΪPΕμνψΐΆω’}‘0QZ„ ϊh)+Αύy2—EΊRB /<|.š'&ΐ%λ‘ ‰Ω/o3 L•?a*ρ¬ˆoί΅”Ω|σ{Jϊ¬—3ΎZηe―ΰJ =ŸqΗϊ’«žKa2ΣΪπ=—™³Ÿs-qp8ΨjΫo‚ί` lŸ@ -f ―Τ{vΰMύžΒ@κυm%`fŸe:α{{ΐΡ*“Eρ6c9ΌT kΥ'η‘T'πžϊμ{G˜ηdCbψVdk‰ύ¬dχfχχHbΨ.@EυšΞΏy‰‘ΎδΏ}P—Z]†Ϋl΅ΗknΐžskΟUθ»ΕΗ^fΟ,^wώͺqΫ6ν=Ξk½š0HmΊdΐ†ϋWάΆMm‘ͺ‹ωz΅J³βΐ£e+έ—PAθΟΫτ…e 05 ?VΔ^σ<έ󇇉υ4%ΉΘt»h~x(??U7»φi¦gύ“‹΅`“ΝŠ_&Ώyγ¬Θ0_²(ΖpΈNm@\ρδOΐrςϋ₯όŒ+ŽΥΧM“ocΜ"Ύ(`ξΟΰλπxyI7ό±ψππIα9›“ͺƒ bŽΈ„ |όΝΰΙ”―—ΐ‚YυlΌ7rΠτtwΖ’[ι±ož4zT¬ΰΘΒκ2Ι„ΎςηϊφΏ§Η₯8-²°zΡΞZ΅½³’r‘dτͺΏ}Λe‘‹V$ƒŽphΔ:€„‘”Χ—Q‚ݐ!~IŠΝ”λ\E@d)—y‹³φ­XΰTU±› ‰ό-α‹m…Μ²4+?cˏΆύΘb‘₯“)ε—%K&ΏY}μώ‚%Ρk_„ρΏθœhhN)ƒΌ WL]%(]G…άΛΊχ2Δχfί΅Pτ―2 θάύŠ8ƒbρŸΓ€yΈβ pΘώCμ6ύ2Ϊξ‚;υw€ρlvάkμ=€Ψ:j¨L;r™>Λ™ˆ²‹γ=Y9;ΪιDΝ£v»Gηφž:Ϊψ3:O΅ŽΕmš«#qΧ<‹vήq+βνΧ½Ž>'t‘cηΠ՘εηΧΑGηΘΫγGd{gψΙΛ&-ύŸŒώΑΘž½­ŠtΊ`ω’ςΗ“ž,dŸν”d7ΐ£Χ‘1 α2­ŽΨ«τβ{λοΗ/d֏˛κ¨ϊ΅μ“ίG9¦E`‰LΧω€:ŽŒGψ'λ\ŠIπ¦ίφx]΅<¨Ή–JΊNΖκο³§ν―*θλ―}ηd±ΖΘ4LΧ‰`›ΤJ;0ž‡W͈1sgrh†}ψe° 
b£cη.#½pq:{Nζς4ΙΣΝyάνiΫΎ°“―ωΰλιόt?ΰ9φ_cŒύ‹ SEΪ‘Λι.7ΞΊYDtΤ ?μώΏ4ΐ9ω¬ρό½ιG«ΥΫ;δ¦Άέ‘Ϋι=ρ:ΰN-„YΛ ΪΌΩdν΄'πo“ϋκA%Βδ|ΈΔfq”ωnJ§Ϊ–ΰPΆ‹;Έ£9ξΟέenGΤ—ΦjΆ”]‡U©πΠΑ©Bf¦‰|ΩZ΅Ϋ¦τ(œ©s3[+wΛxΧώΆV\7šΟ΅’­“Θν ŸTr^lςng½Jؚuu£,ί©JRβ fYG!ΠbΉj)Mξ9Η|υ3ozbΈŽγΎjϋ3{ν,’§ΫψBŒΦœg•v c)WCu/ƒΕ0­Vfγe»χρ‰ŠόΣ²fš©{κΡFλΟ»Λϋ©ηΡV±š^”³yI‹Σ«,θUŸͺ„?ΦΊψ»ίUΡ<λy”tΦ _`Τ¦ζLτL`.η4Βz‘Λ%΄]EEΘβX­ΓpΧ¦ΡC™{ΎΘ7<\ŸGμ—$&ͺRέΐ^kμ¨^±Zq€lR₯{δβTaκΣζύ Όj»[BoΈδ”iί~)sϊiΞ³‚UΊ-°α όΏ[ώ±ώΣ θ_ς*ΰ"gCΑƒωŸΪωŸ]BΗψ߁ι-δƒΩ–yΊNŠͺ εNΊe³Κ4sύ6°L‡ΏeΡ8œΦσΜΫΥή~IΎF{Ε)ϋ\QpwΡ;(ϊk¬zž8€·₯»Φ˜iπόΏWσ=V§9]ςΧ9( [X‰„)θƒ{έΚ­εn€¬Ω0*얁¬ς«'iΆdqτΛ&ΪXΛ•^]C½šκπ tQ‘Ύ$SΎd*-ΚΡ‘ZήωJρnο}fŠκ‘jΘZ₯;₯›Ά~Ύ+'{W~rZΩEŽSΔΥDv+βέΚΆ~ΰ4e{;_5E[ΏθXEϋ&Κp7츍jΌ±›ϊπψwUω<YζxώkHτΧ,τ&ψΟ°M«EΫι?tόΧ†y]ν˜m °_Μ_¨ήkHΧa@ώυIΈΣφ;ςk΄ή™ώ[ηίmλΏ³#‡1ςαλxK—TSώξ( 7Δ( ζ²}5―kΤΎλ9Ά±ο-θΦ8–1^ŠΣš=hΑa$ΕtΫ+Ϊ²$?œ§“—Ώ5ΝΧjЁΜN9.•n‡ε¦Β(.T]τ‡| šγ³δJλcΩxύ,NUC¬7Τς²Σσ«ΨΦφuΫΖέY ¦ς΅/ζΊ^π¬H퇽©u~ΐ±ѝωϋΧuνQώίƒώeΥ–'<ΐq•βΏΗθ†ΫςsτΌΗ_gH[Ζ±ζjͺoΑz>ΛdXV‡UYρY΄\‚ -hΖ]£ρ‡ŽJˆi?΄¨KGΌ¨2ƒl/5L°›τΔ·Kτ’Ό«πžΣ-‘˜γ°[d~ϋΕίΞγ–αŽρχ )υ³'mΧXFΩ ΰn’o~ρΦ?μη'ͺR&² œΛίΫΛpL%s―ύ~ρ_cύχ{ΣQαA­ε_dv™'ψ ύί2Ϋρ_ζh:o"sΨ{}•|}°ϋ"OπYρ?ξˆξKu'J`zβψ2 p°ώ³λΆλφΏwω;`'˜0jSδ@_Bχz9γ«΅‚όψ=cou£_M]ΰ@—.0Ϋ±Έ―ΏΠzfη2—DΓY€x΄u•ψ‡Ό*D·­ό•€Ϊμ€#ƒeDΎ6Cžχ„ΏΚ,[£d^¦*V_«ΌΕQ rY4+iΓη(εE\~Nнα³Gο $ΐyυ?Gϋο}ω?ϊρDΚω—›ηUΉ>[ωGω?tόζSΰ:šۏ'>p‘Ν¬c{5…ΞΚψΡφ©«Η•œ/βνΑ3Ս͒uγχ]ςcόη}ιΈγΛUΜ?‡ν?iλγωΟΑσ]6ΎΓΔΟεΟ΅E7rθαμ;œλ?ݟώΕΛ¨ί›mόo:cύη‘σHΧ 3Ξ©ΘΥΚΉU&/+‰|ΕσΦπq!Uβ­FαhŒαΔ[*^}€ι—Η#oΥδ^δΏVisΜώΏώ7ιθ»/ύ—Q]ηπαό-―eτΏύ_&Ο³8Ολ#jUr*=ΛAthvκ—5™·ˆ½ύε)JΓNwiΗ*)†nλjϋΎ:]v$¨,D_–Œξ²»ΙKτq§έϊΚηWΐΨγ‰ψφΥδ/’-» /$ ξV°•|‹υα„U&±~ :εΑψώ»?OΎΑσΉη&²ί>qLα©σŒξρӎ'”O΄dε%y².Ι72j„7εW9~ΞωoΣν?χ uώ_§tP‰nΉFΫώgΩcύ―»Θκπ {Q’Χy<ΘγφAuQΧ‹ ηΣ(lKI ০i8νԀ̐­€Z3 7ζκΐ›%λ?:›uGެ†ΈύnΉŠεV•LC-`ΥΪΟ'Ο›¬²”Λ<Ÿ¨_r°Ά‘†”yδ˜ύύΏχηkUϋRpπό·c·θ―τΏ―xxΙ–?_&ΝΠ1±#ΙζKυΥ+p|X κΧEΙόr–_φl Ω¦ŽΪ €sψΏ3Φ½/ΰ£³»\nWŒ5Ϋυ?©1=ώ.ˆ:/Β·½¬Ζ@ߏ²Οηδ cώηΡ?—ΩσωΗ@ΞβcώΧ―Ÿλe5ς²ί‘Scδw¦œ_©Πϊ[ΔlΕXΤν?w΄θhM΅TιΟ­φӁŒ!ΤA—1›Y-"Ω:†Ωb΅€<l9ε –$²–€xμͺΐc)¬λΔ-el«,$Q!3ΥΕm€2FΙV|Zš”΄»Άsc'„Ή™Ν+²£‹wϋΛυ‚Hυl'»gαKœOk#φΛK 
“ν@ρΚs˜έGŒmBυκ„Ό:£Σ©7―Fσ€ŠwΦ9\:«(ΒNίv΄ΚέήΞԒ諈?Š£SšgςΫzBΨ¦ι8ΐβ]Σ΅,BCΛ1KZfθqβ›AϊΪ6¨πI9κwψΉZ΅σ£P}OβκQΑ,.±Ο¦ΜΆ}BΉεI‹qΓγγ¦Ο¨°X–γϋ!£†°„ΘWΣβm%;Μͺ«,]₯Ήl›SmMΝ` eΗ‡UP ΆΧΡwΈ0UΜvε;φΨϊZσΚΈ l­t@Gkκo‚F=–`Ό­r5^]m>ζTaΠe1M’ω’ˆίN"œ.ΗYˆz°4$‘> C“Έ!#Aΐ™o”Z”H—sα‚j"φœ+ΐz£2ƒ ²Ϊ,ẚύ’› qy*$ί^Ϊ93§6β—(ζE¬r»eΥΥSjͺXΓ‡"ZJ1Hm$UcTΗ56k.θς¦κ·7[ξȟRj―I=S;δτNΡΥΆvkCι²μ΄>"‘~_΅Η₯ƒεΒΥ2ϋ‘Φ\―©jy=•ŠΕ{ŽaόkLφ ”dX="αϊΣ–ι/Q3]<βsYzξO½¨§‹RRG¨><ͺ”΄œržfΡ¦¨…zc¦ŠuwχP±G‚•ΈΖώ‡κτdŠA§Ο4D –έΡ‘J,HΧE­”₯σŒ-s<ωέ?ώS>Aι”ΰ&ϊ—Δ,mƒ•žΎξ/5'J¦ž=%N)nπΰ%£ϊφυΰλ<ηό0άߝώuΑΎ2xπό'mηt1ώο=ώΪψEiΣDΎlE«¦ΧνθSu-θΏOΊυ ~υeW# σβ”ak2λR0[ͺa»άν}(ω]ͺV{ΰ ŸΚκU%ϊΐ!>m ZךΔΫ»%J½όxζ(«ηͺγ€% ΔW Fύ&Ÿδ ˆΏQ« ƒ:‘…|ερZθYQeΥ΄όώφ‘ £\\‚>6ΪȈ>nΐΟgΕXcόί0θϊΤMσΏελmΖ›cώΏ»ΘN jύb ΠΊW3;zέ¦Πͺ%¬¨£Τm,ͺλ“Ϊ2uVαΘ¦ΡAwϊ‡‡ŽΑ<•†…M―Ά­Άέ+°Ώ1<ω~0EΎδ_’Ιο_*ύς‡ω’Eρ%ϋ;L1­N’Β‹–λ$βΪ*πΝwώΗίŠyƒ^²x‘2Qζ ΄T&³³m₯₯ξB‰<š>>όΆ;ΚaΠ_X/A‡δΏCΫυ(σέEώwjΙ;ΪύŽ/t(³­“e? ΠΥvΰΙ‘ˆΌλρ±β™Ηα–½Ψgλ9Œ“Nπ³μϊTjξυShΘ('?ͺTΖ œ°Ί›₯Œί½!γΫIαiξIΎˆ––₯λ σ _ ΏLΦΚΆ±kΣPEΎΥ9οΖ·σ‹’‹€R΅.ƒJΞ ±όpŽώOΖψŸ{ΣοΉ„ΕzγϊOfWύcΔορwIώίZ͎M˜fΕTΪqΟΝΏ»ΫηωΘ¨WΎγώηϊO¦=ξϋΣΣiί>‡ΥaλŸ—ϊθ#ΥεŸZΊ£ŠZ[ \F§Δ’ε’e|Ρ &³ΠΧΌηLx·892YS΅άeφΛ#φ}ςŒηΏοB*―ΓΣεfΏ£ν†ΫΚdΩd<3xό―Ξ%`ΰο\&³ΣΉΛ»ŸΠχ΄ω+ϊς<ύY.UJΑiώΦΏNV™ £W)fΊΩ,…[―ViVΐ΅UΜ £ΝΩΫtzϋ֏²"͎°ZnΗκ%ΪΎΦ‹Ξnό–Š›‰Ÿ—Ϋ*ε|<αUΌ»d―³-d3 n·Ϋ½ί2ϝ’ριRΫΖΖ\*ΏJώ·όο–5ΖήƒώUώ½ωσBΤΞ₯)u$8h³šρΏΤ cύ—wωkŸζά—Πwrέ‡‡O‚G«€ΡŠ—ό&Ÿ€/IyhΔ^!—Ÿ ρ™u{Wgσ4Šο’κΚϋt‘p:§2ξΡζΞΡσ«EΓγ?ξOΖ8(ξ―Ί82Ϋο34‚sψΏAΖύ?4ϊΓ·³ ‚‡ω+‹λ’ΡsώlώχAqμαΘόίcŸc:'—3β»ΠΧΧΜJtž?θPώ/£e!mŽφŸχψ»ΔδEΦά¬|όνbΌD‡^o…L3!χ<‹Η­„ρ:z›A 3ΤtΈƒ6Η«ΏοUΩζΧYάνHΒx]<–ώ .™«oΚKs„ί¨Ϊ¦’ΥΞ{YΝ“Žωpύκ‘_6Ϊ‘a€L­„gΏ¦ͺ=vΎυdύmς΄G0Ό†HfΜμφ£› Μ8O•Ι­"ΦVεh`/³g―UΌϊ: 0"\ЬΫbΡρF³μΘΆΚtΛ±­ρ]Kφ6*υ§Ξ[G‡Β4:ž>VY—δ+LήώιtGΟQj|ιΣεΊ›$:[³ή΅ Μ±‘Λ«Λ Γ›nRόπΒ.Ξσψι63f„Γί―φΔ“n7Εο;ΧΦ!Pψm vš&Σ4mω―‡ /ζiΚ‡ΦPψ°Y|:I‹o°θYϊ’Ά―u9…Ηœef—…ͺ?λ¨^ήσnΈΝVQαœš* ―° χCΐrYδš„α\ύ“&yΛςΎΜ²lΔ8JζΥ•η€ΐ qjYFτ΅Ό%"Σ7ŸRΈ ?ŸNƒ²©ώ€8ε—θˆΛ\ͺXΘ9γ”Α·²iΌΔ©=$—iφV^^FI^&€,ΏΒ°q0κΠn­τΗ—rΝ™πSφΆ*ΚΟε”ΐ§(εE\~Ζ)TΤzΙiΥ"§ΡjϋYOψ'₯Ζλ#ώ―α³βΞΡ¬±ώϋ]θΏ1c«ψω…%ΰΪl·γόο˜cπϊ_T„1›λΌΙ¨ί=\M_―V­Ψ1dνͺϋ’8°³ΞŽφΏAΠΠ'»uύwΛμΘdόθόΏ :f›εΐŒωΦδЎθήmt5ΓΛe’χ―\P΅οR}δC ^άόό'1ΪυΏlsδ£qslj\($τ΄l½v?€n՚Ζς΅οX‘ΊUkͺ<*=}Ρχjs˜ 
GΏΘξφ›ΫυGtΊγΦpg 20U₯QΫΛοrώΧ$ΞΘAYœΎΐ­d~68$©kΆλ9#ύ‡.{d½ΊεžΦΈΆ’•ΩύΫ A,ν4›­2Yo½­ΛϋυΖ—A™e:‡]ΗSϊ^­ρ"κpγ<“c^ώ΄>θ›§A²ŠάΦέέjͺο —lΟP†ΑΟgΥλ …ώ+–ΓΉΐ pPώ«]gΜ?xωίQ¨άPΞ³’θΎxηN€ œΠξΦεΝZσΛςžΖκΦP€ΑΕ"]σ’Q @ώΎΰ¬ϊ?cώ―ΑΠ_qάΫΙΓtI[ώ;£η£κΗ ϋƒ’ιra£‘Β(l²ί—»tάC ?@θ‹ΐ‡λΏYνϊ?Φ¨ ]6άUΆ!ω™―¬^&xQιEξΧΔ/uŸUΝωθ€ΐ&_ψ—[Φ£†CΫυ_Fΰ忌Š…NυOu€‹π*Σ$ZηS™Μ£€* cͺζ= $-fς™Ε} ₯«ϋw‚ σŒ­ΟΡ/έΝ«»7υGIΨ?Τ­3‘J$Dάƒ<τ½Ζ™šΠ+­υ<²m1,ˆs’;ΌΌ€χαΧbΙ‹ήΞ45Οχ<5°ψŠη(n”ωήυΤ™ΠQώߝώλ(.’€TSΟD‡μ?6΅ΪωΏŒ1ίΧmzœύςu₯k"w5VχήΕΜtBϊ>Γͺ(¬ΨσDΩb Kάb£~ψ €Σψy(ΰœψΏΡώ?ϊW΅›°τΠ­β)iΫ\:ϊ/wΟ‘A‡λŒ˜έπa“ ±όΆ”,‡ίΠάΤάW3˜ͺL"‰.ΉηTα N‹/Ξ­zφž­|†)2{†ΎySUYΕ¬ Ί`%³š%I έcE*οžk§ϊΫZfo3e°ϋ‘z‹m§Ί;>ͺΛ‹Ρ~τ•ΙχώΣω8―²λ:~·Cπη4Ιω\Υ‡XρΡFdˆbNDaX]ThΦβ«~Ώ…¨P.W˜΄―˜ cΆpbό,yqJνqLΜZ%~q¨e&_€*/l™ZZeτΦ©“±R9f¬ΖDͺ0W²ηΫ)³xοdTλ™eςΊΫαΦ―€rΤ"’’Ο=\kpn=)^kΔ‡°kώΆ x¦T<½9»žΩmτ‘/*|ωž ΟvžƒŒ1ώkτ―Bmnxώΐήρ_;ώkžΟE±Αu{ wΏtΧЯӐ̻]€h>nΨ…ΐbξx}\q<ησΏcώ!ΘΌIύ7 Ύ£ώλXνCΪΒω4 λ@»θπ+Ϊl”0[–ž=U μ?α‚ε‹ͺ2^±œ ™ΙPsέmi'u—ٚ™Θ‘&#–eμ­R$ϋF;ŒΨU"bcٚρŒ•$έ”DΫάn²qR•[±μ τe§E³΄ͺκ³½mδ΢ښšu«ωrσ±φά΄Vέ»λαiWΕΆΝΞ‹}―#›έί,‘Νη+[dN2}άέlΠ„™έ"Ύ«J™Ύ6C$άψλΓ Λ—&ύλCoΩ²₯\ΒΞ£›rŒ—W%λͺ™m λI}}ςέκ?ΉΦX}τΗjœ—f8@“’VώWΗpFό7tόΗtμvg΄W «¦E&7a]ώ»„ˆŸ™u·C‡ͺ?ϋ¬IΊΑ;₯58\ίφRf{ 2 5κ4^•·λ!dp±'„ ξΌ›ξ”Πϊx=’Y΄\υ,€Zƒ:heΟrΖΏεQiwš| Η)ίV’ιi¬ξ©Ζ[­©=R―ήbš―¬>”ΦΡ#ώαRUO•ΙσΠl‘Τ³‡J-Pΐv«•)‡’G]Ε͝ŽΛeδώ€βγΰ%Kφ:‹ε³ŒΡt›ͺ¦εhžvo΅*-Ÿ¨e(9~€rq¦.°[zXΘ`=ίΧ­κοŽ:ΑGΔ—e9ΗώkΠρόίθ_ 7‹3¨αΆύξ¨ Ÿ/}κ0v ¨ϋ}γΜ1ώc(τΟok!k=6λŽρ_Γ·$IZ°BNσ$Z­dQy{ΚϋΆk½gΙΣ8ΝΆgρ’VZ΅ ¬Λ·N"δ=Σ—Hh[ZΧͺ*ˆ&„h‰yΏ…AέRΙΐ‹΄ΐ8tS-pΑ±Qσ·„/Άn‘€εrΤ? 
Ώƒώ7ϊA•ΎδΖςί‘]τγΏ†.―G•ΩγάΡεbΖ A§δΊ5¬9©XΡ\7ρΑ΄8bλO8…ι”PΓ0„Ηο ρόοθ_n—Λΐαϊ?mύŸ‘ώC—Wš·Υx+A:Z/Ϋ €sψΏΚ 6ξ»Σ_ŸΏ-§†9ςασ Έp™λaδΒuΏ?λώΥyž[ΦώouΤιΥπΝ©°Q|ΤύξυœΡ?ϊ/’ΛΛŽhΧ°Mk¬1tϋO;۞Xm+{ΝEόΦvό+ξ’ΧθBkΣ‰^—[%»EdΘGΛ—†ŠF<πψ₯ε?Οͺi’QώƒώUύχ›ε3lΔzΝόo#ώΌόΏύYή#O£^W6^*Υͺa|αvόώίό_.Ο€ώ΅\Κ7;E¨Σuώk<1xώ―«ή©όWŸ/Œp^ς[U{Ύ]rΞ„έ]$DλάGq(όΟςτςυR π!ϊ[v+ώΟvΗσƒ—ηgΚΌ­ΙτBΩ£sRώJ€ΞQϋB ΰ9φ?ΗγΏAπβτ‡υ?—Άι?ζ<οΡψ>vu…―Z+ϋh>B]ΟbTοΜο"ΗψAΠΏΚXx8@Λt–ύίΆFύθς7γo³ŒΐΆxΐQeΞkpͺ2N_`P}uλ-.>¬~μΉσ \σ— ΅Ί[`ΕE”μ+»m:$oΚ³§+Ι*ρ1ŠαΟύ)“uΫ?ZͺεcQ₯>υάΣXŸή~―²₯I€ŒΕΌͺCο=΅ΎΤνΖ#°Λ“ΜώΏCύ2žύΛΈΑ[ζ₯ΔvΫω?ν‘ώCη»ρίyΒVηΏGfΘA:{NŠ$Ύgh›Β©·uΫs6˜ϋ˜ζ6qή7 ―„eΚΌΒ/‹»Ž—_ΉόχόίΖ9ϊ+ž|[ύŸR»]Α #ύ‡Ž‚¨c6Ο΅τͺ=_°ψΛT!ΰόΆ­Β‚dT,dYϊ]_™ηsQTΪ8’ΗU§I΄Ξ§2™GIe.0Uγmž9O‡ο³Χ4δ’κλŽn·οbOvΊUr«_κ›uŒ™Ι€η„•Ίυ• Φθ­δέ „κsTω[^ΘεΎ“VΊΕΧ¨τΆ’³Ύ9/oΏj`Μψ0,ωξφ?ΫuFϋ θ_μ½aόq,{Œϊ€ψoΔTwΝTu«8ξ_Y8φ PO•>α݁ΟΡόέγFω?ω ˜φΛΩΰό7νvώgjŽτΊόίk³‘s%ΗΘ½K Α:1z½n.?+.5ΪnΖΟ…ηδ'ξhύ7ΊΞ‡ό?†ΣΚμZφ˜qθς?*dV€©J‹œΦΏX­Έ>ƒΰ1Dmχ§ήۈΜ_³kδΞFƒΞ\9<…Ζ…f˜-§Λ·…’ΈjτTkt ΅MΘ8"©QώοΘ3ΰ9ψΟλ‚ώ΅<«7³«UΧλ?XΟ―gύκ /υ4Μ#\ψΚω{ΪθxώsτW»n]‹Πϊ_cώ‡‘Λέψί½ωΜωϊ?+j§p y—,€ŽœΥΉm»Λ;¨L³-Π¬‹)ŒWΒο’wΤ†B2cη­τΓ΄ό7Κ‘ΛΣeΨν…R•]v”MΧίοͺφΏaΠ+aέφό'±ΫϊŸc8# φί1£ϋ{gt?AŸžΡύΈrzΝ!PυΏŒqΏCώ/:Ϊ‡@m]ΛσAΐ!ωoνψ?cΜόAτΏ#Εύ-άΊ)Ž{cΣlUw7dq.?Ÿ*ͺj…iGxό»€Ÿ Ξэ±ώΫ θ―‹T_f8θuΫϊ?σνϊ>₯?Œ ±  S’ό25Ή¬p?βŽΟ/2œ£Scδ Ώ>νs™ϊψόΏΩΞγΪ#ώΊό_J–ΓSšεϋŠ―Ÿ—„ρ]ςά;όfi²?ς™½‹πΕ–9γφό|pŽόwΙh ύ5ηΈΩω/ΫpΫηΏΖόƒ—+–}q>‹Σ*•39\=ΰ―ΚύσΪΈΏςSn;πι+C§πχŒ31Λθ;-…½w[ϋ?€½ΆύΗεΠεWύ‡w“π§/Π?υk=„U‹―2ξpΕΕNA)*c°£δ½·EGς;ΨGϋοδΞεŒ%,~Λ£όFω%ωθXsθς?‹σiΙΡQΨcώ?ΌTςV,„eAe&δμηΌͺθόωΗ.”Κ·¬΄y«ΨύAYκwΨΘh¬¨όσΏ™ξ¨ώ•9π†υ Λiγ?{Μ3xω―4ΌεΆκχ-S’ž`ιΎLRmμίΏ)uτώηϊ/6υΏAΠ_ΕΫΪM=θό.‹tU”Ωί±ˆo²^Ξψj½/!ΨΧβΕ σΩΊˆzΟ—7Ο;Π62ΜϋO΅©ΫοιόΞ±λJkυΗwηοo%£oτ‡=~iϊΟΓ翌–ΧΆι¨]ώσp>Β*›'€œ§+)¦E\ƒω‚Ρ=bͺ ‘/‘ΠBίeΑ₯°KaRΙ‹‡₯˜ΪMξοŸWJφBh1ˆθ,˜Yœ"Έ6%]st™δEQ?Šέ_)Ώ0ύηYω?­Ρώ;ϊΏ-ƒ4>•ΜAω˜ŸζιΘιΪcώΏ‘ΛΥ:yCΩ­εU;ώ%€Z1rƒ φψ}ίΗ7ϋ^'ͺ>jJ³ΖF…ύΘ»ψYPΰ―KFω?ϊkζrΫό―†ƒΉΎ[ω_Gω?tωBόχΧ nΏG T)ΜίOϊ·οs<{ώ”V5Ÿx­VoΥ:ΐKΚ©u†,8ΘΝ¦—–1ςχψk2υϊT“9;oΣ–%ΐΣ•tψ±Αή8+δ<Ν’Rvω,γt΅”I1UkΓ|„Μy­ͺO*Lh²ρJf“"Μe"3xΝ†ηE‰.8€ŸαΣd•‰ƒG·υΔƒ+_δ°V‘» 
,ΖαογΏπ8ώSΎ?.XΎΐŽΕ—‰Κpύπύw™ό۟'ίΒο,δ”*]ΆΑš7[₯šdBίόσ?}ϋίΣγR¨««4Š4Γΰ‡EQ¬ςί==Αο/ΦΑ#pV΅™¦1€iύIo±&·§ο©kΎO—§σj˜cύŸAΰέτq·±Ωn‡ύΟqGϊzσ?ž €0λ;ί‘n½KЍzŸ=υͺβIέ:€Γ›ΝΌ›C΅§žΘίΡώ7ς‘ΠΦυŸΜϊOξ5tωΟ,ώ2•Ι`"bΛ&KΆχyRΘε*ŒτR³ΛM\^˜™άœ3Ε.°,coΟ2dλΈ¨0dq. ΡUάz΅Žc‘Ύ$SΎdΩ—Sžχ΄xΐšGΝ„ ΅rHM6l*6¬Ε lΆ)NΚ9n§hΩm‘Ε Ξe[Ά Ϊs[œθš±γ‡F‰Q=XΞwΎ0œ;(QGkvϊ.UΆLž§@Uθ~«θŽŸkΕ€π+Kς¨1ŒŒ&ζž‘ς„U1-€B•εˆwγΩ–ψ“)ο e?E²1Ί„*l€z•Ε\Uφά_ΚΩͺUΥ-ΆοVC$Ώ4υΓΉςί²ΜQώί‹ώmns[ϋ―ΚυάΐdŒΌό?]ότqξ6(ΈŒ7ΧeƒM“ΡCyΖώwϋ―αŒηοE}ώ«ZΕB./“‡θOΆύWύ;Hό~¬ΡΘ‹?ξώηόοζ? ϊ_Αx·σΏΨΔεΠρΉBšΖ*ψοΦ‚ A― ¦OΥΕ‡ ­2ΣςΤ•x­ύΉ θœόŸΔυϋΣ?\’G0 £ωl•₯₯σχd‰p0'mηqΜ1ώσ=ώΪI=ΞSEgΊuΠόm*suε―yKΪfŸp«t+ΟQ§[N΅#5Τλωτ%*ΣrΩ*ηΚ;qNζšίύ§BŠuΖb=!S³¦+§Ν·žΖUx+$OΧ*r„„ƒ'ωqEΜjžn2φ4‘/ΫςΟ" ΓmΜ†ˆTxεˆυ]&Ο³8Ο«€|{«ͺX4Ol“dœΰsΘ%?Ω/o3 e•a$ΪoίϊΊπ\2K_#qΑ¬’^iπ³δΕI}`1Sӈρ½q¨΅πEͺ’zΥΥ5ό, x)TdoΖ%άί%ͺΟΏ|r읱 ΝZ'|j·jMΧΙ b+lΫϊ‰ΦΓΊ±~:KΓφbΪϋŠζΫsBυ"H§―nͺ‡7gB§’€γύ΅NΓ$ΒΗ¬ΫωΣφ7ΤOΦ&―φΜf–ϊŒΈS•Ατ»P>αγRυn-γhͺ+Λΰak\Σyjuξκ5Ωy«ΆόŸΊF]±‡iEŸ©v;*τΤq·ϋ!MH1Exο}ΑN˞—©Tͺ{^!—έ&ι”εϋŸΥM”ϊ³;Λ₯ΗΟΛ)p£ ‘ρ$~|Ϊ½£W li­εΞ~©Μ’εtXI›Šjf&Ÿ:o«ηΚM£_ΪΪA?Φ—ύLΔ0†Ω\lύϊ¬6αΫΫOύ-οASC ›,Ӝ²ο%υf7`7gQΊηα²…zN%ĝφΟwgƒ–χ»©:.€Φ£€νΖ21ω“Ζk‡\ ϋΫλ­t’&ψ¨³κ:-τ4zζs}žu!Ωj–λIL‹Χ₯šLPŒΰΦ"Τ.DΤΞt>NψΦό7,yϋF_Ÿ! 
ƒ›}xυœ™cύ­,; ?α­ίnξ©c­}°»Œ ΈρΫ‡ržμL7˜² :‹}]rω©ξ;τϋίμα μV“bί“΄ψf·r©i/μ§‚RU‹gθ¬2/ύυ‘šαΪψa «[0ηψdίEΌά’³SαٌOa DέΏS»sπγ—φ"όuMόιf­“/ αͺΦ[«7υy&yMeγΊλ±wΡrψη<ύίtG½ιߍ―_‡Ϊ–ΣςΡ1γ»όLΜ«γΌ° °QRΐΓ*τΨb―71x»8a Κ£TˆΦ›R`ΒϊuωΡ/QΛƒu”HZ š~.e}9ΉΝ‘ Ck™ŽδVϋ\5ΰœόŸ5κχ ε)O©Ό‹ύί₯νϊ?Ά1ΪήγO₯²hΗyςMO² °EmW lΙ~…άΰt¬λwDV΄]ΤϋάΧ‡}ΤQΒγuιUΪ<ϋτνΔ=KωΓ―vόκdAw`ί―ώΛXω.όZ9ާ?!ΤΆ›τ'φ(‡†ΨάψρΣR.©Γά?M&Θ&&9¨«‡Οxi·€ΎV .ή½ £|ͺ7uq©ξilͺz½ϋPy#ŽV«·ξ«ε?3!Ÿ[ tg»ή¨yιͺ«³€Ω(Ž’/ΚT.»nSMk 8J„ΦΝλ ΝnνΜn^ΫΊMvοhΏdIvŒ.“K˜GU2m Σ#“½-r™=·G¨XH„½Žγž»U*„¬“«ΘsžIΐEΔb}}Ί½0%+–ηx@ΰμ7,OσιωΖ,JΞ~ ΘJόΦ5vU)q9ξ\ *ΉΦξεe”E]ΧΚtέnVάh.N£^kΡΏX"¦olOaχγ‰–fܜπ;ν·(ΰ‘žΝ¬—+@l©}ϊρΣ'ωΊPΨR-Φ.ώ1Γ^Cwg@ŒDΖQXμm5ηυIΗPΆjZζk=c?ω—oψΏΏ‘BΌΏόeε<ƒ/Θ'0wΆ΄•?iϊ΅"‘'jWӜΙPΒ aώQk‘V)ό"<%₯Ίέδ%ŠcxpΚ(|Σ$nj8j–°zΕeςoλ(C/ |šΛςW‘‘κΌ³[γ€b°n§/zšsυσ*υ˜‘ΜpςΗΙJ3.1%χΫDD9RX7‚m›©g›ο‘_-Ά~0Pd֊ΔΠ5θe2ΕΊ+lχeΠF&ψSBe»† Β wXT/iς›’Ό‡O&κ~‹8ΘŽί…δΜι₯,v {ΓM5G₯2“Όˆί?ιΰ΅Ξ”šγ§jtΣrtΥuψ­UλΓ–Ϊπk,~ao9nxΨΏ‹hΎ@K'Φ;MΓIΙa0g‘?–kfž²XΟ8¬bά ͺ§“«Vv”("A_’,Mπ|ςγ§r½LΥzΑτ&¨ς|λ&—»ι)PΉyΨ2Zb‰―+ΧEΔu π’ΝιxAdΒΑϊ+0—hŽ|FF™ήϊΜοDI`*)¬(άOΈ`p^a—H|α7ŠΤ*€ξΐΎy©fϊ·Ša(‘€ύΥΟŘt$Ϋ°§LΣ eή·\Γ–j„QQ2ΎΌΜ”JG’ΈβR?N°ώ2ώ«I’ΦzXΌ“π«ί†iΙ`{kΆ€'BqΡΕQQ)`ν=φ2?mςM=jΞωί'\>Pϋ(ΥΎΧRMΥκΥ_»=όΏΐ³π/σΟϊμ+ςόZl€Τz–›β7ΟΘEЉNμk;ϋ ήV³ Α‹κΌX-4$ΙW+­wGЇΑή4½‘=Ό{„dΔ§u» ΉŒC΅dT#½6ΥͺνNπ<*j]ϊτχ%nΠΜς§V¬ΰ‹άϊ»9z‚?§ΝYυ)Ξ†+N)©YNOWoHB|ίγ~ƒ˜V"~ό€ώέ±26ΙιF*.y_Λ θί΄ΖgλN…εQόG΄|UγK,ΐ%ϋ½BΨvψ5bώTΎπ§Š½ΓrΙΝ Νβφe(΄`χ|Σ±<ͺj°œjt‡m^ ‹Gς΅Wμ^TΏ}όΤH)ά3{Υΰk³₯§r₯jB€‚XΘΝrμΥO»6sq?mŽc€Xς „Λ<٧‰b™όΤ“Ζ{ΣΑίμ•ί`o;ϊVΟΎ 'υύψNύφ±79ΕΡoR­ϋ^Υ8’ΈχEπν7'νˆiišmͺΝ0hΙϋφΰŽΑF΅}Έοh…EςεμΏ¦cšŽΣΆšcόη{όύ½dJύ@{ΕΊH—x8GΈ?”"!τPψ›’6¬2τˆRίt †φ“%σΗO[η” |β‡ |ύψ©r~mPΨΓ§vςΰO9ΐ3ΰΘy”ΩΫۏ”˜yŒ©2Φ>|ίZf0₯CBΈ‘o Κ]AΓ£ŽΙ8cƒ%˜’ΊŽg8άρXθΣβ–ΪΤ BΒMθŸ:ΠΐU…Υ§=Ή„>νΛλ£Μσ…ΤO W§yε©’’2KrΣ2‰οPΓŽν[’ψΆe2ιΉΜτCίΆmZΒ%εQΚL&gΆλyΆg6·©/Έλ…,$Drιΐƒ:ΆvNΕR.ω"Ϋ3D΅B>΅\»ΗOk9…0ˆχpfΜΝΧΩkšrTφΣΎ,—i±.˜ΚέΩυ|ΙΈgΧ­ΐ Sp[Z儇ΘxRςΠA `12[8d‚ΨΒ7$3ƒΞAλ³-K/ΕCν΄‘ϊΎd«8*ʏٗυJ5e\€Λς* m6@Φ–πΗΟλ,žPΜA±gښΥW4τ>Βυ$3ˆiΐώ΄%s ' ά°CiΪ!°‘‘ŒΪΎoxΒ°Œ€Ϊ,gξΈ”vηΎ±$¦XΠfίPσh¦š4χυ‰”–GζCάΐ³MΖ<α[Ά¨!jzfΘΛ&±€qF=Γ ,CΨ‘Ύλw/’PxWΡγΓΌ§W¦‘M™δ-ΟβΆM<Σ`.€tδŽεJΘ0°ΐ€%₯ι3ΫτΜρa§Αa¨i]ypAθ ؁ (γΨfwah––c 
,Οσ¨„eΜI;μ#π±ΐm €ƒ`έόcX’υ Β= ΦaΨψξΥyΌ`ΠoHhΗσ©γKbΐ’3-Βy"p–a0ŽC%(O°β<―[€κ¬€Μ ϋ© …εΦΚhΥΧ4%0«*°)F8Q’Υ:kzό«―‹Ψ”'D˜/` Ÿ0ΰA!loΪ-¬ ΰ•ΎͺΘα+(‘q©οJ«szZ㡏·šΜO­Œ’Wζ8ΜυA‡Γ'!0Xλ¦/mSŠΠσ„-@ΎΩ‘ϋ•mΪ€Ώˆy°3솑;Τοζ&bμ8mGE‡Μtf­θhΦΝ—μ‹VxЌ‘sƒͺ9WΉζΤg]C²S'ΨH€₯ΜζΊ μΥ_­υ£iΒεŒΛX+˜ΙΈόυU&A ΞΆNguU>ά~R΅#΅:Qυ c0Ž9LN[Κ^›€fΕ9 EB6μBl§†νILΝ³—G”Δfά€!ΐ7Γ1(εΆη9`ΎI:IΥRΌ*₯WuYΎNKCΫ3ϊέΐ΅O­C=]Ώ["[΅b4Uςu‚%Žh›Ί°Kϊτ._0ύ`Α²:χυ»Z₯½ξyhuςH9h§z•#u—:sΟBάίύνΖύΤ¨zpm¨ ΝΐζΜe†εωVΐ…EΈop°©eΩa«Ο'ž  Aeΐpθ^υ`Sη³1θέΈ€stξ9–ο ψT!—θiΔτ€KωxœΫ\b%  ;Iz6s-Ϊm*ΓI1ΓΪ€3ΊŸNZžξ":σρΪ0άπΈ TB…εIαƒŽΘά΄a€Μ—¦τ Πξ@1 ™)§ r‡Π€„εRXƒύΘ[pϊΪk ΈšΑ₯ezΆ€5ƒ™Τ 4ϋ.·ˆαΫχΘ:Ο₯hλ€Κg’ΐ}ΗβU·ξΏGρ虨ΜΔοΈ0ÎeSjΠ Bh•š>,Σς-Ξ‰c F@σ  ΦΣνίέΆw˜Υ(Ε―‚sjR^ς*κ€vIE —U‚w/V)Θ·—•“]}KΆq—ίv“Š‘EΝάεψašΏiΞ^«-­ab Τ$Dψ"ΠθSπ Ίω0έΌ|§A‡4Α¬ζϊΓd•Κ‰^V·ή…ΡršiYΈ-^ Νͺ–?§Α6ΊΊQΰϋ‚l †a«Γh›ΒvΙl± ~Μsύ>SλΝSœ­WΥ Ξv*kfϋ]ϊζ3ΦΕ΄θY˜&Ϊ4Σ%•τ ‰nΉ #žΚœ³UωD‘E«©²Oκ«ωX–Λ·ό’-u±λΘ΅SY{ηΚk$Z–^€h:έQp­~΄πίΧ~hύςωdη›‘ρuφPTΌMΓ ~ —Ν έ¨"λ/ψύγθ¦bιϋX’ͺΒαύΐgΥ;˚ϋ Ω3ͺ ©΄a΄ŸZUEΊΊ[ύΪήwΦ8υiΛv‡›Ÿ]ϋΕΔFτρμ-Ÿ8s'gmςR<Πς,g@Δ‹γN€Όa(έ<δ°/¨6‚攐ƒSόp~*Κ³”}o«"bΪφ”μΚ£† ΩȊJ6DεΟ`ŒΧtΓ.[Όv‡™žΜ7IA[>°«Cpΐi¦ \ΈΒ΄]JMQ%‘%y†g$p|Λ1|Γ΄9@=BΒ€\˜Ν-κτμ%΅Ά°‘Ήavc Ϋ“˜kߜz}Η ΝμΠ Ι’OΠ ©ιΈVθΐ€xλPΟ΄ Η£·ΰ˜ΆIΗv\ŸΩžςλΫϋ'…žY9bp§Σγ9 oαkΗΏΆκœΓDQξp‰PΫwL]oŽ seq‡ Γε¦π™)aι‚SΒ|ί—A7Λ­oΒΎιPθΫμακζLΧ³Aq½Ϊ (σ@ƒ p97¨τ@›Lά(–t%ήrΈΪχ)3ڏGιMΓ9:&α>•€‘SΒ2Mίuˆνyΐš+‡„RΪέ΅Ϋ ,‘λ‘ ”e„}>@Fρ—iΙ Ί”}ύ&ί3‰orΖ…GB#τ©Εxΰ8ΈΑ₯ K€λ"πAw…νψΤ0₯`d—^ΰπncC–r-y΅τψΫΆ¨ήoIυ/t|Νu₯^Bͺ©ΙΌΝuƒ©0lF\Β₯Ν@6BPԝRΒΟ€mιQξΚΠυ5γΐ³ή¬p˜ζ9Μτ»™ώ5?o.”bOΙΣΈΌ©ΧϊF γ νψΨ;AQvϋΙa6₯‘m²Πw‰tΧ Ρ Œ^ΆITfΕΐd’:Φ<ƒΐ&a& ΅B ͺΫτ^Ή?wΝΤ.ΔΪ;zo> \ΐΧe Ψ€e; Ì\Ι=K2Ηφ°Ύ°f†Δt€°j b ΣβΜq\~Ζ ‘ kΠ ?¬d1ΟΨj±ιl>ž°,[P’Έκr‘Ι½{0K“&|·°ρuηΦq λ‰ϋ†ηqΚ@^2Ζ¨ορΐt|…fˆΛ†ύ)©€ %†t€aψƒ=λšέƒ ¨bR1 +­Œ Ϋ$c»^”ƒ84nψ•θ£i^έ—j‚άτλΆ4\Γ–˜±2ψ—p©>H RΕς=Γ₯ΆmΩnEƒφ(eψQlm-oυ­έΟ£e‰Ψ_‹—2ύB‡γYb5"}WλΖ*Τή7‰*fΏεtϋ΅οm=€Ϊj“^-ΏvAYςJ­g“a¦›?θ6MδK[η‰ΒΩζΔ~·Ϊ·•dΚαtŒ’ή‘ϊ4AOێ«φ–2²ayΚ‹½ΐo›…’ιφ$½TνΟΨ±ŽvŽΊfβΫ»₯άmυΉύ}ΤτΊυ·Δ>L½Υ:Žρ(ΰ”cœζ€¬‘Θ cΊδ3―σ/ς­ζRj¬NhMΊΓTμtmχP’fK<šΏUΗ«;ΊtζΡFϊ49©;3ΥUίΟ(oxσύΦΥ½Bv(Hβ2‡ZHQ΅˜ε2Στ@ͺϊ€Κ< Λ2ΠηŒ8jR½Aw ί‡ΠΣ°[(  ϊ8‘kϊ20r€!ΡΧ- O‚EθΜ65 Ο ΅ CΙY`y 
BέG6ά»i:(ΖRŒΖΦφŸV,ΐΥν%ΐΛNΐ(@li ‡πΐπα?†΄|Ϋ·Z£γίsBnΫ!ά`yFόΐ&nŸΈAιͺ8b¬ͺε­ΗKB ˆ/΄V-sM2 λWΑ§u}A=β1’kz c0ί‰ξ† ›’ξC2ύclhΞ­zmΌF€X¦'C‹rΗΆΗf[T)Φ0:¦MOψt<_ˆF νί©g©ό΅TKΗrˆ7."…a+Έgcνν©¬ž»X ι«κ“‡:HLI0έτο$“ι.φΉuL°c9Ύ‡Ψά2m-/“,$”›·aυ{Ψ_ `HΓδBO3@WΞz|"εΐ{C°z‰Σ2MwΙ.Α±–Ψ>Pv 52yδΚά9_³ηuusώ­ΓΝΐ–Β“.¨ώά²}C·€E‚yΒ Τ\ψρAϋ α@‚IΗp¨οƒXλ‘Λ»ƒΨzœπΊφS·>γ3J@ο"Ύ ΏΑ`°^Θ CA3‡Ή„†ŒΓr=[ψ‘幜{>5,[ΐβvϊL}˜Η.ΝΤ&U³Όk‘t?ΌZGΣΉΕ]»λ†‹0€ΒM¨Ο Sƒη4 ΤΕπqκΓ~n1PZ‰°CKΈ61‰cŽα20¨ƒ@ΊG°‰ΔhΔ„™'D§ν‰3θe½Ηΐκ YCœΐ…€j` CΫ6 ,Xh–kYLψ !&jωΜ+Γ04}J:žEEΠ­έ•‰tK۞QVΡ/ $‰l.₯λ›ΠgΤ‘Π{O "Fγ†izL<ΔCirΒ]Zœ^Γ¬ΐ$@ίή7ZmδίΥsp„όΫZΆdε΅΅<R ωhq›Ÿt=TZωaΣ€^c…€–R†Δ7©OP_φœή BRΎθ™·tθ.Ω[ 1PΗLŒ~η­ύ ,¨kqΖ€τ‘cpΛχέΐΉαΒΨ*³l‹….w€G$*I’ΝΠ5–²ξδε›Οϋbί΅yξL™ŠˆJΓ0—ϊ[ΞΣ•œ―™Κyp>a™΅ΪυO]»0Η`ΆΕ)1,a›Ά j'F]ϊ\8ž –΄lΓtυ@ϋ΄Έ'œδšOmΧ8Š·\s™u™1ά«/3nΒBrMF‘>¬-P>™α Š„ Λ΄Š8H3Σ -˜Ψ§π•JŠΐφŒevœ3cοΘ$ΟΌk:a`hΊŽ+„θQ[vΜ+Ϋ`³άυ0ΙƒzCΧikg$ΰ$βΥa&hgžοθ ΟΓ’ηΔ3mβψ‚ƒH]bxG<ΧΑπΰ.@βΰ$'pί@1al+nΑΎΎ»Ϋφ³Ζ @Sΐ…°P[,ΟBε£ξ-ΈΟa`.μί΄}/„5a2ΐ΄ΫΥtoχ!kӏf^›–¨Υλ+; αΎiZdN 61`y!΅$ xαsj›.ζβ¨ α!Η&=«½Q±Λu<JrηJΥ4―`₯ϊ’ς/² ;Έμ«lΞ—εύ7uΟ}tύ«‡Ε£"η2Χtjβi=ΑTBΐQ@|Cςd s€γ~€Ρ€ε<'4=Α ΉΟ¦U―NζΨGZŸΡ_6_šsΈϊ2Ÿκ‘ΪγΖαΒQsˆ“0Kη-ΫφΥ§Ν€ΝΑžAb8Άƒ!LŽΏ4%(¦i»ρΈΫXiH#4›pι:Ζ^uχλ©kj%Χή4.Z} ~½, tΗδ6že΅@Ϊ™hM—1Ι™ο’©Λγ>hϋDψgEΑόΈoΨ³eΫ`δϋW·βϊžο{α‘Ψ ‚8•έ‘°lΪ\šHnΚ}J`™γ)~Λ±t;W8ϋZΰκ*†DΗ€½? 6j0@7” `²ΐ`ƒΫg1b˜˜{tŽΐ€Ω’°€ύήΕ…e η›»’ljρ΄TŒ¨Ka †!1].9ΐšΜ°C+„ΎcτjΘ±= ¬eωX›ουŽ`ήΆž{Wg‘) ξqΟeƒP '2e(Lί C̜ψ‘gC‡Y„Δυ©G₯KaΙΉ†εΫ ¬νγBο=ψέ9λΫL…)„c@L €Ή@ΐ’2ΥP€aζ&60M“0 ”ρ³ŽB“£}-0η GΎλg v/WWͺ€γ;†KΡ5€°¦o»>ŽPuCŒ˜¦αyžΝsΈ,\*l,δ»\ͺs—έ‡"ςύξ=lΡΪrΧ1™d(¨@95ΡL7  M§ΓΑAx!άTˆpehYΨn΄ _‹Ϋ;¬}Pό€x€{Έ)H@©)‰MBίgV¨k ώ€€$–λš¦o„ά£ΦΙ=šg¨«F¬Η΄Ψm8h>:WOΤ!0‘%AΩq™γΪ rΗ#°ŒΠm[& [:!ͺL°›y ΰΒθγέ…–I‘ΎΞΦΉ,c‘"ΈΩΉ΅•ί’huΑ‚UL_F ΝΟ 9 ΰd‚‡Μ ƒΝ *’D+γažqε±Η˜uΧO-cšhις C )8³οΉAθK A±-j‡. 
!ΫP‘ν0#D‚’ŒΎΛΪΦ ‘μΫΙ8ΩΑ-a\­SΩΞ "\m ³{ ΰ3"qΫ·=τɐΑψC‡›oω.ηC&-‰„p•§Κ {°…ŒY^D<^'Ω΄ΕΝ«£Z€ ]Γ°Έά–ΠQ½AS1φ ׁi`θ(πtK:θζά#’ΐ?*ωD-ξhο1ΖΌ/·Ÿ*—_?Υ“ΦΉλ ,ΣΒ©V½kRP„ΉπL_p-?ώ‘~ˆΉώ Ά 0ΫMXέ³•Ξχ Γq‹²JP3ŸΒΥΧ΄λ3τyFˆ>$Σ°‰Α]ibώ?Ψ7ΕΘΙτΆ€ΐ{²κΤ¬>Ν€^=CΩ9Ε,<7‡σC«)Œ‚…κ8(/½ΰ¬bΣΓΝψή0}Ο£&&Μ³`[‚²οxβ&οζ‰: ž›fe‚τf°½ d@¨PϊΑμάT©ύ]ΜιΟ†/\‚:T’žDmδ‚Ϊ―πRQΒθUŠ ά7*6]2Κ΅P”3ΠΎ,i ΓδIny!ΐΧΐvlΧBΫ-Γ€²¬dM+ {©©΄6ί΅υju° φ·zΎ U/°0ά CPΟe>Βp“H£‘ @&4χlyt B΅ΥtΉlύ+΅ŠK{H›<·&Ε½6IAΑTM“―EΖ«ψϊι•ν2‹αz΅(` ˜w9\p<ΐνΔ" d™– Φ—Žι3κΒsAραŽkυφΏhΕΨW7ΎψΤτ<ΟD}Δp1A l9Π?˜kΫΒƒM‡j6¨"nθX>½y-F’;}·p]΄}©Χh.Η0i† ½"¦AXθ›6' 3]€Σ‚X\ΐ=ί°, 0}*μ9cΌ§ΤΖ’•'ΆεΛLΥ?œ­“L2ΎΐZr{vΚZ•šo£κkΫIಝΆ‡”žγšŽ(€1`3Ϋ!‡‘ΪœψΒ@ΐ\ίp€ΪrΑ:ϊo^ί-˜ršš μO،ω:χN₯…¦ ȍATX(-LX‘.ŒŠ4ιΦΛήOλα>›kUΒκ‚. XζΏͺ.–ι«―y”|ΩΉP°|χB™Θkrθ kΏΑ΄†T˜ nΈ€xX!p Ϋ₯hx!xθΣυ=‡˜>ΐ8L d3i ΟΕTœ–³Z›SXNΟ‘Α·CHn0rΧΕ 'HO o$0=ΐρ €<σΉοΠ³:¦O4±ƒ$₯<TDΧyjοίΫΥrσ€^iϋ!°VΓgV€ιfƒtΧ2lx‹Zάπ( ž€£κ³Ήα‚κ)~Χ —„} z'»ζ·OΞ\V¨`ŠFŽG”a|aΉα₯-€—»΅₯0Nˆih΄ν °Χ8,mΓ6ŒδνJxƒQpP/ ]‘Œz~θ³0B;W%$χ‰ Ο1 ΅Δυ\ ΊI₯Βp½P qΐΎΝωp¦}|gcί~6XhsΚΤΈG₯k™‘ΝΫφ(F«˜ΎM„cΓV΅(ΙpB•§ΗςΘT8hͺ–φΝA°  š§Δƒ3²lЏέΐρCΜR’JΐC©ύΜόΫ{ΣεF’\iτ>JΫόΊΧΞh,φεaΖdD‰_i;ZΊ«ϊι―#IJ$3“‹*YsΜΎΣ³΄DQTΖΈ#G˜uΎEAφ’H—¦-ξ9ΌέσzυQkJj€αV,CΧ€:«<›€H©žkν '}§rLIiW₯?6ιŸ—zπgύ©±yΪ}ο¨?―%uX:]ίά―6§D^ό”τ8~Βn₯ώv£ς{_κ‘5Ϋ1_›ˆΐ)hΉe‹a ΩuψΗ€T―%ΉH › ΄dhΗ€#Zz‚[IΗ[ϋug°— 6…\šτN=w+uάR±hΙF›%‡­™š¬δMU‚₯…ΧW`Λx#{Χ@ £Ζ₯―uΆ9:H·ψ ½.Ι(V‘&Iΐ‡ωΤΖTΣ8ρΠΧ/YΚK"Vα- hΠ‰A ½2€ΫαφΊπύ0Ψl– iG0'{©%5Ρiι˜ΘALm„uυ2h«Ρ0A:%ŠB&FΣκL’ζ‘πΗτρέ€μΤόμHιόΣXΑxρ«‰XjKDΙy*ΰ`g©–βμkΠZΚΖμyό‡ ςg€Ee ‘εzf8ϋ³Xy]]>ps…Ύ‰π+Ύ t &¬WτM}MΊR„”«ά+αΨ€Σ.:¨fPΜkέ->ξρΥΛς‘^xQkRSΊHoZ¬&N΄%°fI3*Ρ–ͺΔά Δ:ζήΨΓΤ› ημ•;…‡[ή_λήΐkΏ|”°ƒiV₯Πιb«‘κz*½ˆϊCθQz%J3³ζ¦ ΄ΧiΘΪ9ώb€τWz’ήϋ`-.~c‰Εό„m„ΛO’Ν``O1rTEƒ[DOηD§Ž‰σ†IγLk“Σ"™'»4œΙ:Cόΰ³EΕΘ΅.ή‰$Y/}F—,NΣφ'R20oβlc­ ε›rΞε,ιΕΕ5i(Σ-3α¨Οξ[1χντœ3xΡύ9Δυ‹ ŠΤ¬sΆά3J΅žl*0\SŠ₯&eΘ €_Δ±08h—Ύ‡‘ͺaΠμو΅<ϋΨ$.΅Ž(¨8«*E“4ˆlŸs-ΉΦlW’½”nHM•² ΄±w˜ΠΤ@&gΤ=v»pš”Rϋ:°ˆΗ?'“hެ €Ό\½–%[E\gœ”§²uEϊ ƒ& XpiSsޱυDε£ΐ(j½±*n§ΟϋN«—“I<ί^ΏΡυ³ΔE:††₯ Xπ―bŠΞήbG„ήsIAIv'w…Rϊ’|—P™ :&γΏLœ”ξΉ–ƒ¬$wMQ°Μ΄:τ¨ΤR (ι’ZU/ ͺpWDxSΗ[€q]b“KT-p8’ +—‘›Μωb7MψΏί±ηΧ@ΰσ‘šr™Ύ”ΛΧ—§Ώ―ί>Ϋ€/υJ³B’€)I$Ž?Ί’έσN±‰XR hό 
λΪ-θ€ς}c"ΐ@S«\žZgαΫ\ŒZ€ Θ~Π]ƒD¦Ž ‘&b±ζNc’¦ηηC”Ώ=πγΫV7tvœ ‰GψxqιfS]ˆ2Bqα.Jύš `2N.j΅Ωƒνχ”²ZΒΖζ Ξ\IΝe³φχTPάΙc_9&ή°"RΪ‰”€UE²nΩ ''έCΡU‡ΰΊhγ9#χBAη"½—)Ξυ?Ί[όΡρΰt>U–oΑύΘ)ρ)&i€œ@ΕHœ΅a1RψsΆ ΎOΆsΧ8C#³Γςm»8CΓ‘βn8ΤPz"¬#&ΥTΧΐM²ωKŠ`ςFΐΉgΰΐKm 3%<§²ξήξo†§˜ξΘ9υ‘§\Έ|¦η?GhόςEϋ₯ƒΗ§’₯)‡NrT P©a]Uβž‹Δ}ρLX^U’ͺl”Τ=™Ϊ™B=κθΆ—ψ"³όώΌΡ/܏„κQWΚΕkaΊ υfύd’+U|.$Αލ;IgEDΫ§8ΉS‚ω.v¨šΤωτξzPyΊp€Χm°M’ά΅΅¦ΖΚŠjήΦŸώtׁεzd’,Ίτ…LAΚ)fΞϋŠΖ%K‹Σ›:X‘ΤεKΏhίIεttB ΑGχ:©_ &Ή†˜­—ΐTrŽOf­m}m]Ρκ¨jχΏΟž…εƒqJΤ¨"φ‘’<”κ+– Lڐ3u*EŸιΒ!›Χ•T₯J΅+]JΎώ,leΖ―έ‰"’Ÿ3ΌR1ΑΫ±5€B%eώΰ€)+ΥMWδΥTίLU V=·—ΧδkΤϊbι°Q…‹ν~5¨Zπƒ’šcΈΧρ”’2RLη’-Α7ƒ΅ž€τ°αpΞν„ΰacΣm`R>wάμs§Ξq―ΉΫΫέ ΊίvD‰ŸHY7/έΫώ »ΓΆ’²Œ‘$KΠ7 0©ζνΩb²Ή€šΒJ$κ€’Φ 0:+x8QŸwέφ9–ώPž·έΎιVΐ玒ππڏ§Χ»ΥΛΣP³‘t«Ώ™€ήΏo:\~ τя{XΕ+€…9XWd(m u!G+HͺLM0‘@θΒΨ’%E4˜βOΚΓ}RΦž€ΌωρΉP»b±Ϋξ;}wώLφ‰ΎŽΫFŽ“bάόγΗίζΔ ΗQš₯λ-|T‚Mά<Ο‘ΟN™HVϊ:t+:ŠmψΡ€α&lᔣKshώ£«ΖΥ•@;½!U[OΞ:ψʎ«$ψ–NΆ(•œπ3‘πIΐzv­²..MCϊMύόκΜ oωDΏSO>xέYkΖ`²pέΥI")ΦΝψψmΐ½pΚtζ.ΘTΓ•ΧE‡§—G—’._Αχ{&ζT`2€‘ξ%WΡUZ>@³ψ†-Ω•‡³Τfς\ϊΗχTFή:,.`Ί-α—΅.`˜*9Σ{"§04ύ δPΔE燬”hαμuόEžΛϊμν8 ΰ.}y]Ίρ@ΘX…{”Aθ}³Ύ "©†LΑ¨ ²c(΄hšƒio`сD”·|‘?ΕΉΌz. 
N ›$Ϋ‡€6ΓIσzV°x`ϋΐXk/ΉΪ.‡^ΐπ’ΛΨ’]L¨^mΘ(OOm7œcω›’ΓNς1ΟενξS5ͺΙIμ6,Ώ?vΩ±χ±‡”cρ2>m½α& VD#ΰNT¨™Δ)•;ŠήEφQuYΥjccι­3$ρ m²6οΆeଋαwqιe!qωιΛs»iχ+©A’,‰Wι(½ouΊθ$ΉψoR‘ ¦ΐϋ†²‚σͺΰ‘lΊ'˜άή©Ί…οκΜeh#<ΣcGπŸ{y|Cζαv— CήΎgϋΒσ{}}―{/­ ΞNΊέΗ=E:«Ώ=}—Α=·S­­ΎoεvUo΄ŸO-Ι!³ΊΚ*hΧaβ†Ψ Η¦l†ΉŽ]28a· T` …ͺfaΣφKύίΩ“³~΄΄α#±{n_Ψχyw 7[ϋϊ3 λI+·J₯ZΆb5CΉ3n*zΖ^dόΟVε©εR†‚ŸήpξLΞΩ“σ1!«r•©πΐΙΨ0πΙΰ¬Β³Df€fηBΐ /ΡJg•£κ‘γtίεαΪW™Sω@Γ•pιvBVgόϊ“tW«2*XΎI‰@”o\R ‹γ…o’Ύn°΅˜A°” ΄žfšƒ_bί.3fxρϋκρΝΫΖ†ύ…_–ΟX5>=³czυYYr‘ͺπ’QNέα5€”›2Mτ0aΏ –S©¦¦[ΚΔΙ7L1θzΥJλ/Οκ\\θpήφz€π“kuυIj›%|Tξ« ΕρuA‘W)₯*:a₯‹E“.,ΆaΧφB©wœZψθfΫgΎΞ9αέΫΩSϋιFΧ[£ζς°σΒ‡‘0ly=2Ο»b?H΄ΌzΰM-dl―ΝlO₯Ίι&ΒΝ:j`±`΄¦ŠΫPY *ŸΊσπ€―7Η‡d6ΎφO²BPE&CSvšC΅*8ίEθΈϊΰ¬4pNVtΑV‹Μ«d‚¦*xΝΟEJ>tΗσ7ο pΙΒ8-ΖΖQΧΨΒΥ•ξΌ³)i“Μu°.93[o,’@ΓΕyڞμG2βˆειτ_Χ(΅°4ΠU* p’>ǚαtŒ6r ύN|Y±&'ΡΜ /*hUυΚΰΰΐ*ώ\­φΧΧ»―΅ΟΟΤΓυλ²;|@1l­R( σԝվ’μ€]’jΛπ’g‰}μΜ\#³§,·Νoι]αϊΡ(°Μξ %56H»‚€­3k©ή—”,,Ά5ΙNl³JDΊΌ€°n₯av \_:cηΧκcρGbGyρ’< ,‹\ΏRc·d`g`›,,Β~Φ&Oμyγ4˜7Gφb€.£ςαάν|qŸωΙω{43rΞ—Ξν φa.―5U5–ΑΣ”₯ZJg/©•Mš\D¬Ό6ΞxμvŸ[¦Δ ΫζΌ‰Ήtμί/Kό˜υωx¦‘ύΝΝα5Ο5²g‘/“–‡p`w©gŠ΅’j·₯'μK£6_  R9U ^˜jή\€ΏΡS;WIξX£Ν“‰3χOνϋ­άΐŒ.kά[σ“`Ε{(Β–μ¦a²ͺwJ΅šc*&)pΤ€›’Δv †ι»ΪsZ‡έ4ς·x‘›ΉV ΡΖΤπ«¬KΩDψμ\…sZ•b6 ψΩϊK°ΡΝD­Ξ‘\›ίλσ$΄«ηήKεN&UαWb(ΎηK=_…i)Ύ…Ζ©EVYΈΑZ½€κlYq Lf¦₯ϊ'˜ŠŽΈ—±OΙu…+!Œ„Έ7i«N¬{”FγQ…ξ,g±ˆ…š‡;’΄9lƒ‚Ρ 4eǝN§¬ΟΖ ΧίΎπσΛΉ₯χ?”I¬±Έ>]7GΰdηD3Ακ¬=Ά&HλΣΨ$Ύ|€rθZΈ‘ΛΖS‹ΪQi9_ξef†,™sΧ.hk 9JTή{ ‰E:vχT`ŸTξ“\WεΩu"— ΆVμpd0C:Χ2W|²Ž^Mu[(A?‰}Π0“•¨”˜€=8˜kΣJ NA C2[I³CxΎ_΅H—ΎάΆ,2HE# €ΔτDIQi₯ͺιξJΠ*ǞE`Ύλd sβ₯ά¬μΣf[Žnx—·’,Αr ρ†'₯Cg,ΓΧf“!‚γθ^œ%v…5ΙEi*ΗσGŸοϊφί·βΎργεJλ§jΟ¦‹χHE»dχ%pΨlΟ©©‚c’ΊΦJ—(MίY ˜‘ΘHΛ5Ζ$uΕ3σ"=QWeτnnήέΛΣγΣ~šήA»Ω¬€Ίΐ3“Y%θ±φ,οχχττΧγMKΈΣ±ψKm=6)¬φε˜[αςϊώΒ©ώω %›1υXœ-HΒYι{Ξ^eW@ςŒ‘+%JΓπΜέ6₯£¦œzπ IλHΗK=ψ¨ΙLή=σKΏ'y|»‘‰5­#ΜCίaϊ€Σϟ܎ΝΥ°‚ϋFlyro₯ΒζΚJΧI’’g€γa5ΕfΟ8ί“+,@Ϊz2ʐ‰μbM‘\U v;ΡΧ(bς5GE°tl ˆo„•†qJDέtΣΊyRζ-ν€ άLB―­…/€‡}6R?ŒuωΕ;ηT• ¬Εσ€’mδμ@ϊƒκτ―α«"#ΪάMW2tͺbP£)Ν³γ{ωΖWGZΑ$ ΨcΠvνΑxjsžΉτ\΄°Lg° \Ša\£k€υωŒψ}ΧτI'›·—²:–‘±χKΧΆQnΩKΩ)ψT°ΥU“YDŒPη)ΐDŸƒ¬+Θ֐-dωΝ«Λ³ˆΰ|_Mδ­»ΕΥΌ}v^ ωδ G0Ζ±L,β­-Δ"±‘l˜£ξ »E%-MΝ[›“Ϋ‘υGΗΈΥ?„'‹§"±>`MΣ=4p|ψ,,¬b§΄†OλΩϋ`qΔ3^ξΘΕ€ŠϊR)ΊΕ™3Lχ&τ{κΠFڝ5‹Gφ₯Λw 
¨½‰°£odMΆn‘ΔΘ+’ΕY%«3¦΅p 4>i%ΐˆ„γύ>ΐjείΆ5§jg‡_9Œσ.^V•»τβΑ**"Νwϊ¨h5Χ–;.wλ’³²φNΰZ lύ₯ͺΰ‡ΥΛκΜ AΘo#_ΈΩt ηv ρnΩΒ‡ΤΧλŠΒσ?Φαœ\žœΉ“°“†Ά<ۍϋΉi?T‡άnΗ±SΐtΗχχ7όΪΚ3Ÿ ή¦•ΌclK_e0|9φx0:p³:(‹ξM.ΩD‚“TΞH‘€+ΆΫΨG’ΪuΟΒ|ό\s½Η―έ†ΤvœΏ,„ΪΖ X‰Δ–VE‰±‘‘ϋι[gD<εΜΕ––­‡t_+hzΤ=Ύρ·‰kM·ό₯£ψͺΘ yp“Βrλ—]GΙΥπ”²Γ­·@Ύ³Ββ0ρΫΡΧ“΅eηΰ”χŒPυβΊ9 ΘψXΕξΓ)§A+_D2Ί4›―ΡΒΕΒ²Ι0QπΘ:`™ή)Μ€ςDžUmΧτΛίͺ)οrΖAkŒ ΐ[Ύ3gιΙΰBφ‘χ.mn,c…ύυ1DIκΠ=b§E@vTLNηkƒ$[@F•SN¦xΧbv"΅¨_›ΫεcX~³γŠT,€'Υαs²Jp9Ϋ―gkγSωΞj@sj.z“Έ°0aM]pΣμRnΝ₯/aΩ,/)κ€ZX2hdγD₯;ˆ›δή†˜$% ˆΖ¨Š¦@&βΞf/iœš*‰Ÿ6²η ςυυ~|Wj—; ΕF*{%½ˆΩ8†Η0¬jtΚI·ω ιΗ œ$ΝXde”N‘$ωx&Yu/λbΤΧpΏ ά3ΨΟFΫΟ<>UYΡkk Š|‹u R?œ­ΣβJk‘`E΄UΓe€ΙWpU#Ρ9…χI·rψ]#Mœ%qt―‡ͺZ2Jώ₯†;°Π βxΔZ½dnη4؈±E΄Δ#ωβuΚ₯δΨ|/Η»Ο§–œX«qβLώWX^ 4fͺΐΡIΞ”ςFε‘, Θέ’Vn °Gšτ²”Šo/šNβψΓ$šέEΌGΞI3¨W8ͺ ~Y»$zˆ|Φt›m²/t₯Γ ΩV F8 Θ™Ξ-ΌEλ κ>‹ΒNŸ›Ο»Χ‘μβς£s₯)“qά ŒXΐ ψπΩ₯½Kι¬g„Rξϊ-Vt}q>?Έk²ΥΏ4<ŠΔω±MY†g²ΎαeΨF‹\¦σ.Š^–’°rjδ$Lς° `Χ|?μicv ²‰rK™B7d|R Ά}ž0Άn ’ΨTvHΗnV$=V½ §Wχ€ υζWΎΦ f8ΥΜ؞!N0γk€δγδ†>θEj'Νί€YgΚΨψΫ»OŸέΧΥ³dΒ+·uΚ<ΔμK‡E–,Ηα‘›ƒk–2©―ib°\lubΥD\/Ζͺm‡2Eδ`Ω’™λcπΩάσΪI©&XΤΤ΄Χ@ΆΰρΩΚϊh«]Π"ξ%Oπ΄‘cσ'Ρ].X£–‰gIΗg°qΤCdιͺVΝΖ;ιiΠρΨΦ Ί€Ό`ρ…Υ’h†/ΐ5΅J0*Κ΄ά²μ‘«ΖτžξŸWο― ίV<ψ‘Yg―<@ŽržlŽηBtΤjΒ†ιŒΈ$G"e.šŒ-Ƙ%}`:ώMε­τ—MΜδ#v²›V>w”žސ4[;8DK/ ±QΪ'$Εƒ­ Ύ-κzΉ›3ψASVZNWv ΈλEΒΕaΰͺxMž««~a «½Ώm4^.ŠMh“ε.’O½‚nΙr#-_’/Άˆ£H{Uΰ:ΉR­nσγx{ϋy[^_ωEtδGΕΟΛ3x ,φr¬+ŽΌ/ „Η%—’B#Ž’6Ή©X“8©‡χ-RŽqFfϊuu+}yΨ΄Z"œ[·-~Α/lzρν†οηχθzf>―žR›M€Ή©Ωc₯a1S͜؁“dΓμ€ρ₯ŏMΟYbΑ3Φ+ΨΦi,~DΘsvΰ"xΧn$‘)°τνπYΊ'ΨP +kl˜XιΞŒΟExvίΰ+^(d¦¨I]βtcί£e?£Μςk'B’Θ!eΥ{N Na ¬Y8’ΤΊ$πjΤ­(P―μ΅05τ5Ν«™ξ‡£Έ)ooη%ɟ-s?ύ'=[EWΙ#‡Ρ‘Ϋ–“t©@ΩΊ @όj0M1ϋ(bχΞ«xVW­—rϊ³u8'—ΟŐ‘;"©KΛQνR€)7΄bγ}aίl#Ή‘<Εήu©M4γEA§htΏφnŒύ?NŽcΣ»ύπ„/žήRΒKLHˆj‘D,ΈΝ jθH'+VZΈ›Υ$Κt!UR’2NČΥ–ή“š\ƒ/F)G΅Β“ŠQΑx)Ψ¬ΪD|Ήc4ΐŠ gS}–B¦*¦Ά“ϋ±’γϋξvζ™—λ–έσŸΦχoΟϋΠM ΐΡΛΔΉ?ωϊp}ρFζ΄;›Œ€bΑΐJΕh’‹Ϊ”j 4OκΙWφ―š2ͺΖΔ!_œα±_Ό:ͺ#1‹k“Ω ΙfŽtŠ>K+LηΨU}”ŒηKρXX‚—^ΖC€F­Ή°ε^φNϊNbλI‘Λ£±<ο] ˾γ$=Βg°° ς Γή[ γΠΩ^’…-Œ>Ĝkˆ 6ˆ`.OOΔeƒό)GψκšWœζ%³J‘hχΐTƒ“šββ3]eUEΨ1ψκCŠ{“f{6КΊΣνΐ†.ŸΔZδΊ+;MΙτ‚3ιA»΄¨ †¨I™( ,3ΧTρ£βlt]Z½Β£]8„k(,Ψ$ωM•ΗΠαœr5t–Τs€A-<ΕλU°ΏΣΐxH* 
ζ‡πΘ7νΆέΏΏ~ηŸW-)!A{ŠZ.}Z"Jƒψ!–ΓΦU-ŠtΑΒτ2[η€PmΒ8Ίσ³γxzWΕΕUoΌδ΄EnuΐˆlNƒ%šͺLZ77υI$ wI³?)ΊΫx<œ9ν/₯ρψ†Κ-―EΒΝpς-zκdu C?ͺ^2ΏΙ΄`η€CBU­ƒUPn 9#Su«ςpΨΝ`ZβaΫρuΔc:/χ―7―Οεq'TžoΦ•lΫŠρΛ·kΊ=υS‰2α½Ό·!qgκ-ΓٟόΙsyyε©Όςλλκiς)7O?»FIYqqKJγ‘ΙΑΎΈZaόZ’¬§\ άΏsΎNŒ~°Ϊj€Ξ”{³>Tο©ι»ηέ²E-²…ϋ™,C΄;© ΚΝρjwm§ψqxεΉ}»hΦβF ΖΛGΜ,²σNξ)s&ΐζZkE:U ’|iR–€ώŠpœ¦•§γ•Άϋdw‡Ύ™ °;iΓ’Ωωΐ‘½_Ό“—k’#—Ήεh₯saνΡψΪ-Œ’§‚(νDιQύ ³ 7™t–&3Σ&σ3Œ;½W.ψςoΰΰh½tΆjYΊΗw£½•šU΄‹5Gή‹† Τ>4'έ‹«=JΗŠ.xψ—95πqJάςάYU8@“Q$x¨X©‘+$K"‘€ƒδ)κξCΡοJ=Iφs€σΉδ^‘PΞy`KRižb`Crφ₯d(bοF§9Q3ήƒ±SΕ€Ge3@NԜ<Βwνκι{’³ΫίeΓ-ύ1Iz!ΛvxzιfΟ[)βΜr£Bΐθ!SΪN‡/;―wΧΧ„΅ΐΡ,:¨‘8:Μyǁ4„–rεQ0δUi9Dθ*eΊk$Vzr”§§8±kkϋθP†ΐV X$ŒΑφH$’&­TU€D₯bάX^“[tͺsbμq—tX`1.έ­ϊΫΥ7n„sDoI9‰4’‡1*IΔ(ΦFΞxΈσΧΜxΕΈs΅RΥͺNΧT}τ†&ώοχ ϋΪ6Χ܌ςDψΞ{‹¨—?Φ€‰YT„fJ_@‹Αͺ•i ~Ζ‰g­ΩΧLN°ΝͺEL6ψΜtLMΑηkxeτΪΆφV(πHαΉ9ΫΛΨΩϊεSl“νpΘ6c&€ ΅4RS.b#ΐν²SMξ<Θ*°m²½Ÿ’ςGGωώ:–‡]ήπy+m0’$³5·Lά Ly†‡)λΗΞa;ΙΩ’(Ό7!&`± ͺv&[̎]Δ#χ„_ώFWΕ’‚wΚ’HzώHΐO„8%ƒΑ4MI±–ΰϊr*xςΕ‚†)‘»'9Ξεu΅Q:lσϊσρ­ό81ψOYέΡ΅Λβ «(Ϊ^“dzΥ caD³ΤˆŒ;³εφIήP½ ΥΰΑ4€μ{όˆ6πyCέΌkΔ3όβ :Glτ”€}ƒ73χΪ„έ W[ρZcί«–“Υ%ŠN«ˆIX©LKΉώcnOo|#šSSm$7iy'u¦mρ±_έi¨~νζ&Ψρ€±υΌ?€«,0’&zžL ΧΞUBΉ8FE€ο-•—€Ξψ₯Φ—ΓΆv›ΖH'ΊΤ——oOŸ_}φ{Ύ’ς% ωΗΠUk“gtΏz~ώy+­V^άU»+|?•…4h(Nάδ$ΰ‹§zΨGρI²³ά}vψΨ~χ©M°γΑ‡qU"3°³ΰύ˜u+`ŽI<ΎΤ‰Kγ·8¬Ψi•˜ιD29ς#C—XΟΩ―Ω~¬»έ‰w_ίΩΑ}jGkvΖÏρρœ‚ύ‡σ\[8SŸ=k_>·ΧφδΑ°4Χφ„²{°Ή$}·)ΡkθQc— ϊ$%EΧ€A’½P˜ι^BpVZυ²k)Μ7ν:>81₯W/Μ`W}¬ΖWgqΘ³Τ'©Ρν&Αα—.I½‘Až}NΤΈAK8>-U8ί9™±+%ΰεΧ•Œ”ZΟ亝q­{ZΖtβBωϊΣαJρl]%E`ΞBe£‹’Α+=!ΑdR*!*ξ5Χb«δ q0,ς <Σk|ο:ώς+ουcNόΰžπΛΤ†ά†Χι»π—UΉ_ύΝGnΓ„ @Ξ›ΤΙ_°PΫ+ώλ―—*5‡Τ՝\&Tι6έbn%)Ÿ7Α§(±‡˜’Ž Ϋ6΄lŒθff΄!ο©T†ΩΊt‚—όϊ³ΤK‘jŸΐ8λ֐deYeƒ‹&ϊrθQεj)GLf(Ψ|χλ[užfR/`ΫM2₯Ά«K3Šκ₯C+7Ϊό?ΞϋŸ|\Λο++k„q0γC₯ρŸ«Ώ/:]—’ζλowΘηχŸ mο€FΧϋjpΏδq·/OΗΤβcΏΑŒ'³g~ƒ[Χ―Eη&NŠΨXΆ˜ά¬o–“¨w€X¨Eœv8³wΨw±Ξ„ΜŸ@9ψζυ¨’7ˆ²|=™H`)ζ½Ώ^ÈJ 9ώρέφ7vp›ώ׊ήξΎ²€‹Œο7¬iŽ$ ¦„υ 5USu.ΒV₯©ccώQτMt±©†¦}£ΤΙ›j]ύ2TΈ,gλ`vΦΡ‚ί0;˜˜Π΅( ͺμ\/©Ϋ ξΖΦδr‰ σΖ ΰήEΡ/ρsš₯¬©.γ¬ρ|X²ίΰZVΝkνmk>™ΤδκΗ 2‘ _š«¨hm@;e²π½ΛΊ·σ΄ωΓWM#›±[•Ξ™ LP­©U‰Mζ›α2\Z9dM’ΌGΊzB6§]ΰ—ΜΗy»x τ?IΕ›¬€+kV’ͺM¦Q ιοΧ#6φƒ§Ω9ŽΡi Ό@䕃όυΒ—YΓ|ύAFU]e¬ΉJ)Ι'>ζ€η0£6R)-{Εό[$θ‡–+`~Bžaν~Υ ΌώV^Ύm4K.…­›'ω 
–Η.ΕξΕ’ψβeɐΕY4*J#ί₯l΄Χθΐ\™Ξ°G>…™¦Ψ§Λ$ρο₯8{RΦIΗΏΑ― %o H§&UE?•A’W.—u ΅d‹©’‘μt΅Κc–b3wςΗxιοΟίή™KˆξιΤξY2ϋi˜Ÿ^δ •·ΥΣωΘβσψύ4 SΫgiK,•NuΘu5ΩeT*Ύ`{΄¦δΊeαΓNzqyU=“Y°ΗΈ.Ηlgώϊƒ§Z”Z‰.wι‘Mpΰΰ Ί΅Ψ΅πζ½ob³ Ο8πŒβC© 3¦MN—UΓν]Œ-fΕΏ΄λ·Ψυw—ε\Ζ(Ξ~%ΨœαD’-8ŽΕWγ$ƒΛαDH²MΠ„‹ΒΑΠRšlE &vΟΑœ)*<τƝΪ/Ϋ Ζ‚±½sρ΅==3έΌέΏξi―εMΤί›“œσΌΕάμƒίΰUg–’]X:Έ."Ι"€Λ/±— /ΗFe°K)Λ‹ͺχ”Α>-V5υ^T$ώ­1Ψ_Žœw€>.+Fδ‹7gζ,+ -ν@Hι^ĝΈ†©NΕiΖjTSRΓα-tπά ίΤ›uζͺ=Ywbm –@ SŠdͺ|™»DϊΑΩ ˆQ\r +\pΥΞΈ .Ήa΄τias”ώ>”Υγ™Εψλύ²QΧ»}}έ±4·$W»›ψκϋŸ7‡uπΖΏ{―—ζ܍[Ž΄Ά–Ξ0₯HgΊ§ΪŝλΖ‡Z'ΐV|‰¦&+ΖY[η+&;Ϋθ€ΊufβO%َ㰧gβf’°ε Σ‘΄‘–r «—ΊOβί₯a{qαk ڝBμ‘5°Σd•\vΰdRM3€# ·λΝ‡ jΧ8  ΈR‹"Hœ%Dt”ώ³ͺ0%鞀B+"ε †RfΫ<ωΖ³’sσΝ½φ”σ }ώŸωiέ³ΞήέtΡ9σÏʉLζf€«γ-‚κΟ7~z‘mλΧΩk‘OώUύϋΓœΝ“ψ©βΥ™Œ£ΓΫ— ύ?ΘΎ:'[žϋύS9·‰Δ9“pMδœrΏpΫ>Ίd?s0'`Τ!z:δσ#5…~ιvzˆ[ά?ύ…}όvΡl€ϋ}892‡4πφnυrdšFэŽΆKΝΎ:)Ο’x0ΠTJΰΩΣς‘Xvρ,8ςa­E3uΙδŒΣNξ—½‰Έζߎ$Χ.ϋsΏΊΗύ΅“:·GΗ^Ÿπ8ΔΦζΖΏν¦Άε‘μκ?w τ#¦‰,χ_›€q2Α‰cΌzμ{s8šΣΡ=ο½π2<œΛ__κ‰ΏyA π{χάήφσ£{οΧ¬Ι†ΪMέ,°‹Ώn=ψΗsy€‹vω‘yΎΏν'~ϋφyKϊπ‹_qΛm‹Ίf2hFyg:½ύ₯˜‘¦XΓ8η‰σ4LΗΞiΩΤi;Q ;/?₯œμK:}jΣ―y/΅iFτΟqbμͺ=sύ]ι&›σρmΟ6Θύξπϊσ‘>έίΗfS=qΰ§σ·Nμ?u±Gόφϊή~Λ’^ζϊΖKw¦ο›]Ύ%άί™ r,oσH»Ύ $―™ yY‚β,Τ9y'~k’γ6 zζΚlΖ§ξΣΉhdΪη}Yu!(ί?sψnό€«Gψ…^ΪΚY=Ύ]h5G@θώhϊvυπΌ;ΔΧς'ίξ—. 
X>oλ"w7Δζλ›ΧχϊΪ^Vu»6/Ώ½0_°qvΧbbχœϊΝ³Α¬GvΜΔ΅ο…±“«&₯n‘φ/BuDβ7FαYΣ<A\΄ΩΏΒ¦R}Ώ <Ÿ“O₯Ϟ>“Iͺ§MΜΪyC{’»y,3?βρΚ_θΝAςvΜχΒ”yίMΜ=υΡ‡Ϋδg>έA½ζE ta8}9 1/<©gγ8‘ωLLxΖΞ;ρ‡;Ώ,d.„χ‹Ου"'κϋœ7)ϋπο~· §ΌΩΎγϊˆzξΌ΄H£ΞƒΘr`τhΨμͺώˆ)„OFL―‰&Π«„άv_z|zxzyΎΫίPγpέΑœOΓΦg…~ŽXζΑΫ0OOχςo \’έ‚γΫuƒ·IΌ|N β\l7A˜―„Ζή‘Φyoێ η—¬wΠ™·Q]”eWΉk¨ ƒ“μj*¬|­8Ζεφώ²¦θδŒK2.ƒ)σ“ύKΐλψs_<3t{ΗχΟΫό„Ά8ϊg&o=ζΞΗ’ΕŸVφBwwE_υΎε¬ΖjLσΟ5”›ϋJHάΏΩΜΝd4wςVsΈEcύ#f£ϊΗZ˜ε–‡/Ήω% zρeκh\ιjηΣ—ήnžη+Pτά\d4Ϋ‚ό’»ž§ϊκd\œ7ί“ηιπ:μLvωVώŸ@–φ‘φ™ύΑžπ6|ΨνΫΠΌώσDžtxƒόkTεΒiΎτŠιΤίe@Ξ?ή΅Χέ‘γΛ‹²w†ΤY‘€―ξ†KVτgΔ~/Κ όHΏN¦Yύb«‰…»Αˆ―Oξ‰Sϋ…@ΜΧ²φΆA”_Ψ& ‘—/¦ω8ΛԝγΡyΨ 7S©Xg„cμv‡xΡόmώΨ™ΰm2Οazβη+‘_Λ—]Θƒn#˜_χ‘'“ΰη’ίΏ5σΔRόŠ όΪΔο'Fœi"gtΐ3g^NΊΝcst ˆ|ζ,M*)‰ϋξfGŽΩΉ₯.SσσΈDΐ’φδTŽδ―η€|ΈL¨ζΞ2ΔΕσ#‹r~νΠ©„³+ΧÏξqΞτUΫήΑGσΟ=Έη»κΕΡλ„ ΐ•ŠΔ.Ll:σρ?δ§ϊa}ω-l­ΡxΉΈάKm.%κ!“bχ%ηζΉtOZI·η‘ο†₯^š7.†ξc>>ŽΜϋŸΗνΏž3δΩEyΏσΆβ¬kΓQνέA\δŒ[Ε μv“5ΎrX6k0€XΎuu6*ΕζusA)|iqͺ>rEߘL―½ZmXwGή(-τ":Μ3­ DB0‘>ΪgυΘH—ohV{Qέ‹^ Γ!©Ιυ ͺmΥΎ&[’α€s©ΖGιC―|V”½(¬ΩŽTδμρSϋJΚΰa~ΝΤ½ΛΓκqΥW›Νό~OO=ή΄‡ςςύuKˆ‰Λƒδ7<ލ°ΉΖAη‰*μηρKωbϊώρ©Ϋ}Άσ/.lB³ω3owό0ύ'ŽVδ»I§TCϊκΗΥ›IwΣΌͺΡΥBˆkκΐ―—dmμœͺΩ›zbWƒγΪ)Eέ‚Κ!š¨ŸκΉ΅νρqωBŒGf§ͺ²ς―%zu&ι(Πaƒ ΅J›[|Γ]WJ^ΊΰYΜQŽέšrΕΡ?Όέ΄§ΗΎϊv+ΙCkΗ5ΕD.Λ6ϊ|ŽŸg<ΐΆQΦA«΅ΩxI±ΛηtˆL{zίxδQΧ΅΄Ρb9hZυQ·J«—‰žBaσ[»›uΣ‘©L3BGhŒγL[²ι΅9n,Όƒηϋ(Γž|£x»[½Ψη =ΨGΒή·!οφ\Œ«u6―ά6Μώ·§—ŸΠlšςΆβ½–n¬t΅YeŠ>α ωs7–\΄@-;kSH"τk œq‰’ϋi›ρ­SΙE₯4gpΎ_ύΡ£&«49-)«K%ΒΣχΘ_­C*ΩGPΠΩλ’½Ž₯c=¬DOaŽ”Ώβί7ƒ§<@XZ½ίφ¦aΚJ)Ήv„aΔΓ’η_aοe(€uU‰ζΉ£XΎ ΙΖ”•ςΚiΜΈνe7³Γ^·­GJσΚ‘]ωZΰš±ŽŒ9+ς.vΞΈ9S‰Φ’n9˜ΦϋΜ? άRmqΊΜ‘u8•ΈœΗZV8„’MV8%πPͺλ€όVμ½C!aψ΅ΓQπZΌϊ¨ΕNžŠs©Νi#ψφ^^θκ"nd€¬U d₯1h=λβ‹ΘhkmZ1 σά{«ΦH[R₯š"Χ€"b¨θA5ς³ZΟτx<θ|lˆΫw]€Ue€„bjυΙ([t¬ς‘:χKΙDŸ¨6Zh‘`ƒ26₯­€υΎT­stθΫ~Ρ#έξ₯έI3-b»’†Ϋ„γ,ΙΰόGν³“h-ΌLsρ°ΐ-ι«Δ€JƒΖ˜ώ53σAjGτkih[ΨT€ h WΰaL“†‡Θ@1±HpÁ8η΄κp&΅h@ ±±Τ‹5ϋΣ™$«·§Ν₯θδ?Ού ?«"—ζn†ΰύb(Π±*―αH4ΜUΩ<­ιΰo…±ΣUgS9₯]©χΊΕ°θΆ–ŒŠQƒα₯·s”Α 8gΥΑ°»–¦]π:Ε[ 2sΚfζ ΪτvχOνϋM}ο}Žμ&Žτ fzˆmΪQ—ο|σΊzXg›==—~ηβϊώmθ:o/›œε[–kρš`ξ)΄\Α¬"°ˆ΅! 
Α Šΐ±Ϊb½ΰ|Ž€’}Φ!FuΞδl―&zy>Ώ―θ¦Jp{oφΆΏq0_φΨeϐŽsνι2ΩZΜ‰\Lθ ΏήϋJ…R­Ξ…Ϊ%€Ββ¨%Λ ™bLΰωSΣ£¦ 6‡n^οKGS–ŽxεuΖF…zŽ.«žTqΎ΄ήrfίΪ8όΑΠΦ ƒΛšˆŸ³vlΟ¦Μ“οοoψ΅•ηqΓ‹₯!ƒσUž]§^²6!΄“Š.2˜ž±% –­½€CL©9ΓΑYE0>:Ή>‡^οξδ@rή-΅Iπ Ε‘Iؚθœ-ΝhΧ€ξ:|>μAΆ 8η«§²©ΕΩ¦“―@-εώζξιιϋΝφ)Ζb7ω-`‹jA"&žK+\Π |@ΞAy•f"₯N\ΎͺQ€ιΥ¨R;Οm΄Υ³ΘβθΕnΤ₯htpΖ6ŽpΛΉRΆ.( ±ͺ–X_…/οΩηάjλ¦œ=λIπύ³ υ7ΜA»{όώ:ΊsKGο=OB΄m₯Α©fι=ΦΊ·0 b#*|’€8uUΦJ{|KFΕΉΨ¦[ΙOόψύαΘzνž[~j­+|ƒ4gp‡.}““w₯«@όZΆB+°’ 6#NΩ ά‰Ή™»€ψΈί?GΛΣqι•VAΏ­/ΞMγyU+ΰΏ5¬δ―΅Β8tͺΞIΫ€Q9–*Ο=ΏTďά(ΏD kαKjSμT1­Ωvβͺ+7ΉsIΠ³ItXI3Hίb4Λ_Νof΄uw™Αw/‰'7ΨςΔΫY§LQ1k_ˆ²s!z*£qπαx|ŽΛ/΄BΗυN^<Œε5mφβΧRΧΪDjb$'ΐJbΒJ*‘€ηJq@(‘{•Kφΐ«°Ϋΐ‘ΐ Ν†~6Š #ˆ³΄IΛ*ƒ${α:ΑΩ½θ΅₯μαIΦ:ZλwΝψL­€GΫOΤruΣaηΆ Χn/&όϊpζ‚ž«€­υbξ·9€Ϋ4“pt²ΧGΉέ?|ώvzy+«7―ί~ά”Ί^‡Y_žώZί?ά‰‚δΫΗ?ΥiW«ΏoŸ~¬6χί{Ϊӏ«v[*>|ηϋχGΜm»Ότ‰faΒrϋ9τ˜.lώ·νίΩωΔΏΚλρiήώήΨ<«ΊazV„Ε¦z }­ v½Ύ΅‚½<’βjqϊ3 ΤΘ Ύvxϊμη₯α,h¦dΗ±Ρ¦φJ:y]‘‹q•ΚY—ͺόΧν@°^€ύξΔ”­wΓ]ΏήΓΫΝΑ2Ωοoΐ=’Ζ§"5;Σ΅UKϋ ™T%φ¬3S"Px§.x!tΦΦTSμˆάΜΖε<ά[h·ζ™x2&Ο/›^nΣ3Τ923Ο7ερu΅‘[“Yn˚\ 6Δΐ¦Tιl¦20pN†¨ΑOZ ά†ήeIΑ–bϊ-XsΓέΦμŽΩ^œΠœ8X‰VΫ€UO©0e,ΎΣδhO>]΅ΦΝ i5Ž,œ«9Ϊ|‚Υκπ1=ύκΓ€ΏyεαιOπσΥΛνφ₯S π ώeTrz…ΐόSΙ°†€bžBRχΗ6Eο €1m—3Β±­aΣΉuʍ³-ΨSΣ±ΧώώΆNΤΖYŸ†·~“Ž φ₯&^‘»ΈVA˜€d\R-Εlc2€0Ό‡€%K¦·¨£/δB“Μ«0˜ΛX$­rZΰώ©ώUξΏOηm™Ž|ρ™1±›_~YA€δ‹’½Ko‰¦R°ΕΒ³sT=h 0˜₯ZΓ¬0-tm `G+ίΩ)Σ‚ξTΓ»Έ MΞΣGΚώι?βς‘ζ3υTZφ”CοYn›uβ%θ¨:¬yJ”UŠ£½hWπ…g¦ΪuŸβΝ#ψ, άfן;ξuŽΓΜ[³xNdΓΐέ+Ή,격…’J©TS‰Ωεΰ3\d^–Vκΐ &PJXwχυ|πmΧ‘k_`κ¨BεP}pCυΈ¦ι‚φ,~*Zψ9Ύ#Ε!7ŒΙšά2V8wPr3“2rv4v(ΞϋͺZΖGΆLΦ™Ή `S€ιπc“³r¦œΙg™Ρ!0K3R'RQΑk€†υΕ&(ΐβ°‘ωΑx{0Š{Θ‘αΈ*“:'Π3KŠTšζΗσίlϋOσf€Ξv•Α‚)«ή΄Σΐ¦΅.ΖHμͺΨ%αׁŽe‹χ]‡μ`Uαi«$#,™¬υvχΒ…nοŸZΉΏώ坌° dΙ άB―Ί )XΥδW¨ΙαV|Uζ bjJΘU»―'ŒΌ­θη™ΐ£*CmΆ©;σΝθέ'nΜG«cdgΕ/7Γ$]>όΆϊώηΝαυ=Μ¨ψb@εA—τΝsyΓGΣΉ˜`­–<Ø dΩ+€bPΈΖ|D&ƒ>Ή’υxΓ³φ֟Lμ)=v'Φ_oa[7εΰΗs3φ¦aw_›α@Ί‚«`‡SΤ`ΐ>Βό*ΨHJE‹MšTm=¨œ%εƒ/Z²–\†eλΗρΒxY ύaŒŽά))­€βau J.mΫ•ΔλB”ϋΫX€$3αͺ·ΝQhEƒτξγlϊEx΄zόy˜ !Aν₯?Ζ¦Ζ*j2!b=αkαGA`5wn°Ό1˜K]u¨!ε£Β^sQλΉΔ›·§ο«§‹Ί|l3A€ρn#žή$o”„δ’1Ι:βεα1“ΦΚ«%³ŸsχU›jгs7εύν ΐι£ϋυ°―θC»{Ω\β>ΤΣcγΫΖ›ΖσκρΎI.woξW[—4•>vΦ¦Y½ΑΫa…G`ιˆX”"<`ŽdΌ‚ϊΉ4₯1ί\Jp=Δ"ΓKEGΚl"<΄rήY Χ>σ›OύΡΙ ZοžΣ0‘5Ÿψη ϋž]Q.ΨR•ΟΤjΙ™š ΐ&₯κ βη%.(Ε™Ξ'ͺυy:ςΉ«FΣ±}αuυψ}―Φώkστp?ΊΗ]|›Eρu(=QΨ 
Jfi&LRΠΊΪ¦ΊT_¬q!%όUgηΏRXρ&jͺ7υw5κbf—χ{AŸk‚οV’~AE+pβ¨8ƒωKΩ4`lτ1Š£*I=)Γύς͙ȍΪΛ¨ς%- X\ΟAςΐφ±q:‡₯zŽΊoΥΦ »cν6¬’{y[4`«TyΤrzƒnδj$β½ͺοΫmΏ}ωxΗΔ/B"Kοζ†Ή<³T|α$ΧΤ(7 Η”Ήζ@ΙlRRͺ \ck$ P―‚W‹’“έYν’Ε£AΨ„iεΞ#«­JCfͺ=ΕΪ‹iFWΉͺ–J –>Yΐ`ψWΨΙfœbκ]&e¬‹R|ΞN7½ΙΞΆ«VIΨ7–k”hvg0ρ3 B΅³’uΆ6ϋ²m>vΌΖΘpΈ8§%š„Νή₯Δ­‘‡Φc–[.rΉU²#J°ΟΨΨ‰”χœΈš™BςΟπό…Cώ·:”κY―«ΫMΘ DYoT)_žŸ&™ρΓΐH71ώ‘>ϋ©λŽƒ—±ͺΨn4cNPμjμ»=Ε±ΎwWμp·.žh‚…ͺNΑu °ΪLmrmΛ"–h4ΜXŠΪeφΉƒ’‘cS@ΒΆt<Ρ>zŒtŸ˜ΑΡ„νJ΄ΝΝΪ&uΰΦbωK]+ β>«gDW-{ΠΡβΊAΒ€š±V[ƒΙ Όξ½• #Ά:ή_ν\Ζρ{£›©F3`uK§˜±ΨΦS5Υο“€V'Ptr«ͺ2;˜'m{aλήƒ~°tΰΜΨΞ²ΠΫ£φοωΡsŒ»ΈψA fΌUΉzEπEZ΄‡Έ«Ξ΄*$ŒΩφ£ΣΨτά ζ³£›'Χt›[­ΗU+ke η˜wε c”4E"TYDuΨυ!?H½ˆfhθ"'˜\+T©UPΘι\Ευ3ίΆMκΠΏηΗ(QόJtfhσsŸώΠ­zqΉ›J  D% M՘ά:Ξ©ΙFΗΦΑ.[ΔΔOAIξ°“j|λhš9­=Μλι!ξ7Vk»Έt`ΥYΔα.r τ¬ƒεnŒfɞΰΒq.ͺjͺή–}r•JŽΣ–|$;9X±Ο£ QΏψqV91”ŽτP7ς°R &³–f%ͺ’γ›Τ‡V@γHGΨ°ό£ΨΆ’=Ό6]\ƒ„!&Β―μŽ‘K΅š-x€(ΐ"QRΪΦ"ΉX5οSk₯κԏ^»vτ­ιΥάUψΫχ¨ΛΫ8N–΅·€•·M8‚Φ‰e ;ω:€^g0FΫ•’Ϊ(Ά …ΎΛ6˜@ubMΧ—Χvͺkθ³D$ŠΞ5%’$Ž]Ϊ€†§Ψc q(φXL Γδpκ(p1ΰ?s΄gΊ¨γH΅ΖτήΎο&R„₯©‘\½&Έ)-UάT¨3BŠEt%$κη1V;|•  \eΑ­νφψJοŠM^;³ΓfnΖ‰ϊ΄¦ “‹]Ϊ8Hh§2Ύ&ŸC†‘ Ιΐ][ναΒ;8P–Ϊτω‘τΨΗ 0:"l‹{ΗP,YΐW’*Ηξ΄)oμJΦ’~a«qΕͺδj“%ΑC8” D9Ι2ŸI>΄Qg7ΫΊDPΤΎίŒ‹`ζ€€ύΝNηωαΆυΘPΞΜ|ωb™Ϊρ«Γχ—ϋQ•퇉ΐΓbSzaD8{‹+ "6g’[²Q9[•ͺΔ’ΠuReθύtUԊ‡+ιω6½!ӌg|»ΑϋŸ¬μυ ώΩs``PŠW±asJ%§δ 7LδXΙΥ>α‡Rτ䀊Ίβ%ΐΰ[bšΙ)†ϋΌΕΣέT5mζe{St^ΜΏτy’wλlΤΓΓ₯‘˜3=6D§Fώ*l‰άό'xμΣ³-§A󯉩&[€·$ίζξρπiš―κΕ«&"θ5\GuœrφƒΕ³‚‰Φ$wC1ϋV€Žkν΄₯d―tΝ.9ωŽsΆηοίΖFvqEoηΈFiTPkS›Π[‘07–Eμ-„½*ΉΏg²‘80)|‘ρvΙP5Jg=ϋψ/‡Φί_CάUJ©£T” ΨQ΅ΗΞΖφΆRf–δŠ΅Πd=”Ϊή|Ή₯»,’μ)9υ@€ŸnDΫϋ£0ςΏ½nϊrΰΙΝd6ξφΏg§j‡­fI./Ιΰ %o²nΝΤDΰ”RιΑλ―‹I•ΐjW•X2""+ v}ΨHyδ΅N\‡σΨ‹ζ‹ΉP1ˆH’o©`¦ŠθRe‹Nη+;Ρg€2Nm†S}˜ΉΩΪ¦`ν;»εoIcŒ:™*Ί58γDM1z°›™’TAγΐ‰ΙN ζF΄#a3­sΎaΙzœΦvψ”ήK83‘z(k%ξώ—Ό|#r―wOo7Ο/όηŠZΌ< 5X&kΌ‡go ”‘ϊ„έhδς86IŠSÁN’Θ‰ΝΛπ ‘‹½υ:"γϊ•κωΝ4ˆ₯cϋRI’Kˆ7‚LΩφΩgΈ8₯Jε ¨O³Ύy”1”'Lœ6%Ψ―B 7ΟνF φΣ_―7ίίw·ΝnƒΏαθ„L~θ΅ϋο€iΦ΄QΞ’\sΦμ+k2χ>p”Ί­¬rz”λkQHsΉ»ΗηΪbQq “―ppI{έ {Œ@<Ÿ/‰s$ΣTsXVm xτPU#i“žDΗϊΘ:Ÿ\²ι…Ύφ’Eσm$Š”΄e֍€nάH |· Ίpb€b"7gZo:[@;'™ΆΌvΔ8ΙnpξΚ)ξ.ΰ¦ ΰ©ώγΨKνΆ£Λ9dΙΒO0a!Δ°p~¦ ψ8ύρpοyB`{ω’-UuυΥ, ψ»ΐΏ&Žͺu―{PŒ…’Q!*pŽή“ΆΉbοR(~Ϊζlο·e|Bκύy=’u<ΰሡύρ·ΉΊ>jΣ1§φ[ RŽ=¦H 
~Έ•BmΆ¨ΘxΙ›”+Φ’TI’’ŒOΩ„™5ύϋ‘ΟΣύYξoKί|η©<ν₯Ωac–(ΚCΥη42Ω•αƒ]†Γ,ΥŽJΡ^*τJπΰΔ3*εΞΊ:άο>3 Qο;1ςΖ‘_]Ϊή²„[‚$’ƒ0T’hΚΧ.†&ΐξˆΚi“r.iρ!©κ¦ΚιΟη+9Ο5½˜€[ό^ M΅l€μŽ@[G ;³ψ2Mΐί2Ύσr£DPU5§φψ}#‰s³V4ž](Q{™¨―ΌB.Ό U…δz4€”Β¨Ζjƒ}@ι\ΐͺcq|₯F,έ>μ uάυ)Οψόο?ϋΟώσΏό_ΟvξΔЊcargo-0.66.0/benches/workspaces/rust.tgz000066400000000000000000001300101432416201200201770ustar00rootroot00000000000000‹rust.tarμ½i“λΈ• ڟσW(²?ΈάSΚ$χ„kܞξzγκξ({bή‹ς ‚+)R&©\j’ϋ; )qΡ’J¦˜χ^¦—K‘ΰg_²m^άί ž-ΣΕ£ΘQ¦wΏδiςΓύπg›fο}ό£”Α΅CmΗ΄MjΑ5!&ΌoόΓώ`ώ<›Νώα+ύϋΏ7³Ων2*n7ΓKψ‘―8_·žΛ-ξΉ‘ifψ–ΟCί ©OCF¨ϊ”ΒcF©{ϋ­j7»Νδ:-$6]Ε&έύ=τ»Ϊϊw"]ίg°Ξσ˜'Kuu Mώ Ϋέnx±‚M‡{[ήήόΧΝ?LΧϋChάΗ‘ŸρμεžΗq*ξˆΘΰHΧρuΞ?‘”ΆΟ?₯†1+όύȍL™ˆHζxR7Q,³…Ώβ"JςO7‘δΕ6“ωμŸg?έβnsh2OΓy^·Ÿne–Gio;rg·77νN3ωι:Ύtww7τ[σύ7?έd< tOΞ­ϊ±xN³|……ΎK±Y5’O7ΥpηΥpηB ³3{qΫχφπQ,ƒyΒΧε»-οτχ"Χ‡ΪΚuo‹$σόP#ύΪέό΄αβ/aρψΆH}X€•fΘγ\ͺ{…̋ݝ@ζ"‹6E ŽΏδμGΨ Χ{¦Ξ6WΟp‘EΗRΰΟ|VΫDUcjχφ&Ž„Lr‰7~ψώ―³ψqφŒi%ητΰŒK‚TΧ/ΉIσ¨H³Όy’ά!Υin{pςΘψ?ΟήΉΛς+ρβϊώ'w=ψ#Δύ4-ς"γ›θ?1»τίt&ψ_›ώ­BzKΫήνXσYQvτ RUDkYSγ₯,"AψϋŽ@£h™ ½QwΜ;=ζΏΎ,`™‹¨μ!ž#κ~Φ2[Φ_B²]/Δf›—£Ή½I!BΖ±Ύγ`£F.3έΚΊ½ΙeΘΚΥΰέh ό€=]ΏήβRώ‹•Œ72kp+ϋ:­Τ7[LR ³θQ6Y#ΫΟσl2Y/ žC_š«Ϊ·ϋ €>P€š[̒9Oruύ v;νξV„Λož’$HŸςίήή5&·ω&jΝ AwoΏέF)\δ²ΐλ_R~PΌ\_QΏ}D©(bΌήδUΓυ&†Ω‡|mΦ‘M”ƒΰA)‘ώΏ&ό‹4sμP.ŠB-bNΑί4­6όMfLπώσδe•>U‹ c^HZέ!@–VςΉ’ξ΄M©‰"ν©ΘCΊΫ‘ΜpkΕ©ξΕS”™guŸήqBύ ’Ϋ ‘‡ιecΟίΆ5_ΆHzόόC .ΐ&5'ύίuι›;°x ώΔθΨ¨mOςUπ™ψ±€Ύ*μψ΅žaE€Kψ›±ιόAΧ›9¬ΘRÁ“ό?#mψ3‹ΨόGηΝΫ±ΝβŠ5G`WΛf5v4έ΅XφžΧ|ΣβΪUWm¦έΉ3ή€e#Τ:—ν―·ϊWLՎœΑˆΐψŸ™6™Ξυα―Α†Ά€ΏΠξΠΣ™τ?WΑZβo [­ςωtSjκη{xW;4ρ―€#Ίh˜!ξιϋHΗ 4lήQ»M Zκφ†ΕKmEς_ ™GΏΚϊ©ˆωFaώ;Fπ‘₯yξKΎžo‹(.--&rσό%―>lΒ=™<.βtΉ¬,LvΨ2€έι[FE©Ι"LιΗπΖ|G6‰‰ eœϊ₯‘ϋΩιΠΰ:­ϊΚ΅ΪyR}j›MΣ’δ5ZΟ3mJ³Κ7 ™©\~Rέό%υp>Vγ‡0{lqΚ“οξŒk΄cžƒjVυz§όCe5,xk±Κτΐ ξ€―i±QCΘ€˜lΣ J–εœoo΄ŸΡSš=δ@Θε|ΤΌΪYεγ0*WΠVκDΕ•θ… φ¨xM<—Ήΰ›ΪΌ«Y΄™σ$Κgω^›B7gΌΕτl-8ΝJg‡έΩ&L@Ο’@› ±'kw9 ͺ έ•;ƒΒ/XΪ'?QΉj΄λ<…¨xΎ¦Ωzg˜œΡ_׏;|Μ‘ώpχθ uϊ!έ~T›y”v»©ž΄za¬‡³ΓcbμΠqˆΆ­ƒšχl5 O5zΕ=šη0ͺTyRqV‘m[Θ ŽιJiΛέ‘Ž½˜=¬ξϊ›f”ϋΖώ:Fω@{Εά·ψΣβ‘ΗΫΎQt‘ϊ*$Οΰ tΜβε>EΆωšΪ>°[w/τΧχ{Ι·›MšΗϊ)_iϊBW/σ'ž%€ŸΤZT{ΐZσš@όtہ½Zͺ$@Œ4/7•z±ΌΎ―v\ύVi¦Wύ“«mΐg‹ΏΞ~τ"x‘aΉζQŒξt-κΐ©Ε³?]ΙΩοΧς0›8V?λWΎ‹Φό1‹Δͺ€E=‡Ÿΐε-ύβ§Άχ βΏρY9²Π%ŽsD£ά 
οώ–άή©Ψ…σͺYεφrτςφx"ΡΫpνσQ΅ΡΉCΜqžγaΩy&y oόψ§οώε‡?έ­ƒWy#–έ4φ₯εžι-ε)@JRδ.υ²πy.‹<θΈSh7‹–ΓΠ[˜^y%ψ!⏀¨/Φos冑₯Bζ9lΠ½_Ε W―rΠ¨=5ς—D¬v2Λ¬ΌΖ!–—"MΚ«dςηψ‚俁=@.њl²š““€¬ŠˆΗ%•ά»ρΑIύ/uZπ§„:όGΠ`DκP‚Μ_€(f"e ŒL–w9‡οͺ€‚Y‘ΞxžGΠξ)ϊžΪ6³έ>šiƒS~wqπA{gΎ™ς]6ΝKΟkHΔλρ?œ:Ω>0όηd'ϋ)Ν‚3(Α)ψ;΄νGΩδ9 ώο·ξυκv›‘άΧΑΗ]μϊ­qn’RZTŠD³bΕ‹YΖR€@„fQ’p½ηg¨ΩŽfνNΣΥ©ΧΫΟiJpώgdŠωΠψΝEšΟδ‹Xρ(9NNΑί6;π7‰5ΡQπΏΨ«ώ23ΐUh€Ω³C OFAγ°uγ/³jλˆΕ›gβσD:G‰ΐψί:?ŒQϋ ΏΞζΫώ”:tςΉ:ώœ³ο΅q”[§mZ8zηA.žUΟώ6ίΜώΈkςƒ6… H:ͺ5Ή:Νxλω?Gtώ§Ά5απ:Κ’ΨκrOΐ“ψίκ‰t¦ψŸ«ΰ½GLƐΤ‹γ~5˜Ζ§­Σ^e€™Σ,ŽVμ,/±h”»Ϊ)σrm?€(?ΐΟύUςuύHfςHK€œb( ™‘ηǞcΔμ‡οœ}£Ρμ)γxύ·§IΕύa:'ρU”A78‹0œ8ƒX‚/‰d“ώ|ώΏα\x9-8©aόoΫΞΔ_ŸU:z”%Χ|φ9<Ήzσ7ω |³v&KΓŸi?ͺY΅οΞυ8Ϋs‰=Λυμƒύ–~χΩ73ςτ*VθWœ‹iΑψίΆ'όAπΞΕφR p ώ–ՁΏ3ι„{ψ#‹U–Ξώ‡”kžΜ~‹ϊύ@O’η.Γ¦|y”BEoΕWθ]ε*έF…<†ΔαΗ«"ς}Ης/Ω‘θUηB pώw {ςϋ@π/C#.£§ς»ιXl\ο‹ο¦όμΔ;ξBD"v‚QΓ#Χι£\QΆΰq|8‰g7όN₯υlΰ5BκzνΤ}‘D΅Q£Ft"`n§ƒε†K{[DqJfη½ΙΡ ¦š}Aψ"ΰϊo:SώŸΔqo³’fΗώOlc’>ύΩΛ¦Hη+ž―*[<ιΝƒΠL ΪM>@9ξάNϊ€ˆΘΌ ΌW)ΘΖο7CτQiΔΡ3±>€rς‡(ΗΔ<‘ι6Ÿ•hp†Qύ³m.ƒ™’;»»²|ͺ97K¦€γͺ―).Υίn•“ΧίnΫ‘«XOd¦Ϋ$Pς{K_  ύ‚ gQ1-wομΨΪu€w#n;ϋ-΅"Mς΄§έΛφ9šLψΏƒίdΌ€ώ[Ξδ1ό‹αRŸbtσ± ώWΦL”Wο : s_Οω0ΐ%ωΨ? 
όγh³yΉnώojέόΆ3Ω―‚{ό΄σΪ‹¦β³ύ€EhΧσOη³*ιEO1΅ίq”ω^j¦½»mšΣιbΈΡήUbεΡ¬]¬jΨ.䀋(―eΑA.βUžΌΫν@3OδS­εξ(Υ£p‘ijύoG~{σχ-`ζ=ύωR ά*ΣάwJ”–σ’Ξb؟'K/ΈJŠΨƒΊΫίL―ΔώY»Vh±ή΄₯+§·—WτθΆi‹ α6Ž{ΫIλΥΨtχΗΠϊy.³Κ\ “+wJω(ƒ­2W-»]5ŸCƒj‡ΜΛœi¦©–­—Ώmž…ϋM›Δzj… η–•ϋ-Rx ~6~¨sσwΏ«€ΆΛ(ι ¦π·‰X‘§¦ρ€UŸι5ΐΌi„UFΧkxw!cχ£*άa6δ Fο―JΝS©1ΐΞΤ‘θn6[?Ž”βJJޚ)L]U·xa vw„» P ;Ν9@τεWLλ§Q67WgY!—G?Bώk’£ΑΏD:€†`OΑŸuλYΖdC”EΊMŠΊε^ eΦΛ–΄rϊ6ω’ύ}[U2έζ9ΙόwΆSτ’L:Š·œHΑ%ρΞdώšι|38%“ύΧ2 €Ι°ηΙ§Q"ηk±ρ6~ΎnI˜ƒ˜Vπηšh‰u§ ΊPdS[Π¨YeSOlΝγθΧΪyΈO+Π/Ρ’γVG¨έΟί ‚c>%s±ζΩΓΉWξ[kTΜΑ}D ΡΧ΄Z!-₯;όn‰ΝnRφž δ₯«έ"΅Zλ^‘Ί_nΦοΏJnή-ڞ̬ϋ9KfώΜ$ΫJ56Ι³Σί»Σ7s€—Ψ™9ιG‡ΏF οΝ&νΒί$“ύoώ―Γηυ²i'φ.e?JΌ_%α—|[[±;Ρχ;γΰkŽEοjυ_ %ύ/sθ€CώΗ8+]A -ΧQ֞Β@ –²‰4ί…‚5‹Ϊυ‡yΥκΰ#…Ϋšρ^e18%ΩƒTF2˜οFCΫZδAπςϋσ|λ£PθΛμάXp­D:, F1AUJg β£JΨγΩt½ˆSυ"– jΫΘιyΥkχjGγέv†|ξxV7κϋ½Φϋ-žΨuΞΏ©`ΠΗΖ£Φ΅ν‰ώ_ ώe½–{ μΊψο9ςŸέ‘ԘδkΡΆ[Ζ“`Ο_k_Τσ·ΛE&Γ²T,–BδI$Ρz „ €¨χ1<ͺ-Τρm‘ϊΠύjpβ=•€ΎΣΑΦy]ˆΕo¬·θεW‘“W«Fο0·auύ’ΟΐΕί.ΓΤ0¦ϊ/WƒIυ³{­YGΩ˜ΰ$―Σ~‰ε{εκ©%ΌB/x2ώ‡°Nώ_jOω‚Ο54π{ΐΒ±βtώΟ8gŠKςS:ō!5σΏ—•άί(œΣ‰΄l6Αύ_Oώ‡2·’*VΏ‘™ΐψx= J= Ζ«s2oΥΞ½)“QoŽΒ\e&Tv*.TaRUκ‘.@Ž₯Ν/4ͺCσζμ Œ]ͺžwώί&\b±œιό€•fv©1žKτλζ°}ίͺ:Ϊƒ§όWΒέψΟO-ΣJ7ά2HEΓ§ώ‹*ύδ<ϋEοF›€Šyώί¨Ί(ΛdώΟžσΎŽη<Θm½Ώ%]ψΣ‰ƒW€_cύ¬ν˜WΩΩ«'Έ[ͺ{μLDίέQS™ΟκόβEιόU€™V<Θl0KπIύ—g›μΏ#ΰΆSΥiόΎΏc&ΔώE‘,Α—ψ8Ζď₯‘ŒX“8Œœς5ΊωΏΜ)γ(ψΏ©'ήνfPίΔΏ]/Δf«dψ™ρ—=eύͺΈΔUqΦvέ}ώ•ξ₯vξΔρςMΛR€Μi;•xGLΗ+Dwτόe«ΑIΫC³m‡θ·ΈΝΚ,[kGΙ²Lb¬~V£.rY΄«jΓu”Š".―“br»ώΞΖΓp—՝όG€?ZpƒTˆ‡kζf(λwς?Oώ_#Π0ŸιΠ4ΥΊλ ωΑ ‚–Ÿ …ϊfšΉ(‚;a?f―w@ΎŠχβ‚Ξ4λ­9I™Χ8cη&“οπφςahυίiύŸeχΔLρ#ΰ&&ίΗγη!ι½ύ3‘ιΟόό7Υ ώΕ€όͺό?£ψS{²Œ€;œΊJ₯ςΪrkljϊ'fO“Ο^—+©Rͺ͚₯ΐΡ-©@„™Ÿ¦w·G©ή­­όόΜS6Ι#ΐeΡΰΰ'σΏXέϊ/l²eςΈˆΣεr―\Θ^]OcΏdηνΝZς|›I‹=]˜{<ξ­J’u@½Ε=«„)ϊM§cZJ N u;Giτ&Sι©ΤΘr£ƒήš5΄‡+wk‘ϊŽΩμ―’―{Šj$@#72‹QŒuΚ&“XΑ©£2dύπύ³o0Z{`ΤLφΫ»“ρ*χ}Α*xά_§’t‰νeΙΦ^™₯f’ΦΧΑCgΈ$ώŸ:“ύηjπ―ς?θl*‡Η5ε?‡vν?¦1ιF U$ψΓ‚Š(Ιχp|‹ ‘n…©;]Žp9Β΅ΤœΌ7OΓyα&=IΥ‘ή5SΉ«œ.­·bœ›ωœ“ZΏ_obΉ“LΣP“^uFςΩcΔg›,2ΟgκΐδGŒξΊλg¨9xώG―9ω„·ͺ6ϊ€ΰ$ώοδ!Μr&ό?ώWΘzΏAx<ήͺ„{Έρ}λmφ H°Ί§(YΎεW#3+Ωασ?Έ[Sύηπ?04:»Ο :ΐΣρ?Vώw’Žο€oΰLδΜΪύχ}₯w;gŠα"ΐ: Kπ?™κΏ°Αμ C.¨ ό―‚ϋςΏˆ*νέvΈχ–@πj͎qΎDpαinΩ‰ώ yώ!—ΔΨΖΔ] ώ{ρ?rrŽ&N¬ ²π$ΰ$ΟΊωΏTNΰ 
ώΧ֜γγΫέ#“ϋΥ|ώίN.ΙbLωΏΗ†.³ΗAΒ@.Βζ„?ό―χΘ„Ώΰσ?ώ'dΒcΐ?Ξ‡/uώ&qΊυŸθ”g<ύvΦT;BΥ€=`em§‹‘φνJ Ύ¨Ό@Kz-±bDξKΎž‹OΉΛ Σv9vΠεXϋ «μ$«t]z[f2S#«”1Z%ۈΉς‡Ε‘Έ·=™Wφ—[)l,ΘΡn[²Qk—ΰ¦ GeΎ7GOίΙ7<©§†7Γό\θƒΛ’rv‘σ*FGηΛo݌–IεΨ¬rυτUΧ„s]°ΚΡ’š¦κ§›eTœ›Θ>“ψͺcΆMαx3Η4 MΫπMiš4`‘+ˆΗό0τϊΏ8ΗEΈξgχκޟχau₯›ΑFR<ό›ΛαpΛς¦+M. W\0Σΐ²©ošΆη…œt} ςΝΌxΩΘΆŽu“₯›4—έͺέU­f°Ο²σό((Ίίwέξp+?ξ<ΚwJΪύ»mφΟνqΘΐχ£ψΤY£ύα…ΫΣΪb|S•ζhΤ­Vs»―9Φ ]σ$ZŠψε|0cuΧ €Τ… %‰τx2β„œψΎΰžεSjR"!Η‘,θ?ΐJΆ2ƒ_²έΐλΦ/γύoλ]w_Κ1Ÿlό{˜ϊ eXl$ϊκλτΧ(ŽΉ5ςmY³πΟ™ϋ Ζ…Εi©‹;Μ,x!—iΥ΅OΤλθz4WθΏ’<‘ώ [q c›ύE w6GΤΗ^E kŽKάO·Eι»”₯ˌ―s ½όώ_ώ”ϐ²₯Ε λυ`ΪΉ.έπύ_χ$@lψ―₯Έ‰#ΟL2β‡ε/yIό™όΖ‚>iD<ΙzνSώ‡τZͺ˜'ς©–ϊd·/XνΛD<λ•ŠΙ3 !₯Ε²…yq+°ΩΟqυ™}Ι9χkΧΛ9ΐ»:ξ3Ό* p2=ό|ρ6Π«‘BaΥrόΣν§39‡=±α ,D%fLό}ρΜΐ%ώ–5ω”‰•([_Ϋޟΰuϊί§KέΏW:΄ν)#έ>Νh‹žWΝ`—•V¦^h_\tίZA΄‘IΠγιΆg^χZ] ΗΤxi7ΤΓό{ Μ³ΰKω―ς)ˆfΏ9pRιΓ–kΕ—“πο1ΔΌ 7…nΦΫ$ZΨζϋόγog1ϋ²x’2Q*τK•Ψ…"ΎVΗ½FORώη‹GΝgύώZI:pŠώΫέόO&1§ψΏθŸœΌ/ίοF?Άν¬4©e, ]Δ°c]BζεDΓ怉λξξΎ&«―αgޱFžJ‹Ο©6§m+-HeBQύ£Ϊ£\`ΌT£―T!Έxϋ¦•Eψκzοfy„έ³|­gΓΪΞα‘ΞtτεQQ_ΜΧΫ“ƒύί‘­<υ?5Σ~΅,š0Λyα§Ϋ:ΜσψώϊΕδr8‚κdέλχζψ»q˜Bδχ7‡}>O“yš?7m•Cq“―sj“·Ϋ6豜υ~aκ$-ΎΑŠhιSώΫVκ»rqN‹ΐ~²Z½V=Ά:„Ϋ|΅QŠ’Π+ΜΡC+Β+Ÿη²Θ58Β₯ϊ'Mς4–εs™e+8vq”,«;IωΰΤ&‹|aω Jl™Ύ¬―Rx Ÿΐλ_RΏ|U_)τωdpΣbš+Nγ”Γ―ςΥxR'CΣμ₯Ό½Ž’ΌΜYώ„9γdΤ/€Ο&ΪθΛ§<πΐ%‚/€ΫΥΤ’P—ΎΘ₯¨oβfάͺ¦ώ/UeΤ¬Lχ~«Lς š~Yφ’ϊUέή%|f›EΕ φZ=Ζκυυu ŸTWyyη%ζΥuކϊς—ώφΨ­X.ΉxΩ­2f^ϋ5MͺlsΜ§†½·Bp)4ΎΑ«μeS”Χε’ΐU”Š".―q ΄žrZ½‘Σh³»Φ ώ©5ΙF―ω·ϊ\"±©ώϋυΰίτXπMˆ|Έπ§ΰOνnό™β/}Ίρ£"ŒωRgFF!ox.ψLW΅jNήj£‘όΐ.ŠœτγΑΨN~Εϊο*Φ³cͺ3‚ώ―t3ζ_δ ƊR#ΠuδnΎ1"‘ΐ­:‘ˆχ8#Χ§ώ ώyqΝψObОϊο“ΧαQeˆYRφ« ΌΙΠo‘λΉ§΅Ί M,Ÿ{£Τύm”)₯o˜ϊΑVΉΜ"GΏΚž†υ³Cm7<ιk·G$Ÿω$_MτΏK§όο#Β§O€Π’εŒΐΙόŒuλ9ύώŸEμ•|ΠCΛJa¬Ώ ¦ποΆ@ϊsθύΕ&“Eρί¬|ψΞμˆΜ²4λkh΅ŠϊΨΈ; Σ“c΄—£ΠO`CΚztΚhέΣF?•y©ΡΤΔΕΌώ€Έ¨ώλTgTψoxˆd5ΐ)ϊO»ωM{Κ?ύοϋ9#Έ*uηEΡG`ρφ‡δJ:ΨΣ¬|r έ†gy_+usΰ ^CΙ5Β™θψΗΒƒ¨.±ΪSώ―qα―πUθΏΑιΒί™ό>¬όJj‚ΆΌŠJ”|ΑD%sώΗΓl²Ž`€‡4Ÿ¬Φυd¦5ΙWΗ_‘φRΩμ31:Ώ4x6ΕVωD«?Gό? 
ψϋ―I¦όcΑίOc>‰‡+Υ£*Χw»ώΛδ7†ό'£b₯sύ“;U «ή€qšDΫ|.“e”TUaΎ|–τˆA‘ΕB>ςΈ7ΉtυπCr ˌoVΡ―=νͺG#Ϊ„£$μεIΤύ!ω˜(β>vD?8Ψ*S° γτ©·νξρηΓ `//b˜?Œ%–’θgλcύδ‡šθ©_•‰ό|θˆυθ€ώΫ(.’€KίΞž¬Ϋ“Υ"χ₯θΏ.kΏ|ή θk₯|=Τ%~κΩΛ¬πβPΣςρδ±xj ΦΑ ίΜ\βgZ“ώg,ψWž°Ρό)#=ώl²Œ@›ρ€PΙ2…!&YσEύc-yΈRγ7οξHΑ`D„y’ KžPx•ƒWsoŒ6­+_`FΝ>C?Qw…Έ¦ΑŠhτI²ΰΈZ}Ί€ςΡ ϊ«Ώoeφ²Π){Ϊξ?ώͺτWWγžφΙ€EΊύ)ώΓp&ωlψΔxŠsh7ώΓ±&ίΡω?•Aˆ*1νυΩI} ^U…phΥδο,ΐΝHL‚( «{Θ+Β{Φ]›ΐΚυ“°β †με .1υ‘’8·θ8f\Υω§1 njΊ$V©JψZf„V‰»u~c,QŽ §1CjSξ€σΆ{rt7’§υρ•UB»‘˜Χ―…ω(”$E―}yοι€ΜλeŽ]ϋ> lώ²φΣx‘dPΐ }›o|Œ°’3ίΘŸ’C˜/±9“Χˆό_ε„s•ψΨβ=ώ“Χό_Ηk™/ƒ’ζμΞUήM<Πgεv!2’Ψ|Μθ v>χ°s±œ”h“ώο(ύ-ώϚκ?Œ |οϊ”˜έϊ―™μΏD.ηQΈΗΚ„—ΏΜΫŊ¬KΫηv¦•xΕσUUπnόX.™ΙP“·½z\πP<μ4Œδ…!Ο2ώ’€σƒ*Γ5t:=ΤŠΞ…Θx!ξκg-šJϊŠnxφ#]ΔiΡ+’Ύκ{/šΨΣSΛ§ξ·ΎΖξ΄ŸW…έϋ:™χζcyϋΛC jnΆ9ˆŠ›½‚Ζν2ύΥΥτ:ѣ˞u Σιί TXΓk»}βωšΡΏέΆ+Υ}‚“Έ†sJλςo¨GΧW- ^T²ΟTIμ‹£cΤ²§ψρΰΕVΜqώΜ4:υŸ™γLυ_FΰxQΌτ»ϋ•μΥΌΘdνΨη}ˆ·:ε} €T5ΪƒjEύτC€Ώx­&ž†πξαAΞ±YΘ…μΥk–Ο9Β }Ž…pϋƒΨe/ Lj·Λ(YDλMί¦έ{zˆζrΑΏδQί¦i<Γq_6R<τ΅R:­JμΨηξ¨{θΥyΎυ±ό΅ΪϊσΌXΒ΅ͺυ+“Η9pΰ°νUqαR:QΜψN°T^,Iu Ρ»Yξ½ό΄“Lσ<έ+?Kψσ"–2FBͺή,§pί|Τ,ξ}JΦQ<ΖIη5RI³όu ύνrWΰΉͺ=I'gσƒe€Ή$‹ƒ5!&ώoψο1Χπ4ιρ˜τΧαΟEΨ t²~-η4/sςώωΥτ?DΕzΆλ?Oώ£θ’$-x!ηym6²(έό]Dρ έ k‘ΖiVGŠβψj»WΧ"·M"D*σ§(ΠRΎgΎ³^ύΘWω%€zφΉT‰|±Ξ•V< jα*IΔjgψρy.'ΙκKΓγΚ“ύg<ψ«D5Χ£=ρΜ±'ϋΟτύς?©z2‡Μ;eνŸ){ΤUσ@]Ÿ5Ί¬’ΥΗ63 η„y—‚ZυŠΕ+³‰½CόW‰Η₯dͺ<ύ/χρ` ΐ)ψSΜυΠ’aLπΏ6ύ˜$ΜΦ„o:_χόE.ΡΞd ώ:!ΓΥπ?1Μ …ΟΖΒU–Ž -ηLόO¦ϊ―£ΑΏŠγΉRύƒτΦ™ΰ‘π.Έk"_Λω΅ώΗd ώ«hΠς―'ν?ΤκΖΠΙώs}ύΟIϋO¬Š–ž;‰–«"~ι˜‘. b=Ju©eεz ίΡqδsJo§’ˆ&&c\ό?`ωΧ‹μ?”NτDψ—Fυ«δ3,•μ)ώχϊτ”πέΧ†’Ύ ₯{mͺgσE¨η΄ό_6›ΞXπίK’}ψ/ φέϊ†1ΩF2žuώ«wqό<³Π_―πχ5s±~@ΧΊs q#‘$,Ύώ/ώo’γΑ?Οj€OΑŸΩ?f“‰ώ__ώ{{’Μ«θ?Ο'JE;‘‡KΞpΐKτ–9ωpΘτ§ε?Φ΅ΩΖDGΞωΎό"_Ήφ9Y ΛΊ&M'ώ<ψW)‡αNΐίth7ώΛ΄Œ ώΧ§”Ών{₯^[}αΝU .6LΖιΡή2ϋί'tύΥΑη~ ¬ΟSoζΡκΡ‘–Ϋ(.’δ0λ|αD.eΜύωι&έ εε1τΌœ<œ\Y—d³3ΏšUՁΤ}­ΚΰνQρφγgHΖ’p*8ώ@Α8υμH[@NI¦Ωϊ@ϋϊω‘>$]§ΩfΥΟ₯ο?2Ησ!ΌsηΌ53τ&‹Ήθσδ(Ÿ­Κwΰ³»‡ZΓΙKA\θiZ>™*_ZπP?ω‘ζωΑœΨΚBΪΫ|4P6ν7 ΠΩγό2φ`Zμ₯ϋτ*ω΄ΞO?`66ΰΣβ”2Γ$loΙΌvœFΌΔώg?–ό§r]ΡώgΪ=ϊ_:Αω―JΡωU½€2Ž£‰Qν=κ.ζ…Μ‹:θV $7ΨT˜ρGXζ™ΪW³m.ƒ™2S{νnφοι KEqTΌΜ–[¬X!Λq77Yϊ2ΈϋP²ž#!“\‘ΪΎλμ?~œ}Ÿ_ΙΉs —i°2ΉIσ¨H3•ΘtU›όwχχΛ¨Xmύ;8κΜc -κκΎSΝο‹Σ+>#λ¬Ιώ7ό2πTύW£λΙΜ)χψ? 
bΕ£d—Υω@η\ŠmU ‘;λC”ϋZ²=^Γͺ¬@½mΒƒ%ίPkϊ^‘”ΏΖRT—ύηJ§ZΙ7O'ϋo›ώz gύώ*χοuμΏ†Νzτ?ζ”ηκόίΘ!=_HΦ„ΧΠή2ΕφxψψωS;Ι£Α?~\_Qώ7˜Σ՚Φδuό―lη-έ₯όg«ίέwξΤ­ΕJΖ›–E¬±ΌJnFwkήδZkŽ%’ß>έ|ŠHΜ‘I^b³Ρw_f•oΚ$βΌςόΝOυίFƒΏf·VΗ NWλLω_ΖΠb₯μΉ‚?U$ΰφζοΫ΄εuώ’ΤL|©¦=ΚπZΛ]!άΖqΛUγ5œrΕιΖΈuώΗ­B§ϊo£ΑΏταΌRώgJ-ΦΝ:ε7όσ„oJ\ύσ‚~nΆΎΛœΑ―e!άεόκovάξ:ΕX~Ρ1–;§ώ]Xα°¨šΒ€VΚJΒƒ9΅NΓΠ1σΏOώŸγΑ_‘«ΙΤ4z겉σ£"Œω2ΧdB χbΕγ‡y”i‘‡-εƒΙt.ύUg;“Χxπ/Cͺ―γEœnό—9ΩGα&ήκ Μfv=OώΙXπύy:'Ζ)vη8ώΣo’cΔy€ΛŸ†`NΡλVύ2εƒώΣΫΘ₯|>Ο8ωW;L„u‚Xa΄Iπ*ό? pύ·™π˜π―S’ ΐœ²ΫμΐŸΩ“ύgϊ2+4Ξ5rτή‰ΤΏ^JœΣ@~\ΆbΣȍΈ#›“=δσΣtSα‰ήži™›:F0€φΥχ{oΌ–ΩΪΛ¨9q[Ÿ7ύ;x Η¦ϊΟγΑ/Ÿν5τ?ΔκΦ€SύΧk™Έ©IΡrˆφ7RaO€σΗcιμΙώ3όUΒ΅+Φ#΄―ώηDG Mί#ωLΗ’9/φr ωY ?OoY•u·―‡N]ό‹p—Υ+&2%β±σ?Oς¨π/^Aώ7hόOμ)ίΥι[(Ρ€₯Ξυ;–ρΟXςί€ώX_μjρŸΔξζ7 cς¨ϊί)§ηšΣΚώ†œώ―,xψΕxœΟνθBŽ#ς9GπΈωΏΨ€ ώ»"‘ƒ0§ΰο8]ώϘψΏΡδΏΧχλΨq?~™βχΦΖͺ˜,x%δq.O–ύJΏ‡ Μ©σ?Έ$ώ˙꿍‡u‘ηΑΐSς₯έψobMφΏ/FώϋΒc²?Ό·ΠPeΓO—ρ>[ς©jΙO*ޏ‹‡/‘„LφŸ±ΰ―ƒ6NζtβΏLΫ™δΏθZςθ©F՞FΘoNΕ7Rψv/Ύbκγ/2@λ\6c‡Ι&Nγ"ό? p ύ·ι€ώ'\#ώΗ²{β¦ϊcΠ Ο€ /β΄ΚζK`χm²ω>œͺͺST“­ϊΐ¬ΗIό?’β?Fƒ?‹NΙ΅τΤ0¬ύδ5ύοΙaHϊςΙ€o‹@†ωθ7υμγΗ ¨₯Χ‰ψ€GΥΙώ;όα¬˜ώσ$ύ7Ίρί̜βΗ "\Ξ£°Κη @. ζEΌcς§ΛΓTωAŸ’@csμΗμ`ώu`!ΒWXίZs«…΅½] †b0>Ί€†4ΦnΜIίžMQݞΘιW‡‡KyQώsŽ—΅ŸΖ€Ÿ’%ΘΛ7³'ιWώ·lkЁώoΆΙ oιΝ]ψ_ Υv5kpαw,SμxΡrŸa"²Φ‰Ÿώ„χπ[YKμΏ6θXπΧψγjω_ »§ώ‡γLπώ_κ΅ΨƒΊAUtzLb{δό›sŠΎόͺκ󽈣Νζ₯ΪxKY²ήF Nβ‘lfLφπΝO•gΟ§Έω2βYΨ_9ϊ|ΪΗm‚r™fQιΘG§›΅LŠΉΪ7·ΨA.²hS‘@ε=4[Ιx#³Y‘Ξ–2‘τ2«π_”θšx W³eTΜΚοo Sβήή<Θ—§4 τχՎE$ό8ώ ρŸ²oΌ\ρ|£Š#!“\‘αΎλμ?~œ}YΙ9EΜΫΒΠυ 9Xς@?ϋρOίύΛΊ[xs“ζQ‘f/ψ`U›όwχχπνΥΦΏ΄ͺN<NZ_ισΥBωtDωκΜσ&Zpώ·ιTg<ϊίΜψξϊ?Η ]ωϞπψΚy₯PV u½±κώHΆ_θP‡ΊNΣΧ J'h·“˜uhYρό?ŠώMώ£+Φb¦έ­4εƒώ‹ζ2YFIiκ³,D2ϊ~”uο©PρύΫ_ ρa™Λ?GH~]b9Ε“…Η՚“ώw4ψΏ(₯Ο`ΐ)ϊOΜnώ/:ΑCʟ›t~5βϊaεΥ7ς³‰o‰4&ςϋεΰ8€Kβ™πxπίΘE”]+ώΧ°h_ώΟ ώ#ΠŸ―Νίη|‚§NΙgNqσMρŸcΒ_<\MK c=ώŸSόΗG•’μω ·Ώ-Sΰ΅”ΜΤ\Ύ’ϊ‹”·{λnjy¬·ςf%u¦t/kNR—FΗΥ³Ιλjπ/!ΧA*†Lύq–ώί²;π§&™ό?Fΰ’BfΚν·N±Ž’(ŒdV!v ‚Ι₯|.SDΆA«!%νΩ£'…\oΒH‡³@ω{^d²Ž0ξζΰYΖ_M dΘ·q1ί£m!sΩ€BN§‡Ν6Žƒτ)™‹5ΟΞνΗνe¬…ΦH΄±W#­E .I]‡“6Ηe›#Ίνφζγ.}Ηeoχ@fΡc‡žwΏ0ζη͝ά£―ƒ\ωΦGωΚƒέ5†•WΎLη°` ψ«Ϊ/x½W€ς$šBwyΒτšΩDUyΒUΜ ™{sίE4ό"ρK©θ3εq}μMΥϊ*=’ :Wy“ΕΩ+¦]`g xΘ›Ο˜ϊγRϊO™9Ρ«ΒΏ‹s¦§=ωŸ 6ω]ώΏžͺ΄1p7π 4Ϋ x ŒK&ςϊηDύmψλψΏj+ΉŒœ‚?ι©Θ¦όΧΑη#g½'&Δόρς3c²ŽaU€'ω³““Οό ώ†2 
ώ»Σ Φ§TdβΎΊyϋ* ‹J1Y‘>ΚωTt oΠI?όΓ5Z“0Z.6YZZξίBNΑ_ωϊ΄ςXSύ—λγΐϋπΉ8­8ί¦JέkKΘ_’–Ύ(άΖ*Οc”GEΪ¨[μ•Z§ύ Eώv9ŠŠΥΌάΈZ³_Ϊ"^³θwΏΓωΛ`›ρX―Β\§+κΙfτέL€qι·1KΓΩεL»Ί¨Fε°z²ν₯!ΪεΊαϋΏ6I#4žwγ«σ©!τζ:ϋόΏ"\’Η"Σω_“§ζϊiΡ₯šΰmόΟ“—UϊT#[₯"έ&…Fφ- PΨ‹ΚδYΫΗ5Ίž'ς©DaXψƒ(S.€vπ§LqΊ\VnΠq»ςX΄L¬ΞCN QΛ7q(υeπ+’Χ@I ΠqέΜάwVΨe3J³‡°œ―€6μ΅νσfP£0ΆœΞ ΩΊw±ŠrεΊ+‘§¦^…ͺJ.—Hgx±£eψnoΉ”]³Eƒb©‡]§\ž$)t ίH’ΝFy‹”J³ΆένυIθ₯6;i°‡5‰Κ[έv€]ν³yEΏ­!—QZ§"šγ¦»Ε2v |DΜσh¨6υFΎίt«τ­φΆΑίF1vs«.ξΠψυΩ ·1λa%ΑŒΓΒθyφ#|}¦?] ?L”ηΫNC”–_Α-$ΡrUΔ/Γ$,l3s&©όK§γΛζd»όQΏΗ³—ϋΌwώ< Λ"βLώŸ#θyd4Žyğ/U$Η•G[Σhχθ”:Έ‘SΦ βφ{Ίψ\‹[¨ΚΟlζi8‡ ΪfdH+Sω³/`aΝ;mL³§Q«,ήθΌ…I„ύ,}JΞυx<9ώ.CΗQ Υ=½σΊŽ΄θΣϋλ"}Ž‚‹mφΐ9υ‘’8{δΐž,Τ£ChꨱJ•¨Ί»…―ƒ ε š‰•ζŒΪ=,ς†?½ΰ~š5ƒxφξh³Mž€aλ~³Σ‰~±ΫK–†­­x¬«φΫύAu5¦WŸΪ]xoΘβ5λ~fopAu/0·w~ΏλΎσεjmχο―bΫ·,CyηΨ½“|`ΤΡ%YͺΡ-£uΝu  ιΖ­S‘΅ϋΞΠα©ήΆ½φΚ}Ο¬+<3―ΐ5Χ¦0…ήξ{žφΆΡ  ζΘΘηGΫ7ήμοKχ Χ½ν’tΞσγMυ+(\5X‹KqόΈžβ,cΝπςΎωDν8/σ½¨|ΎˆΦk‚` ιγYJ²™Όο}|«TΧκμθ.;ιΣΝn'.‚†ΏXΚ‚oŸΥIόιvχψώπ›ΝnP‘Β Λ’@ιc΅f8ΖE”i[ΎΝTͺάωα…ξ}‘i…mΛ€+©EΠΏpΆxΜώ¬9Αγϊκ#iσρ\ΌVΖΌΓTύg9Ψƒ€ žηωςωΣΝJςΝ"Χ;0“Η‘ξM±BΒ‚ηˆPλζ' yw ΌΕ7°³ΏαΙΛ7ϊή±Όφ·ΫgΧ^Ψζίΰΐ·ŸpΌΒGΏ­Ÿ₯Ήz²’Ω:*ΰΑooοš ‚z0η~τjδL€R7 ƒΕA·‡υ‘Qp’ί4&ΧΔ2ΑΑ};Ϋk£TzͺΏέV+Ή7iΨ₯κ¬-ΆlΟ+ˆΧ%χJώƒμ&₯Η=‡’fηxg(ŽΗS{§ό’νΌή8Ϋδ!Αψ˜dsϋ[λΰ1Τ}}ςίΠΑ—ΙߟδΏλΓΏŸu{Χϊ?Τ2;ςΏM¬IsωίηΑπ_”ρj­y”π?ΰ1•*zψ/·:{A“¬οhxΉk'?ΤY ˜Vχ˜Ώ¦ΥƒLx"$²M€•Γό²H™SβΆPΡ.aOͺ#ΊbX+˜Ζ³g(*UM›Eά£{81鱚hUΔ+¨Υ'ΗwBΉp\Άξ̐5Θ »½ΫMT jvΧccgΗ9Βe;»νεΫdΈ– °Egε•θWΊ"ž`ώqUNπφέWφΉχςι―Ν·Ε*Ν4ϋπ],ŸgΜ"±*`-ΟαηπDy wΝΗα~—™|šύ+ ›'ΕμχΏπuσυ–pβϊ­M“_yœΞώGΗ^FΆ~Ν~ΏΤχύ₯λξ·θ8Ύ¬€Υβ™œηΫΝFKO-Ιΰg˜ζο~§ηωσl€„ίδ³Ό’JŠ1ΛΆΙΌ€£8ϋγώοY ζ™n=έυΘ«t-7ε6>*Τθ Qη«Μ«οβuωYΌ¬Ώά©σu@Z©uo΅–ΥΓmnλcσ…g«Χšό?F€?ΞΏƒθΗX7ώΫ°§ψ―λΫ΄ΐœcL)}~^­R ―Ά™œpSy»}ΰφ€Ž[VόΧ.5]ύ|[Ώ~Lύ2|}爎ί»\Cέίϊ|ύτφ§΅Σύ kέτώ²β ›|eΧΉ―›ΎνΥjγN«΄ρ­³ΥΪ»—Oh΅Υ‹ε³Ϊod_€Β~­ξZ59­Έξ°‘ ‡$k ’ώ―Ό$Λ”c―" ^Ν νΙ3ρWη>υΊwΰ—0 %χ%_ΟUΊΡ‡Xy[·|°›Ωαžxόθ²qτΆ₯I} έ5‡v0@·δ”tΰšηCδ2¦σ? 
ό­«AΣΏŸΞbuγL2ιFΐΗ"φΟσΩν  iΦη8*p‰?³¦ό/#ΐΏŠG+ΈΛΉŠ•βEš½«ύί έϊΟΆαLώ#ΰ­ζžεufv^ΗjίψA0‘ύ/πό_έώg°ιό\ƒήOΣ„bΐιόΏώίt¦ϊcΰd»žλςί‡Le¨!hΨ£΄~ζν‚CgλM„δ#œΔ€Kς?Ϊ“ηυΰ_ΩΚ@’kλbwλΏΠ ώWΑ*§EΛ‘SΤιTl­{{ΣqΌάCΨω… λxM‰ϊˆϊΈ:JDΌ--ζχu»ϋκΈ&zΪKd ©aΞ‡Π[“ώjψr>œBkΗŽiNϊ«ΰ:€ΰΣΝZ}©=Φof³[Dΐ9›ΫoρV³P Ύ·η<άΌ‘\sΎ­zκ)ρQ?ΣΌ§²=7•βh³yιΏ[ώ³δcη=ΨΎUΒ.]-uQ+9Ϊ/ΕQς Tα²οQ1‡Ωtζ€³DΦΉ}Ώ+ά΄^PRTη^mJi=ΡD{Ν“(μ™]&Χ°Žͺ Φ–G&GίΘeφ؝‘’7Ž:Ž<­’d½€μ½Yy–‹L­/"λϋσݍ9Ωπ‚οΝ~ήξŸgθm cƒNω#bά ίΞςtφ$g@ŠΖTττΰ‡Ÿ4?¦F‡ο‹m–Α˜UK„ ΛbΆέά5·ΑsΉ^e ¬|>C=Rϊ”ΜΤ8wΣl‘ 7£vΞO₯wλ]&cΙsyW₯θ‰ͺӐιψΟΤΙ0“‘„αΓΚ’LsϊVεή‹0˜I μϋΩSΗΠΰΐ(|ΡΐĜŽ8O5†ήK™όϋ6Κz«τIBƒo«ΧeωUxIϊΓ¬ΔΈ\θ5Ζι“^ΐ\}^ε-žš“Κωέ즙˜9ωeD9ΒNΏν^SmΫύ­d½rψ`Μ@μΨ*ΰΑΠ`”Ι „5"μfgπŽLπSJU „u°]žδ7Eω [&κy8ɞοByk:Cϊ‰Θ^πΈ,‘ΨI’ˆ_ξΚΔΝ{ƒ©ΓΛΩΝΛΩUχα[so ;hΓΧxόΔ_r<Κp2WΡr5KΆxŠ0+s‰;f€"‹όά3Λ”ΗzέΖιlγΡS#ε’48EέG―zλT1Ѝͺe ψc3 e&bžΓ:+Π«M‘₯J\Yΐ]ςςVυnφο@‘Ÿέv+Η€2ΗΥD΅εΈΰD[!υK@p_ΚAθσ¨»ω“x l@²p'υω…c½ί;¬Θ~kXhό=H‘ΐηp±]―+΄Λ»βjͺH–ugΈtίΒ’bΞ„m_CB?T#Vο<©-(%ίHdΈŸ₯[€Œ sM<Ϋ έjԐ—Π &hΘρ„ιΩώρ_·ή«ΈUΧeBŸ9@₯[•C°β4ί Ο€Q€ΚVΊΝtΞΥH-kΩVνμ(Q@‚±DYš`8ρέMΉ_ζjΏ`”PnτΎΙρLξ΄ΈX Θν˜Θh½‰%vWξ‹Hθΐ‘£:6:HL8Ψ&ΝΟΘ(ΣG?‚υ)œH%……η Ο,+œ‰~£@­ά5α œ›§j₯«†Bφ8^έ.ΖΔYž2 3€&pθo½…#νFE‰ψς2Νy±Ν…ΧΊA:ΓbΊψ―Ϊ$IΊ7r… ;L Ύ Λ’ΑρΦhc7qΣΕQQ(`οέDώŸnκόMwsώσΜ€{€φ~«¬D{ΏχR7νέΥ»οŒπΞΒ`[ζώ Žq ώ¬4,j½OΛCρ›GΔ"ΕLgT½ιs½νir £}\¬6 κ}δ3²!Ύ#…Γ`/ήπ>τ‡#B0bkύ^ňΉŒC΅eΤKzoͺ]₯ίγΈΐΛ¨ΨΝ?–F–?΄α…X}ϊYcCo‚ŸΣκ§ύ)Μ†¬SœΓ£¨fΉ"έΌ ±Ώ»γ ,-”ΎΖ„;«τmbφ_e`t:>φfΝΞΧocΫσ†SqιΘQό%Z紇—Έ[φ©‡πέτχ€ωsΩαΟz‡ν‹›―@NΕγΛ‘hΑιω¦g{TU `;νΑ:ͺ;…Ν#ΕVqϊ +φoͺίήέ΄X½jς{«₯—r₯jA€b±›ε8ͺŸ›MΫ™΄ολΨιŸJ~‚‰qY¦8ρ4Qθ “‡’pΧόΝρ―όGΫ3ΆŠ₯<”αU£λ6oŒπΤ·pŒΣHœέ“zϋPW:ΑyΑ―ίΌκDΜK₯ S­`A•G~θ 6T1κMάQ“uυ³ΒV{ΈΎώ—ΩŒΩvώwςΌΖί?jOΙ3¨ΪΨιcyWψ‡R—€™½K”ΐ€―’x­σ₯ μ:ˆ,3e$K–w{ζ)ˆ§Ά’}ͺ_5[wΫ—D8>Cθl7rεEφςߎ³ŠnέEι\ιuoo΄ΉU₯˜΄IH}'τX@…Pίp©ΝΈΰάδ°}&©c»†-l—‡6a¦0ΝΠ’Μ η"ΐ {¬Ώlυ%œ»ŠUW‡ΣzͺSΩ(MHοz‘²΅mήd/•””›R0“Ο¦†Ψ–gJβY&γu8σBΟ,‹™CΣ₯”3ΈΤ [Ž|ΕR]°«ίΰ ™·γ œ;΄ΘΊD2G„%ΫrxΐM+tL Στ<Α-Ηu-Χπ-aQ/Žς)€ ? 
j[vΨ»kΉ«μΘ;‰¦JγξΩΛZ.!,2Έ2¬ώΉxN3`E•ͺυΠΦλ4‰xcL[σ]]Χ“\ΈqŒΠτYθ³@X4© "τ.ί•R„^ΰϋlFNΰ‡< Vΰ’3ΏwEPQmΙ q»œ¨~―ω$Ξς2{ΨnΤ«‹LιΊΌ μ‘Φ°ί"jΛ@’ΐλmΟ(fž8²lν2,-Ίƒ/`ΰΈ’c­8Ÿ–δΆaϋDV(™’Ž"''’SΛσ 70LΓ§–ΫY؎#₯Υ»€/<Ι£9–Έ96Υn=Α@=ί-9[΅b˜4U—+ω<Γ’E΄ ]8Ž%|._qέ°ΰΩ>»Ϊ₯=£>ΠhηCυκ™ Nυ.G衳摍x|ψ»ƒΫ.q04« ™o ξpΓt=—¦/“Οπ¦¦i…!μ>ΈHΔ7Έ”SθŠ†>(Τ΅>[°1šmΠ 8ΐηψ dαΪ¦ηψΣΐ…τAN#Μ•ΞΗΒ8VŒj%œ$ιZά1iΏͺ '$ƒΦ']Πγpτ΄ΙΡ±»‘ΩpΓ&0₯’˜ <q€3gL{’IΟ飐³€jGnκ“Π1Θ#`Κ;μτΠ{ °š!€Ι€{6 0kgΤ iφaΓ³$:αr uCQΧI•"Ο0|δ «K½χψ]Β‰kΓΘ<ΰΚ,ΰψmVΨ6-J κϋ~H€[₯ΜΎΙLΟ‚Ψ,ΰ8ΟpiΐŒ_ΫΧ›ηΓrO£%ψUμœZ…€”Ω½rcΩ»₯ŒuZΣΦΝ*σψξΆ²Ϊ«Ÿq‰ΑjKtω«™‚ 5j¬‰πbžΏhΜΎWcZ³‰aB„O*}J~Aλ‹yέω2Nύj‚ωΜυΕh•Κ…‹\·ΨŽh=Ο4-άΥ±†Χͺ7Iύ#v—Gί5 R3,θW†C¬f ³mΫ&'³γ]π2Οu@§.˜§ΘpΞ΄\΅Ηβμ–rOmί΄ΙΧ°¬±Nό―IΟζ4Σͺ™>ͺ€W(θ§»0γΉΜί”-Š,ΪΜ•~RίΝ›dY7ίςGΆΦ…―,W£ήvγΞst4½ΐ’ιόug±kmς³‰ψσxΞ—Π3ίv’Σ@ΰ EΕΛ<ΜΰcΈm^1ŒΚ ί?nΚνώΌ―ΐ†’PεNŸ±¨ϊΤε1޲f(‚œ„Rh{*‰τ ·ϊΪΡ>χ0υλΆm›Ώžu=L&jzpgοπt…™{1k—bμΛ£\<Ž{δ‘τγΣΆ ½tŠΌ\ΐ‡σψΧ@Ή–΄οeS€s,>Ϋ]’&=j’šVT΄!*?ƒNcσ]vpm™Ύ ΦD;6°ΑYpΰӘ£¬ανώΝΑ­B²)Pβp›š.PΡώkrΣጹ@U=ΰΚ\L—i ύΞ!θμ(£@{ύ~/qŒ@MLKHίCXΡΗΛπ]—‘Ϋ,$ΪΊ₯αJ‘™Ω’Μp=ί@WΫ0”‚ϋ¦ "€ί*PcοΆ6κ$KΡ[λ:Ύƒλ `”€_Ά}NΕ–f`αόǐ¦gy°ΦhψwνPXV8zžΟ·α琬XΟ 4Uœ1Wυζ{Ο—„8ΎΠ΄Ψ΅άaAΘΡ­_9ŸΊΤρκ—@Ίa²Μƒ{&Pt'јIzŒ“9<Η–δά9©CσkΤπ‰Ι\šTΨ–eΫwl‚8² J1ΨΓh˜fnΰωv½ αeΒ©kGŸΤ‹Dώ½¬Lη9Δ΅‰(ˆΒ°γάSλ»φTZΟ&/ΠΆU’§9t ˜γ`^οU*Σ&οσή>ΑΆi{.ςζ&³ί° yi3jπPΑ aΑξw% ?^€}" &€‰–f`]?`)'~Πλ p:ͺι> Y%cp&φSv42yζΞlΔΧιn_ήξ‹Μ·dΰJDaZžƒ[$~Θέΐς  Τ"π βφ€A€Β“ΆaSΟ²v€.7'±³8ŸΐuέVογqJ@ξ"ž ΏΑa²n(|]s›;„†\ΐv]+πBΣu„p=j˜V›Ϋ>€κΓΔpk‘©R˜ε}›€Ώρf˜ω-ξ³Ψ λ.Β₯7‘7X^PŸPέΗ©ηέχ…ΙAh%šcFl ψ!}ƒΪ?ƒΪ£εΖ^αvΔΟΰ κ=‡­ΞΠ1‘·Θ yvΑη„Zΐ p` CΛbΆl4Σ1Mxΐ`*K”ςΉ 8V†aΘ<JΪIΏ_Ί+³ι–Ί#³¬Ό_@HΩήJΓ«ΠΑ©-aτn€1*7 ?Μցa₯>Π O x Ψ ΐ5άτ ΰ€€Ρ·ŽΝV+ωyυœ\†@ώ}+;΄rh)CbBŠξΡ@Maΐ“Ž‹"°ƒ&°όph@1Cΰ–mϊR†ΔcΤ#Ύτ©'Δ Χ“›T¬¬ƒStΧόΕ—˜(‰ŠsFχωήvWt¨c Ξτ‘mΣσί …αΐ UnZ&aK—H’€‰f‘cX™Ίk”16ίφœ‹cKΤ σl,™ςˆJΓ0—ϊW.\nΉJ1yr=a›u6ΪπQ׎cϊά6Έe JX+°˜Ε@μD―KOΆSš–ΑlN]>MαvtΝ£–cœ…[†άf}j gπm&l$‡qιΑήα“’8°°Ύm2ΣΖZ‘@Ν,ƒ4auΰœΒO*)2Άl³σŒGΧΑ>(ί‘^ΰy!χˆIA” 1Ψ•ΐ’-ιsF}W„Žk†ήΰ”ΫΒ ΜŽ"εyλ°CΜg.ΓΞ/₯ν-nΞΪ8±)Eƒ¬ηPΞƒΗ-Ϋ„Y·lΖYhy@“\ψ]f‡>3Pu!m'‚bKC½²s¦aε‘Ψw“<)7τEk ‘@ <W›3Ξ\Ο¦#y6½ .³ˆνHrθΓu\β:6ΊΗζp€%φ_e>4QΜ-Ϋρ[°†7w[.η–Ο…ά Α 0}[LΧDα½ξMx.`bœYžž`ψΪ―‹j›·ΝYΦΆ 
KΔjίρ”†1Σΐ™SƒψΎE€ ˜nHM |ΰ j1sqψT†ΠΘΆΘkίΊ±Λ}<Hϋ²q§z5―ΨJυ#²t;Ήν«ΆΧΛt›zζά9ήΰnρ(Θ9ά1;e­pΫΡ‡PΫ7ϐ"ڜ9°γžήΐΛΉvΘά€ς˜NkίΏ:YβιώŠώZh―αζa9ΧI'΅ΕMΐ³Φ &a–.;ΊνΑ—Ν€ΓΑŒbΨ–.LΆιΎd„GΖ,Η&0αψ* Σ ΰM„tl_ιΞRΧ–J†>4­‚Ό. ~½ ,pθ6Ζ²š@νκ?@ε\ ξ9¨κr…> Ό‹Ό`>›φbέUyήέΰZ\Ου<Χ'"΄} D0 ‡ ²³ LΛ₯Δ₯–ΜJΰrM…G lsŒβ7m ˜n{Π‰£²―C1$ptΠ;ρ|b‘„tB &χ=Έε»&'ΓάC sψFhψά’Δ€-νά¬QXζ„~wS’EMαƒτ‚u(lΑ0$ΜR["BΖ +4C;z―†" –k kšžΤζΉg°μjΟέΑ‘EΘCΈΒu@Ψ έ‰˜ ζ9aˆ™³|/t-0ύ8u©t(l9Η0= ˆ΅u^ τΡΐοή™―3 XΨF`@…ϋ°©˜šŠ4,ΰΉ‰H“€2ΠΦό Τ―ω†mς½SΠLΌd .TIΫ³ Ά’c`™g9ΞΈ*κ„θ1HΓu]KΟαΈ°q© ΰ`!ήRΕ]φEδΗΝ{ψFηΘ =7ΒΈδ.ΘPpb’η3Η§ΐ2'έΑx!Βς©&Β‘>pˁ%Ω™rτy³Šψ\ΌΏΑΪΑ€|`ρ)e’X$τ¦„3ν œμΈ§έ.“b W“F؏ρ|νŠ8ƒS8Σ΄σαώ rD=€D³«οsXΦδ}³Vψ@ν‘Iΰ‚0{μ^43|*B%;DJ[Ψ¦`Δ—p`gx,τΉi9£ˆΠ Cΐ† )uCOœ½Gv9.[‘ή‚‚gzν ʝφΜξ+βv^Τ΅j’r\aTή 8*žΒgA<wΌφή#剐Ύg‡Lrn¦tΫP1˜„S₯ u‰# ¬M'ΕφH`ΐၬ:{ZŸvR―Sωƒœcžwgη₯‹Z;R˜©Xyι†ΐήΑ.f.~ΰο ζΉ.e<αo DP O¬=ρή=Q'ΑΈ)@V ¨7—†εϊ!@…“  s¦Rϋ;˜#?„94΄©$uv-’M€-QπŸQΒθY>VΜ)ήέ©˜9h”c€ g }™ &FlaΊ!°―Ύe[މΊ[ŽIe ΨΙ U+@ BS'>θΎ‘εjΨηH˜Ά]/ EΧ7Ρά C—PΧα Γ±‘zCΐu2xέ΅δَ•ΧVΫδ²³―μ•p:Ϊδ±³(ΞΠ Έjβ,€¦a-|‡Δ²LΑΰP\CŠ ξpœ7 ·δΎ ςΚ!‚-«jΈm…ΙΠX&œΚ<ι»<†EE@™t=b„wmΛ3ΐXΈ WΒ,ΨΈ,3dΌΧS˜η+΅?1ακνž›Y}  2…α ˜ͺk>wBK8ΐΥώΣ–ŽΕ€λω6@v¦ΜEhϊ‡·^l³Xy3Ά¦ρTΡ‚[άuBn†5<ΣCΆ"΅\ Df3€IΞ"Œβ“ιYvΐ,Λr,`ϋΑ·Ÿξώ {ζa`<‰žηωςY₯SnoΰΑσΦZ€>)•‘‹ήΚ°e}TaM ŠI)έ@Xυ<8aQŽ=Τ‡˜!ˆLά~ΧLa>°₯/ή)Σ!HdMŒHr(§†rΆ X €0 Pχ=BDš %JK2ΐ[Vτ8΄™σ…|.2ήΪΕΓ§WΆBΚMŽϋΥ€ΐ˜€„„#ΰ†νίNlb€(Σdp`=i3SΫ\a;ζΑρkpε‹G™λΊ εΓΑpδ@ώΰŽe.:΄PfX Š8‘mzδzΔ΅θ‰nRΈ…Ϋ’kKšAsΊ™Hr΄Š0ƒπΠc'tc°Σ1EΟ<Γ4Ρ-€yTΪ8rΞŁRk^FlΛ§…*¨ΈΨ&™δb…Ε鎜”-¦’θJ-thέ"ρ=ΰ¬@&„γ!₯k;ΜΗω0gίήΜεΘv(`¦– ^` AΰŽgΨ6°ΪςΐzΖΟ†·ά:Ν ΙNŠXœ{>χ}A₯…ΜΞ•AT³¬tΐ`G:0+¬IΏXŽ~ΎοξSί«rV7t­Α2Uu³LΗXύΜ£δ‘q£ΰyσF™Θλψv8ΰΦώΛ€,ΈαΗγΓΞYEΕ Α OΗsmΒ<`γ01Ε „΄]Sqšφρem/aΉ<§&ίu!y‡™;ŽX8AΊΪF|ζρ‘ηžπ\`τL_€ŒιQUμ@I©+’Ύγ=΅Žˆέnyχ‰€\iy! 
VΓ㦏ιfύdΗ4ώφήtΉ‘$Η½RΦΏξ΅i΅ωΎ»klΕά·ΣsΞΰEχηΧ/.(R³ΞΩZpΟ(Υz6²ͺΐpM)–š”!3~ΗΒΰ ]ϊFͺ†A³g#Φςμc“Έ|Τ:  β¬ͺMD °}Ξ΅δZ³]‰NxτRΊ!5UΚ*ΠΖήaBS™œQχΨνΒ=hRJνλΐ"œL’9²‚’ςrυZ–lYqqRBžzΘΦι' š 0`Α₯MΝ9ΖΦC•n£¨υΖ¨Έ>ο;­^N&ρ|{ύFΧΟAθ–6`IsΐŠ):{‹zΟ%%ύ۝άJιKς]Be.蘌J3 pRΊηZ²’ά\4EΑ2WΠκ@£RK- €‹jU½4¨Rΐ=^αMo‘Ζu‰M.Q΅ΐαH2¬\†n2δ‹έ4α~Ǟ_?ΜGjΚeJψR.__žώzΌ~ϋlώ½Τ+Μ IΊN¦$‘@\8ώθjHtΟ8Ε&bI=( jπA$0¬k· Κτ‰M­ryj…os1j‘( ϋAw ™:&DšˆΕš;Œ‰šžŸyPώφΐo[έΠΩq~4$αγΕ₯›Mu!ΚΕ…»(υk‚‚Ι8Ή¨ΥfΆίSΚvh ›'8s%5—ΝΪίSAq'}εl˜zgΐŠHi'RVΙΊe (œœt# EW‚뒍ηŒά ‹τ^¦8Χ{pόθnρGΗ€ΣωT XΎχ ;€Δ§˜€‘rf#qΦ†uΔHαΟΩ&ψ>ΨΞ]㠍ΜΛ·νβ GŠ»αPCθ‰°Ž˜TS]7ΙζS,)‚Ι瞁CJ/΅1Μ”πœΚώ»{{ΈΏžbΊ#ηΤGžrας™ž} ρΛν—ŸŠ–¦:ΙQ5@₯†uU‰{r,χΕ3ayUIͺ²QRχdjg υ¨£Ϋ^β‹ΜςϋσzDΏ@p?ͺG])―…ιR4Τ˜φ“H^¬TρΉ;’"μ$eœ!lŸβδN ζ»Ψ‘ώiRδΣ»λAειΒ‘^C΄Α6]HrΧZΤJ›+c(ͺy[ X|jψΠ]–λe‰²8+2)§˜9ο+—,-N?lκ`ER—/ύ’}'”ΣΡI Ώ#*έλ€~14˜δbΆ^SΙ9>™΅Άυ΅uE«£ͺέ>{–Ζ)Q£ŠΨ‡JςPͺ―X60io@Ξ|Τ© |€ ‡l^WR•*ΥV¬t (ωϊ³°•Ώv'`ˆˆ|ΞπJΕoCΔΦ •”iψc€“¦¬T5U\aWS}3U1Xυά^^“―Qλ‹₯ΓF.ΆWψΥ jMΐJjŽα^SΔSJΚH1‹ΆKά ΦxΓ†Γ9·r€‡M·A‚Iωάq³Ο:Η½ζnow/\θP|Ϋq$N|"IgέΌtosψ+μΫJΚ>2v„’,Aί€ΐ€š·g‹Ιζj +‘¨“ŠZƒΐθ¬ΰαD}ήuΫηXϊCyήvϋ~€[Ÿ;ŠΒΓk?ž^οV/OCΜ†­ώfjxόΎι;e"Y θλtΠ­θ(D΄ ΰG“†›°…SŽ.Ν‘ωWWν@τ†Tm <9λΰ+;’ΰ[:Ωfl TrΒ7Ξ„ΐ'λaΨ΅ΚΊΈ4 ι7Eτσ«33Όε AόΊN=ωΰug­ƒΙΊΒuW'‰€X7γΰ·χΒ)Σ™» S W^ž^]JΊ|ίο™˜SΙώ‡Ί—\EΞψυ'θVeT°|;6’,(ίΈ€@Η ίD)|ά`k1ƒ`)h=Ν4ΏΔΎ]fΜπβχΥγ78š· ϋ Ώ,Ÿ±j|zf7Ζτκ³*²δ"UαE£œΊΓk)7ešθaΒ~,§RMM·”‰“o˜bΠυͺ•Φ_žΥΉΈΠαΌνυ*:Iα'Χκκ“8Τ6Kψ ¨άW@‹γλ‚"―RJUtΒJ‹&]XlΓν…Rο8΅ πΡΝΆΟ|sΒ»Ά³§φӍ·*FΝεaη…?"aΨςzdžwΔ~hyυ0ΐ›ZΘΨ^›3؞JuΣM„›uΤΐbΑhM1Ά‘²T>uη=ΰ!H_ nŽΙl|νŸd… ŠL†¦μ4‡jUpΎ‹ΠqυΑYiΰœ¬θ ƒ­)˜WΙMUπšŸ‹” |θŽη₯oήAΰ’„qZŒ£±…«+έ1xg-R&™λ`=\€sfΆ"ήX$.†‹σ΄=;؏dΔΛΣιΏQjai «T@α$}Ž5Γιmδ@ϊψ²bMN’™A^TΠͺκ•ΑΑUόΉZν――w_jŸŸ©‡λΧewψ€b2ΨZ₯P@ζ©;«|EΩ»$Υ–αDΟϋΨ5˜ΉFfOYn›ί»Βυ£Q`™έJjlv[9fΦR½/)YXlk’%œΨf•ˆtyaέJΓμ@ΈΏtΖ:Ο―ΥΗβΔŽςβ%y.XΉ~€Ζ6nΙΐΞΐ6Y,X„ύ¬MŸΨσΖi0oŽμΕ]FεΓΉΫωβ>σ“σχhfδœ/ΫAμ₯Γ\_k«j,ƒ§)K΅”Ξ^R+›4ΉˆXymœρΨν>·L‰ΆΝysιΨΏ_–ψ1κ9ςρLCϋ››ΓkžkdΟ$"_&-αΑξRΟk% ΤnKOΨ—FlΎ €r ͺΌ0ΥΌΉ£§v’ά±F›'gξŸΪχ[Ή]Φ.Έ7.Άζ &ΑŠφP „-ΩMΓdUο”j5ΗTLRΰ¨I7%‰ν@ Σw΅η΄»?hδ1 nρB7]s­ ’©αWY—\³ˆπΩΈ η΄ *Εlπ³υ–`£›‰Z#Ή67ΎΧηI.hWΟ½—ʝLͺΒ―ΔP|Ξ—zΎ ΣR| S‹¬²€qƒ΅z HΥΩ²β@™ΜLKυO>0;q.cŸ’λ W C qoVXχ(Ζ£ έXΞb 
5w$isΨ%£5hʎ;NYŸΏ}αη—sK1ξ~(“Xcq}Ίn\ΐΙΞ‰f"6‚ΥY{ lMΦ§±I| ψHεΠ΅pC—§΅£rΎάΛΜ Y2η]ΠΦ@s”¨Όχ‹tμξ©ΐ>©ά₯'ΉΚ³λ E.l­ΨαΘ`†teψd½šκ\·<*P‚~#ϊ  9`4&+Q+(1I{2p0Φ¦•œ ‚†dΆ’f‡π|Ώk‘.}ΉmYdŠ.FH‰ι‰’’\KU+έ• UŽ=‹ΐ|ΧΙζΔKΉX₯Ω§ΜΆέπ.oEY‚εβ N"J‡ΞX†'Ν&CΗΡ½8K(μ*\ k’‹TŽη>ίυνoΕ)|γΗΛ•Φ%NΥ žyτΒήgΎα!^ŽYηi½}iSυΕΛWZΒφg ₯υήηbα Ί(R€sj¬`±ΑγΌΘ ”hD!ΜΪPΨLΟνgnρTfξη,όρŸoœΙ|7«^Ό[Κ€RΗCnt½·°Ζ±UD8ό™{Ά6k₯“’υ;ΆY65ΓΔΝt!πχ$"aεΤ(ΗΖlρξc] €Ό΅Ι…LŽ,‹B· Ήˆ 2Xh*M*ξ υ9$ψ"P‘’ζΩNοΥk·‹ p°fTqΔqΌ#(š΄!Μ:Η ]4Θͺ‚SeiηF `ιk†ΖRΝΙ=Ѝ%γ,½ΙbυΕDί”Ψ\š-νqΒ£‘:θJ­5R‘"’² ΔXx¦ebJrSίE ϊu ΡΊατžΪyαιΉόχ;ί Rw¨‚ϊά†€ϊττ}Lο‘&ŠvΙξKΰ°ΩžSSΖDu­•.QšΎ³@0C‘‘–kώŒIκŠg"ζEz’Κ0θέάΌ»—§Η§ύ4½ƒv!³Y;ug&³JΠcνYήοοιι―Η›&–p§cρ—ΪzlR4X+μΛ1·Βευύ…RύσJ6cκ±8][„³"χœ½Κ€δ-"WJ”†α™»mJGM9υΰA’Φ‘Ž—zπP“™Όzζ—~Oςψv#jZG˜‡ώΎΓτI§Ÿ?Ή›«aχΨςδήJ9„Ν••’$EΟHΗΓjŠ%̞qΎ'W:X€΄υd”!ΩŚBΉͺμv’―έI9PΔδkŽŠ`ιΨί+ γ”ˆΊι¦uσ:€Μ[ΪΈ/˜„^[ _Hϋl€~λς‹w:Ξ©*X‹ηI%ΫΘفτΥ3θ_1ΒWEF΄Ή›d(θTΕ F'RšgΗχς―Ž΅‚I*°Η νΪƒρΤζ%YW <­ [8Θς›W—gΑωΎšΘ[w‹«yϋμΌςΙ₯AŽ `Œc™XΔ[[ˆEb9"Ω0Gέv‹JZšš·6'?ΆC돎q+ͺOOEb%|ΐš¦{hΰψπYXXΕNi ŸΦ³χΑβˆgΌά3‹υ₯Rt‹3g˜ξ7LθχΤ‘?Œ΄;jμK—ο€Q{aGίȚl ά"‰=W$‹³JVgLkα@i|J€ Η;ϊ} €ΥΚ=Ύmk NΥΞΏrη]Ό¬*wιΕ)‚UTDš)ξτQΠj-w\ξΦ%ge(μΐ΅Ψ 6ϊKUΑ«—Υ™ƒίFΎp³ι€Ξνβέ²…©―?Φ… ζ¬+Β9Ή<8s'a' myΆχsΣ~¨,?ΈέŽc§€ιŽοοoψ΅•g>ΌM+yΖΨ–ΎΚ`ψrμρ`tΰfuP.έ›\²‰'©œ‘"IWl·±ŽD΅λž…ωψΉ ηz_» ©ν8Y΅(<°‰-­Š€c##χ/·ΞˆώxΚ™‹--[θΎVΠ>τ¨{|γoךnωKGρU‘1ςΰ&„3δΦ.»Ž’«α;(e‡[o!€|g…Εa*β·£―'kΛΞΑ);ο‘κΕus%π±Šέ%†SNƒVΎˆdti6_£1„-Š…e“a’ΰ‘uΐ2½S˜Iε;‰ ?+tΖ&Otχvυ[X‘Κs„έMΕΙνΈ(cuP ž)#t±ι”p ΎwUΓ–γ₯ω…«ζΟlŒΓ ΪΕ}ΫΔ;-Ε* +©5YX˜0h"¦1²&ϋ”MbhMΗ,žΘ;γ„vc†#Λ΅ΈΈJ² S9ί΅Υΐ“θ81ϋ ’lUN9™β]‹Ω‰Τ |mn—aωΝŒ+R±JŸT‡ΟΙ*ΑεlΏZœe¬OEδ;«yπ8ΡΨrYz€’Ÿ?^=Α66¬@νΒmxΈκt¦λ‚ΣAψ&ωΜ©ΉθMβΒΒ„5u!ΐM³KΉ5—Ύ„dd#°Ό€¨jaΙ ‘•ξ n’{b’”$ C’*š™ˆw:›½€qjͺ8$~Ϊ|Θž3ΘΧΧϋρ]©]:μ4©μ•τ"fγΓ°ͺΡ)'έζ €€7p’4c‘•Q:E’δγ™dΥ½¬‹Q_Γύ6p{Μ`?m?πψTmdE―­1(ς-Φ1HύpΆN‹+­E‚ΡV —&_ΑUDηή'έΚαw4q–ΔΡAΌͺNhΙ(ω—ς”υύςZΡFG ϊμtΐBƒŠγkυ’ΉmœΣ`#Ζяδ‹Χ)—’cσ½ο>ŸZrb­Ζ‰3ω_ay%ИUd¨G'9SΚ•‡²( wKZIΈ1ΐQhΛ @P*Ύ½T h:‰γ“hvρςΤΝ§z½—Ÿ«Γ2ŠYς!„Γ…‹\£Θs(““ !YM"5eΐ‘!§¨JγΞ’Μύ|z{~»ύσM˕镏‘·"η +>5_€‹X§^ΰŽ¨Ήΐ Š΅WΊqΟZDN+IˆωKžG^aΊ•Ή·f΄LKG ¬‰I$­>ADΘŽ*KˆWΔ©ŒœΰR‹tzΔχΩ±–­γKΗtFΩRΠόαؐWνΆΤ§—·s΅M>ΠύDΤi&ρQ~t”C­εύ«DΏνY€’iψƒGλ 
†οΪϊ9'Ν ^5β¨*ψeν’θ!FπYΣmΆΙJΌΠ• ƒf[α€t"gj8·πn­;(¨ϋ, ;}n>ο^G°‹Λ{PŒΞ•¦LΖq70b'πΤ1X@†έρ-*„hγ‘τhŒEΟΥUo΅qΦΣ±Ιη>ΈVΎ=Ξ\Gο<”©±‹ΧΒΒΈŸDηΑμ:ψΓήB•φ·e:XG6ˆ9: mDν₯ƞ٣zofζϋUο„bΟΩ/owΎόκ2u)bZΐπ .Ψ=HR ΈΟ"dn\―=ΐšR”}ΣΌ )uΫDE{Ξ6–½z«\«•Vpλ’υbΑ ΕUγ_Ϊ:‘‘‡z ‘ 51₯βΩ„ -ΈΌ{šm’ztζ χ₯δΐ~ƒΣπk-u `¨τΝκl²Ι€gz_Ίd;υ˜2·qΒO’Δσ¬αωqŒr=½$ύ•©{*¦‹qu,€&›š•θs{Q©φ"ϋ§”τ&i6`Kqρ^ϊu|EΙfπ>\"»ψM~w œ˜³‘!%y£wbjEοjP"…½Q‡$€a]²pΓ)Λ~:–ϊήθζνeΕΗΗ6uk]˜p„ta,΅ ΐxKP’·'χΣ€ΐΰΒURΞrRr±³;6K―!S MC‘}}υ‘ITy;Φ,λΰ}Χ^לk’L`iq_“t”R!` ψŽ’VΒ &RΏΨb―Β‡.ͺ ΅Ζ'ύΐoεW’>?δΪ“α]mpΚuγ’bNCΡ£δ_š˜‰Αš$τ`”ΰ;Α%Lj%ΌφA3Ι CžΨώλ]ΉΡ¬EꎌϋνΫKyΎ…Ό–ΆY.D°BΨ₯ƒFs€’eΒήΖdʐtŒkL޳hR(κ4γθ«LuυΆΨΉ)˜ο]£εK΄{ ΪXιY"αk»Γ©7Ν'8Dδ€A'}πΰ³K{—YΟ₯άυ[¬θϊβ|~p)ώΧd«~1hx‰σc›*² Οd}ΓΏeΨF‹\¦σ.Š^–’°rjδ$Lς° `Χ|?μicv ²‰rK™B7d|R Ά}ž0Άn ’ΨTvHΗnV$=V½ §Wχ€ υζWΎΦ f8ΥΜ؞!N0γk€δγδ†>θEj'Νί€YgΚΨψΫ»OŸέΧΥ³dΒ+·uΚ<ΔμK‡E–,Ηα‘›ƒk–2©―ib°\lubΥD\/Ζͺm‡2Eδ`Ω’™λcπΩάσΪI©&XΤΤ΄Χ@ΆΰρΩΚϊh«]Π"ξ%Oπ΄‘cσ'Ρ].X£–‰gIΗg°qΤCdιͺVΝΖ;ιiΠρΨΦ Ί€Ό`ρ…Υ’h†/ΐ5΅J0*Κ΄ά²μ‘«ΖτžξŸWο― ίV<ψ‘Yg―<@ŽržlŽηBtΤjΒ†ιŒΈ$G"e.šŒ-Ƙ%}`:ώMε­τ—MΜδ#v²›V>w”žސ4[;8DK/ ±QΪ'$Εƒ­ Ύ-κzΉ›3ψASVZNWv ΈλEΒΕaΰͺxMž««~a «½Ώm4^.ŠMh“ε.’O½‚nΙr#-_’/Άˆ£H{Uΰ:ΉR­nσγx{ϋy[^_ωEtδGΕΟΛ3x ,φr¬+ŽΌ/ „Η%—’B#Ž’6Ή©X“8©‡χ-RŽqFfϊuu+}yΨ΄Z"œ[·-~Α/lzρν†οηχθzf>―žR›M€Ή©Ωc₯a1S͜؁“dΓμ€ρ₯ŏMΟYbΑ3Φ+ΨΦi,~DΘsvΰ"xΧn$‘)°τνπYΊ'ΨP +kl˜XιΞŒΟExvίΰ+^(d¦¨I]βtcί£e?£Μςk'B’Θ!eΥ{N Na ¬Y8’ΤΊ$πjΤ­(P―μ΅05τ5Ν«™ξ‡£Έ)ooη%ɟ-s?ύ'=[EWΙ#‡Ρ‘Ϋ–“t©@ΩΊ @όj0M1ϋ(bχΞ«xVW­—rϊ³u8'—ΟŐ‘;"©KΛQνR€)7΄bγ}aίl#Ή‘<Εήu©M4γEA§htΏφnŒύ?NŽcΣ»ύπ„/žήRΒKLHˆj‘D,ΈΝ jθH'+VZΈ›Υ$Κt!UR’2NČΥ–ή“š\ƒ/F)G΅Β“ŠQΑx)Ψ¬ΪD|Ήc4ΐŠ gS}–B¦*¦Ά“ϋ±’γϋξvζ™—λ–έσŸΦχoΟϋΠM ΐΡΛΔΉ?ωϊp}ρFζ΄;›Œ€bΑΐJΕh’‹Ϊ”j 4OκΙWφ―š2ͺΖΔ!_œα±_Ό:ͺ#1‹k“Ω ΙfŽtŠ>K+LηΨU}”ŒηKρXX‚—^ΖC€F­Ή°ε^φNϊNbλI‘Λ£±<ο] ˾γ$=Βg°° ς Γή[ γΠΩ^’…-Œ>Ĝkˆ 6ˆ`.OOΔeƒό)GψκšWœζ%³J‘hχΐTƒ“šββ3]eUEΨ1ψκCŠ{“f{6КΊΣνΐ†.ŸΔZδΊ+;MΙτ‚3ιA»΄¨ †¨I™( ,3ΧTρ£βlt]Z½Β£]8„k(,Ψ$ωM•ΗΠαœr5t–Τs€A-<ΕλU°ΏΣΐxH* ζ‡πΘ7νΆέΏΏ~ηŸW-)!A{ŠZ.}Z"Jƒψ!–ΓΦU-ŠtΑΒτ2[η€PmΒ8Ίσ³γxzWΕΕUoΌδ΄EnuΐˆlNƒ%šͺLZ77υI$ wI³?)ΊΫx<œ9ν/₯ρψ†Κ-―EΒΝpς-zκdu C?ͺ^2ΏΙ΄`η€CBU­ƒUPn 9#Su«ςpΨΝ`ZβaΫρuΔc:/χ―7―Οεq'TžoΦ•lΫŠρΛ·kΊ=υS‰2α½Ό·!qgκ-ΓٟόΙsyyε©Όςλλκiς)7O?»FIYqqKJγ‘ΙΑΎΈZaόZ’¬§\ 
άΏsΎNŒ~°Ϊj€Ξ”{³>Tο©ι»ηέ²E-²…ϋ™,C΄;© ΚΝρjwm§ψqxεΉ}»hΦβF ΖΛGΜ,²σNξ)s&ΐζZkE:U ’|iR–€ώŠpœ¦•§γ•Άϋdw‡Ύ™ °;iΓ’Ωωΐ‘½_Ό“—k’#—Ήεh₯saνΡψΪ-Œ’§‚(νDιQύ ³ 7™t–&3Σ&σ3Œ;½W.ψςoΰΰh½tΆjYΊΗw£½•šU΄‹5Gή‹† Τ>4'έ‹«=JΗŠ.xψ—95πqJάςάYU8@“Q$x¨X©‘+$K"‘€ƒδ)κξCΡοJ=Iφs€σΉδ^‘PΞy`KRižb`Crφ₯d(bοF§9Q3ήƒ±SΕ€Ge3@NԜ<Βwνκι{’³ΫίeΓ-ύ1Iz!ΛvxzιfΟ[)βΜr£Bΐθ!SΪN‡/;―wΧΧ„΅ΐΡ,:¨‘8:Μyǁ4„–rεQ0δUi9Dθ*eΊk$Vzr”§§8±kkϋθP†ΐV X$ŒΑφH$’&­TU€D₯bάX^“[tͺsbμq—tX`1.έ­ϊΫΥ7n„sDoI9‰4’‡1*IΔ(ΦFΞxΈσΧΜxΕΈs΅RΥͺNΧT}τ†&ώοχ ϋΪ6Χ܌ςDψΞ{‹¨—?Φ€‰YT„fJ_@‹Αͺ•i ~Ζ‰g­ΩΧLN°ΝͺEL6ψΜtLMΑηkxeτΪΆφV(πHαΉ9ΫΛΨΩϊεSl“νpΘ6c&€ ΅4RS.b#ΐν²SMξ<Θ*°m²½Ÿ’ςGGωώ:–‡]ήπy+m0’$³5·Lά Ly†‡)λΗΞa;ΙΩ’(Ό7!&`± ͺv&[̎]Δ#χ„_ώFWΕ’‚wΚ’HzώHΐO„8%ƒΑ4MI±–ΰϊr*xςΕ‚†)‘»'9Ξεu΅Q:lσϊσρ­ό81ψOYέΡ΅Λβ «(Ϊ^“dzΥ caD³ΤˆŒ;³εφIήP½ ΥΰΑ4€μ{όˆ6πyCέΌkΔ3όβ :Glτ”€}ƒ73χΪ„έ W[ρZcί«–“Υ%ŠN«ˆIX©LKΉώcnOo|#šSSm$7iy'u¦mρ±_έi¨~νζ&Ψρ€±υΌ?€«,0’&zžL ΧΞUBΉ8FE€ο-•—€Ξψ₯Φ—ΓΆv›ΖH'ΊΤ——oOŸ_}φ{Ύ’ς% ωΗΠUk“gtΏz~ώy+­V^άU»+|?•…4h(Nάδ$ΰ‹§zΨGρI²³ά}vψΨ~χ©M°γΑ‡qU"3°³ΰύ˜u+`ŽI<ΎΤ‰Kγ·8¬Ψi•˜ιD29ς#C—XΟΩ―Ω~¬»έ‰w_ίΩΑ}jGkvΖÏρρœ‚ύ‡σ\[8SŸ=k_>·ΧφδΑ°4Χφ„²{°Ή$}·)ΡkθQc— ϊ$%EΧ€A’½P˜ι^BpVZυ²k)Μ7ν:>81₯W/Μ`W}¬ΖWgqΘ³Τ'©Ρν&Αα—.I½‘Až}NΤΈAK8>-U8ί9™±+%ΰεΧ•Œ”ZΟ亝q­{ZΖtβBωϊΣαJρl]%E`ΞBe£‹’Α+=!ΑdR*!*ξ5Χb«δ q0,ς <Σk|ο:ώς+ουcNόΰžπΛΤ†ά†Χι»π—UΉ_ύΝGnΓ„ @Ξ›ΤΙ_°PΫ+ώλ―—*5‡Τ՝\&Tι6έbn%)Ÿ7Α§(±‡˜’Ž Ϋ6΄lŒθff΄!ο©T†ΩΊt‚—όϊ³ΤK‘jŸΐ8λ֐deYeƒ‹&ϊrθQεj)GLf(Ψ|χλ[užfR/`ΫM2₯Ά«K3Šκ₯C+7Ϊό?ΞϋŸ|\Λο++k„q0γC₯ρŸ«Ώ/:]—’ζλowΘηχŸ mο€FΧϋjpΏδq·/OΗΤβcΏΑŒ'³g~ƒ[Χ―Eη&NŠΨXΆ˜ά¬o–“¨w€X¨Eœv8³wΨw±Ξ„ΜŸ@9ψζυ¨’7ˆ²|=™H`)ζ½Ώ^ÈJ 9ώρέφ7vp›ώ׊ήξΎ²€‹Œο7¬iŽ$ ¦„υ 5USu.ΒV₯©ccώQτMt±©†¦}£ΤΙ›j]ύ2TΈ,gλ`vΦΡ‚ί0;˜˜Π΅( ͺμ\/©Ϋ ξΖΦδr‰ σΖ ΰήEΡ/ρsš₯¬©.γ¬ρ|X²ίΰZVΝkνmk>™ΤδκΗ 2‘ _š«¨hm@;e²π½ΛΊ·σ΄ωΓWM#›±[•Ξ™ LP­©U‰Mζ›α2\Z9dM’ΌGΊzB6§]ΰ—ΜΗy»x τ?IΕ›¬€+kV’ͺM¦Q ιοΧ#6φƒ§Ω9ŽΡi Ό@䕃όυΒ—YΓ|ύAFU]e¬ΉJ)Ι'>ζ€η0£6R)-{ΕόW$θ‡–+`~Bžaν~Υ ΌώV^Ύm4K.…­›'ω –Η.ΕξΕ’ψβeɐΕY4*J#ί₯l΄Χθΐ\™Ξ°G>…™¦Ψ§Λ$ρߏ₯8{RΦIΗΏΑ― %o H§&UE?•A’W.—u ΅d‹©’‘μt΅Κc–b3wςΗxιοΟίή™KˆξιΤξY2ϋi˜Ÿ^δ •·ΥΣωΘβσψύ4 SΫgiK,•NuΘu5ΩeT*Ύ`{΄¦δΊeαΓNzqyU=“Y°ΗΈ.Ηlgώϊƒ§Z”Z‰.wι‘Mpΰΰ Ί΅Ψ΅πζ½ob³ Ο8πŒβC© 
3¦MN—UΓν]Œ-fΕΏ΄λ·Ψυw—ε\Ζ(Ξ~%ΨœαD’-8ŽΕWγ_’Αεp"$Ω&hWBEα`h)MΆ"P»η`ΞzγNm‹—mcΑΨήΉ‹ψڞž™nήξ_χ΄Χς&κŒοΝIΞyήbnφΑoπͺ3KΡ.,\‘dΐε—ΨK…—c£2Ψ₯”εEΥ{Κ`Ÿ«šz/*Φμ/G Ξ;R—# ςΕ›3sW–€„–φ €t/βN\ΓT§β4c5ͺ)©αpι:xn…oκΝ:s՞¬;±ΆƒKW Ώ„)E2UΎΜ]"ύΰμΔΐ(i .9.Έjg\Žά0 Zϊ΄°9JΚκρΜbόυ~Ω¨λέΎΎξXš[’«έM|iυύϛÃ:ψGγί½WKsξΖ-GZ[Kg˜R€€3έSνβΞuγˆC­`+ΎDS“• γ¬­σ“mtR έΊ3iρ§’lΗqΨΣ3q3QΨr…ιΘNZHK9†ΥK]„'ρί°½Έΐπ5νN!φΠΨi²J.;p2©¦™ΐ‘„Ϋυζ‡Γ΅kP\©E$Ξ’ :JΩ HU ˜’tOΐ‘‘ςC )³mž|γYΙΉωζ^ϋΚω„>ΏΟό‰΄ξُ?gοξΊh„œωαGεD&s³ ΐΥρAυη?½ΠΆυλμ΅Π§NͺώύaΞζIόTρκLΖΡανΛ†ώd_„Ώ-Ούώ©œΫDb‚œIΈ&ςNΉŠ_Έm]²Ÿ9˜0κ=ςωˆšΒNΏt;=Δ-ξŸώΒΗ>~»h6ΐύ>œ ‡™‡Cx{·z92M£θΖGΫ₯f_”gΡ΅ιΧΌΞ—Ϊ4#ϊη81φ՞Ήώ‹t“ΝωψΆg›Χδƒ~χxύωPŸξo‡c³©ž8πΣω['φΗ‡ŸΊΨ#~{ύFoΏeI/s}γ₯;ΣχΝ.ίξοΜ9–·y€]ίi’ΧL†Ό,Aqκœ<ŒΧΏ5Ρq=seΆγSχι\42νσΎ‚,‡Ί”οŸ9|7~ΐΥ#όB/mε¬ί.΄š# t4}»zxήβkω“oχΛ—¬?Ÿ·u‘»bσυΝλ{}m/«Ίέ@›—ί^˜/Ψ8»k1±{NύζΩΖΰŒ Φ#;fβΪχΒΨΙU“R·Pϋ‘:"ρ£ ‡π¬i‡ .Ϊμ_αS©Ύ_‰žΟΙ§gO‰Ι$ΥΣΏ&fνΌ‘=Ρέ<–™ρxε/τfΧ y;ζ{aΚΌο&ζžϊθΓνς3Ÿξ ^σ’:ƒ0œŒΎœ†˜žΤ3ΧqœΘ|&&=<½<ίνo¨qΈξ`Ξ§aλ3ΘBi?G,σΰm˜§§ϋ?ω· .ΡnΑρνΊΑΫ$^>'q.ΆΧ ΜWBcοHλΌ·mGΠσKΦ;θΜΫ¨.Κ²«Ηά5Τ…ΑIv5VΎVœ γΏr{Y StraΖ%—Α”ωΙώ%ΰuόΉ/ž‹Ί½γϋηm~Bۍύ3“·sηcΡb„O+{‘»;Š’―zίrVγ? 
5&‚ωηΚΝƒ}%€WξΏίlζf2š;y«ΉFά’±ώ‘ ³Qύc-Μr ˌƒΓ—άό’½ψ2u΄t΅σιKo7Οσ(ϊ n.2šΏmA~с]ΟS}u²?Ξ›οΙσtxv¦ »|+O KϋPϋΜώ`Ox>μφmh^y"ΟΞ:ΌAώ5ͺrα4_zΕτ?jο2  ηοΪλξΠq‰εEY‰Ώ;Cκ¬H׊χ Γ%«?zΘ3bΏeώ€_'Σ¬~1‚ΥΔΒέ`ΔΧ'χΔ©ύB ζkY{Ϋ Κ/l“…ΠΛ Σ|œeκΞρθ<μ„ŽΞ›Š©T¬3BΏ1v»ŒCΌhώ6μLπ6™η0=‡GρσΠ―εΛ.δA·Μ―ϋΠ“IπsΡοߚΖyb)~Ε~mβχ#Ξ4‘³ Ί ΰ™3/'έζ±9:D>s–&•ŽΔ}w³£FΗμάR—)ƒωΏy\"`I{r*GςΧs@Ύ \&Tsgβ‰βω‘E9ΏvθTΒΩ•λαGχ8gϊͺmοΰ£yΧηάσ]υβθuBΰJEb&6ωψςSύ°Ύό‹‹ΆΦh<‰\\ξ₯6—υ‹I±ϋ’sσ\Ί'­€ΫσΠwΓR/ΝCχΞ1ΗζύΟγφ_ΟžΉG[ΰP|ΕιoξhΟ4”CΘ'ϊΈw°ώζπϊω‹Λπ«±ήΣάιK³΅½ΐώMΑ¬Ξ/K³~9‘|{»πŸ6¦'ςμ’Όίy[qΦ΅α¨φξ .rΖ­β‚v»Ι_9,›5 R,ίΊ€:•bσΊΉ Ύ΄Š8U9ˆ’oL¦Χ^­6¬»#o‡z ζ™Φ" !˜Pν³zd€Λ74«½¨ξEW/αTŽδz†ΥΆj_“-ΙpΉTγ£τ‘W>+Κ^ΦlΗF*rφψ@ι©}%eπ0ΏfκήεaυΈκ«Νζ~ΏΏ§§ΏoΪCyωώΗΊ%ΔΔeΧAςοFΨάNγ σDφsψ₯|1}ψΤν>Ϋy‰6‘Ωό™·;~˜ώΗ +rέ€Sͺ!}υγκΝ€»i^Υθj‘FΔ5uiΰΧK²Ζ6vNΥμM =±«Αqν”’nAεMTŽOυάΪφψΈ|!Ζ£ΏF³SUYyιΧ=wΧ]Τ:“tθ°ƒΑ†Z₯Ν-Ύα+%/]π,ζ(ΗnMΉβθήnΪΣc_}»•δ‘΅γšb"—e}>ΗΟ3`Ϋ(λ ΥΪΏlΌ€Ψεs:D ¦=½o<ς¨λZΪh±4­ϊ¨[₯ΥΛDO‘°ω­έˆΝΊiΧΘT¦‘£F4Ζq¦-ΩτΪ7–ήΑσ}”α OΎŽQΌέ­^μsΠμ#aοېwϋ.ΖΥ:›WnfΫΣˊOh6My[ρ^K7V ΊΪ¬2EŸp|‹ΉK.Z ‡–΅)$ϊ΅ΞΈDΡύ΄ΝψΦ©δ’Rš3Έ?ί―ώθQ“UšœΞ–”Υ₯αι{d‹/ŒΦ!•μ£(θμuΡ^ΗR‹±V’§0GΚ_ρί›ΑS ,­ήo{Σ0e₯”\;Β°βaΡσ―°χ2ΐΊͺDσάQ¬_†dcΚJyeŽ4fάφ²›Ωa―Ϋֈ£₯yiεΘ|-πFΝXΗƜy;gW\‹œ©DkI·Lλ}ζŽPn©Ά8έ ζΠ:œΚά‰@Nˆc-+WBΙ&+œx(Υu~+φ^‚‘0όΪαΞ(x­^} Τb'OΕΉΤζ4 ‡|{//tu72@Φͺ²ž³ιΚΉG₯ΈT£D>bΙpL’MͺW£j)1΅Ή Ϋ€ζ›ώ‚WΔnοδ* tτΨwXŠδ“ΒlKΛΨ°ζ1ΈR›/‘ιŽγΦS’j@¬νΪv&€ςι‘α―Γ_§ν?xνC)ν#Ιw<›χόϋά™ήνΩλ9ΧLž…†•¨17kΒp-[Λ=E§œcΥ,Œ«‰¬a– ElgφVkV*οΘδΜΞΐΓΈ†bM‘₯©‚i:… σΠ]GΤϊž±[t©42ήΑdaλΓ[š ξY0Κaž7C@tέ|NΣΥqrΘzωΣήαBKΧΚΐακN%Υ:ˆΓΐ+ «dΘXkό'v1§$νΖC gY« ¬»|ΠkΖΎ΄v;\nσ‘Y(B4”lφ-°λpͺέYΧ›$}t‚I1ΐμQ)JΑ:+j₯ςWΪ@ξMΙΑ²ΖΕWΥ¦T4uγ-7 4Ι=΄hl†“Εω΅*kW±άΤ½ YϊT ½§ξ±ΰφ—bXγ…υ~U•ηφ5φΜβ&x ˝ƒn½N@§ΌΠW§B(@6†DJZ#:­ϋ4z^Ghf—IVρpκΕϋ™{Ÿ΄žuρEd΄΅6­Πyξ½Uk€-©RM‘k@1Tτ ωY­gz<τG>6Δν»?ͺ2@B1΅ϊd”­:VωΠ ‹ϋ₯d"OT­΄H°A›Vΐϊ@_ͺΦ9:τmΏθ‘nχξ€™±]IΓmΒq–dpώˆ£φΩI΄^¦ƒΉxXΰ–tWUβ@₯AcLš™ω ΅#ϊ΅4΄‡-l*R΄…+π0¦IΓCd „˜X$Έα@œsZu8“Z4 „ΨXκΕ‡šύιL’ΥΫΣζtςŸηŽώ…ŸΗU‘Ks7Cπ~1θX•Χp$ζ*€lžΦΐ‡tπ·ΒΨιͺ³)€œΤ{έbXt[KFΕ¨ΑπΫΉGʎ` œ‡³κ`Ψ]KΣ.xŽFb„-H9e³ σ…miz;Χϋ§φύ¦ΎχΎGvGϊ3=Δƒ6ν¨ΛwΎy]=¬³ΝžžΛΏσ q}6t·—MΞς-Λ΅xM0χZ 
`VXDŒΪ…ΰEΰXm±ήFp>G@Ρ>λ£:grΆΧ½<ŸίWtS%Έ½7{Ϋί8˜/{μ²gHΗΉφt™l­‰ζΏD.&t_ο}₯B©VηBν’FRaqΤ’eL1&πό©ιQΣ›C7―χ₯Ž£)KGΌr‹:γF…zŽ.«žTqΎ΄ήrfίΪ8όΑΠΦ ƒΛšˆŸ³vlΟ¦Μ“οοoψ΅•ηqΓ‹₯!ƒσUž]§^²6!΄“Š.2˜ž±% –­½€CL©9ΓΑYE0>:Ή>‡^οξδ@rή-΅Iπ Ε‘Iؚθœ-ΝhΧ€ξ:|>μAΆ 8η«§²©ΕΩ¦“―@-εώζξιιϋΝφ)Ζb7ω-`‹jA"&žK+\Π |@ΞAy•f"₯N\ΎͺQ€ιΥ¨R;Οm΄Υ³ΘβθΕnΤ₯htpΖ6ŽpΛΉRΆ.( ±ͺ–X_…/οΩηάjλ¦œ=λIπύ³ υ7ΜA»{όώ:ΊsKGο=OB΄m₯Α©fι=ΦΊ·0 b#*|’€8uUΦJ{|KFΕΉΨ¦[ΙOόψύαΘzνž[~j­+|ƒ4gp‡.}““w₯«@όZΆB+°’ 6#NΩ ά‰Ή™»€ψΈί?GΛΣqι•VAΏ­/ΞMγyU+ΰΏ5¬δ―΅Β8tͺΞIΫ€Q9–*Ο=ΏTďά(ΏD kαKjSμT1­Ωvβͺ+7ΉsIΠ³ItXI3Hίb4Λ_Νof΄uw™Αw/‰'7ΨςΔΫY§LQ1k_ˆ²s!z*£qπαx|ŽΛ/΄BΗυN^<Œε5mφβΧRΧΪDjb$'ΐJbΒJ*‘€ηJq@(‘{•Kφΐ«°Ϋΐ‘ΐ Ν†~6Š #ˆ³΄IΛ*ƒ${α:ΑΩ½θ΅₯μαIΦ:ZλwΝψL­€GΫOΤruΣaηΆ Χn/&όϊpζ‚ž«€­υbξ·9€Ϋ4“pt²ΧGΉέ?|ώvzy+«7―ί~ά”Ί^‡Y_žώZί?ά‰‚δΫΗ?ΥiW«ΏoŸ~¬6χί{Ϊӏ«v[*>|ηϋχGΜm»Ότ‰faΒrϋ9τ˜.lώ·νίΩωΔΏΚλρiήώήΨ<«ΊazV„Ε¦z }­ v½Ύ΅‚½<’βjqϊ3 ΤΘ Ύvxϊμη₯α,h¦dΗ±Ρ¦φJ:y]‘‹q•ΚY—ͺόΧν@°^€ύξΔ”­wΓ]ΏήΓΫΝΑ2Ωοoΐ=’Ζ§"5;Σ΅UKϋ ™T%φ¬3S"Px§.x!tΦΦTSμˆάΜΖε<ά[h·ζ™x2&Ο/›^nΣ3Τ923Ο7ερu΅‘[“Yn˚\ 6Δΐ¦Tιl¦20pN†¨ΑOZ ά†ήeIΑ–bϊ-XsΓέΦμŽΩ^œΠœ8X‰VΫ€UO©0e,ΎΣδhO>]΅ΦΝ i5Ž,œ«9Ϊ|‚Υκπ1=ύκΓ€ΏyεαιOπσΥΛνφ₯S π ώeTrz…ΐόSΙ°†€bžBRχΗ6Eο €1m—3Β±­aΣΉuʍ³-ΨSΣ±ΧώώΆNΤΖYŸ†·~“Ž φ₯&^‘»ΈVA˜€d\R-Εlc2€0Ό‡€%K¦·¨£/δB“Μ«0˜ΛX$­rZΰώ©ώUξΏOηm™Ž|ρ™1±›_~YA€δ‹’½Ko‰¦R°ΕΒ³sT=h 0˜₯ZΓ¬0-tm `G+ίΩ)Σ‚ξTΓ»Έ MΞΣGΚώι?βς‘ζ3υTZφ”CοYn›uβ%θ¨:¬yJ”UŠ£½hWπ…g¦ΪuŸβΝ#ψ, άfן;ξuŽΓΜ[³xNdΓΐέ+Ή,격…’J©TS‰Ωεΰ3\d^–Vκΐ &PJXwχυ|πmΧ‘k_`κ¨BεP}pCυΈ¦ι‚φ,~*Zψ9Ύ#Ε!7ŒΙšά2V8wPr3“2rv4v(ΞϋͺZΖGΆLΦ™Ή `S€ιπc“³r¦œΙg™Ρ!0K3R'RQΑk€†υΕ&(ΐβ°‘ωΑx{0Š{Θ‘αΈ*“:'Π3KŠTšζΗσίlϋOσf€Ξv•Α‚)«ή΄Σΐ¦΅.ΖHμͺΨ%αׁŽe‹χ]‡μ`Uαi«$#,™¬υvχΒ…nοŸZΉΏώ坌° dΙ άB―Ί )XΥδW¨ΙαV|Uζ bjJΘU»―'ŒΌ­θη™ΐ£*CmΆ©;σΝθέ'nΜG«cdgΕ/7Γ$]>όΆϊώηΝαυ=Μ¨ψb@εA—τΝsyΓGΣΉ˜`­–<Ø dΩ+€bPΈΖ|D&ƒ>Ή’υxΓ³φ֟Lμ)=v'Φ_oa[7εΰΗs3φ¦aw_›α@Ί‚«`‡SΤ`ΐ>Βό*ΨHJE‹MšTm=¨œ%εƒ/Z²–\†eλΗρΒxY ύaŒŽά))­€βau J.mΫ•ΔλB”ϋΫX€$3αͺ·ΝQhEƒτξγlϊEx΄zόy˜ !Aν₯?Ζ¦Ζ*j2!b=αkαGA`5wn°Ό1˜K]u¨!ε£Β^sQλΉΔ›·§ο«§‹Ί|l3A€ρn#žή$o”„δ’1Ι:βεα1“ΦΚ«%³ŸsχU›jгs7εύν ΐι£ϋυ°―θC»{Ω\β>ΤΣcγΫΖ›ΖσκρΎI.woξW[—4•>vΦ¦Y½ΑΫa…G`ιˆX”"<`ŽdΌ‚ϊΉ4₯1ί\Jp=Δ"ΓKEGΚl"<΄rήY Χ>σ›OύΡΙ ZοžΣ0‘5Ÿψη ϋž]Q.ΨR•ΟΤjΙ™š ΐ&₯κ 
βη%.(Ε™Ξ'ͺυy:ςΉ«FΣ±}αuυψ}―Φώkστp?ΊΗ]|›Eρu(=QΨ Jfi&LRΠΊΪ¦ΊT_¬q!%όUgηΏRXρ&jͺ7υw5κbf—χ{AŸk‚οV’~AE+pβ¨8ƒωKΩ4`lτ1Š£*I=)Γύς͙ȍΪΛ¨ς%- X\ΟAςΐφ±q:‡₯zŽΊoΥΦ »cν6¬’{y[4`«TyΤrzƒnδj$β½ͺοΫmΏ}ωxΗΔ/B"Kοζ†Ή<³T|α$ΧΤ(7 Η”Ήζ@ΙlRRͺ \ck$ P―‚W‹’“έYν’Ε£AΨ„iεΞ#«­JCfͺ=ΕΪ‹iFWΉͺ–J –>Yΐ`ψWΨΙfœbκ]&e¬‹R|ΞN7½ΙΞΆ«VIΨ7–k”hvg0ρ3 B΅³’uΆ6ϋ²m>vΌΖΘpΈ8§%š„Νή₯Δ­‘‡Φc–[.rΉU²#J°ΟΨΨ‰”χœΈš™BςΟπό…Cώ·:”κY―«ΫMΘ DYoT)_žŸ&™ρΓΐH71ώ‘>ϋ©λŽƒ—±ͺΨn4cNPμjμ»=Ε±ΎwWμp·.žh‚…ͺNΑu °ΪLmrmΛ"–h4ΜXŠΪeφΉƒ’‘cS@ΒΆt<Ρ>zŒtŸ˜ΑΡ„νJ΄ΝΝΪ&uΰΦbωK]+ β>«gDW-{ΠΡβΊAΒ€š±V[ƒΙ Όξ½• #Ά:ή_ν\Ζρ{£›©F3`uK§˜±ΨΦS5Υο“€V'Ptr«ͺ2;˜'m{aλήƒ~°tΰΜΨΞ²ΠΫ£φοωΡsŒ»ΈψA fΌUΉzEπEZ΄‡Έ«Ξ΄*$ŒΩφ£ΣΨτά ζ³£›'Χt›[­ΗU+ke η˜wε c”4E"TYDuΨυ!?H½ˆfhθ"'˜\+T©UPΘι\Ευ3ίΆMκΠΏηΗ(QόJtfhσsŸώΠ­zqΉ›J  D% M՘ά:Ξ©ΙFΗΦΑ.[ΔΔOAIξ°“j|λhš9­=Μλι!ξ7Vk»Έt`ΥYΔα.r τ¬ƒεnŒfɞΰΒq.ͺjͺή–}r•JŽΣ–|$;9X±Ο£ QΏψqV91”ŽτP7ς°R &³–f%ͺ’γ›Τ‡V@γHGΨ°ό£ΨΆ’=Ό6]\ƒ„!&Β―μŽ‘K΅š-x€(ΐ"QRΪΦ"ΉX5οSk₯κԏ^»vτ­ιΥάUψΫχ¨ΛΫ8N–΅·€•·M8‚Φ‰e ;ω:€^g0FΫ•’Ϊ(Ά …ΎΛ6˜@ubMΧ—Χvͺkθ³D$ŠΞ5%’$Ž]Ϊ€†§Ψc q(φXL Γδpκ(p1ΰ?s΄gΊ¨γH΅ΖτήΎο&R„₯©‘\½&Έ)-UάT¨3BŠEt%$κη1V;|•  \eΑ­νφψJοŠM^;³ΓfnΖ‰ϊ΄¦ “‹]Ϊ8Hh§2Ύ&ŸC†‘ Ιΐ][ναΒ;8P–Ϊτω‘τΨΗ 0:"l‹{ΗP,YΐW’*Ηξ΄)oμJΦ’~a«qΕͺδj“%ΑC8” D9Ι2ŸI>΄Qg7ΫΊDPΤΎίŒ‹`ζ€€ύΝNηωαΆυΘPΞΜ|ωb™Ϊρ«Γχ—ϋQ•퇉ΐΓbSzaD8{‹+ "6g’[²Q9[•ͺΔ’ΠuReθύtUԊ‡+ιω6½!ӌg|»ΑϋŸ¬μυ ώΩs``PŠW±asJ%§δ 7LδXΙΥ>α‡Rτ䀊Ίβ%ΐΰ[bšΙ)†ϋΌΕΣέT5mζe{St^ΜΏτy’wλlΤΓΓ₯‘˜3=6D§Fώ*l‰άό'xμΣ³-§A󯉩&[€·$ίζξρπiš―κΕ«&"θ5\GuœrφƒΕ³‚‰Φ$wC1ϋV€Žkν΄₯d―tΝ.9ωŽsΆηοίΖFvqEoηΈFiTPkS›Π[‘07–Eμ-„½*ΉΏg²‘80)|‘ρvΙP5Jg=ϋψ/‡Φί_CάUJ©£T” ΨQ΅ΗΞΖφΆRf–δŠ΅Πd=”Ϊή|Ή₯»,’μ)9υ@€ŸnDΫϋ£0ςΏ½nϊrΰΙΝd6ξφΏg§j‡­fI./Ιΰ %o²nΝΤDΰ”RιΑλ―‹I•ΐjW•X2""+ v}ΨHyδ΅N\‡σΨ‹ζ‹ΉP1ˆH’o©`¦ŠθRe‹Nη+;Ρg€2Nm†S}˜ΉΩΪ¦`ν;»εoIcŒ:™*Ί58γDM1z°›™’TAγΐ‰ΙN ζF΄#a3­sΎaΙzœΦvψ”ήK83‘z(k%ξώ—Ό|#r―wOo7Ο/όηŠZΌ< 5X&kΌ‡go ”‘ϊ„έhδς86IŠSÁN’Θ‰ΝΛπ ‘‹½υ:"γϊ•κωΝ4ˆ₯cϋRI’Kˆ7‚LΩφΩgΈ8₯Jε ¨O³Ύy”1”'Lœ6%Ψ―B 7ΟνF φΣ_―7ίίw·ΝnƒΏαθ„L~θ΅ϋο€iΦ΄QΞ’\sΦμ+k2χ>p”Ί­¬rz”λkQHsΉ»ΗηΪbQq “―ppI{έ {Œ@<Ÿ/‰s$ΣTsXVm xτPU#i“žDΗϊΘ:Ÿ\²ι…Ύφ’Eσm$Š”΄e֍€nάH |· Ίpb€b"7gZo:[@;'™ΆΌvΔ8ΙnpξΚ)ξ.ΰ¦ ΰ©ώΟ±—Ϊ?lG—sΘ’…Ÿ`ΒBˆ9`αόLπq>ϊγαήσ„ΐφςE[ͺκκ₯«XπwMUλ^3φ €7 
%£BTΰ½'msΕή₯Pό΄ΝΩήnΛψ€…ΤϋσzDλxΐΓkϋγosu}Τ¦cNμΆ€{L‘όp1*…ΪlQ‘ρ’7)W¬%©’$EŸ² 3kϊχC9ž§ϋ³<άί<–ΎωΞSyΪK³ΒΖ,Q”‡ͺΟid²+)Β», ‡Yͺ'•’½Tθ•ΰ ΐ‰fTʝuuΈί}f’<ή2 wb.δ#ΏΊ΄½e ·IDa¨.$Ρ”―] M€έ•Σ&ε\βCRΥM•Σ žΞWržkz17·ψ½(šjΩHΩ9ΆŽ"v$6fπ]dš €Ώe }ηεF%ˆ( ͺjN%μρϋFηf­h<»P’φ2Q_y…\xA« Ιυh)…QΥ0-ϊ€4(Ή€U!ΖβψJXΊ}ΨAκΈλSžρωίώχŸύηωΏώŸΒ΅ό”cargo-0.66.0/benches/workspaces/servo.tgz000066400000000000000000001426151432416201200203560ustar00rootroot00000000000000‹servo.tarμ½ι’#Ir&Θίω!Α?άa!Σn3o!ΉCrf)B.GHŽμ²$ΔN*x4ŽΜˆανkμ“­ͺ»γς@ @V—gugζμTύτ^ΖΕ·όΛgoγόι›_>Mζ)όΛ2ŸYoόQB΄ΆγΖ|ΦLi‘“π™REԟ‘?»ΑŸυreφ;ύσΏ?=<<Ž'«Η?<ΰGψ²|ΆΎ=z©΄αήZ"-S*ψ,™i›¬Ž‹H8ε^RψSράΓγ"ΞςUΔGŸW«—εΎ|χ>―έgŸΟΎ,‹}VόύžωO|πρŞaΣαήΓG?ύη§?ώάμOΉ.°B/ω<ΞWΛ/Ξϊ―γEΎž‡§g;?Νςωd•/Ύό7€ŸWωlΪχω§D7ΞΏRŠ ηώ=Δ—8qξ'qωσ'\ύΥΒϊψπ׏δ3όδωr钝<μ‡yœ–WΔγ§Ι‹?l£Π88_~cπ%oβ2l’ŸΙγ§O?ώyΆό Ιήπωσψ^ήσmtΨΙ©ύυν –m5ρ{οzΫqόω“]―žσΕ.ύϋγΏ=Η‡Ε-ώπ?ω/Ρ―ώ.~‹Σό%.–?Ša²š”=b„쾏σe1ϊϊŸ8bψ깝 'γρΣΛΪM'Kμv²Σeότ ^^½•|†iyότο°ΗΖqυωΡ§ρ_Ψιτ/ΚοO9φς?§“ωϊυ?z˜η«Ώ°σ·Νe»πΟΕ v1Γˍfό€Δ<ώπηρσα,Ν'―»eYΟΏOζa΄|[Vm^φhf}Ύ„χΦ_ νΟΫ­1PΟίύ_.ό8ܟΛήρόiͺϊ—υŸγ*ΟWΟαψογLΠϊϊs­όώ?Y₯©/·|΅ΙορυίnŸ'€“Υς€£Χ/6ž^ ψηO)ΪΥzKΎ}j΄ŠΛpmπ…RFωxϋ"~Γ½cΖ ΝdJ.E³,Xg T—Dk‘΅it'Ξ\ !.ΪΖR»ΦxΆθΚ“ΟηirkΚ¦ŽϋvξΖ†Ζ½λυ$Τ&雀‰9Εp6χΈτ-ŽΆ“YςΞ=-J>ϊ>ž–»₯œ/°SΚVx#<1ž>νΡώύρ଩έΟ@_yšLΛΫ·]ώR΅βnΔ½uέLΩΫ&ήρ_oʟ υ?Œ ψŽλߟςη ϋ@]Οαϊγ‡aύo?οΫazΝYγρΣΜN§ΉZΌ|ΚΣΐΙ· π«—J―B6Ž ΄ŽΥ›~$ο?t)9ΈτP}-8ϋaΩώλ1΅@’σό>5„'θ/0ΰώ/€τχYόΈ„Σ3χ₯<±ώLͺ†§ΰ|X›π·žLCΝΑW|«ϋt6Dhΰνκ+ο€_γ›Λν"ŒVo//Έ‘Ά¨υQa%AkQΏυκξ1N―mνš›O Ίσ―gη{Ιξέ3«ΆΞμš;‘O½7§‘PΕ±Ϋ~οπRωΕδ₯΅£WΪ±Χh,ޞF$Εmπm£>\±ͺ±γ™υbzp;|οI'Ή§<@^ήΎΰ—Η?υΰλβ"YιaKόX©ΰŠ#„X±Ϊ›—’Ε‚ζbό`ηρrΗ3kπρ(οI t ώƒ/ΏΟϊΟ—«8Zάώ7Ρpk]Χ(>¬μΏοˆ©Γ=€ζ[Μ»ΐ^=L¨Λ»¨#6αΠΗΈ+J§WGεKM»{Τ΅¦6Ϊγ M§hθΛ΄υΧjΧϊ†΄'ΝΠSϋ–―[QίΑ•&ξ-ρή>ςm…vή&~Y»³h»KΧAtxϋ>¨ωJώθΥ½qΆ^ΕΓ=S΅έL_Φup M·€Ζ[Ύ?w½ΎtΌ?τqۘ/zfψΣΖDbπγa"Ζ,ζί1± <Ό:Ι[ΐKa‡.ςI¨_ΓXΙΏB)Ϋ―‹¦ΫΌbdμOJ|έM«θ`»ΛϊÎ{*Ž΅J“8 7Ÿτη O©θ ½ώ_ΎΑι_¬=2 N$ά΄ΥnjφΈUΙΨWiά-T48 Ϋ?:zžΥάƒΣ{ΞG™ΐ%ώRϊŸΫ­?Φ.Ώ¬ARΎύηΝψoXΑζτssρ€˜ςG‰ΕϊS±ƒ7Υkœ»Π:θοΔ±=AΛμFϊ‹έ”Ψ έΏ,r?šYΏΘY…ΟΈΞW±ϊ|–tμbZ¨·;%„CςωΒOaێ&³—ι²!γ8 Ϊ;&ΏaqβΔωΏŸŸΝֿ΄(υ|2mψqΕόϊίmν}ŽHΓΞλξΐΪϋόφ² @Žp2^·ΝΙ 5δžOίc=ΗZx}+Ϋ96³σσCέ@T+dΠΈύΦθ€—ΔI=Θw]#O­ΏζMύcCώ―;πFώΣN]ο 
η;ΖΦ/Ϋ;Ÿ1_Ξ<1%εΎ\΄΅“ηΏ6p‰ώͺαόί‡ώη³§RΙΣ›xώΣΜ/θ ίƒώοkϋ~‹ZΉνζ䍏ŸΎ$€KτtΘ~·υΟžΊ Ιέ"«ΤΝόdπΊύ?nι#όna΅ώ»d§Ο}ςΏ21θο²ώ5-ψ τ?”ΘωkλkϊΞj_]}œOυϋZ"ޚΥαh `ΏŸ-yΥ9_Γ5ΘJύο w=θξ²ώ@`Ή§ρvώ„+ή΄‰AώΏ_’©ε[τ]ŒχΩ.ŸΗΣάζ2άΆ¦lω:Ͽϟ–oΛUœ=•6œ2}KŠΏΝXYoηΞφ‘ώχ}Φœ^{-ώqzύ9•ͺΎώeNΐaύoMνΛΛ†Tr˜~lz+κρlΙ­ύuWhXžψxεθ6‚"ΫίΪ‡½’ΖΠwŸ‡8*sal»π:UW6χ6Y$YbB³SrZucΉP.wyxΖ‘…cέΙpάWa Δβ°+EΛGς~TH€^£cPm «™ΟλΎͺ+νςα° gδbήμ%7 uώ;쉧Vή«vακš€"§ΟG’xΦ/?lή%βh€ΣH‹Qg΄%3}Δf_β)²υQΫΦ£5Πwοχώu6m\‘ΝΡwTέnΌ‘Μa‰”€t ήRͺ j²–hΒΥυω"ΒΛΧσ`WΫ…ΜͺφρΒΎλΏΥεφ§:΅ώŠ«fύw>θξ@”0H~ΏΥJC`dβžΠΦIV桍ͺ‚μώOvŽΔρ€Τ1ΆωW˜ύΏšM ςO¦uŒ#›Gτ.+59υόοωβλCž–«π‡?ό_°SΙΎ< 0-ΦMγΓF{ύP‰xπάηΗO!χλμξ­4΄αpΨΟrί€±OγαίΎό-Œπ9’ϊ½»Ρ†²ρ_ώώoξŸώώσ,`c‘Λ7/ΛΛΆπ§r’]¬žχΊVζι&ς‘σί›p‰ώ_ιί…ώ²,ςγiκΛψ€―ζ-ωϊϊίτν;#δθ άάσΏ;ς9η>ωŸ»¬™…ΏWπ)ϋ/£ϊ―œΣaύο@›φ_΄γMόΣ"&§Σ­*θ}fαΥl*:ojΗφ`nρΖͺ°ΐ@ž¦ωj—ZlΫyΓINfω€υψ‚ ?^ŽλΖαί²TΖΩ&ο²pFUd2_ΕEΨV\£~OΟε9kvoZ’«ΥΗ»iΏLΥπ-•‹5όΗΟ±χ_Viδξ^ %Νηφ³ΊdοΒM€uu\Š<3Kψ―Oπ%ϊ6ΰ{?YFnUY&ώΧrΠόΈψω±_.Qe³υζΈ ^Q³Ό—>ςL‘ K˜MVΉΞηa1±εΠ³„€βσ“_δ“εΞW›šNδrρΰOWΈ žΣGεg—΅;ΎΒQΊ|ύXμkέ:Ψพψ―ώ9ϊ―[/Δ[βN€BξΌ”ί―ώ3ΧƒύηλΏzFΣφmκ?S₯šυΔΰχ#λΟ/ } N–«|Ό°³JΙƒΞτWΠ7tΐΉΖ‹ˆ’ίH eΌ»`£MEK½¦rE˜/Χ χ_–ω½zδdπσΙ$·R:·`ΡO‘T«αͺ΅ 8ŸŠΎω (Αίζ@ˆ~+€]Β…AΡϋ§†ξW™ ώχ_~Τΐ§π?Ρ€%ηωOχυΣΰ½ΠV5°xΩ> ψΑ~@πWGπƒΚ|ΐ‡ψοώίL υΏξΊώ=¦8•™rΩ”τ{όΞόΟ ζF^Œ ?ζFpνTΤ7C Cθί ύΏŸώωo·ώ/ω—~βfΣ²‘Χ  'λλFώNΕ »?–„σ[>Šγι@ΓAͺθf€Όͺx¨ζδ6Ο+MήΣ&ΰ‘s[!Νo©QιΧΰ§>/y;vjχπ`²νEZ 8ότπ8³γ‰ŸFϋςžjΧ¨±Λ±]ΘΏ?ξ Ϊ0]φ.ϊ₯qΗad̞£aχ…β-@Ζ– γ~τΏΧ €—ψ=ψέݞkΙx μΏ?”όηߞŒ7ΖΊ–p1°ΖΩ.GΘ»*FνC ΄σLχ²ifη]&>iΌΐd·Oπcγ§Y>ŸΤ χ4ί5]ΗUž―žŸή΄vίί¦¨_μˆ9’8₯;[°Iqΰ—ΐ–ηγ)°Γie{gΛ[ζΛUœN‹ΌZuƒΜξJγΉMρƒG6w·²Qύ‡·Ž—«“X‡˜ίθU0υy‘μΜβ‘ΑόΙ5;§μ4ο°šοh±·Ÿ|#ηΏ¬έN=πgkΚ>hΉTΥWΧρυ§lΣφ:όν‹QΪ«Φξ”TΎ {―…υlφvφ‹κξΘ‹χS/q%|€|Α‘©ϊ¬w±_Μ΅ΐlίpΆ…ϊΝmoΏ¬=MέΪπeμ­Ίcυ’ ‘Τ‡pxυΘσ-η±~±K-ρa΅Γ:†έz<Λύ₯Ϊ=ιψ²k}ά©!ΰ;ΰ«i¬tγ<‡/ίe: |σ—M\ύΖωΏοΏ΄jΒ‹Λ'DmE2²ƒ;φΪ[΄-ώΘMG ίΒΤΧ―pŸ¦¨Ί(oΛΗ_@ν2>Wρ PΫ„gg²νΞΤ>½Ϋή³%RϋΆWPΉ΄ϋςσ§9ΰ¦oq΄EΕ“Ϋo_κ—ρ‰|τ½Zƒω})[ŠKuΨf9Κχ₯]iVς³Ζ²ο·Γm•_`ϋZ5.Βλο/»y¨(νh,β^ΛΗπΫϟΆtdΣΫCͺRά0ξψαΪ%|w5+W6ϊ ΕοTG³Ψςγ]ͺωΫ4Γ½pΚͺ Ϊ_Š]kσχ―Uύ?l,Ξ¦αVZΈ³up‡•[¦ΣΏ˜η«Ξš!?=4―Nς+»,WνΒ?—Χ³ξkψI , 
Σ¨T2ΆωF‘έΑyΥN>o(]Ν‘ύΖt’ϊŸ@7Ψξ²ώ‡^ΐ½(Oϊσfύ­―;θšώg€y),n²ͺŒμ3L›₯dWΟ“ωh‰\g/β·Ό||ωάΜ+l½ΟΧσ"΅ϋ¨'4$¬-7cς|5ZFΏ^LVo£—zρΦŠŠDmG_/Ϊ^_Χ™žzΗ%UuοˆΆ8₯*b폕 ιN=ά ƒ>Tܞx^φα~~’—ΕŽΡν>aM΄εϋωnχυΥ<Φ8ρk¦ρ0ƝŸκb[@z‘98ςXΫX―'‘pοξςε²τ ½Λktj>Τ‘dZόL1šJQ`νφ£_\ͺ[l,ŽΘφC•ΌΎξώ*GOyioΓ_χΆ~ΗU.Z.Šΐ…¨0ύn&±a3}”Ηξ(\QπμλƒύώλCΈΔO³!γνΦΒ.Ώ π»ώ'‚5σh:ψί‹mΈζm‡Rν~luu΅V­Όa±νΧlοΤωΏύWCύΟ;ρ΄LάΤ‹hŚϊ?9ψίAs,yΓΉ©½Ζ» ΖφεdπsύlΑgΪr―ΟΪΚ z―HΧqώο―§ƒώχNτΏˆΕμ“œŒmΙ.ψ€ο@Ϋiό~Ϊυ£ΒχRiώπ•ˆξάSϊ’0”*({Πώιΰώ/†όow’oΞIaλo¬€~ώΏŸ »Ε¬άΑΨͺΎcœΎ%)HSαxf–ζƒογώηOΑV6έxˍωφ™ΚΝΞσgϊΟό'ώΏχ«ύ;ƒώ7ןI9¬#5t|Η₯ΐΦ\ϋͺΐϊτ(μύi–Ξ,˜ΦυE―Φσί«φο"ϊΟ$π]θ|ΖΈ„wφ"#ΕoΚ^Šό»Ζ½&>½ώŒ’fόƒΧπ‰—v0ιέΐΌΑίο4φˆ=_ΟFεfέφ°;βεO…#°·g”ϊ¬xew¬Ε:šΜlYόf 8Š–:ΰΰΐ~—a±…‡zQžπ֎Ά]W„JwΓ;ο…2C¦χί7ο ^βOισ.λ_ν^Uΐ'γΏ΄jΪΙ`»ώ«ϋΗb½”*Ϊ€—ΏuȈιMXί·όΨ^ΧωοSp‰ς?ίiύKυλ-ύ₯M?6Π;ΠVqύ€₯©gεώQ΅ϋυ©&χο;3ίΛzΧH@wPgεβχœk 9’ίn:™―_Ων0―γ8Ύ– |mΐ§ιΓώ+ θθ;λ?wηϊ¨;~ΤΏj2žΏδθ“ίmh{ρίνt•ΟΏlnώ oKτ{φ²SηnωΤΰs—υ_Ψω8ή6“`’™E ώ?w Ην=-ξ6G3<½Λ tu*]μμΑ™α¬σόOώΏΟϊWhnhΧB7σq:ΰ›ΠΒΣ½ζΈςœ›‡8Ž;χ7l[>ΫE Ϋ¦“– Άzv±°oUYςY!f{%œJΏΞ–0΄Ι*MνxΉύ%ΌΘηω.­ζΈΪ-Θ΄΄h WΫΈ>mœ_±ξςΏΙ*.Πͺ²νλ±”‰ι‹8μΌgή΄UχͺE^β`;«ό4K―ΜV1 ϋ\„0m‡U†·-ng¨ΊΘKχ3m…Cη’Ι*χΟω<,&Ά25α―΄9υΆE”wzϊΒVέtΣΘ30O3ϋ²‰?§5Χ^έΤ6 ‡νΌ]qγμ₯*υ…Α‘u‘φ=’‘›„Ιf.ΔyΗ3θΎ]mλY[fίy½J#³=9ϋΡp ΰ†ΧƒύSVGΈB•>Κ_R£ΈWΧkψχν©vDœ†z”_ύr―A‚!ŸΑΑZ¬ύaZ]sΫΉCΘ[­ϊs{u4Ÿΐ§W8-υ™^…š7Vk͞_–§½‘ γoYBpίΥΌjiΌ°`Έ‡Ϋ¦’yΉOΟωsΛ·$0:7_Ρo(φ²Δ/Συx2oΝ’X]κ1ύβE%ˆΞsJΌNfγΫ„ΉΒ«f‡“R΄<ή―8rίΑ›Zd- kςΜ±H}…¨kVc>·π2π?`ωΣڏξaΣ&γ£Χˆtyo½ζkoκτΠνc%ŒY–/1ΩυtU\[Ο€fbxš“zΒΟOS c΅zΕπ†ξZΕ-ΥsYžW9·­ŽΥϟ:ϊ΄WςvC-ΏtwSyΊ²­°Ϋ( {ͺn­ΐνv‘>\žφ£‘Χε4τΰ΅WV£m8Δmκ½ώ6ΛΌΎ[3ψυ όφ…ΰ©ψoή\₯Ι`Ήƒύ§E_K"qΕ‹ΆΊnύZCVӁ5u^νϋЧ ς φ€„ΈiΦϊϋ ‘‹†…Ω¨ΩΗρžΒΫΥ%‘³$ώ„†³ήΡ5ζW·Μvκ [νΓ?/ΐj¨vίυ߈“·πdR4ύ?Ε`½ ώΫιHŽ(ϊ¨IέAτ·ZށֈηNώŸώ?œ›­^όΗθ?g²ι;Δέ^ώ?f”kVnnXιn%'T?9”oΎΑωΏ›?πΧΆωŸ€nΡόϊίΊςΆΓO°U©w»ψΡ‹gm”ΏΞuώοm£ώ»«όΧg θSφ?%ω„ƒύχτΏιfΆ_}uτ“*ΊΛQύ2·τZ&«Κ+lλπUχΰnρΈώ!<£οU­δΆΖΠ3=oλ•LcrμΗjxy‘ί¨ΗΰΝ插)ΰο^"«!γπίΖ₯’?ΐ©όŸΌYƒk=ψݝωι$Rl‰«,ΠΧΣ,_T°*Λώ`ϋΰm?pοeqWW1ŒvΠCΤΦ–tΰ€ηΟa-Έ–]OΛηYœ<ΎiμΌ»-2©vνΠ#έΑ ž?b[mq˜±€ήΥθώΫΏώ+Ό :Ξ3»ςΟ&™ςΕΓΏ¬±ŒmΘύϊ qΓ αΒήΈG $ώ=_„²o°ΨeΑΖκ„­wη§…5ΝΚΖωϋΏύ»ϊϋΟ³€/ωr7ΌeΣ%|©—"ΑωύΣήT@—θ4θνΦ3ς.Ώ|Ÿ °ηβΏ§Χ_Ή^kυί(Φ&όΏ3F 
Ž~Ή9:#₯Άκ‘²tελπς§)ς(”ŠΖΣΓΧ@C-†j{©;’jοžεARιŸ8‹ή·wa6F:ncδΣνuΎΌ1Y)΄Kΐ/x―”©ͺαvΤDŒΒz6{;|λή…νmγε xή¬ΐE[·αφ9°αoq΄Υή_ΏŠδ£ουY.Ϋ["ΠΆ·œ‡Ά½»qΉ ΫκΕφnέν‡φΨ²ύǍ7Vz€b·[^]»ήŒEΫήΌkΏ_4Z‰TΞF«z‡Ξσ|*\Ηύ»C@χ―ε[1ŽντΏε/o‹ΙψΉPlόοΓΡ}ϊgΈu2·S|ιA?ΗW¦πHXϋΥ]]©~η PΞΞίΪSΗτОΎA·œŒ·˜ύ²WI¨_Kƒr"ΖΟΞΓ"Ÿ„Ά5“Ρψιδ₯ΠAo΅ΟuU7Ό9Ω¦+)”ΩO°²|†c·AάE…"­ζό 76ΠΈ·ΈRΰί‚uW―§+ tŽσζΥΝ±±Ψ—ΖeαρΪz‘fv^Χ„ΞFŜRΜlτ kV₯ώȏ ΕU$όΪxZncόŒ ¦qΉ<_EyV%†6™ήϋφΓΧuΒκκΡPBγ8Lp πi½,|œ―ͺ˜Η ?9φ0Μ8†ί…—ΙK„7ίW€* 1™cΚ/Ο0»ΣΝ-ΥΗΟ?7j’Œ¦κΗ”€u+ψ_ώοΉψσeς#r°ήzύŒ–n=Λ€§τΏŠ6ε?­ω°ώχΒF•;aTQΫ€‘­Ÿ5΅Αη»ŒΆTΚΰΗOϋ²dΉΧ4BζΡΦ>žΆ΅.ίvΩνv­ί&!ζ»Ϋλπλs=‡y=γ[‰vΧΛο8ΘΙ|Ήυ8lF+dΗD}ήvΉΞPνΫ3ݜ΄°Ύ¦υš¨ ΫlAΝ•m­§•ήϋπ?ώu³©Κ=ό°zΆ«€ Έ–E/–hxΏD7ڝŒ¦pχ%φ—ΕΉ°ώχ‹.αšωίο’ί&»™ύ—ˆ–ϊOZώŸ7αΧχx9ΜdwQ†¦XW—J‰₯YΙJ%CδΎ²αiΙσ—vqσχsώοk£ΓωΏ“ύ·r”ΈύΧMωk5Θ7‘5ύj“G7¦pOω\χͺΝΓ"©QHΪ"¬T‰T€όxvΆΘqτ_σ"ιfCDΫάΈΜπύο?*vθAξηΛd‰†ΰςΕΧ‡<=,WαXΎΝύώπ· πόΓ2n_aJzξΛ‘LVo wδλΥΓχhΏ>@§aφζ>.ωβ?όΫ—Ώ…n>ΗVφˆςΘΌ`θ₯ςΓ‘σgϊ?ΨοΓΡϋόΆυŸZό?8eƒόw7ύίr΅ΐ|z©Υ‘#¨lͺώφoήSWέΝκώTΕO ωd>pώο_Iτ.λ_ζrΏeό?ϋ&ύgƒη=πž--.,{”ωzφδ_ΦΛ½Š6-e’φ‹άτŸxvΕ†Ξ°rχ·&ΐ9ΌΤc±ƒΦ˜>ΈρΫ¨ΦŠΌO9Œd1 ΅²υ«‡ςT•ϋύη›„ΈνΝdω؏ψj…Ιb[OŠ όΌOϊχόοCώ·{MσΏ τυhδς?݁7+’λήhfύ"g»γ:_ŎΒGoσmˆΐh —€€/ϋ)lΈΡdφ2-"Ι0tΔΘ–ΚJ7εWΏ»t€§ΞέςΏ‰As—υί€Υ%ΐSλ/E3ώ[ρΑώwϊί¬Q—'Ψι-ύˏ]―ΰ@ΦϋMσŠξσߟp ώ—jΠάeύ±βmύΏ Χ²ιΗϊϊ\Ή‡{cΛπ  ζMjY΅L–ΦξΪ0ΤBPζ²Zθ`ΊψόίίW ώ?wZ8[―βkŸ,ΰ$ύ'M_!‡ό_?ύΏ/7α@Γοwώ{d—ΰβίαόί~ύΧ‹iί)€NΩ oθ™ΤƒώοτCڝS©fO¨|>n½GμZ―A>˜ΰ―°ΞΧGj]jsdhɝ[>ωΣCWέqΧΒΑ`ΰ¬ΓŸ.ϊίs ˆKδ?Ιό»υ/γ1¨ ²±l΄/“›Τΰ‚6νR ωnΒ[σ?LŸΖqΈ+ΕΠβΗWχoql/ξ~ Tž•ΩΰFΙ :³EΩωxG•oCk¨Ά$ g'CΈF2ϋ§vη9LΡ·ZζΎMλϋρvΉηy₯ΓyJ‹1]ύΖϊυ+'Œ¬άOw7AC‘" ·κ€²σrΏ_%·δΣDΎ|οxλ»έbΟN‘vœΟΣτ­y ΧΪ-CσΞq‘ιa “5Νm(c/K+Ο wέΞτ‰ΟJcWK1”/β(ελyΨ¦+/’ο\ΝΘ·ΝΓΟΎΓ'πίέκΘ‘ώΗ½Χίχ%œ¬ΦŒšψnψί; œΫ˜O*Zͺ4²δΖω·"L“ΛΝΌ'ξ~ ΆZΚ΅:>ίmΠΓ’YŽAήύύ1Τ»¦ΰ»Ήϋξίrώάvοίz·ήفƒχο­ΟFά{ 7ο> †χ9އξμL‡ΎΧYwcw>„ΉuH»ύͺ­έ{Ψξί|Glϋδ‹³ψN€{.p;@‘b™ŒDώ/₯‡ψ―;―/σI?πdώίfώΙτ€ο‡|§†·ŠΖ}šΜ‘cΔπ4&Ό%Θ›«5 Ψ’ζ6WυΕΰΟtWA8₯tών"Βο}ο] Sέ)qΝ|*Ιξ­ τ§ιΗΐό_26ΔάΕώΏqύΑd=9‚ZΦτENΰaύ―Ο/!wΗIΩn ^FΏρσߏ#Π%ςŸΰΓωΏΟϊΧ }œ œŽ` ώ/ι έ„ώ$ωVοΌ£™Δj` w?f—ΰψ4œϋ¬Ϋ4ή4―$ΌG‡ϊ/χΣ·Π~·Σ―a²(γhi£mΎεseC>’κHBxςYšŽχ.β8ΎžΘ&O;žΕ 
}q‡Z”‘l)v±°oί’ί>‚ιt'ώ©2n#iάd•¦vΌά*5έΫ*ζ‹PͺH‹ΏΔjdΛmΡ5ƒ₯P±υ4ΫέgY3AKz}ΆΓ,•©h…zΩ—ν{›)Ύ&«άnW·ΉφΗc‚ζρ{Y“fb­ΖΫ›―g£ύxΩ4™―βVALΛΆVlϋŸcZi˜ΈmΧ°8ΆMσͺΤ-%˜Ÿ,"υZ―bν^lXŠm±%Žf½ZφΉ(›[΄VM΄¬―;Ϋ m=]/ΦΕέπy>ΑDΨ#7 “νP6Λ8žράϊ₯―ϊ–—«B?·‚ƒςP pωσρ}Χ¬§›ύς€nw「§Ή;Ό}ΫΪΌ5›J@l‹ύb²ι»λŽΣ«pΪWϊς,A›l³§ΘAkΔΨ°iΎ8ΉΫΦχE§.mΡ­fΎφΫΫJνχcfξζoν›<ΚΜηG2Ϋ}|“²΅;K푇‹ ²ϊ“{©αί½Λ RQ›ZͺΫέ…Ž§ZΔ—ύ ͐ΐηΙ|΄DΡδD7ΫVσ}ρ„› m#«]k)‰RͺέPΎΝhΪ€/_η@;Ÿ–oΛΌ©<@x½βΗψ±`‘ψωaα·P£’m;›:,}/σ— •oΙbΏνΌb-Xn{Ώϋ₯;τO‡Ήϋͺ%FŠ UΒ/[Φψeϋ=zZšκφιFύWp―ΩmΟr6Κιot ‹‘Tα¬eˆIΝΧh.o·Ά&M'σ―Λ]˜g9Ο~aWq_Δ~›bυπη„όwϊzο·ώΊ‘ύOIΦΤλ{ύοΟ'²=/ίζΐ3ΧI…‡š’M° `Dγ#bφ!’jͺJΜ…M9ΛηιΰJΩ§+ΌΥ{κγΚξ}{/†|όόίΣώ7θnGQαΏό‚ͺ¬;θ©dMύ/QƒύοτΏ©ΡlQCΦ5Ž[Ωl§:P½vαlΥΕ΄ύQΞ΄(TΪ³œΦ°{Ε9*–cϟֱ{ΊUΙ²η°qbψ9 ΕΙ±ςBΝw΅ΥjjbN'…jyσΫτzφη―j€.ޏσρ·/7wΠϋΛ|ΘzGω―ΞέDώ£’³–ψούόΏa©<ΌΧ†X „.ppΉuθΟΝΝ2—˜Ujpθ© G}H­–Œšy`_ί’β?θLC™_ξΑ/+ ί―pşŠύ8ύΏ£ό―ϊ»υ__0θΛfc΄²Λ―EΆ«Ύ΄'γ?T#―`zΘqώ2ώσρpΫ-ψ8X&οxώ{’/‘lΘsώΏu‚Έ]ύΏ–όO\‘!ώσς_‹pΧπœ,|+›ž“[·‘nO©w Xοτ;!b΄Θ.@אvbُ)1tŸ;ΧΓωΏλϊχθr’ώcχZόό7¬νιοΕγ eۏΐNžΨΐ%ρ” υŸξB·ιsžπ`₯λη²α!ΉδΏ;ΠLs¨ΔŽ2ϋ<:ΩΠz Hΐ–Kνμ°¦ƒhq y{ΩxaΪ Hiu•wT…θΚ[OΦξWςx oor–bŸ/'«‚?DΎlΫ›φ§εα½π½‡WmΎ »ζΖ/“Χ8=Ό»ljr5Ώ˜Ό΄ΎώΰΚω>)οrAiλΉ$Ζ¦₯Φ½Κ`Υiάο8β4ύΏOώ/Nϊ―υΏ¬{u=Ε΅nΚlΘzώ_ηβο­ή΄‰?]2Xϋ%%Οwφ8‡M·Επ–(β}u&ίΑT+5ι9σ‘ztΏθ|Q Χ_l> a„εWj£XΔ—©}+"'‹μ™₯KΖΔNG/vΉήΨ½=ΞtΡ>@/Ρ²‘ώσέψE ͺJ7ΘΦ’•ƒώηGΰυj,ΥwΔ“ωroέ‚΄ξΛ!ˆύFη>ωίΔ{ηυο-ΰ$ύΧΊ)‹ώτΏGδ†βOΝIϋηNωυΰq»υο5άϋ½λΟ$«―?Ρjΐ·‘ίσΕΧ%Πζˆ•[f.–„ωΣόy,Λ‚|ŸΜ'«ΗŸφ›ΖΛΥΛt=žΜ›Ϋ ˆ½£*1Έg6-―o.μΕ§ώhΖπκ§λPtωμqχ–ή{e^²ΟUυξΒgp΄)pΓ?ύωÚ£οΊ‡Uώ°)žα±šςΰ†Mͺ³ΒΓπΟ¦«|Λ=…­όση"£Φr4Ι†ώ!=Όελ‡yŒ_ΉŠ@Hv1™Ύ=`§+ζφ8γƒ}˜ζήNΜψCž–9π­υKY^UΌχ§ΰMΟvυπρσΓn·. 
OΝ ƒpώω«β濁Ύύ*ο ~ψ²ΚΏ?σΕ?£δzυψπŸΕcΌΐw@/koΉϊΫξΥπΊΩω_-ba%Νoσψσe=§ώνy²|€αψpΞB λ—ψP•d^Nζ>>Μ&ωH–xΫr5™NΦθ€³Ή°ΙηOx ώθ)χ}γ~p ;χΟΫi#Έ2BέcΕ=ό?Έ0v%¦pLζΕήΗίΈ<ΛgμDιPϊm€mΌΔ»fΦσΏ~*ο=ή₯ΒΑΎ,M―*VΥ»JΓ(hΖVrF˜§Y–2¨ΣΑnΉπ™ $cΊθς§#KφΛbΆžό§_φω 5ε‡ό%Μ6OμOXϊΊ~©–―s’ΛΏ'Λε:.Ώ0-©όσβKuβFJS‘‘βΣɝΆŽE §ςσ=ί=·ίωbεF»*O0Ž Ϋη‹ΛόNλΧ›σI`Oθώ?ΔίβOEŽ >‚l–Ο0nΫNƒύΧqœG$ο@ίΚM‚ρl_’ΎΗ1C˜Υ>Φl:™Ηϊb‰"0€Ο!Z)“a>Ea€Φ2,3F%AUζ` ½4ΐ΅Œ:~¬£MQGX;&£I·q<™M'Η†7mμC†ΩΎ{YŒΜŠθΉΰ–ƒdAΙLDšIΨgΡhΛο« %‡ΡQ- c–Ϋ`˜ AEθγ‘ΞsφX³cχ~Œd`QCη£N> ° `‚»8Κ3M’† H%°qρ Α&K½£ήzΦΥύη Νv9ρ_kK ‹²‘½HΡ₯d­ΠΑzCαƒΞj*α„dΐ&£.x Δ@'(¬Y€:)eλξ!Κ?/Žl/τbΝσΡr@9%+•RύΰΙθ‡ν%ƒΡΞ³€I–1'‚‘†Jαdα% ξI–œ–£Bq-’υ±κρΨ0ZΖ€‡₯ο1¨Lγžž$ͺ)…Ξf>RbΌ:ΐΈ%@ι΄W^Η’ŠΜ)ΙXf„†5…q±Φ…ͺ/DχŠΝΓ"~― ’χ>Hγ…M(AK]'&rž2–xJ3Λ„vΕHβ,Ή,σ4IγƒHVϋΤNΞ«μ-(H?bGΠ)’ψRUρ(>Ώ†qωοl:ΒόΩΗf’Qόχκ€—E.’w‰ežH…ΑaήΑφΐςD&‰°ό6(ͺ”Ζ#ELNHΨ μγρΑLσρhωΆl Φσ0œŒ,ksh» >Β(˜eΜe2ƒs©ΧMLΘ«ΜΚΑ«h„g,Hx°ΤΑ§A0  J‹† +ddRη£$‰’l_,)“[¬¬‚ϊψ‚ w­ΡςmξG«υ|Ό\E sρκ‹Ε΄" %Ch2šSΚ’’Τyχά)β)΅0–Šy‚ιω.Ή[6Ԙlλ€ΣΊπMς‚¬oΎW“ιδxxΜαΙε²l{™ΜG/•RƊζUώ΅0c°ΟŒξFՓŝ€lί›΄ξω=¨$Χΰ"Ίo™Λ% ½«ˆΒ< ήΒCΪ₯©ρ5*F­MPΪm&`Π>ԝŠ2@šςbί;#³:©Ρ@‰$A™ˆhΨJΘ@Β ή3 ΛH. 
_Ρ‚ρ@΅ΉΚ@xμ: %κDL ½/Ε¨S m½Κ}70ZοTx"§–!2< €Τ‚Tή›”bΦ$v*T d¨ <,”ψ&lΗr‘:p\XО‘O³|ŽφΏΆ΄\'”‰h-©ΗŽβ Ε΅½8“v Ώ™ϋΝρ›Y\~X– σΙλC₯ƒάΒ±"8΄ψΊž£Ρ­.~ξkΩΫ:Β‘}«Ο˜³6p§*ˆ˜Œ¨SΫc €  epΔ@ΟT”NfΆ(G̚%ΦNͺwzΩbΆΣx4I₯ε―“χhPξ|mυ~β Ζ1Η& ƒ…딨½MΰŒΒ±e$f‰Α0ͺΰ2a`―8ŠΆg4I%°g')H_ΑΚ¨”i—9·7ί;ΎώωpRαlϊ@+Α›”Ε,‚”’œ v€'δμXΠ:Β_AΐΠ©‘ΐ˜γ]gHΣβmT8wŒ^μbUψ]]ΟaŒ‡5 TyI@ŽTd€ uΰ ½”žE<`-šeh0g^λHŒΜt§…Fƒ΅6k„”_Cz άrη\Ί4H¨Š(š8γ<„ΘΉ`@S9αŒ[e,qήd*œάl'Αξ¦P_½nη*ΙΈ”ΰηΪ‚€od„…PBP-ZΙ`lLUςyπ‚œeœ‘qτρυ₯³Ηξl•;πν₯©ε1ŸY;ρ‡οζΩg/1Ε`Wφk|ϊώ<™–do/Mρ½H_SR΅²θα–ΎaΤr9ž§ΥxMΑU:f5ZΖUCΒκϋ©p…*(°2Š(³ Ά`Ζ£υ.©€xf8ωQ[fZ+ζ RŠ Θ(ΊΦe„ΤΡΡ5E,Υ»:—‹,eNYͺ³ˆUDJ&V˜Μqaq³n,©AΆO6#‘"Ne23Ιu‡jίvω—†ΘR4^K穐8‘ξh&ΐ€)eNθL‘…RV ‹Ϊ&D@Ξ:©υ62sOΛΙ,4eψΎYN€‰d„Aθ Θsn}–LD’-¬•huΘ‚s,Κ`€2 δ x@Κν’ΣVCτΣ§ΘώPκ ŠSŸΟαTΟWOXeω)ώρΘ^DW”§κ[ΡD‚ρΰ±ΚH&hΰ(˘ΐ™‘„hΈβ½Aš !FhΒ‚J‰*Ac–˜ubfμόΘ­y/2pί§ΙCΧIπ@°“1•EJ¬ΧθΪ@Zad6P‰~ Jˆ) œΫ5Οε^`KcB”Ÿ*6*ͺ†oΏξψYα%2ρ£bΙ ˆ$N¬mϋτd½C+Ε₯¨JAΕ •Φ,ΪBMς4z.΅ PW’)ΑB¦MF’pp$ΰΛ °V»A}Όβτx7“YίΣ}Ϋ΄“ΥΐΰΠΨΐH-μ‹,JC‚½-­Θd€6Ω¨(Hΐ˜149i“ζIvΒ—νκwsα–ΥσΉ²α>NΨ>ό΄§QρΫ€Ρ19μEv΄ŠŒ08¬ΌmXΨy(EΑυ$œ1ŒmΙY£9§ƒ;r NΧ‹Β vu H3R•Τ!0­ π,MˆpN :εbb΍9ΥfZMEŒ ΡΝcχƒ=ΆΤΞήuzϋ— εΪ/ΟEόφΧzƒYh2Pί`σJA’6,‰ΦHχŸŸ{c»vg£ΧsΏψγK3kγM}ΣυΜSI€lQm8όKΝ`±\„Xm %›¨³Q£₯Α'igy%=Θ*‘p=ϋοΘΠ~m¨>ϊ&ΘIε1œƒ §€™6³ Δ'JAŠ’Μ#@&F` Nͺc–K43ž£NλΈ‚[—GξψσΧ4Οη ύN8ΐ©Ζ; Ηώ€ͺ@Δ".Γ +±,%ν]άq,Σ ΦD¬Θ“ψΘ:Υ"_M'½ηWX" =DK!#‰`Φ7\+ °ž'Rγ@£L*’€Qω·Η3½mj>D(8 θ2ώ²ˆΛe~L;Σrσ‘3UΚŠ.Τsg‚ –A5@&±Tdθ§“ΙeŠS€ΕŽΆΤkE£Ε$PΈΊljΊ¦ ¨έπbp{œήΐƒv(r-U0,%C² ‚›’†ΤΘ€‡Ϋ5,ήL JdΈ…kǚ;­0šX–… ~Ι]•žν§άώδ_ΦΗΘzφb§ωΥΟ‡δR ΘxŒLXš΄‘V*Œ¨Ύ˜‚t$)K<KPζAšV?Ν¬Σ&užnYEυ‘·ΛΥυέ‡ ΚS [–0M` ΔYržŠ°U.“@ΌA\π@ ~hj·„"ζ ½hχ0J¬xmΉΘQnΥ–“ όΘEk3μJ)ΐιtFMŒ“:bδ2Λ, d`§4θ##˜­k<ž^Α’QκΘH†>m$lk°Fšρ€Ω₯Υ`ΔO²π7œ-E™%h₯ Pˆ,₯ Νlˆ ζ C ‚]ίκ¬+ϋΩ±aΆϊ‘f‰68Σ·“’GΊΕ’·ψ#Ž2Ψ@4HL’σ6Θ=,zpω€J Χ`ΤFΆƒ4_3ΆΎ|Ά‚tΗΐ1R(Ο_jcΩ»φ- Ž¨Αθ!u–iζau“0Z;‘2`šΑJγΎtΚ΅J£™φG…ΎΚxl [hη@ηίμς"νΖM(ί³―(8fίΦ,)ιI‘ž£tL™+Νη«ΡΧ2@ϋqœJΓx o+5q%–zΆΣtTk2}ΛηOγ˜Οφθ5GŒκM“"j •ϋK‘ΈΈΠqT‘£[΅ –uiΧΚΐ°ΎVF›²δσφΣώάl’=ν}’O΅4JWŒ‡GT0—]άζ§FΙOmY₯[χz―‹έ§ΡQ·ΓE>oΟ\΄ N9NΤ²f6›6©/k ΉQQm<)JώτŽ•l.Αώ€ŸœΏv#(μΜή½M“±6γ„qžIΑ¨ΰ’ˆNZrη X₯£6oρ1κ 8 eo%o_ʍ}θΠlΩiŸ¬ΟψΦweω6―܎f/icηΔ\G¦Ο7ŽTί9“Z2.70Iπ)Ξ%φΑ0•IΗ… 
l*QMΠΞjzΡΩΒΔΡvMσΎΜΧ5ΆΈ\›Ϊπϊw¦TAp,‚ Έdύέ’‰l³€3fΰ›PSpA‰ΠGΛΉLaΈμκ<ΔλΠΆw%›°Π-#˜fΰ "0^8‡Rbh°Δ ›EνA^§ZdΖSι=©50έα€<{…V‘{i Ÿ£¦z­w«« ́¬§³ύ¨`Ό',‚ΈGp‰Š¨A†ύP$ΘOΜ~ ‚Φ}Ψύk„Sΐ1Α6 7ZQiŒCΑ.ƒΣ(#1JΓ’sδ ξ0’ φŠιΰ )tχώΙN'v—Ηο=iq™“K²PΣσ /!Α:²Δ@lς)((ήp⇝β“Q]*?ž6$…Ύ₯>βc RbX*ΖΩ¦)”Τκ‚Ι0΄5AŸ΅qpψ)€$kSβAˆΒ=©Ϋ5΄λXAg½ΡχIX•NVί’šΩ—cσ5y)ͺΏ4” ½+>₯͈Pΐϋ²ΰπA-΅ƒγΞ μ‘BL8Y3αDJ2’αD݁ψͺξcvŸ}Cbωa”Π‡Ϊ6nγΨφTAψWJG»ΡŸž βwVη)ϊΙxP))M”)2D>y#­2Ξ)“¨70OB4_Zΐt&‹,Λΰ°©Λ"ύ4_λ& ©οc` eΦΚ€¦Tθ;t‹§D€n«œdF‘h Ј­Q(fRX ηι”ΜΎρυϟΰ`qΜP―0‘„VΦΜ:Ο{hK)'Ϊ»cXGΘ’pΏ…­Οα`θKΗ7³_›ΦUΡ·χ<‰ Z(@¬Ϊ*& Wδ„ρ7τ\φΡ‚ͺM1e†F‡Vͺx—κΟ}nλγΚϊw=f Zh8],Pρeˆ#:€e°-aœN`ΞHr€¬²Zy§-†&9ΗΟ&―δQ,η°G”Šΰ.±»2Jώ<ρhŽ£θ΄Vjšq2ž—uΩβq§‰Άιcύ‡ €κΐΰό‰8ΨάKΒᐃœƒή1eΪj+ Ύ,Β<€ΐ€Φ}lϊtΕΣ—UϊΎώ…ΖΒ―$¨ŒTΟ fPˆLyˆORZ$?ΠzΑ"£–™“Μf¨h8{ίτΑκ<±9§Yǜ2Φ˜ξwΰΪ£{Dζ” «b₯΄Ζb}β™‚‰uTJ@FF¬†9†sΟ ΚΕ©ωΨζlΘ½YϋΐΊV˜ΐ|Qά#³ƒ­Θ΅ιD9-Q ‘ ΊΉY+#“„*bαΖ¨©¦ο˜}˜›ΰ’ θ?hŒ;ƒ‰θjδ1΅ ˆTFA]²{…α>d„fζHh0I΅SD±, <ΉwL€ΉdZάΜTο” “ζ+£\ €uGF@䊁%E²r S¨y‚₯<`Δ(:ΐcŒ@²ά=^:”ώ7΄γVSλD&<Α TŠΈ€Ω5΅Δ [Βl šhβ™Η”Μ2šΰ(K=…h’wƒύΈ’VΑ@pΕY2˜0ΪgpZΔzul(½@Qτ΄ t"#™2ΦQo€«f)aΐρ"‘@^{{@6Λ"zζDΜζ`S7Q”•³3tͺ7;U?_05ύΣ άΠ8‰΅IπD€ Α. 
Κy˜― 8ͺf DF½ζ”„V Ž₯cΑYU—L>₯M»`j»‚Σ1Œƒ†Π²­…³}YˆΦ€*,5€:%‰&Y-W4E‹¨Œ,C“Η%Ss±βμœ)«žΊΆκ1’›FŽ·@7 Cœ©aΗXn ψ,1In”² $Τ'&aCΑρσ™·‹ζMΙ‡ΪΔμ n8hϊ·€u’¨’:EL΅ξΐΉ{Ι3CΈN:©Δ3ΙΉ H©φ€qv—ϋΨ}›„˜·Ί›‹ήΣu±@θ;eCηJnWBDs!ͺ(Μr!ƒΖl?RI’vVΪ‘ͺͺJNU9‡ύtvUΞUSΏ¬'aδςόϊArΖFηάΝ!m„u }fœ=δp^΄ŽGΔ ˆ<1Đe.h8"LΓqΤ[πM6²eͺ«₯ͺŒΎw·£’Γ±χ.+5 ½κHΚ40`Δ#š+:΅)’GωΦf\‹@mHΞθ3Cu%θΝμ›‹£υΌͺ¨vα4το07`Œ4™€AθH9r@"žλ`§σCΕ0˜³μΡ*ΒΕNMκ¦Su5rοΘdl-Θ ‰9Œ€<Φ`, •™7ΐ«šM@ήABΦa1.€ͺ-ξΝΔρΑWΟ“N³€lΔrrΠK„Qq4 ψ}’Ϊi—2›ξ/ΦΞ€ {gu°mp+Δ°^Ψ)΄.k,μ±bϋ_;…pM"ζΪΐ…  Ftr01±ΖΫhM–HŠΉΓ"J[4Zey»ΈΨ5ή x μκ)¬œi^˜h²ŒΘ(#Κy₯αΌηΙz—aώΜ˜7$0r$K,Γ@rPκ GΙƒ(Y1ΡSΞσΑ.¦-IH7h’πPΉD”εΠΓ's22A•O «X‘Ρ–B !3n˜χΐ;lη›UοŸPΣSRΚͺ‘ΨΖ'G]>wυ‘'β3R#ρ0!7œhά“Ο¬fk:η9ΗLtB΄ΐ`Ζ€‚ už6‘όh^=y»<•Ιx{“UžΩΗΕΑD^}ŠμΛ’° ¨ οcL*±œb&‰ή‚8DŒHΑΟιZΨ"€?œfηνŽ39@°+;BǏkλΨΘq1˜C>LΛF₯πΎ‘;ε ΄šΜJŸq„ΝLΜ׎% £@Ο ž }Š„zι4VMH::‚΅Ζ"?SMŒc9χ6wΤ%yΡξ'@ $Ή 'rS@›€ο"e ―›”)汜§ Χi£šΡζ΅;sΔ[°»ˆ!}Z/7”ς€H?YΎ  pυ\°ΞrƝ3Ύ‚¦“@_u3γ™ζ•¨φB2C%΅˜IDΉŒu:{b§φm˜ζλΥΗΐ ,€‚ƒiTΚ<¦ΠΡ³P%™qΊq ³ƒŠϊb΄@šΠΑsΐρνΤΎιœs4­’θΡK>™1s)―’απU’—IT`VO ΞDq<ͺ‹^d0=ΘΆš#Ɛg ΓΒ©ΆZ}°hΛϋwZ°ž:-©ζΈˆπΜZ8 1=–rcΔPΓ¨`1aΊ'{¦ΌΑt0:§Ηx4ΑKΘgO%ό>9œ [σYY‹oƒ Ξ{ϋ₯ 9>ΗtΣ£Ε²‘S­χ΄ψΜ]Εθ@F9 …YŽEΤ M OAš†]I¦ή‹R+X1P`~&Μ΅Λym*Υ»„Kc³HΡ³,I,κξ„€Nd–‚ cfWΖb…TBQ„L)-?χΚtνΉΐ±η,³‘‘+*ε33N¬E ά-8!F £ I C%Φ>jXKVΧζϊΎΘW-QUύ§;5π°»@Š€6 "`RC$Vuš‘™EΨUr@,ƒu"%P«€hȎ΄Ζ]%HλRΚ„ί2tόxDk]ε…'ο}ΙMΚQ© €'ZNBœsτK! 
fEGΨΤΜ(‡sˆΜ*ψ )vmΪΊψq1σ¬€ΠΪrΤΓΌΧθ΅]‰Z˜w—LuQrMΜα“oΘ½[1B.8†U6‚βΖs—),5‘΅Q°u%VdΓ"σ˜Ά£Άΰ4S¬:¨ΧuΪΆύ_,ΟδΣ,Ξθ­oΥ+· ]QμΊ¨¬η- 3ΥNbqi‚Υˍ˜'*&xβΜ€"šH]4]-˜κ=EΕ.J‚ i†”pΏ` N ²‹O6rΘH ΞpΙƒ‰ΞsuζΔƒ‹B¦Žˆ"YήΉeάωυ)„ψP&qοή–Σ(UήΘmۊjgξ˜ΝI‘‘η{° dp¨LΫΔ4 θUŽEN1€‘γό»Vtώ­±œ}―&OYŒ Ό˜ΐ6_fю,Ό Ϊc¬*Vx5h TΚ[Bˆδ€cα„ΫήυM}”†ηSίsOSFΐ:!±t§ΌnaぜΝΑzqφ#51Β©]˜•Ρ ŸF¨"Uυ» ΙΌ?Φt^»±Δ”J=MΊέͺΕ·EŒ(‚·KΰυŒkυavνΫεQΓΕΩ³κ~·Σ―Π©³l#ψθΏΧR8jή―ήπη,‹¨6ΐΡ„ρΒρ$%FEΑ‚s˜μ…jΓ²\ό0·pšςΓ—yl6O$σy XŒ‚ŒfΚ,€6nHg°­0ώ/p¬8΄τ ΛΘhωl1œ9ΰΝέΧ.ΓJˆ#§JiFŒ‡‘œΥΣ}š±Ι°HVĚ³&sΚυ$γΠ[ΰ:)Κb† €BqPΖ5ς^Jθ°΄ &Z‘€Šd@,’ϊZ2Ψ³€>AάtA#ˆœ™Fπ­— ΄ΠΓΦAΐA3—VΧ2nθΑΥSΆΔˆgQε šz―2ξ–0§JΩΘ±0ƒ± Yi‰eݏΤ+ Κ€ d/ί.OnzfI·ƒΫk{΅Łε°p„RO°ΐ¦Α2ŠJ₯Β―\Hκepοΰ™4Ζ pyXάb:“Ϊs©ΣYΒΖς xΔΒ^½ΚŽLΜ ‹›X0AbuGΠ}š*„Θά‡SœaP…eΚ£ ΟΌ]ΑςiνŸ—;ϊu²π ί»ώ-+,fZqqSΦB'‰³\bu ΨΡΏΘ"“&+ρ$«ΠUa –Π•Αœp8 Ό₯yο΅'€o”G=Γ5“$Γ,³θ“Σ¦Αx.PΑΆuSkjΤ{Χ–X ™!!i2QΑ ˜4‹™tŠRΜ”δA$p yπΡ„1" οŽ˜ωn—Ηez=ӝΐΎΌ<­ηΗ’«ΥvΆ΄½ )›«UΓΈV―˜bΙ­ύu«ͺn„Nž2Ζ΅„Φj’ΐžΗ6οΨΊYt™-ήg7:ͺmαχΏ­ςΩ²­ΚΖ{£ˆα» )΅zH‹υn>Φ>T’u“09h(Λ·t8ρΎNFΥ}εΧΩttΜ&τ³χeΛ­νΆΆΏ²kηυ:‡}σpκ~IΚ’ ­½lΛ‘μΥδλοΐTcuS’½¦’<ά€j/[–εIH` zy„ §ΏΧPNΫ‘ΞΞQu)³χ%[κΡ'Ε%{‚osV” s“Ru!eFDΤ’―Žάk¬ ΦڌΧΦΣώ:[|ΏΚΟ]$…π™wν§HR„ωɝ wEˆΝƒB>Bh…3$$₯α'ΙJ”‹$ο&e$η@Ϊ©Ÿm½œFjCΔZ™˜_σ`νmς>œ₯MM»7Ϊ‹oRό4ŽΏ‘sa»sξ† ±ώώ‡ˆςΆω|μK{ηζΪ[˜όγ»Ψ-\χηϊVΝ―ΪτBΣ­₯ΎΛΈƒπ~*'τΈ-ε,„τ^ϋD΅Ι!Tα^„ ΏG Ίψ‰U»[5œφVpΕνta-7ΰnͺυΉˆ wցŸ_ί~ݜOΝΐe+’—˜z9pC2Ή›¦(ΆL=…δ\o@ΆΆjtκ£:η'ηπxΘ…g~£§›sH%5π…¦1XςΰG-Η³°FίCdαž@vΤ@ a Š&|΅΅Œh•^ς!εKΧ:K§IέΟXνXg‹LεCωΪo=½΅[Z"Κc‰οΞ Άοϊχχ§oτrσe·d ΖάƒV!€glšͺϊv‘ά"VΥsc_ΰ₯² *–’–rσ "ώΪeίDΣΠπ„ΔξZΞ^^Ÿ¨ςύšfwli·+yυ]γ‰―@ΡQ²>}am©FκUNΊ‘dέirY±‰ZGΖI‰FΔO€nV¦]Ά©3ςžyψ+1mΞY¬ίxkW¬₯…Ε».„•Ξfν{‹Θgœx•”j±VbΦo.F:Ξν²ˆ­_‘GςI,|Δ‹_Gk?9Γ[­­Ρ@y‘6π©d°ΕD΄0.™˜,ΔI Ψ=ηδa˜§*WΝ©+?6ξmά?»θ—Ξw©λRΓOq΄œΚ™ӏ±›υΫλ Fΐ‚œs”ͺSj<₯ώQͺ™uΟΨ€FΘXΰ}PΪZW5ή™ˆš|ˆ<―Μz:7Πη§Ω‘ΪΘye;kθ t1ηζͺ H¬ͺ3N€‚:c3¦“l%ωTπ5Hl PΗJžξΗΞ3τιυ°‘+ψΦ«±dŠ”t›ξ©8έtv=•ήΘV*, σέ1ΦRI){f©ͺϊumβ‘3=ί<ϋΟLηBΡiMŒ$Xψp–‹λΫΨ}Ν@μΒ—LΚΪfα"#ɝ.°Ψ]΅?ΦFϊ©qκι/,,ΔEΐοΠ°ΎAαN’κ=k[zFΰπR+֍žΣE‘aΨ;φ)RΖfγU=ΫrρpΜ@’'―{“BaΡPYξeƒMH‡»ΰW*R3_h9‘ΟζΉΧΖ+«‚κšΒΏί-J+UXMΡφ|ρaΕ9ςQ{ϊ|ρΜlŸ¨4œ^"ϋδC·IvΈP•οBΦ$ΔΓΙΓέ±ll—Q­—#%ΝVΎpvMm¬4₯ݜb¨YηŒVZEbv蒌Θg ‘mUτ Α›δψ}’:θRε 
[μ,7Ϋ–OσΧ·›Χ!q¨±Wy―ϊΗ[θΑv#'X©ͺj •Ή3‘ΆjA4+Ξ(m€ΗoςΖeΗzom‚!Š’r=Hέ2Χ §,( N³|™(κŠ7ω ;€ψΰΗ$-ŠήΫ6ζΔwφβημύϊτώ0{Ή[όξ‘Ÿ^ωߐΦ$Q ή―ƒζbεβ{ΕΎςpŠΊήz2ΞpΜ,Uκ5v+’ΰˆ2άΐW‡£ση§³γ^ –SϋΙ;Σͺτς:R0XΒΘršg³,<ΧΨ­Ϊ ΫJ‚ΧQͺι ααΙ£°O9ώ=—~Ω·o§ξ’·G&τώΤfί·Œ*‹αΈku*΅¦ny₯ε_±LwτϊzΌT“38‹ž―γԜΐΙ&ͺ&œϊ{h―Κ8}(ΔA Ώ_ΤRι ϋmαeΩKυ¬Ÿ^„ƒ%Α4X¨²a…ΏzνY“’ΕSϋKxφ+ko€υK%ί0ΧT‚έΉ”`ɚ(fY-εnΦ$idJ:ΔΛώršΞ™ω'λf¦ήΫQεζ άL.ͺaμK·T`€C‰;¦ΘFξZ”Κ-±-6ΩPΛΝ`Sgη€1Iιρ3΅»­ξΧώ₯ΨοͺΜΚu¦zέ²\·›-5λξ-#€Ξ›pc1ΑVMuΥόKΉ[―KŽ;°ΤΨ).ˆ WΨμ¨OΎ «rdΒS—‡8㽑ž<)T ͺ¨"β° )­χΡ3Ί΄pδfQ…pΒΪθ»6Zo€Fω²ϋύ=›Θ·ss;$›C‰³Υ§ΰ³ν•¬ Ξ€Τ™CΜ‚3)ΛτJ“\B³ϋ΄…ΫοΨαoάΓΣ±ΉMΞv@έ ΙP²ή;Ν*“uYΈ*έYλ­΅Κ°σΞ₯Θ+J#¬’wΦΌωlΗύοίΑKOg&ώγ•AνωΤkΏ Φpk;FhχΒo57γ#‚˜…IO\J ˆτp9I}΅«½­0χ‹f_ ϊ†ώttFΏfφΒw|*'™ΌXΑεX2,ΕS6Zd–”0ΜΐΫθZ+ω¨gmb·›’Ιύ@Ί΅§]ΝΓ'­ϋZ«έωπ[[nΥ"£·ˆώMΊ”«+¦Φ}ΚEςz€χ @6jερjT9“'eUoΘ.΄­Sxΰ‰MtΩώ ‰˜w±`—sΙ¬„Π¨jHΔ¨DŸz²1+νΌP€ΣlMμl5RγήGηνλ&‰qNς5“vή¦i¬‚!ς^{tKb,šδΖUJJ9@κ­SΚΪͺjΡ‹’JΒ nδ0β;Ά£n:5δ:ΐάMΑ¦u¨6―½fW΅χTYЧCUV”€ \ œ;‘ςNyΫΚη§η‹“±rj7Ο΅Θ!SvʸМv&ΥhT4d# :E+²tBφλCoY ©WΝΏwρΥs§ίΞ§†AŸrk‰ NOΞtWœORE)”β¬GΨh«€Ϋ―”œ…ψά‡ΰ‰l­%Φϊί—YJωΓ[½=βΡ‚η›6©OΙοΰY·j„…Οί₯ΈωŒ<šV¬΄ e#‡O©v}›ˆrΡ@WΣσιsηΧn―δ“E”Πθβ w½ι•C4ξF5)ΑLΙωRŠo­#€ϋ•λΩ Ϋ‰φΏrΞΔδΟZδχS!θΤDR/S2ptEφmun­:'dŸΘY„qΘ&αQWkΌU5'έ |nς%ΣΘέά£9fφœ”‘GP(’‹XΎDNmυpCΜΖH³™ΐŠA#€dόP%!ΆΝՍ)Χξ₯ΘςέrΰC\Wko;_ŽiŽΤ³δ‰W5§GΥ^ƒκιFοmώMx¦@ώ‘žϊ{Ρ ΄­mθBΉΠ“α&jς2«ΝΩW»"—‘Dι†LΜAˆS=Bp Υ)Ηhoφzn-’Rzͺ&VΉ# ΉwvΒ\$X³3ΐ{‡ aBPƒP©θΨ p΄Xv#§σλ¦Λ+;¦Žk₯χ:£ΩϋR/Xσ/7Ώύ .vδŒbΫ9ζ,…ΘΚR+TCˆΖΚΒ%$κΏί”Ά’2iΎ6εώΐŠξφ„RνΉ’…΅ήΨΌ¬eω―‚'g(HB)ΨR4¦f²AX–»_±ί‚ ‘…žr¨¬ ‡ϊd*―\‚q,€+Αύ1puκΣ}{{žf=؏›ΉCςΌι4[>ώcEΎΣ‚v~OQΡLΟY{VK½4Qξ±UqD` 4ŸKk8x•‹‘ϊΐΒ* ―p¨Ίθύ©Ϊ›™ΡqΧo7ηΛΗΨ‚γ˜6ˆ)ˆ­ΎZg‹ν.F-|N€t*θk6+R[`ΎXro @p€˜xΫ#ΘΟόςΆιΡηβyφvGevϋώΩ’} ˆ †Δ¬ΊœJ΅ij6½ζ΅π eνTΤΩ#KrpbVgΕ”G‡χσζ £IyeyiΡΣ½W‹άΌΨΒVG›\’ζiιΒp]$ڐ€:Ή·Θd…=}<βΉζ ΟG΅όS“ykSE}{ΙdURp½wEϋHE•¨αˆ6ZΈΗ?B ήjφ¦Y!2cΝy|{~ςόύΈΗOΟI½v+-x6ΐ=`Ž›Ι%#§,‚ΰ’XEΰςά”άe&U1,5₯‘”³ >»gZ|{]θ7θsd”ΗhwςP*’jFvH)ϋ&•XΑ(3(Z*“£©Ί” Χc²TΕu΅Έ#Ε͍IʌβΫΩY%¬#žΎκΦΐBCΖΐ‚KΕT―aΊ › ~Ϋ‚w%‡Έ/"YΈWΥ-’rΟg, ύηΖ{WζνΧ)<3νJtΊ&a.,RL df ΩVεŒ₯”0 `n»Ρ^Tsή›|ψΘ°…lΪ•«|9{Y%%彟ŸZ\Δγ'?}/Rυš³T` )”ŽφŒ€ŽΑβ³PΣr.οrΆ΅!ν“Πͺ[Tx2–1wΆ(9Τ’ΌΘ²j’„ΉΦˆη4%&'6k 
@$₯Š€¦ZΉΤ)€¦•D‡τ;ΨG“μ_Η…‘pxςۏšœ­Τ\KGb­s.ZΐB=Γ½°V™ΌΝJΤ„‹‘QΦ ιdC‚ξΝ—ςqp0TsZ–{»E7ί ¦9μνωw»»ό#Όπ›ΩθΤϋΝL’Š}:Ω«?hŽ6Θζ»½’υ΄~󍐦ξψ­Ύξ~χˆw΄νPΧ/Ξ6Gu?θœλ` wsLτrωts‰€ξ Oz(αΡΈK'’2¬΄%™ΰ₯Ί(€€ΰ‘θΤs’z9«Τiκَms―ςZ[1φγ³ς—oάξ%½ίtβmζfηλναά‰ΥZΏεUIόωyΏ_5ΠuΔMξIJW]MΙ“EΧηEΓ§ΈB©)’AU\UVBU‚?τ€ζ­-Γ9ΖΛ»Yηι-ΆμΤσ@)’c9ώΎl€ΞΆΦκ|UΪφ8r+ aύoΦz/ΊΙ+=]δΚε£y™/žιiφ― Ιζμί7GκΙ;H’(|$ΤΚ5«`ΓκV3&9¦ΦX``Ώ"*ΕΈR%»€˜άhz~_“ ”FΝVnύ―W~Έk,S²Ψ–‹ί©r»><Πύz]σ.+J»?τρ³ήΟΝ―Π&œW™’ςΜF-wrπC˜(Ψ΄ ω Κ¬Bf₯jŒΖe«U'―btaduΊΨΎuαϋ³Ό7Qc3°9E?<ώžΌ¨«-¬ιN8;Ω5NΔ]ͺΈ’άΞ±ΉZͺFxŒΘRτΡs@FZw<œN@΅?¬I°6§Ίg»ε[ΈυGkJcυ"vM·97iΛ4¦UψI PDsαΕ1&€ΪQT‰ΰuD\·UΈoΗN~1‚χ·{ ’Ξ·WεΤ™eo€“₯J)§*7x‰«ΙHkΐ#—`΅φ¦°―ώ@χ€$kXtτξŠ qt‘5^n―Κ4²^f.rϋ[’ΞrΣ:b>΅Yh’»τ˜FalΘ,ΤπιΏ±Ϊqυ\C`UjΣߝ*ίΎΑv`Τ@˜γΪTPV”QV?"§oެOe M Υ"!Ž#ZŒ&UάJ}xαΐ λfŸ‘}ψΥ­κ€™ΎύΊ«σWάm―’Š[Λ9Α~μHcYΓb±]S—ΖΠΘΐΨJ5εq˜]2­¨ΌFQΎfυ…Φ]jΎ£ωΙαlΘ©$κ½yαnVβθH€cY ™\’ΞQwΛΊχ œ)?Cb£Zα*³—-Wή9™ξΝΩΟx»Ψ:XίJνΒ] ‡Χ>[άΌΏΟΞάZ \xCΈΏ}΅‡„ؐΒ&t.*UiΙ·ΩΈˆ½l΄Ι4λ%ΰ7FόΦ8(ΐG.ΕέΟ o~ P¦–)©ΐTU]lΕ{$’Νvϊ ‰9Κ=4ΎΜΪε„‘²ˆž…AvGΔ“ZKcμρŠ倬ΜΤWI‡S·ΒˆPOIΙi5°ϊδ±2Κ·"θDN7—KAz—” ɎHόΉ»ι­ΔΡeΗυΧΛΡΕΠτ–”"biFz₯+ kŽΘ„Š΄PΠ–lτ€NI˜KBee—Œ²Fr¦ΤΛV^Ύ§΅KΖ&_m>Œη£Βf}¬σφ8ΓV]Μ:|γSr¦ž@+$—uΰE©₯΅Ψ»«έΐ- zμ%“Γμ#r(4Œœ<;a‘Ζ5/ΒPz|(GUηaQQ3t-Ήb§£hοεPΌ·ΘWZnp,i§#ΟRU₯’‰%sμν ρh/»ωΙrwλSΚY%«³₯5kŸu,΄Jl2+=ιΞΘΗ…ϊžΦ4ΥOu":—D΅<-k•'Χ,πδ\ ΕΘώΎˆ=’oΙeΈ…p ͺΆ”"ΜζD£E8I±GͺM§/γΠςω›±=πΩ!Β9U½¨ϋ“υctV‡Wr«‹Έα_|δ{•Αρ?ϋ•9-ΪIeΚ髏(§Lˆ²@―1ε’ιBΪo¬+RΨξ”/a Ν"ΰf_€εa@+‚χŒ\ZξU‰|Ομ νΞ‚½Α„θ$Υ¬Ρ&$ΗQ—Ž0¦½vmpΨT-·”=Ω ^/tdqΞ#™ΦΊ΅:β垰‰ZŠdω—X²…ΞyVο ϋ~z:.ΐ©τς–»Œό\Δ—-φ^;&ΑΪξSœΧΫΛς©―Ÿ—§ˆ―›φΩ4·r‹ΉΰΥӌΩ%‡TBΉxΏžμΩ °λξόμ„€ύζψχ―εζcΰΣ>’δήSΩ=ΰά^sΧΑκή αΎΪ}Π x·qΑ,Eρx³<ΆΔΊ\ΧY‹ΟΨε>οΰ-LΠΏ>Ξ_ΪbFη tΔ"ω>«ί6Z§­qψJ˜\gΛε†ο†Ί΅ΆΛ†tΖnΞΚκψwR7v.ǝΞVΆΕKΧXΞ‡«ήω~ηΣΎδώΰfuωKϋ”οϋ’‘=±άλ.ΟΪ§εŽάγΦΉ½­ώ’MNθΓώ-6yυίo…ΣΩήΏΝΨ>icŸ!±=g=[8\ξkζxNFŒρ_=ΘKΪΏ¨r7ΐΧTIE/Υ™"lmέiurΥd ΅bŠaΡT/¦υŒΜΞy_λTkpαΟρ! 
Ϋχ°škςS«šJωnΩi)VΑydFŠ%ΕΆ-Y_ΩƘIτβ•άΰ·ΒZu?&$‡χΗηSŸC6­+έ΅φ›¬eσ¦ŒΡ™ήίTΙ5vΚ^ΘPu6lσΖ{δΦўyφΥΕΓ­ ς’dπ”Ξ§{Γ³Q’n’ …‚Ε\KΗGdΉξdΧ|ν–»wuχΩ„~ϊΐ}˜ϊρ½!^υ°R"Mέ:l‚½Ό&δF1)²]„iŠ/ ‹ΣΉ±±0άQr«λΞhΓά*9¬ήθΚΜJ+ο§h¦ΏkU²₯ζˆLΩq+₯GnΤϋθ”KRuΧJ°η h¨yέ»όŠκΨA#ΎΧτaΘƒίψφ-{Ξct΅L½/1cς«7Ν+i{•π~o―UuOγ‡Ν ΜσλΌλ”¨—Ÿ‹―ξ.=μgU€Yr‰Μχp:ώϊ;†Riμ]L‰•xdp’’ΙΒKZΪ }@ƘS± šΆdzκM…b―U­ϋμ όλθkϊžΨ2<Η”Ή]œκΐ=χB\d:VέY§‚•ζJεœΛ*²MΘΑ„_b¬|]:Γo}W…Χ 1 “|Π’θK«ΚέϋnuQΘψ‘:j—©z!Qs6·;Ο…Ξ<ϊγΝkKB+Š€R²›ΊΘ3§jr+Uͺ£\Υl]m–ƒE$Ώˆ*@s‘:}‘ώk}#–‡©βΤ§SΑ”3ΜΥΌk©ε*κFΆJε’3:ΑΤz(>θXI*¦šp Υχ΅|}t›¦+oίjE&ό&ΥTw»—nϋ·r€Γoό":„υ}1{ϋuχ:šΥ_ΪƒbƒΝΑκ³ί ΈΫ9’mžΈ!=Ω5xpW%³Sfo›φέSZ―Φ}…ρσ¦τσεn‰Θ±o2Έu­6R„ωϊ‹u׊W-Ϋι’ΆdRΒ”.€ύpϋCxΞTΤΩ‚ξ½SΎ`%ΩήDj©9G”ΎΠdnPaw0RŸσJ•»Λ?ϊ?8;Ž’Μδ,$ΖuQE“mΣtm-9kώΕζΟ‰3WH*1|γ4ΌW₯_πV;ΛBςϊΨ7ήΛυͺ€ς„IοΎ°χNl=›¬U«η­²"rX_½΅Ι…ά\³L:62¬”ΙzೆŒC’δ‡Ν΄2Kˆΰ­H₯ѐφ«°”`½4Δ™τ 5Pλ ΧfЍ’Y―˜αΨc0.Ω`ΊUVY§δΏSDN,½ ʬƊ³W§SΧνˆE‹²>k?\κρ‘Ψξ!ΧΉ#¦KΨξ™Ÿλγβ`-¦Οΐ³.‘θˆV€Ÿ4˜;z`Šna:Iμ;–&Mγ΅FtNBrdόθZΔ―ŽονHγϋ}9¬³ˆϊ°3₯„―nͺΚR²ΰLθ€υΕ!±R-[φ™ΝΎ"s¦€„$›vΔg‘C¬5ώ¬s*• ία!¦Ξλ‹­cΚΜeωOLš™>2HνL"˜½·Α›lMd“*R±³Α …½έœ1»ΰL-†μ;¦΄ΦώιIΫιVύέωJ’‹>WLG‘ηLMά¨ΐψζbθ•W£₯hηEΦcύ½0βSe§ς™§>Βsγ›Α‘ύŽHŒγԘ—υ?ψυΏγΕ[τRw8`x«Οω›ΤήB0­HnΡκb€@‡uΐ„žΤ^yΠίλγrFwš-κ&°ν½΄mγύ l;Ρs²fgΨ2·mΥ/•Η<mpάΗξkκ XtΤαΐ:ƒ)J-)£­k>K:!ι2٘s”Υ€E^©ΰάIφ¦@χ`>vgαΜΠίΫςΘX¦> ^ @š³ΓiU­πή.›R’˜:~ΤUj0(η½j֏AVa¬‚ρ„5<Ÿ#ΛLδΦ­δ«/ΚψΪ(ΕΠCΰξΰ™΅νYs©’`X‘¦bπVά€"a`CŠT2Vύ₯ŠΆ½†«[—mdΔ €ή&¦¨[·ΊZ[ΰ―zδ"ΈΗ&Κ­>t%M―žΈεŠΔn>υΠΗ@Δ|φΖχΓo.ιΝ\YJB ƒo=<-aΗ—!ž;—ΝiV菾ΉKxωpenyιΰφSiτHGΩΞ‘οaτ•}KΟοOmφύζ ΚιŒHX•‰baa‘0Δ‘IΞB'YλD—Ί:–ƒD#·XΒNW)d3*3ςBt>ˆŒμH$ώw‹εΘο;² Κ±–ϊ ͺ+Ζ¦θΣp죫nΘΎJbtφ·kίx…έGwγ–Œψ ‘Ÿί†Γ²JŒελlρύ~—£u—ιζc³τ; wn°€s…FES”lφ±y9£Gvή΅25 τΦ+7₯Kd@δπNa"ρŽVšΕ½gϊ)ZίΧΕζžϊθwΉ»+·wγ}Ωϊ1_|yωƒ'ψƒψklΎΫ·Ϋ3 Εμ¬1:«sXyLB6‘jϊS8=1ηrI“κ½H­TJΨ\š™œNΩv™”0ˆ»yΟψΟΥ‡ί}xzη›ΟBυ• *IJkVΎ +<@„T+λΕύ7φ0ΚΤc6Φ³*:’k&ΫδΏfn²Ύ›)ξΝ&μΜ¬¬ή~kΨ«„ž)XαN17ͺ_²ωδXΡ%Ζ}4‘^ΐΥΆIZŒŠΑ/$}ϊΦΊΡβiΣ ρQ2p7<ΘοˆglζνΦ!¨:$‘h²•’”‹gߝ³-Ψ0DU#ωΰZš–Θ‰-em“s&…Ρ3Ρ—£©Ρ’LC½{{yXΎρΛlCr~¬ ΅˜Ώ=ΝN3 Ÿ-ιω ρ>ΫBot·9¬ΎΆ]eθΌΔΗͺΤυzρ}ΒϊS•Dkϊγ+ΩIV=Œ_Ώ«Ψ€ΐsξ3ΝδηΘδ1Š*λ΄AΨ«g?ωi9Nπλƒ0β,eΙΕζώ3Νόm@ˆχsXμbΦψ‰ƒA aO䐯XυQ±Υ•pt8Τ;"U“‹ήΥ}·©λP₯Φ’DΎ09αŠ,½Yοz ΥΈ 
²1⇧{‘Α“^ο+Ο;·γRR±cXW#\ηNνδOμΜ½}HkžŠkλQΎΈσ–#Hψόv:(~9Ψ»μ­Χo†―›π ­Ό?άΏΏˆάΗ£Ti‰L]0Ό‹6yTP.’ξ^ͺΰ`Kύ*‘#Ϋ­PcΗ[Χ=ΗdŠM°f?o_Ιj4F€Όv—“«Θ„’b)xBΉT7±β^kψ‚ηI9{Αvm•/ηZΟρ-Ί.ύΗΥӐn@‹αΌj砝iMŽ€ΩœΫͺd±–άuTΎ#ΥμΑQp¬›+6wοH„Yύ j,:εμ™ιRƒH±]*"~Tj‚9c&Z¨AΦ`—S°Τ\πζ0μώώ£«ϊΒΰηm1½½’b4,Tœ!%WΙϋΔϋSy’ζρΘa)‹0”ΤKW‘ψ)ιŸO)šFψqΫΐ}(ώ’]š`š ά’%¬YKˆ, ˜>ζhMΨ²Q5R"š %b­ΐσi 8}WΏ*4φζκρο7TqΈn¨αC•όΩφ,2ΉΊ4§X‚q₯Ω¨°zΉ—Pͺr2T1Άzn†₯ύΡ6Οfͺ‘"Xp6₯³ΑΠΓi=Ζ!ζΧG{Ήϋ…QŸnŒ›~πΐœΘ\—αΜ`›H‡mf%2Ί, Φd’1²υœΰρIΔ tJψϊBΠώύSξ‘NϋΦ·oYϋ’ 60B²ή:δLώιTnΐmIy©¨μΙ"£¨T—³ν.LζΆKo£s*ΉηŸ•_ΟΞΛϋvΛΨoέT­©5]‚ΟlCΝΨΞ5‰ς9Ε %ΞU€’ΰΆ*ΰΈ6pψ½ ˜{S#ex§σΗmYώΊΞ~=ώsgtλ–Plλ¨ΊΤ%dΑXξ‚±jc¨)m:1π:§"t2=6|ζςP¨˜JnΌ’ζάψ07ο’Ο¦ΑδVQs΄JΤ‹zOQ„°rIΐp‘0ηžΘΰ% G.­­§αθωv ό”λαuŽ™ώ~.5[‚μ`iΟ΅X«‘ϋοHΕ‘’sžf’o‰½¨Ή šζ»†+³9Žδϊσ—Κχ'\ΓδVΤ!’fΠd ςω&7ΞRρ ΄Κ½³‡Ώ-ΎGΈαΔrΏf²1τΦΖξζ―τΟw!l,ο7Ώτ€=₯Š*#•7>4<ΐΙ'"Šέ&…5Β–RW†#a–χLXΧ ½ΤŸΓτπ&{«uͺ ο `zoΐ;*EΙΞΘΑXΜ½Œ¨+γ‹±½Κ%μ˜ͺmσ£c8%!­1‚©ρgh1Ye„"*,Ϋ"Žk鞴"€WΎΞω₯2&WWr6t«ˆλ5]qκwxάw’—jsό±ŽοτώqvͺΦε&·V4)§ΤR„«C4˞Q›i9#gοΝ„•‰_FQ Τ„»›E°£σλ}β>=ΨιΟ“T ½Ddπε1hξZ”vrΆ‚,ΓcΓ!7§ή„Τy¨ΞΏΤ2O€ίsίc†“ΉψΉ8VΌΌΎΞsυ ΛύΞ <@£Ε―»ο3ώ±Ό3K±*Ξ`ε,x .΄t$ε†zEJZϊEΪΞ5ΓιJek·?΅›.wή~(Oψ_9ζ‹γuαcNΏιIΈ(I†Υ½½H―’RENςcO)†¦J YΞIψ"ΈΣ4ŒqϊΝΌˆHτ[Ώ_“°φlLΞ"Υ³pΙ'*’w²@!/ΰύPηΨ€ΥΑΖ+nJ.'mv!Υj|:GΖσίmIdώ1:R©i]pΏys–Λ‡‰ x1Γeʝ¨Σ0¬rD ιV Τ’τ  ΊΒqIΐΣc\ΎmδΜΈ―ΨΗΗΊSΐ{δb'ΏJKΝ`K¨.θ” δz‘ϋDdIΙ“ˆP&nHd›fͺ¨Ψ8oFHKVΤ„¬Š’ο₯΅{πA«¬Xž΄ήoΖ9q|en†B牏}₯›ΐq±YBLΥ*Ε6ΛJΰHκΎ’Ά!δlEθZuŸ¬νMϊΈΖu―šœψwσ₯I˜žŸ‚\’#nίΙ@0\lδ›bBP$κΡH,a<­eJΙ"•Φ°KšR΄§·ΓFϋμOˆ[‡₯’ΞΙOΎΈΡqςΪe€M-2•Tk!΄¬CnPa9W9xŒRJ«½‹…*Ό 5˜k» λΣό½Q‘©Q·ϋPeΑmώSϊL*­AθιtZώμRdΌ0£qϊ½ζi ©!±Ξ±εΤ’ρBn―-Ÿ›Σ46ι$ςΗ’\’ςϊ’ƒήdFOρ–ήΔM_ R„šΨšJΞΖnϊ»X8ŸΙZx,ψqΟLΜBξ˜…ΞΆΜ} ΌόόΔιuv΄·‰'žΔεΫν»ƒœ±R»Λ[8™Τ5G D¦z– ιΖuΆUN‘bJUΌUœfΖΦ>} 8<ϊπίV??Ήkϊί»ύΈ5@&gm€…Έt €€ φΐm5œͺSG.ME9π ˆϊJk9Β Υ_:_τΛηί―τφΨgCυά–]ύΦ—€β%r,ˆeQ‰LR…1pιZθs l ΉZJΦds|”‹!t9wkφΔ Θΰ\oBέ”H‰Œή΄ΠΩd¬mοxO½VIΕRξbφ₯p¬1tŽcωvτμSΦi՝7rv-€LHδς₯€ΞPσNUδ :lQ™0Ή8Χ,Θ½ΧvχΆ˜έl«ΞΗcͺψ©ˆC΅H;Σ=x`+α°Λ9Y­‹0ΈTίl΄Ζ9Τ Ρ»Σή²Ο~r+³·%έΊΟ«sΉΝΑΤg‡=}Œy%-5č}0X8ΰpΧTT­'Ψ)€&]mšΧΚVnŽ Uj9s‰WŒΪl%ώϊωQ?φ›jΫz Ϊ`7Y'%Θ灡©>υd«ΖΈM¦a ’‡›uœb!Βf£Ξ#tRΒΫ:ΈΪε–ΖuωH˟D‰;τ―7O,J‡S Ϊ'_€str"Θ^ωCΕefŒ¨Ξ»›—Sm 
q~tφ₯φ¦βΜ°?~λΦGΈΒ€PSθ}R¦˜j³ώ^c;q­πΏdsPΩΙυ˜΄)Γi'γMψΊ―ϋLΌwm-·lμΐx¬%΄Q—Ic’ϊ­ˆ( (‚Β ΰT=’„€€S)€²~e•§Ν>>ωζ;if%3φyH ‘ρ5@χĜœτ|dQ၁©+ιΰS†»~Ϊ.g―άξπϋuφ"—†' ά…ΧΦΏG‘6αΚpκλ£I© fOUqΓ—MoͺΊx|cbΘ)q—άα΄A|Œ`¨q\ΌΠͺΟԘΟ[ξg''0€2ΖΖjͺJ–JKΥF²—VŒ;§€€ξ}ΡΈ‘BΒod"νu₯OŒzυτΧ zϋ;·_τ@ͺ€yx΅ά€›mlBΖε ΰh3);iΉ6λBΒPϋΠͺΕώ—Γ§Λώκ†Ύ4·XlΰΤΰυά»²‰Dς64λ°ΌΩΜ0η«΅€<.θ–Ωχ[MΐΣμΔA˜žΎ~JG κ„άήυ$' 0°eE" ϊ&φΪDa*„Hxp’]HD.ώΘϋΫμiysνT©jκ"RG%[ρΗΪj,M]ω¦ 7­kδXύ@mH,·f€YntCΫθ§ Ω?ΥΠΊiH³†’„#.Ψ<5τEγΌ-ΦΉ\ΉHS–>œAΐu’ε\\T€[υ1Α]rπΆ£€mlρ—ωnωϊ4{»½Π@«\Z‚ΊΦ$Ώτ"—ΨΫMΩ EΧ5φ±ˆEΫ ΐνΘ}U™½HuΒccΉ«~›mω–w–ύ$7ΟΨ²)Νκ0ωIο@#ί£‹Ή²Xaˆ/HΜ«0τ²άK,Jξ}ς€Ύ»”jsΨ.6΅+J¦φ$”Π*μπ©ώ±"UŸ‹y;"'ΥΣσ ιTΈΛ«A‚‡œuώIςK{Ί=rIaK¦P“ΠsH?šΠ±±0 KOΧ1;#7*Ωιj΄ }h<—By£υTW$ ϋπΝίΰd]΅ΔΒ * ¨%­V²E Lξ ₯σώ‘‘HΞJ&€ΩθCt—[πΆ‰@c€•ΫRvΝ…2Œυξ’cΩΎgRςδΊa_Εh]ΣHΛ‡nΝϊdΊ΄ž;*ήr˞˜π?ι0δk θC“€8‘mΆΙψ$}όdΈi~rZœcΛ€r~ορ‰³εR »oc-;ι«!g]iρ₯αU–ςέ’–Δ!ŠH+ΰ"€ M5υ‘Τ="™Σ©ΑfΜ.WξؘχκՏ 'WΫs6Χ`»Eͺ4€ ς>ƒC©Ά  ύ3° Δ½J₯ RDBΐΙ <7Š7!›™/oΟ0ϋYΧΟΕ`"ςœdΌι܁ƒΰ&΄ŽΝ†`ώR,MΤg€t[Q} 4œ]―χ₯πήΊk)ͺ4—[dπ”±ΣkL™lF >μ l”}sˆ+yaBΖ›ωεvEnwΊ‹ρνaώr.΅ε~ίgOXπϋΓφUύχι₯ΝMρ•‘ΦJy£ΚR·ιD@Ψίζ\oMD8΄\½"ΣνΉZνΓ)‘ώ»›|³J„‘₯:Έˆ&]Ή#νXge‚ΏΙR—ΪŠq’υDΖχκTΐwN{$£ΗΉ„ZΞΦΥK;ΔŒΓ°ξ–Ώ^ήθηΉ%άyΧQΏΪδ™ω°m½7νΒΐ™J²Ύ—ލπά€θO]€,iαž…9€‘·;οθ >ΟΏσ}›-ξέ§Ÿό A,:YΡA Β―g}f9πΞ«ήR•ΦK ΕΣ3—tlHq¨’|ΖlψKBj‹'©γNΘ‡‹Ξ[oΆJ…EΌI;όUŸ0Ψδυ:rˆ Lpϋ/V—‡$δk¦8Ή«'¬gEc—c‹#‚‘πŠqeg4!IFκΟ’ε]uhFΐ'†ŠυΠ6ΐ9dK¦c ¦”"βe¦Π«μή@_Κσކ=}W<ΦΚ/ Ρ­t[<ιΠ{–9έ*Rλΰ°B€-2JǞ«\itF¬Π©§~aΨ›ςθO {'Šœ₯j˜Dό€`Ϊ«(š¦Έ[PΙ~4B₯¦[^/j–Υ;,:μΐj[bDσ΅Γ’o@†›ͺΩ~h@΄ΔξωŸ#xψl+υΠ¬„νύ|J€ΘijΡKdGY³…%ΈJ X 1©IΎΘ5‰Ϊg#-TŠ`D#rΖ1ͺ$McLB«aέέλ”Z©ΤœνrT[δΛάmBα@Q$†α™¬3‘•μrΡΞΈR‡a!!Τ¦†ΚgG°ι;–u˜ϊπΛ&Ά gYBΝ½ŸŠΞ%ΔZ•lU*Δ,ΌZzP[Υ^»ΰpŽάAͺΣύTK~ώ>\Κδs½κη:ύΉDGζΔF₯X=ό’u|iU“ζΰˆ5³FE$­₯« 딀ΌVϊΤ±^ή3Ÿ¨Φ—F*Η57Ώfk9n€9#λό™ΰU’EΖZη@₯cTIZ…ŠM'f;‚πΚύΓΣ―ΧΗϋ…tζ.fZEqœ©uΦ7ZX*r‹°dDΣs7Ά‰ΚC§š΅)$±hKHrN₯Y4A+²οL*β»_ο7τ¨›pv‰κ_SVcΑ%φ₯^--ύ€π†‘3³Χ²£¨5W4Ο0ΦσΆΔΏwG·ό2€©sϊl{E ·D”K—sV@ΩL"₯αͺ1l³’ ς}鍓‹…,²I₯όΘυΦ •AlάΒΆςtGq+O^ˆ!| Θy„„|«0«ζ]š\Θv ™rΣ5 ;r` σŽ΄\S©ρ:υ΄KH|Yηrψ{(NΈβk›ΈXˆ i1vI]uhν…«I4Ί[€η.ͺ^›ΤΦιΘ@ΌF©–—;:εV#Έ{\ΐ:^ησγ•Λ“—¨yέ}λžlΫΎU‘^‚™"€Šά>!ωkΑ%_‰"œ¨Ž<ŠΉ<ΌΣ’έp4“©¨š•. 
–: Ψ΄œ{Tππ’Ρ—š‡?―Β₯P!¦#₯˜jΑbφz΅[­σχ—ΓιnΑ}ΛϋG――χο/›+'Z,θ׊he#ΒΉmΥpΫΕτειίζσ·ΗέœJ/ίiOt­>"©™O₯ρV—Λm ;R|{ωuϊ^π‘χ}ΖOm9*ΧζΟχ«VρΝ·+‚Τέ ΊSS[YαΕζϋ5©Ά4jχ:ιεϋ‘κΎDΥσ“ηοYͺ―V}4 W!ΙS γΎϋk9,λ†Υk3Ζoό«Μ±+v)#/jLJώŠΫŒΦ_½-fuy^ŽnW~­g½ΚtΆβΦηdλ₯΅N+ ŸVάzμΫΖΏMΗκ©Fΐσ:wϋǜvΓΎ’_σχ·ϋ‘S¨o6Ϊϊg―Oο³—εξK{7ΔO°Πωby²^oΠS­ί»ϋX‡k•φπΓηε9ε½½ Πσrd'd[/μςνΧξW{ƒώŸ΅ή;ς΄ςXΙӁ°ΩΊ4΄ΜΪlο…%? ί@¨Z*‚{J­˜n…ŒCδ(&€ςύiσοHKlηŽ~"€Ζ;e[³UwΏzθ’o1“=η’E5.4CQcžjuέp!‘@Μ]œcU­Χd"λ(-.I+ϋΣ ΰ5S4:›Pqp–;0Ϋ_\Άύ€dؐχΟ›JηθΊ₯Xƒ›‡­m'Χ1?αΫυ™ŸχΎΉ(d³:½9e―n‘KζnͺN!θ¬ –:²·Ύgl]b’ڌwδh-ϊjzˆF΅L°eΟιi˜‡KυɁހzΆ[₯©kelθ‘u)•ξ±Ϊ.P"Ώ\£ΨK¦kM B؍9₯€t %\5R}–ŽaοέGΝ“D₯DΊuγ-WRΙ{UΈ‹δ­Ν¦vμd«²v3ΡΊ7!xΑBο©ϋ8 ~z7 M–ςa<·/Qnα₯&Ζ[©Ίφ€ΉΕκͺ‚/"%E½Ψ,΄―Šν 9F&ΑΛ·sΛ΄hΗ½ZOήCœR§œlγ(M &ΧlSδά½ŽVjώ„²}IsΙԌ5Ιͺj’2·ΨπňEJ~yΩ»|d₯G;oςB O]،Ί¨#†0ψλΉHS‘MΖ“‘ϊ†1gͺk^ͺΏXΛΉwοu|œ—x‚1ρ6K‰£₯J•ZqNH=R·–2ίϊ^UΜΦeJ-)R³ξhΆGηΑNΙψ±sΒptY1ΉxpΜaŒΉ7bΘΧT‚ qlΆ L`”OΥGΚX^•sΛ/g)o=ΉfoσuK―χkΚVόΌŽšΓτ9 c‘MΓβΩψ£₯m£*’jXΙ[Η”@œRΒ|cH€ΐV6ςiΕ»Ν¨·ΓόηS6 Χ¦j{'Θaκ§Ωλι₯RΘδ^Χ ΓΗΉζ§N%geυx—ϋg^ότp»>^nΞ±ήύ™^ΦιΚΛ/Ι‡π+ψ”εG²Υ3|ΉξΐW ^^˜Ά;~xΊysŒΡ*™½“’2ζ·rτ¬»M.sK©:™κ.5s@Y«%.¬Ό6#$X—³ N:&N²½L_Ω―Š\ί‰p’—*MŸlͺΡaΚgί•T#7‚CΣρ&’+—nYY6€^η₯W‡s)k:ρξC-ςΙοψl()ζV²δBθΈgŒWW›(YίΙ­+ˆu}ΕζtRΩ"δUγŠM§Υ…ψη+½muϊ‚Yφάφ…=‘³“ρ€1¬Žž&θ¨ήυX’ΥΫ?p·ώό;μκΝQ ~Ίΐg~w'WX/=<½°οΗ‹Η―|Ÿ5ž9£+|ΠξχŽγY-³°=ϋί{ιnοPδ:¬ώΰY«³rGΓο­ ¬a’ηZΨzƒĈ?N+ΕP ιzu8mδŽή…+η`>°Žτ‹G/―¦zyτϊa‹ϊe96z«WMΘϊ9ώ£ΣRζ?ϋ‹θ°­N1Œο–O³ΚwuΓG4ΒΝ$'Oλ[€Γ+5­ξkαS7Cϋά]S­Αύπςή‡ŸΌεaώΩ–―w”$£ΈΞ~οΪϋσσ―¦ε:<Ί=y«φ_fσ>ρΏάξ?|<Ύ8φΌ~˜^__ΩNψYΗΏ³;~Ύ½~ΒσΌΆ]ˆ·b‡αas”Ώ9ίc]ω€}<ΑκΘόβξθ₯-ζ³vωο/³Ÿ·6ۏ›ΧΝ^?iΖ›G­yΜ*?lδδμ}aœΫ…ϋώwψœ:=νφ·¦j0έ?O‡σ΄~οtbNDΞkΰΙEsΕπ7Ÿρa4X_‡^ρψ'―ΘoωτŸhνΖ‹ΰΔX¬3₯+f§6κΊLφ/^έΘ_­Kϋq‘w’υωΔx™#ΌξžOHtξ^η_YLςΆϊ›Jυqση>^ΨΉ²<ϋOž ύΕ6[\W}±sd8Άϊ;ά™νVΉ_{7VίφQΤ†ϊ™ϋ9†Ά˜]}†zψΗdΑΫ«δόϊ™ύRqφUΖθk+¦ŽΣΚ¨£ω­‰>Aζ66ΙcF|’oTκZ‡Άΰηχ7ώω[Οsφˆεί€"ι°i;#{ΫRΞσ3ρHwϊζ½₯=Άμ€««q6ΆΨΚt·]χξcο©Άήb,N©^R¦μ2«ΪεΦΆ‘ΦZ‘œ½+ο½―kVg+mφ΄Γ ιRλM»ιό•ώω.­ΰεύαΥ₯Γ§&gz-wώItƒ#ΜJhΝ„£6­…ΰBJg)£`½ή '7>λGNπ&g[.u¬ϊ ϋΈ+Δ²7{›ί8˜/{Ά’δ‘Žϋ”'ΏΤ‰Κ9jB »Š±'S„Ν¨f³ΣMΔd•Υ©`¦7(.œKs‘ΤΜ–Άwm·ξz Ιw…α*ξl,L‰ƒ’˜TK­6K"‘—UŽ> 7A.5t—Ψ· 
rρMυλΊΌ6‡5£Γ}:"™~€Ξ©ΚΕwS^“k&Ωαe£*©Ί¨#χlYλθ‹iΜrKfG›ŠΠΩMΙ‡Θ&ϋ($©€υΪΓI’5%“3>—ΏO͐ާJ­{&e‚vdΎr7ΉaΧύ ~x­?θιmώς?›Oω;~ψ·Θ-œ]xπ,όW(ΉιΞ½)χz™τh™(εςgξя+AΓϊrl?<}W‰³siΊ±B}|ρ4'i{ωcΕxrξώκlΙΞΦΞ=ω^CΔΕ<θcVXΰ_*1(0ΊMύδΩpZο+MN» v4άO λ―—Ωυ[ψέΟ^€ΫύΓΣϋj›Ÿ¬ΖΆ[Z±ά„?72žƒΥάήθ86R_7oϋ'ΩtZj] Gο5bXΆ.(ι;V鍨σrφ9ΧR»!u€7)”8ΊλVς—oΧw`Ν€αRΞ0Ύk*MOΰmu`νΓ%y+ΤΥ]κ&­·ͺIz-F)_}ΔkξΎcδδ™±·¦‚Γ¨sζΗ]ΠΒ„7_ŸΞ5ψέ•Nΰω‹@~~¦ΧΥ{^f?%poΊ~Π/Œ―­§l₯χΕrέ―Ήyιu1›ΧΥΙΠΨ¬ΛNŽ$­§ηΕHBΛί­t­|v>±{όΘ©"ιS(J6š«Φ8a±‰*I͍άͺμγ{6σ8.6Έ΅[‰¦y'™>ψV¨₯, FW.F#%qΩ«dUήL \dξ΅ U7ςύΡ ψΧΩβϋύP!uX;8}ρ JpΥ™ΚNY:η€θ”u’ΒQwr@½"3…Δέǐε4ƒŠΒΔ’›Λ§w˝ό΅ό8§εσέVϋΗΉ‘oΪδoΝ¬»Θc8†‚ΜI…άΊ΅ήΗΘΘHt Ύς€­|…@ά ’°’ΔwXS/w]£­vβ$€brFΰBWΥDԊυΊ&€• Φtf‹Ω7Ι¨`}΅ΉY2‘t>|ΤΩΥQόqt.~eιΡ‘θTΝxεξEtu^3qΞ­S)ΪδΧ„8“Σ –bLjZ«½Xc‹θͺ‘-ρ˜B₯HδU=ζnδ6p^ΏΙ1ψͺ`λά@εΊεζ…ΓΝ8-:UψRC¬o#©f;;/XΦP]€Π‹4’Τ"<€HΪΎυ-Œ(ξIΣΗj€ζΧ7L·>K“˜4Βz €ž;“Y#θλ,%Ρ!Ψϋ R‘ιV„'.Ή1J­U'E/όγ~8"»AN†ό»l΄Šε€OwωΕR§ξίnMΩH‘Θ, {nD‚ψ0LΒ„|gC.¦› E˜‚πV€ΏŒ΄!|Bm{βe|fΞή\M;1y„E‡τ xlkH΄,)˜“M.E‘t…!U+]sn0³μ)Œζ;§ž>Ožaη‚θeͺ„‹Υ!/+Ύͺ(²ΰOΨ£mΞι0Mc‰UI₯€<*•ΊβIψ}Ε1ωΠ F‹έ³}ψ˜ωbSsΎφˆ΄g‡ψΰ}ΆΩ₯»<=ψβαi^ψ ΖλΞήέμΆ±fωJ!dԟͺ82‘ΝΡΠA‘πήmψόΗΛ0άΟmœπPjϊύΖQRšΟ3Κμρ ¬1FZžOΤ'όΪ§6:&$xœ½¬*Oχω .tOo.ψσύι}ρώI’™ο›Λττ­ΝΞnνŽΰʚ ZΚr¬ΪEΰϊΤ;r^kΚΨγοͺ’oh7y?h–ς‡<#ΊΔ©t{μ0ιVuqΊ`Œ:yνα ΘXΓΥ°‡ο4Ξh‹ά’CΜ•kνΕ· επK—WΞΔ}Cφχώv{‘Ψœ8B„φέ†αψ²q,Bsr*JϋT,›ζ-E—}rΎζZPŽ=FLϋΚR‘3₯0[ΞΜݏ²7ΨΞB=a\eXB_8fŸ1κNΪSp‰ ΄M•`Γd”“‘}­©tš§γΈŠoO\xL’j_εηš’ ›θ­ού)#RϊX…Dˆ\ΗD‘₯`ΰ(vO‡― @ͺFeώCy «Βo`JχlνˆΒφώωFΫ$uδttπ‹η::θwΈ7v)‘t2)Σ ί₯ ΄©Ψ7]*ΨH˜|HΖ3©ΨXx]Ε–œΤτ―SΟ―΄ζŽυ'υδ%‰8₯ ^tVHhL©I*€~δ!6[ RΈMtRΌX βŽŒeηY*NΩϊ’ή™α|ϋ±8Ί4Ή 0GZaΥR•‘ΊΧΨζΥZΕ.‹ ‹WAδβcΝΝ:“"ƒMIεŽΤμtΥκΖ혡·Η3£ό8αΉ΅¦`b82m¬―Ψ€)…ψΪ»Άε6Ž$»Ÿβ?nΐϋεabΎdBQU™%sΕ † uρΧοΙFΠh’ήΣ–D€ ‰Ίdζ9U™yzIMq΄Ϊu„ώΦ87‹φα!Γρ@„jaΔΨύέΞ`ŒŒϋybŠˆ^‹;lΌ¨Z£Τ@“€AΨ§Ξ ;Ζ€Œ{Θ‘…κ|fKdI‘:ύχο% fWκπu7l₯€:¨‰θ+’q¦΄Œ? 
€’…+v3.Η8ΥBvŽΑώlMb³Ω.Y ‚χ.'$ττΐ7ΏΓŒ>§}₯D]KοœΐΒhƒς’| žk‚ΞVٚϋΠIΖ[MU•VζΔJ^ξzΏyΓΫ1ύ%Ε棜kŒ‘}v’\ΏώνUω•PKλ½ΛυΟ’VfϊμέδΫ½ΑύŸ_ΖΚόΗ»??<Uςϊ/¬ήέΓΤ»ΈΕ%‚j’Φ₯©b@t{UšG Ο¨ΨΛ%2K>š΄ΞL RB**άg“Β—ϋ½aδ]|„Q―ΦOw/7χ¬@Bπͺ’uJε€ypΥqΟ"„δ(—ιμܐτ7’}κ”ρ0aΫ«v³[φΈΏΧ1Ά‹ ™$PrΈM d°~Κ:°?˜bξzΠΰL…,‹EΆ½5 ΘΉZ­ aΌ»”Us9ϋ]F:Ν(\>ƒΞΌ7ΉbAjъ#Jω./‚ΣZL  ’I΅3“‚ †.3{t|σ·Ίyw»Χέϊ°’Q‘$ΙΘp9Αup›ZuοΔ"ΎΪk’zoΆZB ‘“”Βy¨J.εܦܝ§_IτŽ.Ξ΄ώΞ;³£±]~‹Ε‹©—§Ogš:™₯±/*7U±δ@z€ν₯š²πΛDJj *ͺ(–όZ‘L΄ΜN©*=\“$3°6 vΈψΝκ1,=M/p‡A―·ΓΗ―ΟΟCŽν ΥtπώΚνu—h±}ͺώ ρWlΘ-σρk±‡Ώζ₯­½ιB~₯υΡ£Νχ­§‘Άϊ W•tL¬9*ƒˆA½ζ+TΩJρq¨F πΠ\u«ςα$GΨΠά­ΠΈžώ7tΠχ΄³ΘΙνέΉΥ]Γ"aR±ΊίiΒo§ξ]—8ΌͺΎφΫ“Ÿ^Ε ΝΫδAζE2S%ΛNcWζ.ΡU•Α¦²3ΥΘ} \$ ΩhξJƒ`Ό―q'Mlγβlmcβ–.ΟτΥδœBP†ι„BμΏkTUŠ]4"6Vι™ΈΔ€θί|ζθs υ‡½Βέ^ž†c—p:‹χšΖΪ+―J"Ϊ¦Y)cl’ˆ%‡Aa 4UΌΝ •€©θ­jm0eΝΰΫΐ4ΟΞΗμπάΪΕIΨΏnb‹w―†t‹φr§Ρ₯ωjLb†³`„$ Ό›ά¬RΊbP–ˆοΑ6Ρ!ΎΧ$σXψ{M’onΎώrσ‘ 0ςΈg`LΦΓ™ '½ρ‘ϋ؈6f‰ΡˆD0„₯»ο[£°u‡.ΞΓΈ°x]‘½a_ƒžΈ˜ΩŠ9X΄Ζ"šˆSΠJK[dPDΕ]λΐ_£ϋaΗp±QΓQ 9 ±aρ»=„NΉΠkΡγ»hG|ϋX03ΎY]£ΫρΨ&E“ΖΆP,uμΑ—E[†?αΗm6χ·?p‰šΐD•fΐ„`±‘r’†έ­.>fƒ†?m%W)Rΰ.ά‘tŒk­`7žο ηη`ωΫu ΖRσέ•8δΜ„νί1Dακ†Αƒ•–βtkα-9‡CQΡΉ€ͺπœƒΙ€±α;ƒήAΪ‰Ω/.#!j&bΟ–„cπ† ΗOΪ+j`'«Ψαδ<Η³r ΕkλΪ_κ;yheυμš` Ε―ΙΓ‘γΩΑΕ^s:ϋK_Θ3uΠnΈ“Ϊ’AoΝω@₯Ζ”u™γ„IμrώΙ Oδ,8ZlΆυiF7|―ίω]‘V(δ|pι,₯D]'¦@6₯bΪ<;5%Kι”ab1ϋΘΞwΔ•DEΗΒ±ˆzζ‡CΜ›y8Ϊ3,ςςDΝ« λ₯χaΠͺ)κJl= όrVc’ΰͺ °«q¦ˆP‘οQ`X`5ο­ρMΒW~Η8‰ύΚνTήΎ‚η_OEθ~Β”›ΝλεyΏ›Ά6ΛΓ ,Ίݐ˜γ{ΧrΗ€rγ\bQ>*mLJ%₯Tƒ!η¬λ Χ-;©%yψƒσ#§rq€άS΅β ‡Z›$•°˜.έOK7–œ—S’V)iͺ΅ΰ Jζ'1β©ωžž&ύˆ9ΏW.Ν1J€t₯9 :‰B΅υ¨bΜ=tn·Ύ`ςŒP0•»¬δT‘,,`OΗjΏ7Ν 'λaΊ΅Τ¬>άOξΨ–ή‡]„‚HJ}4s+_ΩΒ<ΑΰH)›m(ΆΆ(ΗίF›οš Ϊr6rφ#]€uΛ΄ΆNιŠτβ$yœB8[ΦΊ4lΙΧm˜`]s!+/ύ5 ―b§ ¦x˜£Λ3Ω&Σ”€q<«±CίεQo_uσΓUΒJ..ͺ€ΙΕ\o \r?#Χ‰Φ—₯U‡1…‚δΑΫκ“υͺF}Mg¬ γόΆzήάόό­qP@:Αe–j‹$ŒšhdΤ5t ρš‚νΫ‰b(I2/{/έηπhξJζωu@9eŠκΆu άαCˆ‘ρ~[S%9²ͺ¦Nρ1²tf°Jεt’ΪJƒπ9›=ψόm%UΖ7Ο~di R#`TΏ‘8yWajΕH—ο£³"ηhˆ4BΊν!€ΣP£4΄ΓΞΏ—~^ϊ/,ί'„=ΕFΫ‡]$* α₯€ƒ¦60 WZ`])”R¬‹,Jk’;67‚ΧǏ›~œfλεΟβ»²JDί(…¨Tσ)&QΫ ΦθŒ’ͺΊŠΦjοθΒxŽžε,’Έlб3Ά6ά/Cζ°:SoyzΩ1J ›ρ’QΚ Žε’λΧ—ێƒΗXŽδ3―^ξ7Η C3Κ_Ά (ΟI΅Ο9-­Κ3?–‰|νς¦›< yς»Ϋ„.­jv>IΝA…΄Σrξί€­ΑKIΝ§Υ`@ψ§7χΛ—oΨh[*F)I«—N‰|‹΄VOͺ ³NŠ0&0!Ι “Ζ뫝k[ςΪθζwO];•-cCθD.gέΨg ½ Δβ5!ˆI3κXΦΗ7WαHkŸηίϊκεωnš?Δ n₯ ₯"έ΅‰₯%²pφ†m―1: Ζ0ΰž@ΰ1 
‚?šσ9ΫΤγ}Ώόδ΅ύƒ@HJ*¨^ΐO‰uι’a°(€†Έ„ ‰>'PFπΗη+!ί*Αγ₯–’*’.Ηƒ\ΎΜK£¬Š1W δšRjefU±σŒΟΤ΄\pΡΞcC!2§³5|«nζΊ|Ο†_tyŒϋΊβΣ³κ₯O‘\ξ¦’jΑpc“©“[‡ 0Ωh€ΨX‹£tϋP΅WrΐXαόf ½ΰwlii±μγΣσCΉΏϋs[4=q K“Hαγƒ@–αΐ#'p„t8:›(ˆwjF£tX«+ΞFx‘€]Ά\ψbήϋƒέ΄η»υΛ„Ψ™Ε»σՎ%½   8Ρvxπ¨0J­j₯"WQΊH_e ¬§dΪKΟΨuLsΟ–Ή‡«xƒ*^E†Ε$ Έ©ο’uΕ[u^а€υ π0Ρ‚„σtΚ;[`―£ΉE9IΊ8–mRω­O¨³Ξ:e[tBG(Εn7ΝX›ηAΰ=} ›±»œΐΛΈ˜ήAΞν;ƒψ:Ρ ZπKšx’QpR`Θ}ΈγŽR «Cυψ—­Υa ! W% Β¦ΜΠη‡ eΔ Ν[€‡Š0κJΐߝ•‚#30μͺX1ι₯H’Bφ d,„3Eꔏο€Υ^#–yΪ)ίά 8Χg%žΜ?"F;€NQ’S3@?’3XO©΄Z$Cn?Ζ. ,9†™Λ8ιCδΪΣc ήœώš1’Η—Υ*ιͺH0ώάΣc!}ƒ^³@΄]i e Ό‘mέ•N.§tεεY#ŒrJqΥς[ΰ₯›πNŸ«;έ²“Σ’ί₯΄9@OcΣ"‚‚΅ͺJεF]ΐ½&˜ž}h‹ £lEJΙΛΆΖβμό›Έε{p1³ιNκFƒ τ΅ Χ ~’DΙ³ Άɞ©>{νD όUI]Ζω 7Φxy‡}n°Ξι₯ΗβΝΝw€NΆ„]‚-)Aޚ„%2Δ^jΩ8i‰Π” 'ΐΨDωG₯9tΏΝt^ί^ΎBW"0’$Α6#ζζζΑΫKMYK――^Ζ˜,–ΐŸdΌ||εΝΘOΫ“œξN / …αα ψΩ%! ΣzMbβ1ΜΉ°r›»‰T-O―6Ψπ›?ž^Vλgώ|Η_?ίi ώίby\ηΦ€γ«OΎ'S¬΄d“*ΦPΔεu’JΠΞp>%Λρgb―σ¬ϋ9hύ;Ι=\ό^‰œ"E΅«ξ ϋΤΊΉ,βB%ή1X%‡ςΪUš₯οϋ\"ΦδΪοpPΫ΄Δ‹kϊφZ‘―ΰGΊύD4+qPΫΘ΅'˜€RT½kr±ζ#Ο4ΙΡ΄.ΞER•ˆƒͺ΄™<Όϊϊ°.χO³Tή―ο9š}λΟkfo;Σ7Ÿ;#xŒ°ω˜9I>•3Ω»AqˆΕzΦ&ΟΊΦ(Ε"0os>Ζσ‡€oσ0jσΊ^?=Ώ|Ο4μΏηζΣ‘@…²4‚•xoε΄LΕήIT€i«λ€ͺVn"»dtΘ¬ˆ|D4εΫ½œ΅›ŸΫ?γ‹o>c%λf²2VΗ’e¦J/ΨUDΩUPNΉΡΕfr΅RψgNMK™Y™GrGϊ'ΓHΛΞι(II*˜ Θ²βχΠΏ…&‰!ΩJO4—€ΌG(N«δl-¬ιNO_E ~5†σύqΚVΑ¨]jκq°ήOΔPVxόά¦‰ςΡσΉ†tiΈ±όΧ;“;ώΜ›Οnͺ›lRˆΉ’ΜΤM„]©F99¦ ‡Mή:|QEψŸDΆ+lœN3yς'σ&RΉφ uΟ”…ώΠ m΅(n>AΑΛ; Ε Π3{ωTΪx7•SΰkM:‚ΑΝxό*ΔȐοlΘqπ₯^3Ag$7Ύξ΅6ή™‡7Ž›OEφ"N cŠ X=37•!Εr["Ηΰ)ΐι/]©{ͺE•ΈWςΧXβ%ν‘‹VφΞ$ν^xσ)² Œ?)B   †Z–>&ΰfAγ,ŒΛ&βͺ0)0#̐tzTΞι0­κ;£ΤΧ‡ϋ­Σzoό““m³|«So£W* K¨¬5IΣq €†–΄σμZ2Œ0άjΔ,ˆš–+ΊHΟ@>O½/κΩ|¨—_qRΙδ(Ω‹Α•μk‘μ©.“Ψ‹Ώn“―Φ„€7Ov’Ω’“Ν₯QϋΡ?œ’άkŽωΏp%©z?έτnω\”Bψn² ΈO•Ωe]Α·}sQώ‚ ŁιδŽ`­ΐΦ‚t’πΕY%¨δ ΅ο_ FΛg χ^°ψ +ψ1“>Υτ}£Φ]ΈOθτlGν]>ΠpΔρώ |> ό|΅Άΰtΰێž»«Œƒ‰xΛF΅[q“6Χϋ‰»ΌΙ³η4 w§›1_Š_Ϋ™―οΎςξΰbΈS>όβUšηg…ΝOš¦’Κ‡;ϊ”\Ώ^ΫzΪ`Κ&ϊ•ϊηo>žνh}8ΖθuδΞuͺ>|ζ`ό_0’Υ^{{x΄[‘ωYΨώ˜ΣτΕλδG‡~߈ΦηςΨώψΐζΥ^χςΧl”©τ³dz““ͺͺe _|ΦέHν+WΘgN7<˜’ήΝ…Χ»{’άLΜζΈ.'2 SΡεIΧλ½@σIΧkcΏ ύκΖ^θ/£¨Γ—η]ά=l§όΜΫlέύ†χ°Κmώ’ŸΨά?­_N¬πŠφΨ³β Η k8jψͺυύΨΫϊ@+aφnΟζι^=θΫ2vE>φ—»<λχ6σ°jΫηž?œ±Ÿ/ΧμεΣ¦™ύ|‘“ώΙφόώΝ8‘yΫzσ[cΝόi΅~ϊΔgW|8ή>χ‹{n₯ή]’νBŸρΎŸsΈ*ΧΑΘύΈΞ‹ΜJ‰½­Π|?υk~νw©9όΦ+%ΆšΙ?²ZCxϋ}ψώ_K°s@Έ`xήhό—tπΙ±ςή–­`bΜwΚ Κξ\ rΞ+5βωΟΟ{ 
ΐW•Zξ―Ο§ΦqU'ό;9/7½“Έ4υ«Yζ»ιΏξΌΝΞ8δΩρŸχ7―:4ΫmL6©ξXN¨ͺ1]UπΨ€­ƒΧΉ+[­σ«j²τ>'ͺ…MsΙ oλ’ΈΓΜr|μ_—ςΟgώόŽ&W‚4rςΚΆh»œB/Oς-Ζ0―ΧΎπZτzYδi `=^;βΌϋρ\{E[ŒΙ“fς$?¬_ΎMž<&Ο~~½4Z’>Ύζž|MαCpΣ\œ³?τΦMYJ³5ΑMېfQ9kφ•5™»ˆ2Τ&ξ¬λΆ²’¦Ί½†χ^cK’>ΜΑ9?‹+L+n‘q’ή›>i―[a€Ψ³/‰s$Σ”4Ÿ‘Ζ°V[¬ul{bΣ…u~wΙΞ/τ­-jΓΖ Κ³"9¬™u##έχΊœψο :Ib ,ΆDk5­7 ₯“νσ‹6‘a7ξ*o:Όρνΐ‡>ό€ίπυΑ΅Β– H7[ƒ7‘{–όκH)a)\Λ…μΠόzν ―ΠίuΆŒϋνηN>ί?ν‘ͺHoINσXΑ²2^ˆΌbpΠ€Ώ"֍Œτiχοj³εΑξ7ΞΙmνΒνΩ΄κσ\›¬Wω6ήγ¬DžςΣέΛμΟjYΡύE[xζ3Yϊ‹ΧψuΧαvT­}Mπέ½‹:F,†ŽBυΆ₯‰‹΄kJn₯ƒˆuΕV<ΏκUg‡φtσϊ OΪdΔ%Ή©p₯›C«Ξ›|«R‰i«―V΅ΐτ:U©‰‚‚Β§€γΗόΧδ(α―yBp|гηFs£ΪLRδΟ…σ>ΥΕκcNMτ|]ο5Ϋ.y«…[„«ž"|1ε ˆtι(ewEϋωI¦γQατ;eR/Ζ[Ί}Ϋ›ν‘ώΜ Ω8)]>Ÿg,qpY$ίBk¦–€»ΖΜ±ΘΝ{k%3…/oθ‹ <ΏžΝΉY~K#Κ‡N΄χ\Ί AΗƝ΄E@Ξ| VI!~ŽΉuJU΄‹m~Nζρια—04™}Όyeή^I—’U()τ^ ]Θ’Σ•"ωiU.UΩ@9Kο-nIή‹Ÿ+Ÿϊz·o«o.DB¦CΕUΝ5šŽM₯]r >Τ&kΘpžξ=I"α ²F„=KQφ&|πάΆ©,§ζ³τήͺ˜αͺ ΐ!Jίχ(έή8ΰ Yͺs$ΑIR%uhŽa8]# €Ϋ§2ξ·2'³–oόQuυ‘œ‚vπq>UΔ1°£Τ³L:–€€ΨBTI2ϊ’–|΅*έcόω£†-ήρίηO―λ7aμΛgΏή­oξ"Μ_5†‡·-gŸ½+Vgeά l©V•]Δ'υΎl₯υŸtWԊ±Ίη±ήωXΰqίmΖΫ|zΪ|Ψπ―ΑužŽ'αΏώώψϋγ?ώΣ>ώ[Αώcargo-0.66.0/benches/workspaces/substrate.tgz000066400000000000000000002654541432416201200212430ustar00rootroot00000000000000‹substrate.tarμ½ν’€8š&:Ώγ*°ψ3³Ά…‡ώ‘96{μdgWwΧΩή΄μκm;Φ›‹γrw:p Œ¨c{A{{cGίH€ΉGΘ­f:„Hθ}ή―η/»8‰μ<,;:ΟNόθϊ‡`ρΟ8πEΘo ›ΥŠzύLΛ€ήš›νj³2Χπί†ΉΪ˜²ό—~—8±#Mϋ—wϊϋξ4νώθ&χ‘Β?β“mΐΏξΓ°—»Γ‡΅΅΅w{°tV–³έ.«†΅Ωμ>nV޽9ά€―Σξ#p€.=%IϋΓμχtΩ-œΰόΪ‘›Ό&ΐ9=Δω’»‡ώ/tυ}h''ΈκΠβCΧίίύ―»QΏ9~Εd<8ž όδaΐαυα3Ϊ Ipφ¦ώ-s΅m~Φr₯Ύ9~Ψƒψ{ΰ;.ˆΏέόgνΏhχΖbΉΨάί.Ι%1:²\Xχw^pL½ΊΏƒτ“λ½ IΖύ]rrcEA”urχݎΟzβžA”Ά2λϋ»»Ϊ¨ Όδτ0rΟnβ>£Ϋ@ϋjΏX<ΐΚ3ικΌΏ{Qμ>j²‚·ΊΤχΰ™θΦ φΐμό)ώqΏμηΰ<ΩG€zH·&=vlθψ’Ϊ&Όη%Ρygœΐ%υU|NzΩ@‡W…λ'χηΊ$?” kΓ.)ΓƎž~§ΊΊ΅χf|/°'π'Aj=δΩ»ρAς=ˆžj½dΗΟbτχ’ƒ8vCZgΩ)Ž>ρ«­u…°>ZHΌήϊ2δxΩ¨«Πs;mu'z “ ³ηfcށΰ:>Α΅δ:]Ψ;ήyσδœlΧοϊ4‹Fμ;Ν5XοαX!}QΧ;€.ρ•κΡΕGΫWG§Y‹Ά…=>λmk>qB[ρθDγ.?δϋA³»$ΊœΫφΉZ¦:ΘΆ«owφ%9QzΑΌ jΏ@€ζpŸ‡kaοΟ§ϋγΒ ώ/ΨΨ»IΎ1.P&ΈπcΌωύρ˟u >vιά#νο?ύς'ν³gΗ1zBΌ8 Δ+ο¨Ύ}Ζ—₯ΫΡήAφP›Ε‘σΰΉ»EΟ Α‘m·k…δΝ:βt€žω7ΦB·6¦₯ζ_ώ―!ώ…•d‚ΛΛj λ? 
mΉ~<₯΄Σ©$’ α»μͺgͺϊUGH…D]3„>%γJυ ˆΰ»jή,<Ċ‹@΄ ΰi|PλficpZ ’βU׍bτŽωΉύ_ -,Τώ?γό"Έ’ΣιΧΟη¨ϊ1ŽD}ςί4ΦΝω_o,%g—‹xωv·ϋβ%zEfl/wήφl€*'¨λ–€Θ†ϋ―ΈΞΧ…ΎγΨO i–„›zGp§^˜4εŠωήι17P~ΌΏƒo‘Έ ψoέs‘ΐύ¦fΠ»δ7€žow9βϋ€-οΠξa_χΕcγq²G'pKτβ†Σΰ6•Ηƒ'ΰθθ·±bnb'rΓ\Τ}ό@‡}?ό5θ5'8C‘ζξ< Α­>²£W- ΰQœDΫΉ>:vΡl’D@ΌΥ%=?Α>šhLϋΠQJΛΏ­ύ$ Χk₯Ο-ˆΰ ƒ½†vgν;άγ54„ƒ›‡P₯Ζ­O©/Cϋ~r= \ίφάίΠPβΕύάΦ/gˆμ|τ\jdϋ}ωŒ–†4}»ϋTΝz«MύΔμβB@‡θ«’rόΎs:§Ή=’ @_ώΧ ΕzΦηeK5σλ‘' Ν›ηjύγ¨Η—0 ’„Ρi6fΤ©³ Ύ Όύ㢌έC©#μ)ώΦή’;+˜£e—«+u’H“rƒ!CΈΑDŽ.ΞpΤ–±ΰ„$ :ΨΘQΛN&p»ΥGμqΒ5Η"Œξ‡Œƒ/ηπ[βάtΔ«ξtΔ՝zΉ[ώUΫΒρV’+χŠ,hLŒc―‘3ύl;Q€UŸͺ\Χlν_?ύό£–=Ψb?uuάι”™jΊRh„α?A*ΐόΏ2'€ΒσλΔ*@~œIύ–IΖm CΕIΐ?ω8Iεœ(Ϊ9E”!ΡΆΏχΐΞΖ’ιήZ¬qκ–λ?½~²γ“~ΆΓ΄ρ5/“Ί£η# Gζ†Ι˜)bτ‹κΪ¦xΦΘ€OΉΘ΅—–Žω–”ξ‘ΓσΩp₯v“"dΜϋ]{‡ϋGΰ8ΈVΪ»Λ[pt Θ³Λ—ΛΑ²+9rϋE;lΑ Θ–œΈ} †΅¦·,Μ'3έ_³ o jv/V3Ÿ½]4€ΟοMΛ…€–έAŠd»IRΩwυœpυ. Ž*P¬ˆ}RΣ©tωXJ…SΏ+Αrσ?Φ¦Š—1γ½=άσΏ6Ά„Η²–jώ%ΔνάδΰΩΗ8UH¬ϋ;χz:6iκ‡ “Kθετΰ˜’ž0Ÿ!^|Nz`•cΤ‘&gϋE‡=ΐ‹χΊ|€c©ΰΝ% λ=«†ΦΆκWi‘-Νϋ ο,κΒCٜ5ρddξH΅‚Gxή0ΎVcNΈ‹ΟΌλσpΓkπ°Ν€Σ—*Π՞E7zΫ6ϋi ~Ÿ]:8JΉŒ»b ™{ϋφ;†CςxΆ¨aε ma’δυ1=ά‰ιΫΩ΄˜Oκ–B.ƒ a9ΑB EUA)sfέ‹Χ '―!ˆ›œŒL،qΎ»=ΦΞΡ~š©ιnaθΟ—ͺ{lYŒmΞΫ\Wš›Om@ϋ©iU¨ώ¦£"t‹œ«"Λͺa†m@ψkz z8φN̐l₯όΐκ'HYlˆώoͺϊRηΏ²{Ž6τς?‘ϊz₯ψeθΤό/΄œ3W₯̏χwΏ^‚Bυ§΄Ic= ‚ZmΦ|€YΎϊ νοpH†¨e΅ώΐθ-ϋΏΐΥπΛ‚ƒ& θUS‚’κ7d †π?ZŠρZζoΟcP@Ώό'ψ7k52δ)κuˆΚΔιrU­‚Μ˜K‘*dCPΟv‘˜C/piλY@v D˜HφŸ€Βα2ΪΙό©Σ“ρ¬‚_Οΰ•κ7b†Δ―•ώ]σŸξΛƒ`@―ό7šϊΏΉ\”ώ/UώΧ„|Cτb™­ιΈΘ―$%­Ε Ό+Χ#S~&ΚkόμκΣΏΙ…ωί ¨M-«_ž œΚσμί)sΝB‚4Χs”€ˆ ΥoΦύ ΰ—&’ j—8 ˆ“™ψŸWpξIώη΅ͺ%Eώ·‡KDπU5€.”IτZTB‡V–˜?)Ye“₯“]QFY?7ͺSt3ΗΝ”"7_„›XzΆξΐ³ωcΟF„ŸyUƒ†κΝΉͺ ‡d₯žfΚ±±γΧΑ}εςϋβςΏ«>L'πSEͺvκΑ$›{7Άwp ΉΈΈϋFήΨc½\yb}cυυΔ—)&Ίhυ M­VέΤοšρΏDώoSΥ•?©­φΦ5Θψ―΅₯ζvύοv4΅‰•5©ϊΪlωό·œJh,?)‡6Λώ? 
‰Z©ψ9σŸn"ΣΏ{ωΧk2ώίX*ώ/%ρ]qm=ρι̊0μ: Γx-’|ζΘfB¬!Ά°bY[y Ι@ΥY΄dPΕ’ζΉˆIα\žžšVδhˆ›€₯raڐΎπηΑV­d ’r,g”k&k¨›g—4Ρψ—3ͺτrΆb†ΙΖYy@ήZ ε³ρVJ˜$ W$ ϊ}Τ’Ό«Sΰν΅lEηVLV£ρΚ œΥvπι Q–€¨μ(oYώΛπXͺώχΜσΏsύ?ΨƒΤχ'2 Ώώ7…₯κΏIΐiθΘγ9ˆ@*υ?~\˜χw‡ψ3±eΥΎNv|χ» F¬Q‹x©ΐ„§ηβμ2[η).ŽγvžύΫλ#Κόw‹’qθhpΜϋ‚²ΗΨΦbήρ‘ΗΖe(ό΅k@Χϋ;›ΒƒλTPΡ"`—ΜΜR’H~Ϋ{= ―YΗZΩθ£ΡK¨QΓ¬εaοξ¨0]Uz‘Ω€¬V˜„»AqΤ’U«4§Η»Ι|ϋ]σ΄΅œ¨ΤQm¨ΘφχΩΒ5£&)ΰr±Eλ’¦‚"-v=ƒz'7€θiΔωC³%3Λ‰“·§ΆΞΆξ‘υŽ9¬^‚Μφ~›-‡Ρ}ΝΦ|&Z'€θՏ/ρ·ž(³¬όΧωάȁθfš‘ΥTO―SΣ௜Gϋζ…“ϋLAζƒn^{jeΕυκΚuφΫ_AΩ« ϊΊtΑ<*δ_sH¦‘\sύ3%²4Π**αΏόY‡[»DΊ{Š΄Ώτ˟΄ΟžΗθ!uπβ<`]kΔ’XσGUΣ;Α3@ΥZ*ϋ―€ω‡β}Ξόoc³Yό/ζVεΜ£νμύΚ@7Φέ"(mΰ½ξΖρj# ΌK‚σVλζBšmIΒΆώ(pΣNcΞp'ΐ·} „˜•!Οgΰ½"Ή<ΨύΎr μγނ㲍υLaͺχδͺ TΟ@’X5€5/oG”;ΐΨυγ8IηΈY›6™Mνή™ΘŽTκm=‰ΓιΙ%₯ΩΑΜσ@^J―-Σ9~Ίψ§?½ΑYΣά+ β0zΥ—ί=}u#PfΙ§ΗΨ`σŽξ‘ŒrmΓ‹± Ί―~E›αq*ΰn4b»Λ38Γ%wπ'ݏ.’α—Ν ­β?$Ε°`&ώ‡ΥΖ"λΏ.-5³ΗOη€mυφ  υΌΆxϋ‰lγ%λη8rOqΆ―ΙlIsD* πe`Ψ'ο!~Lj3τ¬wε!O7¦χ;6TαY–YθͺȁυP\WάΎ?d 8ξ2λΪ |„½’φΎ‹όW\θc<ετΞ»Œ ™ΝΕώΩy› Υ-‘ΫσšΝ&©1“%ƒ–'_ΣΝ―ΟV!Kδ±Ά•+IΛ,Ώβrϊψ0Χ_cώ7¦ͺvωίdf5Ε…(HqΜm”W‰6IΤ|QF‘Ύση Η v²OΐˆXmS "’YίvŒη€ˆSΒ_ψQ1«―όζ‚Φ›­’rζΏ²½Š€½όΦа[λ•šYφ9υηΰGœ΄˜Ν6ˆƒΟ8φ6+z ¨E«pE¦φ2­Uνo’αd«ωͺκτ³TAE%Ε@ΐ!όί–‘μ?’ζΥ~™‘cΉΪZdόΟv­β?$ΨPδ<*εd@γC{δ„ˆŒ¦―Ά%ω­α―ΕK”/ω|Ž:ϋ>ƒθ Β‡spΑ„%zdϋG@Žcu=%sΉsLJ{ž'›6=x’ήι³lΡ…έ9”υAψς('JW¨€Sψ&œ‚2HχKkΆζ|mβ²rΡφΤΉ0³W•|;#±&½λ‘•”°' MΟƒΣ oΚ9Βd>‰7‘ΔzSIœD τ5ΆΖ‘Uσ‘δλ³ε\₯ϊ·‚[ iΑγ·KUIžώ—ΐyμ†iυŸV¦©κ?HΠ(Ω׍ύ½ͺ!fΛ„Υ”ŽvΕ†!}bυR π•‹{­",›KeAVϋΉΛ³Ε+kώ—ξ™Λ4·k2ώs­μ³ΘNή[œύήό§κ[΅QM ¬­³iΝŸΗ”6ΟΩ\(2|HLΒΪύΩ~Ρaπβ½ξŸ3LΑdΜjl½«ήlϊΑ鏽E5y(ΔΤL#_αΛpω₯οΌxgu(Αλ, ‚Όά²›BΠ g;α©γήΌdb¨τσΐu/Κ•ρ/¦V@œΝnVαΒξ%Ϋ"«ηBtΟZ u΅0ͺ±LΑŸΪ›“˜ŠΣλψΌsTΪDξS)Φξ"₯S68Εb‘»°cφc†qŽ16;d°μRήσΤ^ώϋs~)γ!Χ­;hΌμJΎρœΐΓ‚sˆΔ+―ε³Θiζ’!ΩΉΛ3Ο»ηΠ=χ©ƒεaθMŒ@υ‚2(»₯=8μhΐ>U\Κ7" ΣΞpTWO˜¨j<κ«t7μΞΰ9Eφρ όα·SφΑ¦Jδχpt½[ΊŠοY ‡<χP\…ršψw%Ϊ’ώμJΞρf― ΰϊ{wΘ.δΝ ’dΆΘ.δŒ9ηˆ'ΚkΉγœ† F ~βm±{0xv%ίxΑᆑ‹όΚaγ ΦΛςn‡hfٝΑύϋeΐŠΖ—ρ„v°ΰμ#^‰ΐθoΘϊnο‹σžΐPu!Ώ’oΌΨ9ύΕΎ²βRΞα;Βυםl. 
qΫ»o'ώI»t’ i―#ΦΐΏ†ˆΘμBΞΡ»ηΑΫFK/4\είνh―;—hˆΆ‘υςPν…σ>.ϋ`ˆ‘mpΦ RO˜ƒ‰6b,ŒT@βvpi€ξεq*U[σpεŠB†?ξΆ²Nxο"gΚβΊŸB‹οβ»Ώ^† šμBΞΡpQΒ—]Θ7ΪsNΗ;Ϊ3…YNZz§;8ŸΣε-κ2Ψ*ͺΆL#πwΠ ŒnΖΤLfU1f†ΪJ)™A£ ¨1γΝHvƒ…lz!ΰ˜Β6~Δ₯εiΰcΥ;aͺmYΔϊ aσΛ―&ٚ dνΥ9:9ZΉΤ‹”•TΧ)aŸΈ–ι2p¨ύ˜ŠCƒΖ^Š(k7pάμj¦ͺ"…}ώβΓΩΨaœ€θ`;)‹dӎ@Ά»§%ϊ·χ #1*ιώ©wΏ³ζGξ…¦6a1··\Z! θμ;u‘v·ISΣΊ[d~ΉΎf©7­§UζλnUšjϊΪ^–f…'„m*Jί@w{dΥοi‘η°u5ΚMθ=­r»wO³Τ\έέ(³2w7*-Γ=νΞQOƒΜπΪέ Ϋ »›†­žf©½₯§Qn«θjTθϊ}ΝžNΡϋ.H5»ξV™ŠΤΣ(ΥΪΊeκOw#ͺmΊϊN)–»ζfHœ¬DΉTWθϊIN˜Rίήϋ/+B(j|,U] qυPΎ)6g»`γpΎν•#Φ”–όxΣ Ή=6OδϋaΛq½2hM†šF»r/­§lž5zœ"ψqήD‡²]³~΄άΗ³―|³…Βh™o{υ£•}qβ5δ;Yγς Ό~Έά²γͺΚη@½ΡCα΅©N·J:—P― ₯rόWσ–3+y•ΔΘm‘)ΚϋHχΡF?υ£₯²–_U§bύmp*e «v°ΊƒΠ(˜Ϊ0¦‰ „ΥΫ–B‘yΤ^TNd ͺίΩG-9E >P¬fˆΞ₯­΄Ci₯‰ [˜4ν΄κσ,&Αzνέ# Ÿ ·ϊϊγ§ίόγβΌ'₯ο•|Uϋ?±@δv«κ?KΒzΞ!ͺW“ξΒX zζ³Z“υ0'°šΙόιΙTbU†’h~{Ν-[$umR|υK†°wηn8C•M"τ΅£‚}5΄•3gkRnijΑ©ΦT*f£…i΅:zΚ’ηΏΟΊμ΄ ˜jΚ'†w Ό ‘Ο΅8€6­OΪ΄΄?|ύτ󏠁½V/΅ώƒ†,H―ΨΊu‚7 qλx#Φίό쏆 +[χwαeηΉρ©Cξ΅X΅Ό†ΚρΊζJΚΚέ4HΰΒΉQυί₯ΟΏ("Πξω7 sCπWŠυ-ρ^O™ΡkαοTt™Š.sbΊΜqD•EHdd‘CŒμλ¦ œ—Θλ©sf#υΘπ#U)Ύκ†…ΆΐlσžŸν_ώ¬C±«‘ŽΤ£HϋϋOΏόIϋμΩqŒήά€ο‡’Ώ™δ4Φ₯ƍ½°ŠQAύU“Κ°ήͺόyηΏZ½SœξΟ0–ΉΩσΏά¨ωŸ]ο Μ»–ΌαYϋ‹°©αwŸ~χcb·Π>§Τά±†^'’vΣJήnνgνΏύƒ\’π!3Fšξ »η€aNFl²ˆSZ/R1­|sJ/υkˆ3TύG9σŒαG'@}όΟ†Iς?©ψOe™ΝώΣωYTŽAΊ€2B ¨Ψ5ω|Uefςw‘œ&³ŸyΜχ“ιO--匩Z//εtlCmΖ‡ϊ&8`oγ5iΤ–8•”뺍WOΝV¬©‘KE,E~ΉŠN©@UV@rjUΐZ§ž‡ΙMT Bί-Vƒ £k`Φ`žVIΟPΉΤΥo&ύOœ`HόΗj©βεΜV‰P` H?³AΜΏe)ϋΟmθς¨ ΪΈ"šζη†λ+ΆΕY`λV’'p‘²QvŒr‘^ƒs±¦R$Aζ ΤJ…xJέ"C© ••¨Εω‡ΰSՐ3^€χς?“υ_ΰ1Ε1?ώΗ³_+fʏŸpŒ(mz£ωžωF‘UD{™š*[ύͺ…a-U¦αUΤ•!ƒ?9yE$0’5ƒώΡܞ„dS‚&G#}Ή3Βςmtjω/6lHόΧΖPυ?€α?ύ|Žζ‹Z­(υVkΕ- Aτ·ΰ$L’Ά­Κ}»xU0σ`ζ Όt΅ε‚4C‘'4Χ›'ƒΥ ½₯Ό;>#ΆΥR† Ϋ«αYOYοp—dλ;ϋ ΞΔΆλλ‘νχXά uXώ―υέͺͺ Ο΅„ιΥ5“owpk=7žRΈ X’5BpζE›ΚRSy˜υψάΨcQΩe›^ŒκDγX}!OφP!ζŠX΅v[ΎW¦c 'ωρΗ?όΏΪΦ~ώω«–{n.‰ Χ†Kυšpθ^θuL―]/ώ—£μΏRζwήψŸ5M3UόΟmΔ(ώ—ΫO½ˆΕŽ/¬άΪykU4Saτω‹fre<Τ‘΅ΨΊτ]xΊšυ/φ‚˜2H1iS1† bƒ$ΗX*[Κό;)m άπgγΆΦ+ ·‘ψ?ώWψ–ƒύ6ΏϋΉΘόαΆvBι)ΰvΕΎMEΧΓμΡ2δr’εZKοθί΅ŸΑyׁ4[‹!Gp봟~kŽνC`ώ7Ίi₯ΌΧ@ŒzŒ5ŒWαω(ΈO¨`\vxοᡞ—‘—>@='ί-γڞϋlDξѝ˜±¬Όe…φ§ΔRωί7ke•3>β~HβωβΏ6–±%ζ½Qψvόνnη&1Z`Tg ‰€R ΒW¦3°ΕhΥ‹•žmηΎ'ΜZ¬Gι'mΞ‰bKβ €‚‚ώΜΛ_*ptψΠQ%œΎ<ΖΪλw;>λ(!f} Ζ‡…Iτƒ˜c9–w#ΊΞ’φχ:Ǟ 5ηΤ’_ΫΎEφt†sωΜΎK!Ξήδρ|8š7ξT©΅‚ΤZΈβvΑΛ 
Υ6½”ƒΡrͺ8τY»ϊ3Τyφ˜/›s‹kF³Ω1\ύΙγkYqΆξh!na’δυ1m˜‘³OόώTΦλ,<σ%uE#Σͺ!R ½•βΌ5<覑b―΅ξ²c3˜Sͺ‚ύ^UώΏh{FΎΐΝΙήαM…ƒν€jaδ<―ŸD‘a!-"n»«2ϋE²Φ—‚1?Τό¦ώNθh’ κ‡ΓΕσζ#τz «R3’’ΟšCΞΊdςjΞτן΅βq§΅΅ƒ(SΛϋεΖ ε•;©ώ$Β ΤΗ` ώ·υr₯κI·,ηΟ€»M•“S)£ςjm3ϊ#Ή$Ω™ΑzAmΗb¬,š}΄yjΙ *`§ F₯|΄tηΤΰ“Ή~ZΑ'=oG@‹€žm?)Κ7Φb8§W³²Sψ―Uώ €πίώ­δΏΤω―(Ρ#1`ίόo-‹˜©ψ$ΰΏrΝ κόz ύ;~υ[­ŒίjR™]¦0…|χφdΕwk—μ5ΧΟδθγΌV½κΊI‰Ιτύ”CφυFρΘέ£Π§[k³9+k₯ψ%μŒN<Φ¬πV!ΨΧΟ~B;ͺΟΥ3tϋΑƒ@gT@E!ηGO“;sμΠ­τόP9ΜͺGwYϊ, υΡ)Š6Η}„ϊΞ œ'\Ά»Σ²Oυx’Λp”©#tΊ»ƒ NFΪGš°Oξ#ϊŠ)‰xώ? ‡Ψ\Gϋϊε³Α)ΨΗΨ‚€ύN)±Όφ" mn;že@x£ς_Œhώ[YK₯ɟЍ{ύ?Ϋusώ7Ε2?ώ“ΰήγ™Κo.€7iΖΖjμή’8n€h^O`gPϊr©μiόd }1E’­V_³p’O_~ͺx…΄όzq#°Χv―Ϊηό^0Δ/ DaΘs4ŒΣkz‚tJώΧεp,8mΦ–Κ‘1{pΈ^ηΛ²Φλ5e)ϋίΔŠAl.ΧFG!>es£˜䳨΅u£dώ‹’ZΕΡ+SŠζyk%•`δ¦θ&Έ”xQWDΚ, Ξ,ŠVΖΈλ ’ R 1₯ώRDι+’ρŸdώg•/eώΞΠCeŠ£ΰΩ…_?ΓmΩΥΓ“₯„λεXρ_ΫΥJρHπΓ—‘ΈN#ΓΤXHIΈN]ό@Έ΄‡ΦΛaa]BmΡA8κβE>¦AΈύΧ·„ ‡ιόθIFλP ΥΠ4iϋώ€νœΪkΗ–^ύΏήΗ§ΥξΣ­‘ΎŸ΅ινβχ‰+ΞωΩ“£#ΧHw5©¬Εl&ΦuΨbαΘ9&πη[ό#ϋŽΩ“ο*t <Εw;H&zsωκσV9QJΑϊθν―ζŠ8σΏ|ϊσŸόσhbKΛοZΛοzL†NuOYU”ώ_Χ‡Ω„ΨΆΦVεɜω?αΌ“υΏΦkΕπΎόΏΚ «HτU«1όέΖw(V4Άιαπϋ!Εh—’ωΏ–*ώK*ώƒˆ?²g €¬gώW†IπΏ―-Ε₯βTόŸͺ€\M 4O~Hv˜KC=q’3χΧ{/ΟάxύZB£`ΤZmρ-F6΅€‘ΐ\]۝¨…ΰ‰όͺΑhθόoΏ4Π§yψ‹Yτ‘M§˜¬•M^αώ­αRω’ζΕ>‡žΐβΏύσoYYΧ4TώυιJΣ»YM€ή,wΘ‚Ιη&•‹‚WۚL£ή΄θ%ΚΛT’“˾ӝΒX3XΎ‚’ ιLQ­€όΝΟώ υt τξπŸ8οΟ0±VψOβόλΑα€ΩυοAτ’Qϊ@ώ7 ’ώΛΖάͺό―·ŠoΉtntDυΙ\ΞUyld ω'g?K‡Yο>Ώ„£Ξθ­' Bb‡δω—ΣΎTƒίΕt0”φΉΟA˜γΈ₯\j© ¬7E·οκ7Ρ†πΏ™ŠKκόΓ―ν*ήυ·–©κ?)ό―"ύU€υεΗ‡zbΗOƒ’Ύπ…ΧΏ{ΠvwεΜμ\'ΖΎΔΤςJъ–œζgΫΉœ¨­ωu―ό—Sq΅UυŸ€ΰΏ£λ%Bƒ?μΏ+“ˆ0Χ¦²^ώSρο ΞON₯βέgŽwΑΞs›1ξ}₯A:ψd†‡€\U"mΎΫсfΫq‚ *Eπ<0i(;Κ}ψOdπΗ0όo‡œωGa‘=gόχΖ2ΆdόχRΥVψ_αy’Β3œqrCΆ{,ΫσΥWŽAŒΪ2Ύˆ΄ρuΤυ !Κ±11š½†Ι #:ΩΛ΅Ρφ\ψe‘8CFlφρn]ksμ—3ιΝ’Ψ~σϋeΫΙ„cMxv~9KΈ}Σi”'ͺ€θ.?8 θPΎD«;|ΪλμLOuYέΉDΟ€ΦσC΅0胇=ΓZίΕQ>CΛxΊΉƒ³’ςnΪΌ‹4³U$Άϋ› »jŒI·:Ίy† Ÿο(u'wrutΘvσΗ―Ÿώςϋ/Ÿ΄ό…jG{Τe·Ιχ"eΊyOϊΏτόKε˘uΰ^–ˆ,ڟΏ"ν?+CΩnΓώeεΩ~Ρaπβ½ξŸΣ$dRtnΰ_ΞΟ>Ζf£’Εfa*ϋэψU¨αϋ‹ίˆ‹·+Τρφ]»ΉΔΦΞΆ{>ΓΏDt¨ω¨J/x£ψO 0[k'eώΟzΰΓ/ΜWce‰Uό―ς*όώVόΏΚ;«R·ήΈΟ²βΐΙ Ϊ±ΣιŸiu„+8Γα’a's“iύk¬ύτ―g-…Gs¨X9S:–ϊ͏εΦY-•ύ_Κόϋ{ρdΞΙΌZQόΫ΅š₯)ύORˆ`ƒK#―xΘΰ".C9’nΐoT.‘ΪΞν–ΧΜ„ύ̞£tP₯Τ\5ώ“₯πŸŒωχ‚$Ρλ¬ψί€ΤWόΟ +ό/Ώ5P[›l\αρebqG’–kŽΜ›7ΝοΡ ΠYpώΐΰ―Ϋ…ζπβΔuά;p΄?§Rgς_‘σwŽdγΣTό2ζ Ξ;z……©½ω&‰·†+€‘^Kε(xΜΥΒ 
{"μ":*εΗπX3G΅δ„ΘW Jα<―ΙΎWιο S†ΰͺ(iώ£'Έύž1(*Ωώql.Hώ_/ ώουv£τ?…ώ/Ί?Ÿ#(βvl“γ<νtκ—ά ”²(*s™Η+q'ΛU`ͺŠ^RJI›SβΫπŸ!τ=‚(]…!T/:*αΟ­L |WΩ'&NΧUĞϊ^‰"ς3ή[΄Ÿ³½EϋŠφ–L™”EΎ ₯m̏Ʃπz«κΏ\ΡόW6 ͺ@_ύΗ w΅Κ~«ψŸ „Ο€ΓηNΊŽA΄ίZ€mSλ©έ…anθ)Π.σ³6Σžέ‰ςœΗΐj^Π;#ξ}ωq.»V-βh1œ¬n;x`ŸLοxj@©WυΦχ†-Ήε(ȏΰ$Šσzπ_:#lΐ}ψΟ λΏl֊g~όχνξŸqΰΓΙΖ²CŽ)Ž«ΧΟE~GΚιΕTΟάQζXΩb£¬j~[d(γaΒ°ΦΞ œ'η?ΒξNΛvΌ8nΈ6•5αž”G΄ΊςWΏωp/„ό9=γ\Gϋϊε³…υ)ΨǘqέυXΓ₯ ΄οH5}ιπ»Ώ /;ύO<UxZ9ή•όοΓm€μμVςώω‡Ϊ§»Η9ωߍνŠδQφ_εWώΕԘΪί4S;=―ί;ήτΖηΆΜ+ET‡"ΤΗS’νέNͺsš#N7ή*JWύΖβ?ιόο ˘ίužΔ²1ψ-Γ«ψολΓSς(h ω{ƒζΣU4ΊώΊE•²¦HθT2Ϊ¦„ΘXΎ)|ά#E²? ΒζΖTόΟRζyΓoάύ “ΟŒƒ½ψoKΖ*ώo…ήK•υΞΉa qtpζUΦ½GRI«I₯I1"!ί.6d£Αΰόk₯μ?2π_p8_p^ώ―₯IΪUό§Βο«ci°bΉCKΦ<3 œ–·€ ·sή8[p {럽aΥ(ZIƒzƒ΅΅\.ΟE₯¬€οΙφ/· ɜwΙ€ό_λՊΜZ*ώχϊπ¬ρΏu28δx§οθa<»Υrσ΄φ0]θρ²(mΗΧQcήξy” ~=(ζΩώ>΄™GΙΪσ RΤe¦Rς™g |ΣfUν¨1»υ;)A ύΕΟY`_atκ ½zοΓrͺwΧTsgfxF΅x|Pv{zΎΫΡ^w.Ρ3h™ϊ‡j•9 ½tΨΣ9lv_œp$^΅£έηZΌ8Κ;$5ˆCuΫ@}o­Τ0―/u›Ϊαlj­› ΨΝ ­hnΈθ²rΜbJΠκΘjz»B=ηDn@“ΐg)ύ_ΖόΓMλεuήψΛάώ?s«όΏΧξ;Ϋ/:Ό^²Χ=ΰs:M•¬rT—šqI\~U΅ή²co2Χx¦DbjnΕΔΏ!Q _Υ .?<”‚β eψO~ώΗv©ό?2ζT‚³β ›2ž~πΖΤνΟ5šσΏUώ?w3ΨΫ>ύμΎ°sWη|kΨϊfβήrπš―φΐ6nJΙΚ*ά•_KQ’•’DC’dDΫ.ΗΜ(ƒΒ!όΫ•²J™ΰpIΏΞ˜Aγ±V¦ͺ¨πŸ²½*Ϋ«βy›ό/Άγ ’i-—8s€Βl(ϋεΏtώΏ΅’ζ?†ίΞώβH μχφ?km)ώ—w―ό7C2£@κ`#¦xfžζHω―' Ncb"LΉ8σ^(ΖGΕ)·‘_UΖ0x±Ο‘DΑκΏωΩͺ. 
Γ,°ZΉμgΒβ€!ψ΅Uψ_ΞόCι²ΧΓ π„iύφ_"wSσ―μΏΚώ« ΅²r8ή w*t4$tβ)νΏα¦Lΐύς_‚Ά+…€ΰΏ,VdXΟόC¬GΪMKΝμψο۝{=Jη “Kθaω|Ώ\˜ £­œ·Β‹Σ™r[‹ŸW8X‰@‘’_1tζδV©Βg.š•›§Κ$ΗH" σ4ΥEύ AϊŸάΒJ~Uπ )H›€wXQr327 &7σ{ό‚$΅Θξm(—G±ν\‰Φ‘=ο<šΕW§4 υ»5ό/2pˆώg.ƒΜωHΩ«™+’e³UρίσΫeπsΎ^ΜwA}ˆ²ƒXUΧνΒΌ‚CΞi­8M@pH:% ’P(55»α»Θμ3Ν0z)v0ιrΈwF±«(ΊΘ9ι" ½΅W7G0ΩΟψ8‹ς9σb6"^TΏAψ€28„qc)ύOΚόP$―sϊ,cCζš+₯©ψ―Ϋπη!­θΡ9Aρk³*G¦’€βU[18―αeuആZ§V«"seAc’k³Uρψμ •ΟιΆ#ά2Y:‡«)Iazυ„dϋŒ­Κ•2™dFόΏΚ%ώOρ?Ύ/ό/1+|d‰8 υαήbύλ w\ƈƲ½hO]m™Χ·ͺ―ˆΛ7Δ¦#vўYgSΤΫC[mόI8Ρk˜ ŠΓ${y·‘Ÿo¬p9mΏ°pE υCc¨έ —ο½ΈNλsBΏη”NΟ!ΟΪ^JΫ"g0ΖψP₯.[άΡu½!ί ˜;Ν―£Œ[ά>H{Υ<Ίύ©²ͺNe$Ά†M—QTγKυ?ΊoΉΓ ]β—J§€Τk2‹0ΌšΟBMhώΚ‡Pζ/QϊΏtϋΟZρ?˜ͺ¬Ÿ8ώΧ0Vι7-5sΫ ˆG?ΫN@ιWEιΑ=·ΪοΑσŸ.;ν?(€ŒXƒo xA"žλΓ©ΉΤΒ€ΎRΧGfXξχΑYΛοa`g1aq€ό.ξοΒΛΞsγS±^†‘Ϋ–—Q9^—α΄τWeΗUΏAψo ^0oMCρΘΠJΎήωκΏn·&Yu«ζ6π?„gϋE‡=ΐ‹χΊ|N•ΐT…bίO‘X"ΰ'ρ £v~± D0fΪχχcM§Pησ•™h€§τeχΌ=ζz’ Ύ‰zpJ±š‡ύORΧΆΞΧUmΙoMϋ9@%Jοο Ϊr9ΓWhη s…)Σgš©gSϊ,*ƒάγ’Ι­Ί6ώ“ƒΓxζψΥΥϊjΖX*GΩ؟§”Xρ%ze¬$–΅–“’TΠ―F]‘ “½—šΎˆΤγӎΔ-fKοNxHΈXΙ§¨yΊ<Ω0^6ΖJE°lϋΈ¨¦$ΠΞΆ€B@Σκ ¨Ϋ€~W‹―"ώKΕH™Θφc;γ±_‘ue΄:ΨΛΎ6ΙόOΕ0Ώώ1Δ ©gΰ€0y₯J>_Šΐ¦>₯ξ\‹Σ…¨ο…¦υρŸq9…›pΒ1%²“jM¬D\£ΆPΚK-“—Σj€|VΚ„ϊMŠΖͺƒψ? 
…eώ£Π₯τεbϊόo,UIώG€N7δx|HαxυΈξx.\΄Σ)h­ž‘kcՁva¨£ΫΘΝ£vθVπΠCε0;ά«χAγ+αιmηΞ“s²]Ώ»Σ²{ίγ«€6@uθτ­…Ξˆ~=τm_>kŸ€θ`;i–@r’Ί υ-fΖu:~Ώ Ϋ©ίΘ1pώΫXKΕyMσ_‘άX°m»nƘΛΝRρΌ—ψŸvΘ“nI΅Œp‘Ό‘©\Pn")=9?»©}~¦αƒΈ,·ρ}ϊςSŠΜ(¨¬j‘“Ντš:q«0mΘώΟ‹ψεΏΉ\[ŠUΆόΟ ¦φp­GΒ«ζϊ俊τ•ζA>Έ—' ²½oέuβ,ŠΧžπ²άK`ευϋΈ―*εPy»i)†€ΜA5‚Γ ”ΓΆΞ€ ŠŒ Φψβxψξτ[`T™0g²;Q’„e Φr•αΝyχ”ώ•| ₯³\ώ˜8„ΕΪ*ϋΏŒωΏψξ― –¦›[’ώ―eUό‡ς(˜>-£’ ιΌΧƒ ιd&ϋo1")s0<5Β§€Ά…Γς‡_4;Ž!Oqx% }J$ž ΔώI2›ΖRα?σŸΈόˆgΕ–IΪ͍²+ό―π<ψ_ρ€(=fdeΤ+6ήWΪ‚ˆδk©œF—Μ €˜@©οINJ]ΚόC! ;Ȝψί Τ2WJ»>ό―ψίί»r*Ό=§‚X kζͺ΄bLϋWHƒR—³HšL°N‰Σ‹!NWΏ‘ψO6ώ7χΜσ4Ώ8[8 ΠΑK"ˆ΅°‘aΈJΠΗ·Zρ?ΫεFΝμψΫπŸ‘¬;‚(snΏΰTBG?ΫNΔy¦1B°yQMΤΈO‰‹kΦ•ρT¬€ε~3’BVX›On`Τamφ|Σν‘‘ ,ΰ£hφš½fνΊ{Νqjΰœoλα)²g@'έ—Θ¦΅Ρ֝ψΟq#U ]ΝDF‰~†λΔΥ“‚OM ΗXΦΡpΆC&P\’ŽD£ΘOΎVΫ|5ψν=»{°ΧΰκFΞ~’%';ΡΫΧΌΐήknο ’ΰ¬₯·§‘zξ±A¨ƒ@£vp=0 ;5M`L‡CαΖ’€θμς0$€ΆKKΩζT`>ΐ=9plψyŠ3χρ™Αcm•ύ_ώk€ΉδδBτQθLƒ FCίνψ¬ΌΌέΦΞΓό9π2’­ψV4”7 ŠRΤρ‚‘6uμθΕυ“ΘδϊΚ&€~|ϋΏ0ΠϋΟf£ψεΜ?ή$‘ς½wc'€_ϊθ`Π~ώ’ώχώ­ζωΏ»ΈήΎQ| *›q’γ3)ψΠΤP˜Ζ―Ύ“REd*”yΨ9ψxΊG,?.Μϋ»Γ%WΈ±MεcqDGVŒ-¬ξΐ }|’§ŠΟ²‚Iΰ}ΰ{+ξ*²ύμ·h€»yΓYαξš!«ι©iU€/γ#«*π 9K¬Γƒaΰ"3VεV‹OV§΄Όoΰ₯T«+6τθT§pf<#j_p;z6‘΅²c­Κσc³ή―½+ΰ<嚈ΥA¨ηΰ’.š<§'D€ΩmρΙ!ƒ_ΑόœΫ˜λ#α&ιw—{―Σ†τe”zRμΡψͺέ\tω­kŊ֊=Ύ€ΙΏόY‡›·DΊΟGΪίϊεOΪgώcτVtπβ|/€nBϋΐ”ΕπρίHM`ΫVΕΙ™»ωΗŸάP€°―—ώ_›†²I°±Cυ:*Ÿ¨Μ›|$ŒAZͺ ¨ςW=̏Δ`kψP$‚H‡ΟΉN=±y’νA€ ˜IGΩο­VA"<„ΟφšfΣ‡±•ωΔ—εΧ!NςγKά Ϊ³6π_ηsÁލ΅…ͺ’t‘|θ“θaXϊθΉυΑΘ5ι&t'Wd}…7NΎ­ ‘ό‰τ†‹Ή/,|ώQC=:2U‚&φPϊΐΰΏρn€!ψίX+ό/gώ«°FL @_ό§΅6šσΏZ)ώ‡ωρŽΫ§Iσ‚}ΕΓ>*Έ§ ?*ΤVΰk†}Ν$/„΅ΞΆsr}ΠΙwViΧ.φ'Γs‚‹‰\ΌT΄bΚƒiυΟAa΄·+…Δ€ ΐ«•©δΏ”ωΗ’HCΰ ν­Ό2ΘψΟ΅Š`ν«Aδς,6“Y^,£žΒЊ~Θ€ι¬ Βμ€]e+ΨΚTΠΪ4˜'-‘ζώΑ=^’"ΊVφ)WδΫ©νΪE€‰άŠ_ΆόO7€ρ0 Oώ/­ιUω?2δ™β«;xΝdΠςΎrΚ,²A>ήίύz ΚfD―…ΰZ ;oΔʊŸΣ„δ$Πυš‰ ΄‚q₯·UŒiβ†δ:‰ž•‘~WΈ†ƒό?[•!gώ=W(ω_όC]Ÿˆ2!Pσ?ΏόwNQΰE (Y‡½‡tΐŠ €Œ;—ό‰G”QMphd•’Φ€Š šΡa!™xL;7Ξ£ŠV¨²ο Ύ&Κl¦€ ύβ$A˜δ·ώ‘%Γ5qύW}η†ΦΗ,Ρ„b@’»#η_Φx~γ‹CM`H ¨w‚½?΅.π‘7˜&u'Td-dgž„Άο:ϊ n ^§;·ΦΦΏE7DNβΝlαΤa'ΤitI£·‰έ£o{Μτ6€ͺ–€sˆXc—“q7•,8(ΏΰX,?‡όΈpvΕ0ψωΟ?iEςΏTEΞsoΨJΕ"Ι†αs΅VρRπ?Δ3γΥjEΞZΩeΰƒ\ fιYQΨIΕOv|χ;f„Ύ¦δ_Aώ xΞАLΘς ηΎ¬Œ1Ή °J昩;;ξ‚²yφNkDq=7ψ™_M©/πΏž1)μ³0Χ‹ j€υD}wτŒ[°ίrΉ 3½ήεΚεi%£δφ£ Λr¨›¨·lL ύ‘ŸΣηπT""0N+uV²±Φ$WY*nB1’]+ώΏ 
ύo₯πΏŒωίοD«ύω$—i)ώϊ_¦5Uu’§ηβo,ΧΠίϊœα]?·Ν'°Χqg;s­«QZγ4Κ!~†j1›%₯κ؊V7)΄χb‹ς`‚ά>)DwPυ¨dyΆΰ“œ xt.ύ«hτ֜“¨CWŸϋ49¦―~`ΥοˆΚγUs2ΌZ/jα,δP½œ€‚Tψvχ~Zzγ}ύγΎzΰ>kTμ:ΈEρ—p'ΛηTΓΨΑ.α»J« \bxνWt?Ώ–šk™φΈ―T mOε’ ΐ‚α όonώ“Υ0`Ξΐe½Vωπ?Ικ»X™¬$`-ΐž₯ˆ@6@ο§± )-Ο·~dέKΗ;3χhϊ]^tΚπϊeΨ³Τ}:“Έu&τμLΚΤ5AΒή”Z† ’…^+ώCώt%>‰ΦŠ™Ηy%ξσiU+~ΰR―₯‘ώίOψ?ΙDΕω-+T¬~#πŸώ/Œ ώ“9φ%²…ψzζ½6‰ϊ_+UλZπzΙQΨcV`(KP’Hpn ΣΖχ=ζxσJρ~'™`ža^%’ ·ˆ ž›Μ/Υc¨5ΦΗΐGF'‡ Ι­ΊΨa!ŠΊ½†IΠΧy³½ N»‰iνnCΛW’o¬Γ Άƒ!—uΗ(|+~‚BΤmT€£ΩΫ~¦ͺ63Α45†Ζ9έφΐ,‰ydo΄δ<ΖόΌΦΞ°ή#ΦΊέ~h{ΨΦ 1ξ$1Š©Nhη°ΗH7γμξSœΥ­(—όάΧ*Ζ Ϋ;ΨՏΝΕs\ƒCO·`eʘWα ΒΈY+ύ_ςόομ˜CίlΘψΏΥFωo_?ƒξŠ™π‚Ϋϊε¬οά#Ԝs6 +=˜ςϊa₯6ΣΓgSv°έΑΨ(Α]ΫυΟ—β¬y£ ]E€†JS1_΄`Θ>S.ΙΉ¬ 8§ϊ ι!IΆ‰1κΐTV„£θ xο „λρή#l,’n-Νu.€TY]ήΎΥ‘ ΦaPΫk΄ΊδΓ*7xCF"&n—¦ι Α™GψΓ*€ˆΩς$Θ™psF(e*šΈŠά§ίύΘb*bΚE­‰ƒ«0/₯7ςξΜK}ϊΏ4ϋ‘ς?―Αώ…γλ€τρ, ’{emΤόΟoa3τP{Cδšpq`¨ŠΥΊ©Π©ΟδνtΖΥ]9Σ₯€Jƒ(£ΐ»Ί}Ί¦šaξk*yj₯Y°ήά¦ΥN«ΨΞ§Ϋ So#–ή9©=Ϋ4 •ΡΞy:½/³†Τi„δYμnž.Χϊhο:§κ!WϋψϊεsYΝ!ΆON@kSJF₯@By­Υο–ρXpHόj©όΏ’η?ΖΣηs«ς―A»%·ψΤ>in5`J†H<φGΰΓuαhι&‘#6½†fS{b|VI@Ό0–«ω Šqξ’ς–Κώ+yώΟΆ±==Ά7τε™[Βώ»1ΆJώK‰‹A”<žνΔ9₯φ^ca‘=ul` psρ™†ο#τNHΐΝτ9ΠΩo²oΚ¨<ψD]γϋA»Ηu¬c² šxΗΕΟEkE»ώQώΡυA-θ*τγ ΪWJ²ϊί(#ΐύc(ώΩσίΕ”θε\’ό?k•'G‘ζ›‘L‘s&ι©΄:φβ5F•*Ε .Β ˜uΨτMͺŠΧηψϊόύF3πšPH^ύFΰ?!₯†δ¬-…$Ο?6@ŠΠzλ­ώ΅a©ϊΟΧƒIΞ7Ή9λn…€βŠi*[KMY μΆ0ΉrαάRωY9WœΚUhΤώDsχmρ|xΙή@8_φi)υfjό'@·άlώ“;ίρΐρ–±¦Δ+ώG ψŸ―‹ΪaGΨCpΓn4dWš yΝb6έ”œ}ΗϋΏ¬ϊ?*ώ_Κόƒΰ\ΰ7-ΖυΟ2kƒτ[›₯А =ϋ·ΧG€[C9…Ϊ ?8‘Ήή<ι½aτθ·γ³ΫξgϊΛ—­žΪvJ±ΟmŸ+zΒχ^v„ΪO EšΕ‰ͺ]8„Ϋ'?ΤμG³: 69!ކ>"4½ΦΪq<7Ι79l2΄}ΧΡOΆΏχΘ0„JŸ΅v,y©τΤ]>γΑvΊή.Ρ–#.َŸΊ^1>ΟΡ]δvέ(:=š“³ή%ϊH˜^i½!―Μ Ό@< /Μ©Θq•αЎP$TDΰ^π_χwίcξ)1ΧΕ!ΙUώw©sqxΡv˜Τ23Θxπ"hΊΖ‹Y=ΚŒνP9+PT†ϊΩ~έ‘έψF ŽΑYσw3@mΟ7d‹ϊ6†7¨ΧΒ<ΘΌΜd&]=MΫ/}VσΠ·’¦5Φ€βrÏώϊ†elΆοβ)εaι1άΉvΑKz!!'κ­ξΣΛ\D‘ΰkπίψομ,Ί]’»B„}]‘FspEέ保0 ž]Ψ@;ۏQ}ήτΰŠ~Ψ»H­EvΤ zπb¨b'f Κήρ‚œ4&‰ xy JρV?9ϊŸΠŸ‘ϊΏ΅^©όoΉσŸ ˆιγL<ΧMΟVιτ–@ΰ?»QΰcΙιeΉr¨ͺNˆ±2™PΑνT/Βl¦(ϋΗ†K˜πMδ'ΖιΖcΤc&$=L7/œQ¨fΦk_ΆcSr»Υ±‘YeŒ™vVt¦eΈg€½/ΧCΘβA‚Ά8fιŠ Έ©X:=„rIΫGœ Η A°7ݜ5x“1jœd~·>{°ΗΞ€τπʝύaΟξUΓk”ΰdξ=ωπ&»…nΤο δΏ΄ϊO…δΟ?ΞJ {νζšRIΩ$ΰ?ηpΤέCφΰ—οδώλKJΜ‚vh‘dG±P£ F…ί7J$,t©S<³±ι†—ƒνΕ NωβΨΞ C\$pZ—™Ϊ0¬kV¦6·τxˆoƒc/P]―αγJγŠ„ΥΏGF@₯iΈωƒϋȞ{φŸŸ~Ρ’ ³"Ÿωu@β4HB‘βωεh8Δώg©ό/9σχ 
•ωͺQκО>k½έXώe“€ψψίφ―Ύξx_FG"ŠhGK0/ό½Ίo©ϋΧ,3ΣXΐTΉ c6ΐƒ±B£°–­α―YC’\οθλ†qΧ΅Ή±W™gJ+RΝg0_•­š₯˜#΅ΰ}­—μΗΝdWθΗ ŽY“υS£ IΈΩHœ4€Φ >2ŠΞdΊ:Ά‚*ΨΆ“¦LƘ2]Š(ι$δfήΪq³ιx>ϊIΘθΗUΎm­ΟB)~‹©Πsbsͺ’Κ#*‘ώ£ΦOΐ ανRD§Ρ¦WέϊZ{"5τμΔ‘‘#δ°δ,;¬nHΝ»h&Σpgaπ°ΛH¬Σϋ“ŸΈšrO²,όρλ§ΏόώΛ'-_mΪΡήC-: Ο/νlώ:ς+—fž oε­ύϊŸœόoΓRωίΧ‘£ ΐγl}ό«%Qm½έ(ώΟ뉦½ /θ–Ÿ ኼμΪK½‘F€υrr#€@Ν]Oe'8ζPα[oήZ/ΊΌ‰υόܐ“ψαFΡ Ρ AuΡβ³T(^­°ΉV8žkζI΄;ώ “}φ©}Γ5?f%`΄pΕt ΜώυΛg εϋ1N$@‰Έ‚Έ'Σ:^‰s@jΥ‡ΖιCπΦTψOΚό»>όώΟ6ό—0¨>ώΗνΪ$ς·+₯ΙπAπ·ν, Λ0‘Uo¨SŽ‘‘ŒΣΟΰΜώSHa=y ixδΓΨ\[ŒΟ¦·0π˜Zΐϊ °E’”’\¦VlQ2ƒ“Š›Έν €Žύ_Δώ‡νRΙ)ς?χtΝΗhl, £‘κΏ\}όΟ)eB[¦,‘-1aμlξ Lˆ3v}ΗΝFί,ΪMPʐρŒ%ιΑ.μLΜ+–冏هP97ΔΠ ³Ύ>b—λ?½ŽQσΩΞN­QόXzζyDœ™1ΝjϊΘΌθ’?δ¦k‡Έώ œ<… g©˜Ÿ2Un‹‡nu‹ΟΆη='_W+Α»/h[οο~Qΰώςb0­ΐŸ5V™{§ζfJ˜ l κ1NΩ±ΩrΙx‚͍Κλz“y]’•Ν‹»GξχΟpήΡφQ[_™`A‰RU™χH 2ή΄±”PΌξο~½Έh=η)_Ά±εS]D\b*Δ:‚/jχ™ŠΉ–4'±XΎΜt§zKv •ω2©†Vh™5QkΣDg‘# (η’ΐ <6»D #Ν2Q܁b‹ ΄ Ρ­­Š“9ΐ,όΟY{΅Uω2ό?CC=λj,₯.^J E!Χ₯Π‰ˆŒ(€4ΟPAδ ηšoQh‘ε)+‚°KΑ‘"Β7’ŒϊI‘ψŸ·ŠM*ώGϋΊ 'PoώΏIΔ¬V*ώWώcσκΰ˜ΰΙ¨ώ°1K<^—c=I2«υv„%ž†O9.Aτμ:Μ<͝iεY_°xZV€–—-ΠZž]ͺομ`΅•O2>Šά€VXžjΔεΆγrΫpΉΜΈ·™Ί_₯ΫB·‘†“u©AUš”’-Όμ<7>;‰]S½)5Fώc‚--gώƒΓ Σως¬­Aζ―M'—ρW˜Ν{H,•Q₯‚~ Σs)G¨‘R5NςͺΡ¦Iͺ ώεόθ„—Έ$0eS’²•‘4ϊτΚνΌKWΕ9πΜ1'ωg«‘­δ),ες†s…ΫρŸάόߕ╃³Hέσ? kIς¬ Uσ*β?ϊΘwQZ.X(ΛΙμχπln‘bμsΰϋβgtqJ£e΄°—4ϊ€-‘ˆ™ο•€πΊβΆ ΏbpΕ“ρ>χΩρŸke‘3QΏmι¨ψ“λΔΣΧ3–[’ίTυΏeΘnΟω#.EJηίΩ±λhΩ²Ζ~2lGAΎ±ύ[ dςI6Ώ»)cώ>*?™ϊ±ξrꘊ[Ξό/χΓ;λεŠΰ0Χ–šωϊ΅ΠO_]ž1τŸ‚ΨΓ2φ2V3CΚoΦ΄T”›ow–ΥΆ—{Fj©γ+Κd₯tŠΗ¨VCͺ=@ώga @φ\=A=•½<Μ₯(ΐ·‰ήG³ƒμ«:αGα "›˜šύΊό―·Α?7M)ΦiάΕa3G­iXρRBί.>Ό΄{—Y»Ž.³B ΪηS˜Μ}=‹oΩν’Μvƒ«σ(W#Ζ±²Y™τϋι|̟Σͺ„_Ώ|–¨οOZέJΧΥo„ώ'”ύqώo6ŠOΦό#Ψ9£ί\­Χ€g©βΏ―KŸ½Π/—A‘NcHΡ¬”‹S§'Υ?Ηsΰ Uν[_–)\_š’z―ΡκΑvΰ­ΚΑ©ΩGρ[\›ό—aZŠAΪό£dmψ ‰Β€½υί(όΚwνψ%0”’ηRuΧΐκ-A'²EΘβ4q[œ.n±”Yz‘Ν'–Βaꃀ§Xβ{Lνΰ{άύ"¦ΓΩV"dδO― n<ϋΏ 0$ΫX+ω/eώσ*YΉ ₯'‚κ³Xa@Ωf‘dύŠσ'v΄π%RΪ¨Ά)+›"έ΄Π§!ŽΣωΔ γ ƒFβwξΪώ~Ό .»šΓ'ϋ‘c%{Œ?ΕΒΧΆEΝb½Gwχν.e;ƒ₯«ίσΪ=^;•?rΧfy${‘Υ#ΙΎςΎ§‡t‘«¨Εƒ±š[ “ωd–We―Ήω? 
ΐΈ€•ό—1Qί|ώ?Γ0,²ώοj©ςΏ$ΨZΈ>χnΈ)Έ©i(+™^\(₯±Ε(Λϋ]Ό Ζ¦ƒ'ˆ¬6L³6΅Ή k6(Δ,ΔU>­š le¬š?¬Ε€EVΩiϊ¦Α/Θ#™]„°Έ~gVΆΞ;ιΙkβf"γύΤP»θθn~χτ°rΜ¦{ͺY1·έυAtΆA†ΕQŽž¨‘ΔΤΰαΞωΐυpk}d…η₯ς’†0σ-;„žΓν^5€±ϋ™Ε#ςxe˜½ΦΓς£\!βσ‡ύΎΫτ; ‘r‰ι,t'z “ ³ηfcΞ<†6›T3•*TuΖΎ{/ ˆpΥE·³_­ΗΝΊώ DPΰtυ]΄‘€dσžι–qC51‘φF¦¬₯+Ψ?³β „>\W…Π]jΤhίέδ€?=οwzX u‡4J‹L£„§λζr| ™³σ ³‘ΛΗ+,ˆω‰ Sy2kΥBϋkbGI¬ΩZΊΘΰΨ‰CΝ?Φ.! –aξ΄τiАώ%XΫΈŽ†φ€…φ3&νŠ5$/~†Q΄Όus–ΙPΡPα’κ7±ύW6—ς˝ωκ₯΅^›υΆΚ##ώsο!˜-ƒjΞ?ΈΟCΙ…‰1Ô䝆)»‚SZμτ-μo¨†ŸνV˜ωv: ξ0#.i3c6d9-ͺ˜MW¬ŒΑW›₯1fΏ°₯kρΌ ·QfMl΄NΛ—γWhC½B?AνΓ£ΥU†¨5­ aΡ“§*αΦi]šΤΐ4i΄™Ι}]²[cnΉ–›ˆ°ψΘ:­„Ώ‘π·nό/±ώ›ͺ%IΓϋχ~7_ύ7ct½fόΧZιτ?2 ͺ[T!sX¦ϊY‹&7gΕ΅γŸ„„ψŒτΥ ΅££o]C²©ΠΞ6bπmΤ„Qg¬9Ά CΈηώ–Γ‘Ω<„Ψ·ΉΨ&^τ Υ6žν[Κ8.h—\i©ςδΜκEE‹@t?!W•/Cώ '{bckΒΣ·@Ψ$$Β77wι;{W₯% bΦ‘œsNqK―ιIŽ~.Άp½κG.Ϊ΅~›'9ϊΑ=žίj²`?1c"qή'LŐΩ΅$Πˆ‘Pτάλ5ΌΧ§υP°‰>‹@Ωπ >Γ H­&a^»jω/±¬ΧŠSΚό!ψβ @}φŸΥ–δάnTό‡ό眒ΐ ΠΗϋފ ƒsξL.pH”]¬ω‹ŒΦ±‚λEΜ( „Lœ0;ΰθo<$xIπΏΑ.†0$M;½T)?4)ξΙ?a-Λa©6ZI½‘fΓ©άƒ"j’Ύ 3 ±l΅Ϊ₯Μκ Ÿ“{c’υΏ­­ΒδΏνΗξ#ά³Ο™,Η!€v’ΌV[i–ΐwΐ£ϋaώΏυΚTψOώ+g]wp‚e†΄P¨OqΚ,π œ~½IΣτQA„ωΓ"eά«ί~ύΪύαβyθQꦝRUΕθoΈEΗh‡'†X-τώ„σ4|K©η€ΘHύΧ8σŸhŸόΣTδ)ۈϊMΆ…CκΏ[Ϋ•Ϊ%Ν=7iΌ!¨/s½$ό?ks₯πŸωOΙφΔ•’žmΈ:’ά‚˜] μ±Νξazf|β„Πd£4Z”8F‡ϊ”1ύΓϊg(σT(ΓΔƒ§Z±Π€Α&"ΘLΨ‡4΄βΣ—Ÿ δ£EbeνάΗa€!ς»VωRζ?ΥbFυΟvMΘΛ0TόΏ|ϋοΗ–Ϊ=f[`«ν“”›‡—νϋ`g ˆB:ϋΧšςO­ξΣΥ2ΙOεΟ͌Ÿ3θΊ¬ίΏΐθŸaρ?KΕ:σώ_·Ο½%ύΦΚRώίωυΏΜœέ‘ތs»eίΎέx·Εό±ItiΚ\ ަ@²ΙΥΒX΅8}¨qΡC’’ΩλτΣ°t=–Η±9 •‡Σ82– ζͺ \”«`ι™³rE©gw,Κ\ο¦:X°ιΎά>­jηz«6Υ‰χnlοΰ'Χ1ΝsψΗ=όχΓΩ~yτΐ3πƒΓ‘RΕŠ•"č’Ζυ Ε”šΖ΅yOe·œ·Κ>bœ2™Ώ<τΊ+!Ŏ[=ΩνjπCΦͺοt Tζf†·νh•²γΏ»\ό7cό·eφŸΥFεJΠωγΏqΐw{4χΞ³Ÿ€©G‡½™ γCPx~Ό=-όΩ…;t[0x—ΐΝ©h)φs,΄[BaŽθ ο;:P‘pˆ€§?qΏ‰j'†*άόξ2βΏW–²Hωκ˜Φ†°―V†Š Q&~Cb’χΡ+=,˜|¬ Ω; ό”5T†•ϊ!#T3QΏ€%XRlŽ‘Άœ–_ξ¨ή–b£|všΙ Y4ƒ„›±ΐ¨]'–±jΐ%#Ή»Ν‚sΤι€β€₯1 g“λ¨ψ™σzYν[έ‰^Γdœ ¨­ίf½TώΏλσMSΑ#mνϊ‡€{θ:Vε‹'νqχ΄Δ—NεψsΆ^έ€ŠpΖuad…{L·ˆτhζpͺϜHn𐹲qY»ΗΌ¬q:ΞΩSΥ£Uχvλ¨8‚ζŸκωrƒI|U_’ΰΩέ#:mˆ‘Σ€Ψδw>€»,Άω•›«†ΘηέƒλhΩ+όΑ³ˆ’e—2ۚ›υt6@r$•Ÿσ>δ$8ΑζΚwMσ?ΖΨ_ΰάΆ Ϗθ™ρœΥ‰›kg°Ή-bΟ΄αΓZOΰU’eο>Ώ`vΛή΄F/±D 8F)PA·‘β™ΰδTh½’ύ  ·+UKβόΓΰ΅;Oύ7‹¬±^*Ÿ "{:‚H’ wvšησ˜ς¦ΐ]!πtί]ƒ ΙΏœ³Ψφ0£·mˆΪƒέε˜Uάc2Υ+jοΤ’›εΔEsχωΦPtYY«ΐWQ4†Ι9Φg*ΧKKltΕpT}m“~v}χ §ΰΎ€½†yΏ΄r D:γx±τύ p•Aγ½WCιά%‘ VΚsκθKŒώUμΦπpφb ₯»°£r˟ 
#L‡ͺKU…΅”³Χά–ΚΎ‚ω·/‘=uύ Ξ6aΫ¨ω{ρo#|xβ‘AΔSUG/nƒUΡ­{«Ώ 7ε„ӝ7ƒGoR§/hΖΕHϋ2w_\qέν~Θβ9jΞj+’vfκΐ₯/eŠ>韠ΤΤ*xwb@­#)­P΅ϊM„dΤY+ώΏk˜½Sγ ―·*ώ[αFgAQΒΖπdή:ΡΈGG)yŽŒUˆ£Χ“‘Ξ’K10ͺvLϊH†Υ[ΦίΊίΚ¨mιžΛ¨ΔΥ.6Υ }Qνεͺi:`±TθŠc…Ή­|}σκΏϋτ»ηΤJSϊŸϊM„₯θŠλζ? ΎOŸ³5Lrώ—*ώk~ύOnPξΚΞ„ΐϊvπ'#Fδ3ΟΣΛ0½1Λ:άή6°bΨ₯δ¬VjΏωŸͺm#@oώ―Iδl–ͺώηΘd#°_tΨΌx―{ΐη„ζμ‘>ZVΜQ‘6—†πήS%y•ξ{zŽ·₯7νη”δ™ΙWIμ#ΐψΟ₯ͺ{ 󏌳Σλ֊’+ϋΟυΙρœPrDΡπXΕz7μ  °Ÿ8p‘b©uΩ¬wπf`Χ͜ž]9=Κΐ‹ ς kυ5<6S³6ΔϊmvύΊoE0μrτKιε„0°ΜΏ±ά ώ/kc*ω?ΏόG»ιϊCoθ)ΨwžύL=:8 z|YowΛ ‚ͺ=[Μ—±° qΧiΤΨΏϊΊγΑ7°σ@Ο;ώHΉμΝυΪψ¨οα˜O¬ΟSJ(Βσ‡!$­=΅ΊσΆiωΤ';>ιϋk]FcM™BΤ‡Ήή δ‘£³.ˆΆξΐΛΠeΊ":sΟ‘§gρ=/‡œSΟώνυ1­9쎌Ŋ֭»‹ΒWφd°”ΣΩv0†ΓwƒO^’ϋž‡"C!œcœ”Rˆ7Θ³γ~ΜΙJƒfoV–½uΦD]”'v†πΜυ»‚ZKŽgš&9ωα²dλ9kΜΣ»Ϋύ2PφξPn#οœΦ―aU˜zίJCΫ5Χm}τ}ΆŽŒmŸj•'βL=~iΈm~8Ξυ‘ν»Ξγ j0ΘΪV΄³+{S[«(rd2&žΘΌ]u³ζΔv§6Φg±6.~Λ…Iύ³φ”™£’ίXMύvχͺEy·ω[Κ»­žŠώτπί°Y†δdo)fT3ƒΙτΛ@)–o‹Λώ–išJ“6pӏΠξ&H μ‹5W€ύί²Tό·ϋ…#’]¬P=ΖΫΙ‡ΫΕ!p<λ±Y.ΜL)™kϊhρ% ƒ(Ρ2Β’:9Bz‘}bhΑAK@œhΆγvqφ N%Α‹ξ•WςΏm†ΨMU]κόc₯oώΣά¬‰ωWυ₯Ȋg΅κ·O%λγ‰ŒNμ£ ’έoβlΚ½†β±εγk[Fϋ Υt+« B/I­1 ΪkΨ“YŒtμ†εf`Bƒbύωθœ ²σ0“9V‘(%]gμϊ&MβνV+·ΰ+ °*­μ*κ½Κyό/«ςJ›³ύΊCψ …Mƒ½Ύσ‚έp0Ψ7[²ώγvi)ϋίόώίΊ}Šu?Έx³ΚŸO ΩU‚ƒ†–\όƒv‰/Άη½j·γ36³ό }?ΉΞIƒkTΫ-[€1 LκʞJ‚Ά vU,bί`a0$ώ»΅Τχ/kχαZԁpΆψ$€>ύ³$ωΏ6¦²ΟΏK ΘΏ©dZΞβΑC*OΨUυ8π.8Υ…ΊWτϋ‡Ϊ‰‰κ JKƒθ2Xtϊ‚φ9rp ˜ψ[£μby†%‘v{>eQ9ƒK9Φ‰&=L£žσμeς%ψ«–/3ΝφŽΎ‘Šyc*ΔU_ΰΚΉ%Fώ΅ βQόoW3‡ΛoΏh2ύίΨ _OC7VJ—ΰ9ώρˆζ;•ˆλϋ;hα$+ε~J‘9°‘Vy,“o•d½Η&DŸMT γ‚eX”H‘ώΌΰ—πΩΗσωίώ0ŽέMχ–yπ@1XxΩyn|*fW"ΐKT·½πd£―ζfPΗώ?§ύgk,Υώ5ψ―ͺ€}ςίZ7ν?ζr₯ψdΘ’]wpΖAf A₯@jψ…ω±ΞOJ―WΏ!‘QΡ2$’Ÿέ'ο7Œ.m„ς$@¨ΤΨ+%xqIτΊ»Έή>οwe΅•–«Ϊsκ–ΝrCφ8Dζ WΏš}³Ϊ/Ψ„3‹xm˜”žύ&χ~ΐ/αώo(Ώ΄ωηd»ώ<ρŸ†Eκ«•Κΰ™Œ,trΒ%!.F£†)½Na9₯a½LΝΏPν{=Α§™ΑΆž©¬κοg—XMρΏΘ›Μ g “ׁ3OΖX₯KΠQ6’3E)χ’ΥΣΟπg ―1νO"QΠPΡ6$C’DƒΫψ›1ΉΆP§|A¦’~rκΈ1|RκX¦Ϊ₯νQθˆ$€λӌ%QΛZ©ψoϋ_ϊς±s Χήβ ͺc ’γN(kΪƒρ=<ώ3.GΪ|˜,υΧ/Ÿ+ ζ~E‘^89m1•؁‘R΅ΤOΦώ/hΟj©κ?Ι›Μ†5ΟjE±Z+5δ'ΏqEvάξνφƒvΆ_tΨΌx―{ΐηŒ £%κ( AΔZ_tMe”5ΧV±k_mδ²3qδja¬hΌθ Θ‘ŸΑ™4 ϋ ±ί.nΖΒν1ul΄[ΚήX9΄ω —έrֈΛψ~άΰΊ’9ΰ2mS¨ ΘρΕξί";h0ώbψ%{-˜gλΑ)iΝ@Β۝Вs―:ž ό€=³₯ΐiCzˆ.c’K>&ntΆ£§˜`’ν,GζΈτ€ΛΐMΈ%­jKζΪ4Άβ*ύm]`±(zΈΔΰq ΰΧτ5j?ϋ ’ΤρɎΐΎTVΣ*&Œ‘Ή…t"…6ο^)΅7‚%ςΏYK•-{ώ+όΪ£4ΑΎϊΟ+ƒ°n¬₯ŠΉBύ/₯†}΄c(qp`%@ψnήZ" 
υ†YC[‹iL_τ‚ψͺτ2€Ί]ε1©βp―A„q [ΟΈρ΅Τ™˜Ά0‘uΐM€‘{rΕ t‡δJ³.F^NM ϋΡ+)ϋ-΅}SθD,•/RHχHۜκ*EkՈ:πohΦ½ κQ4Ά’‰c²Ή*K•έWb _ύ–δΏmό7JΒdοΝ‰J&Θ3·"ΓZ+ώo ψ'€~p ©”Ύxu?2)–rί“ˆͺ1^j?{«3ΥHβΆtA‘bφ—“kιS†y%`τ`‰ 6€5-|‰‘œD±‹λ>8ό\^‚πρ½(±υͺr0“썩$ΆΪηΙSφΏ«š€ϊ 6φρ?.W&Α`Uό―ω_–9,ͺXVK&Φ‹R6B ^ œ M"2h`αCή‚ΩjVvŠθhρίίχΟή›59²dgbΟΊΏVzΙΉ¨φ}‘LfC㌀6Κh3ρΣVνk%XH @Φ2ύw}'@b‰ΐ’ΘΌΝF‘}ˆD„»?η;ϋοΪ ―0ΜΨNteΏΆ<Ο-jwξψχι2εlόΊ§¬9ίvό•’ϋ ;\φ°~³Ρ–£Ÿ‚…o…hŽnWθŽ±ξ†“―5½¦ώ“wύ7Άώ + <_ρΨώ#ξυή5.ͺΎπ°Fx7W”.TΧF…ν^΄'‚τŽͺϊ`Ε›Ε+Ύη]λUΌχœ#χΒ2§ZοϋΓ ^¬γβ~ΪΓ΅υŽ6 V>Ό)μZƒίΏnμu5xύ‡ίϋ?ύ&εŽ*w18+υaώχόο»όΏΛ»όΏ΅όί3MύBŸα—BWΘ!Μέσnςf9ΞΏΏQώ―0¬#—ίσίάsσ˜νk2aο)ŽoΨΌσ,ϊΡδL*°οό₯6ς‚ϋι1¬ ŽΧ9’.ώV;™Ύ—•ζ=ΊqΘH9 ѝμζ vGύnιQ‹IZ΅ΑΒtmYŒΧ°=vϋ£ΗfΛΡj>šΜΐŽ |ι,₯²ΆNΝ2’Χ=ΆI~7«8³τν’u‡νώ―νu?€ϊωofσΥί¬ΓΐΓ’I;ύ ιHρ?>όνί~ψΈOŸ QνΠπή­zo³—ΝΉ,ί½αGθΨ>ΏeωŽωβήαΧΏ,&a:ωσΕ?/X{£™ΌγwˆiέχκgΌm₯΅Τό™zFνΛΌγ…a³/?Έ™ άyΒ_x9Πsϋπώoν=γω’Θπ­τq\SIuψνιqύΏbϋYœΘ?EημπεβTλυΕomηΈΔp!φa+Ž~sΑΛ€ΌQηŽz©KΉCk»Ό@iξΟ»½ϋω―σWΊώkήψ6ψ[Υ±ώβŽοψoό7l‡°ί2ψΩΕ/ϋˆgh4σχkΕ·‡IzhςžΓdΆΫƒl΄z«QX”Ρσ²ΤηizφΫ…·—ΰτr΄f7£πτ΄˜‡Ζc4™>—YY„ιΗΡ*u2›΄]eJ‘δ*Ω`¨_GΛiX>ΰΏŽΚ*>ΟGeQnη%ΩδΨκ$OϋΏΉλο)_2μnήKΪ£ψOΥ½ϋ»ΨΓtϊ΅νΝ©ω›—υ£ςΉγΎΰϊΙΘŒμxh³ηΗu₯‘KŸ.>ΊjνĚΝf:¨BΨn¨ξ Τ[ρƒχ ήΓοk&·ωϊΒxϊ­—{[κDεΒΙφGλΕάϋJλ²ΥίΠΪΊΎω†y”FίίpΝύښœ·+tψί†Œώq“θ‘οwΏ6ΐΝ4€έ‡όeDK½½OGuΟ}GόŸίΆcG―{ύΗ·ΑΘ‘9ό™£t’ΠP^jŽNΛΧZ‰:Kηύ―£Γ:ΚwκΌά•vμMu1Ρλβλz@Pdζ‰ E±±BεΆΝp˜ύ‘(nΝO­τ_ŽζmΏυΣ.3"˜x+ “οΎ»ϋΏχδοέ‘ίε;SΜ­όΖvδΙ»ύχμ‹RΗ),W―oλqaΣ—dέ π€YCvuχΎ‡`.ρωσΈ½Ιž­`χΔ[š₯.Μωή΅lzš½Μν~&ΥΰŠ}ΛUΆ>Ό†s)ήjΉν@ΧΊ…Ι_π:ΙίΥwΟ}7ωΏ Λ/Λα,gγΏ₯=ZΖοςνγ.qœΉwτ=έ +Χ-RΏ_Y*¦£oΞ[Ζ!ϋCnΰλ½§¬g| ]™ή—”΄ΉU Φ°ό1K‹ωlώΌόuτ„AŽ žž[LšmΈκePg}ιmpΞϊζwσύίIω?˜θ5φ«ξυίήoύΙ<Ώσύ'‘ΰωψο#όgΨ=οπίϋwšͺ_ϊόƒ‹‡Υ5x§½exSΠq›ϊKσ χJ?$XώB›ΤρD웨ώπΫμεpΐ$B«^Δ Ό&ώΓΪ;?ωmiy|ΖtΞώ£(ΧοΐώcΜ=ώγνε£ΓΆ‘ˆgΓGυoΕ7TŸ››Ž‰jOΜ’ζuΡβξνJ Lf”Ύ4[]j½½ό½€ό„Y_}1π˜τ$ΣΥάΜϊ6Ίv7ŠvmΐΪ#L:°³½n6Ϋ4jk qSBpϋͺmxΥ†΅ίŒΰφwΣ_ΌόΔτŸχόχ[6΄β$“Ί£ώΓ=ο―ΕώSaYς…α=~ όvEšή CkLFΫ‚Šgn:`g3Ϊ½u˜WcϊΎ†ιsc;qSρQwηή΄mΠ.ΜkšΝvί㜽λΰFέi77"vΤIQΨ›ΔΝέΞΠƒψΘύΕΥ«jΖ›L©—€©—œ¬—ρυAΝ\ΊΙΨ:LGκΦξ¦Ϊ 
χz‘Ώζ[;ΰέχάIW[oρν[Υηιtpδϊϋ$ЎKt'¬ŸπkΣuΌ―[Œ{;μΊ^ͺwFJo,ύ-B½Ε―λΏφ‰xΪ.ΔΏΏφήϋξ}ύkΗο§ξψο=ΧΆl3³ΖOσωτ'sώ_‘ύΏό^επ͌šοδXœΌQyΟ~Ω0#Ϊ0›LBŒ­ATOΏΏ0©πpλέθ<ηn«»σ>ώs@ΰςίπ{―ίΖϊ―sAΖO‹ωΌΎœυκ£ώΦΪϋϊίύΏ—7•»ή{Λ’£­½κDΆΒ•νιξ.αήΚ['&ωtΑ­aŠξ{άeYw‹4­‰eΗΊwK'ρ.N]3φQΓΨχ’1o8χ„ΚzώεΘWΑWΰ?kε½ώΓ;€ Xκl_iŽς?ŒΌΗύφόΏΏ…βε2αš*•φ­kΞί*<&^ΪίTΕO€•ζωΕέ/?τTmρ»‡υ›9Ίgs”Zθζ°Θα鐎ޠΰηš>Ι‡ς₯,Ϊϊiέ8Ξ Λƒ"‘[ΊΏ΄ΎηnώμΰύŒυLž„=PςΉΦΡσ²)β?:ζ £U\«σΕcX]j”ΫΡIΉcΞ³ςΈ`―ΙΤζχnψoMΞoΥQλŽώϊŽ{ψο7XάkύDŠa9g]±o·ΓΓ§šώ%ZιΦΧ‘ύ ›X ςrρ;fˆμΧEλ(v™Mπΐώ·C―ϋy#CC΄g=ξΗy~ž–ώΪΊΫθŸσ―“Lαv£ϊΏοΊCώ˜άΊ‰B§όΚω΄©ε;(xέ@…Ύ,ίفδΧξC· h}έ ¨v )’‘&Ί‘ΜΉιqؘO^'hΓΣdΌ‹šn*oχžt—² όg ΐkβΏδ=ώϋύ֟”“Š?k:'υ±όΧϊ^χ=δ£σ6’t ±4^=?MΛr›φφ ψΊ­‚ˆ~rΖ„δ//bΪάξΨ p»ςκΌ­ψΠ"‘Άηςz΄ˆeυ­”ΆΣΝΓ|Ήj―ΕzΥ+{ίμsˆ[ΑΓ§ά΅όwα?kxMύ/-ξϊΫœΜ~G%—΄KΎ”oVΩγϊ²»οδ?eb§dΰΦα’:\&γ4μL΄ΏnΌ†?'κ―Ώg%ΗσŸώΛXβU€l¨Ιbτ/ΏηkτΣ°\`Ζε{*<=DΝ~θ*πxqD¨‡Ώ‘(:±ί΅ώ³‘wύοmω»φρy2ΝγXŽΫC?-Ξ­Ώ1φΈλ=η]μΏΰ‹§΅l¦|h‹η?4žΗ¦'έbγrl–?6žσΗQϋΓ[Υ©Ϋ\=>¦ψΫ)ςζυ|o°ί§³Έοχΰu ώwάξPμlΒ{?!ΞΪψqύmορ_Ώόœ8ΩΖςω‰z1wh-­Ο_h{[ίσΗrEεϋnٜΎ4ςθ§””ξ€ώ£{ν˜L^YgΤ-/ΰ: Ÿο1MmjΔδm-‹mdΡρ_ώσ%¦Γ­Ψm—ΌY†›ŠχηΌωπί1= xO‹»ώχ~λΏxJ>€πlŸγϊOZάϋ?Ώ‡ό_—,ίZΜ‡_šΐΧFμsύt3ˆβ• ‚J\‚Η­ΔoσΟιηΛƒp"βωΡBΫο˜ΖtΏ¦BVΧK‘Εϊš+ΰΕ:ΗΊϋΗ}y―A/ΚΫ8υΥόΛdύw¦αƒc#²ŸΐΏBΟς’|ύ§ΙβΛ²ΜpMσύ?ζφ;Ιζ“eπλ?ύΓ 4ω_–£-ρΌά Ψ>ο7m78Οή όύŸλ»όgωίlξ!Ό€ηκΏΛγό£χυMΘ­₯Ά";ΒqχψZΖvn£~vΟ€ηχ—eR4$άpγMgΰ^α½+ΡϊίωΕU6Βη ΒUχΌζHG Ο>β¨(ΰU@{οΎoΊaœΞΣ—ΦxOΕάΟάzοκ+’ΒdvΩšK―mγxζΖCY–2.uΔΙ“ΜUδr9hύψR‡ύμ0ΆυΪ/Ώωͺ5]7»Ό}λξη5Άβq“ΜΧ^ή͈†΄΄mΑkΓ—Fε; dΙ£9ž;ΪƒΊokט‹χׁˆyMώŸ5wόλ΄ΧX=”ηε›Εq£Žϋ?±{όη{ΰ­z\c}φΡϋCΐήa>:ͺ Ϋ„ηοٞg˜η8=ͺ›`>κή†žD‘—&Lζ‡ύΏeW ‡O„/»'Ι/ΎΞύΪ6ΩY₯§£>γκ#ο(K±έGΕjŒsΗ€eύΚ{oΆ†ύorpfΝξ±XρV“ρκdωαΒπΑ±Μžε§ωZβjΎ2Ty`=K#ό]LςΖ8ζeYΖeύ:·ƒ2λ|‘ί4Œ9Αί5ώΣήϋΌ±όίΡrΦ*άPΰΌύΟΛs—ο wŠOοΦyο0 >”οkP°λ"άΤ.zi2w’—α@ΆΎ“f·ΧάΦw£ͺ˜ϋ=i‡<”)δρςCΟƒrΌΒ—ζZ³%a„Ξ›mΟγΣγγ|vΕ­Λχ’žWσEϝ7§―ΈγtςωaΥs»ζάε<―΅Σ¨ΆΎΫζτε7$€p)°»d!Χ·ΌΒ‹Ϋo,όi3a™μY _I*έVΘWΫΏ”‹ΓέΞύΦW\uKr|—Σχl.ΉbΤ=†λβοέψ1?'³S7ή»ξ7š*uF7hc=N‹ϊ‹Κ€ΊΫϊπίP ΐkμLάρ»­ΖΛρ&υ_Ή`Ηω_Jάγί· ?=.²¦@΅%•έ+φXΈξ“”ψ%ΥΟγIν«›ϊ>5Eb/)-2HΓuΊ&βtΔ+žϋΚ¨ˆ¦=Χe&PΥ•n>LΏ‡§0–Υ8†X<ύδΒ‘Bςψtε“ΆΏ»ψq ‘ΣF?ž$¦½=³E’γ•Œ9²"χθΨ?§chΣυzλ/©vσ5»ξ΅11αι 
ΎIΎ§Ε§Υόu<ΌΛ«£rψ+uΆκο8ϊˆΧ\’ΉΏζΧ¨τεϋͺ,@οM*Πk·wƒ+†W'͏~Œ?/Β,?½jYοq³¦‹ϋ}E ΖΙ«6λdώ ΫΜifό3ΆšΛήυCΉΦμwe=όŸ4ξl€Ο6ορ'žΊ[ΣμbγRYΆEj―κϊ§―6d½β‰ZΈ^]ΦοπqW΅ =ŒbΌώΧ7v5-}Ν'εκ¦ ―yΠΆ€ώmΫ‡ζ)x˜|wμf:φ»†‘ύβϊŽο?„ίδ8œup ώ+ƒZOΖ­žͺj™'KŠήC™ϋL΅$ΧΝΜ€NwpκΓ^·³c,ΉΧl΄už @΄sboe/l£ΦΣ–w#ΪΪΖ»½MΦvdZW_΅Ξ―ύ?vλ)ΎΌΨφ-χμ;Wl[1¬lΫ‘l%ΒΞΕ΄§ο±½ήaΧ/JΆσΑKvšΊ9ΈΥVΏ}Y¬VΉ~‘C΄Φί4ω(‘£)ήO—=hφΘ¦(Ρ‡7p«l‘ɍό*μξWΉ»ΜώŽύ_Δ=ώχύΧ 8°³ύ?«ξυΏή!ώ«†ί$fλ\ίΥ`ώŠLΜWE…υaψAβ΅:£«^`Υ—mωS©–}Y·ŠmΊixΣo‘vWΏϊ֝‹xJu»@<Ό]Χ“ΒψfωΦH Hωω.υουίίύ”μΧ!Α³υΏŽγ­Άχψ―wΐωΤ4Ή#ώ,τΛdφωΣtΎΪζ―ρΒΕc³ύε!έ―«JπͺzƒΛΈ%ΰΊ=|9QRαfeήsSΠ_4ϊΈŽΏ ΌBώ[-ψΏ—ό,σ2ω/Μ‘ύGv_χ·psŽΩ7³.ς΅λh\GΛ]¨¦wε§7}G›~Ύ(κHOοtΰnÏΔΞEα…‡‰σ‹ιι0pRλqƒa%‡N™)ρC“=Ϊd~ ?ΤΆΖ½€Ο_,σΥς_huΟx·υo™χ[ε3cEGώέσς₯γχ85rg-Ϊ &^N‰ΝQα?ός§ηωΛepαΗμ@vΦηιτXvjχV²s΄o%BΗ(4HE>p«‰SϋžωWΚ[/TstpNγμΈώ›Ρwύοψe:ΤΟ©Q[ΪΪ³wk/½’¬λ rψΒfo%9^φαM¬ŒTέη xΧNρ!0ΐkδΏαχψΏ·]γ|ώ·©e_πγψO~χΏƒώfΛΙ§UY<εƒ Ÿn["’unδ'’ŽO‘±>!ιΜͺ<>ΥΙ΄΄­kΦM‡§ν%š*Ζ} Σ/y²h}]’\Μ α}nΊlŸΑ»˜αG$Οργδό²dς–Ζy5Χί€gΡ‘ ϊί7-Žη‹Q3‰”:σ/ίώq'3όξζ†άύκ·ͺsͺοφέŸόγω½ώλ;γΏΕσl6 8³ώΚjyά‹έΧδ…ύŸkΒϞΫ’±λ_ŠB>§Ί ξΧ7:ψαΆόΓι K;tš+Ž/;,0fΟa:^–0=|ΔK"κΞEΞkέ»Ά#ρ!,'ΐ9-Όx˜<υe1\va‹λέώΦ―-”;D₯ΪχXJ9s£ΆΚOTΆ=ώ)₯/°:}el·η»~<+«oσΕ—žŸΟφτοκωΞτ5όj[τn}Αςd₯¦Σζ»Kb,;οpχΊpΙ“±’%΅f΄\Ω&¬ΏξΠΥ}‡ͺγ{ΎβΠΕ……^YϋχlI K+\RβηŠJ>'kφ\Ršη§ΚŸ*~sA›ŸͺH|QyškͺМ¨7sΎ¬Μ`•/)ΦrEM–γΫ5ΪΝ%S΄αEρjΛΙgθ[x6zώL?φχ!yώŸώ_šέϋΏΏ±ώ'³ίΝζΉμ>}x|šώ€ψ\ώ·<Φ Ώη½½ώ·VΡ"Ύ=<Άω=y"벨;]₯κέHΫ»Bα#*?RΊ¦ζΒ©°wύΛαξŸuIσ>ω½©³+ΑΗλZ±ΗΧ–΄~­κvφΦ©Q_rΓ3JuŸVx™bxΎΆmί]{΅©§σ“vRοA¦ƒΣλςΎρΞ‹HθΈA7rxά³PgποΕ?wϋEς§Pΰ+πŸaκυώŸtω1ή)π“N€sύί„6Gψίήϋ?Ό‡ύί¬ίΡΧm΅xN«ωΣjγ!8ξκΦϊ –₯ŒΏ-·9ΐ֌'΅š7re}₯οw²Θp>‹|Qη«rΪxΥuΝ5U‡ΘΈ3^>•ΤέJ~kΆ—]Υ•ξτ=§“ϊΓύl—ΈW ©ŸιΉΦSάgΐrηgξύΪΪζd>:sλ+K{wφH»a§΄χK¬¦τΆς™ϋŸ-|±œΞW=7}ž₯iΩ;—γξ·=B<8Φ1’ν©m½)Wη䒞τžΩRiŸSdηάKΨΣΞΑ­’°s¬­B»s`υt|hόyΎ\ξΣδϊΜοG΄‰Ψ9φTΐxΚήeΰ(σfςi1D€\Lή:Πmιƒ ΆG΅sxΫ“ΰθΠΡ»·JΙώŠ/ΜZ·9x…U™zΡ½ ι›6άcΠο^RξΞξ:œ8wΘRΪK:Π½°Δ,οWY~ O»G7{xσ΅›1νžέr€.Š»_§a–Κξ“_XΦΞχργγ’λΨ!'οπIξžΣ^Ϊ»2Ν!ŸjΓΓ6g4σ«eΧ±-9uίλŠωrM.σ„‹vG΅ƒ¬wŽ–ζeΐS:ŽA_'™hόyΊšŒŸΒ²œΌnӏtηšΉwd³4¦­YW@Η#a>έ;Eœfς>O¦»Oάe3νΌ†½ΪŒΙγx>›’:Ώsl–'ϋ„ή»Ϊ§°50Ω§°Η²ψ‚Χ}€Ε¦α>—sη»ι¨ϋ}hc9Ω₯³Ω$}Ω½MƒuΦ{εΟMœ“˜ψr°°€ίw‡IS8œ•εrόB―γ:έφ’mΐ%Τύό<έ[Ν%Ας 
ΏX]‡pιΫJΛyš”½5£;Ό¦=‚7ϋyœž{„rpΊξ=ύ9ΟχΎς¦AqFŒq¦'Oώn…ρ./ΈdΠμ_Ήεψ{A ‡zη«»sξ?Ψφ|έεΗΎύ“gH³ηŠΎ7ΨΉš$~ΨΫ5«E Λη=ςZMžv§τy6ωΣσ·₯Vvςu!oΐηεx:YΟνAορ‘CQxpv»H§Ϋ˜ž½’ϋFtΫ“ΤζwυωΟ^oΈέ zεΡ%[^ΆLΌˆ½qΚ]'_°TwΣή³[1}*$Ήχό?wžόΊ¨Η'GO#»`άHΈέγ%>οlΣ^#όΑΙNδΫ|xβˆ`Ά=ÎΏ@ε³™§Ν€_ΗϋςΏχ,ΨοτΉ!Wκά|ζΪnŠάΫ;ǟ€Χψ34=ώΡ†½tϋ»ο„Ξž»ΰΔΞ>ΎΆk{v_Υ„ϊ^q)U(hΟ‡ΏZbRβόϋΡa"«cŽw₯κΎγKυμϋ5ŽNΏΞή Λ/GδΣ οβμžή>ϋ±Τ©γU;5žσλΧ$T:―ΗNްρΏt>A&±Ϊtv'"iNJsTακπθ.QχΆCθ;Ϋ99=‘Q]§Ϊ:/t¦=Ψfέ·^£ρ‹ήΆ6Σ“Š ΩΌϋƒchΌ1Κμ^Πια>Ό¨ ΰάΦLλΌΡ“ΰψA€»ξθά ”Ϊ=υΉ`†hKD8}φw»‘–;—‚V •ΚσξοχrαύεΏός?ώω‘Œ„Œζu΄λ7Ε2ύΝ·‡Iz₯0ÁQœ―Fy.±j|t€©4_vω·£°(£f-—xΐκ!¬š#€OΈΆδΡjN7ϋ‡ϊο ΝΞ– Γ_›ϋm―}†Nτ₯LΠ₯‹ς§gλu8=Juz”Σƒn΄M=τ‘©VΡδΒ$g#/ŒΗ,³wΥe}p5ΕθΈ..•1ΌšR,QΕͺ½ RΧ&«λε±δ«£wS$ˆΛ…ΐ’[kͺVŠλ˜˜‘ΥR‘;Νͺ¬N²¨t’:Υδ˜ΦΖ[Y-λ^βGτ²X„£f먋t‘mg,ΚλRŒρŠΛXcρX hWŽU•‰$‹Ι.Ήμ½ΆΎz[£—Ψ~Rs}Yu3ΥΟcθ‘mα}β,iςτΠX-πριyRί|Ÿ?θγΦΠ&Kœšρητψα°8€xR’^²š•³5„ΰE,TόMΙηΜ΅0«]εB…ˆ<nSa–ζ-ΥΞIi¨φΧ_Ϊe=œ”Uϋχ3ΈζpΝ "M|<½›λoN#ϋTJ°€ΟN#KWceΎdΈuατP°o³¬^bΓΤd‚ͺάFοzΆΒͺυ―Β 6΄MϋS*Σiσm=ͺOΝ«ŒhΕ©™˜S2…”}9šž«˜ͺ%VP‡²9$ΗρAε,ˆC‚HΔΈ],XK̍{Ν•/άVγŠξœ‘Ης˜'†Έ©mz 7Ψΐγ+EωͺIt–!€ €C(ΑrΙ’2Yr₯ §ƒpLFΕ –r‰ΔΞρ΄SqΤΆ/ϋυ£ƒΛί¬±Οc΄%±T!`pσ€,lΜͺ²Hΐχ…Η‰k"‹E•"=H>‹Ÿ叇ω·#,έΠ@ͺζ\m%$θ°r՚9Ζ ]c J!%/U‹%Œψ#ι$ ŽΊΜ9D\Ÿ~’Xƒƒ“!t*Ό½`’9^2j€ ΉI 3ΖeΦΡΫX b8/%²r§ΐΉ!ΉΌ/ τRλοΔς,βΧ,~­ΠΠC4ήE£ƒ7<ψl!t€%"θL [0p€=Ξ@!Η–<2'"h30&+Ψ‰ˆ} DcQκ‡ΓTD34aΧβzEΪT­Άΰ¨©θœD!92›RHށςsΨAΜ°œ¨N½ΧƒτB‚y|hœ—Ή³R₯Bΐ;γ™Τ.‚½»,ΐυrεSm’sΘΞ.€δΩp2±xίMcσΌ˜?’―ΞαιΑQ¬ΐžΕ:ΞEœ@KV&"°kΙLYeBΖjξ±X"8 ,pk\ΌnyμΰΫCQ± P|° $Θ§‚ώMΡV@?v ¬Ψζ…Ž* ΐ―ΐ­ \A)΄}ᅵρOΉ,n|6E¨ ΰX<”V½rIμ m¦eQ "G© ϊaΟW—ͺpG[“μ}eY¬>₯Η|,BΧαχ’vΨ' Κ4}U5;§T €Μj)j‘\ς(%”ŸP°»cΔ&—ήtξ²Ω5/Ο©€Κγ:DŒ6%SHΣ"Ώ|―cmφŽ­ΘΊ`ηc2ΟW'v];kΟη@΅β7^>J• σΉJI™s>Šh9ƒœJtx`ΜC–U Ύ.bͺΚ"ί»φŸV«Ε$>―ŽΖΑη!Δ²I†j–—s€ Ξ!Sε*£Η;[«“qr©‚L|Κό”A‘7=`·i»Φj:?f§–Œ»φΝŒΤ 0O6 |žkPˆ Pm‘’SΑͺΚLαEk”˜‘ψΖ±Ž v‘Ό[πyAZc(ΊΟν ΙλΌjβΛl­nQ-‘ŸœΞΔvήθŠ+ΡY ¨•lTŠAƒεPόŒ_Kq3ΓšΑj­—ŠΕ†…%V Ίw; ψi•ί "§"ι‡{S3¬iζPQ$‹J™…sΣφy:aΪ={βφ/―)ΐGjΐiΡNi φ™•#Jΰ ψO‚]jMΌBρUβ΄1 κΧ‰ΩΫl‰_9’‰—CMΰζΛ#˜Εχζ{fΩ†GuΜ-πρ§ττΌ<šθ“S{μΟ?ΚΑw§ΗΤUW!n€Δ Ož¨+Ψ0¦ (€(1]DϊœΖŽΦ₯@'Έ|wž&Ύι$Άφr*6`RΥN\€’Y³’V4ϊ΄ ­ΖηωtΊωΌK»$ΗΎ”ER~±&ΩL?­θ9ΛΑ™χή@žY)b.*xο\LΠJ„8t¬ΊΚ$΄ 
ϊs²Φe€JΙ’!λbηqΒ“γl‰ψΦ\OYŸ£ΣΐeBg|¬F0UΘτ ­rˆ;Κρ6Κ2Rw*˞“Κ Ε@›2ΐ@›ε‘pƒ5 Z(ցAQΓΰr‰#ZΤv^XΝ xϋωBJ6<”pΕQ'XԚϋμq›#Λπ AxΌαφ·T[°vό0ŸΉzΏP\κώΤϊΑ§Š%3ΖΥe« Σ -Έ΄ŠΗ †ΦL1ς‰8θ4±†8Y9+~bjwΠί―'DΒ‘Όλ‘ σxωΊ!½f½Ά!ξΛWΌΜηm˜μ‹έšHOˆ)πΧI“LφoΰΔΝ£ ³ΩžΪΔ’‚‚¦!ω.ΕVb ?—Ωxύ–Πx“q~ G 6Έπ«Ζ老΅8τImΛkΙ’ΒTΚ €6!› •σή&Α«ΨΦΠ‡ +9©|‚˜Ϊν-,#ΣΨ©Ε£ιλ˜ΛU3<[ΎLΚ™ lΰa΅š‘ΥW P}ͺ„m²- μK @J†X² 3gƒ„VN.-˜•pœ³ΘN*aν\–jΤC―½R’Σr›\ ^Lη0"r f}ΡR—-δ¬NE₯€œζ6)Ι#‘ΐtύN;μfύQΘσΩόyΩ.»ΉΈ’κΑ+ Υ…Y#k°΄₯k6§T3Τ7-ΙUNΖ{.”Ν=ˆVΫΤ…Ϋ£ξεdφ₯›άwxO/ƒyν€™α'AέΰF¨κ±B›UΊF— ΙI΅(Πn½N©π(€ŽΙpk<σ2aΦL©ψό˜΄&“ϋζΤ•@-L _œpdš“¦Ιύ‹RΖΔ’gEgQ‹!ΏΨ*3/‹ΰ݌βPg;U'‡8nPΝ= Μh` ΅Ψ\m)Š)97,%2r(cPK΄x™ιsr–Εΐ’+9ˆΠΗ W«‡β―ιž40–ŠΗΒ„y¦O™¬ο^οιœe•Ά`ΡΌH‘*,_œBeJιvA?”Εγd5q²3/’ν±;΄Φ••ΟžjJE8ΦI°Z9†tΚΞκ5S˜Θ/™U Εzk‘ΫΔS.ύ–\‡φ―@­ΰ™ΙkX$>ϊΜ†b0iœΛ#Ζ3=a@Œα(²mp§`IœYL/Ef\QΊ˜LΣt­ƒ”―υΪΧ@ΏΚ9Gαš±žψΊΪ°A.γ6γcνόυυ¨†vΗߏΨΨΰήx°JL Ά„΄ΜX°΅ΘΑpc€ξ€JΜ‡„ύ+Y2άΚxπkY Ηύo―]Wkτa}qL(°R0’Κ ½κ‚ΥΥΙB%Α–ΘΩΦ›’q`D"x†Ώή‚ESx(Nžx{£nΣΐŸM‚2$8€ΐ%Ι£IΨͺ)x –£³•δΠΚ€θƒˆΠ΄VΰΈΰC²5©?-‰…Z›Œΐ?\1T><ΟeέΚƒ% εˆA;Α­¨Z`›Y |£!φ•%5Ρ¨Θ±jšSd€€1œλUγƒ―₯T„π«LXη ΰ’ΐo΄MЇ–g2€Α—dŽ bGrά+k½†ωπvTΟφzH>šΐƒp5™ͺmΝ)ηh5…vdΓ₯2ΖB9,…°ΤΰΗ1A†μξΧ/GρΓΓec%y΄…Λ΅‰ϋR.;hΞΩ ,₯ΉΰΑloŠT>ηA ζ Ά}v»Θš:έ'Θͺ©šτ9δΟeΥWήΆ‹`›ŸνeCC™}έΥTFλ.‘Ά¦γέ»†ό&#©‰½kEχ^oΚΝ‘Β Νχ—rc»__Jν]Χ hŽν˜¬ž^@;j„΄gv‹~4GφŠk4GΆfΊΜ›ο/IΙψΊZ<7aš|5^tΏd*1ύOσΙڏOΉ’p~1―5fΎΛΉ©;vνB―ιγΧή•?^ρέΦ°G^κ'Ύί¦έΏzΒ\<Η+¨mΩ~\ΣύΡjΌΜώΙΩZΧyjΒ .Ÿ2ϋ κύόΉ,Fmg΅_tυ}σ·ΡΞΫΚDGcZMf?ΖmφαΉ—Ϋm0tρ»>Ύwι;ε‹_΅).&ψ§΅Q΄o€Κ$,GΟ‘ύΗ™[k0Ύ z¦“JΠλdc„U²Ξšs§Š 5Ga•HL8¦<”`q f9Γz[›ψ‘XρƒC₯e‚ΤƒBΕ" 9 £ΜΥ(V$7ά‘‘Ό—HωZ1Ε²5Ε»Β &₯ν2‰“UΆU \•οOλh{zJcm)ό ŠΎΏΈJiΈύ*|)ŸΎ=L¦εŒš±Ι‘Ϊ(d/ύΣSΙUΫW½5MΕR₯ːΤD¬” σ*@ΕΘR¦T±Ι ’5θ΄ζ•ΈΛΙΙ(Š»^}|²:ŽδώγΠ‘χΞƒΒ4o άΔϋœ‹ X$Dθβ<ͺRΑΈ²©ς(. 
>E%E·Ήbοh-δI#λτΪΝ΅ ­`ώφγΟ'οxπbψx^†2Œt–Ÿs¨Υk‹Β:bψ™œ@Ν&‚όUdƌӑI†zρΰΝGqΕΰ›’GϊΫΠ ‚³ )\θRu%Uζ°ήUHΫψ« §‚xP ڍ­2ΛH.l(Π δξ菗κδΩlEaž|τΪ‘Χ€(½Τo86ŠmNΐrg-„€ΘZx,±^mπF…œ-‘½pΙ,ΈbΖΤ¬"Ω± 9α Φmγί„φξZU¨Ϊ*ΜVŸHB~*:7όΆ>Ε‘.ΒO­Π) °Φ{΅Βq‘’PA¨*hØ ε*kͺΟP YΈu‚:Υ?:λώl華›ŒεM†Η&ηΑ2©ŠΣQp Ό^oKE)<‡"ls)A…J&ΚΣ‚ΚVLo1ςΘZkχχΦ*―sstΔAR'D²δήN»(΄“ψAAϊθ₯©ΜgY_7 iΧ|KΆ7Ξϊ¦η2¦Σ;‰mυΔZ;bΧ‡Ά²$–ΩZΑΐHCΑ…“<γŒ‰ ‡Κ\zV˜dLIcDB%:ΧξՎΰ μ‰ji4ξςVe*oͺ‚Τ£΄Yq*mφΔ$ δ£8˜+kΝςΐ€&^’ΤVπh§Έ3EQYηYUQ©ΈήuΡ$‰΅‘αΊτα½»G) νpΦη’ή=―JωJ°hYr%Λ~ŽΕλκ<‰ny\ΆΡ9NyμΥΚͺ{u•-UΌb˜Γη Ί „™$EDk–(τ&Γ“ρΡ¦£OPa8γ¦Ρ™μ%·\C&ϋ$RθKiίΙέ:%?ιΒ-ΓjδΒ«UΜbΟr)ΈΔ Ν9U)4@I€Κ.O†Ι”E•RUHς6Δ{ΟΡ}]xwίͺ/<ΓǁBα©dΑd TYS’%©}„6abͺ’E‹νO>W(εšCΒ†k3piξυxPφΛ‘κΑM,*@€€€1CΊ\IΚD# ¨Z ˜;Ο¨&‘ZqhW  )Θ|QɐψŽFσ― °„‹Œ M…Θ‡2}:bΤ|πs*άσXe ΚƒΒΙmπ2*€aJΣ ϋ”²΄­L†μπ‘μuν1\?~mΜhζ€Τ~~| Σύyyƒτ#%θΑ‰%)A/fŽYΐ'*”W*›”Ο3U9ͺ”c/o"&Ό€†lγ„m{ϋ”X°L*0œKZUν\΅6(™-„…ZΑK§-ΐ‡†Β‘=oά=19~j­,9‘C/D‘x»N œ‡€P1eμ3JτTJΧ „κΉσ>‚ίb Α A9ΐDΆ'FΠ‚’C#ΡΠα6Π%ρr.2¬&’ΩGιJπEΟB€Ά•*‡2!™7P.H€X‘OQςφ)½‚ΥŠ—£τρ Χ†q 8u€Ό¬n"±ΛMΤΊ ŒΪ γι³ιdώ΅1EαςκΑΓ,XQ©$γ*£-^ξ…²NZ―g5Pν &5p)P‹‘τ0*•dΐπeAΊ‹WιqAPΡX*JZj•S¦Μp[3cή&ΕA,ΤS•¨8’sV ιTC“3S}Ή©ιIh°δ£ΜΘα.ΐΎ†™|,ΊX ] ρɁ±b†‘μnRΝΙλ$pBΜ²^θΓγd6?ZΑcκ χ#:ρ!`‹ޘvΎͺ†ι`αΑ―J±^bΌŠ’`ΌΰαUά¦Π똚§QΛ¦› }& ΄* Έ$Ώ¦¬Ε ΉŸ³MΪp0`tiΘζ]\ΛPLΪ\˜(’Υ†ΏrˆŸ6-B£ £γφ”— r9–D¦ϊƒŽGˆ±x+58¦ΤyUJŒ²†gο"‡<ν\“`cΨ_²_waίz„›#㦀is‚­M#;.Nϊτ‰ά¦§ζnΉ:BΖƒGE―$ώtΪΣΰ”AVΝG(q޲τ†E”C2@‰AδAͺ”΅ί9a·ηΣζΝ7%ϊ†™Ž6·ά λ#³Π0ZαU–€ΌΠΚ V ’σΑABjU*Δbvš19—Ά'[ςίζ±ν7vbl›νΦa(9jfƒ5Φ -²Φ”ΜrvxwD«K&Kζ|υBθF,’d ;āE»Ύͺ'b·Zγή1Η>χΠiΰAΐEEpPΦdIJΰIT 0‚cUPͺ-Z* Ό©Z%*Pf°η™}αΎ― μ!‘3€ΊτΦpMω‘ΜpΖ„\°R΄ā΅ wμ'`^NΕ–rP¬7΄iSΥϊΈ²Χΰu6 6Œσ9QΑI#AeŠτaͺ‘Η+Ό¬Ά8I5cΈπœA³JDJ3‘DρΣεπΦ₯sOΠαAοƒ)qΓgύ7Jn“‚"Ξ±X&+*?¨` a€ ϋ/GξΈ°Ebώo<0Cqͺ―ΦΩΆήveζe;ΌK'ε°Wιan·kSml΄0φk[½§¬;FSt–G­y}η΅Ϋ\΄#ΞλTœΙjρœVσ§Υ©‘PžΠ±’δδƒ6`€\”lΌ‘* l28V!tq[()pVLΉjœLdu…‹hΐU ‚d ²G‘ή±Kλ~Όΐ­½Ά+ιmρA―•Β‹,K^SMYkŒΠ…  π~°ΈJΕΙM&‹£ΞFΚmλΗ’Œ+ΈvnΫΖή:8 |Κ •[ζZS28§BTΑ@±7²T²€€ˆΡ”Β’ ϊ†ΓYπ8ΣΫχρ0Ϊ²Ά?\4τ#VNhŠ-A5ψxPTŸIΑΑŒ©RWοTI —`]KdPi΄€β˜Ε‰η8ή—W–žž>m …ΌΘ‡’žbͺΗVΒ΄1 ι±JxρD¨dj’Uρ\:‘D&θ₯X˞„υ#Νk-ŸϊΦnG»υΞ3&yόT …°Z¬•ύŠ>CΐZ mΑDΨv‰Κ*kYEΝΠ¨laΙΛK=…Ώϋ‡7ΙγH½ύG7τjΊP"TθȁɜLN‚ ψŒTx%5X‡ 
Ε›’ϊ8pɍ¦ŒAP¨0²―,μKKͺX±ΟŽŒ…fxυΧckAϊ'‘—sΰQdu­ R1ύ8*`c.£"lΆ8DE)r]nΗΡφ­_½ιΞ άό\"Χƒx+6ω„ΕυΥb­2ΐŊ³$`#>δδ²Λ¬ΖAŽ\:p«.ψΊUΩνWβΠKΘΓ\ŸδΦ@’X&m5!Η¨r°—”œW“’ƒ< -jο@2D~ϋπh9_ cΘcr&œΎbω¨ύ\η5‡$ΆΣ#DlςΜχ³|0‰a:―ν›vTmΕΝ6.|AYZΣςhuΦb^q”χυΚ½)ΗiXyΕ?Κψ•Y ώδ Κω˜…’ΐμ8BΠ$ΏLuΊŒJΠπ5δ€9CΚ/kΈ¦εkxβŠνv\sΈ[ŽϊΌœYΓ/Μ%«)Ϊ‘ΝΠnΏ-U!–5N}3£« 'Κ@Ε•‘mh©²T+P;¨Ξ‰OΌX& [©…‰=³ˆ―ΩI?3C7 sEΎg° )ΐ!ς$¦Θ-Ν)žIIf4}AU]ƒf‰Ώh]OnΜΟΡιΕ3ΤήτφTd¬φŒ‹l2χI₯P΅Ά‘τbΈ,>+N Vε ¬=—C"o°Χw~j@·ΣAGCHœ…°Ž%γƒVΚPηΐ…(ra ΎΌ <)0₯VWKα&ΎΚΕ}°ή·_6 cΘήTΈ*\W-uI0&JbQ6 άΥΰhW#T—"tΐL–\ΓkIξηFyƒ΅΄ΩD\,•©κΤYo¬F™j₯„Ξ3σŠκ`νΧ™²κBCJzϊΉQΦΕΌi‡wϋΥ€€Δ-u:ΣΨa,¨„u/²¨i[8₯ν`€+ bhT”ͺ/dᨩν?ŽW`šΧNΫ-ΆΊδΡ:₯ζ䞨@2²ΘΔ€"· ηΰi&{4hΐr£aiΣg\κ}ΊxΪ¬ϊ‰i»<Με†ί~‚΅ΙΉ,Α ΐ€ΌP©0λ#g–¦^3*•R‘- γei© šΧOΪ«§„Κ$ά~BJ&.sEdšh4™ vα]q&Λ·Π7D–)ΛjΚ;WΩηžΠ‰“r Ζο£έ³T   ΩmCŠΊ‰ς$Ά=ͺC± Q²ξτώ^wΆ>27žQΤ’Š%šΰ©vŽœylοP0χΰΒ)ΕK.j ΄ξj„υZBυΎΔπΥ;HšΰcνπΕ!RC£l%9R,pj©JBΏ¬Tγ"Υ?Ά,**Ÿ–@&ˆ`8d2`vΫΤ·–&llΩ΄&«υΘΖOΣωϊάςλ š:Θk8πWΟηm―Ί64w\m*/ωμ σ0ζ-ΕωN±…O;EDφbβΪκ«ςH±v›Ύ&Σ/y²Έ`ΫqF ^M— QKΘ%Ɓ‹!c|ͺΑQ€‘t”qš‘B›X!oB΄Δf8w‘Ša‰}O ώΝκνŒ?M·›ͺΤ]AnoF5 Kᲁ ‚1KaΘER›i³£S2₯JΓpPΆU%¨ΘB°Ψ­ϊΒΈ žjΫg§!—?=—›GyκIup΄@}0”q―©h<Ε §J}Ιιj₯hh}‰,χ5Y«<θΑ_; εiž~nbΪ[άΊΆœΠNU#}‰^c{Έd=4o#Ή&5Ψ–Sƒ')$"μΝ‚j LMό⨱ήZμYYσZ—eS?jώT>?‡EΎdΖΪRS‡qvƒ§!Κd“•Ad^|δμ€Nΰ%•t‰FIeΐϊ©‘$']\Θ βQ sέwQΥ݌μύ©zΥlΈαΩ*)δz ž+αJ­h]'R{€JΉΔ Et©ZYΒ$ xUωλfc§Όθe“ρ’F—€,œš*yƒΚd1βΒD―L:ZΊ†AΧ€)’*xΉ‘²‡ N…%ΆΘVτTFκ™ͺMε΄a¦*‡Ε΄£„Ν ’ή΅M,—,(Ο8S šOΤβ4`γΣδΜ‘θR-μ8ʏΠAzl΅s“Ιί­Η­ίώΣVˆolΊ\υ§.ω=όΠESν8͈L)€Π.Ω$Y΅Ί@‘©Iη‚ό£Φδ œί{Ή_Ί tΦ1ΑαΥ§΄Ιƒ8Q€½5̝”!ϋ3yσ9¬³ς^Y.Y-ΤΣλOQk&fε±™(BjŠώ₯ 5Χδ9¨D§ty\(?)Ήx\¨ναΦ7ˆΞ”₯ιeRŠκz’ŠL–Ί›«°bSŠŽA1σ’aœ(X)λ>εloγΞeͺ²ΰ)€lM€ž ₯όΑc .&SSΐœhήCLh€Ύ W}ΝΐΆ>rΫ³Œ»Λρλ/ckR’³0½pYu85~π¦QΥQξž‡ζ0!’46ξ,… ^ηΌφ΄²!Rš„Σ%(₯ζ°PοŽ_Ωρ̜¦λΖ:ώιρ˜λy?|ΡDΕSdΕάί’©°PΫ-@c€J'ςΎŠ4TΧ0[ ƒ2cBŠΰΩ₯![«εIνΘ/ƒ ΄nL)ς¦ D‘T²ŠJˆ>–TuΔ$@9ƒšž+P2«ι VΰΜύ/_˜†•y9lΩS‘RΔ>υΠ(…^“&m20M΅p4[p_@|§“Š ;)ξ]ο>ΗΥΊασ %δ«KΞR[hͺMΛe©#ΆVŒ£―Nγ…C•[/ΐBo©Χ£ςZε|•‘θWŒτύO˜ ˜9*έˆm€¨d3”Β΄§π**³ΐ!$-ŽΊ•PŠ'#ω9„y²(€JOϊo―Α7©Έ.V‘Ϋ² +kZtFa« 
Ϊ¨BMŽt8μ@Έμ&BE£¨χΎZ#ΐrxr~ˆγYωΎΊωφ“ͺ#EšύAMy`κ4α"jRιœͺM=U,+TΤH:50΄±§Φ‘iz3πvL§FΏμH±‘ΓΧZ’m3Kƒq[­\Nԍ“š¬jꨣ$₯hΔhŠ‘:ΓDζΨՍ!Ψ:v:]zQςόϋ§ηεΖ«|YZτώέ:₯C•˜‡βπΔDζΥ’^„E₯f€›S©&Θ2Ψ` 9/Œ:φt ύιXB_˜LΓβζ…3k’˜Ωͺ"™”(QΦΨLΐ ™…κ,• ά0μy0Όβ q‘Ž₯Ιaϋr¬¨an’r{+¦ΚRΖβ••TΡίκͺP𰔁ʔ†Œ’$Γ‘6Ν‘γ•ͺ!Pc5lίπ§ηIϊ²ξΚΖOΧΐΙsj³ςψH-V7―ΌͺG’ Τκ£ζΨR~ F§"ΤώΌˆ ˆ‚<ΉŠΒ-t Iμ[»ω·/Œ·ΟΦφ”ΧqιΙݘ³.NšJ ™sTvC Τ*€sΤJΚK€<@έ;„³qšΞg!₯ψέ VrρLx›Ω ŒH/—5p@Tͺ²¨‘|¨H…Ή±Νjv ’Ε%ΐOl³ξZΙ»#OŸΪϊ³›£§¬JG?ΌυψλŠ"i±·¬¦ϊ25 Ε"•i―ZYκ͚*XmτΠ¦eI†'Λ½$‡π ΚΖv‚n]f3Β„šRΆz3 pέΨϊ€&tά•ΐ”6Ί€EΝ c›DΦ„Μ<”ΠΪOΉ‡α’όMS¦žΙP”‚ήΗ[Α]Ž"3bTցΣkp—&θΓƒ—Π‘’x«ΚΞχ₯Ωη ‡‘GΝ.Ϋ%ϊυ§V·Xέ|κTS–KVΑO s6₯ ΄¦TACνιk΄ΈB%I†D#΄6Ω’ˆzw‰ΑΉ%‚uΜΛcx:ΡIuJ4ω¬θ}ψɘ ΙΑ• ‘ͺΚIr‡B݈“‰ρŠΘJ&φ~€ΐZͺ πΨShxςyΦ$&Ÿ_‡7α&m}­1¨j‘BWΙQ³BŠέχκΆd2JHŒΧA–bM³…άβYϋ”€6z"|"£VO£εέ¬^WI¦—pΛ‡ Fm€ΧEŽ…‚aUΣΎ΄P±.«JeEB‹5qm uξ_©2&α±U$T M"›3C‚W {Zλ5ž R KF+u’I© WhτΛr\{άžϋm >7υE FΗΡΝΛ!ŠΔ!«24"§!ΰ•Τι°Ί4΅Q+,φ―2^ώ’%ήΉξ0v`π JS‘C²ίk£š7RιΜΐΏ=ΠiƒΦ ώε''x‘t°|¨ΎPμΥ)wΦΟmκMΚ›—{TJCŽ‘Ί &^ΑΏΘΛƒύ,Ž†κ¨’>d5'ί5Η0ψIΆœΣiN§%< LσYN|‚„β‡–ϋ”Cκx(DΟ‘μd ρήΛ ͺ‡αΤxl˜ήθz9q‘ϋΐΛ",K·Ψν¨ Ÿ<Μh¬-ή’69˜&Ό M$PSKΙ <«Κ; <^•UΛbΦ>kΩ«k‘Υρƒ ,ŠH¬’FŸBρΘΔG%EβΖ`Ό ’x[­ f€ΥF_-Δ©|J…ς8Ί½ϊ8/Βηω¬Nμτ»Μ2{x‹[WzηŠͺΥ€oCgΧ…Φ4BRa3cVΖΜ΄lΊ3a£CΚ‘λŸ1όŒ™ž‚•)]Pρ«|₯ΪtSŒ ΜŽ ΟκαˆVKN,,σŒ ΚTͺ€\ΐ)nω>YΫNLGD;x ͺ’zVΠd6X2YΉΦ^(£2[XV!3DNδΠ£™s%}g―WŒΉCΨQοžεͺαKw΄dήcR 9HM•)ΤΥ—μΟΕArΘhZ6zη Pδ’Α’T.ΤΘ³”ΎbΨ5L©¨f7}²…Ε ”δΆΙMMάC%p%υΰΝVAƒυ ά²$mκPΕ26ώ>\ΨOο@ΦMΐφeΨMjdT`ymΪRΡΙg%…/Δ8BmӚ‘*n%Ynœ4‚ZT‘>²=hf–j…%π&U]ž™•ˆE(«›ΛiC”@ʍ‰\”SEe&1p¨ΚUΰ¨RTYe *·’b] S½ΒrίΕ±7xΏŽ XDI™Z΅x“)‰›œP‘[H9CΥm΅ζ IΩ‹:rA=Ά•&€ ]𲻩"qXϋΟΫr²“Ωδϟζί'§” Š€Ύ΅νKE–½?)f<Άmτΐ$J¦2*ŒŠ†P~¨ΰ&Q  ΡΦ“ϊϊξQqΌΙηΩxυγ©,o^FΆšJJΡ…œ˜)‹¬σXδœ[ΙF M^§h”+ψŸέb³2ό²§xγή6•½ϊΧͺλκ›wΊ‚*C5B  !c¨NηlM˜ˆA¬«χ,˜¦G™σΡ,4°fR¬ΖPUμ_Ό/`μ₯…Οuο€NΖί?OŸžΣ&6ψ¨[Ϋπ.+])οΕΩJMŽ£ žAζB;-˜TΩSΠΚ2/Lso³ςΤDΓjhRέ€ρVxΖZκΌμl΅ vήΪ{ϊ&`γˆΛ[ΙΈ7ŠfyœΛΧnšl~Ό|~zš/Zφ²>ςzhλ…izκ.ΒtΝvf˜FHσe—ο•ΩΈίŽΧyO:7NΒΣdσq²=Έxžm›|Ό|mCΩkHΫΛU~ω8_`‚™―¦KΛεsφϋY6Α’λΦ3M§•—©Ϋ[‰_;ηψ’zZbXlJΟΎ¬qΘψq]zζ²i₯>2λ3νG"œ7Κχ’ž7/ŠοΤuk’Κ‘ΏsΏΥLωήζ LV“5EυŸ9ZB…Œτ“ΩΕ=jΪy)4—Mι™Εό+€Χb;SƒRqοΡ!{x,›Θ‰ƒΙy!ΝΩΣ|Ή}ωeοΌδ³γoΦθΠS3ψ}‚Ί_Vγΐα†Σ΄ΗH{^†υ"„M,έUSΆ7A³ 
ΰ_-Οnζ=‹7HΫ΄Ž§ρzΞΝ\gsDn¬μα†J)‚Οs&’ΧΑŠδ,π*ΠyΥΩRΑσ«`F²B΅'€Š”i'9Ώζ]8ί§νΕϋ€q9I΅ŠΑ§]A΄_t0εG|Άo*ωyCoGI±q­r7†Θ―žŸ¦₯S‚μΆm?₯a*¬‚—zμ”1O‹²ZύΨQyFl[Ρ­gR;«Ώg WΠ|ψ²Cθl³g_\²Ϊ»KπZiΦwΓuΛλŸπǞΉσ+™ίэΖ;uΡv’ήšν½»Χn1ˆq‡ΛύΘβV‡)8ύΟλ;··"α*zj›}αζ§Άέε‚χ<τΨΫC»‡hϋ]3»›)ys4rϊ Ϋ{]ώR/E»!ι₯x³ύΞbΘKΐ±³7ϚL`uu6"DΙj¨ΤΆWeΚ·0Uq©‚Φ,P2 Εͺk."wΩHa…Δ?ίΧ†¨>§‡ε$ŒΣΣβΈ,Ιΰv«@ΙZ6ka¨)Γ:%E•™\Υ*‚p5ηMqΔΚ„7”=’¨‘”M&ΛΞ βΟ“Eκ¨ρςέΝΦδΠ{HeyTφ/S6E±΄OΌΦΒ-ωΗ5ζΪQ©•}RΈ,QςvΞXwΥϋ7ΞӁÛȝAuυL ‚2©xO<ΑrKI€,ι¨β¬.TT™ζU'«M!jY]<1³-’Όλοέ‚œΝ=‹δζΰώlΎ.'³/ϋ^γ°ά?@j©]ΉBGΣ:΄ΛK —Ϊπ_£sγΎτTΏ99ΙT ©ςΑ‘(.H κa.+ΑΉNOλα§ηάΰ+Ÿά`δŽ<ΦΒxm«ΙCΨι”₯0‘a?)wU)$ͺ7,;SΕlQˆL>½!^¨εζ;Ce ΣΥAΫθ(΅H$>{~Jρ,e‘Α‘ ₯€ &Ζ©o[‘lKΓΣuKΨGΠMΔ§ττΌ<Ώΐ“ωΆΉˆΐ*5t%Η{LΪΪyμ©οi•ΥμHΑs¬†Q”`’>XΝ¬”φ ΏnμΣ‡2gψΠ㨔γλΗrαY0°!΅.6+]™TeΧ…HΕB,WQRΣ#°x,maΧ¦· :;Ιν֌m[€t»~ž4N+T€S1j΅–Άb—²ΧΩy2θͺvuJΰTšTEloͺG―U.βI«SΔ(ΎXλ½Βf²ΉTL·ƒD―ΦΗxamι‹έCψΉ€ΝяŠo‹αρ΅ \‚u0 hΌBεB1+΅%°#Ȏ⬀/ZRζO¬ |…—œ„—6œ‘­kBZ₯‘ŸΎLΞΏ‘7'jtΙ5„Ÿd‚jg: …Ο2uΜ©f |ŸtΒ±΄I„β(&r©!ώRJ²ί“=~άΑdkxΛI[xϊ?ΠΗρrž–srυ–―“ς_?)bψ δ ˜m$հԐπΆ΅κx‹Σdfg‚ΞZ#£2υΑ†,¬–—ΝΙΏ-·±Ε‡ΣCώωiδ#ޜΪrχOXGά½Ί…φ'¨ο‹4†7IQά Žy³Y60ηΙZΕl²Δ°cœ!rhηΤ-…ηOαOΟeœK|ώάx)ΦAσ鏯§Κς6έΆιcxE”Φ¨>φŽ•†‚U%4`e t λœ΅Τ™8pjt_IC’t>Z‘z4ήΓ|±&ͺ‡žΎ‰Y„ΘΐTx ΤOM¨Ο•S1|±νj4“΅p…ε„*’ žf‚“ΤPKA’ ιkqNRQ2ίΖl˜Ρ‰¬Ξ>β—Ώύπg¬(† λΰ ώgjBQΨΜρBUŽJδTϋΣ“₯VYŠQgPg@OήSϋhπ…\‰γ’ιbhυL.eNM ήDCMD#ε§F|ό;`{,`Νp’Μ +@ΤNA­‘Κ_PAkΧυJƒzaϋ‘Ο‹ιH4 xύs2Η#t:xP"eΤ{)ƒτ6ZŸ€₯²@Π£AζDΠ Τ θίΜνjΕΔ%Ή(ZxŸW’ήύ8σG ξN‡>'Έρ₯ς’‡²g©‘Ι’ZO₯DjI zΑshšdEu" |KΕ ~…ζ€α@4­U‹m€MλͺG`2ζ­Ξ»Όy¦²m€{W/Xΰ%h9ΟLR§Μj™Ξ’7LšZo`@ϊZˆ±ϊzη̎¨?ݍι΄τ!Lλwψ +F„DˆAd#’ ?{ίΆέFŽdϋ~ΎΒ«^O«™ε6Η’¨&)»\_v$/β-IJNVUwŸι©*+EK‰±7±#vi’ΥmΤFδT€ό"ΖL>τβΤ²(€ sCߍn{χ]ώ! 
σ“γ Η Ξ_Jυ؜$aΰQΌ™z[ΆH]ξŒb€,`«IS)’ŒΎ*M[-’”Ι²Ζ‹ψdc6ε~{ίττβ „UΦU@@$²p« Ψ΅rL>šκ+Ψ³—γ]“|χ*q @Ν Ÿ<…„ΩŒΟΘΥέ»‚1ΕZ€j;(λC‰ €Θ#ξk_<Ά‰U ›&ε1EžΑT ¬S9XmJ:SœΨλσL–όeΠ Z]¬ΐ{š­¨ΜNΟ Τ‡­Ζ`Α1‚€‡ΧM|π7ΥϊLͺkmΧR‘Ρb.l²ψΕ,@½@*«οΝ^c7`.N2~$K s•˜_Όu²ξ:HB r€Ο$MnVŒ½±†β˜^σ~qΠΙ9ψΤGWΰ‘‡Ϋc‘=K6Λ9M(ΰe5 8U&+cλΠ5Ε&JA:‡””ti|ŸϋΣκξ(»(ιa%ς]`’wΡuβ`‹·>² ~«u¦β@% C³ŽŠbtΰ r†9ΐTιΩVΊg/z“#αgΫ φΣZ'αLw»ΉyΠΣwFΡFGΰ_ q5%—=,5Δ[ˆκEHΤκLψΞΙUEl#έ%ΙWםŽ}±ΛΌ ϊ‘-Κ—»k)Τΐ$Αk‡ΥͺU>ι‚ΩD€Ξi[‚eΚ‚9Θ\ΐ,’L¨ZΚMS,#­CŽϊŽvώΊ]ϊύƘ~ίκ(χξΰ•ΑY’‰\55ΙTH)`§φ‚=ΝpMNf[ƒ}bŒJ}ΩΫΞ„£}…Ώn—Κ%c̟Ώ|ι―όqͺΡ9υύA1²±‰ άkwŸeΝ&J/Žb=βΏιAšΚi.χVrUX:ΊάE.Κl΅ ΕΟ±ξρλγ€ΡΡ/Wςη» !―ZΦΚ+wΐΰΎvΔPZ³Œ_T¨:!Œ²…shέ…B Τ‹—SΥ‡‚ΙΟ΅yG⺌εώ·"°Ϋ’s"• XGE„Εώ§²,*>Iႍ.υ&}*s*p*Ί8*8³άΏ|ήΆf\w”Ύ0ή‡2o?OπΤ¨H»"2ΊrZ*ιcΜ-³Ÿ₯ys3%Τ`J– ‘s–kτ”oτ0}/½/ΨAš΄œ(•MίŒR,AΛq?g[Ž‹FtφΩ•Α·βΕ€€cW@gΫ €asα…­ψξΑ³$ 5Cuf销[ΑK^ΐ 0‹%NQ‰ΜfκΗ*·ΞΉQΨ»•P=ΐτΡ§ ή‘'Ρƒ3'yi˜.n;R‘τ:|―ΒτΨ²!ΥHξx€%φΝmqΏΤά!₯?“%_dyωl5X&@oυ(L_7b`,]rμΩEιn”ΑH΅Œ7i8zπσετ€FMΏ¬H/­9°t2€qͺβ=΄ΈI₯@ ‹mIΓrύLΊxιQ`(!"ξρό]j‘%Pχ^ έΖ_Κη/―νΓjwσ0² ’δ΄)|ΫΛώτΫnΈŽ™q:λ―a©‘οΞ?6Ώό“|λφΆBNd/\矡΅›ΎK¦‚Gͺέkr©=°ά_ƒ-•€<”s•LΖΖ†ω‹‰‘{iή"=ƒ­‚Ι‹Φ·E€χδ>νw.ύ-TΜΫΞϋνΌ‹‰OΛyύΖ+3œΠαβjώm“G³šŒ==–} Œ!‡ωΙk5wΠ;Ήί,zl s$=l‹²’΅ζ UD¨ΑΦΘ3 "Κ\ͺκΝƒŸηS+{\ŽL†ŒοΣv ž»΄yKƒ]ρα™DAε‘ςb#Β°oiόια™³kσžΪΦO%‚NΆIΛ° Ξ~’]”`u†9ms •ΑΠΰAmDΰ³^K·—dΰN³ς·‘•knŒ΅΅Η±©Ύ=b£½VάχΖiΚ&Ραπ!p–dP―b½ΡMνG=Η₯πJ9Ϋ¨ƒρ(IϊmlΟ'R›“χF°©Λι–(gφβ}o’A¨•\Κ«Ζ$¦“ΚΖP^;Δl}*’–οψͺVΨφ@ΆΜΪμΰΑ³δσ=ΞώΈrD{Ζ fϊ£θŒXIΎIš˜–Hβ8ŠT­Uά 4€ΙΥrO‚kfM‚q΄k.U£οo…ώ@­\ρ†ΙΧƒ«η@IέJ!«h½pˆ Ϋ’‹χΠ&ƒSΤtl ŠzbKΰ§η„7o&]λ&6zπχο}-Ρ8Κic‹IΉ~βΉx’ΓˆT\9W’±Λψ<έP:χΦ.χK#έ#oWΗ0ȚΉϋ1œΘn{“UŽν¨˜"θΓ†sΟ₯k₯5†cͺ ½6›DΥΉξ€~FΊ,>Χ£\νs»CIΥ?φVΕϊ‹—g>= ©7.ΡpZKDΗΖɏqƒζ"2,Œe‹7₯=Ίn₯Τ<ƒ\—ήΘM«VΑ/lθVGω+΅˜ώ²δ0Φ‘Ζ“η(Ζ ώgJώ©*’—ΤUŒ¨w‘KΥ1ΥT‚4™qƒ nφ΅T0_ ‡Π?WβΌΧ½s\PN-ZU/½r²i.².zε)cγ³Ώƒ]o β·–RR‘ΧRΒ.§RΪeΌ7n›<ώ'9₯ “)G¦Ν~ψe°tΰ$.ΒJ5iΖΣB'©%ξφΖBέ­ω§υU bq[ρωc”§v‰ZΗΜ₯`Tΐ>΅nŒ§Φ£(ΝΌjΡdT(:im/Ω|S³οΡ!ψ‹P­-VwΗ{©RzώΤβ(KΑO”“ά­Μ5*Ei‡—οk…δξPN%CiΎKÊxώξι@ύΒ(ίuNˆότI©ΑuZaD%uΉjρn;1Ρ)ψcŽΈ1&Έ+²X~PIjdΊ‰7Άέ8֌7Νψ1ΜΏŸ₯ι;HS¦ΠzXGο’ΕTv˜KzεqΒKJrΥGε5>[5«θωY³—Οˆέ?ζ‹owŸX•Kt,³™ξHW2pBEš;a©Εˆl½τ pr8κ?Ψ Δ²†cZ/uώάg_ξΞ3ΰI€ qεUVτRN&ŒTαΚ©jπp4Ξd 
―‰u‰ΊΦΰPy§vώΖγν 3o°RcΌκξ°ζ: 0šKSϊ|”_kξpνbsΈb’?΅JR„\΄d€ΙA&œ•©ΊL"'ιΞΧ)ΗΝ{Y?6›p4§Šχh§f’Γlvp~―;kΝ D― FΰL-*&k|Ν9pqnX₯±£Λ©»nή•Λm*ώη}ΓΣκ­š&‹XŒWR’ 8Žd@»’=ΙQ(Fξͺ +LRX½I+Η*½Ν?8Ύ98’©+_[3^Ω ]/U“φE 0VΥ’ω&―CQ™Fl7ΐACk’«Aώ_IL_|?Σ!fςj_₯ŸZοΛδ Ξ=7Σ*β{pΓ!]χ‘‘ r!9§‡ž­€lq$qσ²£ίσάOΎ­°S€G,'ΰΛbO.pχEMW[t5Ή‡}ξ`β4ƈ‰iΩ2ξ<»­βKŒE§p£S>θNK}κα¨uΊƒ‚3v‘TώEΟΔυβ|ˆvYي †ψmcθ)5Ξ5ΰ“ͺΫ‘₯‹l]³ηΟΦ֟ŸŽφAΜιιλΫρς:υ~c’·\όυƒ—Χ²|-η¨ϊHΪφώAξώ₯Οpཎb7ΜΜ±jΠ]&C»{ ΰ6FS ΙζδLΔΊ“ž±ΆvΫτ s2›ͺ67΄.μIΔ©ή‹ ϊNν˜w/ώ–4ω}Kn₯JοnP„σ&M…ΐ5J£΅ξ†œ+-μI'VΆΕ·ρ-Κ·š‰0–ΊΥ γψͺ=ΞH#ίΕ^+ΕΤ)μ\8±ΘAΐ₯±ΌRώA vΠ2P3ΦβΫPZšΉrΒ°’^“Η_P§ΩšeΈo<Υξbι™$IpΡ`½d8wΙ¦‹o‘ΐΉαϊ9ŸƒtjΆΕμ;1άc«@ύ½ίΊLNή¨›[}OβΈ#oD Fš~m/ΌΝ?Γ₯S€ν―›ν~εύ­-}‹’ŽΖŠHR“’Ό‡ΖηL)4@%z8€ eμQk±l₯«(Ή”œχ¦|―m7§‘λ[?X撝ο«―Ψy½ξncνžbΨΚ€ΩΖ|U¦*—="I@θp*•TuM@˜ Ў”6ΎΘAό‡m|’Ϋ{bΪ -κβ&-δΚιάΩyΌ»I‡ΎFL τ.ω"50π§NςTJ`’λΆKˆρCΊV4ΈsκnT±ιη>’₯±oβmέιGΜ}|… 0=ν=Ψϋwϊ‹Λ?Ν]τ” ΐ   ²r%VTΉ;³΄1  ΎΪh“€ΊQΥlΨsημŸε.~,/­πΛ’=λ_΅d~x ›Λ»§‚w³q¬aCο› AϘJ₯€‹α?ΐCQΤ ³υΎ,•EοώZώΞ m3 έ«γΔ›ΙΞwΎ›>'%FγDδΌϊΜpœ t½φΤkυZΓ†‘"|Wέ+–δδΌBwΡζΡ2]ί^n…$ϋΥl­ΫσFΡrΦ³š )6X_gΛ!ƒς‹cχŸAπχ°zb“¬+cχ±2Σ†›k²4=’U35<”σ€\‡T„ŸΠ€οNηΫ¬ΎMΦu o«μάφKtV΅Ώ5냳s1‡λΨ«_—dψΖ΅·““£Ι/Gb5A³€Η‚ΙbvzΥ 1 ΪΏ=Fœ(%zΜaN₯kΛ"NΏξ6ͺp½~ϋΡΊεΙk΄^η]8„‹d#Ζα₯|ΨΜΤ…™\ΐUXsζΈlϊ;Q² Ζ“6¨ΥU‰A2rɐ*…*f€ί»k­©]V>¨*Κι|ΩώξΖΐό3ν]!<\ιMσν»$MžQ|½GW9Έ-VY#iJRk†ΏχΉ sDpQ*‘‘¨4ͺ^Eσ΅a­ƒΏϋu½‚ΡQžŠ LΆœ'Ϋ=h ™λΠ¨ƒŒ¨J€ώdΘ ξΫΈ™ς²cε€ž@1LκpΘ—Kš’τ»0lωKσΕΟ?cόΥ9οuέξ½Κi<¦NNAB“–* j ‘€:^·ŽJœ‘³γ¦nΌ;ΫΐxΙΐKί–§ΆpwшΌ”Ίρ6`#cy΄ˆ‘ζŠiή+ΈpΡwΔθGU`#n tώlxΫ•ζϋv9mwώ–ΰ?ρ,ζYlΡΝ&|Σ°ΑŸ6&{ΟΒ;,GΈ·?₯¬[’lή€H)Z[α2 ›Ι…Φ²,S֞#Ό(‹NΎ œ­ΞyLάcŸWM~Š&:4"@Xͺ3ΕFΡ"ι&ζδ\«ΙΚύ€)†E”¦˜Φ³θί{_A@-ƒ †0t=ΌwmR²ͺ©$ ΤuΛ’r7ί5)#rViΫ’υ•ν ©Σ«QΙψΉρcÏ\@LOΠΤ)ΦΗ έ—M!%GΈZ…ή€4,4©ΥΙrά“Θj‰₯:Aδ§½Uj°L3£ HξζŽί=OSnπ―Rž]RCΟO’ϋ"§I ΈθΓxΥbΉcoΘ@ΖV‘b½Ρ‹Ρ}Α²½Ucώο=ZΉ€,j5Uλ(‰«’MC7ΔƐ΄©aw₯•τ㩦ͺ¦»Φ­³ΈIn‰½!‘χ¨ΓύΛ·/›L’qgΆλšu’ :yΆ‰)ήJπK]7₯ά۝jh.Ά—jž“’ιRΌ‘ΐΰ28Ε6ͺΒωžΔνΡΑ†ΙΡ υš,†Ψί#'#5/ž4[ΙΕ₯-εDΤ]„&’ “hΤ¦t£δθ/;NΟζzr gΞ¦*A©…j»ͺ%ΙV„dSr pšΖrΡ°u“ΆώΫM<μ§»ηηΚ€-„/h©UpMύΐ$Ν€ͺ ΐκ5XΚ|ΞθξU™‚ΉŽκ’w}1Η%οvz=ͺ,U·DJ:4u[œ€WFΎΠN΅Ήΰά£AIΖQω#yv;’i»š?mŽo8Ζ>ΣΠCί;Ι^έΈ?nk 
·ž—ϋ_φΗωΌνRΦΟΎΜ—ΛΩΛΡΓ‘kΦξ?ϋFmΛ§£ŸύτςΈAŒ›ΟσΩςΰe^ΆΩmΫ―IrL_<ά«*―όH?όλuhέΙΛ—ωσαo:^<<Ψoί»yΎͺ/ϋ_ΎΆƒ‘ ™'Gπ°{ψ“ž^Χc~z}\Ν€8β¨>άu¦;τπλΆ—φξnmαισζ€@ΣQi,ΉŽ"˜I"v›ΐk{kI…ΨˆjϊυΉφ“χ:?4zδ΅ήζ.μβqγaj―έ€φžέξΗ߁vHοξŽD=¨‡ΪU Αυp―tͺ9ipl[spπΡΘyEμVΊL zΡ%P#“Χ}υkΘLπς‘Q§?2ρŠεP9εš$!Α`'OŠŠuΎHU ƒD#ltIΖ3rΣ„(!Όf`¬zhPžϋ>μ£m—πΛ›μhέΨξηοœΩσκV(τŽ:…w˜r@oπŸ’9_’‹ΊBκπΞΠB.* MuΕd +9BΦ ΦέbsΖι~-x£=ΰΫσΆEΓ՚Qt°+…aκΝ„¦ŽB‹SœΝ„]™‘€ή©Ym[f’Ύθ!5Q‚”ͺM₯Œγ)7Ϋ{T(³½φθξ•ΞIΡΕΙˆs7œ"\ΖoΟσα2}οζόΖ„ΓΫΣεΉYŸάƒΨΎΎy4έ:Sΐb…݁K[έcΰΜ^jhΡo¬ΪΰΔJ>Ωμε›rλλβ;ορΨ=>ϊ‘σw:ΒέΎ9r„ΟσΫV,ψUŸi‰ύ,*&λÏߏyχ­$πετΨΤή#-7™ f'}Δ#!Q·pvΦ€ΰΉnb’¦d€₯!%98AMO-‚4yGΞζ{Α‡£νπ‘ΈρvvwΟ€mq1Yφ" m/₯UΞ !’-5„–Š)Ή0—nα€RK@=±ϋ+<ΣψzΏΎ―Ψύω€'™>^+Ρ ΄©D”.N΅Ζ]. ρ °¬ΆΤΰŠX‰N‡ΦMG_"Μ_t«5GwσΒ½)·ξΘyΣγ’ŒΪz {ΥdλγΨ{§.*m­0…Ηάͺuh 8 +iSƒaΙWt]ΙT πΐΉQγ{†θ.«tτ€ρ½8φΧNίE?βNΨO'ŸξP±s%$#=)΅±Τ³ΖΕ$!ΉfSΗΆ ”lW‘ƒΟ]5 U·ͺ άQ<Ω{qΗχmυ#uγρ—F?ύj°Κ&VΥϋš"'Ρg)Ά5ˆP ΐβΡΉ46L υΙRJnUΎyτφ]sΏρ7wŸόIšͺΊl³—ŽΊJ-Tαb₯Α/|€•ΣzIΛΏš•n²°ƒτΑΥΏ>ω燇Ωo%΅PΘΐ+f¦αž8pά—²ΥΙ©‘™‘R³.‚w˜Φ°θEΕ±Η:ΩμqB£€kϊfwΠWgΜmΒΘ»EΨΕϊ’o3η ͺ1/―a†¬“Iι5’Bxπώ¦¬ƒͺ«OΏ}―xpiμΟίx]p,­.ΰίMήΎίΚUƒ48”ζ9)ˆŠ~Pδ.‚%I Π’ΜšΪ\ΜΎβ`η”Ίύν0>Λ0>/yuw]iη„:ΉE#„¬+Φ§.ή¬c,Ύ»J¬ρΙv*u‡ύŒ ?"Ίr<—& cXπryχ~ΦϋοήDΓ·’+’•.ma"υ.Β2xζ­λ­2V±‡FΌΞ$΅ψnG:?Σγ. ΪεK,. 
­ψa 9žΤ©ΗͺD4+'Ή½'ιμγZ.I0«Θ₯ki―γbήε˜ΗβUτF=(θ¨ύΘ€βυwΩ]—ΖΈωΠ‰TξΤ£¬^©l0«¬ͺthμFiOZY¬ΝΖΚ+#rι%IJΩ·ŠZ$„bι7;"€°άˆ,κta˜σϊν3½ΜNβΚΤΫ³:0U'͚l}—‚iΨ’Ρ°€>g`) B«ZdΊ°=%!έK•²τ=/,^η/όε•νέ#œΎoj šή@έΰ[,9Z’vTΝLωΖ½pΚΙΆDž¨Y.m•\€!™_αi+θ<Ήμeζ$aή‘eΕRφ‡pΙ*¨Žo_1 rΜ9Λqx0ˆUΊ>,er:Έασ -τ‡ψcσΊK£c#"vp;Υ9ν I:ΖfMΦSNͺΩ`ΰw΅Ί­Fυ;=ΎςC‘K±ρzrx4u»ΞΘ„gΐΊ—ΆIHmοx[0ΊcЉ=[B4™ζAœ¬ΰΕ…²&Ϊ6NΡϊ"²]Ό>TΒ έέ­ZΝΖuM ~S¦PVT΅•v²%Μ‡ξ]s{ _»C·$]όαŽΠ«™ΎQ5Š[Θ#Cά$ςfŒ½ΑoΪ”£Š@μ!J…F˜ΌΦ½X§z"»!ώΓmnϋΖΗx₯ηι]k₯˜‰ωh˜Α*΅Τœ=7Ιΰ+²ΜΪ&Σ-λ]w0Kl­ΔοxφD'έΞ¦ŸE@ΛΙΧRΗ;g`·jr+ ΔR³u΅‰«ΙFwΉώθ4¨> ”₯ „s’€5ωd¬G ,K‘$―ΕE¬B_]ME‹ …N“JΗ܍—VΒ₯AΊ¨eipΠ~{ϋmΓψ{kΡuDmNΑ:‘eυΝψβγ\bxQ*Vz~˜QˆΦ θ΄“€UMI.f΄5σΊσβI·Δι;v•sL^Ξ6=Έ|q^z0GWΩͺ&ΩΒ™{Ά6λήαZΛοΑΨ,}έUεσΞ~(ΪzξY]Zdλώ’Η“”&g°½{X›\ȘΛb3!wJLΦkCrOŠ―€[Ν΄2ά€U™›"•Ζ'i1ϋ}({~yόyχvžŸ|‰1΄ά: Ρv nBΏ₯ƒgΞ΅΅ή½ Dϋ„…ˆHζ"œœΧΔ‘¬ΐύx™­.MΣΟΒ―Ο³ηΩqKι“+‚² ϋ’KΪ<ͺͺj‰ˆ_Y4αΝΑγ”΄)κ¨R= hξS›±$ΗΪ?ρSύΊ8i45PTΜ ‘ˆŒ¨jQAp‘ΖΔˆn ›[ μExα1΄fPΡw5~όνOOzξ Ε‚eΣAΊ8εjˆΗV{¦^πΒ…4(%ΆQ―QΊθv­N2νΛe΄p›Λz¨ζξΗΈ0?bͺ—ͺΛ< qhkHڈͺ$mΕ*Έ 0 Φ©ΡhΌΰ@ίK­α#1•Ÿζ½Ÿžu…ι{zΌ±Qˆ4%P°rζ₯ζ˜5ΠΌΉΖΞB<•fGΉ ΗT ή½΅oκ†xa”‹Ÿ'’‚fϊυΪXŽ™Unʁ“)Mƒj’Ž’ΟB {Ρ&B|³1€€°i‚ΨZ­ώ& Υ#ΦrN5π’%>Γqξ―)uˁm·U•` p­ 6Υ6 &tΒμ—PeΫλk)χΉ†I$χqd 0š―?υ‚…k ΊvՁ߫3”†\$Ÿ“²Dή”ŠψWK.ΰΑj{ΣΨb©‘½UΆο$oΧџ7Χ ~“8p΅Έλiφtzη<5χ.L(rώΕ«>–=Ύƒ$k=ΰ/p€“³―\ ψ0*Xρ™²•/£\φΊΫŒύΓ¦ι…AΤ*baΙέ9ΟΘ+Σ’‘Ά(U‘Ι57μλN±J+O—4#޲2Xί₯Ώ<άΎύ›°Ρ•ažOlŸΎ+­ΖXu*r<-‘ ”“8]ΜKΓΆ`#π5βJ«–ΊK"& ?1·YžMيWΖώΗηωﳓfΛnrD°&qNU\`T5ˆ’‚'‚ί%W³!o[cO:c±'`€¨­[r4ΒEπ濏©Ιμ?ΩτžΈl™•Σ΅8΅GuM–_Θ©€6Φ²TΚδp ΚYέ{"γ:\«jr3ΨVΊΛπ'•ΞΓΈςHίΨ”ΟΛΩΣΊ€nύ`yτ`$©ό ?ΊΧ‡aE―Χ₯;\¬ϋκ¦Η‚λŠώs+Ρ_]{ηL>}%lτ†ΐ„[ΚX{˜9­ADX‡,οƒ σͺ)H—μŠ§s;Ο¦΅β;ΥlN~δ1Dœ|Υb=μWl}lWK‹]sφΡ…’=ΙU―h/½[ :₯ )GcbC75{ŠΑ†oΉ›ϊ‘ Εp_«” ]±αι pšž°/Νx΄Ν₯Έt( ³DξΡΊ’tHΙaΗK‘hN<€š°cς’t)~JÞt_™Ό«P˚εDΑΑ}SοΎ#ήKI’Υ{š—>5‘΄νQNZšoδJ€[ϋhΕ›Τ-~]$j—wrΚ:ωΩ\0½wvJ™ξ:Βaoou¨πzU¬±²%Γ†²ΣΝ²κΎU£Νω3)zQ6RΡ‡ΧhΨΛ6άv/ά4ryΐ[οT΅εkΌ6FN{²gςxΉXž«9NώšXd= ³§B·ΥΏ½/–•ξ"ξ›΄ΛΨ†ρDŽͺf«I›^ΐ+‰H9–^(Xβ3G£'=:’.ο/{’₯Uι*SιAΌ άsͺpΥ‘ZιύPuε ’YS{!%“3Ž€,€Ρρ ντV§%Mο ε£ΞˆΨͺƒ{[‹­—λLς½ψή’bΡΩΕΜc§a_%BΟ”-f„€žΡK;{œ1Ηί\.χ,Ύ©πΑ“-Σ]ξ7Œ\r}=ϊFϋ1_|yΌϋλ+~zι³Η QνΉHΦϋσq4/i'/ΪP6θΦu©ΖζΜpb’ <Ύ3Ζ'ι«VŠjδsZ v–ή6Jk}Dάxgζn<6`9;8^avς>"YΊ°6PΔ 
‹Ro<§ž£“&DKE*΅¨%#ψ>₯ΠKΚZUπMxΎρDςχΊΟ³c}=}o i&ZˆV“ bš†©!έ[RE€ωAe²mΓwƒΘQ²σ΄ ϋu›­ϊ£τνΪ+z?ΙέΌ|σϋ‹ΜΫΜΖjKd7W£:ΤΧηΦ0ca"―«―σΕςλμeϋ|έφa{βΊ΄«ž—T%δ?ΌΜηcΟίώΞhWTΡvx^ΒίmΓ-χΔπηΩσW^ΰ·οΎΉ?j|)vc €_gΟ»‡ς|γιeχ`Auk™αΛΩφtDόeu«Ÿ3ΓkGt|ΖΧΞϊδεsέ0κ# ρΥλεψ΄Ά~ϊΫ·ΰΒW›ω:―ϋύ Υ :{0,}·8ψwˆmηαΙμyω²m 1ΎΘ^κωχΨΜ«‡ΩΣΓόωqkϋΝΓƒe@?Ÿψyu ;Sι‘Χ:wλǏ΄κσΕΣςŠμψvm {Ά¬sLΒΟ«‹χ+χΛHχ–σψΚή<Ωv¬Η«sο~ε&^<ε—yύzζΣΛΗωκΜγΧηϊȻ糄―;<ϊ£Œχ‹˜ε…ΆΟΏ1Ζn›ΤΔ³7$Pw³Ά\χ9ŸU>ϊrπ»gλ%)iο/ΐŸxυfα 7ύ‰ΰ 6εθάΎœΜκYŸq:+‡žδΌ)ΟxΨv±η,M}β€ϋšsV;|.?_zRΞϋ'΅|-x s5θ0b3-f/«½>φoX»νrίσnβηv ›\ςx»Uy»ΫΓ[Α/ΝΆ€'~nίΗ½ΎΌΜ«›½ήGβΰš »ΤΝίΪ<ΓΪ‘I[-―;ΆƒΙήy;¦ελf΅ŽΈΈΣ­-ߟ=χωnΏΌ<Ξκΐ’κβηΛj~ϋαΕ°ΊgΌΌqݞœΫ5χƒVWΦΔ6z|E›κ’‡>v‘ξε‘HΩΰτΟYκΐ{ϋκHχ–Ϊ™”Ϋs2VΗ‹xt€οYηqι΅Ε•ίΎ_Ε7γoΌύžs»μxΏμΒΕζOO‹γGg ΐ~9Cς•€+O~ζH(‹₯ΗΟχΦαχίj$hŽΖΒ—ύ?‹tνΐ7w~#+χ¦(wq§EΕne^Y>›ΉΈu՟“ |[TΫ6ΆΆφ½D/Ύγf ·/σ‹h8fn1>Γ̜ Mkο³½Ή%€ν=€λΆAy»XΦΨ Χ7ρ½+tδ0Ѝαƒoο°Χ.Œ>όϊΛo΄Υ‰΅e.Γ8φ'ƒεΏσΥ¨όζznpM'Ζά|¨ρΣΌβSγ<όαόαε+-ωάρ _Ϊž6|›ο~™=Ψ`ίΧl‘ΕΠΧhυσ*ژ=·Ω‘ωΐ!VG>_ψd‚ΰŠΎ}fPhή;OΟφΩιJέ|@ξ ~½ŒoώτΜΛεή<>τΗΓΧYπιΚZΦ―ά^ωΰE—ψA[Τxψhτ•–slπCsΒ/Ÿ|jύoςƒνahƒqπύΧ6Ώυf/Λۈξ₯ΰ7Ά.ΟP‚ 8sxφϊ<IτƒG`G«λϋιΑΠ-Xdv5;Ι#Φ5›Π―η—ωςmΟmŸbDΚcp»·ŠΦXχΫޏܞΎ\`g›€r!…ζ-°ErwΈ!`= ·ώ‡“ΣΜ“πu6tώ³€ηVD+Tπ€9¦½‚gGbΧΩΓ«σhrϋkϝ#]Α‰Ž&oŽ§Θ^τ«g.°αα™Λ')£Η·αίvFrωΜγΧΞ'nF€'Λg‚ΥϋgΓΉcΰΆY Ί†œΞ ‰ΫbοIdΫξώλE·χGœ+aelύž„•χΖ‰χ†„€ν™Τ{9ΛώΌrύtŽ œ_;AϋΨYΎψκΣΉϋΨIϋ•“žI<βG ΙeoΈ[2Σϋ¬χ¦λ₯Εόε$‘erhΈ;ς!%WΙϋΔ^š1xMsd]zΛR“š{γͺΡQ9LΡ4Β·Λoc―Ώί§οψv}rzQύ7Ζϊ(YΏ΅Υ.9*ΥUΘ½‡dœν©Ϊ`»δMQΞ΅+₯(”\)Τ‘A<+ΒO~9žEA—¬’%[ƒQB$CΑϊ¦+Η& b9Jžd Ε€ή3ωF9(Q£0v9ΎnŒͺσ&΅­c1Η¦ζ~6Zνσπ>C.ι u™Ν½«$0?Ζα‘X€Λ+[+kE‘ υŒ-§ΦΉ[‚O93YͺΩzG͝/άΊ1βUϊ[|™=―ξ^Pͺ²ͺ±η,o%£Ψω•tΎάc1’Έ»ξΕ36₯Ά&ΧκZ0VT€”»©|="Q‘ψ²q„Yx¬°Mθ;.Hœ|Ϋ†%;Ώΰ"vAΚƒbΥԚ5΅u#9M]œ·V§ήJήΩͺ½J#šˆ7Rn™iuχ1κμtΜјjRv&aΞc€ά•s]™€΅(χ₯ENu9•œͺ!K πΧZ#΄o­ζtyΔΫ₯p’ι9uLi¦οlρ.ST₯*‡m›Ι+™ |΄±(r$Š‚J”ΉES4’ZyΗςΎe¦·ω¨'ΫzrAΊŠ˜’­+Ύ•ξΰΰΫKŒΖ₯Π¬υ&γ_:8W0Jμj*œzτ‘·Γάί1ϊ•]>b–ι·@ΓϊW¦HΞW5­{oT± ;΅&Ψ‹;Bu/ΥΥ\ v 6yQ‘ ρΏ;{»ΝgNŠΰ¦^’”Y4cM42‘Ν$― ‹#)¬’{l₯X ©^”Λ+ιΪTΞ&«Νπ8+OόΉΎΌžH4ί‘;&Ά~ξ6z8x ’΄VΛ ^Ξ…μCοέvΐ²TŒ%kφ1ΔΦuφΖ‘œ—c˜Qy 2»!Ώo^ώχτVΪL>S±6(Άkc²ΠKs‘Š#Υ±ό›IΎ\“QΑ₯Ίmά΅SƎTvΦE΅¦Σζeψ­… Οvϊi%ψs‚ƒ .i‡θ6l`Qφγ†V šΩvΔ·B˜l«RkB΄Γ7ۘ΄Ϊωןήi-Ϋ|ˆt²Qc݁ν`§™ά΅r؟άμ 
zΩ‰QSM"»Υ4sυ\ωσPΒ}Έ §―Ί’.p’(ΌΊwΡͺ”€z(Xφ˜#QQς IpΔΰEA‚ pL{²#Εgϊn uΎ>˜:©Ύ‡θ4Υ‚›”΄Κ!‰tΑ₯΅NΛ½Β ΓΓ[ID[Θ„βγ˜ΘΧό…ώυʍΛλ—Σ,ρΙ[%Ϊ,‹2`5ΌW2˜«nDͺΓƒ΅j ’¨€ώΐw5fg}Ψ°π’Ύ Σ;ˆ`€ι,¬MUιτ†@ RœOAΞ2–‘Oπλ‹±AdUJNZΪ6>λ*‰“ 5;u›2Ό³*{P΄Ll‡>ͺJtJ$ΪYκκtΛr SМΑΪ€Qelxύ4ΒJΣՏσΤΑExφεωAꝎDAή|Εi5Θ\4Υ¦ˆδήz½ΰ2γBξΨ\ΦieΚ–ψ%ΏŸ»/¦dŒ.rδ’K…Νb0ˆ•Fdo.βX¨VN)ΒΤS)fΐM[:Α[–δ΅HΝ` ƒ6—mZ˜hUKˆ#!’½ϋͺ“·­ΊέΝ~­ΓΔόuυςΊϊό}₯EOρΞ³κm{Α΄ας©z’Κ‘)i΄D~ UT  2+χ¬[7½4X*Β9ΜόΗ³DF8‘ŸΪWa}!iu]L Έ 2Τθ,PRΈ,W5Φ& ΄H‡Faoμ€Υ’©η‹r–+*όΉ1FπyΫΩ{d¬‡™8ϋΓuΟ]ΊxiuώVιΖ[™ƒ3ςωh6i»>ΰΥόiV₯ŸυΛ»Gύο0ΌΧ£ΰ©Ζu|77A.ιΥ{΄_3ΔiΆΘ=μr˜*τΛVΉδ2±i††I-2{zy\ŸiΘηΓκυερ½ϋΰ\ΕD£’€&ςs·δh^4Υ±(ΐε,ΗΣkψsYsη’ΦޟvΗ}~˜†΅}ϊ}Ρί7Χ7dU][ Ϋ4ΡΏχrΈ{(Ή-Ιωψmώ ¬pή οΞΈ[˜-Μύ=ρdψόq²Γ]ƒΜ[ΥΫΝΓ~FF%IΎο΅Δ/Ύ=JF9―Gν³­k>IEίk Έ§+6²Ο†ΙΌΆό΄Iτώ»ΞΛ6½ύ―Ϋͺη‚Φm%jwΫ³{ ώ‰]ώόρnKnξ¦N|ۈγΗηG7Η•j7Ω"·―¦Ϋ‹0ήTnޟΆx6%ρ0Ήύ|5ω‚W«Ÿ{ιڟΦfΓχ†D½·\ΤγZsΡ$z©_φΊ˜;‘Sέ¨›—~MΩH.ν!y·ΩΓwzœ΅Ν]/ΔΌ4οˆ ϋΛμ}ι―l–ύΥrϋΫPHιl)γνΏυ$Α}¬Τmϋ|OΔo’R¦σσtAgγ8_rΌHφ­οj©ιΥ—}ΗL^΅Λϋσˆ?°ίͺΓώΊ£Ή³υQχ9ΧMrΉ6ξߟʝ½ά<£s²ΠπΣΎ>ρVΈgΤδηk˜n!†£ΥG«Ελ“„©β―<ψΌVΔ[?Χ7Oλς?ρ8φL™ζώ(ύ_‰ΙΙο[L‡Ωθ[δsΕtΏ“δ‚ώ}8̟ΎvΦΨU<ˆΆ/¦έ&o­Ή§η,W΅?n·ήD<ΎΏ`岏ΈΧ™-Ώ]_αC•φίώζνΦπςΩέ“½ΓηJQύδ]F…ͺn¬GϊυcϊBΑνΛC${i~ρ΄βοήτώΊ x~‘O|λy―ƒΠ­vΕΏizΓ•vƒΆJχ3ΐϊΙ›’ίίt+νι“όWώΎοR¦~+Ϋ+™'Ή;΄ ·ΏQώΰηο ₯_Ύπb―eΔt'Α—nYώ³}μψς#'ΰ·­‘ύ³”½3₯_<ο܊€έ¦^0ΑAγ;¦w’ΣÏ w’D=pΟ³ϊν?ς°eΨ$Pύ±ΎΌ˜–+ΥN}G%ξ;¬ΟεŸq·}^μοrϊ;*Lw“’ΚYύ΅ΏΌΧΜEέώβλŽvοΈΑ/άIOμ―ϏŒ:ΰμχ»“ο˜ϋ‘·ϋ²žρΑγϋ₯όŠΆV-ϊOœζώ“¨Gς!οιΐo/œρ­ζϊ›ν3·ί^ ½zΟϋ±i»a)o”K'XΚS iPΰwψ½^δχά›‰ύχ¬ΈQωx|Aϊκ½>[‡gο; ΎρΖω‚Ζκ[6Γ‡Ϊ93}$CμBΓΠ“δ±£ސKvτšύ=!κhΕ_ώ]ωΕ"πψ§ ΤϋΉ€)%iœ‘]ΟnλώΫ€ΡqύΓ Χν{κšA^ώ²β?˜_·ωγVΣϋŠ©ΞΫό]€όθ ρ/€CίβFδ‘Ԍέ_Ό½o ξ-oχξdάχΦ}$ίήώζΫ=ςgxŠ‘λ°IΟ_?δT/΄»fɍGϋ3Μw©θψΟ<ΘΎξ=Ά]ώ{nšΆ‡©=#ώ~FWϋΏ%iuΫκυXζijιγtΟΥ7γU°*©nͺ2ΙctΧ–uSμY§NJUkͺ±Ž¨y‘“qŽΩ¨$=76‹Ύιη{ «Φ7ωdΫjΣ±~ڏΈ­Πρ½˜²ϊ·}θ—ΟτrΥΆϋΣxΨ λΚ‚9z•=RΆ”jTΝμ1PhͺΐζI΅kH-γ­.ŠͺIœ(_έVδξτ’Ϊœ”ŸΰεΛGiZwΒvοP–Ψ‡L[G~³]·ωήζuͺ•ΉΗDΣΘV_T`ΥΔ²&&6ΎQU₯jκ°Χ€z)Ρv9RSyΒ#†Λg kα7=όXΠΛΛ ίΙe)Ηρo]΅άš'"ε(—PjQ¦Ζ—μM―δ)e]σ,υκkθΏ]ΚjώM~νh_§5M,Ίš“f8‘=+‘Χ.u/ρΎ¦Ϋl“U,2ĘqΆ΅f[°γb0:5gJΈ΅½έ˜3::OώŽώμΗΝΊOΤοΤδJh-tM•₯9{φ+ίzxνaΨŒεzν^ΓΣpr٘κsο*γ /΅©Ϋ ,ζ?žΧΞΨάζ^ΆV8€cgϋ.ΗiORο -ύΠ$œσNχΨ‚έϋ(‘34©X³Ρ.κ 3"mφέs§fIq 
ΝυIωͺ²&―›Φͺ˜›Jd׎gψοΊwΟλβΊ«–κδc€1½wΦ€Αˆ–kΦYks±₯†5‚.MŒ‘Έ‘*#©φd½ςΡD₯”Χ†κy„Έ6_\lΝ2>Hg&ΧΩ+‚’l5!xe€0 gΒyKn|Σܝ/I™Ψ=aϊ« Š΄\ιΔΩ\φ³?Ž΅υτΛ΄αuΉ cΜ₯)crp•Ψ—Sξ”kΞ˜–Β"2ΫΘhγΉ*‹ΕŠΨAW§iύΥr_λ^ή–›^ŒGχi&ΝΔΓ?έ~ω0tΘ9[εΏόJz@€›νπHλ4―‹ΗOfSω?ΎNNnͺ6-Q§E)n eΣ3Ηd[Φb&™„½’Θg6*TrήΔ¨ΈrJCdΉ5@Βί.ΎΎxΞ»GαžKuN†K¨¦³uΚ@ΨΈZΣΙ“νΎb;w’ΪzΔ²'ψ0γΝΘ=IύφyλΎέq4ψ,dj½Φ7­Ÿ·γ\‹χ^›Ϊ3ΆΡjr•γb³"κ­j‹b‹ χ0HHŒh€GDρ0^χΙΪή`εωŒ/5|Ή`=½g‹ †$‘­ucZKŽαη8Αdέΐhΐ3=(P¨9šR[lΐm&’φ|ΎeΔ 16§”{q4cI7ŠασGV Σ‡CΡϋžj`-Τ/4Π6Ύ:Γ:b«D §rθa―©₯ψ@LΉΖ†fRk4Vη―νHR_ήΏΝΌό)Όμqψ8ΠέψNΪΓWbŠ3Ύ`]2μτ²θΝ§kc^kΤ³e!Ο^'ͺΪ‚χε†oScγt °`Ω)1uΓ“κ[€£jΩg J o HΨ˜Έ#―πW™Ά—―MEΏε΅β\‚εm]υo^~=θ»Οޝb+F©’#w@JΐŠV)—τΘͺυΌ’\€’±ΓŽUυšΣΘρΑνΣTΎ!ή]±_!’* o!3Fœ1)tζ1DΛ)N^λ&4¬Χτk˜’w¦εό:΅H½;u|ϘοΠzL&ς)`˜ͺ+ŠΨ@ΨZFΙ\Α[T—Άk›²*†“«`1΅‚p&…₯;do”ιβ(™½’oόωΗΧΩγύuϊu. ­8εj{*5Ι*Ž…Šη¨£u©fΟd*(΅.]%{™kΛ!η±έΘxΕηΥ 1oΗth-}?q‡@­΄j π‡ͺ΅ΝΦeolΖΎLαΰΉƒO$ς rέ1Ύ  Ψ΅ώΘ8ΜŽwšλΝ§›5u cΕ:›lΰ'»+ήS―UψPΚΡγ€υD”6JΦπθ8ŽuΝz­hΥ]І8» x γKοΉχζ βψp%‰Λ, #sΙJ?φηρΣkmט1ΆΟgο¦Ÿ¬¦‰½OXJs¦€5œΡ:ƒ–΅μJUZž_aB”;/,?@ŸbΪHΌή¦δ#ΩtqŸ/ŒψνsχžΧœKj%˜ΐV³/IGψ‡εL (/‡€‹u‘VŸ]Τ‚WhΟ0©ΪG.υύΔ+ϊ₯ːέΉ·1ΌNc§\χΨu*f X¬ž»717Ά˜χ”|ΐnΚ­:Ik˜†'€Gš‚<ΡΛγμΝg‘FΗ½ϊ² —―GqΒOξ_]ˆMašμƒΫ59€@k=a…ΗΒTΑ”6 xΝ"":–σΤυVβˆbΜοάΚl΅δ[{d½Μ†œν3]Α@ίγΤh w„Cx³FCΆ™– a s’![[τ αΫϋ³lc†m0ϋ …#λώmC_ΒΕ3=~ΪΌώ»‡-‘tκ79€YπΠn7κκ\Œ];7ν’œ8‡Tΰz‰©8“@Ϋΐν2Ё •:ž½}Τλ·ΏeΠ»ΏsIž¦Ψ‡„ypŸρ]{ζ |‘J­dŠΑςa1TSϊΠΫH©ζH§μ'• ½f{Μό ™Ψ"ΘΥΠ£+°‡ΖlλεήΫ{μlΖζΞΐ‰1‹ETΉ—$ο”ΪM‡ΡJpY{R D.iψx›C¬%·NΦΘ}†l¬νl+΅ΫΨθBŒΏ}h Σχ’n 'ΊŸε~0ΖjΉš›–Ύ³[Sd=Ά2†λƒ\κ7¬©=u²‘³ςΖ±.ΙΈχ=g* Α%Uν5Π―簜°Ε¬vAƒ–)/ύΧ@GδX½λ€ψˆz¬Έy¬£δ^s­“\¨<5'―pήΉ\Ή80–ε―HUˆ–st"έͺ©o΄‘–Œn¬oŸTAH›κ“n­Σ§I€ n˜ΞF:Ξ:RˆπΔΟΐΦY‘Q±gα‚•ώBqΆs6MӚΡE΄V>ZήύCNΧΐBΰΎC Χ‘b• Σ\5ΒΉ•#(αX@nΤ±€ΪZnλάξU€λΔίσ€Ψπ Ÿ=|Έόώe§άύPπŠ@νλ\.·λ;ωΉχ6VΑLφ^[«ΊΔ½ΖΡK/LΗΠΥ–˜³1¨±ΚV£Zui-rj€ΎΞK#βΛγSά} 6a²±₯ε \I`ΐηπmΔΥq—†Σ=W 7ο½š/+;"€Γ]§3<6gσΗΗswΏ“χo5K ň2ό6;οΌΩιdMR˜ΌTαI½ZQ1ŒβΐΣj«τΖξΖ‹Š±μ₯,έ,ί›Τη?΅•ΐrt‹35SΙ½φNhEPlλ‘£’ήΉTγ·p W)f ^Œcώ­;nY_^α†όMΗΝ·ή©ŸΦ+yΘxžΙθθρAr™.γϋ šυΣ/”P(Sž{°ήͺjΡX!FiWŒ‘T݈ψΪδp8Μ 
ήξk"–•tcήΦΤΖyω>`€N»―OžΠX£«ΑΈ†ΝΠ°~τΖƒ?κ”lπ-–Τ»¦(\“μ5§x*ωQΈ·ΰ&υ³Ό<λSσ°°V.4ω(©ŠήcΛ(ΈSΕU+ζ?²ΣΥϋ&}ΗmKŽIr†ŒI΄i3l–…HΫlfl†³MFŸkγ~8ΉqΤwp„—υd­‘žάμ2QςέŽ0p#‹V©"ή3{ψŸ`’“Δ’-<ΧW§oέιήΓΠxif&v*γΥΈ$ΟψFΛ”‚"Ι±,ΐ‘‹Ρ€-ΡK(λήfέΗrϊŽ'νΣo2š†Ρψ φF9Ig˜όδΎ'Ν:8ΗΊΖ{ `Ρ‘ “‰ΕQa'R:°ΐ™ΈφΦι€NΝyEΟΛΩgν§O›μΆΑO­UO–χA έ[fϊ|]©(bfcΐœsΆΦ³*@g䭏Ρ†ΡΌ‚οbέίH…”t[[fΫΓζΐ2ο²ΙaBνIVΤΤ‹‘ΜEcjΛFIh \7 f…&95Φr›G‘Γπη€χγǐ۠΄Ι8~+ΘΎά«:;¬Οy=_jΏs”υš§>ΌΡ-ˆλχ%lΥ6; šΨ@C©;Tγt”}Wsx@Ν:]RΝq€KωjώτψΑ1κ{d>κήYΰΰ C/rιΰ»‹Α`Ϊ}p-6e“άɈq@ƒZ…Ψ%³"ζσήrυu†9^lΆΑΝ#^•“†Τ“{‚Τ­U&*ΈΚJZ" »¦ FHΆ ΎU„7ƒ‘b.«i€uΐΑ-Ιώδx-α`Ζ­V‹ύ|Ϊk‡vŸ~یϋσπƒ­ω Ωξ‘iQ#+ΜyΚ†¨ΘZˆΥ†.¨@Ξ·[³ L”{φZ.1¨•%_Ω¦ϊ«­_O0ΪπλξΏΰ’\ž—±ΣΔ‡0x’ΡΡΆp“–”z32ΐJ%DS“ΰΧ‰:Β4θxψοΓΣ¬έΓLχX`€ @ΆNнa@οD’Ζ₯“ΛΦΠF\ΖΚŠιΨΩL@7;λ{ ωVϊ¨E†t’“ Β©O;[ιͺΊTͺnYΫΤν«­M ­±›P¨Rυ~9(ππΰ`c;<Γ‚Ώ]Η3/WάNbθԌͺT ¨bB’Ϋ(bŠ]$8§Έ'9g‰σ­1ΧV7@Χ$χbͺ‚p„«1§„bκΙ(Ήƒ’zΙ&γ3 °±"δP ΐ%ΐ )δIΕ8<­ΦΖQ Qλfμω[jxEΏΟΪΕuχΔ«―όϊ'™ΤΎξ­8qc°ϊl­Qξ/@θ]‘B!K¦‚ζ*mb`xwίH΅\RνΆγώό}} FόόΌnΆu±z˜ΏlJ‘χ1ΕΈ΅–«“ΔάΙ“ŠΨ³°ΥšJˆ‚6`aX8₯cHb‚\θ‡Λ†ΏκV…ξ| Υ79,Mε[~ZηrmίrυV„wiΐεuφΨξ>lλ}*nγˆΖ1Ηδb½”ΔH™ sŒDt]c°#φBI½eoΆόΎ5O©ZΝηR’€Υω¦Ζƒ’ω:Αa/wcg½=;Ύ(ŠξU—Tΐα¬υλ¬~½fέ³•ˆΣ›7`ΰRG€4&Χ}v Q΄Ύvεk–ͺυn€žx©"νbΙtέ#HEμ(R‘Σ`•ŽΔ#Ηeώi ό΄,}zMΒ, p”lGεdB,N1Θ ―°€Ÿα=…€ΈυT£ΣRθΔ’(ζΞσρΣΐτΞqΊΙΧuΦΚέ/‚―ς]@RΈ)Dΐ›¬“ίaΡπ!δ`ΜθxCΙ­ώφΎL_c”UiμMAK€o ~FQ3΄ 3&2A•c1Τ»wΐώΤ3°}Óe†¦σ­±ξ35=.΄Œ‚ά–ΩΦuυIN¦›(mB–‰Φœκ=θ(Ÿ"gο$Ωaδ:w el†q6Γι=βδιΊHρ­ μLΙ΅)Β5IΏ…―Q„…Y=([ r‰Œ\‘琡3·$`ίΒO7ψξE ±8"")6VΖD ‚”εŽƒωf dΑMφcJ–ΊŠ₯D,&Xƒ"\u5ΧƒθΙ(§—`«d5πg—A:j©ωΛ„*υB˜Xκ’€γm @\R=¦ΓIκ…ΐκΥQϊχrϊhiuV’19:πOw’Z΄”Θ8£ͺdhi.9;‰œYΓ4k$‰ΰκ(―‡Η5₯Ή7lάrΖϋ7ί΅•Ϊ 61ε2¨ †eΠ;Ÿ»~ΑGα›Ω0v*³J,Ϊ Y;ιΤ„@λ ΗyY7ΰΎ’RӍε~0ωRε Tθ ΠQoͺˆ6±Fh4 Ρ³ΒΫ\n!‡\υv&)υxόvrχ}1fε1*@`lΦR\L6΅ gG ΛA΅κ"ͺ,ε1ͺ1•ζU<_q gΤ_λΧεŒκΛb#–}Ρ<‹6ΑwZ όςξηŒTz.@K€ΎYΥ.Κƒ tk²θaΨjswΝ΄¦€©§›ΙΪi'eΞ}€ ]‡‘§7“{hHd(N>Ν΅X½ͺͺ$ ¦¨šκΙDbwδd2 ²κ Œ)εm:8“'pξ»³˜{{ΎŠ Œ1">{ŒI1ΰ`ΈΒ.9ΖHSκ͈fΆ”}›ΰ­vf0ΨΕΧ_υ|?^ζCYοέ'5HΉgŒΩf―‚ΧΥ1ζΡb= €φΆ˜žTΖnΐ$ Ξήd|IΪ²«‘Β!ŒRƒŸGm5¦wϊJP£’Ζ++¬9&©ΥΣJhΊ J±₯ ·ΞXŠMDˆΰ«~@«xy+Ύέ¨ΦΕ|Ή,L’δϋ―Χυ=γew‡ˆ?₯ΪΕς‡½Οε ΦT¬ T}6Zf—›Ÿ”K€­\Ζj]['Ι¬~Πd)¬%±DάΘέλΞυ+=?σγ¨aޞ ·±;ΔΣ, υΟυευB₯γ&ά[Ϋ)ΔΜΖ%F“BρΪ'‰q6ŠΛZΓα+¬—„?ƒWvs 
‘dŸd%·mζ7ϋΎ@έixœ1/’0a€•ύΊΚͺ³“λ>~ͺ—^|†³FΐΔ-]»Oύή ΘNcT\V«₯p«TΤF8OEάG‚'ΰψ¨~ξŸ?„€ΘZΨίΰ§θŽΖ»x²ΛJ%œ —₯όΣl"“…ΓΣ”‚‚ΠqF.g·s+kλφrO~ ΌΒhfXjΏκJgόΊ*ˆςΗ;έίγ4{Gƒ»λΌσ{Χfˆ¦:$?½ΏlΓσky›$%œI v\υΓΜΔΆύ*?ςjϋ£μ–―κ‘!eNwPZ α­ ―ΐγΑΟεκtɈκͺ’˜…±jδϊ>%"]›ί̐έfύ«Τ«έXάΉI―²`ΞdΖ™Φ<ΒΣ#ΰζU¨"ΉO΄,QΊb}UγQ ¨{`Γjς²Ωνi1G‡.‘ό‹ίΈυ)ϋ°zϋρϊ±xš±ψkσM#δ²πΔΒd¦°«Τ"i^ƒχΟLh!Ήβž#,3ΒYο²)z΄μ©;ZΟΣΌxޟi™ͺ¦TΏ!² «LΙ* 㛀@y€J_§L„h*.*¬7b<9·ΙΥ4^Υά2k›·T~€²^•υΘρ ¦§,Ή3#hΆto°ŒΝ· „2’€ iCDrΒpŽγ•<–μ+βeνιZ6ΦX|Υ<•U h\E$c§3ιψβΈ’ιIΫχ°Ϋύ†§ΏZ°~ΐxαZ“—pΡ.Ψ KΪU&YΝΓOhUΜp&ΈΚIΜK£$―ž& είτzgntsyΕΣ†Ωτ¦ΘCŒΔ8NE6S‘­0ΘT΄%9_’ȁ³ΤLQ’^Έ,™₯Ϊώ$δΓL ―?z~styCu<”ςPžͺ R.@‘ή ¦ljX•―=Μ²ΐ©5ΛBα"™Tn¬ 4»ςϊ«ώuBεςόΤν_o₯Ψ c”Σ`ΉΈ¨m1Ν¬<'ˆέHrAΈ8kΑ ρ²@ΰ΄.ez‘Η1&+νwΕψε§΅αμhΪx‘žςUN*΅κ¦½Χ™¬½βv#HeΘ6ΒgδaΓΓEk”Γ}sΫυξ.νOρύηκΦv<½…f8I*Ϋύ‡j‡{°Ρ&π†Ζ?&A9ygYδpΚP§³ΣΒNK ΄@JΩκ ΨΘ\Š ^ΩνΦyΔrΞw¬ά…ν4ύ€&hȈαNoϐV<„ο#κLL΅&SΉ~vauλς>‰*˜$¬RΤΝθ¨PD‰^&Δ[F! ‹gcΥ†&Ω#¨Ÿφ ςΚ=ρΛ±Qšΐ]XοIu§„ιƒ*)f’”Ύf›BԎ©ΠpieB΄ ˜Γ΅’Άΰυ €°ΐ)z!γ°Ϊ₯ΥϋΛΥηŽΘάk—}ε·^ΦΔs’±e¬ˆtaΘrˆ‘Fˆ@£"ƒ€JŒYΛwž1=RŠ~ˆ&š‹Ω‰5¦§ζ G ‘ώ9•λψεN*ͺiΟm*j8ΞV›1εc²9pΤ―viƒΗωΌ'kvŸoιPz˜bΦ4ΐxν΄σ΅νμ©ΓfΫΏδ?O η«χoeτžMˆφ.ή'šu»Y»naΑ9W“ΞγΊ”£ι˜?|Βٝ^i? ΆχόΞϋΰ†ߊΔi`°Μώ,Ÿ»}λϋNΞ{˜ΞXλ#BΌ!½O—™OΨϊ¦h₯σ ˆψώτ».‰Πρteq\Εξeυ~Οiίqš3Οlx=&νεα — ƒη„ύyί@̟°KΫU:3μΚΊΠKŸηpκ»Π$Ÿή7›υΨλWG}ωrθhίvˆ»ΑΜπΫ M9άοϋ‘§{΅mwνX*ΌΏιύ}Ή>¦[z0½7οOΈ ₯ϊ_γ2Ώ³ΝC<νήKΊΓήδ=Ϊ€σj“‘w IO.g^4\vhΞR‡¦L―Έ;H°5‚η‹έΉΉGΣΛvσvΈP©yMΓ-ΗΘΛqΘΟΈ‡ §˜<±Ž Έ Ίδ5GMΞ΅ο0NςΧ*•)Ct±σι8Rγ–0άτ,—0½οhήΓۊ~oy}ΡΡ‰«οόζ}ί‚Σ™ΥΈzϋ|Š«wιηάt]oζ|α< ’„°{yΚq‡Sώ Š? 
ct W‡φ/‘Γ>Pζτύυέι<Ψ0`-ιΕΝ›†d5ΟcwΔ±ϊžCΊfOΎA2uςνγύω«υΌ=ζgϋ•ΣύΛΑ0Όύ<Ž•}:|γά',89h$Ηrόυ$$ψ/ΨΪciζM•ξŠΖœSΏ²,ƒΗ;μ>―t|βΟ’όψΰ9Œ(q νλw”vΦI )άM%›©I§‡†ί†ΗjCυŒ²οB{α;όέLw|όΆ9’Ÿ|ή½Ώ―iv;™ύ}μέP·'η„Bω7±sΝΓ 30ΆΜΚ/ŒΕ|=ώŽ(먣»3πύxKλϋš †―1:Άψύ y­sΦπž΄VωΣΜνΪ†υ`ίξzw%½ m~ςfX ϋΎzΏ‡έ1­―:οΦ"πk^_O°©υ2].―_½ˆBΊXΠvΠp Š[›ί²ΑΌη‰Ώ†ΟHK|}‡†νJ&юχ$·OOv}?>*½aχs GځτƒWξSiΦNΏ~’§ν*ό²ΝΏΓ~–ԝμ ΏΌύZm7o―Ѝ£8Ž Vkψρt/e ‡:yjΠ¬Ξϋί—#»Qΐp؝ώ²ωέ€Η[*C{^ΊΉΉΪΗϋwψθΫ7۞wφfLΙ6οΨ™ύz*ΣΨہ½Ίs͍Xήs9Χeζ#¨·JύωκχμӐu™Ϊ»^kΦμ]=Srη=<8nɟˆXΧ›£κύρY±‡ Η«)pίJbΝΜJ=ov»Kό4'WΥ½Έ?;πE|uΘΑ»?§ϋΈξœ(§»ˆog˜₯xΩύτ;6γ#ή³Οδα³mςS߼ٟ)5χδ•ΖO—˜5·―αm—%"5ύΆι!}₯Kή™zΣe\_ΛM5q~₯Β-~*\ ΑLӘ=-,œ•4\―ž_φw,¬}Ώ@ZαΊΤβfΞιF p#"ί…“-α·wϋ<θΕ›ο›Κ¦έa·ώ:4ΐό―nί}—f(Ι{€ύ2λ\σUΏΘ‰Ϋ‘RŠζ΅τڏ]l»_o?Ύ€Ά:0½ί¬λύ*y/ˆ²Ώ§F«;ρ䱨‘ Ζ |ΎΎ$·xˆη7Ρ/lΗΫΎ•Ρψ΅9Κ8βΐ ‚Α9ΪzBw§οNΊHΚIΎϋ₯@Βρeλ~Ϋi„Ϋ;Υ βΚ΅›[τwν,§΄Su˜ ]οΝBγwkΟψInj=<ς|Wέu#Υ“·ΌΒΛηϋ‘ύgΘ/―<΅Ϊ {©ΛNξeα7λ's₯α<€ψzΟIΫ}>Ό― ΟuŠœ&Hέq!urawΤΩTЌjsxFCμ&ΟΘΨτw;9-—±\έΆ¨/ίuCΩ[PΏoΤ'Ζ°νΐtZ*΅»τD̍ -z%μίΤ¨©„Μ½m7£Υ;=e|OcρCΩ΅³όΓu<χςΣ~―Χκ?’™μz‚4§Δ`ΊΡeτX&Κ«ζέΤΡβ ”νχlβθ!P/κι/΄__]ώπΘAœ^ϊ½k°kΩ:w»œψ¦ΈΚ©π€]€Ξœ€Άƒˆ›?— <\―—ΠΈˆ‹™β¨ϋ2ϊο₯hV[D››ν)ŸPώPΧWsκwΓ¦{ύ=7'W ±»ΒηΉ@j†WθA«ο6Ρ\RŠ#)ώC&jΨλ\ΫμV:υJ™gφY-ΰ€Ύ]]ψ₯j™[ΧδwRuάΫ¬φ‘n_ΙΠN^Ώ~ό $Ž›zσ.ύ?–š8™Θαλω£’Ο1Hχ˜ΠΫݍΧΥ!S»ΕέΊ=₯Ίέ&Θ‡4YκÜz/ ύͺ,-Š7ύσΉ‰lρ…―~­ η'χίoπ4Πͺώ˜–vηυαwποœΦνΙ¦YΈχ³ρΤ…Υ\Έ{ρ‡σΡn»ϋždΐ·πΘΨ>τ«f$NZ{G ?tφϋΟ9ωυ,Ρ±LΦ₯ΉΌ3/}HΝΡ]:ροΪΎ7ΐκxΙ߈/e»y>ν΅ό‡§τΠΜ·r@ΨΘ ΨLΜ.hκŽ~ΑJœ‚½Ω-ΛίΎάnv7H^°Αvϊ|,‚Ό«U`žuΪΠω’7£I`({°€9ϋ·2)£wH£{9Σ©'"­ p³79lρa“IλhiΌΈ'–X)²‘eaœωL΄Έ††[-πU€Ρ£ήσΰ%4…tl―¦6@œv‘WrΩ=τQDsΪ‰«ζ»γωίάΎ‘α1ΨΕˆ΄Σ1γ™tL{QL‡ JQήq]ρ~H!»$ͺφΕδ,˜εΚ%YyMωoσΗ9Ÿ½™ΟKΣqΈΚt †y’Ώ,†ΕRXΦΚΦβUPΙŸƒ•2σδHΥlJΕΩ'“!lι¨ΌqΧ­ζήξ%Μβ4Ε‚GΕd¨ % /%‡΅ξeςΪΉ’³–&x•ˆžƒU‹­!“ΓΆΫπΉ-‡ΔCσΝ―cέΝQ΅>€― Ίά?eΘύΟΏDCcωΏ†9«―:ΗΫΊΫμŠΨ½— šzXqrϋuΑŸPGIœQμpl~΄b»ƒžluͺΨ[Η•.ΥΦ μšƒ Ζͺ‘nFsΖmΡΞδdkΦ9(¨βθŒJZΑσGθŒ‡Δ ρLŒ ,™,kb₯ΐŽC”ήήWΛX ‘Ϋΰ²ΆΠž%μΆƒΉ©Q°`Ggά΄γ.Νβ›/™D₯ξk °kJγωBΒ΄”λ@δqΎHb)2šˆC.z—ΗȜOX©Mh0&h%mKϊ|83VfŒRD#‚£§P•P5{idΑ.Θ Ό–LΘ’•֐GX•φ6 §Ρn«Q*σu‚nyβκ(€Ϊ5&¬=MJMD`j«΄‚‰ͺ‚ Ύ,Ÿ 
Š9Z©H·ΩyšLGέ|Όε&Λ<τΪ™Έβά£Ρί‰ir‡‘Ο?zχlQ₯Φθα’‹¦Fk₯(Ζ²hR)ά0ΰ…Z]1Ϋͺ‘Jf¨χΩBγέ0!ՍΝ݁Χ_½ω‰ζ\’;"™*†HžiΉVRE^uŒ‚LUVΕ.YΥΐ³j˜”ψΒ+©₯[xΈkΚwμΤπϊΑ….Ο™Κ³-xE$¬ΧpΟDJ ξDBbΔγ„’―Sl7η¨ΡΧBZNΣ.{ό3η‘…ρ ™ϋE ΏΖ΅‘ŽœE 9{jΟmΘΫ&kΉΔ. 㑟czΎ@'ε9Œš§&-π¨τθ«;¬Κlξ—±¦IrŒΨuhzΖά‘ύίφΎ3cλΫζτβΏς ˜Βƞ5Ÿxͺ›νΣώγ}]vwΐΤ―?Z6ί‹A`Ήš3 hδXg’C΅QΨ„ a2£!7Fε\hζozΧΏMώ~ϋρzθ8g·ž’Ο„°<­αmpύͺε)Ββ)™ί–a‚«ωGυο[δν}³{*λ’ιͺoη%ξH€υαm{μi·Y4Ά”Μ­θβ:)y2wF“qOaύώώ©g$εΎΆ;½γΡ¨΄ίΌοοYΡυξ}ϋΏv•1#ΎZώηΫόώf™Ο‰Ηΰυ$Έy ,β‘užώΝχŠξN}Ί= wβ/Χ=―·θBF"§qDρDΊΙΉ¦g)v± œ;\ϊ—―Oϊˆbώqέ οΓΗξΰ]t›½ΗΈόν¦Ω$Β=sχzzήβΉζΐΞLΫ3Ή€ω‡σΏίκΊ™όΈKγ፝{ƒΨ9Θ¦81·y Ο8ρ‰`K’ »%O\|"O=Χ2#­‡"ƒό΅Φ³χθΑ'?Mτώ[ΦΧV6φc‚υwηUΒj0qQSα0gω±7΄Σ•?[υV°qϊψžχΞΔΙ΅aΈ‡W»%žž’<;‹;œ±-ϋύgΛ5ι‹aښAZ°…†η^κajnηͺœφ¦ωz»ΩάP‘}†ρ9Έτ;αΞE·ΖB³RŸR8Ρ»Ξ+ψ_؁€ΰŽ”ΚWLΦ½kφΓwn‰η9Š;{'g:'Q:'»\)~;ϊυλϊ»«2ζζEο;άιωbή³;¦@ζτΆ—4³ξœηϋώιΉΧη1K†F Ύ.S_»WΈ·ς£k©§ΦΈ*_.Ί›r€ΠοΝφσ‰λτ:ΗT„άIΔc½Lzšγw0o9lσaΘρψFœβΧ₯}Ι(ύx2tό8ΟΦ η^0ϋΧ <;uΉ7βΠΊ~`£΅£'ό>ϊXμαa’ηΒmMΕΘT«,‘΄υLλc°)) 2'eΈtNΗjδΚ•bcυ!ΰ£b΄uzO46§Τσsέ\&–o{p•sa…Χ\TWΞρκupΑζ\E΄ΙDx0U3ΌvFq%’d)D‘εψJ:i˜Gψ ’D/•F)₯0ΊΒ+‘'š'™΄\«l|±ΥiN ΝθŠj%λΔBΆ½ϋr½|―³Ž9F@±Npυ γίY–$Ή“\)šηɘ·Ρ Η‰q©ΨT¦o΅ w―ό€:ŸKά†‘@ηͺΙdLΝ°+«Χ‡χΊ4η>ε¨h ΆΦYIbΤ\(’—N9λΧ9ΓR²Ην}.’LψΗOΟ—±`₯²2±Œ¦ΦŽΓVδZD`FΡ Sε•―Μω"X)ΙC„΅fF{ƒ7Š2b\»NWHέfB±tγ¬Φ†”mΑσΐθ͍ ŽaaΙWSŠγ‘Kca,œ΅"ˆl –(‚΄Lgα†ΫΨΣϊD^Π½ˆ>-μιζ¨Έξ'»Νv‹·Iλ< ™ ΚsXe‹&ά‰δ1{!jRΜj'˜ΕYΌη+—Φ[SύH'ΐ‰Ί•'n@AλZδ«ybΊΉοl‹x„ˆs—ΰ"φGΫʝ–;a˜­›ΑtVjY<Λ*…ΏW }!kμΫ°€4Οώ£YσξP|ΐο\θL¨ΞĘjζ5x8kPUƒ2€i4"I5γ6Šw΅X^k‚Ϊη"f-”ί\θρΓύƒ]Zσ‘Ϋ‘|Ώ% ,B‘)ΩU_mΘQ΅˜ ήiƘΙUralU©F·«ό\yΏ_¨GΆ`ω @™™€…c'˜Ο1ΑŠG₯΅;ΞΠv›«±9T°\€σωjΨ#Τ.θ’³ρgωœΩ/x j™]wΠ)Ό|xsΙΖ(Χ>z/αΰ3SΨ` ;· ϋΊ aήi Ud_Δτf­Μ±_0½ώ•λδοz]φ&Λγχν―ϋŒFxδΊ"pM ϋΝJύAΫ2Ρ<ΧτBφ^½°yΞΈΎ¦υΘ_ξω:x-χσ‹oζ‘6ηΫ Ν·˜o3ΰuκEΪωώ»f̌±Ή½γζj.γqΧd΅LΤbό…ƒ¦{’πΎ]œΌ€ίΟεψwM,L3~Ϋγ·jΜΏΥ·5^  ΐΪ΄β7«'~Χ= ’Λ#=ΨAŠιΥތήΡ’Δ[;wΊ²?νgΠχΣόΒΌ—²~?υͺ†νσζ Y ^Α“H7z πyΏΧXλŸyΥβ-~>κρΥν׏§\ΩΨ@1˜ΒxŠ‚{AΣr½*Eε,bτ€4ΟZΧ"ˆΠDJ8H˜ŒNJμ?½xΐψ7^ΟD+Γq8ΌΚβ3νL–©TuŽ™ΚͺΟΖWΚηε*ͺTFE° ;NUχΩ's°fιA©¦rfsTL$ό‡i-”Θ±ͺ,BeΕxУSκΝkΛc,ΚVxaΆΧ돷μ§?«)ΓσωφτΊΚ½ UΉ8Ρ'd‡H,7ͺδȍΰ”9ζ4ε^ϋ€]πYV• ΑF'83<Σ΅D°ΐ@~ΡfΩΟ·Cήγ£ίΝ&§nˆNͺ ₯3•ξT’σ–sY$Δ΅B8yˆΚrŸ 
ž¦Q‹v™Ζϋ΄;]Άδ͟Β‡ΧM[ιq-"kiΈϋxγŽI’„Efέ‘‹β\^~6;οαh"“UB8]‚–…€ΣwΑzεφ°KΖhΓkb!W­Š0PΚ€λ°σ9,ς”ς]γ΅Ώ 3 ήH•ΞNNΧvS@v«ΤBΦM“Ο ‡²νW_ΓΫG  ΄°Λ€ gR“ΩβηΟ ²·²oK/iΩv†–uI«όm9ΫΡLΛcr*c&e,}2ΤΥ?Ά΄Λ4fάtP„¦ͺςυq'ίΘӟδΈω¦3B¬y­½;§l_{‘ασ΅\„οο”ό+α“’ΞΩ·WΣ61Ηπg{ΫA<aΜRί€MD‘Α–J¬ x; œΈ“<ΪRb`±Do•SIόŒI/~Ύl~SE*'b½ΆΌ·JVNέΩ5YΧ³ΪΫϋNn፠γzυο !όΝJ—φT΅ξ…Αβα9:λ₯bΔ―V½VI Xf{l²5p’ͺΐpX[dM%F.μ /²G8Ξ\~zωœ^!…¦λnͺš*νωβΕN*›¨+‡%΄Ζ(ΔWTΠ-OKŸ’ζ šH4,j]Տω΅Ο^βcZ γ˜ΦΫgΫYΎnNr ©ΡVVœO2Q,―ΫΟxͺN)ΐdΈδΥžTγΘ ‘Zi\„·σΠΜtLp‘Β%AθΓv˜>ΥηΛE—υβμ^Λkz9XͺΧΥζPΆ'OϊΙ?φμγHΗυͺi“ϊ―’φMS[“€1SL~Ηm~:‹ΤlOΤόΨαΎ³+`‹›F8]]€:Y–ၝΚEΓ6FͺBfAB­ό¬>—` w‰ŠUα½`7‡G9]Τ?y—Ι‘·φf2rm>qŽβz„œ‹SΣG‘ytUiε2”§πjwšk“²9ÌuUΐδ0–2ε²B²ψμ7vŒRc³ΫΕ^ ŽμΙδ6¬6ύ Xlk[N?„Βΰ+-ZK^dζPX/#O$œΑ’ΚδYI,η ‡‘©[£Ύ, WΩ³Ιm0»ΰ†ΤJY‰ ’ΐ³–š™O%!Π™Xš5Ο‹q/ΰ_³Υ*‹¬6Ο‘σE§«ώ–Φ»^W{:,3,BΡVκ‚I ΊVN&!‰Ρ ₯ΐK91DΨΡQf΄ά›„ΦσΟIZ5kF–5i#(‹‰&’SόΧ- ΪPωΜ5Λ)ΰ›Tƒ(Μ"ΰ*‚ϊN’2 R[σfBwŠηοeώ)ΌΧ΅Ώ:Q 7δ›;{• k™ήΣ7T’yσ8°™=ρΛί²Δqƒ ²D4—hcZEι"―@q"Δ œΖ Feh"γŽΎΜ ft^GφΰŽK|ΉΊ΅n,§Ψ“B»t«SUVγΠ²2Κn…Dx"ύŒ D6μπ CpΏγ,Τ6GI+l*$Ί$Α'`Μ,l₯»‹/η 0Λ΅ˆΙqκ€JJ›`c`&©¬3gg€½…h]£ oδϊο\ςeJ[χ-Ν=’ρ0ψ+OpUž"Βd₯)΄ΠJcm†„²f‡+}-©1Ή(7Ώ,?½l7o›ΑΠό0Jv»λ"MαϋόtŽΘr^ݩϐw¬w-}S'ΆΪo"iψΓο- Ν±p"i#«oLE0£eΆ1S₯<^ͺ1ΠXCΈŸ„ΦVpΞ"ε˜Fλn·₯¬F²>€―»BΚ"βΚanœ’ΥGW’ZXτNΰω5P·@ Zi,Β3ΕGŸ<§£,Λ_2«’3B°$ Γ†[›X₯έHς)$[bΙή#δj<΄ ˆŒΊVVvη°ΏΓUn{¨¬qKŒκ;ύŽ^fjρš#*6’”ΈT4š‘ &lŒW@J 9eŠΤœ5w˜S58B)ŒJξ¦4tUjtετΌωmGψmΏyόμ—΅-‚^Ζ`dLΥ 6ψ‚ύΐΑc¨Ν&i„–\…J₯π&:ΉH³¦τ’4j‡{lςΙ1*³;<βΫΡδœ/―­§rηI «όΦ&iZ½Ώ•ύœœ|§:Π«wYO颏νϊ/ρO1­°§SΕΣnΦΏϊψjωƒ8$jz’Αδ yH‰― \‚Μ::r œΧΚL_ΦN ‘s΄kiB™9o wοiσVWΟsv}½ύx‚σz)“l\͎Q%oy6š8•Ο'*DοaέΕοb©Ž%¦byφΪ£TJ¬x 3Q> ‹Eψ£±υal’ Ѝ-’ΰdαΔ1ώό­ζ{Ϊ{=»Rž~οΪ]δσF½P“ΚΎ ΰ§§¦Šώiχ~5νcΦ‡Ή jύ‘χLαθ+Έ=Έθάuέ­ VKKJb").αq₯π5yÁA½@δojf)@›^bΐ/μΌ‰άΙdΉ4HLgςj·[½Φα°οΟ§nγήHΕ±¨η[ΤκKΫ«3‘κυf-MBΊFrLˆθŒŒ’{`bƒ}³)ια  |zFΡ‘Αe#€,4,j?eΰŽ5)C ¬Ac[ρω^^βGηΈβ¬ͺ<ΗXy@±D}ΐΙ€p«g>§*’υ,ΰDΨ”p0Υ9g1ώόoy‰ό­„Κδ‚`¬ϊbl`Ιλb((Σ’ΖκτΘ¨$ΰՁ[§΄Ζ9;`pΟG96ϋτ#εώόsφ€Βvmr)4Ξ’f[2˜ΗZΘ’Χb2“Ζ)Qda(5ΐWθΆChΙ“Ξ*ρ±ΒΎ‘RΛχ@Ε/•ΓΣ₯Κγ;€ “P^3' 7 kBΔ_#Beͺ§¨< ˜8zχΤfμ>Χ^†Cέ‘FT—άeYζ1h«²FW*"–Ρ*jš‡ΊΨ=όs Υ7ΕέΤV£2ΒD¨ΧΈŒΰ+‡3’ΗgώΡό‘Ζ¬θϋV*`95«RE”΅@„L„c)…Ε’ͺΠΝε«u‚Κθ’ΡEΊj½€ΛΗ„σ7³Vκ 
>±Rͺ5«Όκ]œ-}%―|ήΣ(H,$+SΒ΅ΒFdΛLG2ΥZmιT„V Wlz?—·²₯^ρν6|&8ͺΣΰ’ύΊό%Ž;£jEεθ%?ύB Φϊ' $“c@Z½Z$άu¨Y$%Π…*P* E, Ό%<‚ƒ‘7©ρ•e,ο߀֎øSγEΓ©fΉ@@]5iNOŠΰ‹ΌJICxζ’Οi57I»C,όμZ˜ΞS}ι΄νς0€n7μf₯ ,,?Ι*p–©PŽΟςŒ0ΤHΖ³ DhΑ, *πœΕ4R€8ΈMfΩmj2<%χ6hiuX?կž•`³AΨdh~}*8¦ΐΓέ$•α[«E„’ρΆƒί *Α–¨1J‰νΊη—³9SΘ²¦Yλ†:© ‘Ν`+@Χl•‹YZ¦θΒ.ζΒaν*―4SI°υΎέ8fWω±ž ΝΓω^πμoϋsNχ|Ț·β~Ι§Dw`œK#Drv*Ώχ%rΖ`ΊΙΐG!Β‘B7ΓΐΘέζb°#Wλ4XγžΎYθ‘}Ά“užάΡ\pŽξΛ―°ώ,ΟύιmGJϊ₯οΑL4Φ0N%e:yΟ€δΔBB6€ ŠΙ’GŒ˜²IΚγC!ω”$ 'Gκε'«K₯χŸΟ}X²|f*ΞL@#«DŒγ‘₯žW΄'h BβΰΖκUΛγ,ΰʈϊߗ·ππ{%ΤΔ£"W€­…G` „˜†z9•£ΦQŠœ,0ˆv>‘Ao89z6Ύ€ύK5·|i 9W<At€ξ\o ΡⱉXIHS²„ΕDψ…/<>nt‘BiΟGΏhφπωβυY\EŒ΅ώδ…†.kvπ€* ˆΞ*&ΔΞQ=΅³:‘ͺe9ί~a~yφ`Pm€f€=>,'ͺ•₯ΖS\d.?1š΄ΐ’‘β ΗtJt4ΊˆΝͺŸ_œ@-0ΔnΞ!r1Abrρ¬§ φ=“BωΒ$u ‡Œ°°jμ±XŒ{φίΔ(G›}Ο-­ΘΎRr“fΦ iκΒ*ΩΐxBpΝΜ2…Έ‘(š cλ ΰΑυ‘LMNψΧΨϊ~–νS}{8›šΟšΒ&γ"H` (@°…Y›9|ΝctDv$‹”πΘΪE‹ƒ`•)ŒžΟaθι5H‹‹—c.Ua%όΎŠΖH>6ψΘzžLΘI[΄΄1 !2΅RΖ(ΰ$©5=|4;šͺtδΪiž9μμwxΫ?Ό=§β‚7Πyž2t'†uΉθ€5<"lά"χPΧ·–ͺRη,•q„χΝ!8>_³N,u·κ'ΊΗ¦€,|γ‘Μ΄ΖΑ‰'‹PQAΥυ–FyDε ±*qGd> "`ΛͺΝhξ1{IS“M>Υΐ ζΨl²ΦΛR«υ*S7„@Hl=_™”㄃•PΐFˆό)aD’ΌŽ―ιυ)βο=—·žΙ΄rω| eμh– uΗΑΠL«†>ΕZ*ƒ*:xbeYpΘ< Γ %s³ŸWΏΠ^Σ™qυ_·Χˆž~β·?~B‹aL’’KΑu’ζH“eΠγΕs]Y…έ’4UˆΘ9…#"$Ό›l©γˆ―οa½™S€q{@μΥΎP…lΙ37qΈΘ^ύS,žΚEI π©},Ρ¬4$l=M‹QπΕpY‘$€ ˜ΔAδ¬!W―ζ Σνh”^oONν©Η7ηlLŸΎχ!²%‹• φ])Y)΄¨9*ξ‹ΌΠ2nΌδI&™£Μ’»8Mš‘ΙήΥ g€._R‹™dφ6 rD=d;2ό½•?ϋŽ_!PύτN^ΆdD“‰žhoLšZrΡa9swk`ώ}ΰ—Ώy ƒYΔ‡J4ΤC›Ν³9J˜ο„³n&ES(±λ…ΠŒKΔB²(άΜN#ZΥK#žAεΐ{­γΐ»ΏOω/«ίEΧnF.ργωο8ˆ,ΌΒΈš ‰WεΔ1AΘΤΨ¦1X1Ÿ”“ΜȌΰ‡Q'Š7T², ·΄γœŸUcωΩK$tϊΘψξφκ"{}ϊδΓw»•YΩ9N ”ΑέIγuaBd‹ΨΐχUIΦ(€DYF·Ί%$Οζ•F, gIοί΅μL[οžνThΡ=Ή[‡ΤM~=δ€œ΄˜€"(!’%ZκŁVΨl0² €·)Λ= +¦₯Υ6d;’69oM_ϋ«§tπΰ-yΔN²Φ=ΰœ3ΏuΜ}Ύ€ψ‡Νe4ΌIΪ2·1ϋΡ‡ώΔΓ…ΦNω@§δ“y/Zάf¨ˆ‚ύ(YΈδ½(‹ρVΈYQDρ– –Ιžw*·φρr2ίΝ"”„™¦M)X!¦₯Α:ζš4‚μd4 Ο=b?ΔΊYΓK–ΎΉ›}Ϊκ±0bŽΎLKψEromπι“ΰΰE[oMqΤΘΚihvA(š e£ρœ‘V4PΊBL\ΣψmΫΜ;£»λ€ΩeΟΏΗΡ`£YΉ΅.ύψΩφ­[μζ˜ko(έz’X‰Iψ\•Τ O,^•dšξ)ƒ ΪD§KΕΡ©}0LVf…R•Gζ%ŸoF—ΦOι«Ε“Z™Κ…’°Τaν2υTX”6 4ρΪM—²ΙZ₯ŽΌfW–ƒͺαψ£9”ρuK₯z8μβ“ΜY¦qD°XŽ1Ybpάόr`.‰”Θ;eθ(δΒG‰Kšρ­cQŽ 1+·_.QΔtμΙς Ρι3ΔE ’’)MΑ –αr˜W©T[p²)Ψ „I&V0wt‹Y†˜3cυ₯ωΖB›‘»ώrχυ‚q‘‰ΡՉe’ίVNWι"1Ν₯ͺ7 r\:Ί‘Ί$!νˆΰή σΑϋ»MϊYz|ωl³’6&λ­ ₯b©xN³ΖJnςΛ<—¬ 1έ*E&˜)‰§€S֎gφeJ——Οχervͺšμ°˜­Ί9 xoq 
ήd~i“β΄Ύ;ώζγ(’Ώƒ–Nc.ω™φκΌύO”kšqL‡Ο=ό¬44³Κ2cIε”"Uo3(U)ŽλBγZ₯pάc‹ld¬axΣΤκ₯DrŒφξœN;³Tφužwη©—€9;}Œ»—ΐηžηΠιΕϋf½ξΩ{ωΟεg²šˆh₯/½―•ZrCΤ<Ϊh¦W4ΐ]Άό|ΛJ2©8β²ΪΡΐ=ES_‘6WΌE$ƒΣ€hrAC8¨ιN lπdέ„¬ή 8§DφΡG‡8₯A€™8{x)|œDΔ ¨μ©Š₯ΒωΥ$§Νψ1:Ό(”‰D2Suv€ό€­(ΊΊu#Υ{Ϋ_₯΅ŠΏdƒOΨϋλ0P―Ή›.ΫΝκίQΝgxύψσψΡΌ6{&\ΜeΐΜfƒ’Q:1KΒ!S’$Yr%-UΧML£ΪΰŸΔG.~Zρ:)Ε·MΓ[Ρ"›Μί!M˜?χ΄Αέ‘Ύ‹S&ZxΜ)ΓΈΠ+ψž[e₯α1[WͺŽΥ Rw₯3£ρΝ&@ˆαΖ‡γΓ8wΎM/ςΗH“Ηβƒ,ΰ[Οh3Dͺ ”UςbUΠθΌ ΐWΣΕΉ`47aw37_l^Ζ&θLŒ5Υ]§Ζ'½LΌΛ#hX M‘€υ€?°Ϊ h«Τk€mˆΐP…ΥŠm*Ά" e<9‡ˆΪH†στ΄O»Po¬σπ‘^ˆύΈΕr —"|΄tœT,λΉ*cψF’ΈηjPYHžc"Ύ+αLui:,=,iͺŸφό‰ή„nφΈEΓ«y¨Μ6U"ΓUΆP{y8ν|TH&ft"ˆ˜ΰ₯€€FoWΖS ­ΙJγΏωοώϋŸώηγώ/Β cargo-0.66.0/benches/workspaces/tikv.tgz000066400000000000000000001266741432416201200202040ustar00rootroot00000000000000‹tikv.tarμ½Y“γZr&¨ηόaΡ/’)™yvm£™nυØl^ΪΚτ’ΊC;k+Ή‰KfΖλ>ξHb'H‚ΑΘ{‘*U±ηχΟ·Οw³oίΏ~±zσ²š~·ΫιlV_ώΎ]-aΈώ)!·γ?Fό1 %˜„Ώ) ΆύΓ;όΫowzσττί§§§η—ΩξωΏ>αŸπcϋͺ)όz6$α±eŽΖŒI*BB½2ά'DDqσΐL,ΧαωszήΣσΖ/V;§ΎξvλνύϊϋΊ7_μjρu‡σ λŽώίxΚσZο^aΎα΄Γ“ž?ύοO0ώ{ηιΈΐ­WKΏάmΏΪωjοΎκΫ―eΒ—έj1ΏσϊgŠDΥυΟ%ΖυώΓω΅_:Ώ΄3Ώύν“ήΎ-νd·Ρ³έΣΏ<=“/τω“yΫω-ώ’_Θσ§ gσl—€ϋέ~“ν$_ψσ'\ψΩ†ΗΎΎ­ύ&ΏŽΘNvσόxωόi³ίv«©]m|~M…η6o<<Ψn¦η ;·Όiγn[άΊί€g¦Ος©τ¦_yώΫ'ηƒήΟw“ΰυαM‚žoύ'Nxξ—/_kηζo=ΩοfσŽK6όΗσlυόΫ§ο~³­–ΗΟUΉξΛfmg«ΎW\Α™Ϋν|ς.ίΟU―ŸΤνϋz_©γ/Ω°7Θουlωbυϊk~/¨2ͺΧΟαΫ&RιQ·~ƒ8ϋέΓmφv·­>k6\•Λmη«—Κ…ϊηtξΏϋω¦¨υϟQλΜ½ήϊιi—σfRΎkψΪxωΙΛ|e4 bϋ‹›χΫ³νλΧΒ Ωλoόw<ΙΙ„…8φΒ£₯7‚'I‚Ί‘H¨"˜@#γQzgZ{"”ΕΣ3“ͺ0/α?Η3κΧZ}Γ©Tϊz»ΩΒ—Ώ ΕEˆg~Ÿ”ε@uaηC»XΩoε;άΰψΘι9bφabWΞ[Έ'JυjΆάeO‚?ΏžΆΑpΖv—ž‘Ο³―…ν8Ϊ™zͺl<~…Žτš…§¨^Άψ€Ε+WΆ—.^~©O±Φφ›~ρΏ}ςnΆ;Μ5BγηOK½HEhηOλ½™Γ :bi€3ακνΖ~ΟΜ—ΝφήψώΥτΏ|Τ3zΏόHFΌ6ώ$ΗψΟn¬AoOθ―ρόΟ#ό›λίί@₯λέΜfΪφƒwώˆΣ_ ‘7³ο§»Χlί¬6Η-UpΆšΟ½E TΠx…ν η,ν~³_o€– •7ύΤhΓ‰΅‹ϋεΛlι§PΫ~W-žΡvΉf_v½μ”ϊρkNQ/υΌΪρψϊ₯~ξόf©η0« |Œ©Ή^mv=―[9ωkvrAΟζ€ίΆ;ΏθwαΒ 8ξίz>ωξhΠξτΗF―Αf+―ΒβŽΪY«εη),ͺ0{)VΪS;oν¦v>Το‹―_h³Zψέ«ίoϋŽάrφςΊ›ΏUŒ²Ί£CΧσ•N|<Ρσυ«nΌ žkυxxέ‚ρ Έf»·~Χ9=ΪY»™3ΣάϋΝΜΖΕb΅μχk§5p½m·fσOΟη+ϋ[ωΆ‡Ν·ŒΦβΟεtχΆφ=εΡρπΪ…ήτnύΫ'³ΡK›Ό(ϊ΄}άSЧ„PΥ…«Ήl,Α° °Fm€xΥM­— X/ι,nQe_G=Ξx±λgΐQΗ3ς&ξ l€η–Ž―]UΝΑß;-n4\=˜ΒlΨNRβΜΡΠMίώψ#{±ŠžLΙF»_€σπ΄ϋ°v.όbCΊεtDi36«^γ° vxUm±ΝwΆ4xp[Υ1Pa-ϋ2@UΫyΔG΅=-T9,ΓΥ­¨ΗkۚGW݁κ i[ͺ"ͺ;2Ώξ1©ˆ²&—J?7 
ξ8ˆƒκŽj;οšiΟΚ¬iσξΣ¨Vχ—ΆΊ³}l+Gž†·²γ0ΒΥΝ­n§8Wφ†Ίaσq΄+ϋŠ^sd5Œyƒ³λXuπ+ϋ*γ_φžm—UIpΨ‚;·!?`ΣΞV?lΑEa·ΦΛYρ°ΪΎκ Ήtl9ε ;ϋ9ό2Κ‡φωυπ ηό»Gύ?}δkv /ΰΉψ/U’:ώ‚γίj»5^/Žx»ΥΗWυκ ΰΓΛ8]€Άρj₯±ίέf_ Ir_Ÿ=½ιΧ«άΩ›WƒscL΄dχΆqοa-WR 0 ­δ>l†¦`b³_.ΑFΔWBUΓ¦g_§»0['.+λΞ›ŽΝ“έονΡ6δTςΌυ.½λYYwA]5΅ΓšΛηRqτQ΅Ώό\£•’£όΔψγβ|gύOiΣψω?ˆ€D_mœΟ1{ώb Ι°A–usšj0 7yΤζΌ½ι™zΦUT΅m™φ’„džΛ% ΧϊεTΰ-ΙιͺΊ=ίΉ§ώMυοŸLώύΟGω˜ρ?FΓCgωί„¬ηπ=@‡Ÿ―:•σ)…½N—φQ+…œ‹Nε2& >nύ₯Ι#ΡΈώ3ώυψΧ­zΰœύ_‰ΗγώςΏθb\λΝ·Ωςe:_œΑu…pΜœlΏΝΦ ΗwΕςoœ]‰"όLψρ2™5Χ ©ͺ¨έ W*ΚBγ‰i¬σmi?N&JXνdH’ͺΉ•[zΕBΪA«*ΎΔΌ „ήόν‡%GsσCΙ[qΐϊ_Κhδ}ΜψΓ2Ά~»]m¦λωώeΆœκυμ6pnό©ΩŠώχΡf?›»*α |;ν¦ΗjŽk˜"…ώ‚p~ Ϊp}ΠE•ρ„Κ/»@Xmž,άv΅x*LΑ§l nΣݝύΫΏyξR%MSw΄/‡X7©€+δΏβlΜxΔψΓβήΌ₯«tΈ ΰ™ρηRπΊύOGωϋ―½,‡ΖμΖrvΰ MχΧ‹ΖΪrˆ:ΙδL£i²‘†¬GΎQ1N75ιG¦ξ|8«ζv—Φ|τ!ΧΌwζΥΥUa…bkωώΜ οM\ΨFs_xΧ“μ~5οΑCλ$ƒπΖ¨ ¨φύ V ΠΏ’αξ’όΐKY)ꩃ71Ά]‘wxŽώ’Σκ’~y_Uώ, ΰώ―cΒ#ώ{δψημ‡·›ηπ?!¬ζγ#ΧGΑ-Ψύ<ψθjύ£ 0WιΎΚdΙΗΆ$·4r:^§Υfύ•-‘.Eρω3Xώή`{μΤti§¦ο ΧΙl9;4KK Τ΄W¬‹F†±2Υu™υΉϊηVθ*ŽΔf@}Ϊ0TΗ­`³ SήZ§’ΏJΟΦOε—v`ΟΫ«eN<=4ήC©γΏΫπίνfΐ5ρ_¦Fώ·ΗŒjާ… οΔK•Puώ_1βwΗΓ{·οήrt~¬_΄σΚ΅υC]~Α@ΦDπ2Λ9ο_;jαDΉ’§€=Qα/Λ'S/ΦsŸ& <ε½!ž²'Ϊ½κέ“[Αh-W»W$ύ1ϋέSF@ΤNRφ±­Sώ?’5b£όœώΟϊ«Όώ,ͺΧ°hΤπ•ς΅{κ–XξS»ήgρ~<ΕΚΩ>Aψ³l}m΄ύ¨YΫω^/υAχN4Ί?φž6ζ?OkFBVyŠΒ$Γr†¬,ΛςΧθ­;SX‘:σλΦj+€g£?χWm½Ψέ*±œ΄0TžA‰3ŸBEΜεjƒωφ[ζ….ΡYο`΄μp#w.ύ{λ `Μ ‘χΠε\>g]ΪfnHkιlc·my¬3fΗ~ώλ"‡ξθΊ~4ώ ύΗؘϋΐρΗ΅χβ`¬Ιώ§£ύχϋ―‘»r“U“Ί‡.2jrWWσεn’YΏ§ΝυΎΈπFζς?oζΚͺ3ςαχκύ‡νδΫ*/|ŸΙ·ο'¦θڞ%tyΒ₯Άσ}mg–x©£2|…uU7>vγ΅y¬«.ν§-Ψΰσ²{‘·ΨX;,ΛƒΜηO+°‰7ωAΫΟi ή)μΟb8,ύ²„w†GI%Oζrz=8ͺςWyγ7ΑlO΅šΙ±Ρu₯όίΘώ ρ?‘κχβ’i­G΅ώŸϊϊΏ‰e©Ζ}ςj9?ΫχŠί%3εκNse₯G澍πβ.Ά= ή‚ŸoTΘXώ?”EE£ύˆρO™©κ$ _σ¨ΓΟ‹°Αωώ/Υψ£#στ—fφΧίͺ$*-D»½€iδΌΏθϊΏH7\.aύ‹?DάωΝRΟ§yλVΰYώ_δϊ­ΨtμωϋO/ί^W?ŽΆJ™θ·“­χΐλz»ίei€₯Ox“•Yͺ\ikJd²ϋέkΈ£Sx³ΙvΏΖb“ίjc%M“^©+λΰŽ¬Ή}θroΆ³;ίώύνnΠσ•vpδ™G‹†΅X―Χοƒ ωΐv]ο·―= ϊ¦mηγάΏ’ϋχ|“'<3_£₯ΣQκέΉε’ϊχcu«‘ Χiήredό‹υlοΔC£†ϊo*Ζϊοψ:zΉ¦x(Ηκqή²vͺXŒς‚Ί;j5ΩΉjJšZGιΝ7@‘ΣωjwR`g™e;š»v‘Δφδ‚}dQΠ[‹ωΔπeΙ/‘«›ƒs@$>S–φέ¬Χ£μ)cνdΆΪ.W«Μz‚ίΟfH_@gtcož’_ξ|Φ[ πdOWψEΖΝrσoΟUoʁ9[Λ§λ’εσίO‡ύοΏWξsρψ3Uγ'‚γψψΕ91u¦ήι£SΊ1ΛΏdύΪ)Κν¦ήu=d$ΜΓ±(δAλ°r!δ?αJŒλ]Η?Ϋ;θsφ¨ϋκψ32φψςΎ+°«6y³šαΰΜY#ί¦¨#§'ήBΝAύ2t\-έΰR–Υ&K‘φ7p£Ά\mΨv 
Έ¬'ω²Υκψ―Ÿό\‘αr”ο:ώψ_~3Ρa>487ώRΥυΏ€γψΏ»ώRΤ=υr΅b-ήσΌβ¬έͺ‹J0›ΦΗpυ|ώΛΥβ~ϋ1[ΊΥνߞισSΎGoμkΊοg¬¦JΐŠs€U˜γ©~ύ¬‘BŽωΏΧΥςεeX‡ΰYω/Dgδε€ς8­/Φ] &揷Ƀ”_Dτ±νͺŽυPώ—hδ{ΜψÈg½ŸόυώΏ Ηρ€ο8ϊ“νΫ‘ΆΏ!1ύ«'ŽΰOβ0λX•lΜz·ρ/ΈΚ₯N:Ξυ‘5ώ/ΚΕ(!±ώι~Ež?½Ύ­ύζP‹“œμY€²‰‘%κ¬-Χw^XBy§Κΐ{ΧΤύω8ApRΌΨjώ£ήΎ-νd_xΆΦ›]-’ τ»cnb΅Tνj ΣRΤXΟsAH±½&§­΄ζΖπbZ;F¨ϊ8'π5ψOŒόίΩr·š¦?ߏH^ΗjΔοοΌ#Λ‡εoΏRwwφFnQμ΅&j½\Ϊϊ·«χ¦^gύ4ψi±Š|”Uω`ϘρˆρζίΆοœCi½ώƒΙΡσO9α 7Οφωύ˜ύtί³ ²NΣ«έ°ξƒA<7zPxŒ”ώΏΎόω_r¬{Θψ…κP8ΰάψsVγœŽώŸGΔόΟ#§ΓAd€X€ΙqŒB wŽΌ:Χhΰσ:₯¬εG+υ=Χ@zΰšϊ0Ζυ€ρOγ\S€έ˜λέ@=@―‘2νΘμϋ4Yh»Y±Όδσ?χ«o.άΎ-+Υ°Οθ"ύΟέFOrŸ0r4ζ•fŸ΄Ÿτ/OΗQώΏχϊD\#Ηώ_ΏXmή²΄”i*n7ΞςΏHQο-Fωω»΄ΏJΚΧ'έσθ6ϊ0λV%pMEΖυˆρ/qδ:[AkωBŒύή_ώ—9/§εdΑ{σ΅i]Q gφΚc0©šk™^ Ώϊρ@œ„Η΄γ>J₯Β9F%XΓ8€Α#ΧG¦υ~ΉR8λ‘Q΅3‘Ρ˜χλωj~΄±šΝ(Γ?ΒϊΏX)\ѝH>GŒ)EεύκΏiέΟ#:ΖίEώŸ—ΕΕ¬₯ΡώηYΝUjτ’A`ΐ9ώGψZώG4φ~„―ζβ¨–ϋΕΤχΫά2/φsωΝ2 ήδ‹ψε8‡m }5WQν*“μ\αΕ(œ=zΑnρiέƒ?ͺFΥξβΉ€να°ι+r{€XόΥT‘T§ηFڈŠΞ£ΜH―ΙT#ώ{Ψψow«·ψ/₯„7ΔΗόŸGΰ?3Ϋ…Ή~ΩζΘ Em*U$§αΛYψq܏EC[γυ"Ψ ιS8 2ƒ„pH9ΞΌe­1η šΤ­r 17…‘σˆλι τ‰½›„ωJο2H’Rδš‡'Ω&πs“]'-,¨q^ΐrωkdαξ³X7Ϋψc–αŠ/΅]θωό»·Η X_‹f6nΰ-1ρΘl°H;tΐuϊa—βƒΔΦγΪύfΏή .AΑoϊ=IΓ‰ &έΌ₯ή©ΎφΐαψVλ"υx]b[ [ςξ¦Κ€ 8¦—L‰Β χ’Ϊo˜Ί'~Ι~OY€(­Ϊyώ­η dΌdcΚΕ—†κΞɍ…Έ™ΏΗΘbΏ1+„h‡ΟI¦3ŽϊθžόΉ©ΧχZ~»Ϊop¬<θI˜”₯ρ¨νMώνv7Ψέμϊ  β „ZχΎΎ„ž-ο?X’Ξ†]ΦΓc²{έxνZϊgά#sgΏŸΉκC!ΨJCŠ’Ίμγ†αΏ9χ§NυyΒS§Ι₯εkvZS)μp€IΩατŒKnVφΫe&=£ρ’½ΨNkg!š^Ύk§•½qiЉώ‘OΒq_»ž‡½ΨuΫaΨά€Σ·‡n/x”ά™Ί€ŸΗ™kΎίxΑ"J―΅‡©γζκ!θΪΆ«Ρο؞vV`―ϊ%‘'τJώϋΟ…¬ZξZq(jNϚм1Ε KO0ψ_juΘͺ­wšG­rTeΰZφΆΉk;SςΚ#XυθζƒxS«κhVό*Zs7Žι©€υΑ­>vy4ϋTZΠ©ψ*9νqxκ‡TΟΛAχ™'i›3 €g”ζΨVm΅Ι€Θ__ύΣ_gφοOΙ6>7ψΣη3λ—ΫΤkπΈΘ«Ÿ€Ι&E7{κΪμγΏσώίηŽό°ηί½›ξ†kqfόeDλρ*FόuΧ}gEX‘.ΊΙ-Ÿͺ£;₯ζ\?Ϋ[βz¦φ ΛπC~(bΏOίίΉ8œ›0UΔύ^΅ξΖΉΑ²ƒzχ}_W\Υ—ZH­­lsΕUyΪjQ̏μnΊaU69.\žεsΪ/{ΑŒ9ή|1œO8«sμΫχλ S¬Xςdύέ/ROτiχa μ\ψΕ€\!’Qڌ‡ΝͺΧ8lA ΨΕΪ€S,ίyΨΰG»¨H²ΥΥδ:YE ώ¦ή¦.Ό›ςϊ(Ώμςά΄8{Jί’ΑOs‘‹¦Ÿ‡¬#Γp»¬ΞŸΓάΉ-Lψ[=ώ°₯έ”v‘λ§~ΚΙηΣ+)ςdˆέΣ_Σe εΈ&’Ρώ”ύ_Šΰ8W‘r=WψŸ9νo7Ωό 1μ‘ΕeνώsλΡT?’l0*Qύ=ί–ZύgΣςz€έ ΰ9I^Ί2ŽΆΩ yωW¦΄‰Us¬ͺzO’’<έΎ/l>Ϊ _ϋ£₯JͺΣ™šπμοι _ϋoΟ0χ?φ\-šF:άΩ2¬~;—!r8p‚Ξ|6HΚd°T«D$ΚDά0eΈ%±Q>ΦΑr«”ΤRω‡ΫΞλΫQΰ5ύX4βΏGŒ‘ηω?8‰κόjμτόΧ»ΎαοΫJ‡[Ήϊ†KωΏ7Œ»Ν΅ΫΓ‰zumι1₯°{ΨΎ¦ς₯ϋ²~έXz 
EŒι*ο/Μ!Ψ(3ώ›οΓ²­${}WƟŽυŸρΌnVΛΥΡ`ηz[͍u]•›₯ܐ’Oh9ΛwЌKd±ήΝίc€»³~Τ«)‹½’Ύη]Λ‘ΝQ4”RΪoϋuί"J<Άw\Οά|8πOV{9diΔΐAε?cYηέœcEζΕ$Ψ¨*rΈΔ₯ƒHτ₯Μ½²‘ ρΝήΙ=ΝαΡα‹1ΗD­»'j}τ"ΛΗWJ‹n¬ΜVΫεj΅>%C6>W υjP‘£Z―νΨ¬dορ9KKx$ϋκν·b PŠ<Ύw]šΪ”AΟZB‘³W• VΏ_s±`φΘΓΆUΩ΅SΫχ―/&“«J|u§ώ=xφ›* S šA•Ν‚++ζBsύαq: YžΨ] ΧžΠviαβΐyn “ε²ΎΓ΅;ψƒ₯Ώ©aΜάSC€ΓΗ™½dάg¬.δί€ήίλόΏDύί2ώΕr큼ΐηκ­σ?s9ς?>$―JζΧάτνŠ&owηxϋ3ΈύOX— ΅¦ˆL@ƒ^ζX­œάζ^½•\ΕΞΝΜΫΙz΅šΧXγξΕF7ΊC‡ξΪΧΟU8&t~87SZ-Ψΐευύ>)λί‡gυΊ¦qHΚ©AλχΪψ«ZΔ}—s¬σŒΜE–ύ0qoτ{œ[:Ύv”₯\?Λ―uzΛγμϊg>έΑ•Υφ‚C»΅ξLeΥ™uO—OΗχ»›ϋηόQ=sΨJRγφšμ΄ς\SΗΙh=bόS΅”»šί‰‡³HΥλ?٘υϋ_‰ƒύŸΩjZΧΑσ?½έοV›γŽR–V#kH»κ‘]5T‚ΤmL?ύύ_ΥBυΜλ;ΫΗUž3tΝ΄Πp³ΊΈšΉŸ³½–zζο}-šμˆζ({Oΐίβ" άώ폧»ƒ­Χbδžχο‡— πc,ϋx4ώ{$‘Ρˆ5ώv…υΜ~»] <‡E½»dtΰ*Ϊ―΄zύEθ9‡ŠΎ ΒΜΥζ%θμŒ>°σΒ^$­xτΞmN>؝­ΝMhεA`τU­(ͺ·‰Fy›§Ί0);z6d“λΚ¦ύ!5 Γ y—΅phσ˜wΈ¦+―½?Βιϋ]έCαR؟Λώوm „Ε—Ό0‘°υΤΛψτͺΐf4/ϊαΏŒ€kψ_¨δ#ώ{Τψ―έ ΰηψXCώ'ω‚»ΨώFнαΚ5ΗbΏ›™ρ.ΖϋWςκ C+έU@Τ“…g`~η3%*70χδJd„XΏώ²δšϊFΗό‡©τmxfό wΝ―FώΏGΰ?Œό€°αϊ`„ξ&τi`οω6[M0Eysδ ϊuXsώdT7₯L”‘†ζŽuCY¬=U‰₯Β—Ζ΄2OWO7΅ϊ5{cέ΅₯5Λi0:“+Β>—d<=”.δƒ0΄Zζ‡ι8ΏCβρa™8ΊœΕρ»+GKΤλτά—…ΌZΞ»"ή5ΆόέdβΊ&O±1ώχ°ρ˜δ\Yο γπΤω?~1Δ·ύ―*vΏΊΔ•ΈΌ/o©+Ύ h^†'KPΘω­έΜΦ8„’μιΥΟΡ›ςV›§r-n7pϊπu»ο'Σ‰‹±ΣγΖ?O·{ώ/&H#γψ? 
gψ”Ÿ_&%g0—φP ⨑ƒ€²·Έ[½|­ΎΒŠΕϋNΎΎvY—·«ΛyVGψΌ*OqEχpή―cΜθϋΊN?…ΙΡώΨψ§`Έ πsψO2QΛR|δύω?₯lŸRβώ˜ΞΆZΫγΑυ\žΩΒ·E£ok:pžΝ―ž˜>¦D<2Ÿό1Ρ+ψ/#+l†Ψd²χ‰X_‘ν~ηŒφlάX`²]—~Mώ·T#Γ»?ϊv؞―ύǟJ*jωίDFcόχ1ρΏ ί…ωί €C½έϊΝn{Μή^c/±c#€tπσ ²ΤΫ΅Ζ*Έ³―“Kxρ ’―xς =^OΌ¬έλι΄^πχβ§ικŸpa‚βU{v.m>ϋ‘IβΩυ'―^Οw―}󑓏“,^ψŒyγ«εv΅ ³—‹Η·tφ­χΒu?Xzz±Γg«ώOhyEτΛΛζβOY9(Ύ’ζ; M]Τ|—£γ1fάν-7:^€ϋNλΝm7Yon5έjL—κΓσ2ŸφήΝ,4¨Υω²„ωΰ+ΈΚζ˜•eΑΜυ›"ωξqΛΔχ“dJu?a.½ΞΒξxBΦΆιοΫCeDsˌͺζ ˆ¦₯€r―ΛsΛJψ¦šfΦ|υKRΝκ—/fU― ρsρN]Π•{^ƒ±*—x…yΌι ³6ϋeΪ:¦ΪK\4^Ί.^αyOiy•«₯ΡΓιλjυ­νUOG4_ mΰ[£?žοo<Ή»Ήw₯CxCŸοΚε.sΧ–ϊ3=·Υ«^άΪ¦₯σC{ƒ›qϋ₯³ΘλΫvννΤu7(Ρ{΅έeν»8ςY΄]σzΈΩ”χ1LΧ’€ΎŽϋ\ΒFΩ7’=R“K²ωΊχx΅{­Ν4K)½βΗs›―}e-γγˆΦ ]Υστ“\Sa’Κν₯Š•Fξψσkε–οίΫύ\ΣφΒΕκδkρβuΖω“Σ©GwχŽFW­ύͺ†‘/Ί¬’²˜?ά”nU$Ό,«“j΄)U«d‚ή‡΄n΄u³ή܁Μτ8ΠΉGλΊσεΙΨ«Ο|w±F`ΣY]S!˜ΦΤ’·CφŒφbJΕqre1τΰ–^_+ΡφΊzΏ –wO»·ΨΫUΐSΨ/³t”§UxJoυ”Αέ§έκi³_>ύuφ―ύi½ίΎzχδV?–O…ΰe;„­‡Η"¨‡…ΧΤDlδ}πψηŠϊvxnόΉδuώ—±Γ#ς?ήδμ<υΰΥΫoΕnpv²Πv³biβωΣξWyfg=ŸιmY g„ύ|žw έθIJ/Η4žϋΉ?Πδ¨>ξ΅ώoVΧπŒόO`όSdzώ/Εhe¬|„όΧΛ·Χ,σ,Ms~3ϋԟ'@'$ο Ώ{υϋν$Ϋ3Ÿ›™=ζκ7δ›ν^gΫ4DtΪQ/zišzGnϊ₯ψ>~Šβ\Ξ^^wσ·ZΪΨsPυσΧσ―'#kO_R6ΨNΌΚΞύ©Œ{ΒΣf»74pυ]LάƒkκΓ‘”σς1ό_,νΏΡλt+8Χ;’uώOρŸθ3Ϋ…™Ÿηεx”£ΜΗmsύ²Ν•4 )³Z™¨w™ζf_κn³έΆ`UŸ}έ¬–«c‘`φs²ϋ=x™WσΈΏ S,χ‹IAŽ&(nΚ,Ηcuaͺ‘Ό›„ωJ ±6ζ%»Ε§Xδxh/dR+δΥ7 ΅Π μtt}όiΉZτMbίξ*™”ςKΣΓLλ[‡ΖΗZ±lεN³ΚίlRŸ‡°MŸύŽa@ΐ‡i‚Πo5œε ύ½ε―ΰι_ύ³FΫZ½Ÿ£υMeμέ#ξKR ;Ϊ­χŠΌ]–ΘΦe`ό+€¨'DQχ³-N±ι+¬‹>ψοV ΰό―ΖώΖ§lΠ €3γ/hT‹(2ςΏ<—Έ=0ΙsH{ ΥΈ;Ά˜sϋέη˜]| œόG·#σΐ…pHΊ1Αμ!Υωoz·ών“Ωθeš’~›.Ά_2UexΪιz΅Ίϊrx£»œπ˜Ηxρ,)$8~Œμ·FπβΏKΙΤ-εP7Ϊ½Μ„χ-ΐ€²¦Λ ωΛΣχ,·ο¦Hαπ1㏃o4―°ϋΏ=~όΧοR!šμBΗόGΔΐ€Pβύ©Εtκ ΕNCl¦…)3=^”)ρ@ώXύΠo‡g»C€¦Δ­Ση2]l†ν:Φ7Bρ;Ϋ²Ωφ/iΰ>>ΜrKP$'i¬f±ΤΗn«ƒ_ψ[Β/w‰Fό©Κš>ΌΨ@΅vΓZw°z°j§FΞ©N’©d!cλ΅sq9€M7€]ΦnuUώΒ²*DV~»½gMUζφ9o„Γ«QcώΟ»α»1Ώχ¦€ͺŽ?‰σŸ­·oK;Uyδg‡)Π?’ρzί ΞJBX­Υτεδσ[V%’Ÿδdώ Υ43Dιψ™/'9(=ς{g[w‡ —l ƒ5%«”΄Ηo?ζ+νŽ₯TΡ3ςŠωγC΅1ε/φψ/δΘ-W»YHνœ& +₯ή„­S Ί"― *½Q΄Σ›oπ$Σωκπei“ΆD΄Ώƒ~:m[ϊ]ͺη³e^Ζϊ$ϊ§ΚόψΪlQΕL}Ÿ"γψιύΠΤ―:UbΟβKn.ΎϋΝι]2[ρ”)X0 g/Kδƒ?Ξ¨³Τψlς‰IUS‚ξ:…”Ί―<‘Τa+Έ9o©yΑ~3ΟΝβzώΝΝ6ω―ۚτξhΠΣ>.ρψφΠφμgp»pjαMŽ ^Συ|(kͺΧ³–gm:t.d­ΪΊ‘εlΓ©qqι΅OgΆ]Ά/Αr'»r³²ίξΊ*žΨzΡΎtΝέ\ΝmNο}Λε§9Χ-™ΊWτӸ䚝~©}n,–μθŒnt²₯—™Z ΈlΥ΄ζm›œρ”n5[α2H‘ΐ­έ¦!=ΰι ’SkΧk [θ³ 
§Ξ–»ΥtZφByx:―žӏ»‘Šϋ}skJu! ιHΝώΎ½TΪό¦…σΟψO`ί[φ"Σ΄’Ύω1Ž{‡”χJ―˜†ηL―f0kt•υ}Ι0Χ cχZd .ΏχΫ4)¨»kΝg`¦b.ϋŠ•“oj‰Σ«N%ώy>όλΈπ[‰πK—Ϋ Φ- dfœwχωΚ6.£•οlPŠΏl0Ptv ΐ("$έΉηΠ;4zŸnCχo5taŸ‘j=N΅#PG]N―&EΥλ}ϋ~S€λΡ ‹ZΤρ$mhŒxI«£{φ9Ίw~ηο³uκhR_Ϊς;ύO ΠΜηΦ}σΜ«Π~ηΗ~-ŸΪ;HΨΏAΝ5>λςqΕ|οnΒΡ03sԊϊ²Ζ"­-ό™vΖΪΙl΅]VY{ŠbΘ­°οω’ξΫUΞχλϊr7_%λZ‘ύΎOάΫ|–ρ…7œP:¨v*Ϊ ηΞΔcΞv ΏC㍙y½!G1-o/zΞ4μΘDώ©mGυμ³ύ<ς œνκq8₯·Gι¦εύx4ΕΤ<–ν]>ZΪ΅|-ϋςoά‡3’mWκΤκά™yΌnν?rrTΤύΫφ]»•T §Ύνξ;›œό"mGΊ‹d0©JώεέH:r?Rγͺb/ή£“IKξHΛΪlνxRXž΅cΚ+΄yχq‘6ο>¬Σ–½Η₯Ϊ΅΄ZiΝR^ΆΥrŠ|ε>ͺKΓB6|)¬ε {ΑόšYš»œΦψ%…&ΧφjιN—j[ςΥ©R^γ ύ_r…ήΠ¦΄Ϊςn0ΆωμΓφφ†1Ε΅yY˘Φ3/k“•CžR²™ήο^aͺ€§ώυΥ?ύuφoώτ—lγsC!›mw›™ΩοΌ{i±άκ4L«ηO°’'ίυ|οŸpšbRψΣzυYvžΜΫΣΪowOΕ_π| ιh―«…_Γν8όΖ_V›—ηOpν«Λσίώ·π ΉΠ>ΒLxξωΜϊe:”ρw}υ“4ίϋ”τ†V}5Σ lϊŸωΧη~Y8τž­WΫ|Π·.3ύ―η²·œŸ|Άπ½AΉ|± γVΘ—ΓΤ,Z³ίΣT‡σΆώ>εδθͺώΤ~ρΙ1ύψ7ΙΌΧΫβxΪ$s5]q’нξ~ Χ‹nμΣ…_^©ˆΩdΛ'ΪΉ ΨGostZœ»Oz±Ϋΐ„ν4Cwνέoφm±}‡»,@ΦΌΓmΆό=n²π‹•:e5vΙJ[a•ΓF‡~;w‹ΓιœΞ=ψ’2,Υ*‰27LnIl”u°ά*%΅T©λΔε·-HΎ‚#Μ[γsQo.½Οdλ7 ΄R {ΨdwσΜR>φΏžͺdψί―ψ_~3Ρa^έ4Ÿ™μ―κŽΧΥςεεp‘,ω‘xΓ–gψ\w&εέ€φœBN ;=f?wϊμΟ,ΊΖ>·x·Ά―]uλΙ5ύΉ+ψϋΉ!{κsG8υs›³°zϋSΒηφΰυηυη³Ρ†ΟgBϋ³˜Ο€Ο›ΟηΓŸ[’ΗενΥvΆgv=υδ+ϊŚΆ}=8ΞͺΫsa) ―a’§ΉŸΫ#jŸ[ρŸkΑΥωw僚mν¦…άΏΚsΊκ4l[p…l’Ο-l­;šΏg!ΏισΩDΎΚη9I–ζά¦Ο­i‡Ÿϋ$ω}Ύ&πωL¬ϋ·Oω’;?dώέ:][A"Ζόχ«.¨κ;ƒt?₯€VAi4ΦΏOύ‡ΩΟζFν`α­VΖ1Χλ<ί…5lΤk4Žω£/ϊεx›p±”b9ΛwΠ$+>Xοζo‡άPY`Λ šsw›½έ­Φ»γ3Υ“‹9ϋ΅ά|Tνύκ4ΣBσ*(¨εͺ;{ΝΥP₯χ%N«œ9xv~Γ-Ξηθ_“Q_»QοΌϊΎ π wθ‘ί7c½νκmyλ’?W=TbyνΚ]ιεχNΤξ™Σ\{ζ?MfsνΝ»\Ώ[»rGο―›_[{Λ!³l/ޜk{AŽlνͺ]™²ί+ΤΥ3S“k€£œυΚ6ΜΎ__βρς§ކΌαΊΝyŠ 9[—dj]`XΖbša{Xjͺܐω•AλsΎJηwg{•ν™ηΥxΞ»dx₯Ά;-λ|ήΥωŒ«bΥΎΝ{KV΄Η]Υ”¬Μ—ή•Š•~ΐjΎ•»g VgšUs.Υ…‰RΏJ:Τ!q[ΦЍ€0§)PΝμqο’οs.‘§5eηͺdœ_:ίζ0_Κ@ι$δ©εŠ4e‰dΫ›³Czδ…d‡]”R?ε” Υ·GBDtλASŠzξ—³Ÿ• '|žΩ f„X FΖΤ?Ίwψ`ΐόOTώŒfπ 8Χ7ͺ?bμρAύ%‡—wόΰ_ΈΖeΡθxΈΨo7&σ™ήeN懴ۋ₯2ΛκΎ†Z~ΓvS-? 
ΣXˏι4ΧΗ΄lωώVS-ίίa¬Ž8‡ρOΗU έΆt§ά–ο9ά^„έšN½ζύIτεΐΰ ύΟϋΎŸώoςC‡Ξι₯jύΏx4κGπ?†Ÿͺ½œ|ο,ΣaSk΅Ξλώεi·Ωϋs‘«ƒš9s"ύΒ0FnΘO0κΥyzύYͺ‘β7ϋ₯›§ΡΗΞλρFΊp’ν‹ΦΤΉoqφΫ·jΠ cϋ|λ…±wsεΒϋεzγΓμ§wΣμ˜ιj9έξΧiF’›bNkXm²ŒΛ’Z¦_Λ±("΅šοΉτΜQι+Χ7ΒgkυT—χυ„ΰ"0{«ξ’Β{n­Άωο›™^>ύήpƒΩ%‡Nژφ²¬†αͺσω·.ήζSπ§Sε§9+ο%CWθ‘Q?LL }‹¨ηqωΠu猢iŠlΕyN‚›K<Μg)1B_ΰηΝΈpΟ³μ~‡Σ‚OάJ’ϊεΛ δ¨§Λ – 6lώWߌ―Ύl†d ½ˆή¬‹ΦςCφz(ει‚L’Žά‘?cΊΞ0Νk.Λj)δ±Τ)’ϊ§*ž’―iAΣ«K›/ς,ωOέ‘w+Ν…IνΌ1­ f!4Ί!‡ασΈ*όίIΉΡžΏ*”ήκ%-sτw”ΆœΧθ+½Œΐ8hμθ”<ΉΩΖ[¬Ψo3ΖΌΠσϋ«‰EΰΞΖΧΖCΊ€―±ϋζf±€μΨΟ'kzΦ]]ͺ&;Ϋ8§΅{)ό81‘δ£ZΪ–¦x7ύ°Gn:tΪ²σ›EΆE4΅₯ιΣ…¦άώˆ6·‘9užΊij[ΡYW™Ω†m£‘zηώg―{χ*¨?fαR§έͺN;Σ’ΰvΓ|$˜ΏΤ―Ώt•»τ¨K9[rvιͺ:8u.#ηκ†χ'QΝξlΫ_cΓ_Z“ΠPΠ3CΚLχζ”φάμ΄/›Υ~]£Γλ6χ2‡Οyƒοb^(Ώ?έΞ~χEΉ Ϋ‚όώ¨dSΝωΰΥ…pͺΫOa˜EύG΄†σ\cK%Gϋο!γ$Μp.‹ΰXWϊ?SJΗρoϋvϋοΊξ7) μΣυfΐΞ*=±T*5rϊχ’k―p­7β₯Mν;y£λtΟ}Ρ‰Rπ’ŽΗΏ„ό \‘‘£!γ Ύ›ŸF ωίJŽυΘšνΒ\Ώ\ψ(μλNώ 7‹Cš8X€‡―£Sͺξ¨ώƒΉ™?fΞΡΆ›eͺ^RxδΞ.^°¬έ­ΥB Ρœ)rRλτo΅33΄ζjτΔo§Μ£ SΪεcνEGύnϊ?Sυ !Ύέεγ/—‘ͺρ 6ΪοροΏ<ύ΄υSΉ€Υ{|=…ρφτί^όcΧ…4‰*›%ŸώΛΣ½ΓC—+ψŸεε»{  μzΉΧσ§TH-_ΰΐ‚μα Ǝrμ·£άΒF lžςθ•εTKOΙ΄qΛΖΏ`‹˜·nP¨iw“Ή^Ύ|ΝD|™­&3x¨Ÿ˜ΰν·ν>©s―h`& wΜFŽ3Ε΅ΥZhΈαžE*&ΚͺXEΉ°BΙΈ Tkλ05α€ΜP|zz~™-ζ³ηϟ~k{½9Τε˜ "¨Aί,0Ϊ@aγΘPO¨qά%qˆt‰Žƒ5&¦ΗΦ;#hΦo„ 2αšΛΒ›Ά‡ΗΚ㋁_:ζ#.Έ‚Mœcξ¨!\ΐ›Κ“ˆ„H K%”qK:ηtΠΤj΅emζΟ5^Θ‘=aD1EB;ŠSΘjΑ‚aIπRRg¬ΣΤ nCD4‚(e˜†V0*πW8§,όŸΉ^§ΰ³ΨιαM0Ο…§ΫΊ—Ž™‡ψΌφώ|ΰχ%α­"Ξ‰ΤJ9Κ’˜οM΄S‰ °βˆ0±₯„IXDZ$~D^Ξβ(j^S{QΉΥ’π«₯υSληστWώVӌ€|I`Iu|‰@©ήΞμ·Κ‰ΠhΨ γ*}"] “!f1–6iœŽœ0 411lt,ζπ§’πm$‡₯“˜˜ΧdαφuΣρ†ΛνlŠ©CU!JŸπή‹Δ€t΄πμ>»Φ±K@D Fœa<ζA‘§BΔΜjnqπ |%¦ρύ~Μ–z={ΚrΚ:ήςνuυ£.L‡Β(f2¨˜eή0AI€ŠE°πڊKγmΰή&xΓ6*œQΑ8Ιm›8Ϊ€i½Ρ›·η†ΩA_€ͺ$‚uˆυJZiΣT e­DX‰'υLI£:œΠ}T€΄h};Ωώ™T}ώxΰηwΪx©iLCΜ£8‰xbi",n„_ΞyšHΕA[Γΐ8’0FƒιΘθΈυω7ϊν;ϊ*ΟOιΐ/`bρο@&Jί8Φ&ρ"DB0ㄏœ‡LΖπ‡”†ΑŸ^2ΟΰE 1Ίq‘,Wn³Zw,°;TE%±±R:EHωΘDήSE”1αΰN$‘€"α Ακτ8‹e,œj|·,i4 – ψ4c3% $έpΚͺμψΥΤΥηj½ώΠmcΞ‚°VR-,X,ΔΖ>ή%2’>X @‚(ΔΙJCΔΰh&6–aD?Ζ! Ρ¦νx f«Ο4£3έ²ž-±ΚύοήΒCΟ°οΤ™O΄Ϋx]Ulxp'1ŽΊρ€-@E 1 ―UD‰—οLˆAΌκp G¨ZvCΏNρ &³Εz^ϊ&ω7λρφΩΉχώ<α\*˜‰ΡΡVΔ DGΕΉ`ˆ&…‹-#‰“& ΤΑΜαu›?&ψLΪnVΩbωΟύj—ΝxΏsoŸ– =Χx5Ψΰ+ΓΑ|1¬‡(R°`ςS‘=±°‘πDΕιπf™d@Y} @€G"=*Ρ_,ΪYUμΛΑΗΪ‚ΕB$KΕsά’(V‰‰x€²AlΒ#ιajό Rκ8’I°! 
X?aaπΨ  ωωαVΨΊ·“‡ώΰΐ’„°5ΡαKŒh/O-”χ2Ε΅€©ˆ΄*Ρ6ρ6H0šš]€₯a΅Y—αΜ‡Ύ|9Ž}M|Γο£Oyνύ·ΩςeΊΣίότΗλA\ΆΫ§Ω‘iΤΤαž.΅ΧyΊ£m’ΜvΣ0σsWwΦ Kœ5`lRΛx€σ  UΖ…Žˆ§‘O,4'„ε[/Ψ!Φ£ύιΓΆzρc7½<²3$€uœ€ O &΄‹±¦Jlu&fAH ›Hb‰χŽΓla.‘Φ$ΜG4ιxtΆ”m6Όƒ₯|±€,(5]΄bPCqœ%‰T’–BK ΛΪ©„ͺ„·=?Fκ'fͺkS„dxyf₯*8姝a Ր¬₯ήr RΗ) Λ’)‰β„a„k8bIδEKŒg JΚNR§lΗZY­P₯΄εΚΗα½ζ6hP9"1η;^`[ρ ­™‰biζBd§R rBYλ`πΓΓ¦α,V0HF*+cnb* ˜˜ΪΦ8Θ ΞV­jŸh΅Νσu‡eE„2A}[G4Aβ0ϊpœ$2ά λƒψdJ&‚(zπΝΆΡΉ%τϋl͚ΐτJ_rh©q₯)`F°fPژ@–Gή Ι˜Π^œΓ8$!4VF‘UβlWGΝͺX{™-_ΐ²‰}Υ;˜VκΠσσ`œΐœππ5@l‹HΒΒ‹sπΈ2ž“8€yκΉΘE5Ψ[΄ΕuU³WΧίΣΐT¬:^ŒΡΠ•2₯‡Οa@«(Oΰ=5$Ϊ2ΔI‚uμ+ΐP‚ žYIβHD"[=WύœΩίuφ:ӍϽO^­oΎyYMΑ ΧXΪUΓstpLν9θC OtΐΕIŒ3XŽPLγH γN c βΨFXˊ^ΐc?eψσœO’G,Χ’‹πήs?a ²yM`’3Ι<ΘmmΐΊB+Sž&‘$’Δ ΎV’¦0©ˆ(K(u‚΅©ΠŠŸ–Jχ8ƒΔ‚‚³ΖUœ(3°TaΆœb0c-w‰ζžCiy˜δ€χh’xΣ<ˆ_™ΌοHϋΈ8ΫΣ#\ΆΘ{»‡Ϋd=œΕ}β §°yΑχ{«“χTΑpΦηΫF•|ΨΉ=ž•Vtϊ‰ύv5ξέτ?);ŠσI‘‚žώaUΣς}M'·oŁ\tŸΒ¦Αε`MΡ]ά6»RΗΝ½σ£`q{gbXΐΜ':3hƒκg°D˜–¬tœF"‰-•ΦrbΑšŠZsO2UVνo—Ζnκ˜fpƒ=Š₯A·O‚Ω¦θ#dΖEΦζ²ξL ΒG^rͺ.H&―Ɗ ’-ŸΆαροv5}&‚σ$RTΖ±±DQP»ώ Œx/c捉Hs#|0![$ƒa#­όŒ‰ξώΉ_1 cψ’$ eΐ6•‰R`j‚ΐyψΦp2θΠ₯€ν˜5 šGΆζε~1ΑLϋƒ˜Δί±w΅ω²ϋ­λLvΏΧr!ˆ8δ$hGx,Όp XΉ³ΚPΖ0Ή;G:6!ϋ"&>³"β±€^Σf3=ΒLΰnΆ~ςϋjι‘ι¦γώί*¨ϊaΉ°11<ŽΑž₯`1 šπΰ1ٍζƒΖ€:0«₯dl €ΩΊY‡’Œn6"`Γ|₯]Ϊ΅€γεΧ·>β&‰Š‚4†9Η Š Φ!€€₯€1θωΤ °‰|Α;2ΒΠoεšSΰOΝψ²iŽK-xΟ½-8)ΙΥΤΟ*κ,54ςΚωɏ™Ϋ½ζIγ°ΖΊλ{a>ΕuΉΥ^%l]&“ά‘Σώ5hJβE]ŸτngFDζΐ«κtŠ »ψb”Ε#QŒXΑΌŒ #nS'«’ΪIE9HBΑΔΰ”Σΐ‰₯l·¦Ϋή++ξKK.–jJMeΨκΆ{θ•AZ9jŸ‚»©ήΒX€oΓΨ‚°­/W¨hξyΞZx]pƒ:ο™τьΦ'Ϋo³υ|–Ω.Lb<ΉΝW»¦OXQ_%WΉσΫ-ŒηzΎKaͺΧ³ž_°ΊΊσ Β¦bž>kλClό$¬φK0μρ3έΫ΄':N]xΑ”Χ1γ¬ `kΠ(xχA9¬ΒzO z΅σΑ^A@F·$2”^βM-/Ž~Ÿ Α«nΧ{Ν5θΜ"ς‚Ξi(Έ Kΰ„Soϋž \―Α††έ΄-BnΧϋ™›`Hͺž?;4ŠΤ”&Ek„ pr–vŠ0ω%‚ƒΊ΅BσΘSLΐ„§’$N,ΌSΌ-p{δ_>‡Gρ”Is1€δθR‘‰‘γ³₯%Β$‘| v˜§A]ζL‹ΌΙ’ίr£©}ΚsΜVϋΠλ #,ΞY휀0Vq4r:"Ϊk@€„Κ($€/9?τ[&•β4n¦ƒ\­Ζ ‡ŽΑ`ΜͺˆK!˜ˆ ˆXZμ, “Δ1f* LcΆΖΓ΄’η”qΝγΝσΧμ!ΟοXΚx|³Ιϊ σν6σΰŽ¬­mΡeQυΕT ŸΥ*+Μ” \qwπΡlτ[–NiU—€5ΉξφP’ZΎ‘dσԏzώΝΝΊάdΗχ‘‘Ίœξ>ΧZ”w22`dXΓ#°8$˜Ÿs˜™ε4ςΖ%Κj½K§ƒΑΤ‰ΐu ίSβŸΖΨoΒΔGZ‰Ϊ띑ξXQ± Ω Φ “Θ%:‰Ή#%>EΚ.q6"jζؚ‰"ʁ‚§ΗλγλΨεΧ›γμϊe*`[φEC sD‚Y \ƊΗTƒ€εŒnάD›Θ’§46D: ;iN€:ΩβzL:έZG/mU―Ÿ9F¨Vήp‡S]cέ΅ ¦1fŠqΕ’ΑΘDKάsΐ8.ΖΤΈ`’Θ΅TΣΤ2ͺO8ΦΎκεΟ+[Ζs*Ϋόze_Σşώρͺ7ύ§Κα΅» •·Νπρ w97ŒΗΧΎ·“ˆ(¨ΛΣ( ˜F# Δ2¦ε°ΡͺDi Φ‘S +“1 
ΊΨcα/gΔ₯ΓωNŸ/›Υ΅0τΗSBJK#&€°zA%5 ‰΄°ƒi OΈŒIτuKa„Šb°‚Φ_ϊρ†Ÿαο4ιƒΧ,―‘e1LO@y“HΙ±@λ€=A₯aYpΑ$¦°r„$δ’œs`z³ΗΜεΖΤ§U[Ÿι€-XΡώe―7n / φΟmΌεΗ !#Ϊ•1χKΪY~!ΕOP›+ZNΔ‚Ζ± υqp”fc€47}Θ«oψ™Ι{0drΰΈ1–cZŽ‘˜Ϊ mœq”j’ ˜@Eπβ†Η"V‘TTPU?ωΨ“±$doœŒχ™β—MβτΞ5Ξ•Αka[œ΄Žϊ$XΗJ0^ΉObo”ΰB½Š$4a ¦π“Y@u|¦πΆξw¨τ«ΎC<Ό=ωΔ%IΠ ΐ%0…‚O+ΑΨ  °σFsΜh r™ΐhΜΗQ€OHfEχ;Š Z“CΌεΫz·ΒJ‘ϋsp Vp±ΤΔrβ›Ĉ#i"Y V‘'Κΐπ0ή‹R] ί$>@k­zeδΓZί›]W(znͺaΥ‘ΙxƒGΦΚY!15sς½€»,’°5Š|ŒDcV“€š€¨cS+Ήj1λΣLυάχt’٘νVY‚χζmί'ΡϋxrΥ΅1τ7ΰh5&3i£HΓŽ‰Ρ‘e0ΥΑ\ Bγ„OιEΌ§Τ(©`nhJΨ«8₯œήΜ1Ί|χ%Όb‰–œω ”°|%¦uˆ+'1Ώ=¬g0<ς‚S›/―‡‘n6dς§?ΕLJ³o=mΧα_έ{°(3ή’mFm` ―4ƒE0KԚ Ψΐf5…―‡8…Ήδ́°Μύ©X•?΅z{ŽνGOςηN2ς‡Όϋ'"ΦqsζACτ"&bΘ*“(­§6&Š•4r ¬σ‘¦œHi)΅b–φ›=‰DœήΎ.*9β|Ž>ΒΙmX ΪLΕ.DPδW26Λ’i{$$šˆX+ξ‰ >’€>φ<±ύ`η‘χeΗϋbΟΉ™»;€ Μ}‰―²J1P\ δ;γ’ΐλamœ‘/x“x‹€EΓ"' kαΛΪοgλΥ6³ο ΡΏϋJ†Μπ‘°`-w‹sK K€½δ(eŒ TFEΜcabΗ V1―mLΈΤQD̐T8YŒc²τ?jή½Αk*hΰΐ‡u_HPά*ΐΚεV*Ψ.rrϋΘ«΅·^z[ψ@ mΙ Ίι΅³μœŠbxWŽ‰ς‰²2Ρ‘hξ8β(bΞ1η2’9‘`nƒ ’ήJ{Ν¬†—”ϊΘΝ^|Ν ΎDΤΑWΔQ ²—H jŠq˜½"α”`α€c†S 󜰘KεLΰΫθ »₯DΤΝ6[˜Π?ww§Ω΅₯9E@MƒKi@¬Γ'H« Έ ½ƒQ₯·0Π` ΚΗΜτt–€ο³}ΛίιΒΧfχHΘMΐfC¬g‹Ι›Œq7$‰!-Ϊ#ΞjXίΠ8η Φ1@£dηžΪΗΓ!Όqš aE³ΚΡyMuδ%cƒ 6fΌ³A&΄qB -m3i„λW)΄ρnυsΊίbΧύh°€lΑ2ƒ{» Ɲˆ[Œ Θ}Έ#–†PfžQX |$°Ζ)`πΖhma D{Kžƒ‡©šΚΑ²14Ra©ΠΪ•ηGƒ:‹Υ1 ₯ΔΤ)L…J΄² Ζ6ψΆ"|Τ&ΰN7 Mƒ'jHšΊnό²ˆ$1`%^&†!ΐΜKˆ7:NTl…0…£΄Όg·δΣΐ9θά¨η£\B\֞syJJ:OŒs–κ”mxT…ͺ£&h|@Ύ­¨’Ι[©ώiΟ½™ΪιLN;“Σ)³r΅˜χ™ ή­λ&rΰ•μκ}œsM£\άΎiΤ;XZ³ξ λtow«υ/-R>%ΣΜΰžŸ£>‹K<{‡b―ήδ[έO–Μ]GγV†ŽŠ»κ\şV°Iλμ.+₯k, +-ΉτΫ83Πςλ―SAάβάων•ΝƞU“}_βΜCfμω˜=₯viςT'Kσ_5Œ—ˆΚΛ‡/ύ2ι(n/”ΎL”X΄J%BϋΆ=Ύ.¦ϊΜͺZΟUΒ±˜ά'HGEK„±‚d‹ 61¬e`Z V0'lC0©‚§Κyb’_ΐ‰ί³©%6₯λ~ωΌ³[―ρθY*ά0 k5 ϊυ§^¬ad ½‰+:nR¦Πμ«Γ³φ Mξ΄|Γl²έ――aδτ?½έοςΨΒxήΔΪΩ©^ŠEreΠv,kΰ½½Gΐ·Ÿ¬τ>/E»@mό5ιWθΥΖ’zρHׁZT'Σ»N»Ύp:tQ–ηΗr†¦₯ω /·Ÿι€•³—εa΄Ξ ό}¬m¬Μ“ΪΔΈq&€Kσήυάxn΅2 Ξ&4H‡‰DšSοcΕ8eΜϊ˜l ‘>[Xι%ŽΖ΄%έ€‹«3κͺ»:τ'­…§‡.u \ZŸΨ8˜$Ζͺψ…©χœ“H²ΫBx#ΣΎ+Q$)΅GBhξ½η’­¨Έ€ͺƒ7xπ%†‘pΤxJ8Ζ'\¬b2ΗI4lՌH“x7ΒcxΙΰ(/ !ώ‚Αk­‚xθ§ς́ »Ζ±ΫθJ,[2lY?Ι²ΠsήeɁ猿 Ry‰ΙΣξO!³ΈRσt ZC] |‚‘]_ΤqaƒΖ4=А ±ΰΑ'žSnŒ3Ξ½νlω­ΜΧ©·ίšάΏu~Ξ–"ζ;|V&ˆV±a‰Žcζ‘&€ΩbVΑ§ΣυE” 3Ϋ'Yΐ xG΄%3£υζŸηάΛΧο1‘Αf¦ œtŒh€DΒv §=Ηδ·Δ$Bζ!Γ`Ήƒ4Φ°Ϊc.°χHβhχ‚8Ν–»Ώˆ T+$`Ÿ™ˆq…uD$mΦoγ(ˆE 
Z‘”,ΘλΈ#ΉlΫ&tŸŒμΒ’ΉϋWΡ֊„1ΰΓEbΰcPs©Κi,UΥLΑ  $Β[a°Gˆ>7ΌMχx ΄¦X‚υͺΦ°=-! ΊJΊ&Μh€ψ&1ΰy₯Ω2 63Τb§GŒgΛΕN™Ή“WmΏ+ΆθΚΦ--ςϋ‰I¬ŽXΐtΓ¦΄q@b]’lΒy ooΐβΝΖ}d0ΉW&ΨΓ0"X=ΘΌo:Νοώ1 ›ρ!ψή9#χΨΒ%N< ]bIͺΔC<§πα ‘…η˜ώΒωάK€dΡεVw¨χPΒβΗ…Τ³°ΨξŒJω8T”Πφ ΐ dMt0K}BaΉ%»al΅g^"•/w @έ2tγDž”!Έ?°Ε/Œ…CJυΨΒ γΖ ·μH€»x(Mh’’³ ν5LΑέηKΡGh*Αϊ@BC˜†^ΫΩΖ¬ΈυΌD˜,ύv—GΆsmΊΔΑοΏ_²­thLS8Qb’O_bσβ{g―TRN©Ÿ…n“ψpθ‹λ›e„α7ζ}"ζCαωΟ]ώu΅|yΉΰ»W.ώτ|ΊΐΉ[₯ύuν­Ž8cιύ¬EBΙ=:qj2‚mΖ7₯œ!>t\xAΉH@™ͺDH'ΐΦIΐ)36VLκ Ιω~8-―χ’_ξΟ¨j0IœEY`iΕΨ»-20χβX+τ@D\*P+T&Δ€rӏqΩΟ#XˆΝ΅½¬νݏχΤΊ–ε|IΣη’—Ά=}³[^μΆ1ΥεšΦΝεŸΚπŠΑY%‘B)omŒ…Τq›0– _ΉΑδŸ6ΏKR :N ŠWr€1mV](ά–yΤ΅šœ–UΪΆΎό.g5¬ΝλΑλ[") $€ϊΘZ †jŒž¨Ζn`Γ1ΐRŒ„*"c¬Oƒ—ΗΠs α2κΗΩδϋΦΫYšΓA. lpΗ 6Φ• Ε%Vb+I£rΜkdqβ6€ΐ’+P% a‚ΕιΌt€ίH~e€9Άύ3ώ=Ω.υzϋΊΪΎπίgώνό8υΘςπžW&°™%ˆ­#v&;’ο% η˜†ΙίΒƒH1ς“„€t1K@ΐK"Ξ”2NN9·WZL/³ΕΌJκΜδπ%ΌDκ‰H’XPΝ ²Ο…Œs0• ‘O”58]Y)ΰ3‰lI¬γ ΎΝu™ϋΧZBΞπΜ9Ν“ΘD Ά3γ ΰ"_Zsg‘"Η§tμ1IΌ9€„ύ0ΏA± ΩΨyA6Μ”ςξω Œh¬ŒiœφπV₯M½“$ΆqBXφ +Α#lλeI“δ3rΖάΫΒsYΜnl±έ£K[žwxoVo&@²ΑSZ0Τ©ˆcΨώQƒgŽΦΊi…V₯ήzNfφzm|ί§ŒΌ†”₯z£ΞOˆ…Œk@>οΠΜΦjΜ"1ηπ΅œ Ζ‹sΦmΒΠskI’(ο£ Αχ"I"!ul˜π±j†©…ͺϊΪϋΓS™ύlξ ΏU•Dπ/ϋX―^Οw―χT\π35Xμ`ΦS Β$ xg,τ°‡‡Ά"…ΘΰCr†Κ3‰ΡΪuyEλΣ¬<£.ŸAυ,Δ¦_ψπ4΄Hcξ¨N˜2ήcΗO₯ˆ²„% €PIŸΐH{€Zf,)+b–0- ‘νΧS£ΉΤ3ψσ©—δ‘﹌¦<‘πψ»$ΜzΠWΏ²»Gc@ΟΨ9> Ά  3Hsx‚&–ρ†Δ€O ‚=It,6θ‹< TELυj9ΞΉpJ—•”>ϋ"η/Ο6ΩΨX‘™ΑςͺηΥ&ZΡπμ°L[Ν΄ΡΜ)f„F‚Gί6’ΜšζξDQ’₯ FR‚n~iΣX¦p€YZ³ΖΠ“`6«χοdα"°%#ž€JiΪ ς- ,[M$DΨ€A?Œi“ ήΔ‘6Τ‚wΈ{΅<;Aψ(vπ0ΜΚ₯…©nx*ΑΤ‘ΐˆ’Η Ώ%6֍LϊtI³·ΰΠΟhλ_°δ_ηm“Μo³έD›Ωέ‰Χ9φ™΄IHxv–΅‘NŒτ6„X¬ ͺT+ˆ‘Β ¬jP&²[P^ρYI˜*γι{ WюλaUΐαVzΥ–J…δ°GΛ½{”‰]Šrξqfΰ‘¬‰¨6‹#©` 3f œŠi&R‘ŁΥ “΄=ꐯΉ»―Έΐd“`Γxcƒςθψmbΐ!Œ °‘YB˜ŸΤ°HLπA7ΞIΨKϊ‡BΏΨμ v³JŒβΈ(Ώ zψαyŽδ­ρΩνΚ~σ;VCξ?0xζ7ίgyω Ό£‘ΤΥκ%‹˜ ·ξ†5ΰs˜i Δa z"€» ‰8, `Στδ e~p*XЍi₯\ChζwS„ωΣm}Ήτ΅XͺV³2+~ ͺT<7ϋφsύΦΫ;…₯ͺϊwp;EcŽ΄«Ibbι΄’ Ÿ‰ρšψ`α{» •υZA œ”\˜p³($I d―Iδγw^¦Μ“Η€[υC•v·|›½λ½Λ?@a_‰Œ₯R>ΑόΠΙΊΕ$)™ #H9Μ~%‚;°pˆD0--o“v3·Τwχδx8ΐv¬E6 ”N°ŒvTΒκ=2N΄rRθΨ$°|¬Ζg€EΥƒ’»ΜΜά¬΄aΉΪ`θοg,΅£{‘<ŠjxC†‰°1!N"ΑxŒΤΒ†0K ¨«³wΠ·SΕ}H"ψ0`XΗ '?›ŸsrtΌlπ›em°Ό$2’Β:t˜§w΅;-;°΄ab&Βy,hvHS)oKaœtΘ  vxσΛΫͺ;ά•œCGΤVΊŽέλ°vKοςΧΜ~›όΜωd6/搒ƒηΪo]Ÿq΅›…·//œgžUŽΩX{XEωK€ο˜ *yšaλ&― ΡΟέR0ξ|ΙBώ&φ-žUuo žήM€%ΜH¬Ο ›D€"p Rk“†’ 
PΘ5²+КΫ%άonpoΜ–0–υ(o4xη@ušΖΘnGŠX簟₯S“‚&L"–L(ƒ/…ΔΖj Œ1,D΅.ρJέΒ-8[ξVΣUςFξ΄Ά›­ΎΧϊDΣΑMZΓ 6Y!‘žFR@‹CπJGήD–GΐΚ¨:M,¬L#y‘ žcο†Hϊ[fΜzι«„°ΓG”Ed< TXΕψ—³Ϊ‘Α0ˆ¨325˜υ`Ιι΄nΰ "@ SE[ύJιΓXmͺZ:|η9$w0“±x$|$6Œ`Ɓ€`ΣΖskΐΡH cΙΑsΤΨ!ΞΈ^Χ3ν"ŽύοΈc±°šͺI<Θ¦`-ŒŠvF00Q£˜3i“DyΚ%‰ˆhJ?XM5‹¬œ«υβΧ»CϋΰX¨θl&Ή†Yπ zJ*A Ε,†ξ㠍YL¬†ρ%<‰,ΨEΙ•ο·5ΗΠјΰμAΣΰŠδ„aιZ6Žƒ=c΅8K*V1—&vh \…(΄¬¬Ώ― ZΉuίΒ:΄. ΜP'°°δ†ρ8‚)΅1,,Μ$s ±h­‚vŒ}’ΰUa&r[ΨΩΒž§‚=εYpι-Γχχmcγΰ~\iΌ3εΤ‚Δ&Ι*r˜Mg`rΒƒb;lΑ\ml±“w >r.BωHPΝ†δ½]LNβ–WόΠΪΟ9kxQ6Έ5‚„Ό0˜ιΕ±' rΉ˜iΐ±ΥΚ:£3ΐLX–AB$ΐL†Ι‹υ·χfΛ­$9Άθ}>_‘Φωr―u)ΫηααΨω’2άΎ·*5•(ν!Ώώ.qˆ CΪΑκξc•V)Q> p`­œΖ-θ%ήό‘φzδw“ΑκbAρ%vΙ#‚ΕΉ‘SΉω[Η(ΗΜPΏzΎ{όRιω?6Ÿ!ςWΏ«ξαœH›„ˆ&Φ gλ‹ͺ,„ά˜uςŠ΄J•?S…q\ uTΟ²Η¨yPΐ²ύf―ΰEπΨΝv’'ffΟ'žΥβ*ΡT!_ΚbEbΘt#ZΑ>‰‹4prA^Lλπ1]tVλTkpαί¦‡0€ΘŸyθ•,μGβέ2 I€ ΞwM0¦>8«`j“’Ωα~Ώ‹² ZŠΔ°c¦ —ΨλW3ζ7*DFž½\?h!¨ΜFΚ&IπΦ5P‚U‰ ˆဌ ˆΈJmΜό‡_;……1£ξRUΧ„μ­ lρC5JvΕ‹°Ίά—D΅tν΅Ν₯ESQ³"ΓΗυΜΤοuq\»4Ξ)eΉχψJΩ$ΐZ‚JΡ9αΠpŠ`«•ΘˆΫ2–;ΛF|Ν”׌υ ιμαβqξbιε =9Ρi°€8-TΫεj#‰L‡0_²θ7c9Χ¨u“jŒΠ’Tώ·@σŠΊη‘Έ`Ψwƒ»ΝY˜Ε+Pl±™ξς¬&ά™Ω°£‘ta6Φ ΝMΦΑŽ\δ0q_paa7TΪ·«qgΞ,nσ€;Β6]y@άB―Ϊaμ_{§T-XCΒb9aœψNͺΨ.q|˜sδž%ξ>άbrΐVAyS89`ή¨©δκ]:Τ¨t_M΅ΔΊ “’#bλ©zUC$η„ΪζWRΤ‘αΌ$υ‘ΜAtΨDΉ£žCΩΏ5 ‡&0/~ξ[%Δ”}KE*ϊΆr\³Ž#'/叀έ·”½ˆ°z‡(?m)ͺΔ0#έΡη^»k9rJΥVμ+“R±›lΩΙ )RΪΓΆ±0Šq²ΉSμ!±ΟfN[νί¦ž΅~½•†Žϋγr€ιMσΙΈνeτ±]\Ο§ΝWo-B·ά°˜tlΐ’D΅ΑkΟ°@b„`M’ηY,k†E¬15UνΖ_½~&OˆςKΙAΕ.ά՘ζr`j¬T“–^]jS±EΌ² >‹Ό2M“1ν78Χ}“NΪ•“ &€ΎρΝ  zfιΦ΄‡χ5Λ{«R­IY¨σlu˜|Δu±’K]oh'D9t8φκE†Tx<\,i"šyΧ5¦Wφ|ς½eiΞ-^!Φ«žjKH½8{€C•¬C―1D© F§Fβπ}Y’±C~κύΨ„ΕΧUϋXœQΒπθ3₯ž‡žσVƒ— ηŠ¨―Ex³mŽΉ&«­R!b™Σ%Ζ•3γ{ωy;(―¬U η&_η΄M¨|&/}·―9J…Ή¨’j–ΊL5‡štšΚF±6§'€0IZ0SŒΦ™P*<&wrΫϋηΖ9BΆτ•RkΒ Jφ-&§l­ͺZて²JΨϋ­ε’ΙJβ*x„₯R ζ¨4 BΊRzηΜΎzΪP Ρ; ©<ŽΤ~s!Υ–Ε%Έ!Ε`>…Ζ,κΓe 2/νkN0d ΐ‘wJπ“[ϊ)fέV3Βΐ«€Hͺ""•‚άŽΛ%αUvΚGkœ#ο€V’…yγ*$ΰγŠΪ=ΏXσ™.ΙIوάq–^νδŽ“Π9%7΅’«Ω·-€&±±Žί„ΨΎLp]›uΡ€΄l‡?ΜA'ΞN“²BDΝ₯ΓΛ–ͺm§¦Ψh δFU‡$c―ξ8z©¬ΔRrRμju3™5Ž8l'Y`·΅/keΎ½{Β³έ˜©οΏ­o‡ς΅m υ ςθΒnφ„ΗεkΪkO ˜4+!+„%hq‡ '―%SaaοHκΨ€£>aWαGΗl8š™΅:fζu­Έ5#` ­χ)KtXΌΑFΏ³Ι‹ ]±Ί`T’kušqΉ6ͺΡer>ι<)ο²+ ί›“ί6gι2·™LΰΥ/©Ή΄'gœΑ ΰ•JQΑ` Π…Χs8—ΪbΗ΄α §BQ‘!Δqe>'Ηgw,N――WζvχΖω±ΌS)YΪΙ°¨ΎΚψC»tΟΑ(e-$F‡»…G΅CΎ‰†sΙ ΑœΉΥOρtJMƒ' 
·­‘}ΔZ{ΉB€DͺkɁΓήvM@U‡6p?ɜeμίœH8Άk^Δkoh΅β—ΧΥΥ3 ±%c]aχFΐ\ c›΄—2g»”uFΰVŸcb% G0R΅λ Ηίξ_οN—lyΚ±KΑ΅l6Ά±τ’±"ΦΒ6wOΤ"Έh³αŒΖ¦©} sο΅…οΕ Fj„ λ ƒi³ XLΤL`H”©‘εΪ[―rλ]"Ό+₯,ύΉΰR!9OlΚ»Ž5=Ώ<ΏoΫο0[q}{Ή{ύyΣ_0kC5ιψΛοχsiWBBΆτ”[ΉUM›’HŠ 0žΥΥ«ΰ’Λ!xÜ ΰ#bƒΠ‹HθT τ’¦²3ΈΨf™ρΗ»γ~u½Ό/Œ€½Gm ›η)ΖιI΅g–m‰—Šw–=B"lDΥ ‹¦`½άo°½²Ό<-ߞξΪ‡¦#]‘ΨΗy%…γA;ΉpΛ^Tw\‰.ΑΧvG\δ₯ ΓΟΑϊλζŠΝέ;Κϋε¦cώ,,_Ν,’Iι{₯G¬": Ωκ,-βΝw7'••Ψ2ͺη³-ž½ΡΒFTb‰ΧŸ…§φς4‚q‘F΅Ψ#aͺ|υδ˜uτ9#άсHe {X8kͺ€α4kΐsSHG ‹ζδ°έu™Μ΅#΁ ό:5kΠΔΩY5Ά­§’Ζ ZΐΌ=DΡ}ζ£»+CΚ­‰*‹β%KMs–*rIςΡRδ £θρKgaξ.ˆ·|₯Η•{|­>°Ώ‚Tλy1,cο1063H0 Y±†Ρ©F˜K-μk­ΎiΩqαŸ4Ί9Ή@[\eGHΑΛ€ˆ#Υ”Χ­εT*ΚΥ&ŁyΠδ„°Θ7W 0O©έͺLqΦΞηΩκeξiIθJ½7χλŠΖΨ{y„C^―Ωnύυ΄4ay/μV='I4W•"[Υk‘•Mͺv²ή4ΔΩHΛT1Φ9.$ i*Ldpgπνακ ›T,ž:ΐ#•κ:ΑyŒOTΪ’ε,»+!0KπSΑ¦ ….ΐ Žί―I―¨°"ήo˜Ϊεϋ»ΗWώ²m!—ο_χΎyi§$r³k‘›žšέ_ΈφIδΣ€Τq ‹*:κbα]zίDHΟo†0ΓgΌ€N υRξΪsΉ©WΏφό¬Ζή 0ΐ₯ΨΚ‘ž’›$ŒαΙ²m‰‚76TUX-0^6`Ί%‹φ:―­ΌW„¨MΗ…σ$egχrŒ2βΑL"ϊœqͺ«€ω’a‘wq/ρχ6a}žΆg`œ΄v.ΞZΨsΰt#Biώžt,xS…“£ͺ@ZKwQ§K«Y•%ίοzšΊhήo’Ÿ»§‡£2άΕλ&p.CuYΔ‘ΩW8Ύ\}χΔ–ζ­˜uςΡb³Λ›Έͺ.)7αLΛHaΦp­ΪœαοŒέΥm»/BλT·’š‘ꂆxΛY―4‡μT1Α—+"Lμͺ¬·°d琀'΄MW›Ν{ΎWΦVΙŸ„`Ζα¨;)•αΣ{lExN­τU?œ½ΒΘκsE;%¬£rί+Xmο`Žmτˆ­4 {JpGrδœ Yτ­Ί4Ψ‡KΦμ#‚°ƒa°b'κvτ‹σγΘ§"€EΗ+Ί<ˆ•Ჴ΅΅J3T X+ xρ]€ώE5³ 3vdEΑαt0½“0˜σjp;‘cκΣΕ«–»8˜Ό9τή+€.b©f,ο{&M]eLAW΄Uι€u‡¦ͺ–Ÿο…w`S­=―ΰεΰwnχΪζΆyκΝWϋ?z}zΈ?7#Ÿ8οa>‰$žžFTτ5*οα‘!€ˆf’0l>«¬AΘ¨  V,™š[FΰΫJ°Χ5‘ψ8•ί8(Ε»Ά»Ζ‘Vi;*Iu 8€ΣΘr–±!}"K@˜ps Α‰€™mΣcγΣΑ§G‰Έ‰Ν’vΡ€˜€R•δ₯EΔκβ»˜sE ,Y&φ‘Ώ”)ͺΩΆγͺC)ϊΏΝΉ(8ΌψϋΩIΫάœ“- •"ΑM4ahΛ7§Ε|J4ΑΞ"’)ΑJΖθ’ήW*&P˜2Š5Ο―ό@~x΅c¨—“¨\ΣkXΙ@,ΒζXX[ŒF(ž{νD…7—.݁DΔΤ( ZTΆ:Iχΰ‡ϋΗφτΈ8,Ξίn€ΈO§dMˆή"f 8{/ΑHβŽΕ-‹p†{ιψA.ΕgRͺZ1]½΄m_κ'Ϊ†Ύun7ύώι(¬ΣWΈR‡ιυΤ­71΅fr—ΚYu)›Αn.²ςΪ }ΟR,Ÿ‹²ήΛ5…‚φ‰΄ϋp;=Ls…Κa4€€ζ£ %Ίξ&ω‹αŒ,b_0m°·Φ"ΑlΑF‰Z GΥ'XΡζ σίήξώβ«³Ν2ij“Άe„’”΄j„ιQ8ΰ«”όˆž­$%΄πWηYxŸš°;α–,Žή#UiŠ˜ψ•wVΙ«ί@†£s°z&·n <`@ΌΓ―)§›‘rλδkT‘ζh€Ι žL$ν'ΨΎΆ<^λ+ξM;Ψaζν†υο'αvDά9-Ÿ²…λΙd8±θς؎hWΕ^C‘„Έe:P GaEb₯ΎNΘ(°Ši^ϋτΑΜο(§ύZιώ~ψIXΓU)ήζ†ζn»—ίόυτΘwύιϊ€“)₯Zqz°‘JuWOš]FΙW‰884_Ό €Ά‰[+8‘<žۊvNqυzŠrτ]@±½Q5FD  ‘›βΌδ„»Α¦.Τ²MV…o SšΞRšMδΩoξžο'τSgYΆΦκ[πΥ΅μ³ΥTqΪ]NΘΓ4ˆEZ4Q‰ξDT)T―Τ+ΉΗΤuΪν}υUΓ³“(z)%΅ύUMΨyR› ŒdΥzοΨ.’0#ςΗ±UuΔ^εRjϊ2΅ΫzΗ#L†γΫ‘>έίs•ί:hwn…Tu/ΠέύyΙͺSrŸ 
BΞSŸ—§~ύΚo«ƒ²‘Ν7xόί6”fΒψi­ΜΉ7ϋ\ρύifiω›cΗ5λ"bCEͺφ> Tnδ« I… ωΑZS)ΙEμDDΙΤΙƒj¦€2ž™ίχJςνχ―Rͺuν`N EΨξχVmO₯&9*±Pρ@Ρ:ŒΚ³(b —’ΞζΰZ…!Θyr$xΔΗΧμφ§s…i7œnJΧΤu"cε.;H=Έλx/t0΅υ˜2Uγj·Γ•P0O£Χ“γΌnŸ99o δ`ΜE{ΫΛ₯aπ)cγlο ΎΥέ³pœcjέSΉΊ43œk²ΒžΦƒ’‘E‰…-α0ljAΡMυ·Υ¬<°υΌ·]δΓ&ˆ φ$Lδjψe]ΥsyΈοοΎφΈ“&Χ»θ–„έΉζT‚Β‰E΄QώSS‹‡95ι‚H₯—RLO‹ΦΊœ(Ύ‡χaq~ΕΊa}2RτŸ#Α<Ύ4.u/YR1>ϊ@fLΑͺα(t΅#5uΙ#γμΏΊ:PO%EΉΡΡ^ JξΟΚyΤΐRA#‚WΎ)ΓMkι™ͺήuD‰g²f Θ)Φ“½tφI[ϋkMσ@4-+ΙBΪ4…Γ6Dε ;Ξ›ά±-{n-­m*Ά £R1·¬™¦†pτϊzΚσsu˜;@ίuΘM!l’XcχΌCΆY±$Ψ;"ύlˆ/,·œw&«"]Ξ;Ώζ¦`œxοα‹«o[£raΌΐεόΕΪΙη^Ώz0b-…{S —…n½άύ:UΆΔœ‘ΛUi†3μWZ‹œZΟέy©A9?™Š«Φ_Δhlipπρ― 10 pό°ˆLĜD ώΟοŠΘtΉˆΩLhn¬πԚ=ςλm9V ‚u\>ϋ)ΡG"Dm]Dɝ2³¦LͺSμΉ’οX(a€„'ΡΦ+Ήcι9)=Uμ?Œ Ρ+ υΪρ(–gJmˆMÞD’Z‰”ΗΠiγ«t•+ΐWM%FΚ€'―%#ΆΦΞGJ*ΛΐΗovγt€»EΪ}·­ ŸΧiΏϋ•£ιY>0Kπβ$ε€oBX[ηB’p=ΪRD!’4lŒύ9€FS].Vσ/•>Qύ 0Π―~«FˆL˜96aΘ”΅Š©rΡ~ ι0>Ω:.†‚Ν 䟁€†A”Αϋq.Ηx ΦΩΔ₯k΅€Ώ}€wςρN¨η†>¬1MΆKΪτϋ)̟ειώβ΅LΗ=·K&3'WγΫ€Κσp4/½&’ Ζ΅PcιξŒοΡ ƒΎ”ϋώΰ[,©wM½γ­Ι°σ―j%Ίφ“ ΘχDχz°ΧΎqJKd#:·†¨ΨŽΘ°ΪΠUͺkΒΎi9"€PppΩλ(Bu†’%!ώNυRΊ~Ε ½ΎΎέ·ŒγξӞόΗ.™ρ?tν9#]Ψ!XVμ "ιœHn¨Vq ‹p+©bΘ1{„gΞfΨ‚—Q’όΑˆδ³32ά”œτŸη₯7PιͺΊTͺΨ΅0Ρμ«­-Ii†1M@VΤT#Ζ©ΰ”TΌtŸ;+φ—Β#―^OrWΰn ™D¦&. œΤ>³ ˆ'qͺkΔϊŠZ[ΜYΔόzf‘„R©e.n:½·ΗΣ„Εw¦7J[£}Υ0λcd€‰ΊR]ξLKΚE8J²°³Λ½0Φ  ΆζΚυ¬όs]Κ2ΜΎΊΎ:iI=¬*N›WdDΟΨφ!_­ͺΈͺ–φ.Δ™ΑXL@†?„}#©ΕφΡ4}ύϊς΄q‹]ψ57’±œ«όε™ ©p³T:Ύνν'άΌ¬ώΟ ϋί^Γ‰ŒZv9Ή` EJͺKΰD‡2a)αίηΎq†¬Ι 6Ϋ‹u[ο&ΔͺΦΫνΥζVczFΔΫΟUg“R//w›Xσ‹\­E‘ΖΦδθςσ­υφ…ωvV—}ο–φΘ-Ν D©IiƒRΩ₯Ύκ©θAαΈΏ„huοΝγ °]ΣXΉP-‹Zϋ{τi μF‘ό"ΜΫk€vο4ØΓο[29(m7£7λΏ~ƒo_ξκΥ©RzκVa*βޜ­\T‘°‡#H(°‹^ΐτYθ$7V€ )iΚ¨š»λj’TfDΐζS‰ELΚσλύΟλη°OΊTfH3‹8ΡΑFΌX³” α0Ύd―π’—Qp Ϊ ρ©Η.ς·J·ΐΩ^―NΚ9dxeΠa,ŽPAhu.E2γΩ„τ-\Ž‚>€Ϋb‹SΐΒ¨Ÿ€Ώj–ΆςQτrnΐ›˜ηΪΓΆ ½ ρ±Π­UJXMvΙ$ψGΓ쌊:Χκ:λ ›@Ρw²σ$₯ΉώΉι‘άΘούΆV< fΫr: ΅½Ί‹ίkJΒΌο_οκΧK³:ڌ²ό΄&PΑh¨Δφ6(i#o69Qƒ%4•^R$­cs81" Θ?ΑLD?Ώ>}ŸœΘΟۏݜ^{Z0©IΙJΆŠn"OT»Š U8Ά‘*ΧpΉe6€eϊΨaoΊ.eώ!;;ή΅:¬zH³δ_ΆgΓx·ŸτΚ =Φ―ϋ>εwI«p!Bτ†”Τέ(ΖpI5‘rXv=a 'Ξ6Ϊ αΫΎΩ δX:θ MmΗf₯r΄ ͺ γιE³ξ8› yX‹N›©lr§nŠ$ΓΣe<½.“ΊοΆΌ©Η›|$?4"υΆ-8<ΫZΥΛδπSώ nœέFΩΔpήυΌ2]ΛK Κu\φ6>+ς§¦YΨ.£ΛΐHΑxi5*. 
΅_uAL\NS˜Π7ΏΤΓΉ^ίγxyιάE΅MͺT’eμφ΄kr_+ί’1AJ{T€λΤ‘υΦ™δ^¬η’ Ε‰œ‰ρ βΡΗΜΐσ₯«DκQίfη=HyοΛο€c + #W kk`M–γ=(8Ϊ’§DŸV±S Β»ς₯CάΏ/ΈΈ+ IΨΓͺΤ!ΑπIk΅ν_Πσz£‰!£•6=auN τΛ)`‹ @Tl%Wͺ-Ϊρn}©ΦtΪ !€_¦‡£bαχΉ»)ΨΦοΙUm’κi:©Ύ_5<LŠΑμ ˆη.Șˆθ­MzV=τŒXθ™QίJφθVόœJόρ#ξΨώvά΄=~ΐήOΪζ‘>] ~Xヲ‡:3«ΧΣޟ©)(’ #;P~D›Ξ¨―αq>&£Ύ½Όΰσή>Π#}™qς†ρŒ₯λ"ώΗϊςσY>ςθΫ[ώρότςΊyuXΆ‘Kl…Aΐn…Χmnγt‘'»€tnχ¦£―ΜTΑζ₯›έΚά=Ύ>έ>mΒc„]3ΐ BόιΖƒ1Uρέx@ŽΊί+ωΫΊ?rD»ψ½γΦjυzιLΊΜα`•nρ‰Ψ _.”α…ο0ZηΟΝA Ψjυz{χ {hΫqpE‰r{ϊ–£.ŒVnωΖX)0ˆmFwΖ–Μm―Mγ -γννΝθϐ9;Ι7/ΞοδRrT¦œLΛ­™ͺ0-΄»ΰ:“«Fy'4F₯χXƒ(οf;‘"½V¨Ο/›₯=ναΓΰ††ΕυρZΏΦ†!Οοή™‘x… +bnN.cJι¬jRΦ™L$ΙG%ρΫr±Aδέ»β”{΅>ο\Sφ„λΧ“τ•ποϊυ“©σϋ―}]φήο>Χ/š½εΫE‹ϊrHHˆτZ0ZΨX’¨ ν…7 h©ϊ&_1§•€y^9ΘαDΩ?NχXΨ€Ίχ‰ZΏλΜάl>πϊ„ν– e ½—«Vο¬s€‰r4IHΓ{ κ’Dhdš2Θ{j!M΄jTs³yόρ-σαq/Ÿάf-wž΅+_3kXα E ΧΌG°Ι»X©‡RLσΖ(«ρPu―ͺd| œψ)ˆZΎ~,Rθ_“-Bd˜L­Φ$tΖDQ`Ho±Z‹Yχ»ΒΑ {i9γ6‘r³?Nχ™q._•«΅dq–+)ί1hT”jο( Ι8–ŠΒIοCΚϋYQ-M’ αρΠ–ΏΚJθL½¦¬a΄Έkƒ¨¬P- IT„ςzχNE=[›p&aΓzh)#†ƒ+uή/WEχ£θd›Θ`3²ix­σP΄NarjΥx/™%§€gχˆγŁž½HήXδ«χήW²,ΤΕ²Ž8]ΝgjDzβιN.Ο¬\*fesh63E!2 AΫzρά]6¬'£\ή¨Z•Τ ‡@pΈέpμNŠ‹sΣAK3ͺV*J©C„ε-&6£‹±F Ε.Žς²½[] „ ΔΦ†£ιΰ3…eTΊg€Δ$n₯`M»ν=0W¬9& εD0₯‚ltump •¦ͺ.R-*₯K-Jα¬*kՍ¦z.Εc› W‘:ΓΖ²πη7QNϋ±]ϋγιeυυξ$Ίό’6γΌ¨†Ώ`Χ8©]Ϊ‘΅Π†`ΆmQJ₯ΰ JΜ†7²ΕFΰ'£κ’~Gΐ"‘Ωώ8σΛη諉΅ψ˜DL^8Ε›c‘=φ>ΤLšΌvˆΚΨHΟ'1 μίΤzhiNj’εo/‰39ΐŸOGYϊ+ΤcS&‘|ɝΰ1”°Ώ’t©h%’Ί*(Ε–B„3ΕNmRΉdEh’£‰Ψΰ2‡Φ6™tΣ€hΪΉΫ₯`†Qޜ'ΝΩ{Ο±dφβ5T¬xχ’ΟWfξ)—H1Ϊ ¨υΠ·λ²ͺ~θΧ.¬ž”D&(ΐί§`_αflbή_ΘΦ)˜ίώίO μ»”"šν6šΙXœa1&x7.1Šv—Χ>iιό βο²ΦTΊΒKψ@­»˜c@š•σi’šδ4M15Θ}N’“kΏΈ,³ΉΰθhΧ&4—F||©V*h,ˆ.—.ς –R†Δ>ˆψ1 άT£ήΉQ,ί.™LTΒ ,u ˆp`³7φέ‘D4p8°Γ/τH₯HYΗљ葝a‡AΎ­N»ω–ί”ή @7έΆ$DΨo57ξ†ZΛ•`Λ;ϋy-Χ*έΠB οMA^*άLh=Ε‰59_ލύυ@›j7U[­riΛMβ^Η°y/΅Ϊ¨2Γj©j­μͺxΐލKΉ2Q I_₯:α…Vw›Ϋχ½ΊΗaX7«Ÿ―τc“Μ}ΛΉ½ͺηϊ€ΧΏ&.ζ^ιδμ¦ΕI& `«„ QIDUS,-‹ ΧΨDΈiP£¬ΆγΥΐΩI—Pm‘eUΉη‹χžg‡Ή™“rq–HD“°ΏFͺˆx RF`S|‘Zρ%:i}φ­³²žqcΧ6Ž6©O¨‡§o|Ϋξ^nO-”_| g―4g+Δύ)•l}ζZ e#d#T­ν ζI.¦³¨v¬ŸX^ c6ό 
ϊ$Ϋ:έ«3*$*,^Κ†ά%Ωΰku’*εζRz©aͺ[ŠΦΰΠN‡YW;Α₯C+±^cWˆ[Ž£ΫΝΧφNmΩF.Ωdl»/nΚS[Σn}ύωΌΑ|ΓWƒόαίΆ½”ΓWX½KOά·=l/kφ4ΧՐGŒLϋ_#„c·Vo/χΓ/mΈ7ŽΉΊŽώή=Ρl½χΒΝfJŽΊ°‡­„pn­žξΏq»=˜¬ΣŸ}Α»₯;yϋ~ϋ:“œν°γιαMιΘύόΗn;/^hoΩOωί&oρOξ?exιύήπ„ξύrqλ1Έ½ϋΕ3[απΚυ“b’Šζσ”{“bŸ§λΉc¬;:»:Ε‘Gη£wΩΗ΅PΏX―1Ήίne@ΧW.ή”‚k0αΚ[mC―T#»”¬hrtί+p† Ϊ’zτL½$% [eJθΰεK9Ή.\\UΔ¨<Œ°ΐhΧ΄Ιl”ξ©Ϊš*ΐrŽRωeŒ‹‰TΣ9Γ;η’”3ά[™zφΣ ―Γςδ#CΤβ€wsͺ'Χ…]ξxπrOπΦ#Ό―Ω&Š-H\†ΤB€cLŸH~L’†ο”3†‰γ°φ(―ς΄χsδ-ζ1⭜Οφ-%“ΉΪ|ΚψαοM 3“ dˆ#Eζ¨K@蝀j΅Š"δJšΫϊ6~{ΆΫ“3zΧ>4|ΚΆ,ξΛWν’ΉYΩDp/7GΓ«ίq–„‰°Ϊ„Ψ”v‘V—»ρ±΅δKί»6‘ρD^Χδ,kΈ4œ‹ͺF,ξ;Pš€FS(h;Wˆ"αο6Fmσκκξ @ΛΫΖg7 Ϊ «ωΥ¦ λέKύxΈΏy™±bοOρίhέφzO¬nω‡^νϊψFVk·H“s·ϊΊUω{ŸΕΏψεIΨα/MγŸ«VϋώΈ€ωtΒφOτ™i;?-O'}φ]φΥΉΏΘά¬με;f¦z·x…§γœ£Ψ·E_ψŒΓ8ˆΆΎ>Pς#»xΉέψΘ“Ξε’ωJζΐφ]X΄ΧΦφξό9=ήΗ©†»[˜ e}΅»‚βi‚pj³sΞ»Rο¦oΘ1{°ΎJ)Χͺ·Žι&SSNΊ–>‘$χή­zf€{¬D'μωKΓ} +λͺ8ˆΒE`₯²Άl­°ΉvˆΠΧδ|r‹’ZπΕ4aΔ,ΒϋΣd 5 cθJΉ6]gτNα‹’¬r`ΞX:V„o\ˆ­rRΙ’YJο΅)ϋήr1©–τιdτώι? ˆ'ΓΆ‰νP[6ά/ˆlz €J Ά*%Ύ&yBaνqΆ½ ‹ήγ„jՎυ?Ÿ-$=3Π+θi«Φ1§X½.(…/­j’¬Θ“ι₯ΔΧ/Ϊ°9ΤΠ‰΅χΜηͺυ₯‘Žσ*ΚΕ7CΚ†VC3-υ¦₯¨ΒΞͺΐ&—¦³-ߐΞD…%OΖ6OB,39„53ΒώΒ5δ±RžcQFg’ίζΝx|o|s1ψκs‰5gΛ°&Ϊη^ΔXsJNΫsδ!ƒ©SG‘Θ@`t>ίΆw“vΤΦΆͺOΟόε^Ϊ&Γ°¦Zρ—xtΪυξlπύm5ή^{4v>’€ψωvυ ΔΝΆ M©Αa»pgaaάM Ψ=˜Dkœω–&•ZΛ³Σ<%έ²’Ξ˜Ζ“sn―”©lc•֍8“hοΐνΔځE¨Ϊδcχ5%ί¬7EŠŠŒΆ#¦μ¬kjςρψ¦λhΘόϋ₯οΕ“ηΤ„ΘΌκS‚mRD*ΗE‚:h5‚q*γΝ‘Ψ ’°^]|Ο\¬έœOΓΆ%3WG kΰ€ΌŽ0₯’ άGΫmΦ"ςCιΒ4ηU3ή ’\Ω[2©‰1gpζ©½|,Έ|_&ΐ‰Πl2βl:)'ΞƒH$SΡ‘RΓ6ΥU7[’ PπΙͺΕ4išWL'€Θ]αρρTJtP\ξρ""ΘΘΔؐ!wQӈxIυ0΄ΗZrΒH” …2ωψ›ϋ’Ο]ΊυHξέ¬\μ™{ΟΓ\F]Ύ―Ψ>ςMΑ+7ν! 
Γς%Ϊ6DUt3ˆ‚°Ζλ 5άc$)‡©Jn₯™l/-Χj©j­; σ.-͐u‘κ¦?½=Άw―uτΪϋ•ρ±£<‘σ|³οΜbΔνΝV€AΡ c‰ΚVΩγŠyŠψ†]ΑQŒ.xψοΣ¬Ν;rΦ$MΞ„@ΘΎΑΕo‹ KΥQ Age GφΦχ\‰KLF΄Ύ#ŒOΔΖιˆQUΛδ”›hΥ[?ϋΝ€b^~S»VΪνΙϋȐυςΕΔ@•«IdJͺl•&rs#žE™€Ju-₯ΐ΅ ΒΣZ%$Vσ‡FΊΌi”ΧΤ΅2VπL—ž!ΏνJD§Cƒ₯@„ΗΙ4c­¨΅vcNIjοΒ„VέρH΅:W#~πξλ…₯DΊuγ-ϋ$άCΖfƒ£-ϊY»‚™h€-BςzO€jR°ώά †α/œ Βsϋ₯FͺuΌ•ήš kKg­†aχΕIΣ BΣ [ €­'Τ^žΟ@ζIξD…Ε-νИœF`-¦€vQΉ+₯1b4KΎl?ަζΰjεͺΔͺ1ΑΠζ‰yΰΗΟ ρΆ–§—λWˆ!ήΪ~2=+N%ZŒKs pœM»1()Λς‹6`&2“' ’¦=¬x6ο+έχyvto:Ώ€™½Λl §ͺ—„¨―`ΉTξ΄zΝέ Σ’QI#βν¦Ϋ€΅t3„¨S¨KκΧ¬‡~χεριe„QyqiΓjjlRLξΊι@YΙHΫGν³“΄|joΚzXαštW‘‡nNiυΐoLoζK# κρ†₯Cώ˜3ηΨ°ˆ™±{½d€ ˆŸt!RιGς©ϊHΩ2V]aπrΖΰγψa½{}ΪtX|›Ώ›χςφΧφ!,Ν%qθΟΈ€Χ„…M ˆ ήΔΩζlΟZ<„ΦR‘€›r1ΕΩΚγ£ξO/Η—Ÿ›‰ΡγΈ}ρ–€άlσ0Gƒ‘R€7 ΝkDΗ֐mΉ $τ°ιNΚW&NwvgοδŸεβ ·o½v8ξTT A@Ι’-η‚§¦[ν€’₯ ξ(“₯NΩ‰ΈWΖβg› ‰‘ Ϋ³o³ £Eρβm} Y[=Ύ"«χΦΟΦ«Ξη:WΤzL)4ŸIhΊδu”-h*g{R»Ιάκ-1η%‚ŸΖψ|Υλ>ΙΦi%μ³ΟΆβψΣρ|ςaΝ‹\Ύί€°φ RvŽfςΓύ+Ο·ν;ΟΚ3άJ_›8ίdkmL­uF¨ΚdBw"₯γK#˜hαR)=ΑfΙ-’i5‰¬Ίj°UBn['N&ζΟ,qߞ‰“ϋ‚‘υ¦<=έ<=Σ?ίψ¦qyϋrnrξωΗifuiO.m0hΕ!R)ΫC'j’1ίΰΆρσΠeγ|m‘\ΛͺΙ5_†M67SqΡzγ₯Π[D™78oάrHNqm\=e՝p7l„Ϊ΅‘fΉE6Ψˍx’δlzΚq^•δψϋΉίH}όΝv`GΙ,3«ΘG“ͺ‘z8`¦*D?€œ…[Ξ‰•Ρ͊Z’VZ΄zδZkg•9ßοV"τκχ°$²ΚNΧh£α(­5EdŸΰP“ͺζΪΰ₯,6ϋœkΑB“‰:[o€yuJohuOεκΌ-’S-04>dD;‘Β\x δt)Δη5[²X„Ό­a$–λ§μ˜₯·yςٟŽeΝύbLmpŠ΅e‚uογΐˆ"υhEDjTΌ+Z’*o56JΜ³ί k“²9FŸ2bπ@ISR8C­~ _όσ•ώ„~{ύ@ΟδΎCώ lya^}Ώ[}ύ½O’bρA`]Tδ€–ͺHΟ¨³9g€Wl:Χ5 y/]c½~ŸϋΖρЬ—z³ϊΎΑ‘S•Ρ›I:7 'α«Ή7F«ΐρΤ»a υDV˜—ΐ‘Υ†ΐFgχΨ Ψ_”ξLSAW;! 
Ί¨§K/O€έΓy†ξ%3Ύjιzk]~#sC£₯‰"α€#$TV=ΚΕ7w«-»drva| Όn0θώLμŽ… λcGβvTλ`ω+DkΥb•“·B8Ψ ώ‘5)ΩΚδπ’ιθAJ`œμΥ<°€υή²hT·₯ά;‡‚x’$ˆΐΑб°Ύd>(δzΎ~EšΫέμ?jv-’ 6ΥΗ¬\ˆΉP¬CO’ETτδ‹΄Kj—˜cιXiΌΥLVμS X˜QΌΆίθu’¬ϊhZŠΰO#»•¬ n?Ιu’²šAz½ίΨ;“Ϋϊ0Iu”‡ϊTΓν *η έΥsREcΌΝS{τ•ΚύππάΧ‹qK/RΧp)f0˜6a’’€½΄uSb %‘¦ ”š”} ρ4€b…½j“ΔΓέ ΆΞ¬^;KMF$ μ€s Φ‘ΆYν”0LtΟb‚Œ† ™#©.$%Ι­q±p›ΆOδEφαIΓΧςΠ‚=ΤΒΟ¦DΑ\;ΨDΦMT‘’EœBx\Σ*lcΕSΗΥ‹Œ‹Εq™ΐκξα€λzρ &―u­Έκ}&qΦP) –-ΫδRzρ­qP"ގuΙΉ±Ζž’@yϊΌ|5UYea‡9cs{%Ή@DΚ1WΡυΡΚΙ=^-RNF°«₯ˆΨ³EαΊκaϊιίκλΣσ)ο‘Y<%hBCΜj26…sΉ«’CΟΨ$ΦΏήφ,ΤΨΉU‘₯PTω‹Ερη‰.ŒέΙ±9ή μ’*ήΙ;1‡^œlέ³t.ο#ΰ*£<ΜBΤˆ²WX ܈'EΫΆ&Ϋs£hθ'ϚΣE₯¦C½ξ_Πp“ΉyΈΎžS΄:UαIφ…|μαžEΌTμFξφΘK·ε¬šƒehΑA6$~<4<ϋεΛΚύw tιθŽ9ρVQL‘ρ€’»£,œoΑΊΞ‘ξΥ%―ω7Dχ&Δ.χͺΪεΉ+±E~+―GΥπΧHξXaΔξ^«j²φ‘tγdΧuΔ (’ΩIrΔΑ!Bτ NΖuS-ްͺO"ϟειΡ'εα(G•ΐk TA’ζΠ•ˆ²κ"E¨­™ ”ά(Š΄ξΦJ\^«.ΡF„»ΒοααχΣ8‘ϋpγ΄ΡΛxΰ‡­ZεښΡΆ2Ϊ"x))1Š₯ Š€ώ‹lδζα”4sK˜%±aVΨ@E•M,φ8’+ŽŽaΤ€€λήΦ»ψp©ΟLΑi©M4‹ ›³Τšr/¨“<«VRΨ‰ΖΒ)|Q£p‡Γΐ…W|΅Z*Ι|0=5Kς1Έ€·‹·…ŠX±fM₯ ‘Δ‚ύξC/5JާSοO?nv’„η'²“ΧWˆH\kΫ’#]=Γ}…X0Wλ*P§‹Δ‚9«uΨηR‹‘{ƒ9kIηρ„ξφιΟ'tήuυqΒ·h9X„Ίq%ώ§•t„š»Ο„γΙΞθ €šqž4δ !¦“‹97ikψαΉέ–,½Ϋ΅οπ+½(Δθ­‡”YΑR6]t±‘E5ιŠ9F 0‰ˆP*­i!(‘Mk"όΆ–[t0°N―Ϋ6A·?h{…$X£ͺkΤ­πͺiΥS‚ΫΝ%e§›K•ŠΰK„ˆZcμC§Ν’…OQq™¨N?ρ·£’SuocόΛ#Σ5ΫΏ_ͺ^£:Έϊ,ζ―fίr€S*άXΨ£• 8 „γ-΅¬rPCO¬α €V°γΗϋύΥ>9ΐGζ`υz»)ύxΥθ|©Κΰφ’ν'Ιτe²ηόƒλΫλΖ=žM©›”MΌΘ'ϊ.WΈ‹Žλ#ί³ά—9C‡?QŸyσjυτ²0‡μΙMΖΘτνΟΖA‰κ:ηΏ^’³ƒŸVpΥn|.šηφΙIωsκa1οL ΪˍΎGiώA4I©Βώ`Ωυ΄tλωΉόάeΥq υErΰ %џεήΫ€—耦iΡ!}ωΠf^ΰͺςβڍ.Βω΄ΝΑQδi»`YΦx„€ωΨΞώ’E~a‰c6~€v§cžSΩΣμϊμξό€X΅Lΐ\.rB8ω}CŠz,Y}¨σΎmω”Λ―/"ΫΌcΨ~'bP7;-ίχ—aϊ ΓfYΝι,Yτ²ώsvoύεΝW¦ϋ -Ο;―ίΈM<‘–lΣϊ>x.ΧΊΘΆ¦ξ±’˜ΓϊnQe8/Ÿ6―eŸμ9™Τ“Ύ,!½9OπξΕηv–~Όvαέy€ͺ‘/_^>…v―o‘ικτΟ/ηΑVθz‡šπΕνWΎίξ‹M7— WΩ‡ΧηhΦ„ ₯-ΥHŽKθ^κf«΅Š]E%―BΝΞǚ›u&E–r§$νS9Ίc:¬©Qξ2gΗ‰y³ψU9gbλJrAΔpD]sn-(Za₯Z•V@#y€’T>!@Τ½vF$hΗωαvΟƒ-|?cœλχ]}°ΉQέXNΎh½tήgο"Ψ™ΘfΈT4©κ…QΎf«’qΑ€Φό’­%ΔΧ&ul’Gnk§ΐ5τ’+E³vΙ₯F8Μ‚Ι’Z’Β$pΦ- Ήh7ƒxjj˜GvižŸο~+–υž6“yδMl[γ­FΚ/˜Ό=Λ6ύΨ±Λƒέ>ΤΌρ6zΉίz₯_½‘>‚™ΨκρηΧ-Έ:ΐRλJΫ‡­:‚'ΏRηwV„γ8 ~Z7aŽ,L°}χΪοψ~νͺh£€#ςξ‘Άι›‚z‘<|yσϊΧy6Η€Gσω z7#Ηζρια7‡ή$έίΆ½Ω«©”oχπη°τάnϊύ OδuψŠ ŠqsEF0ΤI9αΨOe¦«4Χ‡υς.xΗ7>ΣΛΈƒβΌb{¬~Ϋ\^Σ#ζn§'rΫi1 
ώΛΝδ₯ΞιγžΏ_tnέQτ·wwλu!Ι\›‘#7λXν€ύ“ƒ‡]­ξGß3AΝQrhw°6ά‘ηΩψYZfέ?ˆνΏ?}§ŸsPΌDϋ‹νεζξ†ξŸΏ₯ΈfMΗΊoγλSΉ”ύΙ₯<ΑΨN™4xλnŸοίΖο.y“`ΗΩ…Ω)ΟaOπτ5Εώ+·λ4Λhz°±y3‚ Y…iϊŠίjŒδ ΅¦€kφΩ‘ΞίγλΣνΣNψyάΟJωŽH ΑξΚ"&Ι6FLNπNΦ°%ί+,χtOR)o/ψ׎ωο§Χ»ώsΜςμ‰.εbΞ¦£z„ yšc‰΅G~¬†τ³­_Kχkςks”χfqΔ›:ž zWb[[XΪU[εˆyΟg­nΉΊ~)Ε΄,^#ΥnFΉαε_ž1:FnssHGΧϋ›ωδbc"«΄Υάψ%ΣΌS 0φ@ΚoφΧέs?ο΅nκλύ-Ί’?}ωWπεΝΙϊOΰ)zhΞδ…ΧŒUsi‰ή[F>KR΄; η§ηφμ?xS0uΊyές=Ω&Υ€ΨY銌ΑζξœΥ!SPΊΠΊœΒ υΧΦο Œ»Ύτ’…ΰœZŸΧ$ΊvR•PΪΝΪϋ@Ο׏ΌυdNώέΛίΊyo.”|ΛφV‘ Φκ’ΌŽΉΉZΌς5΅΄£Θ:5“SަK'ήΕΆr―΅χlbO‘Ωs’}{€γώ&ζƒιΧή"Άb€ž€Ώ€¨}©>&―t"›RΡζ’“*Uz¦³­ug³mΦ6—₯ŒmήωθŽψέΞ|οΩƒ½½;³χηκνX΅ζΤΝz±ƒ%³%s2ίMΛ~Λίφκ-Ǎӑݘš± t~ΘG±ίτx#Ή“4δμiΪϋ}ΰφτ(Ά˜wKzšψ€ρΊτ€ƒΩγ9]Εs)Σ1ͺ»sΛ6R£0΅p›ύ™Ήέάί=Œζqψ™Šυ3qωQ½.$­ίWώrDϋΌŽ  mϊ)=λKθCAέμΰ΅ θΨΎ«}5'&›‘•š©+yΌgΧdYS[ψζΰ·nvD;ΣρΞωζύΠ$qΒArb2tΨ »ΟΉ”Ζ|8•–s‹+ό”ΤjΪχPυΠ%b„ήή2₯΄$*Κ.ΖAΠ ±σEg¬’›ΌΖ„™^σΈjYαΓGκsοξ²W';Άt@¦©κΌ§V½(œ9ξΐŸΚSFΥ|£ΌeΕ9[)Ξcρ=/“:Δ¨#α"IΠσέγ—JΟ!°‘ΔvΔ±xCDΎwνU>μ. Υψ&,FΖΔY@ ”Υλώ]ΟΑ!ί~sSήξξΫωan3½sΛνΪι΅ΔQφΰμmΝprX&Π ͺkOΞlˆ ­ξΑ8ΦFT‚}β8g‘+ŚμDͺP΅D/VfDΕ`άύΑΔΤώeΤ±Έ=΅‡»§!g₯υ˜υίoΝ;Ιٍ©¬Ž’yΎΎ½TδΜ³}hBO8]Wχ(¦IΓ°σ.΅=λ“N^ΞΥ©‚ 1u“«U ab Œˆί!K:D}ψphkζϋθ?¬Ύ4 "²ξAΈEkς6t‡­¨2%Κ}`0³QΥ£βZΊΦΒάmlPBCd„ Xz5β’+ΓΐadΎρrΏvοU-}›₯™‚Ψ·j’ΣξOθΰm/%/-γΪ΄ͺ„Μ=&.ΉDΗηήσΟP>L8/SΥ…«―ΒVJQ³u>'ej$μω”΅ΡΙΊ²‘ώ`εzœ}­ΎiX/γΖ‹±Fνp©j,N€Cz19ο­ρ.xy;ΕwωVΫΝλΛέ©e\αϊИ…ΒMψάTΣδJηΫίέ”EΌcQNŒNΤ@¨Ά§ž`C|CT§žθΈ]έψ!τN‘9`LŒF#vΙΞ q_Š˜k«:Bw ½ωxΟBˆD =ωιa¬ΫeΚ]»;K³4ΉάMip9€{πJV±nE2’ϊ^»$EΟ‘|CTΪ FΌΦ6tΝO72œ» ίρριεξοώZ‹Ÿ8δ₯Μ»˜½5…ZƒΫ°Ι₯¬lV‘A„σ"ϊΥd©b}Ξ‚κ€›ΟΤi"uϊ^|a΄+ώ"|T§ƒΥWΐ’:΄€­Wλ$coQLxφ#./jeα»2,«·±ο#°Ζ/XΌοΒζ\χr―ΨΑ^T’J‚ύ¦‹ΐίΐX0 #m0lX‘ρΚΪμ„*Πi`ηΔUΜ„ :6κΒ „(νΪ#™ΐR,5 &+׈°ΥΗŒBΆ +Χ<0‘ι ±7)"p‰ΣFβU`D9qρτŒφ‘Β"Γr1Ε`Ό‚qVΆ΅ΰ΅Œ'―e@ ‚?NΏΘU=7EŠ+ΧMΗ ΰ‹σοy`9›¬p<[`…¦SΠ€=β“mΒ–f„ήE;‘χ)x ލ€€˜κ|ΡΗφHο•°›Ϋεj±)ρΪΣ C|υݘ»]qΒΒά›Ά'«₯†ΈΑ%E±τ*Vξ–:l+ΐw,λ"t4΅˜#—fρ¨ΌW>yo*φd•)00Ε‘³–XβQβ` ].ΆVΙ¦xd yt9δ—GΎ·fZξ~wO`ώHgΦπνĘ€Ε7t©°Ο"]]lœ“¬0<˜B)κζ{ΊγL†€`BzŽΨΞ!Β•‘­Œ'ΖΏπ« ⧇aˆvŽhι7Ίς‚η»}αΎI-ώ΅ošκEK…”sΡ26.’ηž$Q=Β,( ¬GwΓl$€•Œ; UpιSFθ[}ώσΛιΎ]zγ₯J₯ U€₯1Ά[fFΎŒΠ8—Ψkkx‹χΨΖthˆ$e›±†1χ)Vt€ŽΫ‡“πt…s7…'‘rEL΄KΔV― ΥΆ’Β`80†K#-œb•M‘Κ)VΰβΙΗžϊvψS'Pbιά”(Δ‹Bu 
ΜHe&@zo©ΤAu%Ψ€ΰ’ΤΒ.U`χ\+ωd\ͺUDϋŸΔςοΕγH†ιΛΒ;H9%ΈαV2’ ½(Pωα'E Ώ*X³ HD<9ˆcMΊ‘”ci-O¬SVz9Ν@€€0G#HΘ@HQ;ά±lΝ-¦"ΨA φΖ7ΉΨφ§#»;τΠ/ΝeKpˆ.%Ι•ͺ°»9+¦Πβ½"’ο‘‘(*ΈΤΨ©•’˜‰E‘ ¨zjή·εψΗΌΪ‹ …Ζ¨“ΑF·ΎgΪΘcΪC³P4;Dg‰HSΝEŸ[­φΥWM¦ΗqBκ•°—ξ*“Ž.Σ·/ܜϘ|§ΗΧ«gΥuexΒ€σ kΓyξ ˆnK™<Œ-ΞMa€Β[Χε’½FΖE¬8žΐΪ匢™Φ3c\έ]ΐ°€\meWdK’ƒ Ϊ<8Q>©PcίzdΟ$‰θθƒΤPu‚έι zςμ§λίευAδkυυιυζω…ΏέρχΕkκee DNXSS΅ @ͺỀA X˜o‘GFP5±†ι1=άό½S’σG\zaΌ‡ΉˆΪΨΖΡuΥHnι›Rœv‹ΉlK6]xTKa€œθ]ρΆπΌ+‘ΙjΔύΑ‹ΞnΤχχJΩ*ώμυ'(–κ‘ςΠΨCd κΨPΎ*xRΪ&©zͺ ƒςkkσζ«Ο€H‹β,±2M !3sε‹ζ€™π)0gŽžzpδZ Σξ¦|Ο氜dυ—ΎzQKΘZ„b”²rQ‘γ §΅ΆRR–“v¦!œMkΌ₯R΅rνCReΌΫjΪ L-έΧ»z˜$vWη2±€6ΊΤ!†¨Ρ7Ο=1c₯f,0¬‘–δά±†'‘ΐNχ€΅ζΉU{—ƒSΓ οΙζLKγΩ%"UMΚπ­[硉8ΜZW۝3ή'gͺξ‘b‘ο΅²”UV)N…γ£O/h~a@T“Eπ 0T[D›=βΩl bprJΝΩDψϊ ΩJ}Κ7“‘T=. ² 6ξάožλ ΎmOίW7_ίφc‘)άwϊ†³ΛΉ)?Ή›Z::nV‹œ ³£J.ζ(‰ιP΄εGDέ:hƒ0™DΫΚfE]D;Žy.υ쒎NΜ΅uSaO Ž&pœQŽΆε’³f©1·υ{χN ΛKΑˆ»εή%n‘βuX9?’ΡΎ¬₯-KΤ!*u#,-ΞϋΘ½)Ξ°0U”œ”EΜΡlξδ»νΙ ΈΔι“Šžs{υRΩωΩ-{ν₯ƒO`ƒ(ΡiΉ ­š‘³ιŘN"=ξ]ΠΔ’%€€i@˜ sΣ+‚-˜ΣK‡‡»z,,υΠ­ϋ%¬JξΕΒ@ωδŠh7†XαRƒ…Ljλš”>Ϊ恏cYύ•U[™ΫΝUΒ΅/CšΟ•ECG2ΕXƒ@ !>€IΆΡΒΙ %Ε9—_φ¬―A§AΔ›•Ϗr}Υqd'‡]zμ7ββM`•*:ψήΛ+Œ$‰ž˜υNIeΚc>r.^’£€H'†Z§κn'ΈξκjέΧόό΅o{ΐ*υBί…ωΝn›ϊΡΎœ\y.Q’rHšH.’τˆαD€^<€½ŠΝ§Idpa’δ¦!§ΤΈ&Ρ7Z'ΖύαώT1}yVο°ΆRܝͺ/+gŠc ·„ν²qΟ‚―Θ{Ÿϋˆ±Έ‡Ψ?ρτC#γgϊΥ€θλ?δ·χ₯μ„…αεwxα{a 8†‘9ΐM—dUΛΝυμ;° gΏPŽίδmέΌK«―[YΏ]§ξqφβ =j―!κ½3υ¨½υ ΨmbcώΕ/OD{ν[<[n"?λjΝͺ{VΑ%O5Uρ3^T_₯ηξ₯;)H‰F'l[£v₯‰εlφŸ=\C٘5V.@Ε6qτΙ ο¬΅ΪV€Φ‚… &Λ°ˆ““tNS(ž]œΥk;ρ(Z*ςΏΛΟδͺfρƒ«qŽa:g#uVQ‹―m0 ρ#œ}­΅•ƒc΄->wx©WM¦Άi‚>a*ΛγϊB_Οόσ?όΟ?σΟΪ??Φό0cargo-0.66.0/benches/workspaces/toml-rs.tgz000066400000000000000000000033661432416201200206140ustar00rootroot00000000000000‹toml-rs.tarνY[oλ6Ξ³~… >nlσ~)ΆEΪ>Ψƒ]΄η­ΗxΪΪΨ’W’Σ“.ϊίw('©;MOΧ ŠΦΜƒ)iHΞπ›ω†Γ νz5ιϊΩ4ΈnΡ^έ„ώͺnR;ύwί6'j›βθϋά§ΨΧLi‘“Ψ§Œ~A.^‘mϋΑueyρm-Κ²ZΤCυi™»ψΠ/Ε§Κπ¨ŠDοœKke±Β8 F+N*+Cͺ.ΗqeΥΑΊ ]Γ¦t6Γy—[? 
νzζVπ!tuXm3vNWαΐŸςθjγ†%ϊ]vΏ<Ύ*~*.Ξν5Ϊ³/sψOσΣιΧx!ώ©ϊiό"Ε9ώ_‘}aM„&ΤΠΟ‹ΊεgeE§djuU¦5φ>¬έf^΄›‘n·Bα‘ΫBq]/ξΖξήLŽΜ~‘«oΰApχ2η›½± ά°νς˜ΙmW~ϋ~^lπt7pΥβ˜.Ώ«ξ5ͺζ8lγΒ΅[ΐΌpΫaΩvύ(ρ©§όςŽ{ΚΏg&ϊbŸŽ2?}ŽΓƒ`Ρv¨ι8,΄MͺΥeY‘ϊm¬›±Ώqj0©Χ›¬‘\ή…ΎΚzφ8αΈ)َ7eƒŸΠΜoΡΑΚqTΨ5±Œ°λ·©|χΟ·˜€Ά[»a€X¦z…kg™~θΐ­ϋiω―½©qκχΝ“σπa (‰ς‹εwΈ«nU³―ςνϊ«‡ΎΔ5ΖΕΚθWνϋ&ΉP―jœ P‘{y΄q·όήσ¨?κ³ # ΣχMUΔ6lTΙΦή>~θ§Θ&™Gͺb}/ΐ5U±lΧ°A|φ‡Ό˜#αφ„{ΚσbUhϊq·ίΌ›½Aμ—0aΩ·ίοΤΐ­Œ»ηoΏ~σΥΫ―§λ˜_nΪΎΪξφ£”Ωσr2•S“}΅»ξqut»5¬=άΉέύ0ι·5¦Δyρ‡ηΎ ³UνΌWηάωŸ¨3Ώ"ώ?{λι/α/4?8S©ΟψΏNώš₯=v–cvEŽ£ΥGdν§sMG©yqŸΝG^άM„œψς‰atBΜϊXd©ι΄ϊsόAJΪOW9v”]lΆ~UχyέδV=Ί8Ό8’Αύ ρcΈ3ρYMφwι±μδη6Μ2˜,£’Ι§tυΜ^NRO•—ψ œSτz0ybΗμ˸ўσΔ«σqΟνάΞνάώ¨νη‚"&cargo-0.66.0/build.rs000066400000000000000000000040101432416201200143310ustar00rootroot00000000000000use flate2::{Compression, GzBuilder}; use std::ffi::OsStr; use std::fs; use std::path::Path; use std::process::Command; fn main() { commit_info(); compress_man(); println!( "cargo:rustc-env=RUST_HOST_TARGET={}", std::env::var("TARGET").unwrap() ); } fn compress_man() { let out_path = Path::new(&std::env::var("OUT_DIR").unwrap()).join("man.tgz"); let dst = fs::File::create(out_path).unwrap(); let encoder = GzBuilder::new() .filename("man.tar") .write(dst, Compression::best()); let mut ar = tar::Builder::new(encoder); ar.mode(tar::HeaderMode::Deterministic); let mut add_files = |dir, extension| { let mut files = fs::read_dir(dir) .unwrap() .map(|e| e.unwrap().path()) .collect::>(); files.sort(); for path in files { if path.extension() != Some(extension) { continue; } println!("cargo:rerun-if-changed={}", path.display()); ar.append_path_with_name(&path, path.file_name().unwrap()) .unwrap(); } }; add_files(Path::new("src/etc/man"), OsStr::new("1")); add_files(Path::new("src/doc/man/generated_txt"), OsStr::new("txt")); let encoder = ar.into_inner().unwrap(); encoder.finish().unwrap(); } fn commit_info() { if !Path::new(".git").exists() { return; } let output = match Command::new("git") .arg("log") .arg("-1") .arg("--date=short") .arg("--format=%H %h %cd") 
.arg("--abbrev=9") .output() { Ok(output) if output.status.success() => output, _ => return, }; let stdout = String::from_utf8(output.stdout).unwrap(); let mut parts = stdout.split_whitespace(); let mut next = || parts.next().unwrap(); println!("cargo:rustc-env=CARGO_COMMIT_HASH={}", next()); println!("cargo:rustc-env=CARGO_COMMIT_SHORT_HASH={}", next()); println!("cargo:rustc-env=CARGO_COMMIT_DATE={}", next()) } cargo-0.66.0/ci/000077500000000000000000000000001432416201200132645ustar00rootroot00000000000000cargo-0.66.0/ci/dump-environment.sh000077500000000000000000000003051432416201200171300ustar00rootroot00000000000000#!/bin/bash # This script dumps information about the build environment to stdout. set -euo pipefail IFS=$'\n\t' echo "environment variables:" printenv | sort echo echo "disk usage:" df -h echo cargo-0.66.0/ci/fetch-smoke-test.sh000077500000000000000000000011251432416201200170040ustar00rootroot00000000000000#!/bin/bash # This script builds with static curl, and verifies that fetching works. set -ex if [[ -z "$RUNNER_TEMP" ]] then echo "RUNNER_TEMP must be set" exit 1 fi if [ ! -f Cargo.toml ]; then echo "Must be run from root of project." exit 1 fi # Building openssl on Windows is a pain. if [[ $(rustc -Vv | grep host:) != *windows* ]]; then FEATURES='vendored-openssl,curl-sys/static-curl,curl-sys/force-system-lib-on-osx' export LIBZ_SYS_STATIC=1 fi cargo build --features "$FEATURES" export CARGO_HOME=$RUNNER_TEMP/chome target/debug/cargo fetch rm -rf $CARGO_HOME cargo-0.66.0/ci/validate-man.sh000077500000000000000000000010251432416201200161630ustar00rootroot00000000000000#!/bin/bash # This script validates that there aren't any changes to the man pages. set -e cd src/doc changes=$(git status --porcelain) if [ -n "$changes" ] then echo "git directory must be clean before running this script." 
exit 1 fi ./build-man.sh changes=$(git status --porcelain) if [ -n "$changes" ] then echo "Detected changes in man pages:" echo "$changes" echo echo "Please run './build-man.sh' in the src/doc directory to rebuild the" echo "man pages, and commit the changes." exit 1 fi cargo-0.66.0/crates/000077500000000000000000000000001432416201200141525ustar00rootroot00000000000000cargo-0.66.0/crates/cargo-platform/000077500000000000000000000000001432416201200170675ustar00rootroot00000000000000cargo-0.66.0/crates/cargo-platform/Cargo.toml000066400000000000000000000005711432416201200210220ustar00rootroot00000000000000[package] name = "cargo-platform" version = "0.1.2" edition = "2021" license = "MIT OR Apache-2.0" homepage = "https://github.com/rust-lang/cargo" repository = "https://github.com/rust-lang/cargo" documentation = "https://docs.rs/cargo-platform" description = "Cargo's representation of a target platform." [dependencies] serde = { version = "1.0.82", features = ['derive'] } cargo-0.66.0/crates/cargo-platform/LICENSE-APACHE000077700000000000000000000000001432416201200233602../../LICENSE-APACHEustar00rootroot00000000000000cargo-0.66.0/crates/cargo-platform/LICENSE-MIT000077700000000000000000000000001432416201200226002../../LICENSE-MITustar00rootroot00000000000000cargo-0.66.0/crates/cargo-platform/examples/000077500000000000000000000000001432416201200207055ustar00rootroot00000000000000cargo-0.66.0/crates/cargo-platform/examples/matches.rs000066400000000000000000000027721432416201200227070ustar00rootroot00000000000000//! This example demonstrates how to filter a Platform based on the current //! host target. 
use cargo_platform::{Cfg, Platform}; use std::process::Command; use std::str::FromStr; static EXAMPLES: &[&str] = &[ "cfg(windows)", "cfg(unix)", "cfg(target_os=\"macos\")", "cfg(target_os=\"linux\")", "cfg(any(target_arch=\"x86\", target_arch=\"x86_64\"))", ]; fn main() { let target = get_target(); let cfgs = get_cfgs(); println!("host target={} cfgs:", target); for cfg in &cfgs { println!(" {}", cfg); } let mut examples: Vec<&str> = EXAMPLES.iter().copied().collect(); examples.push(target.as_str()); for example in examples { let p = Platform::from_str(example).unwrap(); println!("{:?} matches: {:?}", example, p.matches(&target, &cfgs)); } } fn get_target() -> String { let output = Command::new("rustc") .arg("-Vv") .output() .expect("rustc failed to run"); let stdout = String::from_utf8(output.stdout).unwrap(); for line in stdout.lines() { if line.starts_with("host: ") { return String::from(&line[6..]); } } panic!("Failed to find host: {}", stdout); } fn get_cfgs() -> Vec { let output = Command::new("rustc") .arg("--print=cfg") .output() .expect("rustc failed to run"); let stdout = String::from_utf8(output.stdout).unwrap(); stdout .lines() .map(|line| Cfg::from_str(line).unwrap()) .collect() } cargo-0.66.0/crates/cargo-platform/src/000077500000000000000000000000001432416201200176565ustar00rootroot00000000000000cargo-0.66.0/crates/cargo-platform/src/cfg.rs000066400000000000000000000227741432416201200207770ustar00rootroot00000000000000use crate::error::{ParseError, ParseErrorKind::*}; use std::fmt; use std::iter; use std::str::{self, FromStr}; /// A cfg expression. #[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Clone, Debug)] pub enum CfgExpr { Not(Box), All(Vec), Any(Vec), Value(Cfg), } /// A cfg value. #[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Clone, Debug)] pub enum Cfg { /// A named cfg value, like `unix`. Name(String), /// A key/value cfg pair, like `target_os = "linux"`. 
KeyPair(String, String), } #[derive(PartialEq)] enum Token<'a> { LeftParen, RightParen, Ident(&'a str), Comma, Equals, String(&'a str), } #[derive(Clone)] struct Tokenizer<'a> { s: iter::Peekable>, orig: &'a str, } struct Parser<'a> { t: Tokenizer<'a>, } impl FromStr for Cfg { type Err = ParseError; fn from_str(s: &str) -> Result { let mut p = Parser::new(s); let e = p.cfg()?; if let Some(rest) = p.rest() { return Err(ParseError::new( p.t.orig, UnterminatedExpression(rest.to_string()), )); } Ok(e) } } impl fmt::Display for Cfg { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { Cfg::Name(ref s) => s.fmt(f), Cfg::KeyPair(ref k, ref v) => write!(f, "{} = \"{}\"", k, v), } } } impl CfgExpr { /// Utility function to check if the key, "cfg(..)" matches the `target_cfg` pub fn matches_key(key: &str, target_cfg: &[Cfg]) -> bool { if key.starts_with("cfg(") && key.ends_with(')') { let cfg = &key[4..key.len() - 1]; CfgExpr::from_str(cfg) .ok() .map(|ce| ce.matches(target_cfg)) .unwrap_or(false) } else { false } } pub fn matches(&self, cfg: &[Cfg]) -> bool { match *self { CfgExpr::Not(ref e) => !e.matches(cfg), CfgExpr::All(ref e) => e.iter().all(|e| e.matches(cfg)), CfgExpr::Any(ref e) => e.iter().any(|e| e.matches(cfg)), CfgExpr::Value(ref e) => cfg.contains(e), } } } impl FromStr for CfgExpr { type Err = ParseError; fn from_str(s: &str) -> Result { let mut p = Parser::new(s); let e = p.expr()?; if let Some(rest) = p.rest() { return Err(ParseError::new( p.t.orig, UnterminatedExpression(rest.to_string()), )); } Ok(e) } } impl fmt::Display for CfgExpr { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { CfgExpr::Not(ref e) => write!(f, "not({})", e), CfgExpr::All(ref e) => write!(f, "all({})", CommaSep(e)), CfgExpr::Any(ref e) => write!(f, "any({})", CommaSep(e)), CfgExpr::Value(ref e) => write!(f, "{}", e), } } } struct CommaSep<'a, T>(&'a [T]); impl<'a, T: fmt::Display> fmt::Display for CommaSep<'a, T> { fn fmt(&self, f: &mut 
fmt::Formatter<'_>) -> fmt::Result { for (i, v) in self.0.iter().enumerate() { if i > 0 { write!(f, ", ")?; } write!(f, "{}", v)?; } Ok(()) } } impl<'a> Parser<'a> { fn new(s: &'a str) -> Parser<'a> { Parser { t: Tokenizer { s: s.char_indices().peekable(), orig: s, }, } } fn expr(&mut self) -> Result { match self.peek() { Some(Ok(Token::Ident(op @ "all"))) | Some(Ok(Token::Ident(op @ "any"))) => { self.t.next(); let mut e = Vec::new(); self.eat(&Token::LeftParen)?; while !self.r#try(&Token::RightParen) { e.push(self.expr()?); if !self.r#try(&Token::Comma) { self.eat(&Token::RightParen)?; break; } } if op == "all" { Ok(CfgExpr::All(e)) } else { Ok(CfgExpr::Any(e)) } } Some(Ok(Token::Ident("not"))) => { self.t.next(); self.eat(&Token::LeftParen)?; let e = self.expr()?; self.eat(&Token::RightParen)?; Ok(CfgExpr::Not(Box::new(e))) } Some(Ok(..)) => self.cfg().map(CfgExpr::Value), Some(Err(..)) => Err(self.t.next().unwrap().err().unwrap()), None => Err(ParseError::new( self.t.orig, IncompleteExpr("start of a cfg expression"), )), } } fn cfg(&mut self) -> Result { match self.t.next() { Some(Ok(Token::Ident(name))) => { let e = if self.r#try(&Token::Equals) { let val = match self.t.next() { Some(Ok(Token::String(s))) => s, Some(Ok(t)) => { return Err(ParseError::new( self.t.orig, UnexpectedToken { expected: "a string", found: t.classify(), }, )) } Some(Err(e)) => return Err(e), None => { return Err(ParseError::new(self.t.orig, IncompleteExpr("a string"))) } }; Cfg::KeyPair(name.to_string(), val.to_string()) } else { Cfg::Name(name.to_string()) }; Ok(e) } Some(Ok(t)) => Err(ParseError::new( self.t.orig, UnexpectedToken { expected: "identifier", found: t.classify(), }, )), Some(Err(e)) => Err(e), None => Err(ParseError::new(self.t.orig, IncompleteExpr("identifier"))), } } fn peek(&mut self) -> Option, ParseError>> { self.t.clone().next() } fn r#try(&mut self, token: &Token<'a>) -> bool { match self.peek() { Some(Ok(ref t)) if token == t => {} _ => return false, } 
self.t.next(); true } fn eat(&mut self, token: &Token<'a>) -> Result<(), ParseError> { match self.t.next() { Some(Ok(ref t)) if token == t => Ok(()), Some(Ok(t)) => Err(ParseError::new( self.t.orig, UnexpectedToken { expected: token.classify(), found: t.classify(), }, )), Some(Err(e)) => Err(e), None => Err(ParseError::new( self.t.orig, IncompleteExpr(token.classify()), )), } } /// Returns the rest of the input from the current location. fn rest(&self) -> Option<&str> { let mut s = self.t.s.clone(); loop { match s.next() { Some((_, ' ')) => {} Some((start, _ch)) => return Some(&self.t.orig[start..]), None => return None, } } } } impl<'a> Iterator for Tokenizer<'a> { type Item = Result, ParseError>; fn next(&mut self) -> Option, ParseError>> { loop { match self.s.next() { Some((_, ' ')) => {} Some((_, '(')) => return Some(Ok(Token::LeftParen)), Some((_, ')')) => return Some(Ok(Token::RightParen)), Some((_, ',')) => return Some(Ok(Token::Comma)), Some((_, '=')) => return Some(Ok(Token::Equals)), Some((start, '"')) => { while let Some((end, ch)) = self.s.next() { if ch == '"' { return Some(Ok(Token::String(&self.orig[start + 1..end]))); } } return Some(Err(ParseError::new(self.orig, UnterminatedString))); } Some((start, ch)) if is_ident_start(ch) => { while let Some(&(end, ch)) = self.s.peek() { if !is_ident_rest(ch) { return Some(Ok(Token::Ident(&self.orig[start..end]))); } else { self.s.next(); } } return Some(Ok(Token::Ident(&self.orig[start..]))); } Some((_, ch)) => { return Some(Err(ParseError::new(self.orig, UnexpectedChar(ch)))); } None => return None, } } } } fn is_ident_start(ch: char) -> bool { ch == '_' || ch.is_ascii_alphabetic() } fn is_ident_rest(ch: char) -> bool { is_ident_start(ch) || ch.is_ascii_digit() } impl<'a> Token<'a> { fn classify(&self) -> &'static str { match *self { Token::LeftParen => "`(`", Token::RightParen => "`)`", Token::Ident(..) => "an identifier", Token::Comma => "`,`", Token::Equals => "`=`", Token::String(..) 
=> "a string", } } } cargo-0.66.0/crates/cargo-platform/src/error.rs000066400000000000000000000034211432416201200213550ustar00rootroot00000000000000use std::fmt; #[derive(Debug)] pub struct ParseError { kind: ParseErrorKind, orig: String, } #[non_exhaustive] #[derive(Debug)] pub enum ParseErrorKind { UnterminatedString, UnexpectedChar(char), UnexpectedToken { expected: &'static str, found: &'static str, }, IncompleteExpr(&'static str), UnterminatedExpression(String), InvalidTarget(String), } impl fmt::Display for ParseError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "failed to parse `{}` as a cfg expression: {}", self.orig, self.kind ) } } impl fmt::Display for ParseErrorKind { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use ParseErrorKind::*; match self { UnterminatedString => write!(f, "unterminated string in cfg"), UnexpectedChar(ch) => write!( f, "unexpected character `{}` in cfg, expected parens, a comma, \ an identifier, or a string", ch ), UnexpectedToken { expected, found } => { write!(f, "expected {}, found {}", expected, found) } IncompleteExpr(expected) => { write!(f, "expected {}, but cfg expression ended", expected) } UnterminatedExpression(s) => { write!(f, "unexpected content `{}` found after cfg expression", s) } InvalidTarget(s) => write!(f, "invalid target specifier: {}", s), } } } impl std::error::Error for ParseError {} impl ParseError { pub fn new(orig: &str, kind: ParseErrorKind) -> ParseError { ParseError { kind, orig: orig.to_string(), } } } cargo-0.66.0/crates/cargo-platform/src/lib.rs000066400000000000000000000116701432416201200207770ustar00rootroot00000000000000//! Platform definition used by Cargo. //! //! This defines a [`Platform`] type which is used in Cargo to specify a target platform. //! There are two kinds, a named target like `x86_64-apple-darwin`, and a "cfg expression" //! like `cfg(any(target_os = "macos", target_os = "ios"))`. //! //! 
See `examples/matches.rs` for an example of how to match against a `Platform`. //! //! [`Platform`]: enum.Platform.html use std::fmt; use std::str::FromStr; mod cfg; mod error; pub use cfg::{Cfg, CfgExpr}; pub use error::{ParseError, ParseErrorKind}; /// Platform definition. #[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Clone, Debug)] pub enum Platform { /// A named platform, like `x86_64-apple-darwin`. Name(String), /// A cfg expression, like `cfg(windows)`. Cfg(CfgExpr), } impl Platform { /// Returns whether the Platform matches the given target and cfg. /// /// The named target and cfg values should be obtained from `rustc`. pub fn matches(&self, name: &str, cfg: &[Cfg]) -> bool { match *self { Platform::Name(ref p) => p == name, Platform::Cfg(ref p) => p.matches(cfg), } } fn validate_named_platform(name: &str) -> Result<(), ParseError> { if let Some(ch) = name .chars() .find(|&c| !(c.is_alphanumeric() || c == '_' || c == '-' || c == '.')) { if name.chars().any(|c| c == '(') { return Err(ParseError::new( name, ParseErrorKind::InvalidTarget( "unexpected `(` character, cfg expressions must start with `cfg(`" .to_string(), ), )); } return Err(ParseError::new( name, ParseErrorKind::InvalidTarget(format!( "unexpected character {} in target name", ch )), )); } Ok(()) } pub fn check_cfg_attributes(&self, warnings: &mut Vec) { fn check_cfg_expr(expr: &CfgExpr, warnings: &mut Vec) { match *expr { CfgExpr::Not(ref e) => check_cfg_expr(e, warnings), CfgExpr::All(ref e) | CfgExpr::Any(ref e) => { for e in e { check_cfg_expr(e, warnings); } } CfgExpr::Value(ref e) => match e { Cfg::Name(name) => match name.as_str() { "test" | "debug_assertions" | "proc_macro" => warnings.push(format!( "Found `{}` in `target.'cfg(...)'.dependencies`. \ This value is not supported for selecting dependencies \ and will not work as expected. 
\ To learn more visit \ https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#platform-specific-dependencies", name )), _ => (), }, Cfg::KeyPair(name, _) => if name.as_str() == "feature" { warnings.push(String::from( "Found `feature = ...` in `target.'cfg(...)'.dependencies`. \ This key is not supported for selecting dependencies \ and will not work as expected. \ Use the [features] section instead: \ https://doc.rust-lang.org/cargo/reference/features.html" )) }, } } } if let Platform::Cfg(cfg) = self { check_cfg_expr(cfg, warnings); } } } impl serde::Serialize for Platform { fn serialize(&self, s: S) -> Result where S: serde::Serializer, { self.to_string().serialize(s) } } impl<'de> serde::Deserialize<'de> for Platform { fn deserialize(deserializer: D) -> Result where D: serde::Deserializer<'de>, { let s = String::deserialize(deserializer)?; FromStr::from_str(&s).map_err(serde::de::Error::custom) } } impl FromStr for Platform { type Err = ParseError; fn from_str(s: &str) -> Result { if s.starts_with("cfg(") && s.ends_with(')') { let s = &s[4..s.len() - 1]; s.parse().map(Platform::Cfg) } else { Platform::validate_named_platform(s)?; Ok(Platform::Name(s.to_string())) } } } impl fmt::Display for Platform { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { Platform::Name(ref n) => n.fmt(f), Platform::Cfg(ref e) => write!(f, "cfg({})", e), } } } cargo-0.66.0/crates/cargo-platform/tests/000077500000000000000000000000001432416201200202315ustar00rootroot00000000000000cargo-0.66.0/crates/cargo-platform/tests/test_cfg.rs000066400000000000000000000166541432416201200224110ustar00rootroot00000000000000use cargo_platform::{Cfg, CfgExpr, Platform}; use std::fmt; use std::str::FromStr; macro_rules! c { ($a:ident) => { Cfg::Name(stringify!($a).to_string()) }; ($a:ident = $e:expr) => { Cfg::KeyPair(stringify!($a).to_string(), $e.to_string()) }; } macro_rules! 
e { (any($($t:tt),*)) => (CfgExpr::Any(vec![$(e!($t)),*])); (all($($t:tt),*)) => (CfgExpr::All(vec![$(e!($t)),*])); (not($($t:tt)*)) => (CfgExpr::Not(Box::new(e!($($t)*)))); (($($t:tt)*)) => (e!($($t)*)); ($($t:tt)*) => (CfgExpr::Value(c!($($t)*))); } fn good(s: &str, expected: T) where T: FromStr + PartialEq + fmt::Debug, T::Err: fmt::Display, { let c = match T::from_str(s) { Ok(c) => c, Err(e) => panic!("failed to parse `{}`: {}", s, e), }; assert_eq!(c, expected); } fn bad(s: &str, err: &str) where T: FromStr + fmt::Display, T::Err: fmt::Display, { let e = match T::from_str(s) { Ok(cfg) => panic!("expected `{}` to not parse but got {}", s, cfg), Err(e) => e.to_string(), }; assert!( e.contains(err), "when parsing `{}`,\n\"{}\" not contained \ inside: {}", s, err, e ); } #[test] fn cfg_syntax() { good("foo", c!(foo)); good("_bar", c!(_bar)); good(" foo", c!(foo)); good(" foo ", c!(foo)); good(" foo = \"bar\"", c!(foo = "bar")); good("foo=\"\"", c!(foo = "")); good(" foo=\"3\" ", c!(foo = "3")); good("foo = \"3 e\"", c!(foo = "3 e")); } #[test] fn cfg_syntax_bad() { bad::("", "but cfg expression ended"); bad::(" ", "but cfg expression ended"); bad::("\t", "unexpected character"); bad::("7", "unexpected character"); bad::("=", "expected identifier"); bad::(",", "expected identifier"); bad::("(", "expected identifier"); bad::("foo (", "unexpected content `(` found after cfg expression"); bad::("bar =", "expected a string"); bad::("bar = \"", "unterminated string"); bad::( "foo, bar", "unexpected content `, bar` found after cfg expression", ); } #[test] fn cfg_expr() { good("foo", e!(foo)); good("_bar", e!(_bar)); good(" foo", e!(foo)); good(" foo ", e!(foo)); good(" foo = \"bar\"", e!(foo = "bar")); good("foo=\"\"", e!(foo = "")); good(" foo=\"3\" ", e!(foo = "3")); good("foo = \"3 e\"", e!(foo = "3 e")); good("all()", e!(all())); good("all(a)", e!(all(a))); good("all(a, b)", e!(all(a, b))); good("all(a, )", e!(all(a))); good("not(a = \"b\")", e!(not(a = "b"))); 
good("not(all(a))", e!(not(all(a)))); } #[test] fn cfg_expr_bad() { bad::(" ", "but cfg expression ended"); bad::(" all", "expected `(`"); bad::("all(a", "expected `)`"); bad::("not", "expected `(`"); bad::("not(a", "expected `)`"); bad::("a = ", "expected a string"); bad::("all(not())", "expected identifier"); bad::( "foo(a)", "unexpected content `(a)` found after cfg expression", ); } #[test] fn cfg_matches() { assert!(e!(foo).matches(&[c!(bar), c!(foo), c!(baz)])); assert!(e!(any(foo)).matches(&[c!(bar), c!(foo), c!(baz)])); assert!(e!(any(foo, bar)).matches(&[c!(bar)])); assert!(e!(any(foo, bar)).matches(&[c!(foo)])); assert!(e!(all(foo, bar)).matches(&[c!(foo), c!(bar)])); assert!(e!(all(foo, bar)).matches(&[c!(foo), c!(bar)])); assert!(e!(not(foo)).matches(&[c!(bar)])); assert!(e!(not(foo)).matches(&[])); assert!(e!(any((not(foo)), (all(foo, bar)))).matches(&[c!(bar)])); assert!(e!(any((not(foo)), (all(foo, bar)))).matches(&[c!(foo), c!(bar)])); assert!(!e!(foo).matches(&[])); assert!(!e!(foo).matches(&[c!(bar)])); assert!(!e!(foo).matches(&[c!(fo)])); assert!(!e!(any(foo)).matches(&[])); assert!(!e!(any(foo)).matches(&[c!(bar)])); assert!(!e!(any(foo)).matches(&[c!(bar), c!(baz)])); assert!(!e!(all(foo)).matches(&[c!(bar), c!(baz)])); assert!(!e!(all(foo, bar)).matches(&[c!(bar)])); assert!(!e!(all(foo, bar)).matches(&[c!(foo)])); assert!(!e!(all(foo, bar)).matches(&[])); assert!(!e!(not(bar)).matches(&[c!(bar)])); assert!(!e!(not(bar)).matches(&[c!(baz), c!(bar)])); assert!(!e!(any((not(foo)), (all(foo, bar)))).matches(&[c!(foo)])); } #[test] fn bad_target_name() { bad::( "any(cfg(unix), cfg(windows))", "failed to parse `any(cfg(unix), cfg(windows))` as a cfg expression: \ invalid target specifier: unexpected `(` character, \ cfg expressions must start with `cfg(`", ); bad::( "!foo", "failed to parse `!foo` as a cfg expression: \ invalid target specifier: unexpected character ! 
in target name", ); } #[test] fn round_trip_platform() { fn rt(s: &str) { let p = Platform::from_str(s).unwrap(); let s2 = p.to_string(); let p2 = Platform::from_str(&s2).unwrap(); assert_eq!(p, p2); } rt("x86_64-apple-darwin"); rt("foo"); rt("cfg(windows)"); rt("cfg(target_os = \"windows\")"); rt( "cfg(any(all(any(target_os = \"android\", target_os = \"linux\"), \ any(target_arch = \"aarch64\", target_arch = \"arm\", target_arch = \"powerpc64\", \ target_arch = \"x86\", target_arch = \"x86_64\")), \ all(target_os = \"freebsd\", target_arch = \"x86_64\")))", ); } #[test] fn check_cfg_attributes() { fn ok(s: &str) { let p = Platform::Cfg(s.parse().unwrap()); let mut warnings = Vec::new(); p.check_cfg_attributes(&mut warnings); assert!( warnings.is_empty(), "Expected no warnings but got: {:?}", warnings, ); } fn warn(s: &str, names: &[&str]) { let p = Platform::Cfg(s.parse().unwrap()); let mut warnings = Vec::new(); p.check_cfg_attributes(&mut warnings); assert_eq!( warnings.len(), names.len(), "Expecter warnings about {:?} but got {:?}", names, warnings, ); for (name, warning) in names.iter().zip(warnings.iter()) { assert!( warning.contains(name), "Expected warning about '{}' but got: {}", name, warning, ); } } ok("unix"); ok("windows"); ok("any(not(unix), windows)"); ok("foo"); ok("target_arch = \"abc\""); ok("target_feature = \"abc\""); ok("target_os = \"abc\""); ok("target_family = \"abc\""); ok("target_env = \"abc\""); ok("target_endian = \"abc\""); ok("target_pointer_width = \"abc\""); ok("target_vendor = \"abc\""); ok("bar = \"def\""); warn("test", &["test"]); warn("debug_assertions", &["debug_assertions"]); warn("proc_macro", &["proc_macro"]); warn("feature = \"abc\"", &["feature"]); warn("any(not(debug_assertions), windows)", &["debug_assertions"]); warn( "any(not(feature = \"def\"), target_arch = \"abc\")", &["feature"], ); warn( "any(not(target_os = \"windows\"), proc_macro)", &["proc_macro"], ); warn( "any(not(feature = \"windows\"), proc_macro)", 
&["feature", "proc_macro"], ); warn( "all(not(debug_assertions), any(windows, proc_macro))", &["debug_assertions", "proc_macro"], ); } cargo-0.66.0/crates/cargo-test-macro/000077500000000000000000000000001432416201200173215ustar00rootroot00000000000000cargo-0.66.0/crates/cargo-test-macro/Cargo.toml000066400000000000000000000005161432416201200212530ustar00rootroot00000000000000[package] name = "cargo-test-macro" version = "0.1.0" edition = "2021" license = "MIT OR Apache-2.0" homepage = "https://github.com/rust-lang/cargo" repository = "https://github.com/rust-lang/cargo" documentation = "https://github.com/rust-lang/cargo" description = "Helper proc-macro for Cargo's testsuite." [lib] proc-macro = true cargo-0.66.0/crates/cargo-test-macro/src/000077500000000000000000000000001432416201200201105ustar00rootroot00000000000000cargo-0.66.0/crates/cargo-test-macro/src/lib.rs000066400000000000000000000201051432416201200212220ustar00rootroot00000000000000extern crate proc_macro; use proc_macro::*; use std::process::Command; use std::sync::Once; #[proc_macro_attribute] pub fn cargo_test(attr: TokenStream, item: TokenStream) -> TokenStream { // Ideally these options would be embedded in the test itself. However, I // find it very helpful to have the test clearly state whether or not it // is ignored. It would be nice to have some kind of runtime ignore // support (such as // https://internals.rust-lang.org/t/pre-rfc-skippable-tests/14611). // // Unfortunately a big drawback here is that if the environment changes // (such as the existence of the `git` CLI), this will not trigger a // rebuild and the test will still be ignored. In theory, something like // `tracked_env` or `tracked_path` // (https://github.com/rust-lang/rust/issues/99515) could help with this, // but they don't really handle the absence of files well. let mut ignore = false; let mut requires_reason = false; let mut explicit_reason = None; let mut implicit_reasons = Vec::new(); macro_rules! 
set_ignore { ($predicate:expr, $($arg:tt)*) => { let p = $predicate; ignore |= p; if p { implicit_reasons.push(std::fmt::format(format_args!($($arg)*))); } }; } let is_not_nightly = !version().1; for rule in split_rules(attr) { match rule.as_str() { "build_std_real" => { // Only run the "real" build-std tests on nightly and with an // explicit opt-in (these generally only work on linux, and // have some extra requirements, and are slow, and can pollute // the environment since it downloads dependencies). set_ignore!(is_not_nightly, "requires nightly"); set_ignore!( option_env!("CARGO_RUN_BUILD_STD_TESTS").is_none(), "CARGO_RUN_BUILD_STD_TESTS must be set" ); } "build_std_mock" => { // Only run the "mock" build-std tests on nightly and disable // for windows-gnu which is missing object files (see // https://github.com/rust-lang/wg-cargo-std-aware/issues/46). set_ignore!(is_not_nightly, "requires nightly"); set_ignore!( cfg!(all(target_os = "windows", target_env = "gnu")), "does not work on windows-gnu" ); } "nightly" => { requires_reason = true; set_ignore!(is_not_nightly, "requires nightly"); } s if s.starts_with("requires_") => { let command = &s[9..]; set_ignore!(!has_command(command), "{command} not installed"); } s if s.starts_with(">=1.") => { requires_reason = true; let min_minor = s[4..].parse().unwrap(); let minor = version().0; set_ignore!(minor < min_minor, "requires rustc 1.{minor} or newer"); } s if s.starts_with("reason=") => { explicit_reason = Some(s[7..].parse().unwrap()); } _ => panic!("unknown rule {:?}", rule), } } if requires_reason && explicit_reason.is_none() { panic!( "#[cargo_test] with a rule also requires a reason, \ such as #[cargo_test(nightly, reason = \"needs -Z unstable-thing\")]" ); } // Construct the appropriate attributes. 
let span = Span::call_site(); let mut ret = TokenStream::new(); let add_attr = |ret: &mut TokenStream, attr_name, attr_input| { ret.extend(Some(TokenTree::from(Punct::new('#', Spacing::Alone)))); let attr = TokenTree::from(Ident::new(attr_name, span)); let mut attr_stream: TokenStream = attr.into(); if let Some(input) = attr_input { attr_stream.extend(input); } ret.extend(Some(TokenTree::from(Group::new( Delimiter::Bracket, attr_stream, )))); }; add_attr(&mut ret, "test", None); if ignore { let reason = explicit_reason .or_else(|| { (!implicit_reasons.is_empty()) .then(|| TokenTree::from(Literal::string(&implicit_reasons.join(", "))).into()) }) .map(|reason: TokenStream| { let mut stream = TokenStream::new(); stream.extend(Some(TokenTree::from(Punct::new('=', Spacing::Alone)))); stream.extend(Some(reason)); stream }); add_attr(&mut ret, "ignore", reason); } // Find where the function body starts, and add the boilerplate at the start. for token in item { let group = match token { TokenTree::Group(g) => { if g.delimiter() == Delimiter::Brace { g } else { ret.extend(Some(TokenTree::Group(g))); continue; } } other => { ret.extend(Some(other)); continue; } }; let mut new_body = to_token_stream( r#"let _test_guard = { let tmp_dir = option_env!("CARGO_TARGET_TMPDIR"); cargo_test_support::paths::init_root(tmp_dir) };"#, ); new_body.extend(group.stream()); ret.extend(Some(TokenTree::from(Group::new( group.delimiter(), new_body, )))); } ret } fn split_rules(t: TokenStream) -> Vec { let tts: Vec<_> = t.into_iter().collect(); tts.split(|tt| match tt { TokenTree::Punct(p) => p.as_char() == ',', _ => false, }) .filter(|parts| !parts.is_empty()) .map(|parts| { parts .into_iter() .map(|part| part.to_string()) .collect::() }) .collect() } fn to_token_stream(code: &str) -> TokenStream { code.parse().unwrap() } static mut VERSION: (u32, bool) = (0, false); fn version() -> &'static (u32, bool) { static INIT: Once = Once::new(); INIT.call_once(|| { let output = Command::new("rustc") 
.arg("-V") .output() .expect("rustc should run"); let stdout = std::str::from_utf8(&output.stdout).expect("utf8"); let vers = stdout.split_whitespace().skip(1).next().unwrap(); let is_nightly = option_env!("CARGO_TEST_DISABLE_NIGHTLY").is_none() && (vers.contains("-nightly") || vers.contains("-dev")); let minor = vers.split('.').skip(1).next().unwrap().parse().unwrap(); unsafe { VERSION = (minor, is_nightly) } }); unsafe { &VERSION } } fn has_command(command: &str) -> bool { let output = match Command::new(command).arg("--version").output() { Ok(output) => output, Err(e) => { // hg is not installed on GitHub macOS or certain constrained // environments like Docker. Consider installing it if Cargo gains // more hg support, but otherwise it isn't critical. if is_ci() && command != "hg" { panic!( "expected command `{}` to be somewhere in PATH: {}", command, e ); } return false; } }; if !output.status.success() { panic!( "expected command `{}` to be runnable, got error {}:\n\ stderr:{}\n\ stdout:{}\n", command, output.status, String::from_utf8_lossy(&output.stderr), String::from_utf8_lossy(&output.stdout) ); } true } /// Whether or not this running in a Continuous Integration environment. fn is_ci() -> bool { // Consider using `tracked_env` instead of option_env! when it is stabilized. // `tracked_env` will handle changes, but not require rebuilding the macro // itself like option_env does. 
option_env!("CI").is_some() || option_env!("TF_BUILD").is_some() } cargo-0.66.0/crates/cargo-test-support/000077500000000000000000000000001432416201200177345ustar00rootroot00000000000000cargo-0.66.0/crates/cargo-test-support/Cargo.toml000066400000000000000000000013741432416201200216710ustar00rootroot00000000000000[package] name = "cargo-test-support" version = "0.1.0" license = "MIT OR Apache-2.0" edition = "2021" [lib] doctest = false [dependencies] anyhow = "1.0.34" cargo-test-macro = { path = "../cargo-test-macro" } cargo-util = { path = "../cargo-util" } snapbox = { version = "0.3.0", features = ["diff", "path"] } filetime = "0.2" flate2 = { version = "1.0", default-features = false, features = ["zlib"] } git2 = "0.15.0" glob = "0.3" itertools = "0.10.0" lazy_static = "1.0" remove_dir_all = "0.5" serde_json = "1.0" tar = { version = "0.4.38", default-features = false } termcolor = "1.1.2" toml_edit = { version = "0.14.3", features = ["serde", "easy", "perf"] } url = "2.2.2" [target.'cfg(windows)'.dependencies] winapi = "0.3" [features] deny-warnings = [] cargo-0.66.0/crates/cargo-test-support/build.rs000066400000000000000000000001661432416201200214040ustar00rootroot00000000000000fn main() { println!( "cargo:rustc-env=NATIVE_ARCH={}", std::env::var("TARGET").unwrap() ); } cargo-0.66.0/crates/cargo-test-support/src/000077500000000000000000000000001432416201200205235ustar00rootroot00000000000000cargo-0.66.0/crates/cargo-test-support/src/compare.rs000066400000000000000000000525571432416201200225350ustar00rootroot00000000000000//! Routines for comparing and diffing output. //! //! # Patterns //! //! Many of these functions support special markup to assist with comparing //! text that may vary or is otherwise uninteresting for the test at hand. The //! supported patterns are: //! //! - `[..]` is a wildcard that matches 0 or more characters on the same line //! (similar to `.*` in a regex). It is non-greedy. //! 
- `[EXE]` optionally adds `.exe` on Windows (empty string on other //! platforms). //! - `[ROOT]` is the path to the test directory's root. //! - `[CWD]` is the working directory of the process that was run. //! - There is a wide range of substitutions (such as `[COMPILING]` or //! `[WARNING]`) to match cargo's "status" output and allows you to ignore //! the alignment. See the source of `substitute_macros` for a complete list //! of substitutions. //! //! # Normalization //! //! In addition to the patterns described above, the strings are normalized //! in such a way to avoid unwanted differences. The normalizations are: //! //! - Raw tab characters are converted to the string ``. This is helpful //! so that raw tabs do not need to be written in the expected string, and //! to avoid confusion of tabs vs spaces. //! - Backslashes are converted to forward slashes to deal with Windows paths. //! This helps so that all tests can be written assuming forward slashes. //! Other heuristics are applied to try to ensure Windows-style paths aren't //! a problem. //! - Carriage returns are removed, which can help when running on Windows. 
use crate::diff; use crate::paths; use anyhow::{bail, Context, Result}; use serde_json::Value; use std::env; use std::fmt; use std::path::Path; use std::str; use url::Url; /// Default `snapbox` Assertions /// /// # Snapshots /// /// Updating of snapshots is controlled with the `SNAPSHOTS` environment variable: /// /// - `skip`: do not run the tests /// - `ignore`: run the tests but ignore their failure /// - `verify`: run the tests /// - `overwrite`: update the snapshots based on the output of the tests /// /// # Patterns /// /// - `[..]` is a character wildcard, stopping at line breaks /// - `\n...\n` is a multi-line wildcard /// - `[EXE]` matches the exe suffix for the current platform /// - `[ROOT]` matches [`paths::root()`][crate::paths::root] /// - `[ROOTURL]` matches [`paths::root()`][crate::paths::root] as a URL /// /// # Normalization /// /// In addition to the patterns described above, text is normalized /// in such a way to avoid unwanted differences. The normalizations are: /// /// - Backslashes are converted to forward slashes to deal with Windows paths. /// This helps so that all tests can be written assuming forward slashes. /// Other heuristics are applied to try to ensure Windows-style paths aren't /// a problem. /// - Carriage returns are removed, which can help when running on Windows. 
pub fn assert_ui() -> snapbox::Assert { let root = paths::root(); // Use `from_file_path` instead of `from_dir_path` so the trailing slash is // put in the users output, rather than hidden in the variable let root_url = url::Url::from_file_path(&root).unwrap().to_string(); let root = root.display().to_string(); let mut subs = snapbox::Substitutions::new(); subs.extend([ ( "[EXE]", std::borrow::Cow::Borrowed(std::env::consts::EXE_SUFFIX), ), ("[ROOT]", std::borrow::Cow::Owned(root)), ("[ROOTURL]", std::borrow::Cow::Owned(root_url)), ]) .unwrap(); snapbox::Assert::new() .action_env(snapbox::DEFAULT_ACTION_ENV) .substitutions(subs) } /// Normalizes the output so that it can be compared against the expected value. fn normalize_actual(actual: &str, cwd: Option<&Path>) -> String { // It's easier to read tabs in outputs if they don't show up as literal // hidden characters let actual = actual.replace('\t', ""); if cfg!(windows) { // Let's not deal with \r\n vs \n on windows... let actual = actual.replace('\r', ""); normalize_windows(&actual, cwd) } else { actual } } /// Normalizes the expected string so that it can be compared against the actual output. fn normalize_expected(expected: &str, cwd: Option<&Path>) -> String { let expected = substitute_macros(expected); if cfg!(windows) { normalize_windows(&expected, cwd) } else { let expected = match cwd { None => expected, Some(cwd) => expected.replace("[CWD]", &cwd.display().to_string()), }; let expected = expected.replace("[ROOT]", &paths::root().display().to_string()); expected } } /// Normalizes text for both actual and expected strings on Windows. fn normalize_windows(text: &str, cwd: Option<&Path>) -> String { // Let's not deal with / vs \ (windows...) let text = text.replace('\\', "/"); // Weirdness for paths on Windows extends beyond `/` vs `\` apparently. // Namely paths like `c:\` and `C:\` are equivalent and that can cause // issues. 
The return value of `env::current_dir()` may return a // lowercase drive name, but we round-trip a lot of values through `Url` // which will auto-uppercase the drive name. To just ignore this // distinction we try to canonicalize as much as possible, taking all // forms of a path and canonicalizing them to one. let replace_path = |s: &str, path: &Path, with: &str| { let path_through_url = Url::from_file_path(path).unwrap().to_file_path().unwrap(); let path1 = path.display().to_string().replace('\\', "/"); let path2 = path_through_url.display().to_string().replace('\\', "/"); s.replace(&path1, with) .replace(&path2, with) .replace(with, &path1) }; let text = match cwd { None => text, Some(p) => replace_path(&text, p, "[CWD]"), }; // Similar to cwd above, perform similar treatment to the root path // which in theory all of our paths should otherwise get rooted at. let root = paths::root(); let text = replace_path(&text, &root, "[ROOT]"); text } fn substitute_macros(input: &str) -> String { let macros = [ ("[RUNNING]", " Running"), ("[COMPILING]", " Compiling"), ("[CHECKING]", " Checking"), ("[COMPLETED]", " Completed"), ("[CREATED]", " Created"), ("[FINISHED]", " Finished"), ("[ERROR]", "error:"), ("[WARNING]", "warning:"), ("[NOTE]", "note:"), ("[HELP]", "help:"), ("[DOCUMENTING]", " Documenting"), ("[FRESH]", " Fresh"), ("[UPDATING]", " Updating"), ("[ADDING]", " Adding"), ("[REMOVING]", " Removing"), ("[DOCTEST]", " Doc-tests"), ("[PACKAGING]", " Packaging"), ("[DOWNLOADING]", " Downloading"), ("[DOWNLOADED]", " Downloaded"), ("[UPLOADING]", " Uploading"), ("[VERIFYING]", " Verifying"), ("[ARCHIVING]", " Archiving"), ("[INSTALLING]", " Installing"), ("[REPLACING]", " Replacing"), ("[UNPACKING]", " Unpacking"), ("[SUMMARY]", " Summary"), ("[FIXED]", " Fixed"), ("[FIXING]", " Fixing"), ("[EXE]", env::consts::EXE_SUFFIX), ("[IGNORED]", " Ignored"), ("[INSTALLED]", " Installed"), ("[REPLACED]", " Replaced"), ("[BUILDING]", " Building"), ("[LOGIN]", " Login"), 
("[LOGOUT]", " Logout"), ("[YANK]", " Yank"), ("[OWNER]", " Owner"), ("[MIGRATING]", " Migrating"), ("[EXECUTABLE]", " Executable"), ("[SKIPPING]", " Skipping"), ]; let mut result = input.to_owned(); for &(pat, subst) in ¯os { result = result.replace(pat, subst) } result } /// Compares one string against another, checking that they both match. /// /// See [Patterns](index.html#patterns) for more information on pattern matching. /// /// - `description` explains where the output is from (usually "stdout" or "stderr"). /// - `other_output` is other output to display in the error (usually stdout or stderr). pub fn match_exact( expected: &str, actual: &str, description: &str, other_output: &str, cwd: Option<&Path>, ) -> Result<()> { let expected = normalize_expected(expected, cwd); let actual = normalize_actual(actual, cwd); let e: Vec<_> = expected.lines().map(WildStr::new).collect(); let a: Vec<_> = actual.lines().map(WildStr::new).collect(); if e == a { return Ok(()); } let diff = diff::colored_diff(&e, &a); bail!( "{} did not match:\n\ {}\n\n\ other output:\n\ {}\n", description, diff, other_output, ); } /// Convenience wrapper around [`match_exact`] which will panic on error. #[track_caller] pub fn assert_match_exact(expected: &str, actual: &str) { if let Err(e) = match_exact(expected, actual, "", "", None) { crate::panic_error("", e); } } /// Checks that the given string contains the given lines, ignoring the order /// of the lines. /// /// See [Patterns](index.html#patterns) for more information on pattern matching. pub fn match_unordered(expected: &str, actual: &str, cwd: Option<&Path>) -> Result<()> { let expected = normalize_expected(expected, cwd); let actual = normalize_actual(actual, cwd); let e: Vec<_> = expected.lines().map(|line| WildStr::new(line)).collect(); let mut a: Vec<_> = actual.lines().map(|line| WildStr::new(line)).collect(); // match more-constrained lines first, although in theory we'll // need some sort of recursive match here. 
This handles the case // that you expect "a\n[..]b" and two lines are printed out, // "ab\n"a", where technically we do match unordered but a naive // search fails to find this. This simple sort at least gets the // test suite to pass for now, but we may need to get more fancy // if tests start failing again. a.sort_by_key(|s| s.line.len()); let mut changes = Vec::new(); let mut a_index = 0; let mut failure = false; use crate::diff::Change; for (e_i, e_line) in e.into_iter().enumerate() { match a.iter().position(|a_line| e_line == *a_line) { Some(index) => { let a_line = a.remove(index); changes.push(Change::Keep(e_i, index, a_line)); a_index += 1; } None => { failure = true; changes.push(Change::Remove(e_i, e_line)); } } } for unmatched in a { failure = true; changes.push(Change::Add(a_index, unmatched)); a_index += 1; } if failure { bail!( "Expected lines did not match (ignoring order):\n{}\n", diff::render_colored_changes(&changes) ); } else { Ok(()) } } /// Checks that the given string contains the given contiguous lines /// somewhere. /// /// See [Patterns](index.html#patterns) for more information on pattern matching. pub fn match_contains(expected: &str, actual: &str, cwd: Option<&Path>) -> Result<()> { let expected = normalize_expected(expected, cwd); let actual = normalize_actual(actual, cwd); let e: Vec<_> = expected.lines().map(|line| WildStr::new(line)).collect(); let a: Vec<_> = actual.lines().map(|line| WildStr::new(line)).collect(); if e.len() == 0 { bail!("expected length must not be zero"); } for window in a.windows(e.len()) { if window == e { return Ok(()); } } bail!( "expected to find:\n\ {}\n\n\ did not find in output:\n\ {}", expected, actual ); } /// Checks that the given string does not contain the given contiguous lines /// anywhere. /// /// See [Patterns](index.html#patterns) for more information on pattern matching. 
pub fn match_does_not_contain(expected: &str, actual: &str, cwd: Option<&Path>) -> Result<()> { if match_contains(expected, actual, cwd).is_ok() { bail!( "expected not to find:\n\ {}\n\n\ but found in output:\n\ {}", expected, actual ); } else { Ok(()) } } /// Checks that the given string contains the given contiguous lines /// somewhere, and should be repeated `number` times. /// /// See [Patterns](index.html#patterns) for more information on pattern matching. pub fn match_contains_n( expected: &str, number: usize, actual: &str, cwd: Option<&Path>, ) -> Result<()> { let expected = normalize_expected(expected, cwd); let actual = normalize_actual(actual, cwd); let e: Vec<_> = expected.lines().map(|line| WildStr::new(line)).collect(); let a: Vec<_> = actual.lines().map(|line| WildStr::new(line)).collect(); if e.len() == 0 { bail!("expected length must not be zero"); } let matches = a.windows(e.len()).filter(|window| *window == e).count(); if matches == number { Ok(()) } else { bail!( "expected to find {} occurrences of:\n\ {}\n\n\ but found {} matches in the output:\n\ {}", number, expected, matches, actual ) } } /// Checks that the given string has a line that contains the given patterns, /// and that line also does not contain the `without` patterns. /// /// See [Patterns](index.html#patterns) for more information on pattern matching. /// /// See [`crate::Execs::with_stderr_line_without`] for an example and cautions /// against using. 
pub fn match_with_without( actual: &str, with: &[String], without: &[String], cwd: Option<&Path>, ) -> Result<()> { let actual = normalize_actual(actual, cwd); let norm = |s: &String| format!("[..]{}[..]", normalize_expected(s, cwd)); let with: Vec<_> = with.iter().map(norm).collect(); let without: Vec<_> = without.iter().map(norm).collect(); let with_wild: Vec<_> = with.iter().map(|w| WildStr::new(w)).collect(); let without_wild: Vec<_> = without.iter().map(|w| WildStr::new(w)).collect(); let matches: Vec<_> = actual .lines() .map(WildStr::new) .filter(|line| with_wild.iter().all(|with| with == line)) .filter(|line| !without_wild.iter().any(|without| without == line)) .collect(); match matches.len() { 0 => bail!( "Could not find expected line in output.\n\ With contents: {:?}\n\ Without contents: {:?}\n\ Actual stderr:\n\ {}\n", with, without, actual ), 1 => Ok(()), _ => bail!( "Found multiple matching lines, but only expected one.\n\ With contents: {:?}\n\ Without contents: {:?}\n\ Matching lines:\n\ {}\n", with, without, itertools::join(matches, "\n") ), } } /// Checks that the given string of JSON objects match the given set of /// expected JSON objects. /// /// See [`crate::Execs::with_json`] for more details. pub fn match_json(expected: &str, actual: &str, cwd: Option<&Path>) -> Result<()> { let (exp_objs, act_objs) = collect_json_objects(expected, actual)?; if exp_objs.len() != act_objs.len() { bail!( "expected {} json lines, got {}, stdout:\n{}", exp_objs.len(), act_objs.len(), actual ); } for (exp_obj, act_obj) in exp_objs.iter().zip(act_objs) { find_json_mismatch(exp_obj, &act_obj, cwd)?; } Ok(()) } /// Checks that the given string of JSON objects match the given set of /// expected JSON objects, ignoring their order. /// /// See [`crate::Execs::with_json_contains_unordered`] for more details and /// cautions when using. 
pub fn match_json_contains_unordered( expected: &str, actual: &str, cwd: Option<&Path>, ) -> Result<()> { let (exp_objs, mut act_objs) = collect_json_objects(expected, actual)?; for exp_obj in exp_objs { match act_objs .iter() .position(|act_obj| find_json_mismatch(&exp_obj, act_obj, cwd).is_ok()) { Some(index) => act_objs.remove(index), None => { bail!( "Did not find expected JSON:\n\ {}\n\ Remaining available output:\n\ {}\n", serde_json::to_string_pretty(&exp_obj).unwrap(), itertools::join( act_objs.iter().map(|o| serde_json::to_string(o).unwrap()), "\n" ) ); } }; } Ok(()) } fn collect_json_objects( expected: &str, actual: &str, ) -> Result<(Vec, Vec)> { let expected_objs: Vec<_> = expected .split("\n\n") .map(|expect| { expect .parse() .with_context(|| format!("failed to parse expected JSON object:\n{}", expect)) }) .collect::>()?; let actual_objs: Vec<_> = actual .lines() .filter(|line| line.starts_with('{')) .map(|line| { line.parse() .with_context(|| format!("failed to parse JSON object:\n{}", line)) }) .collect::>()?; Ok((expected_objs, actual_objs)) } /// Compares JSON object for approximate equality. /// You can use `[..]` wildcard in strings (useful for OS-dependent things such /// as paths). You can use a `"{...}"` string literal as a wildcard for /// arbitrary nested JSON (useful for parts of object emitted by other programs /// (e.g., rustc) rather than Cargo itself). 
pub fn find_json_mismatch(expected: &Value, actual: &Value, cwd: Option<&Path>) -> Result<()> { match find_json_mismatch_r(expected, actual, cwd) { Some((expected_part, actual_part)) => bail!( "JSON mismatch\nExpected:\n{}\nWas:\n{}\nExpected part:\n{}\nActual part:\n{}\n", serde_json::to_string_pretty(expected).unwrap(), serde_json::to_string_pretty(&actual).unwrap(), serde_json::to_string_pretty(expected_part).unwrap(), serde_json::to_string_pretty(actual_part).unwrap(), ), None => Ok(()), } } fn find_json_mismatch_r<'a>( expected: &'a Value, actual: &'a Value, cwd: Option<&Path>, ) -> Option<(&'a Value, &'a Value)> { use serde_json::Value::*; match (expected, actual) { (&Number(ref l), &Number(ref r)) if l == r => None, (&Bool(l), &Bool(r)) if l == r => None, (&String(ref l), _) if l == "{...}" => None, (&String(ref l), &String(ref r)) => { if match_exact(l, r, "", "", cwd).is_err() { Some((expected, actual)) } else { None } } (&Array(ref l), &Array(ref r)) => { if l.len() != r.len() { return Some((expected, actual)); } l.iter() .zip(r.iter()) .filter_map(|(l, r)| find_json_mismatch_r(l, r, cwd)) .next() } (&Object(ref l), &Object(ref r)) => { let same_keys = l.len() == r.len() && l.keys().all(|k| r.contains_key(k)); if !same_keys { return Some((expected, actual)); } l.values() .zip(r.values()) .filter_map(|(l, r)| find_json_mismatch_r(l, r, cwd)) .next() } (&Null, &Null) => None, // Magic string literal `"{...}"` acts as wildcard for any sub-JSON. _ => Some((expected, actual)), } } /// A single line string that supports `[..]` wildcard matching. 
pub struct WildStr<'a> { has_meta: bool, line: &'a str, } impl<'a> WildStr<'a> { pub fn new(line: &'a str) -> WildStr<'a> { WildStr { has_meta: line.contains("[..]"), line, } } } impl<'a> PartialEq for WildStr<'a> { fn eq(&self, other: &Self) -> bool { match (self.has_meta, other.has_meta) { (false, false) => self.line == other.line, (true, false) => meta_cmp(self.line, other.line), (false, true) => meta_cmp(other.line, self.line), (true, true) => panic!("both lines cannot have [..]"), } } } fn meta_cmp(a: &str, mut b: &str) -> bool { for (i, part) in a.split("[..]").enumerate() { match b.find(part) { Some(j) => { if i == 0 && j != 0 { return false; } b = &b[j + part.len()..]; } None => return false, } } b.is_empty() || a.ends_with("[..]") } impl fmt::Display for WildStr<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(&self.line) } } impl fmt::Debug for WildStr<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{:?}", self.line) } } #[test] fn wild_str_cmp() { for (a, b) in &[ ("a b", "a b"), ("a[..]b", "a b"), ("a[..]", "a b"), ("[..]", "a b"), ("[..]b", "a b"), ] { assert_eq!(WildStr::new(a), WildStr::new(b)); } for (a, b) in &[("[..]b", "c"), ("b", "c"), ("b", "cb")] { assert_ne!(WildStr::new(a), WildStr::new(b)); } } cargo-0.66.0/crates/cargo-test-support/src/cross_compile.rs000066400000000000000000000204721432416201200237370ustar00rootroot00000000000000//! Support for cross-compile tests with the `--target` flag. //! //! Note that cross-testing is very limited. You need to install the //! "alternate" target to the host (32-bit for 64-bit hosts or vice-versa). //! //! Set CFG_DISABLE_CROSS_TESTS=1 environment variable to disable these tests //! if you are unable to use the alternate target. Unfortunately 32-bit //! support on macOS is going away, so macOS users are out of luck. //! //! These tests are all disabled on rust-lang/rust's CI, but run in Cargo's CI. 
use crate::{basic_manifest, main_file, project}; use cargo_util::ProcessError; use std::env; use std::fmt::Write; use std::process::{Command, Output}; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Once; /// Whether or not the resulting cross binaries can run on the host. static CAN_RUN_ON_HOST: AtomicBool = AtomicBool::new(false); pub fn disabled() -> bool { // First, disable if requested. match env::var("CFG_DISABLE_CROSS_TESTS") { Ok(ref s) if *s == "1" => return true, _ => {} } // Cross tests are only tested to work on macos, linux, and MSVC windows. if !(cfg!(target_os = "macos") || cfg!(target_os = "linux") || cfg!(target_env = "msvc")) { return true; } // It's not particularly common to have a cross-compilation setup, so // try to detect that before we fail a bunch of tests through no fault // of the user. static CAN_BUILD_CROSS_TESTS: AtomicBool = AtomicBool::new(false); static CHECK: Once = Once::new(); let cross_target = alternate(); let run_cross_test = || -> anyhow::Result { let p = project() .at("cross_test") .file("Cargo.toml", &basic_manifest("cross_test", "1.0.0")) .file("src/main.rs", &main_file(r#""testing!""#, &[])) .build(); let build_result = p .cargo("build --target") .arg(&cross_target) .exec_with_output(); if build_result.is_ok() { CAN_BUILD_CROSS_TESTS.store(true, Ordering::SeqCst); } let result = p .cargo("run --target") .arg(&cross_target) .exec_with_output(); if result.is_ok() { CAN_RUN_ON_HOST.store(true, Ordering::SeqCst); } build_result }; CHECK.call_once(|| { drop(run_cross_test()); }); if CAN_BUILD_CROSS_TESTS.load(Ordering::SeqCst) { // We were able to compile a simple project, so the user has the // necessary `std::` bits installed. Therefore, tests should not // be disabled. return false; } // We can't compile a simple cross project. We want to warn the user // by failing a single test and having the remainder of the cross tests // pass. 
We don't use `std::sync::Once` here because panicking inside its // `call_once` method would poison the `Once` instance, which is not what // we want. static HAVE_WARNED: AtomicBool = AtomicBool::new(false); if HAVE_WARNED.swap(true, Ordering::SeqCst) { // We are some other test and somebody else is handling the warning. // Just disable the current test. return true; } // We are responsible for warning the user, which we do by panicking. let mut message = format!( " Cannot cross compile to {}. This failure can be safely ignored. If you would prefer to not see this failure, you can set the environment variable CFG_DISABLE_CROSS_TESTS to \"1\". Alternatively, you can install the necessary libraries to enable cross compilation tests. Cross compilation tests depend on your host platform. ", cross_target ); if cfg!(target_os = "linux") { message.push_str( " Linux cross tests target i686-unknown-linux-gnu, which requires the ability to build and run 32-bit targets. This requires the 32-bit libraries to be installed. For example, on Ubuntu, run `sudo apt install gcc-multilib` to install the necessary libraries. ", ); } else if cfg!(target_os = "macos") { message.push_str( " macOS cross tests target x86_64-apple-ios, which requires the iOS SDK to be installed. This should be included with Xcode automatically. If you are using the Xcode command line tools, you'll need to install the full Xcode app (from the Apple App Store), and switch to it with this command: sudo xcode-select --switch /Applications/Xcode.app/Contents/Developer Some cross-tests want to *run* the executables on the host. These tests will be ignored if this is not possible. On macOS, this means you need an iOS simulator installed to run these tests. To install a simulator, open Xcode, go to preferences > Components, and download the latest iOS simulator. 
", ); } else if cfg!(target_os = "windows") { message.push_str( " Windows cross tests target i686-pc-windows-msvc, which requires the ability to build and run 32-bit targets. This should work automatically if you have properly installed Visual Studio build tools. ", ); } else { // The check at the top should prevent this. panic!("platform should have been skipped"); } let rustup_available = Command::new("rustup").output().is_ok(); if rustup_available { write!( message, " Make sure that the appropriate `rustc` target is installed with rustup: rustup target add {} ", cross_target ) .unwrap(); } else { write!( message, " rustup does not appear to be installed. Make sure that the appropriate `rustc` target is installed for the target `{}`. ", cross_target ) .unwrap(); } // Show the actual error message. match run_cross_test() { Ok(_) => message.push_str("\nUh oh, second run succeeded?\n"), Err(err) => match err.downcast_ref::() { Some(proc_err) => write!(message, "\nTest error: {}\n", proc_err).unwrap(), None => write!(message, "\nUnexpected non-process error: {}\n", err).unwrap(), }, } panic!("{}", message); } /// The arch triple of the test-running host. pub fn native() -> &'static str { env!("NATIVE_ARCH") } pub fn native_arch() -> &'static str { match native() .split("-") .next() .expect("Target triple has unexpected format") { "x86_64" => "x86_64", "aarch64" => "aarch64", "i686" => "x86", _ => panic!("This test should be gated on cross_compile::disabled."), } } /// The alternate target-triple to build with. /// /// Only use this function on tests that check `cross_compile::disabled`. 
pub fn alternate() -> &'static str { if cfg!(all(target_os = "macos", target_arch = "aarch64")) { "x86_64-apple-darwin" } else if cfg!(target_os = "macos") { "x86_64-apple-ios" } else if cfg!(target_os = "linux") { "i686-unknown-linux-gnu" } else if cfg!(all(target_os = "windows", target_env = "msvc")) { "i686-pc-windows-msvc" } else if cfg!(all(target_os = "windows", target_env = "gnu")) { "i686-pc-windows-gnu" } else { panic!("This test should be gated on cross_compile::disabled."); } } pub fn alternate_arch() -> &'static str { if cfg!(target_os = "macos") { "x86_64" } else { "x86" } } /// A target-triple that is neither the host nor the target. /// /// Rustc may not work with it and it's alright, apart from being a /// valid target triple it is supposed to be used only as a /// placeholder for targets that should not be considered. pub fn unused() -> &'static str { "wasm32-unknown-unknown" } /// Whether or not the host can run cross-compiled executables. pub fn can_run_on_host() -> bool { if disabled() { return false; } // macos is currently configured to cross compile to x86_64-apple-ios // which requires a simulator to run. Azure's CI image appears to have the // SDK installed, but are not configured to launch iOS images with a // simulator. if cfg!(target_os = "macos") { if CAN_RUN_ON_HOST.load(Ordering::SeqCst) { return true; } else { println!("Note: Cannot run on host, skipping."); return false; } } else { assert!(CAN_RUN_ON_HOST.load(Ordering::SeqCst)); return true; } } cargo-0.66.0/crates/cargo-test-support/src/diff.rs000066400000000000000000000122031432416201200217770ustar00rootroot00000000000000//! A simple Myers diff implementation. //! //! This focuses on being short and simple, and the expense of being //! inefficient. A key characteristic here is that this supports cargotest's //! `[..]` wildcard matching. That means things like hashing can't be used. //! Since Cargo's output tends to be small, this should be sufficient. 
use std::fmt; use std::io::Write; use termcolor::{Ansi, Color, ColorSpec, NoColor, WriteColor}; /// A single line change to be applied to the original. #[derive(Debug, Eq, PartialEq)] pub enum Change { Add(usize, T), Remove(usize, T), Keep(usize, usize, T), } pub fn diff<'a, T>(a: &'a [T], b: &'a [T]) -> Vec> where T: PartialEq, { if a.is_empty() && b.is_empty() { return vec![]; } let mut diff = vec![]; for (prev_x, prev_y, x, y) in backtrack(&a, &b) { if x == prev_x { diff.push(Change::Add(prev_y + 1, &b[prev_y])); } else if y == prev_y { diff.push(Change::Remove(prev_x + 1, &a[prev_x])); } else { diff.push(Change::Keep(prev_x + 1, prev_y + 1, &a[prev_x])); } } diff.reverse(); diff } fn shortest_edit(a: &[T], b: &[T]) -> Vec> where T: PartialEq, { let max = a.len() + b.len(); let mut v = vec![0; 2 * max + 1]; let mut trace = vec![]; for d in 0..=max { trace.push(v.clone()); for k in (0..=(2 * d)).step_by(2) { let mut x = if k == 0 || (k != 2 * d && v[max - d + k - 1] < v[max - d + k + 1]) { // Move down v[max - d + k + 1] } else { // Move right v[max - d + k - 1] + 1 }; let mut y = x + d - k; // Step diagonally as far as possible. while x < a.len() && y < b.len() && a[x] == b[y] { x += 1; y += 1; } v[max - d + k] = x; // Return if reached the bottom-right position. 
if x >= a.len() && y >= b.len() { return trace; } } } panic!("finished without hitting end?"); } fn backtrack(a: &[T], b: &[T]) -> Vec<(usize, usize, usize, usize)> where T: PartialEq, { let mut result = vec![]; let mut x = a.len(); let mut y = b.len(); let max = x + y; for (d, v) in shortest_edit(a, b).iter().enumerate().rev() { let k = x + d - y; let prev_k = if k == 0 || (k != 2 * d && v[max - d + k - 1] < v[max - d + k + 1]) { k + 1 } else { k - 1 }; let prev_x = v[max - d + prev_k]; let prev_y = (prev_x + d).saturating_sub(prev_k); while x > prev_x && y > prev_y { result.push((x - 1, y - 1, x, y)); x -= 1; y -= 1; } if d > 0 { result.push((prev_x, prev_y, x, y)); } x = prev_x; y = prev_y; } return result; } pub fn colored_diff<'a, T>(a: &'a [T], b: &'a [T]) -> String where T: PartialEq + fmt::Display, { let changes = diff(a, b); render_colored_changes(&changes) } pub fn render_colored_changes(changes: &[Change]) -> String { // termcolor is not very ergonomic, but I don't want to bring in another dependency. let mut red = ColorSpec::new(); red.set_fg(Some(Color::Red)); let mut green = ColorSpec::new(); green.set_fg(Some(Color::Green)); let mut dim = ColorSpec::new(); dim.set_dimmed(true); let mut v = Vec::new(); let mut result: Box = if crate::is_ci() { // Don't use color on CI. Even though GitHub can display colors, it // makes reading the raw logs more difficult. 
Box::new(NoColor::new(&mut v)) } else { Box::new(Ansi::new(&mut v)) }; for change in changes { let (nums, sign, color, text) = match change { Change::Add(i, s) => (format!(" {:<4} ", i), '+', &green, s), Change::Remove(i, s) => (format!("{:<4} ", i), '-', &red, s), Change::Keep(x, y, s) => (format!("{:<4}{:<4} ", x, y), ' ', &dim, s), }; result.set_color(&dim).unwrap(); write!(result, "{}", nums).unwrap(); let mut bold = color.clone(); bold.set_bold(true); result.set_color(&bold).unwrap(); write!(result, "{}", sign).unwrap(); result.reset().unwrap(); result.set_color(&color).unwrap(); write!(result, "{}", text).unwrap(); result.reset().unwrap(); writeln!(result).unwrap(); } drop(result); String::from_utf8(v).unwrap() } #[cfg(test)] pub fn compare(a: &str, b: &str) { let a: Vec<_> = a.chars().collect(); let b: Vec<_> = b.chars().collect(); let changes = diff(&a, &b); let mut result = vec![]; for change in changes { match change { Change::Add(_, s) => result.push(*s), Change::Remove(_, _s) => {} Change::Keep(_, _, s) => result.push(*s), } } assert_eq!(b, result); } #[test] fn basic_tests() { compare("", ""); compare("A", ""); compare("", "B"); compare("ABCABBA", "CBABAC"); } cargo-0.66.0/crates/cargo-test-support/src/git.rs000066400000000000000000000161211432416201200216550ustar00rootroot00000000000000/* # Git Testing Support ## Creating a git dependency `git::new()` is an easy way to create a new git repository containing a project that you can then use as a dependency. It will automatically add all the files you specify in the project and commit them to the repository. Example: ``` let git_project = git::new("dep1", |project| { project .file("Cargo.toml", &basic_manifest("dep1")) .file("src/lib.rs", r#"pub fn f() { println!("hi!"); } "#) }); // Use the `url()` method to get the file url to the new repository. 
let p = project() .file("Cargo.toml", &format!(r#" [package] name = "a" version = "1.0.0" [dependencies] dep1 = {{ git = '{}' }} "#, git_project.url())) .file("src/lib.rs", "extern crate dep1;") .build(); ``` ## Manually creating repositories `git::repo()` can be used to create a `RepoBuilder` which provides a way of adding files to a blank repository and committing them. If you want to then manipulate the repository (such as adding new files or tags), you can use `git2::Repository::open()` to open the repository and then use some of the helper functions in this file to interact with the repository. */ use crate::{path2url, project, Project, ProjectBuilder}; use std::fs; use std::path::{Path, PathBuf}; use std::sync::Once; use url::Url; #[must_use] pub struct RepoBuilder { repo: git2::Repository, files: Vec, } pub struct Repository(git2::Repository); /// Create a `RepoBuilder` to build a new git repository. /// /// Call `build()` to finalize and create the repository. pub fn repo(p: &Path) -> RepoBuilder { RepoBuilder::init(p) } impl RepoBuilder { pub fn init(p: &Path) -> RepoBuilder { t!(fs::create_dir_all(p.parent().unwrap())); let repo = init(p); RepoBuilder { repo, files: Vec::new(), } } /// Add a file to the repository. pub fn file(self, path: &str, contents: &str) -> RepoBuilder { let mut me = self.nocommit_file(path, contents); me.files.push(PathBuf::from(path)); me } /// Add a file that will be left in the working directory, but not added /// to the repository. pub fn nocommit_file(self, path: &str, contents: &str) -> RepoBuilder { let dst = self.repo.workdir().unwrap().join(path); t!(fs::create_dir_all(dst.parent().unwrap())); t!(fs::write(&dst, contents)); self } /// Create the repository and commit the new files. 
pub fn build(self) -> Repository { { let mut index = t!(self.repo.index()); for file in self.files.iter() { t!(index.add_path(file)); } t!(index.write()); let id = t!(index.write_tree()); let tree = t!(self.repo.find_tree(id)); let sig = t!(self.repo.signature()); t!(self .repo .commit(Some("HEAD"), &sig, &sig, "Initial commit", &tree, &[])); } let RepoBuilder { repo, .. } = self; Repository(repo) } } impl Repository { pub fn root(&self) -> &Path { self.0.workdir().unwrap() } pub fn url(&self) -> Url { path2url(self.0.workdir().unwrap().to_path_buf()) } pub fn revparse_head(&self) -> String { self.0 .revparse_single("HEAD") .expect("revparse HEAD") .id() .to_string() } } /// Initialize a new repository at the given path. pub fn init(path: &Path) -> git2::Repository { default_search_path(); let repo = t!(git2::Repository::init(path)); default_repo_cfg(&repo); repo } fn default_search_path() { use crate::paths::global_root; use git2::{opts::set_search_path, ConfigLevel}; static INIT: Once = Once::new(); INIT.call_once(|| unsafe { let path = global_root().join("blank_git_search_path"); t!(set_search_path(ConfigLevel::System, &path)); t!(set_search_path(ConfigLevel::Global, &path)); t!(set_search_path(ConfigLevel::XDG, &path)); t!(set_search_path(ConfigLevel::ProgramData, &path)); }) } fn default_repo_cfg(repo: &git2::Repository) { let mut cfg = t!(repo.config()); t!(cfg.set_str("user.email", "foo@bar.com")); t!(cfg.set_str("user.name", "Foo Bar")); } /// Create a new git repository with a project. pub fn new(name: &str, callback: F) -> Project where F: FnOnce(ProjectBuilder) -> ProjectBuilder, { new_repo(name, callback).0 } /// Create a new git repository with a project. /// Returns both the Project and the git Repository. 
pub fn new_repo(name: &str, callback: F) -> (Project, git2::Repository) where F: FnOnce(ProjectBuilder) -> ProjectBuilder, { let mut git_project = project().at(name); git_project = callback(git_project); let git_project = git_project.build(); let repo = init(&git_project.root()); add(&repo); commit(&repo); (git_project, repo) } /// Add all files in the working directory to the git index. pub fn add(repo: &git2::Repository) { // FIXME(libgit2/libgit2#2514): apparently, `add_all` will add all submodules // as well, and then fail because they're directories. As a stop-gap, we just // ignore all submodules. let mut s = t!(repo.submodules()); for submodule in s.iter_mut() { t!(submodule.add_to_index(false)); } let mut index = t!(repo.index()); t!(index.add_all( ["*"].iter(), git2::IndexAddOption::DEFAULT, Some( &mut (|a, _b| if s.iter().any(|s| a.starts_with(s.path())) { 1 } else { 0 }) ) )); t!(index.write()); } /// Add a git submodule to the repository. pub fn add_submodule<'a>( repo: &'a git2::Repository, url: &str, path: &Path, ) -> git2::Submodule<'a> { let path = path.to_str().unwrap().replace(r"\", "/"); let mut s = t!(repo.submodule(url, Path::new(&path), false)); let subrepo = t!(s.open()); default_repo_cfg(&subrepo); t!(subrepo.remote_add_fetch("origin", "refs/heads/*:refs/heads/*")); let mut origin = t!(subrepo.find_remote("origin")); t!(origin.fetch(&Vec::::new(), None, None)); t!(subrepo.checkout_head(None)); t!(s.add_finalize()); s } /// Commit changes to the git repository. pub fn commit(repo: &git2::Repository) -> git2::Oid { let tree_id = t!(t!(repo.index()).write_tree()); let sig = t!(repo.signature()); let mut parents = Vec::new(); if let Some(parent) = repo.head().ok().map(|h| h.target().unwrap()) { parents.push(t!(repo.find_commit(parent))) } let parents = parents.iter().collect::>(); t!(repo.commit( Some("HEAD"), &sig, &sig, "test", &t!(repo.find_tree(tree_id)), &parents )) } /// Create a new tag in the git repository. 
pub fn tag(repo: &git2::Repository, name: &str) { let head = repo.head().unwrap().target().unwrap(); t!(repo.tag( name, &t!(repo.find_object(head, None)), &t!(repo.signature()), "make a new tag", false )); } cargo-0.66.0/crates/cargo-test-support/src/install.rs000066400000000000000000000015021432416201200225350ustar00rootroot00000000000000use crate::paths; use std::env::consts::EXE_SUFFIX; use std::path::{Path, PathBuf}; /// Used by `cargo install` tests to assert an executable binary /// has been installed. Example usage: /// /// assert_has_installed_exe(cargo_home(), "foo"); #[track_caller] pub fn assert_has_installed_exe>(path: P, name: &'static str) { assert!(check_has_installed_exe(path, name)); } #[track_caller] pub fn assert_has_not_installed_exe>(path: P, name: &'static str) { assert!(!check_has_installed_exe(path, name)); } fn check_has_installed_exe>(path: P, name: &'static str) -> bool { path.as_ref().join("bin").join(exe(name)).is_file() } pub fn cargo_home() -> PathBuf { paths::home().join(".cargo") } pub fn exe(name: &str) -> String { format!("{}{}", name, EXE_SUFFIX) } cargo-0.66.0/crates/cargo-test-support/src/lib.rs000066400000000000000000001272641432416201200216530ustar00rootroot00000000000000//! # Cargo test support. //! //! See for a guide on writing tests. #![allow(clippy::all)] #![cfg_attr(feature = "deny-warnings", deny(warnings))] use std::env; use std::ffi::OsStr; use std::fmt::Write; use std::fs; use std::os; use std::path::{Path, PathBuf}; use std::process::{Command, Output}; use std::str; use std::time::{self, Duration}; use anyhow::{bail, Result}; use cargo_util::{is_ci, ProcessBuilder, ProcessError}; use serde_json; use url::Url; use self::paths::CargoPathExt; #[macro_export] macro_rules! t { ($e:expr) => { match $e { Ok(e) => e, Err(e) => $crate::panic_error(&format!("failed running {}", stringify!($e)), e), } }; } #[macro_export] macro_rules! 
curr_dir { () => { $crate::_curr_dir(std::path::Path::new(file!())); }; } #[doc(hidden)] pub fn _curr_dir(mut file_path: &'static Path) -> &'static Path { if !file_path.exists() { // HACK: Must be running in the rust-lang/rust workspace, adjust the paths accordingly. let prefix = PathBuf::from("src").join("tools").join("cargo"); if let Ok(crate_relative) = file_path.strip_prefix(prefix) { file_path = crate_relative } } assert!(file_path.exists(), "{} does not exist", file_path.display()); file_path.parent().unwrap() } #[track_caller] pub fn panic_error(what: &str, err: impl Into) -> ! { let err = err.into(); pe(what, err); #[track_caller] fn pe(what: &str, err: anyhow::Error) -> ! { let mut result = format!("{}\nerror: {}", what, err); for cause in err.chain().skip(1) { drop(writeln!(result, "\nCaused by:")); drop(write!(result, "{}", cause)); } panic!("\n{}", result); } } pub use cargo_test_macro::cargo_test; pub mod compare; pub mod cross_compile; mod diff; pub mod git; pub mod install; pub mod paths; pub mod publish; pub mod registry; pub mod tools; pub mod prelude { pub use crate::ArgLine; pub use crate::CargoCommand; pub use crate::ChannelChanger; pub use crate::TestEnv; } /* * * ===== Builders ===== * */ #[derive(PartialEq, Clone)] struct FileBuilder { path: PathBuf, body: String, executable: bool, } impl FileBuilder { pub fn new(path: PathBuf, body: &str, executable: bool) -> FileBuilder { FileBuilder { path, body: body.to_string(), executable: executable, } } fn mk(&mut self) { if self.executable { self.path.set_extension(env::consts::EXE_EXTENSION); } self.dirname().mkdir_p(); fs::write(&self.path, &self.body) .unwrap_or_else(|e| panic!("could not create file {}: {}", self.path.display(), e)); #[cfg(unix)] if self.executable { use std::os::unix::fs::PermissionsExt; let mut perms = fs::metadata(&self.path).unwrap().permissions(); let mode = perms.mode(); perms.set_mode(mode | 0o111); fs::set_permissions(&self.path, perms).unwrap(); } } fn dirname(&self) -> 
&Path { self.path.parent().unwrap() } } #[derive(PartialEq, Clone)] struct SymlinkBuilder { dst: PathBuf, src: PathBuf, src_is_dir: bool, } impl SymlinkBuilder { pub fn new(dst: PathBuf, src: PathBuf) -> SymlinkBuilder { SymlinkBuilder { dst, src, src_is_dir: false, } } pub fn new_dir(dst: PathBuf, src: PathBuf) -> SymlinkBuilder { SymlinkBuilder { dst, src, src_is_dir: true, } } #[cfg(unix)] fn mk(&self) { self.dirname().mkdir_p(); t!(os::unix::fs::symlink(&self.dst, &self.src)); } #[cfg(windows)] fn mk(&mut self) { self.dirname().mkdir_p(); if self.src_is_dir { t!(os::windows::fs::symlink_dir(&self.dst, &self.src)); } else { if let Some(ext) = self.dst.extension() { if ext == env::consts::EXE_EXTENSION { self.src.set_extension(ext); } } t!(os::windows::fs::symlink_file(&self.dst, &self.src)); } } fn dirname(&self) -> &Path { self.src.parent().unwrap() } } /// A cargo project to run tests against. /// /// See [`ProjectBuilder`] or [`Project::from_template`] to get started. pub struct Project { root: PathBuf, } /// Create a project to run tests against /// /// The project can be constructed programmatically or from the filesystem with [`Project::from_template`] #[must_use] pub struct ProjectBuilder { root: Project, files: Vec, symlinks: Vec, no_manifest: bool, } impl ProjectBuilder { /// Root of the project, ex: `/path/to/cargo/target/cit/t0/foo` pub fn root(&self) -> PathBuf { self.root.root() } /// Project's debug dir, ex: `/path/to/cargo/target/cit/t0/foo/target/debug` pub fn target_debug_dir(&self) -> PathBuf { self.root.target_debug_dir() } pub fn new(root: PathBuf) -> ProjectBuilder { ProjectBuilder { root: Project { root }, files: vec![], symlinks: vec![], no_manifest: false, } } pub fn at>(mut self, path: P) -> Self { self.root = Project { root: paths::root().join(path), }; self } /// Adds a file to the project. pub fn file>(mut self, path: B, body: &str) -> Self { self._file(path.as_ref(), body, false); self } /// Adds an executable file to the project. 
pub fn executable>(mut self, path: B, body: &str) -> Self { self._file(path.as_ref(), body, true); self } fn _file(&mut self, path: &Path, body: &str, executable: bool) { self.files.push(FileBuilder::new( self.root.root().join(path), body, executable, )); } /// Adds a symlink to a file to the project. pub fn symlink>(mut self, dst: T, src: T) -> Self { self.symlinks.push(SymlinkBuilder::new( self.root.root().join(dst), self.root.root().join(src), )); self } /// Create a symlink to a directory pub fn symlink_dir>(mut self, dst: T, src: T) -> Self { self.symlinks.push(SymlinkBuilder::new_dir( self.root.root().join(dst), self.root.root().join(src), )); self } pub fn no_manifest(mut self) -> Self { self.no_manifest = true; self } /// Creates the project. pub fn build(mut self) -> Project { // First, clean the directory if it already exists self.rm_root(); // Create the empty directory self.root.root().mkdir_p(); let manifest_path = self.root.root().join("Cargo.toml"); if !self.no_manifest && self.files.iter().all(|fb| fb.path != manifest_path) { self._file( Path::new("Cargo.toml"), &basic_manifest("foo", "0.0.1"), false, ) } let past = time::SystemTime::now() - Duration::new(1, 0); let ftime = filetime::FileTime::from_system_time(past); for file in self.files.iter_mut() { file.mk(); if is_coarse_mtime() { // Place the entire project 1 second in the past to ensure // that if cargo is called multiple times, the 2nd call will // see targets as "fresh". Without this, if cargo finishes in // under 1 second, the second call will see the mtime of // source == mtime of output and consider it dirty. filetime::set_file_times(&file.path, ftime, ftime).unwrap(); } } for symlink in self.symlinks.iter_mut() { symlink.mk(); } let ProjectBuilder { root, .. 
} = self; root } fn rm_root(&self) { self.root.root().rm_rf() } } impl Project { /// Copy the test project from a fixed state pub fn from_template(template_path: impl AsRef) -> Self { let root = paths::root(); let project_root = root.join("case"); snapbox::path::copy_template(template_path.as_ref(), &project_root).unwrap(); Self { root: project_root } } /// Root of the project, ex: `/path/to/cargo/target/cit/t0/foo` pub fn root(&self) -> PathBuf { self.root.clone() } /// Project's target dir, ex: `/path/to/cargo/target/cit/t0/foo/target` pub fn build_dir(&self) -> PathBuf { self.root().join("target") } /// Project's debug dir, ex: `/path/to/cargo/target/cit/t0/foo/target/debug` pub fn target_debug_dir(&self) -> PathBuf { self.build_dir().join("debug") } /// File url for root, ex: `file:///path/to/cargo/target/cit/t0/foo` pub fn url(&self) -> Url { path2url(self.root()) } /// Path to an example built as a library. /// `kind` should be one of: "lib", "rlib", "staticlib", "dylib", "proc-macro" /// ex: `/path/to/cargo/target/cit/t0/foo/target/debug/examples/libex.rlib` pub fn example_lib(&self, name: &str, kind: &str) -> PathBuf { self.target_debug_dir() .join("examples") .join(paths::get_lib_filename(name, kind)) } /// Path to a debug binary. /// ex: `/path/to/cargo/target/cit/t0/foo/target/debug/foo` pub fn bin(&self, b: &str) -> PathBuf { self.build_dir() .join("debug") .join(&format!("{}{}", b, env::consts::EXE_SUFFIX)) } /// Path to a release binary. /// ex: `/path/to/cargo/target/cit/t0/foo/target/release/foo` pub fn release_bin(&self, b: &str) -> PathBuf { self.build_dir() .join("release") .join(&format!("{}{}", b, env::consts::EXE_SUFFIX)) } /// Path to a debug binary for a specific target triple. 
/// ex: `/path/to/cargo/target/cit/t0/foo/target/i686-apple-darwin/debug/foo`
    pub fn target_bin(&self, target: &str, b: &str) -> PathBuf {
        self.build_dir().join(target).join("debug").join(&format!(
            "{}{}",
            b,
            env::consts::EXE_SUFFIX
        ))
    }

    /// Returns an iterator of paths matching the glob pattern, which is
    /// relative to the project root.
    pub fn glob<P: AsRef<Path>>(&self, pattern: P) -> glob::Paths {
        let pattern = self.root().join(pattern);
        glob::glob(pattern.to_str().expect("failed to convert pattern to str"))
            .expect("failed to glob")
    }

    /// Changes the contents of an existing file.
    pub fn change_file(&self, path: &str, body: &str) {
        FileBuilder::new(self.root().join(path), body, false).mk()
    }

    /// Creates a `ProcessBuilder` to run a program in the project
    /// and wrap it in an Execs to assert on the execution.
    /// Example:
    ///         p.process(&p.bin("foo"))
    ///             .with_stdout("bar\n")
    ///             .run();
    pub fn process<T: AsRef<OsStr>>(&self, program: T) -> Execs {
        let mut p = process(program);
        p.cwd(self.root());
        execs().with_process_builder(p)
    }

    /// Creates a `ProcessBuilder` to run cargo.
    /// Arguments can be separated by spaces.
    /// Example:
    ///     p.cargo("build --bin foo").run();
    pub fn cargo(&self, cmd: &str) -> Execs {
        let mut execs = self.process(&cargo_exe());
        if let Some(ref mut p) = execs.process_builder {
            p.arg_line(cmd);
        }
        execs
    }

    /// Safely run a process after `cargo build`.
    ///
    /// Windows has a problem where a process cannot be reliably
    /// be replaced, removed, or renamed immediately after executing it.
    /// The action may fail (with errors like Access is denied), or
    /// it may succeed, but future attempts to use the same filename
    /// will fail with "Already Exists".
    ///
    /// If you have a test that needs to do `cargo run` multiple
    /// times, you should instead use `cargo build` and use this
    /// method to run the executable. Each time you call this,
    /// use a new name for `dst`.
    /// See rust-lang/cargo#5481.
    pub fn rename_run(&self, src: &str, dst: &str) -> Execs {
        let src = self.bin(src);
        let dst = self.bin(dst);
        fs::rename(&src, &dst)
            .unwrap_or_else(|e| panic!("Failed to rename `{:?}` to `{:?}`: {}", src, dst, e));
        self.process(dst)
    }

    /// Returns the contents of `Cargo.lock`.
    pub fn read_lockfile(&self) -> String {
        self.read_file("Cargo.lock")
    }

    /// Returns the contents of a path in the project root
    pub fn read_file(&self, path: &str) -> String {
        let full = self.root().join(path);
        fs::read_to_string(&full)
            .unwrap_or_else(|e| panic!("could not read file {}: {}", full.display(), e))
    }

    /// Modifies `Cargo.toml` to remove all commented lines.
    pub fn uncomment_root_manifest(&self) {
        let contents = self.read_file("Cargo.toml").replace("#", "");
        fs::write(self.root().join("Cargo.toml"), contents).unwrap();
    }

    /// Creates a symlink inside the project root (platform-specific impl).
    pub fn symlink(&self, src: impl AsRef<Path>, dst: impl AsRef<Path>) {
        let src = self.root().join(src.as_ref());
        let dst = self.root().join(dst.as_ref());
        #[cfg(unix)]
        {
            if let Err(e) = os::unix::fs::symlink(&src, &dst) {
                panic!("failed to symlink {:?} to {:?}: {:?}", src, dst, e);
            }
        }
        #[cfg(windows)]
        {
            if src.is_dir() {
                if let Err(e) = os::windows::fs::symlink_dir(&src, &dst) {
                    panic!("failed to symlink {:?} to {:?}: {:?}", src, dst, e);
                }
            } else {
                if let Err(e) = os::windows::fs::symlink_file(&src, &dst) {
                    panic!("failed to symlink {:?} to {:?}: {:?}", src, dst, e);
                }
            }
        }
    }
}

// Generates a project layout
pub fn project() -> ProjectBuilder {
    ProjectBuilder::new(paths::root().join("foo"))
}

// Generates a project layout in given directory
pub fn project_in(dir: &str) -> ProjectBuilder {
    ProjectBuilder::new(paths::root().join(dir).join("foo"))
}

// Generates a project layout inside our fake home dir
pub fn project_in_home(name: &str) -> ProjectBuilder {
    ProjectBuilder::new(paths::home().join(name))
}

// === Helpers ===

/// Generates the body of a `main.rs` that `extern crate`s each dep and
/// prints `println` (the caller supplies the quoting).
pub fn main_file(println: &str, deps: &[&str]) -> String {
    let mut buf = String::new();

    for dep in deps.iter() {
        buf.push_str(&format!("extern crate {};\n", dep));
    }

    buf.push_str("fn main() { println!(");
    buf.push_str(println);
    buf.push_str("); }\n");

    buf
}

/// Path to the `cargo` binary under test.
pub fn cargo_exe() -> PathBuf {
    snapbox::cmd::cargo_bin("cargo")
}

/// This is the raw output from the process.
///
/// This is similar to `std::process::Output`, however the `status` is
/// translated to the raw `code`. This is necessary because `ProcessError`
/// does not have access to the raw `ExitStatus` because `ProcessError` needs
/// to be serializable (for the Rustc cache), and `ExitStatus` does not
/// provide a constructor.
pub struct RawOutput {
    pub code: Option<i32>,
    pub stdout: Vec<u8>,
    pub stderr: Vec<u8>,
}

// NOTE(review): generic parameters below were stripped by extraction and are
// restored from the surviving initializer in `execs()` — verify upstream.
#[must_use]
#[derive(Clone)]
pub struct Execs {
    ran: bool,
    process_builder: Option<ProcessBuilder>,
    expect_stdout: Option<String>,
    expect_stdin: Option<String>,
    expect_stderr: Option<String>,
    expect_exit_code: Option<i32>,
    expect_stdout_contains: Vec<String>,
    expect_stderr_contains: Vec<String>,
    expect_stdout_contains_n: Vec<(String, usize)>,
    expect_stdout_not_contains: Vec<String>,
    expect_stderr_not_contains: Vec<String>,
    expect_stderr_unordered: Vec<String>,
    expect_stderr_with_without: Vec<(Vec<String>, Vec<String>)>,
    expect_json: Option<String>,
    expect_json_contains_unordered: Option<String>,
    stream_output: bool,
}

impl Execs {
    pub fn with_process_builder(mut self, p: ProcessBuilder) -> Execs {
        self.process_builder = Some(p);
        self
    }

    /// Verifies that stdout is equal to the given lines.
    /// See [`compare`] for supported patterns.
    pub fn with_stdout<S: ToString>(&mut self, expected: S) -> &mut Self {
        self.expect_stdout = Some(expected.to_string());
        self
    }

    /// Verifies that stderr is equal to the given lines.
    /// See [`compare`] for supported patterns.
    pub fn with_stderr<S: ToString>(&mut self, expected: S) -> &mut Self {
        self.expect_stderr = Some(expected.to_string());
        self
    }

    /// Writes the given lines to stdin.
    pub fn with_stdin<S: ToString>(&mut self, expected: S) -> &mut Self {
        self.expect_stdin = Some(expected.to_string());
        self
    }

    /// Verifies the exit code from the process.
    ///
    /// This is not necessary if the expected exit code is `0`.
pub fn with_status(&mut self, expected: i32) -> &mut Self {
    self.expect_exit_code = Some(expected);
    self
}

/// Removes exit code check for the process.
///
/// By default, the expected exit code is `0`.
pub fn without_status(&mut self) -> &mut Self {
    self.expect_exit_code = None;
    self
}

/// Verifies that stdout contains the given contiguous lines somewhere in
/// its output.
///
/// See [`compare`] for supported patterns.
pub fn with_stdout_contains<S: ToString>(&mut self, expected: S) -> &mut Self {
    self.expect_stdout_contains.push(expected.to_string());
    self
}

/// Verifies that stderr contains the given contiguous lines somewhere in
/// its output.
///
/// See [`compare`] for supported patterns.
pub fn with_stderr_contains<S: ToString>(&mut self, expected: S) -> &mut Self {
    self.expect_stderr_contains.push(expected.to_string());
    self
}

/// Verifies that stdout contains the given contiguous lines somewhere in
/// its output, and should be repeated `number` times.
///
/// See [`compare`] for supported patterns.
pub fn with_stdout_contains_n<S: ToString>(&mut self, expected: S, number: usize) -> &mut Self {
    self.expect_stdout_contains_n
        .push((expected.to_string(), number));
    self
}

/// Verifies that stdout does not contain the given contiguous lines.
///
/// See [`compare`] for supported patterns.
///
/// See note on [`Self::with_stderr_does_not_contain`].
pub fn with_stdout_does_not_contain<S: ToString>(&mut self, expected: S) -> &mut Self {
    self.expect_stdout_not_contains.push(expected.to_string());
    self
}

/// Verifies that stderr does not contain the given contiguous lines.
///
/// See [`compare`] for supported patterns.
///
/// Care should be taken when using this method because there is a
/// limitless number of possible things that *won't* appear. A typo means
/// your test will pass without verifying the correct behavior. If
/// possible, write the test first so that it fails, and then implement
/// your fix/feature to make it pass.
pub fn with_stderr_does_not_contain<S: ToString>(&mut self, expected: S) -> &mut Self {
    self.expect_stderr_not_contains.push(expected.to_string());
    self
}

/// Verifies that all of the stderr output is equal to the given lines,
/// ignoring the order of the lines.
///
/// See [`compare`] for supported patterns.
///
/// This is useful when checking the output of `cargo build -v` since
/// the order of the output is not always deterministic.
/// Recommend use `with_stderr_contains` instead unless you really want to
/// check *every* line of output.
///
/// Be careful when using patterns such as `[..]`, because you may end up
/// with multiple lines that might match, and this is not smart enough to
/// do anything like longest-match. For example, avoid something like:
///
/// ```text
/// [RUNNING] `rustc [..]
/// [RUNNING] `rustc --crate-name foo [..]
/// ```
///
/// This will randomly fail if the other crate name is `bar`, and the
/// order changes.
pub fn with_stderr_unordered<S: ToString>(&mut self, expected: S) -> &mut Self {
    self.expect_stderr_unordered.push(expected.to_string());
    self
}

/// Verify that a particular line appears in stderr with and without the
/// given substrings. Exactly one line must match.
///
/// The substrings are matched as `contains`. Example:
///
/// ```no_run
/// execs.with_stderr_line_without(
///     &[
///         "[RUNNING] `rustc --crate-name build_script_build",
///         "-C opt-level=3",
///     ],
///     &["-C debuginfo", "-C incremental"],
/// )
/// ```
///
/// This will check that a build line includes `-C opt-level=3` but does
/// not contain `-C debuginfo` or `-C incremental`.
///
/// Be careful writing the `without` fragments, see note in
/// `with_stderr_does_not_contain`.
pub fn with_stderr_line_without<S: ToString>(
    &mut self,
    with: &[S],
    without: &[S],
) -> &mut Self {
    let with = with.iter().map(|s| s.to_string()).collect();
    let without = without.iter().map(|s| s.to_string()).collect();
    self.expect_stderr_with_without.push((with, without));
    self
}

/// Verifies the JSON output matches the given JSON.
///
/// This is typically used when testing cargo commands that emit JSON.
/// Each separate JSON object should be separated by a blank line.
/// Example:
///
/// ```rust,ignore
/// assert_that(
///     p.cargo("metadata"),
///     execs().with_json(r#"
///         {"example": "abc"}
///
///         {"example": "def"}
///     "#)
/// );
/// ```
///
/// - Objects should match in the order given.
/// - The order of arrays is ignored.
/// - Strings support patterns described in [`compare`].
/// - Use `"{...}"` to match any object.
pub fn with_json(&mut self, expected: &str) -> &mut Self {
    self.expect_json = Some(expected.to_string());
    self
}

/// Verifies JSON output contains the given objects (in any order) somewhere
/// in its output.
///
/// CAUTION: Be very careful when using this. Make sure every object is
/// unique (not a subset of one another). Also avoid using objects that
/// could possibly match multiple output lines unless you're very sure of
/// what you are doing.
///
/// See `with_json` for more detail.
pub fn with_json_contains_unordered(&mut self, expected: &str) -> &mut Self {
    match &mut self.expect_json_contains_unordered {
        None => self.expect_json_contains_unordered = Some(expected.to_string()),
        Some(e) => {
            e.push_str("\n\n");
            e.push_str(expected);
        }
    }
    self
}

/// Forward subordinate process stdout/stderr to the terminal.
/// Useful for printf debugging of the tests.
/// CAUTION: CI will fail if you leave this in your test!
#[allow(unused)] pub fn stream(&mut self) -> &mut Self { self.stream_output = true; self } pub fn arg>(&mut self, arg: T) -> &mut Self { if let Some(ref mut p) = self.process_builder { p.arg(arg); } self } pub fn cwd>(&mut self, path: T) -> &mut Self { if let Some(ref mut p) = self.process_builder { if let Some(cwd) = p.get_cwd() { let new_path = cwd.join(path.as_ref()); p.cwd(new_path); } else { p.cwd(path); } } self } fn get_cwd(&self) -> Option<&Path> { self.process_builder.as_ref().and_then(|p| p.get_cwd()) } pub fn env>(&mut self, key: &str, val: T) -> &mut Self { if let Some(ref mut p) = self.process_builder { p.env(key, val); } self } pub fn env_remove(&mut self, key: &str) -> &mut Self { if let Some(ref mut p) = self.process_builder { p.env_remove(key); } self } pub fn exec_with_output(&mut self) -> Result { self.ran = true; // TODO avoid unwrap let p = (&self.process_builder).clone().unwrap(); p.exec_with_output() } pub fn build_command(&mut self) -> Command { self.ran = true; // TODO avoid unwrap let p = (&self.process_builder).clone().unwrap(); p.build_command() } /// Enables nightly features for testing /// /// The list of reasons should be why nightly cargo is needed. If it is /// becuase of an unstable feature put the name of the feature as the reason, /// e.g. 
`&["print-im-a-teapot"]` pub fn masquerade_as_nightly_cargo(&mut self, reasons: &[&str]) -> &mut Self { if let Some(ref mut p) = self.process_builder { p.masquerade_as_nightly_cargo(reasons); } self } pub fn enable_mac_dsym(&mut self) -> &mut Self { if cfg!(target_os = "macos") { self.env("CARGO_PROFILE_DEV_SPLIT_DEBUGINFO", "packed") .env("CARGO_PROFILE_TEST_SPLIT_DEBUGINFO", "packed") .env("CARGO_PROFILE_RELEASE_SPLIT_DEBUGINFO", "packed") .env("CARGO_PROFILE_BENCH_SPLIT_DEBUGINFO", "packed"); } self } #[track_caller] pub fn run(&mut self) { self.ran = true; let mut p = (&self.process_builder).clone().unwrap(); if let Some(stdin) = self.expect_stdin.take() { p.stdin(stdin); } if let Err(e) = self.match_process(&p) { panic_error(&format!("test failed running {}", p), e); } } #[track_caller] pub fn run_expect_error(&mut self) { self.ran = true; let p = (&self.process_builder).clone().unwrap(); if self.match_process(&p).is_ok() { panic!("test was expected to fail, but succeeded running {}", p); } } /// Runs the process, checks the expected output, and returns the first /// JSON object on stdout. 
#[track_caller] pub fn run_json(&mut self) -> serde_json::Value { self.ran = true; let p = (&self.process_builder).clone().unwrap(); match self.match_process(&p) { Err(e) => panic_error(&format!("test failed running {}", p), e), Ok(output) => serde_json::from_slice(&output.stdout).unwrap_or_else(|e| { panic!( "\nfailed to parse JSON: {}\n\ output was:\n{}\n", e, String::from_utf8_lossy(&output.stdout) ); }), } } #[track_caller] pub fn run_output(&mut self, output: &Output) { self.ran = true; if let Err(e) = self.match_output(output.status.code(), &output.stdout, &output.stderr) { panic_error("process did not return the expected result", e) } } fn verify_checks_output(&self, stdout: &[u8], stderr: &[u8]) { if self.expect_exit_code.unwrap_or(0) != 0 && self.expect_stdout.is_none() && self.expect_stdin.is_none() && self.expect_stderr.is_none() && self.expect_stdout_contains.is_empty() && self.expect_stderr_contains.is_empty() && self.expect_stdout_contains_n.is_empty() && self.expect_stdout_not_contains.is_empty() && self.expect_stderr_not_contains.is_empty() && self.expect_stderr_unordered.is_empty() && self.expect_stderr_with_without.is_empty() && self.expect_json.is_none() && self.expect_json_contains_unordered.is_none() { panic!( "`with_status()` is used, but no output is checked.\n\ The test must check the output to ensure the correct error is triggered.\n\ --- stdout\n{}\n--- stderr\n{}", String::from_utf8_lossy(stdout), String::from_utf8_lossy(stderr), ); } } fn match_process(&self, process: &ProcessBuilder) -> Result { println!("running {}", process); let res = if self.stream_output { if is_ci() { panic!("`.stream()` is for local debugging") } process.exec_with_streaming( &mut |out| { println!("{}", out); Ok(()) }, &mut |err| { eprintln!("{}", err); Ok(()) }, true, ) } else { process.exec_with_output() }; match res { Ok(out) => { self.match_output(out.status.code(), &out.stdout, &out.stderr)?; return Ok(RawOutput { stdout: out.stdout, stderr: out.stderr, code: 
out.status.code(), }); } Err(e) => { if let Some(ProcessError { stdout: Some(stdout), stderr: Some(stderr), code, .. }) = e.downcast_ref::() { self.match_output(*code, stdout, stderr)?; return Ok(RawOutput { stdout: stdout.to_vec(), stderr: stderr.to_vec(), code: *code, }); } bail!("could not exec process {}: {:?}", process, e) } } } fn match_output(&self, code: Option, stdout: &[u8], stderr: &[u8]) -> Result<()> { self.verify_checks_output(stdout, stderr); let stdout = str::from_utf8(stdout).expect("stdout is not utf8"); let stderr = str::from_utf8(stderr).expect("stderr is not utf8"); let cwd = self.get_cwd(); match self.expect_exit_code { None => {} Some(expected) if code == Some(expected) => {} Some(expected) => bail!( "process exited with code {} (expected {})\n--- stdout\n{}\n--- stderr\n{}", code.unwrap_or(-1), expected, stdout, stderr ), } if let Some(expect_stdout) = &self.expect_stdout { compare::match_exact(expect_stdout, stdout, "stdout", stderr, cwd)?; } if let Some(expect_stderr) = &self.expect_stderr { compare::match_exact(expect_stderr, stderr, "stderr", stdout, cwd)?; } for expect in self.expect_stdout_contains.iter() { compare::match_contains(expect, stdout, cwd)?; } for expect in self.expect_stderr_contains.iter() { compare::match_contains(expect, stderr, cwd)?; } for &(ref expect, number) in self.expect_stdout_contains_n.iter() { compare::match_contains_n(expect, number, stdout, cwd)?; } for expect in self.expect_stdout_not_contains.iter() { compare::match_does_not_contain(expect, stdout, cwd)?; } for expect in self.expect_stderr_not_contains.iter() { compare::match_does_not_contain(expect, stderr, cwd)?; } for expect in self.expect_stderr_unordered.iter() { compare::match_unordered(expect, stderr, cwd)?; } for (with, without) in self.expect_stderr_with_without.iter() { compare::match_with_without(stderr, with, without, cwd)?; } if let Some(ref expect_json) = self.expect_json { compare::match_json(expect_json, stdout, cwd)?; } if let Some(ref 
expected) = self.expect_json_contains_unordered { compare::match_json_contains_unordered(expected, stdout, cwd)?; } Ok(()) } } impl Drop for Execs { fn drop(&mut self) { if !self.ran && !std::thread::panicking() { panic!("forgot to run this command"); } } } pub fn execs() -> Execs { Execs { ran: false, process_builder: None, expect_stdout: None, expect_stderr: None, expect_stdin: None, expect_exit_code: Some(0), expect_stdout_contains: Vec::new(), expect_stderr_contains: Vec::new(), expect_stdout_contains_n: Vec::new(), expect_stdout_not_contains: Vec::new(), expect_stderr_not_contains: Vec::new(), expect_stderr_unordered: Vec::new(), expect_stderr_with_without: Vec::new(), expect_json: None, expect_json_contains_unordered: None, stream_output: false, } } pub fn basic_manifest(name: &str, version: &str) -> String { format!( r#" [package] name = "{}" version = "{}" authors = [] "#, name, version ) } pub fn basic_bin_manifest(name: &str) -> String { format!( r#" [package] name = "{}" version = "0.5.0" authors = ["wycats@example.com"] [[bin]] name = "{}" "#, name, name ) } pub fn basic_lib_manifest(name: &str) -> String { format!( r#" [package] name = "{}" version = "0.5.0" authors = ["wycats@example.com"] [lib] name = "{}" "#, name, name ) } pub fn path2url>(p: P) -> Url { Url::from_file_path(p).ok().unwrap() } struct RustcInfo { verbose_version: String, host: String, } impl RustcInfo { fn new() -> RustcInfo { let output = ProcessBuilder::new("rustc") .arg("-vV") .exec_with_output() .expect("rustc should exec"); let verbose_version = String::from_utf8(output.stdout).expect("utf8 output"); let host = verbose_version .lines() .filter_map(|line| line.strip_prefix("host: ")) .next() .expect("verbose version has host: field") .to_string(); RustcInfo { verbose_version, host, } } } lazy_static::lazy_static! { static ref RUSTC_INFO: RustcInfo = RustcInfo::new(); } /// The rustc host such as `x86_64-unknown-linux-gnu`. 
pub fn rustc_host() -> &'static str { &RUSTC_INFO.host } /// The host triple suitable for use in a cargo environment variable (uppercased). pub fn rustc_host_env() -> String { rustc_host().to_uppercase().replace('-', "_") } pub fn is_nightly() -> bool { let vv = &RUSTC_INFO.verbose_version; // CARGO_TEST_DISABLE_NIGHTLY is set in rust-lang/rust's CI so that all // nightly-only tests are disabled there. Otherwise, it could make it // difficult to land changes which would need to be made simultaneously in // rust-lang/cargo and rust-lan/rust, which isn't possible. env::var("CARGO_TEST_DISABLE_NIGHTLY").is_err() && (vv.contains("-nightly") || vv.contains("-dev")) } pub fn process>(t: T) -> ProcessBuilder { _process(t.as_ref()) } fn _process(t: &OsStr) -> ProcessBuilder { let mut p = ProcessBuilder::new(t); p.cwd(&paths::root()).test_env(); p } /// Enable nightly features for testing pub trait ChannelChanger { /// The list of reasons should be why nightly cargo is needed. If it is /// becuase of an unstable feature put the name of the feature as the reason, /// e.g. `&["print-im-a-teapot"]`. fn masquerade_as_nightly_cargo(self, _reasons: &[&str]) -> Self; } impl ChannelChanger for &mut ProcessBuilder { fn masquerade_as_nightly_cargo(self, _reasons: &[&str]) -> Self { self.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly") } } impl ChannelChanger for snapbox::cmd::Command { fn masquerade_as_nightly_cargo(self, _reasons: &[&str]) -> Self { self.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly") } } /// Establish a process's test environment pub trait TestEnv: Sized { fn test_env(mut self) -> Self { // In general just clear out all cargo-specific configuration already in the // environment. Our tests all assume a "default configuration" unless // specified otherwise. 
for (k, _v) in env::vars() { if k.starts_with("CARGO_") { self = self.env_remove(&k); } } if env::var_os("RUSTUP_TOOLCHAIN").is_some() { // Override the PATH to avoid executing the rustup wrapper thousands // of times. This makes the testsuite run substantially faster. lazy_static::lazy_static! { static ref RUSTC_DIR: PathBuf = { match ProcessBuilder::new("rustup") .args(&["which", "rustc"]) .exec_with_output() { Ok(output) => { let s = str::from_utf8(&output.stdout).expect("utf8").trim(); let mut p = PathBuf::from(s); p.pop(); p } Err(e) => { panic!("RUSTUP_TOOLCHAIN was set, but could not run rustup: {}", e); } } }; } let path = env::var_os("PATH").unwrap_or_default(); let paths = env::split_paths(&path); let new_path = env::join_paths(std::iter::once(RUSTC_DIR.clone()).chain(paths)).unwrap(); self = self.env("PATH", new_path); } self = self .current_dir(&paths::root()) .env("HOME", paths::home()) .env("CARGO_HOME", paths::home().join(".cargo")) .env("__CARGO_TEST_ROOT", paths::global_root()) // Force Cargo to think it's on the stable channel for all tests, this // should hopefully not surprise us as we add cargo features over time and // cargo rides the trains. .env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "stable") // For now disable incremental by default as support hasn't ridden to the // stable channel yet. Once incremental support hits the stable compiler we // can switch this to one and then fix the tests. 
.env("CARGO_INCREMENTAL", "0") .env_remove("__CARGO_DEFAULT_LIB_METADATA") .env_remove("RUSTC") .env_remove("RUSTDOC") .env_remove("RUSTC_WRAPPER") .env_remove("RUSTFLAGS") .env_remove("RUSTDOCFLAGS") .env_remove("XDG_CONFIG_HOME") // see #2345 .env("GIT_CONFIG_NOSYSTEM", "1") // keep trying to sandbox ourselves .env_remove("EMAIL") .env_remove("USER") // not set on some rust-lang docker images .env_remove("MFLAGS") .env_remove("MAKEFLAGS") .env_remove("GIT_AUTHOR_NAME") .env_remove("GIT_AUTHOR_EMAIL") .env_remove("GIT_COMMITTER_NAME") .env_remove("GIT_COMMITTER_EMAIL") .env_remove("MSYSTEM"); // assume cmd.exe everywhere on windows if cfg!(target_os = "macos") { // Work-around a bug in macOS 10.15, see `link_or_copy` for details. self = self.env("__CARGO_COPY_DONT_LINK_DO_NOT_USE_THIS", "1"); } self } fn current_dir>(self, path: S) -> Self; fn env>(self, key: &str, value: S) -> Self; fn env_remove(self, key: &str) -> Self; } impl TestEnv for &mut ProcessBuilder { fn current_dir>(self, path: S) -> Self { let path = path.as_ref(); self.cwd(path) } fn env>(self, key: &str, value: S) -> Self { self.env(key, value) } fn env_remove(self, key: &str) -> Self { self.env_remove(key) } } impl TestEnv for snapbox::cmd::Command { fn current_dir>(self, path: S) -> Self { self.current_dir(path) } fn env>(self, key: &str, value: S) -> Self { self.env(key, value) } fn env_remove(self, key: &str) -> Self { self.env_remove(key) } } /// Test the cargo command pub trait CargoCommand { fn cargo_ui() -> Self; } impl CargoCommand for snapbox::cmd::Command { fn cargo_ui() -> Self { Self::new(cargo_exe()) .with_assert(compare::assert_ui()) .test_env() } } /// Add a list of arguments as a line pub trait ArgLine: Sized { fn arg_line(mut self, s: &str) -> Self { for mut arg in s.split_whitespace() { if (arg.starts_with('"') && arg.ends_with('"')) || (arg.starts_with('\'') && arg.ends_with('\'')) { arg = &arg[1..(arg.len() - 1).max(1)]; } else if arg.contains(&['"', '\''][..]) { 
panic!("shell-style argument parsing is not supported") } self = self.arg(arg); } self } fn arg>(self, s: S) -> Self; } impl ArgLine for &mut ProcessBuilder { fn arg>(self, s: S) -> Self { self.arg(s) } } impl ArgLine for snapbox::cmd::Command { fn arg>(self, s: S) -> Self { self.arg(s) } } pub fn cargo_process(s: &str) -> Execs { let mut p = process(&cargo_exe()); p.arg_line(s); execs().with_process_builder(p) } pub fn git_process(s: &str) -> ProcessBuilder { let mut p = process("git"); p.arg_line(s); p } pub fn sleep_ms(ms: u64) { ::std::thread::sleep(Duration::from_millis(ms)); } /// Returns `true` if the local filesystem has low-resolution mtimes. pub fn is_coarse_mtime() -> bool { // If the filetime crate is being used to emulate HFS then // return `true`, without looking at the actual hardware. cfg!(emulate_second_only_system) || // This should actually be a test that `$CARGO_TARGET_DIR` is on an HFS // filesystem, (or any filesystem with low-resolution mtimes). However, // that's tricky to detect, so for now just deal with CI. cfg!(target_os = "macos") && is_ci() } /// Some CI setups are much slower then the equipment used by Cargo itself. /// Architectures that do not have a modern processor, hardware emulation, etc. /// This provides a way for those setups to increase the cut off for all the time based test. pub fn slow_cpu_multiplier(main: u64) -> Duration { lazy_static::lazy_static! { static ref SLOW_CPU_MULTIPLIER: u64 = env::var("CARGO_TEST_SLOW_CPU_MULTIPLIER").ok().and_then(|m| m.parse().ok()).unwrap_or(1); } Duration::from_secs(*SLOW_CPU_MULTIPLIER * main) } #[cfg(windows)] pub fn symlink_supported() -> bool { if is_ci() { // We want to be absolutely sure this runs on CI. 
return true; } let src = paths::root().join("symlink_src"); fs::write(&src, "").unwrap(); let dst = paths::root().join("symlink_dst"); let result = match os::windows::fs::symlink_file(&src, &dst) { Ok(_) => { fs::remove_file(&dst).unwrap(); true } Err(e) => { eprintln!( "symlinks not supported: {:?}\n\ Windows 10 users should enable developer mode.", e ); false } }; fs::remove_file(&src).unwrap(); return result; } #[cfg(not(windows))] pub fn symlink_supported() -> bool { true } /// The error message for ENOENT. pub fn no_such_file_err_msg() -> String { std::io::Error::from_raw_os_error(2).to_string() } cargo-0.66.0/crates/cargo-test-support/src/paths.rs000066400000000000000000000234661432416201200222230ustar00rootroot00000000000000use filetime::{self, FileTime}; use lazy_static::lazy_static; use std::cell::RefCell; use std::collections::HashMap; use std::env; use std::fs; use std::io::{self, ErrorKind}; use std::path::{Path, PathBuf}; use std::process::Command; use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Mutex; static CARGO_INTEGRATION_TEST_DIR: &str = "cit"; lazy_static! { // TODO: Use `SyncOnceCell` when stable static ref GLOBAL_ROOT: Mutex> = Mutex::new(None); static ref TEST_ROOTS: Mutex> = Default::default(); } /// This is used when running cargo is pre-CARGO_TARGET_TMPDIR /// TODO: Remove when CARGO_TARGET_TMPDIR grows old enough. 
fn global_root_legacy() -> PathBuf { let mut path = t!(env::current_exe()); path.pop(); // chop off exe name path.pop(); // chop off "deps" path.push("tmp"); path.mkdir_p(); path } fn set_global_root(tmp_dir: Option<&'static str>) { let mut lock = GLOBAL_ROOT.lock().unwrap(); if lock.is_none() { let mut root = match tmp_dir { Some(tmp_dir) => PathBuf::from(tmp_dir), None => global_root_legacy(), }; root.push(CARGO_INTEGRATION_TEST_DIR); *lock = Some(root); } } pub fn global_root() -> PathBuf { let lock = GLOBAL_ROOT.lock().unwrap(); match lock.as_ref() { Some(p) => p.clone(), None => unreachable!("GLOBAL_ROOT not set yet"), } } // We need to give each test a unique id. The test name could serve this // purpose, but the `test` crate doesn't have a way to obtain the current test // name.[*] Instead, we used the `cargo-test-macro` crate to automatically // insert an init function for each test that sets the test name in a thread // local variable. // // [*] It does set the thread name, but only when running concurrently. If not // running concurrently, all tests are run on the main thread. thread_local! 
{ static TEST_ID: RefCell> = RefCell::new(None); } pub struct TestIdGuard { _private: (), } pub fn init_root(tmp_dir: Option<&'static str>) -> TestIdGuard { static NEXT_ID: AtomicUsize = AtomicUsize::new(0); let id = NEXT_ID.fetch_add(1, Ordering::SeqCst); TEST_ID.with(|n| *n.borrow_mut() = Some(id)); let guard = TestIdGuard { _private: () }; set_global_root(tmp_dir); let r = root(); r.rm_rf(); r.mkdir_p(); guard } impl Drop for TestIdGuard { fn drop(&mut self) { TEST_ID.with(|n| *n.borrow_mut() = None); } } pub fn root() -> PathBuf { let id = TEST_ID.with(|n| { n.borrow().expect( "Tests must use the `#[cargo_test]` attribute in \ order to be able to use the crate root.", ) }); let mut root = global_root(); root.push(&format!("t{}", id)); root } pub fn home() -> PathBuf { let mut path = root(); path.push("home"); path.mkdir_p(); path } pub trait CargoPathExt { fn rm_rf(&self); fn mkdir_p(&self); fn move_into_the_past(&self) { self.move_in_time(|sec, nsec| (sec - 3600, nsec)) } fn move_into_the_future(&self) { self.move_in_time(|sec, nsec| (sec + 3600, nsec)) } fn move_in_time(&self, travel_amount: F) where F: Fn(i64, u32) -> (i64, u32); } impl CargoPathExt for Path { fn rm_rf(&self) { let meta = match self.symlink_metadata() { Ok(meta) => meta, Err(e) => { if e.kind() == ErrorKind::NotFound { return; } panic!("failed to remove {:?}, could not read: {:?}", self, e); } }; // There is a race condition between fetching the metadata and // actually performing the removal, but we don't care all that much // for our tests. 
if meta.is_dir() { if let Err(e) = remove_dir_all::remove_dir_all(self) { panic!("failed to remove {:?}: {:?}", self, e) } } else if let Err(e) = fs::remove_file(self) { panic!("failed to remove {:?}: {:?}", self, e) } } fn mkdir_p(&self) { fs::create_dir_all(self) .unwrap_or_else(|e| panic!("failed to mkdir_p {}: {}", self.display(), e)) } fn move_in_time(&self, travel_amount: F) where F: Fn(i64, u32) -> (i64, u32), { if self.is_file() { time_travel(self, &travel_amount); } else { recurse(self, &self.join("target"), &travel_amount); } fn recurse(p: &Path, bad: &Path, travel_amount: &F) where F: Fn(i64, u32) -> (i64, u32), { if p.is_file() { time_travel(p, travel_amount) } else if !p.starts_with(bad) { for f in t!(fs::read_dir(p)) { let f = t!(f).path(); recurse(&f, bad, travel_amount); } } } fn time_travel(path: &Path, travel_amount: &F) where F: Fn(i64, u32) -> (i64, u32), { let stat = t!(path.symlink_metadata()); let mtime = FileTime::from_last_modification_time(&stat); let (sec, nsec) = travel_amount(mtime.unix_seconds(), mtime.nanoseconds()); let newtime = FileTime::from_unix_time(sec, nsec); // Sadly change_file_times has a failure mode where a readonly file // cannot have its times changed on windows. 
do_op(path, "set file times", |path| { filetime::set_file_times(path, newtime, newtime) }); } } } fn do_op(path: &Path, desc: &str, mut f: F) where F: FnMut(&Path) -> io::Result<()>, { match f(path) { Ok(()) => {} Err(ref e) if e.kind() == ErrorKind::PermissionDenied => { let mut p = t!(path.metadata()).permissions(); p.set_readonly(false); t!(fs::set_permissions(path, p)); // Unix also requires the parent to not be readonly for example when // removing files let parent = path.parent().unwrap(); let mut p = t!(parent.metadata()).permissions(); p.set_readonly(false); t!(fs::set_permissions(parent, p)); f(path).unwrap_or_else(|e| { panic!("failed to {} {}: {}", desc, path.display(), e); }) } Err(e) => { panic!("failed to {} {}: {}", desc, path.display(), e); } } } /// Get the filename for a library. /// /// `kind` should be one of: "lib", "rlib", "staticlib", "dylib", "proc-macro" /// /// For example, dynamic library named "foo" would return: /// - macOS: "libfoo.dylib" /// - Windows: "foo.dll" /// - Unix: "libfoo.so" pub fn get_lib_filename(name: &str, kind: &str) -> String { let prefix = get_lib_prefix(kind); let extension = get_lib_extension(kind); format!("{}{}.{}", prefix, name, extension) } pub fn get_lib_prefix(kind: &str) -> &str { match kind { "lib" | "rlib" => "lib", "staticlib" | "dylib" | "proc-macro" => { if cfg!(windows) { "" } else { "lib" } } _ => unreachable!(), } } pub fn get_lib_extension(kind: &str) -> &str { match kind { "lib" | "rlib" => "rlib", "staticlib" => { if cfg!(windows) { "lib" } else { "a" } } "dylib" | "proc-macro" => { if cfg!(windows) { "dll" } else if cfg!(target_os = "macos") { "dylib" } else { "so" } } _ => unreachable!(), } } /// Returns the sysroot as queried from rustc. 
pub fn sysroot() -> String { let output = Command::new("rustc") .arg("--print=sysroot") .output() .expect("rustc to run"); assert!(output.status.success()); let sysroot = String::from_utf8(output.stdout).unwrap(); sysroot.trim().to_string() } /// Returns true if names such as aux.* are allowed. /// /// Traditionally, Windows did not allow a set of file names (see `is_windows_reserved` /// for a list). More recent versions of Windows have relaxed this restriction. This test /// determines whether we are running in a mode that allows Windows reserved names. #[cfg(windows)] pub fn windows_reserved_names_are_allowed() -> bool { use cargo_util::is_ci; // Ensure tests still run in CI until we need to migrate. if is_ci() { return false; } use std::ffi::OsStr; use std::os::windows::ffi::OsStrExt; use std::ptr; use winapi::um::fileapi::GetFullPathNameW; let test_file_name: Vec<_> = OsStr::new("aux.rs").encode_wide().collect(); let buffer_length = unsafe { GetFullPathNameW(test_file_name.as_ptr(), 0, ptr::null_mut(), ptr::null_mut()) }; if buffer_length == 0 { // This means the call failed, so we'll conservatively assume reserved names are not allowed. return false; } let mut buffer = vec![0u16; buffer_length as usize]; let result = unsafe { GetFullPathNameW( test_file_name.as_ptr(), buffer_length, buffer.as_mut_ptr(), ptr::null_mut(), ) }; if result == 0 { // Once again, conservatively assume reserved names are not allowed if the // GetFullPathNameW call failed. 
return false; } // Under the old rules, a file name like aux.rs would get converted into \\.\aux, so // we detect this case by checking if the string starts with \\.\ // // Otherwise, the filename will be something like C:\Users\Foo\Documents\aux.rs let prefix: Vec<_> = OsStr::new("\\\\.\\").encode_wide().collect(); if buffer.starts_with(&prefix) { false } else { true } } cargo-0.66.0/crates/cargo-test-support/src/publish.rs000066400000000000000000000123551432416201200225450ustar00rootroot00000000000000use crate::compare::{assert_match_exact, find_json_mismatch}; use crate::registry::{self, alt_api_path}; use flate2::read::GzDecoder; use std::collections::{HashMap, HashSet}; use std::fs::File; use std::io::{self, prelude::*, SeekFrom}; use std::path::{Path, PathBuf}; use tar::Archive; fn read_le_u32(mut reader: R) -> io::Result where R: Read, { let mut buf = [0; 4]; reader.read_exact(&mut buf)?; Ok(u32::from_le_bytes(buf)) } /// Checks the result of a crate publish. pub fn validate_upload(expected_json: &str, expected_crate_name: &str, expected_files: &[&str]) { let new_path = registry::api_path().join("api/v1/crates/new"); _validate_upload( &new_path, expected_json, expected_crate_name, expected_files, &[], ); } /// Checks the result of a crate publish, along with the contents of the files. pub fn validate_upload_with_contents( expected_json: &str, expected_crate_name: &str, expected_files: &[&str], expected_contents: &[(&str, &str)], ) { let new_path = registry::api_path().join("api/v1/crates/new"); _validate_upload( &new_path, expected_json, expected_crate_name, expected_files, expected_contents, ); } /// Checks the result of a crate publish to an alternative registry. 
pub fn validate_alt_upload( expected_json: &str, expected_crate_name: &str, expected_files: &[&str], ) { let new_path = alt_api_path().join("api/v1/crates/new"); _validate_upload( &new_path, expected_json, expected_crate_name, expected_files, &[], ); } fn _validate_upload( new_path: &Path, expected_json: &str, expected_crate_name: &str, expected_files: &[&str], expected_contents: &[(&str, &str)], ) { let mut f = File::open(new_path).unwrap(); // 32-bit little-endian integer of length of JSON data. let json_sz = read_le_u32(&mut f).expect("read json length"); let mut json_bytes = vec![0; json_sz as usize]; f.read_exact(&mut json_bytes).expect("read JSON data"); let actual_json = serde_json::from_slice(&json_bytes).expect("uploaded JSON should be valid"); let expected_json = serde_json::from_str(expected_json).expect("expected JSON does not parse"); if let Err(e) = find_json_mismatch(&expected_json, &actual_json, None) { panic!("{}", e); } // 32-bit little-endian integer of length of crate file. let crate_sz = read_le_u32(&mut f).expect("read crate length"); let mut krate_bytes = vec![0; crate_sz as usize]; f.read_exact(&mut krate_bytes).expect("read crate data"); // Check at end. let current = f.seek(SeekFrom::Current(0)).unwrap(); assert_eq!(f.seek(SeekFrom::End(0)).unwrap(), current); // Verify the tarball. validate_crate_contents( &krate_bytes[..], expected_crate_name, expected_files, expected_contents, ); } /// Checks the contents of a `.crate` file. /// /// - `expected_crate_name` should be something like `foo-0.0.1.crate`. /// - `expected_files` should be a complete list of files in the crate /// (relative to expected_crate_name). /// - `expected_contents` should be a list of `(file_name, contents)` tuples /// to validate the contents of the given file. Only the listed files will /// be checked (others will be ignored). 
pub fn validate_crate_contents( reader: impl Read, expected_crate_name: &str, expected_files: &[&str], expected_contents: &[(&str, &str)], ) { let mut rdr = GzDecoder::new(reader); assert_eq!( rdr.header().unwrap().filename().unwrap(), expected_crate_name.as_bytes() ); let mut contents = Vec::new(); rdr.read_to_end(&mut contents).unwrap(); let mut ar = Archive::new(&contents[..]); let files: HashMap = ar .entries() .unwrap() .map(|entry| { let mut entry = entry.unwrap(); let name = entry.path().unwrap().into_owned(); let mut contents = String::new(); entry.read_to_string(&mut contents).unwrap(); (name, contents) }) .collect(); assert!(expected_crate_name.ends_with(".crate")); let base_crate_name = Path::new(&expected_crate_name[..expected_crate_name.len() - 6]); let actual_files: HashSet = files.keys().cloned().collect(); let expected_files: HashSet = expected_files .iter() .map(|name| base_crate_name.join(name)) .collect(); let missing: Vec<&PathBuf> = expected_files.difference(&actual_files).collect(); let extra: Vec<&PathBuf> = actual_files.difference(&expected_files).collect(); if !missing.is_empty() || !extra.is_empty() { panic!( "uploaded archive does not match.\nMissing: {:?}\nExtra: {:?}\n", missing, extra ); } if !expected_contents.is_empty() { for (e_file_name, e_file_contents) in expected_contents { let full_e_name = base_crate_name.join(e_file_name); let actual_contents = files .get(&full_e_name) .unwrap_or_else(|| panic!("file `{}` missing in archive", e_file_name)); assert_match_exact(e_file_contents, actual_contents); } } } cargo-0.66.0/crates/cargo-test-support/src/registry.rs000066400000000000000000001134571432416201200227540ustar00rootroot00000000000000use crate::git::repo; use crate::paths; use cargo_util::paths::append; use cargo_util::{registry::make_dep_path, Sha256}; use flate2::write::GzEncoder; use flate2::Compression; use std::collections::{BTreeMap, HashMap}; use std::fs::{self, File}; use std::io::{BufRead, BufReader, Write}; use 
std::net::{SocketAddr, TcpListener, TcpStream}; use std::path::{Path, PathBuf}; use std::thread; use tar::{Builder, Header}; use url::Url; /// Gets the path to the local index pretending to be crates.io. This is a Git repo /// initialized with a `config.json` file pointing to `dl_path` for downloads /// and `api_path` for uploads. pub fn registry_path() -> PathBuf { generate_path("registry") } /// Gets the path for local web API uploads. Cargo will place the contents of a web API /// request here. For example, `api/v1/crates/new` is the result of publishing a crate. pub fn api_path() -> PathBuf { generate_path("api") } /// Gets the path where crates can be downloaded using the web API endpoint. Crates /// should be organized as `{name}/{version}/download` to match the web API /// endpoint. This is rarely used and must be manually set up. fn dl_path() -> PathBuf { generate_path("dl") } /// Gets the alternative-registry version of `registry_path`. fn alt_registry_path() -> PathBuf { generate_path("alternative-registry") } /// Gets the alternative-registry version of `registry_url`. fn alt_registry_url() -> Url { generate_url("alternative-registry") } /// Gets the alternative-registry version of `dl_path`. pub fn alt_dl_path() -> PathBuf { generate_path("alternative-dl") } /// Gets the alternative-registry version of `api_path`. pub fn alt_api_path() -> PathBuf { generate_path("alternative-api") } fn generate_path(name: &str) -> PathBuf { paths::root().join(name) } fn generate_url(name: &str) -> Url { Url::from_file_path(generate_path(name)).ok().unwrap() } /// A builder for initializing registries. pub struct RegistryBuilder { /// If set, configures an alternate registry with the given name. alternative: Option, /// If set, the authorization token for the registry. token: Option, /// If set, serves the index over http. http_index: bool, /// If set, serves the API over http. 
http_api: bool, /// If set, config.json includes 'api' api: bool, /// Write the token in the configuration. configure_token: bool, /// Write the registry in configuration. configure_registry: bool, /// API responders. custom_responders: HashMap<&'static str, Box Response>>, } pub struct TestRegistry { _server: Option, index_url: Url, path: PathBuf, api_url: Url, dl_url: Url, token: Option, } impl TestRegistry { pub fn index_url(&self) -> &Url { &self.index_url } pub fn api_url(&self) -> &Url { &self.api_url } pub fn token(&self) -> &str { self.token .as_deref() .expect("registry was not configured with a token") } } impl RegistryBuilder { #[must_use] pub fn new() -> RegistryBuilder { RegistryBuilder { alternative: None, token: Some("api-token".to_string()), http_api: false, http_index: false, api: true, configure_registry: true, configure_token: true, custom_responders: HashMap::new(), } } /// Adds a custom HTTP response for a specific url #[must_use] pub fn add_responder Response>( mut self, url: &'static str, responder: R, ) -> Self { self.custom_responders.insert(url, Box::new(responder)); self } /// Sets whether or not to initialize as an alternative registry. #[must_use] pub fn alternative_named(mut self, alt: &str) -> Self { self.alternative = Some(alt.to_string()); self } /// Sets whether or not to initialize as an alternative registry. #[must_use] pub fn alternative(self) -> Self { self.alternative_named("alternative") } /// Prevents placing a token in the configuration #[must_use] pub fn no_configure_token(mut self) -> Self { self.configure_token = false; self } /// Prevents adding the registry to the configuration. 
#[must_use] pub fn no_configure_registry(mut self) -> Self { self.configure_registry = false; self } /// Sets the token value #[must_use] pub fn token(mut self, token: &str) -> Self { self.token = Some(token.to_string()); self } /// Operate the index over http #[must_use] pub fn http_index(mut self) -> Self { self.http_index = true; self } /// Operate the api over http #[must_use] pub fn http_api(mut self) -> Self { self.http_api = true; self } /// The registry has no api. #[must_use] pub fn no_api(mut self) -> Self { self.api = false; self } /// Initializes the registry. #[must_use] pub fn build(self) -> TestRegistry { let config_path = paths::home().join(".cargo/config"); t!(fs::create_dir_all(config_path.parent().unwrap())); let prefix = if let Some(alternative) = &self.alternative { format!("{alternative}-") } else { String::new() }; let registry_path = generate_path(&format!("{prefix}registry")); let index_url = generate_url(&format!("{prefix}registry")); let api_url = generate_url(&format!("{prefix}api")); let dl_url = generate_url(&format!("{prefix}dl")); let dl_path = generate_path(&format!("{prefix}dl")); let api_path = generate_path(&format!("{prefix}api")); let (server, index_url, api_url, dl_url) = if !self.http_index && !self.http_api { // No need to start the HTTP server. 
(None, index_url, api_url, dl_url) } else { let server = HttpServer::new( registry_path.clone(), dl_path, self.token.clone(), self.custom_responders, ); let index_url = if self.http_index { server.index_url() } else { index_url }; let api_url = if self.http_api { server.api_url() } else { api_url }; let dl_url = server.dl_url(); (Some(server), index_url, api_url, dl_url) }; let registry = TestRegistry { api_url, index_url, _server: server, dl_url, path: registry_path, token: self.token, }; if self.configure_registry { if let Some(alternative) = &self.alternative { append( &config_path, format!( " [registries.{alternative}] index = '{}'", registry.index_url ) .as_bytes(), ) .unwrap(); } else { append( &config_path, format!( " [source.crates-io] replace-with = 'dummy-registry' [source.dummy-registry] registry = '{}'", registry.index_url ) .as_bytes(), ) .unwrap(); } } if self.configure_token { let token = registry.token.as_deref().unwrap(); let credentials = paths::home().join(".cargo/credentials"); if let Some(alternative) = &self.alternative { append( &credentials, format!( r#" [registries.{alternative}] token = "{token}" "# ) .as_bytes(), ) .unwrap(); } else { append( &credentials, format!( r#" [registry] token = "{token}" "# ) .as_bytes(), ) .unwrap(); } } let api = if self.api { format!(r#","api":"{}""#, registry.api_url) } else { String::new() }; // Initialize a new registry. repo(®istry.path) .file( "config.json", &format!(r#"{{"dl":"{}"{api}}}"#, registry.dl_url), ) .build(); fs::create_dir_all(api_path.join("api/v1/crates")).unwrap(); registry } } /// A builder for creating a new package in a registry. /// /// This uses "source replacement" using an automatically generated /// `.cargo/config` file to ensure that dependencies will use these packages /// instead of contacting crates.io. See `source-replacement.md` for more /// details on how source replacement works. /// /// Call `publish` to finalize and create the package. 
/// /// If no files are specified, an empty `lib.rs` file is automatically created. /// /// The `Cargo.toml` file is automatically generated based on the methods /// called on `Package` (for example, calling `dep()` will add to the /// `[dependencies]` automatically). You may also specify a `Cargo.toml` file /// to override the generated one. /// /// This supports different registry types: /// - Regular source replacement that replaces `crates.io` (the default). /// - A "local registry" which is a subset for vendoring (see /// `Package::local`). /// - An "alternative registry" which requires specifying the registry name /// (see `Package::alternative`). /// /// This does not support "directory sources". See `directory.rs` for /// `VendorPackage` which implements directory sources. /// /// # Example /// ``` /// // Publish package "a" depending on "b". /// Package::new("a", "1.0.0") /// .dep("b", "1.0.0") /// .file("src/lib.rs", r#" /// extern crate b; /// pub fn f() -> i32 { b::f() * 2 } /// "#) /// .publish(); /// /// // Publish package "b". /// Package::new("b", "1.0.0") /// .file("src/lib.rs", r#" /// pub fn f() -> i32 { 12 } /// "#) /// .publish(); /// /// // Create a project that uses package "a". 
/// let p = project() /// .file("Cargo.toml", r#" /// [package] /// name = "foo" /// version = "0.0.1" /// /// [dependencies] /// a = "1.0" /// "#) /// .file("src/main.rs", r#" /// extern crate a; /// fn main() { println!("{}", a::f()); } /// "#) /// .build(); /// /// p.cargo("run").with_stdout("24").run(); /// ``` #[must_use] pub struct Package { name: String, vers: String, deps: Vec, files: Vec, yanked: bool, features: FeatureMap, local: bool, alternative: bool, invalid_json: bool, proc_macro: bool, links: Option, rust_version: Option, cargo_features: Vec, v: Option, } type FeatureMap = BTreeMap>; #[derive(Clone)] pub struct Dependency { name: String, vers: String, kind: String, artifact: Option<(String, Option)>, target: Option, features: Vec, registry: Option, package: Option, optional: bool, } /// A file to be created in a package. struct PackageFile { path: String, contents: String, /// The Unix mode for the file. Note that when extracted on Windows, this /// is mostly ignored since it doesn't have the same style of permissions. mode: u32, /// If `true`, the file is created in the root of the tarfile, used for /// testing invalid packages. extra: bool, } const DEFAULT_MODE: u32 = 0o644; /// Initializes the on-disk registry and sets up the config so that crates.io /// is replaced with the one on disk. pub fn init() -> TestRegistry { RegistryBuilder::new().build() } /// Variant of `init` that initializes the "alternative" registry and crates.io /// replacement. 
pub fn alt_init() -> TestRegistry { init(); RegistryBuilder::new().alternative().build() } pub struct HttpServerHandle { addr: SocketAddr, } impl HttpServerHandle { pub fn index_url(&self) -> Url { Url::parse(&format!("sparse+http://{}/index/", self.addr.to_string())).unwrap() } pub fn api_url(&self) -> Url { Url::parse(&format!("http://{}/", self.addr.to_string())).unwrap() } pub fn dl_url(&self) -> Url { Url::parse(&format!("http://{}/dl", self.addr.to_string())).unwrap() } } impl Drop for HttpServerHandle { fn drop(&mut self) { if let Ok(mut stream) = TcpStream::connect(self.addr) { // shutdown the server let _ = stream.write_all(b"stop"); let _ = stream.flush(); } } } /// Request to the test http server #[derive(Debug)] pub struct Request { pub url: Url, pub method: String, pub authorization: Option, pub if_modified_since: Option, pub if_none_match: Option, } /// Response from the test http server pub struct Response { pub code: u32, pub headers: Vec, pub body: Vec, } struct HttpServer { listener: TcpListener, registry_path: PathBuf, dl_path: PathBuf, token: Option, custom_responders: HashMap<&'static str, Box Response>>, } impl HttpServer { pub fn new( registry_path: PathBuf, dl_path: PathBuf, token: Option, api_responders: HashMap<&'static str, Box Response>>, ) -> HttpServerHandle { let listener = TcpListener::bind("127.0.0.1:0").unwrap(); let addr = listener.local_addr().unwrap(); let server = HttpServer { listener, registry_path, dl_path, token, custom_responders: api_responders, }; thread::spawn(move || server.start()); HttpServerHandle { addr } } fn start(&self) { let mut line = String::new(); 'server: loop { let (socket, _) = self.listener.accept().unwrap(); let mut buf = BufReader::new(socket); line.clear(); if buf.read_line(&mut line).unwrap() == 0 { // Connection terminated. continue; } // Read the "GET path HTTP/1.1" line. 
let mut parts = line.split_ascii_whitespace(); let method = parts.next().unwrap().to_ascii_lowercase(); if method == "stop" { // Shutdown the server. return; } let addr = self.listener.local_addr().unwrap(); let url = format!( "http://{}/{}", addr, parts.next().unwrap().trim_start_matches('/') ); let url = Url::parse(&url).unwrap(); // Grab headers we care about. let mut if_modified_since = None; let mut if_none_match = None; let mut authorization = None; loop { line.clear(); if buf.read_line(&mut line).unwrap() == 0 { continue 'server; } if line == "\r\n" { // End of headers. line.clear(); break; } let (name, value) = line.split_once(':').unwrap(); let name = name.trim().to_ascii_lowercase(); let value = value.trim().to_string(); match name.as_str() { "if-modified-since" => if_modified_since = Some(value), "if-none-match" => if_none_match = Some(value), "authorization" => authorization = Some(value), _ => {} } } let req = Request { authorization, if_modified_since, if_none_match, method, url, }; println!("req: {:#?}", req); let response = self.route(&req); let buf = buf.get_mut(); write!(buf, "HTTP/1.1 {}\r\n", response.code).unwrap(); write!(buf, "Content-Length: {}\r\n", response.body.len()).unwrap(); for header in response.headers { write!(buf, "{}\r\n", header).unwrap(); } write!(buf, "\r\n").unwrap(); buf.write_all(&response.body).unwrap(); buf.flush().unwrap(); } } /// Route the request fn route(&self, req: &Request) -> Response { let authorized = |mutatation: bool| { if mutatation { self.token == req.authorization } else { assert!(req.authorization.is_none(), "unexpected token"); true } }; // Check for custom responder if let Some(responder) = self.custom_responders.get(req.url.path()) { return responder(&req); } let path: Vec<_> = req.url.path()[1..].split('/').collect(); match (req.method.as_str(), path.as_slice()) { ("get", ["index", ..]) => { if !authorized(false) { self.unauthorized(req) } else { self.index(&req) } } ("get", ["dl", ..]) => { if 
!authorized(false) { self.unauthorized(req) } else { self.dl(&req) } } // The remainder of the operators in the test framework do nothing other than responding 'ok'. // // Note: We don't need to support anything real here because the testing framework publishes crates // by writing directly to the filesystem instead. If the test framework is changed to publish // via the HTTP API, then this should be made more complete. // publish ("put", ["api", "v1", "crates", "new"]) // yank | ("delete", ["api", "v1", "crates", .., "yank"]) // unyank | ("put", ["api", "v1", "crates", .., "unyank"]) // owners | ("get" | "put" | "delete", ["api", "v1", "crates", .., "owners"]) => { if !authorized(true) { self.unauthorized(req) } else { self.ok(&req) } } _ => self.not_found(&req), } } /// Unauthorized response fn unauthorized(&self, _req: &Request) -> Response { Response { code: 401, headers: vec![], body: b"Unauthorized message from server.".to_vec(), } } /// Not found response fn not_found(&self, _req: &Request) -> Response { Response { code: 404, headers: vec![], body: b"not found".to_vec(), } } /// Respond OK without doing anything fn ok(&self, _req: &Request) -> Response { Response { code: 200, headers: vec![], body: br#"{"ok": true, "msg": "completed!"}"#.to_vec(), } } /// Serve the download endpoint fn dl(&self, req: &Request) -> Response { let file = self .dl_path .join(req.url.path().strip_prefix("/dl/").unwrap()); println!("{}", file.display()); if !file.exists() { return self.not_found(req); } return Response { body: fs::read(&file).unwrap(), code: 200, headers: vec![], }; } /// Serve the registry index fn index(&self, req: &Request) -> Response { let file = self .registry_path .join(req.url.path().strip_prefix("/index/").unwrap()); if !file.exists() { return self.not_found(req); } else { // Now grab info about the file. 
let data = fs::read(&file).unwrap(); let etag = Sha256::new().update(&data).finish_hex(); let last_modified = format!("{:?}", file.metadata().unwrap().modified().unwrap()); // Start to construct our response: let mut any_match = false; let mut all_match = true; if let Some(expected) = &req.if_none_match { if &etag != expected { all_match = false; } else { any_match = true; } } if let Some(expected) = &req.if_modified_since { // NOTE: Equality comparison is good enough for tests. if &last_modified != expected { all_match = false; } else { any_match = true; } } if any_match && all_match { return Response { body: Vec::new(), code: 304, headers: vec![], }; } else { return Response { body: data, code: 200, headers: vec![ format!("ETag: \"{}\"", etag), format!("Last-Modified: {}", last_modified), ], }; } } } } impl Package { /// Creates a new package builder. /// Call `publish()` to finalize and build the package. pub fn new(name: &str, vers: &str) -> Package { let config = paths::home().join(".cargo/config"); if !config.exists() { init(); } Package { name: name.to_string(), vers: vers.to_string(), deps: Vec::new(), files: Vec::new(), yanked: false, features: BTreeMap::new(), local: false, alternative: false, invalid_json: false, proc_macro: false, links: None, rust_version: None, cargo_features: Vec::new(), v: None, } } /// Call with `true` to publish in a "local registry". /// /// See `source-replacement.html#local-registry-sources` for more details /// on local registries. See `local_registry.rs` for the tests that use /// this. pub fn local(&mut self, local: bool) -> &mut Package { self.local = local; self } /// Call with `true` to publish in an "alternative registry". /// /// The name of the alternative registry is called "alternative". /// /// See `src/doc/src/reference/registries.md` for more details on /// alternative registries. See `alt_registry.rs` for the tests that use /// this. 
pub fn alternative(&mut self, alternative: bool) -> &mut Package { self.alternative = alternative; self } /// Adds a file to the package. pub fn file(&mut self, name: &str, contents: &str) -> &mut Package { self.file_with_mode(name, DEFAULT_MODE, contents) } /// Adds a file with a specific Unix mode. pub fn file_with_mode(&mut self, path: &str, mode: u32, contents: &str) -> &mut Package { self.files.push(PackageFile { path: path.to_string(), contents: contents.to_string(), mode, extra: false, }); self } /// Adds an "extra" file that is not rooted within the package. /// /// Normal files are automatically placed within a directory named /// `$PACKAGE-$VERSION`. This allows you to override that behavior, /// typically for testing invalid behavior. pub fn extra_file(&mut self, path: &str, contents: &str) -> &mut Package { self.files.push(PackageFile { path: path.to_string(), contents: contents.to_string(), mode: DEFAULT_MODE, extra: true, }); self } /// Adds a normal dependency. Example: /// ``` /// [dependencies] /// foo = {version = "1.0"} /// ``` pub fn dep(&mut self, name: &str, vers: &str) -> &mut Package { self.add_dep(&Dependency::new(name, vers)) } /// Adds a dependency with the given feature. Example: /// ``` /// [dependencies] /// foo = {version = "1.0", "features": ["feat1", "feat2"]} /// ``` pub fn feature_dep(&mut self, name: &str, vers: &str, features: &[&str]) -> &mut Package { self.add_dep(Dependency::new(name, vers).enable_features(features)) } /// Adds a platform-specific dependency. Example: /// ``` /// [target.'cfg(windows)'.dependencies] /// foo = {version = "1.0"} /// ``` pub fn target_dep(&mut self, name: &str, vers: &str, target: &str) -> &mut Package { self.add_dep(Dependency::new(name, vers).target(target)) } /// Adds a dependency to the alternative registry. pub fn registry_dep(&mut self, name: &str, vers: &str) -> &mut Package { self.add_dep(Dependency::new(name, vers).registry("alternative")) } /// Adds a dev-dependency. 
Example: /// ``` /// [dev-dependencies] /// foo = {version = "1.0"} /// ``` pub fn dev_dep(&mut self, name: &str, vers: &str) -> &mut Package { self.add_dep(Dependency::new(name, vers).dev()) } /// Adds a build-dependency. Example: /// ``` /// [build-dependencies] /// foo = {version = "1.0"} /// ``` pub fn build_dep(&mut self, name: &str, vers: &str) -> &mut Package { self.add_dep(Dependency::new(name, vers).build()) } pub fn add_dep(&mut self, dep: &Dependency) -> &mut Package { self.deps.push(dep.clone()); self } /// Specifies whether or not the package is "yanked". pub fn yanked(&mut self, yanked: bool) -> &mut Package { self.yanked = yanked; self } /// Specifies whether or not this is a proc macro. pub fn proc_macro(&mut self, proc_macro: bool) -> &mut Package { self.proc_macro = proc_macro; self } /// Adds an entry in the `[features]` section. pub fn feature(&mut self, name: &str, deps: &[&str]) -> &mut Package { let deps = deps.iter().map(|s| s.to_string()).collect(); self.features.insert(name.to_string(), deps); self } /// Specify a minimal Rust version. pub fn rust_version(&mut self, rust_version: &str) -> &mut Package { self.rust_version = Some(rust_version.into()); self } /// Causes the JSON line emitted in the index to be invalid, presumably /// causing Cargo to skip over this version. pub fn invalid_json(&mut self, invalid: bool) -> &mut Package { self.invalid_json = invalid; self } pub fn links(&mut self, links: &str) -> &mut Package { self.links = Some(links.to_string()); self } pub fn cargo_feature(&mut self, feature: &str) -> &mut Package { self.cargo_features.push(feature.to_owned()); self } /// Sets the index schema version for this package. /// /// See `cargo::sources::registry::RegistryPackage` for more information. pub fn schema_version(&mut self, version: u32) -> &mut Package { self.v = Some(version); self } /// Creates the package and place it in the registry. 
/// /// This does not actually use Cargo's publishing system, but instead /// manually creates the entry in the registry on the filesystem. /// /// Returns the checksum for the package. pub fn publish(&self) -> String { self.make_archive(); // Figure out what we're going to write into the index. let deps = self .deps .iter() .map(|dep| { // In the index, the `registry` is null if it is from the same registry. // In Cargo.toml, it is None if it is from crates.io. let registry_url = match (self.alternative, dep.registry.as_deref()) { (false, None) => None, (false, Some("alternative")) => Some(alt_registry_url().to_string()), (true, None) => { Some("https://github.com/rust-lang/crates.io-index".to_string()) } (true, Some("alternative")) => None, _ => panic!("registry_dep currently only supports `alternative`"), }; serde_json::json!({ "name": dep.name, "req": dep.vers, "features": dep.features, "default_features": true, "target": dep.target, "artifact": dep.artifact, "optional": dep.optional, "kind": dep.kind, "registry": registry_url, "package": dep.package, }) }) .collect::>(); let cksum = { let c = t!(fs::read(&self.archive_dst())); cksum(&c) }; let name = if self.invalid_json { serde_json::json!(1) } else { serde_json::json!(self.name) }; // This emulates what crates.io may do in the future. let (features, features2) = split_index_features(self.features.clone()); let mut json = serde_json::json!({ "name": name, "vers": self.vers, "deps": deps, "cksum": cksum, "features": features, "yanked": self.yanked, "links": self.links, }); if let Some(f2) = &features2 { json["features2"] = serde_json::json!(f2); json["v"] = serde_json::json!(2); } if let Some(v) = self.v { json["v"] = serde_json::json!(v); } let line = json.to_string(); let file = make_dep_path(&self.name, false); let registry_path = if self.alternative { alt_registry_path() } else { registry_path() }; // Write file/line in the index. 
let dst = if self.local { registry_path.join("index").join(&file) } else { registry_path.join(&file) }; let prev = fs::read_to_string(&dst).unwrap_or_default(); t!(fs::create_dir_all(dst.parent().unwrap())); t!(fs::write(&dst, prev + &line[..] + "\n")); // Add the new file to the index. if !self.local { let repo = t!(git2::Repository::open(®istry_path)); let mut index = t!(repo.index()); t!(index.add_path(Path::new(&file))); t!(index.write()); let id = t!(index.write_tree()); // Commit this change. let tree = t!(repo.find_tree(id)); let sig = t!(repo.signature()); let parent = t!(repo.refname_to_id("refs/heads/master")); let parent = t!(repo.find_commit(parent)); t!(repo.commit( Some("HEAD"), &sig, &sig, "Another commit", &tree, &[&parent] )); } cksum } fn make_archive(&self) { let dst = self.archive_dst(); t!(fs::create_dir_all(dst.parent().unwrap())); let f = t!(File::create(&dst)); let mut a = Builder::new(GzEncoder::new(f, Compression::default())); if !self .files .iter() .any(|PackageFile { path, .. }| path == "Cargo.toml") { self.append_manifest(&mut a); } if self.files.is_empty() { self.append(&mut a, "src/lib.rs", DEFAULT_MODE, ""); } else { for PackageFile { path, contents, mode, extra, } in &self.files { if *extra { self.append_raw(&mut a, path, *mode, contents); } else { self.append(&mut a, path, *mode, contents); } } } } fn append_manifest(&self, ar: &mut Builder) { let mut manifest = String::new(); if !self.cargo_features.is_empty() { manifest.push_str(&format!( "cargo-features = {}\n\n", toml_edit::ser::to_item(&self.cargo_features).unwrap() )); } manifest.push_str(&format!( r#" [package] name = "{}" version = "{}" authors = [] "#, self.name, self.vers )); if let Some(version) = &self.rust_version { manifest.push_str(&format!("rust-version = \"{}\"", version)); } for dep in self.deps.iter() { let target = match dep.target { None => String::new(), Some(ref s) => format!("target.'{}'.", s), }; let kind = match &dep.kind[..] 
{ "build" => "build-", "dev" => "dev-", _ => "", }; manifest.push_str(&format!( r#" [{}{}dependencies.{}] version = "{}" "#, target, kind, dep.name, dep.vers )); if let Some((artifact, target)) = &dep.artifact { manifest.push_str(&format!("artifact = \"{}\"\n", artifact)); if let Some(target) = &target { manifest.push_str(&format!("target = \"{}\"\n", target)) } } if let Some(registry) = &dep.registry { assert_eq!(registry, "alternative"); manifest.push_str(&format!("registry-index = \"{}\"", alt_registry_url())); } } if self.proc_macro { manifest.push_str("[lib]\nproc-macro = true\n"); } self.append(ar, "Cargo.toml", DEFAULT_MODE, &manifest); } fn append(&self, ar: &mut Builder, file: &str, mode: u32, contents: &str) { self.append_raw( ar, &format!("{}-{}/{}", self.name, self.vers, file), mode, contents, ); } fn append_raw(&self, ar: &mut Builder, path: &str, mode: u32, contents: &str) { let mut header = Header::new_ustar(); header.set_size(contents.len() as u64); t!(header.set_path(path)); header.set_mode(mode); header.set_cksum(); t!(ar.append(&header, contents.as_bytes())); } /// Returns the path to the compressed package file. pub fn archive_dst(&self) -> PathBuf { if self.local { registry_path().join(format!("{}-{}.crate", self.name, self.vers)) } else if self.alternative { alt_dl_path() .join(&self.name) .join(&self.vers) .join("download") } else { dl_path().join(&self.name).join(&self.vers).join("download") } } } pub fn cksum(s: &[u8]) -> String { Sha256::new().update(s).finish_hex() } impl Dependency { pub fn new(name: &str, vers: &str) -> Dependency { Dependency { name: name.to_string(), vers: vers.to_string(), kind: "normal".to_string(), artifact: None, target: None, features: Vec::new(), package: None, optional: false, registry: None, } } /// Changes this to `[build-dependencies]`. pub fn build(&mut self) -> &mut Self { self.kind = "build".to_string(); self } /// Changes this to `[dev-dependencies]`. 
pub fn dev(&mut self) -> &mut Self { self.kind = "dev".to_string(); self } /// Changes this to `[target.$target.dependencies]`. pub fn target(&mut self, target: &str) -> &mut Self { self.target = Some(target.to_string()); self } /// Change the artifact to be of the given kind, like "bin", or "staticlib", /// along with a specific target triple if provided. pub fn artifact(&mut self, kind: &str, target: Option) -> &mut Self { self.artifact = Some((kind.to_string(), target)); self } /// Adds `registry = $registry` to this dependency. pub fn registry(&mut self, registry: &str) -> &mut Self { self.registry = Some(registry.to_string()); self } /// Adds `features = [ ... ]` to this dependency. pub fn enable_features(&mut self, features: &[&str]) -> &mut Self { self.features.extend(features.iter().map(|s| s.to_string())); self } /// Adds `package = ...` to this dependency. pub fn package(&mut self, pkg: &str) -> &mut Self { self.package = Some(pkg.to_string()); self } /// Changes this to an optional dependency. pub fn optional(&mut self, optional: bool) -> &mut Self { self.optional = optional; self } } fn split_index_features(mut features: FeatureMap) -> (FeatureMap, Option) { let mut features2 = FeatureMap::new(); for (feat, values) in features.iter_mut() { if values .iter() .any(|value| value.starts_with("dep:") || value.contains("?/")) { let new_values = values.drain(..).collect(); features2.insert(feat.clone(), new_values); } } if features2.is_empty() { (features, None) } else { (features, Some(features2)) } } cargo-0.66.0/crates/cargo-test-support/src/tools.rs000066400000000000000000000067521432416201200222430ustar00rootroot00000000000000//! Common executables that can be reused by various tests. use crate::{basic_manifest, paths, project, Project}; use lazy_static::lazy_static; use std::path::{Path, PathBuf}; use std::sync::Mutex; lazy_static! 
{ static ref ECHO_WRAPPER: Mutex> = Mutex::new(None); static ref ECHO: Mutex> = Mutex::new(None); } /// Returns the path to an executable that works as a wrapper around rustc. /// /// The wrapper will echo the command line it was called with to stderr. pub fn echo_wrapper() -> PathBuf { let mut lock = ECHO_WRAPPER.lock().unwrap(); if let Some(path) = &*lock { return path.clone(); } let p = project() .at(paths::global_root().join("rustc-echo-wrapper")) .file("Cargo.toml", &basic_manifest("rustc-echo-wrapper", "1.0.0")) .file( "src/main.rs", r#" use std::fs::read_to_string; use std::path::PathBuf; fn main() { // Handle args from `@path` argfile for rustc let args = std::env::args() .flat_map(|p| if let Some(p) = p.strip_prefix("@") { read_to_string(p).unwrap().lines().map(String::from).collect() } else { vec![p] }) .collect::>(); eprintln!("WRAPPER CALLED: {}", args[1..].join(" ")); let status = std::process::Command::new(&args[1]) .args(&args[2..]).status().unwrap(); std::process::exit(status.code().unwrap_or(1)); } "#, ) .build(); p.cargo("build").run(); let path = p.bin("rustc-echo-wrapper"); *lock = Some(path.clone()); path } /// Returns the path to an executable that prints its arguments. /// /// Do not expect this to be anything fancy. pub fn echo() -> PathBuf { let mut lock = ECHO.lock().unwrap(); if let Some(path) = &*lock { return path.clone(); } if let Ok(path) = cargo_util::paths::resolve_executable(Path::new("echo")) { *lock = Some(path.clone()); return path; } // Often on Windows, `echo` is not available. 
let p = project() .at(paths::global_root().join("basic-echo")) .file("Cargo.toml", &basic_manifest("basic-echo", "1.0.0")) .file( "src/main.rs", r#" fn main() { let mut s = String::new(); let mut it = std::env::args().skip(1).peekable(); while let Some(n) = it.next() { s.push_str(&n); if it.peek().is_some() { s.push(' '); } } println!("{}", s); } "#, ) .build(); p.cargo("build").run(); let path = p.bin("basic-echo"); *lock = Some(path.clone()); path } /// Returns a project which builds a cargo-echo simple subcommand pub fn echo_subcommand() -> Project { let p = project() .at("cargo-echo") .file("Cargo.toml", &basic_manifest("cargo-echo", "0.0.1")) .file( "src/main.rs", r#" fn main() { let args: Vec<_> = ::std::env::args().skip(1).collect(); println!("{}", args.join(" ")); } "#, ) .build(); p.cargo("build").run(); p } cargo-0.66.0/crates/cargo-util/000077500000000000000000000000001432416201200162205ustar00rootroot00000000000000cargo-0.66.0/crates/cargo-util/Cargo.toml000066400000000000000000000013211432416201200201450ustar00rootroot00000000000000[package] name = "cargo-util" version = "0.2.2" edition = "2021" license = "MIT OR Apache-2.0" homepage = "https://github.com/rust-lang/cargo" repository = "https://github.com/rust-lang/cargo" description = "Miscellaneous support code used by Cargo." 
[dependencies] anyhow = "1.0.34" crypto-hash = "0.3.1" filetime = "0.2.9" hex = "0.4.2" jobserver = "0.1.21" libc = "0.2.88" log = "0.4.6" same-file = "1.0.6" shell-escape = "0.1.4" tempfile = "3.1.0" walkdir = "2.3.1" [target.'cfg(target_os = "macos")'.dependencies] core-foundation = { version = "0.9.0", features = ["mac_os_10_7_support"] } [target.'cfg(windows)'.dependencies] miow = "0.3.6" winapi = { version = "0.3.9", features = ["consoleapi", "minwindef"] } cargo-0.66.0/crates/cargo-util/LICENSE-APACHE000077700000000000000000000000001432416201200225112../../LICENSE-APACHEustar00rootroot00000000000000cargo-0.66.0/crates/cargo-util/LICENSE-MIT000077700000000000000000000000001432416201200217312../../LICENSE-MITustar00rootroot00000000000000cargo-0.66.0/crates/cargo-util/src/000077500000000000000000000000001432416201200170075ustar00rootroot00000000000000cargo-0.66.0/crates/cargo-util/src/lib.rs000066400000000000000000000007521432416201200201270ustar00rootroot00000000000000//! Miscellaneous support code used by Cargo. pub use self::read2::read2; pub use process_builder::ProcessBuilder; pub use process_error::{exit_status_to_string, is_simple_exit_code, ProcessError}; pub use sha256::Sha256; pub mod paths; mod process_builder; mod process_error; mod read2; pub mod registry; mod sha256; /// Whether or not this running in a Continuous Integration environment. pub fn is_ci() -> bool { std::env::var("CI").is_ok() || std::env::var("TF_BUILD").is_ok() } cargo-0.66.0/crates/cargo-util/src/paths.rs000066400000000000000000000653651432416201200205130ustar00rootroot00000000000000//! Various utilities for working with files and paths. 
use anyhow::{Context, Result}; use filetime::FileTime; use std::env; use std::ffi::{OsStr, OsString}; use std::fs::{self, File, OpenOptions}; use std::io; use std::io::prelude::*; use std::iter; use std::path::{Component, Path, PathBuf}; use tempfile::Builder as TempFileBuilder; /// Joins paths into a string suitable for the `PATH` environment variable. /// /// This is equivalent to [`std::env::join_paths`], but includes a more /// detailed error message. The given `env` argument is the name of the /// environment variable this is will be used for, which is included in the /// error message. pub fn join_paths>(paths: &[T], env: &str) -> Result { env::join_paths(paths.iter()) .with_context(|| { let paths = paths.iter().map(Path::new).collect::>(); format!("failed to join path array: {:?}", paths) }) .with_context(|| { format!( "failed to join search paths together\n\ Does ${} have an unterminated quote character?", env ) }) } /// Returns the name of the environment variable used for searching for /// dynamic libraries. pub fn dylib_path_envvar() -> &'static str { if cfg!(windows) { "PATH" } else if cfg!(target_os = "macos") { // When loading and linking a dynamic library or bundle, dlopen // searches in LD_LIBRARY_PATH, DYLD_LIBRARY_PATH, PWD, and // DYLD_FALLBACK_LIBRARY_PATH. // In the Mach-O format, a dynamic library has an "install path." // Clients linking against the library record this path, and the // dynamic linker, dyld, uses it to locate the library. // dyld searches DYLD_LIBRARY_PATH *before* the install path. // dyld searches DYLD_FALLBACK_LIBRARY_PATH only if it cannot // find the library in the install path. // Setting DYLD_LIBRARY_PATH can easily have unintended // consequences. // // Also, DYLD_LIBRARY_PATH appears to have significant performance // penalty starting in 10.13. Cargo's testsuite ran more than twice as // slow with it on CI. 
"DYLD_FALLBACK_LIBRARY_PATH" } else { "LD_LIBRARY_PATH" } } /// Returns a list of directories that are searched for dynamic libraries. /// /// Note that some operating systems will have defaults if this is empty that /// will need to be dealt with. pub fn dylib_path() -> Vec { match env::var_os(dylib_path_envvar()) { Some(var) => env::split_paths(&var).collect(), None => Vec::new(), } } /// Normalize a path, removing things like `.` and `..`. /// /// CAUTION: This does not resolve symlinks (unlike /// [`std::fs::canonicalize`]). This may cause incorrect or surprising /// behavior at times. This should be used carefully. Unfortunately, /// [`std::fs::canonicalize`] can be hard to use correctly, since it can often /// fail, or on Windows returns annoying device paths. This is a problem Cargo /// needs to improve on. pub fn normalize_path(path: &Path) -> PathBuf { let mut components = path.components().peekable(); let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() { components.next(); PathBuf::from(c.as_os_str()) } else { PathBuf::new() }; for component in components { match component { Component::Prefix(..) => unreachable!(), Component::RootDir => { ret.push(component.as_os_str()); } Component::CurDir => {} Component::ParentDir => { ret.pop(); } Component::Normal(c) => { ret.push(c); } } } ret } /// Returns the absolute path of where the given executable is located based /// on searching the `PATH` environment variable. /// /// Returns an error if it cannot be found. 
pub fn resolve_executable(exec: &Path) -> Result { if exec.components().count() == 1 { let paths = env::var_os("PATH").ok_or_else(|| anyhow::format_err!("no PATH"))?; let candidates = env::split_paths(&paths).flat_map(|path| { let candidate = path.join(&exec); let with_exe = if env::consts::EXE_EXTENSION.is_empty() { None } else { Some(candidate.with_extension(env::consts::EXE_EXTENSION)) }; iter::once(candidate).chain(with_exe) }); for candidate in candidates { if candidate.is_file() { return Ok(candidate); } } anyhow::bail!("no executable for `{}` found in PATH", exec.display()) } else { Ok(exec.into()) } } /// Reads a file to a string. /// /// Equivalent to [`std::fs::read_to_string`] with better error messages. pub fn read(path: &Path) -> Result { match String::from_utf8(read_bytes(path)?) { Ok(s) => Ok(s), Err(_) => anyhow::bail!("path at `{}` was not valid utf-8", path.display()), } } /// Reads a file into a bytes vector. /// /// Equivalent to [`std::fs::read`] with better error messages. pub fn read_bytes(path: &Path) -> Result> { fs::read(path).with_context(|| format!("failed to read `{}`", path.display())) } /// Writes a file to disk. /// /// Equivalent to [`std::fs::write`] with better error messages. pub fn write, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()> { let path = path.as_ref(); fs::write(path, contents.as_ref()) .with_context(|| format!("failed to write `{}`", path.display())) } /// Equivalent to [`write()`], but does not write anything if the file contents /// are identical to the given contents. 
pub fn write_if_changed, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()> { (|| -> Result<()> { let contents = contents.as_ref(); let mut f = OpenOptions::new() .read(true) .write(true) .create(true) .open(&path)?; let mut orig = Vec::new(); f.read_to_end(&mut orig)?; if orig != contents { f.set_len(0)?; f.seek(io::SeekFrom::Start(0))?; f.write_all(contents)?; } Ok(()) })() .with_context(|| format!("failed to write `{}`", path.as_ref().display()))?; Ok(()) } /// Equivalent to [`write()`], but appends to the end instead of replacing the /// contents. pub fn append(path: &Path, contents: &[u8]) -> Result<()> { (|| -> Result<()> { let mut f = OpenOptions::new() .write(true) .append(true) .create(true) .open(path)?; f.write_all(contents)?; Ok(()) })() .with_context(|| format!("failed to write `{}`", path.display()))?; Ok(()) } /// Creates a new file. pub fn create>(path: P) -> Result { let path = path.as_ref(); File::create(path).with_context(|| format!("failed to create file `{}`", path.display())) } /// Opens an existing file. pub fn open>(path: P) -> Result { let path = path.as_ref(); File::open(path).with_context(|| format!("failed to open file `{}`", path.display())) } /// Returns the last modification time of a file. pub fn mtime(path: &Path) -> Result { let meta = fs::metadata(path).with_context(|| format!("failed to stat `{}`", path.display()))?; Ok(FileTime::from_last_modification_time(&meta)) } /// Returns the maximum mtime of the given path, recursing into /// subdirectories, and following symlinks. pub fn mtime_recursive(path: &Path) -> Result { let meta = fs::metadata(path).with_context(|| format!("failed to stat `{}`", path.display()))?; if !meta.is_dir() { return Ok(FileTime::from_last_modification_time(&meta)); } let max_meta = walkdir::WalkDir::new(path) .follow_links(true) .into_iter() .filter_map(|e| match e { Ok(e) => Some(e), Err(e) => { // Ignore errors while walking. 
If Cargo can't access it, the // build script probably can't access it, either. log::debug!("failed to determine mtime while walking directory: {}", e); None } }) .filter_map(|e| { if e.path_is_symlink() { // Use the mtime of both the symlink and its target, to // handle the case where the symlink is modified to a // different target. let sym_meta = match std::fs::symlink_metadata(e.path()) { Ok(m) => m, Err(err) => { // I'm not sure when this is really possible (maybe a // race with unlinking?). Regardless, if Cargo can't // read it, the build script probably can't either. log::debug!( "failed to determine mtime while fetching symlink metadata of {}: {}", e.path().display(), err ); return None; } }; let sym_mtime = FileTime::from_last_modification_time(&sym_meta); // Walkdir follows symlinks. match e.metadata() { Ok(target_meta) => { let target_mtime = FileTime::from_last_modification_time(&target_meta); Some(sym_mtime.max(target_mtime)) } Err(err) => { // Can't access the symlink target. If Cargo can't // access it, the build script probably can't access // it either. log::debug!( "failed to determine mtime of symlink target for {}: {}", e.path().display(), err ); Some(sym_mtime) } } } else { let meta = match e.metadata() { Ok(m) => m, Err(err) => { // I'm not sure when this is really possible (maybe a // race with unlinking?). Regardless, if Cargo can't // read it, the build script probably can't either. log::debug!( "failed to determine mtime while fetching metadata of {}: {}", e.path().display(), err ); return None; } }; Some(FileTime::from_last_modification_time(&meta)) } }) .max() // or_else handles the case where there are no files in the directory. .unwrap_or_else(|| FileTime::from_last_modification_time(&meta)); Ok(max_meta) } /// Record the current time on the filesystem (using the filesystem's clock) /// using a file at the given directory. Returns the current time. 
pub fn set_invocation_time(path: &Path) -> Result { // note that if `FileTime::from_system_time(SystemTime::now());` is determined to be sufficient, // then this can be removed. let timestamp = path.join("invoked.timestamp"); write( ×tamp, "This file has an mtime of when this was started.", )?; let ft = mtime(×tamp)?; log::debug!("invocation time for {:?} is {}", path, ft); Ok(ft) } /// Converts a path to UTF-8 bytes. pub fn path2bytes(path: &Path) -> Result<&[u8]> { #[cfg(unix)] { use std::os::unix::prelude::*; Ok(path.as_os_str().as_bytes()) } #[cfg(windows)] { match path.as_os_str().to_str() { Some(s) => Ok(s.as_bytes()), None => Err(anyhow::format_err!( "invalid non-unicode path: {}", path.display() )), } } } /// Converts UTF-8 bytes to a path. pub fn bytes2path(bytes: &[u8]) -> Result { #[cfg(unix)] { use std::os::unix::prelude::*; Ok(PathBuf::from(OsStr::from_bytes(bytes))) } #[cfg(windows)] { use std::str; match str::from_utf8(bytes) { Ok(s) => Ok(PathBuf::from(s)), Err(..) => Err(anyhow::format_err!("invalid non-unicode path")), } } } /// Returns an iterator that walks up the directory hierarchy towards the root. /// /// Each item is a [`Path`]. It will start with the given path, finishing at /// the root. If the `stop_root_at` parameter is given, it will stop at the /// given path (which will be the last item). pub fn ancestors<'a>(path: &'a Path, stop_root_at: Option<&Path>) -> PathAncestors<'a> { PathAncestors::new(path, stop_root_at) } pub struct PathAncestors<'a> { current: Option<&'a Path>, stop_at: Option, } impl<'a> PathAncestors<'a> { fn new(path: &'a Path, stop_root_at: Option<&Path>) -> PathAncestors<'a> { let stop_at = env::var("__CARGO_TEST_ROOT") .ok() .map(PathBuf::from) .or_else(|| stop_root_at.map(|p| p.to_path_buf())); PathAncestors { current: Some(path), //HACK: avoid reading `~/.cargo/config` when testing Cargo itself. 
stop_at, } } } impl<'a> Iterator for PathAncestors<'a> { type Item = &'a Path; fn next(&mut self) -> Option<&'a Path> { if let Some(path) = self.current { self.current = path.parent(); if let Some(ref stop_at) = self.stop_at { if path == stop_at { self.current = None; } } Some(path) } else { None } } } /// Equivalent to [`std::fs::create_dir_all`] with better error messages. pub fn create_dir_all(p: impl AsRef) -> Result<()> { _create_dir_all(p.as_ref()) } fn _create_dir_all(p: &Path) -> Result<()> { fs::create_dir_all(p) .with_context(|| format!("failed to create directory `{}`", p.display()))?; Ok(()) } /// Recursively remove all files and directories at the given directory. /// /// This does *not* follow symlinks. pub fn remove_dir_all>(p: P) -> Result<()> { _remove_dir_all(p.as_ref()) } fn _remove_dir_all(p: &Path) -> Result<()> { if p.symlink_metadata() .with_context(|| format!("could not get metadata for `{}` to remove", p.display()))? .is_symlink() { return remove_file(p); } let entries = p .read_dir() .with_context(|| format!("failed to read directory `{}`", p.display()))?; for entry in entries { let entry = entry?; let path = entry.path(); if entry.file_type()?.is_dir() { remove_dir_all(&path)?; } else { remove_file(&path)?; } } remove_dir(&p) } /// Equivalent to [`std::fs::remove_dir`] with better error messages. pub fn remove_dir>(p: P) -> Result<()> { _remove_dir(p.as_ref()) } fn _remove_dir(p: &Path) -> Result<()> { fs::remove_dir(p).with_context(|| format!("failed to remove directory `{}`", p.display()))?; Ok(()) } /// Equivalent to [`std::fs::remove_file`] with better error messages. /// /// If the file is readonly, this will attempt to change the permissions to /// force the file to be deleted. 
pub fn remove_file>(p: P) -> Result<()> { _remove_file(p.as_ref()) } fn _remove_file(p: &Path) -> Result<()> { let mut err = match fs::remove_file(p) { Ok(()) => return Ok(()), Err(e) => e, }; if err.kind() == io::ErrorKind::PermissionDenied && set_not_readonly(p).unwrap_or(false) { match fs::remove_file(p) { Ok(()) => return Ok(()), Err(e) => err = e, } } Err(err).with_context(|| format!("failed to remove file `{}`", p.display()))?; Ok(()) } fn set_not_readonly(p: &Path) -> io::Result { let mut perms = p.metadata()?.permissions(); if !perms.readonly() { return Ok(false); } perms.set_readonly(false); fs::set_permissions(p, perms)?; Ok(true) } /// Hardlink (file) or symlink (dir) src to dst if possible, otherwise copy it. /// /// If the destination already exists, it is removed before linking. pub fn link_or_copy(src: impl AsRef, dst: impl AsRef) -> Result<()> { let src = src.as_ref(); let dst = dst.as_ref(); _link_or_copy(src, dst) } fn _link_or_copy(src: &Path, dst: &Path) -> Result<()> { log::debug!("linking {} to {}", src.display(), dst.display()); if same_file::is_same_file(src, dst).unwrap_or(false) { return Ok(()); } // NB: we can't use dst.exists(), as if dst is a broken symlink, // dst.exists() will return false. This is problematic, as we still need to // unlink dst in this case. symlink_metadata(dst).is_ok() will tell us // whether dst exists *without* following symlinks, which is what we want. if fs::symlink_metadata(dst).is_ok() { remove_file(&dst)?; } let link_result = if src.is_dir() { #[cfg(target_os = "redox")] use std::os::redox::fs::symlink; #[cfg(unix)] use std::os::unix::fs::symlink; #[cfg(windows)] // FIXME: This should probably panic or have a copy fallback. Symlinks // are not supported in all windows environments. Currently symlinking // is only used for .dSYM directories on macos, but this shouldn't be // accidentally relied upon. 
use std::os::windows::fs::symlink_dir as symlink; let dst_dir = dst.parent().unwrap(); let src = if src.starts_with(dst_dir) { src.strip_prefix(dst_dir).unwrap() } else { src }; symlink(src, dst) } else if env::var_os("__CARGO_COPY_DONT_LINK_DO_NOT_USE_THIS").is_some() { // This is a work-around for a bug in macOS 10.15. When running on // APFS, there seems to be a strange race condition with // Gatekeeper where it will forcefully kill a process launched via // `cargo run` with SIGKILL. Copying seems to avoid the problem. // This shouldn't affect anyone except Cargo's test suite because // it is very rare, and only seems to happen under heavy load and // rapidly creating lots of executables and running them. // See https://github.com/rust-lang/cargo/issues/7821 for the // gory details. fs::copy(src, dst).map(|_| ()) } else { if cfg!(target_os = "macos") { // This is a work-around for a bug on macos. There seems to be a race condition // with APFS when hard-linking binaries. Gatekeeper does not have signing or // hash information stored in kernel when running the process. Therefore killing it. // This problem does not appear when copying files as kernel has time to process it. // Note that: fs::copy on macos is using CopyOnWrite (syscall fclonefileat) which should be // as fast as hardlinking. // See https://github.com/rust-lang/cargo/issues/10060 for the details fs::copy(src, dst).map(|_| ()) } else { fs::hard_link(src, dst) } }; link_result .or_else(|err| { log::debug!("link failed {}. falling back to fs::copy", err); fs::copy(src, dst).map(|_| ()) }) .with_context(|| { format!( "failed to link or copy `{}` to `{}`", src.display(), dst.display() ) })?; Ok(()) } /// Copies a file from one location to another. /// /// Equivalent to [`std::fs::copy`] with better error messages. 
pub fn copy, Q: AsRef>(from: P, to: Q) -> Result { let from = from.as_ref(); let to = to.as_ref(); fs::copy(from, to) .with_context(|| format!("failed to copy `{}` to `{}`", from.display(), to.display())) } /// Changes the filesystem mtime (and atime if possible) for the given file. /// /// This intentionally does not return an error, as this is sometimes not /// supported on network filesystems. For the current uses in Cargo, this is a /// "best effort" approach, and errors shouldn't be propagated. pub fn set_file_time_no_err>(path: P, time: FileTime) { let path = path.as_ref(); match filetime::set_file_times(path, time, time) { Ok(()) => log::debug!("set file mtime {} to {}", path.display(), time), Err(e) => log::warn!( "could not set mtime of {} to {}: {:?}", path.display(), time, e ), } } /// Strips `base` from `path`. /// /// This canonicalizes both paths before stripping. This is useful if the /// paths are obtained in different ways, and one or the other may or may not /// have been normalized in some way. pub fn strip_prefix_canonical>( path: P, base: P, ) -> Result { // Not all filesystems support canonicalize. Just ignore if it doesn't work. let safe_canonicalize = |path: &Path| match path.canonicalize() { Ok(p) => p, Err(e) => { log::warn!("cannot canonicalize {:?}: {:?}", path, e); path.to_path_buf() } }; let canon_path = safe_canonicalize(path.as_ref()); let canon_base = safe_canonicalize(base.as_ref()); canon_path.strip_prefix(canon_base).map(|p| p.to_path_buf()) } /// Creates an excluded from cache directory atomically with its parents as needed. /// /// The atomicity only covers creating the leaf directory and exclusion from cache. Any missing /// parent directories will not be created in an atomic manner. /// /// This function is idempotent and in addition to that it won't exclude ``p`` from cache if it /// already exists. 
pub fn create_dir_all_excluded_from_backups_atomic(p: impl AsRef) -> Result<()> { let path = p.as_ref(); if path.is_dir() { return Ok(()); } let parent = path.parent().unwrap(); let base = path.file_name().unwrap(); create_dir_all(parent)?; // We do this in two steps (first create a temporary directory and exclude // it from backups, then rename it to the desired name. If we created the // directory directly where it should be and then excluded it from backups // we would risk a situation where cargo is interrupted right after the directory // creation but before the exclusion the the directory would remain non-excluded from // backups because we only perform exclusion right after we created the directory // ourselves. // // We need the tempdir created in parent instead of $TMP, because only then we can be // easily sure that rename() will succeed (the new name needs to be on the same mount // point as the old one). let tempdir = TempFileBuilder::new().prefix(base).tempdir_in(parent)?; exclude_from_backups(tempdir.path()); exclude_from_content_indexing(tempdir.path()); // Previously std::fs::create_dir_all() (through paths::create_dir_all()) was used // here to create the directory directly and fs::create_dir_all() explicitly treats // the directory being created concurrently by another thread or process as success, // hence the check below to follow the existing behavior. If we get an error at // rename() and suddently the directory (which didn't exist a moment earlier) exists // we can infer from it it's another cargo process doing work. if let Err(e) = fs::rename(tempdir.path(), path) { if !path.exists() { return Err(anyhow::Error::from(e)); } } Ok(()) } /// Mark an existing directory as excluded from backups and indexing. /// /// Errors in marking it are ignored. 
pub fn exclude_from_backups_and_indexing(p: impl AsRef) { let path = p.as_ref(); exclude_from_backups(path); exclude_from_content_indexing(path); } /// Marks the directory as excluded from archives/backups. /// /// This is recommended to prevent derived/temporary files from bloating backups. There are two /// mechanisms used to achieve this right now: /// /// * A dedicated resource property excluding from Time Machine backups on macOS /// * CACHEDIR.TAG files supported by various tools in a platform-independent way fn exclude_from_backups(path: &Path) { exclude_from_time_machine(path); let _ = std::fs::write( path.join("CACHEDIR.TAG"), "Signature: 8a477f597d28d172789f06886806bc55 # This file is a cache directory tag created by cargo. # For information about cache directory tags see https://bford.info/cachedir/ ", ); // Similarly to exclude_from_time_machine() we ignore errors here as it's an optional feature. } /// Marks the directory as excluded from content indexing. /// /// This is recommended to prevent the content of derived/temporary files from being indexed. /// This is very important for Windows users, as the live content indexing significantly slows /// cargo's I/O operations. /// /// This is currently a no-op on non-Windows platforms. 
fn exclude_from_content_indexing(path: &Path) { #[cfg(windows)] { use std::iter::once; use std::os::windows::prelude::OsStrExt; use winapi::um::fileapi::{GetFileAttributesW, SetFileAttributesW}; use winapi::um::winnt::FILE_ATTRIBUTE_NOT_CONTENT_INDEXED; let path: Vec = path.as_os_str().encode_wide().chain(once(0)).collect(); unsafe { SetFileAttributesW( path.as_ptr(), GetFileAttributesW(path.as_ptr()) | FILE_ATTRIBUTE_NOT_CONTENT_INDEXED, ); } } #[cfg(not(windows))] { let _ = path; } } #[cfg(not(target_os = "macos"))] fn exclude_from_time_machine(_: &Path) {} #[cfg(target_os = "macos")] /// Marks files or directories as excluded from Time Machine on macOS fn exclude_from_time_machine(path: &Path) { use core_foundation::base::TCFType; use core_foundation::{number, string, url}; use std::ptr; // For compatibility with 10.7 a string is used instead of global kCFURLIsExcludedFromBackupKey let is_excluded_key: Result = "NSURLIsExcludedFromBackupKey".parse(); let path = url::CFURL::from_path(path, false); if let (Some(path), Ok(is_excluded_key)) = (path, is_excluded_key) { unsafe { url::CFURLSetResourcePropertyForKey( path.as_concrete_TypeRef(), is_excluded_key.as_concrete_TypeRef(), number::kCFBooleanTrue as *const _, ptr::null_mut(), ); } } // Errors are ignored, since it's an optional feature and failure // doesn't prevent Cargo from working } cargo-0.66.0/crates/cargo-util/src/process_builder.rs000066400000000000000000000571001432416201200225440ustar00rootroot00000000000000use crate::process_error::ProcessError; use crate::read2; use anyhow::{bail, Context, Result}; use jobserver::Client; use shell_escape::escape; use tempfile::NamedTempFile; use std::collections::BTreeMap; use std::env; use std::ffi::{OsStr, OsString}; use std::fmt; use std::io::{self, Write}; use std::iter::once; use std::path::Path; use std::process::{Command, ExitStatus, Output, Stdio}; /// A builder object for an external process, similar to [`std::process::Command`]. 
#[derive(Clone, Debug)] pub struct ProcessBuilder { /// The program to execute. program: OsString, /// A list of arguments to pass to the program. args: Vec, /// Any environment variables that should be set for the program. env: BTreeMap>, /// The directory to run the program from. cwd: Option, /// A list of wrappers that wrap the original program when calling /// [`ProcessBuilder::wrapped`]. The last one is the outermost one. wrappers: Vec, /// The `make` jobserver. See the [jobserver crate] for /// more information. /// /// [jobserver crate]: https://docs.rs/jobserver/ jobserver: Option, /// `true` to include environment variable in display. display_env_vars: bool, /// `true` to retry with an argfile if hitting "command line too big" error. /// See [`ProcessBuilder::retry_with_argfile`] for more information. retry_with_argfile: bool, /// Data to write to stdin. stdin: Option>, } impl fmt::Display for ProcessBuilder { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "`")?; if self.display_env_vars { for (key, val) in self.env.iter() { if let Some(val) = val { let val = escape(val.to_string_lossy()); if cfg!(windows) { write!(f, "set {}={}&& ", key, val)?; } else { write!(f, "{}={} ", key, val)?; } } } } write!(f, "{}", self.get_program().to_string_lossy())?; for arg in self.get_args() { write!(f, " {}", escape(arg.to_string_lossy()))?; } write!(f, "`") } } impl ProcessBuilder { /// Creates a new [`ProcessBuilder`] with the given executable path. pub fn new>(cmd: T) -> ProcessBuilder { ProcessBuilder { program: cmd.as_ref().to_os_string(), args: Vec::new(), cwd: None, env: BTreeMap::new(), wrappers: Vec::new(), jobserver: None, display_env_vars: false, retry_with_argfile: false, stdin: None, } } /// (chainable) Sets the executable for the process. pub fn program>(&mut self, program: T) -> &mut ProcessBuilder { self.program = program.as_ref().to_os_string(); self } /// (chainable) Adds `arg` to the args list. 
pub fn arg>(&mut self, arg: T) -> &mut ProcessBuilder { self.args.push(arg.as_ref().to_os_string()); self } /// (chainable) Adds multiple `args` to the args list. pub fn args>(&mut self, args: &[T]) -> &mut ProcessBuilder { self.args .extend(args.iter().map(|t| t.as_ref().to_os_string())); self } /// (chainable) Replaces the args list with the given `args`. pub fn args_replace>(&mut self, args: &[T]) -> &mut ProcessBuilder { if let Some(program) = self.wrappers.pop() { // User intend to replace all args, so we // - use the outermost wrapper as the main program, and // - cleanup other inner wrappers. self.program = program; self.wrappers = Vec::new(); } self.args = args.iter().map(|t| t.as_ref().to_os_string()).collect(); self } /// (chainable) Sets the current working directory of the process. pub fn cwd>(&mut self, path: T) -> &mut ProcessBuilder { self.cwd = Some(path.as_ref().to_os_string()); self } /// (chainable) Sets an environment variable for the process. pub fn env>(&mut self, key: &str, val: T) -> &mut ProcessBuilder { self.env .insert(key.to_string(), Some(val.as_ref().to_os_string())); self } /// (chainable) Unsets an environment variable for the process. pub fn env_remove(&mut self, key: &str) -> &mut ProcessBuilder { self.env.insert(key.to_string(), None); self } /// Gets the executable name. pub fn get_program(&self) -> &OsString { self.wrappers.last().unwrap_or(&self.program) } /// Gets the program arguments. pub fn get_args(&self) -> impl Iterator { self.wrappers .iter() .rev() .chain(once(&self.program)) .chain(self.args.iter()) .skip(1) // Skip the main `program } /// Gets the current working directory for the process. pub fn get_cwd(&self) -> Option<&Path> { self.cwd.as_ref().map(Path::new) } /// Gets an environment variable as the process will see it (will inherit from environment /// unless explicitally unset). 
pub fn get_env(&self, var: &str) -> Option { self.env .get(var) .cloned() .or_else(|| Some(env::var_os(var))) .and_then(|s| s) } /// Gets all environment variables explicitly set or unset for the process (not inherited /// vars). pub fn get_envs(&self) -> &BTreeMap> { &self.env } /// Sets the `make` jobserver. See the [jobserver crate][jobserver_docs] for /// more information. /// /// [jobserver_docs]: https://docs.rs/jobserver/0.1.6/jobserver/ pub fn inherit_jobserver(&mut self, jobserver: &Client) -> &mut Self { self.jobserver = Some(jobserver.clone()); self } /// Enables environment variable display. pub fn display_env_vars(&mut self) -> &mut Self { self.display_env_vars = true; self } /// Enables retrying with an argfile if hitting "command line too big" error /// /// This is primarily for the `@path` arg of rustc and rustdoc, which treat /// each line as an command-line argument, so `LF` and `CRLF` bytes are not /// valid as an argument for argfile at this moment. /// For example, `RUSTDOCFLAGS="--crate-version foo\nbar" cargo doc` is /// valid when invoking from command-line but not from argfile. /// /// To sum up, the limitations of the argfile are: /// /// - Must be valid UTF-8 encoded. /// - Must not contain any newlines in each argument. /// /// Ref: /// /// - https://doc.rust-lang.org/rustdoc/command-line-arguments.html#path-load-command-line-flags-from-a-path /// - https://doc.rust-lang.org/rustc/command-line-arguments.html#path-load-command-line-flags-from-a-path> pub fn retry_with_argfile(&mut self, enabled: bool) -> &mut Self { self.retry_with_argfile = enabled; self } /// Sets a value that will be written to stdin of the process on launch. pub fn stdin>>(&mut self, stdin: T) -> &mut Self { self.stdin = Some(stdin.into()); self } fn should_retry_with_argfile(&self, err: &io::Error) -> bool { self.retry_with_argfile && imp::command_line_too_big(err) } /// Like [`Command::status`] but with a better error message. 
pub fn status(&self) -> Result { self._status() .with_context(|| ProcessError::could_not_execute(self)) } fn _status(&self) -> io::Result { if !debug_force_argfile(self.retry_with_argfile) { let mut cmd = self.build_command(); match cmd.spawn() { Err(ref e) if self.should_retry_with_argfile(e) => {} Err(e) => return Err(e), Ok(mut child) => return child.wait(), } } let (mut cmd, argfile) = self.build_command_with_argfile()?; let status = cmd.spawn()?.wait(); close_tempfile_and_log_error(argfile); status } /// Runs the process, waiting for completion, and mapping non-success exit codes to an error. pub fn exec(&self) -> Result<()> { let exit = self.status()?; if exit.success() { Ok(()) } else { Err(ProcessError::new( &format!("process didn't exit successfully: {}", self), Some(exit), None, ) .into()) } } /// Replaces the current process with the target process. /// /// On Unix, this executes the process using the Unix syscall `execvp`, which will block /// this process, and will only return if there is an error. /// /// On Windows this isn't technically possible. Instead we emulate it to the best of our /// ability. One aspect we fix here is that we specify a handler for the Ctrl-C handler. /// In doing so (and by effectively ignoring it) we should emulate proxying Ctrl-C /// handling to the application at hand, which will either terminate or handle it itself. /// According to Microsoft's documentation at /// . /// the Ctrl-C signal is sent to all processes attached to a terminal, which should /// include our child process. If the child terminates then we'll reap them in Cargo /// pretty quickly, and if the child handles the signal then we won't terminate /// (and we shouldn't!) until the process itself later exits. pub fn exec_replace(&self) -> Result<()> { imp::exec_replace(self) } /// Like [`Command::output`] but with a better error message. 
pub fn output(&self) -> Result { self._output() .with_context(|| ProcessError::could_not_execute(self)) } fn _output(&self) -> io::Result { if !debug_force_argfile(self.retry_with_argfile) { let mut cmd = self.build_command(); match piped(&mut cmd, self.stdin.is_some()).spawn() { Err(ref e) if self.should_retry_with_argfile(e) => {} Err(e) => return Err(e), Ok(mut child) => { if let Some(stdin) = &self.stdin { child.stdin.take().unwrap().write_all(stdin)?; } return child.wait_with_output(); } } } let (mut cmd, argfile) = self.build_command_with_argfile()?; let mut child = piped(&mut cmd, self.stdin.is_some()).spawn()?; if let Some(stdin) = &self.stdin { child.stdin.take().unwrap().write_all(stdin)?; } let output = child.wait_with_output(); close_tempfile_and_log_error(argfile); output } /// Executes the process, returning the stdio output, or an error if non-zero exit status. pub fn exec_with_output(&self) -> Result { let output = self.output()?; if output.status.success() { Ok(output) } else { Err(ProcessError::new( &format!("process didn't exit successfully: {}", self), Some(output.status), Some(&output), ) .into()) } } /// Executes a command, passing each line of stdout and stderr to the supplied callbacks, which /// can mutate the string data. /// /// If any invocations of these function return an error, it will be propagated. /// /// If `capture_output` is true, then all the output will also be buffered /// and stored in the returned `Output` object. If it is false, no caching /// is done, and the callbacks are solely responsible for handling the /// output. 
pub fn exec_with_streaming( &self, on_stdout_line: &mut dyn FnMut(&str) -> Result<()>, on_stderr_line: &mut dyn FnMut(&str) -> Result<()>, capture_output: bool, ) -> Result { let mut stdout = Vec::new(); let mut stderr = Vec::new(); let mut callback_error = None; let mut stdout_pos = 0; let mut stderr_pos = 0; let spawn = |mut cmd| { if !debug_force_argfile(self.retry_with_argfile) { match piped(&mut cmd, false).spawn() { Err(ref e) if self.should_retry_with_argfile(e) => {} Err(e) => return Err(e), Ok(child) => return Ok((child, None)), } } let (mut cmd, argfile) = self.build_command_with_argfile()?; Ok((piped(&mut cmd, false).spawn()?, Some(argfile))) }; let status = (|| { let cmd = self.build_command(); let (mut child, argfile) = spawn(cmd)?; let out = child.stdout.take().unwrap(); let err = child.stderr.take().unwrap(); read2(out, err, &mut |is_out, data, eof| { let pos = if is_out { &mut stdout_pos } else { &mut stderr_pos }; let idx = if eof { data.len() } else { match data[*pos..].iter().rposition(|b| *b == b'\n') { Some(i) => *pos + i + 1, None => { *pos = data.len(); return; } } }; let new_lines = &data[..idx]; for line in String::from_utf8_lossy(new_lines).lines() { if callback_error.is_some() { break; } let callback_result = if is_out { on_stdout_line(line) } else { on_stderr_line(line) }; if let Err(e) = callback_result { callback_error = Some(e); break; } } if capture_output { let dst = if is_out { &mut stdout } else { &mut stderr }; dst.extend(new_lines); } data.drain(..idx); *pos = 0; })?; let status = child.wait(); if let Some(argfile) = argfile { close_tempfile_and_log_error(argfile); } status })() .with_context(|| ProcessError::could_not_execute(self))?; let output = Output { status, stdout, stderr, }; { let to_print = if capture_output { Some(&output) } else { None }; if let Some(e) = callback_error { let cx = ProcessError::new( &format!("failed to parse process output: {}", self), Some(output.status), to_print, ); 
bail!(anyhow::Error::new(cx).context(e)); } else if !output.status.success() { bail!(ProcessError::new( &format!("process didn't exit successfully: {}", self), Some(output.status), to_print, )); } } Ok(output) } /// Builds the command with an `@` argfile that contains all the /// arguments. This is primarily served for rustc/rustdoc command family. fn build_command_with_argfile(&self) -> io::Result<(Command, NamedTempFile)> { use std::io::Write as _; let mut tmp = tempfile::Builder::new() .prefix("cargo-argfile.") .tempfile()?; let mut arg = OsString::from("@"); arg.push(tmp.path()); let mut cmd = self.build_command_without_args(); cmd.arg(arg); log::debug!("created argfile at {} for {self}", tmp.path().display()); let cap = self.get_args().map(|arg| arg.len() + 1).sum::(); let mut buf = Vec::with_capacity(cap); for arg in &self.args { let arg = arg.to_str().ok_or_else(|| { io::Error::new( io::ErrorKind::Other, format!( "argument for argfile contains invalid UTF-8 characters: `{}`", arg.to_string_lossy() ), ) })?; if arg.contains('\n') { return Err(io::Error::new( io::ErrorKind::Other, format!("argument for argfile contains newlines: `{arg}`"), )); } writeln!(buf, "{arg}")?; } tmp.write_all(&mut buf)?; Ok((cmd, tmp)) } /// Builds a command from `ProcessBuilder` for everything but not `args`. fn build_command_without_args(&self) -> Command { let mut command = { let mut iter = self.wrappers.iter().rev().chain(once(&self.program)); let mut cmd = Command::new(iter.next().expect("at least one `program` exists")); cmd.args(iter); cmd }; if let Some(cwd) = self.get_cwd() { command.current_dir(cwd); } for (k, v) in &self.env { match *v { Some(ref v) => { command.env(k, v); } None => { command.env_remove(k); } } } if let Some(ref c) = self.jobserver { c.configure(&mut command); } command } /// Converts `ProcessBuilder` into a `std::process::Command`, and handles /// the jobserver, if present. /// /// Note that this method doesn't take argfile fallback into account. 
The /// caller should handle it by themselves. pub fn build_command(&self) -> Command { let mut command = self.build_command_without_args(); for arg in &self.args { command.arg(arg); } command } /// Wraps an existing command with the provided wrapper, if it is present and valid. /// /// # Examples /// /// ```rust /// use cargo_util::ProcessBuilder; /// // Running this would execute `rustc` /// let cmd = ProcessBuilder::new("rustc"); /// /// // Running this will execute `sccache rustc` /// let cmd = cmd.wrapped(Some("sccache")); /// ``` pub fn wrapped(mut self, wrapper: Option>) -> Self { if let Some(wrapper) = wrapper.as_ref() { let wrapper = wrapper.as_ref(); if !wrapper.is_empty() { self.wrappers.push(wrapper.to_os_string()); } } self } } /// Forces the command to use `@path` argfile. /// /// You should set `__CARGO_TEST_FORCE_ARGFILE` to enable this. fn debug_force_argfile(retry_enabled: bool) -> bool { cfg!(debug_assertions) && env::var("__CARGO_TEST_FORCE_ARGFILE").is_ok() && retry_enabled } /// Creates new pipes for stderr, stdout, and optionally stdin. 
fn piped(cmd: &mut Command, pipe_stdin: bool) -> &mut Command { cmd.stdout(Stdio::piped()) .stderr(Stdio::piped()) .stdin(if pipe_stdin { Stdio::piped() } else { Stdio::null() }) } fn close_tempfile_and_log_error(file: NamedTempFile) { file.close().unwrap_or_else(|e| { log::warn!("failed to close temporary file: {e}"); }); } #[cfg(unix)] mod imp { use super::{close_tempfile_and_log_error, debug_force_argfile, ProcessBuilder, ProcessError}; use anyhow::Result; use std::io; use std::os::unix::process::CommandExt; pub fn exec_replace(process_builder: &ProcessBuilder) -> Result<()> { let mut error; let mut file = None; if debug_force_argfile(process_builder.retry_with_argfile) { let (mut command, argfile) = process_builder.build_command_with_argfile()?; file = Some(argfile); error = command.exec() } else { let mut command = process_builder.build_command(); error = command.exec(); if process_builder.should_retry_with_argfile(&error) { let (mut command, argfile) = process_builder.build_command_with_argfile()?; file = Some(argfile); error = command.exec() } } if let Some(file) = file { close_tempfile_and_log_error(file); } Err(anyhow::Error::from(error).context(ProcessError::new( &format!("could not execute process {}", process_builder), None, None, ))) } pub fn command_line_too_big(err: &io::Error) -> bool { err.raw_os_error() == Some(libc::E2BIG) } } #[cfg(windows)] mod imp { use super::{ProcessBuilder, ProcessError}; use anyhow::Result; use std::io; use winapi::shared::minwindef::{BOOL, DWORD, FALSE, TRUE}; use winapi::um::consoleapi::SetConsoleCtrlHandler; unsafe extern "system" fn ctrlc_handler(_: DWORD) -> BOOL { // Do nothing; let the child process handle it. TRUE } pub fn exec_replace(process_builder: &ProcessBuilder) -> Result<()> { unsafe { if SetConsoleCtrlHandler(Some(ctrlc_handler), TRUE) == FALSE { return Err(ProcessError::new("Could not set Ctrl-C handler.", None, None).into()); } } // Just execute the process as normal. 
process_builder.exec() } pub fn command_line_too_big(err: &io::Error) -> bool { use winapi::shared::winerror::ERROR_FILENAME_EXCED_RANGE; err.raw_os_error() == Some(ERROR_FILENAME_EXCED_RANGE as i32) } } #[cfg(test)] mod tests { use super::ProcessBuilder; use std::fs; #[test] fn argfile_build_succeeds() { let mut cmd = ProcessBuilder::new("echo"); cmd.args(["foo", "bar"].as_slice()); let (cmd, argfile) = cmd.build_command_with_argfile().unwrap(); assert_eq!(cmd.get_program(), "echo"); let cmd_args: Vec<_> = cmd.get_args().map(|s| s.to_str().unwrap()).collect(); assert_eq!(cmd_args.len(), 1); assert!(cmd_args[0].starts_with("@")); assert!(cmd_args[0].contains("cargo-argfile.")); let buf = fs::read_to_string(argfile.path()).unwrap(); assert_eq!(buf, "foo\nbar\n"); } #[test] fn argfile_build_fails_if_arg_contains_newline() { let mut cmd = ProcessBuilder::new("echo"); cmd.arg("foo\n"); let err = cmd.build_command_with_argfile().unwrap_err(); assert_eq!( err.to_string(), "argument for argfile contains newlines: `foo\n`" ); } #[test] fn argfile_build_fails_if_arg_contains_invalid_utf8() { let mut cmd = ProcessBuilder::new("echo"); #[cfg(windows)] let invalid_arg = { use std::os::windows::prelude::*; std::ffi::OsString::from_wide(&[0x0066, 0x006f, 0xD800, 0x006f]) }; #[cfg(unix)] let invalid_arg = { use std::os::unix::ffi::OsStrExt; std::ffi::OsStr::from_bytes(&[0x66, 0x6f, 0x80, 0x6f]).to_os_string() }; cmd.arg(invalid_arg); let err = cmd.build_command_with_argfile().unwrap_err(); assert_eq!( err.to_string(), "argument for argfile contains invalid UTF-8 characters: `foοΏ½o`" ); } } cargo-0.66.0/crates/cargo-util/src/process_error.rs000066400000000000000000000174671432416201200222630ustar00rootroot00000000000000//! Error value for [`crate::ProcessBuilder`] when a process fails. use std::fmt; use std::process::{ExitStatus, Output}; use std::str; #[derive(Debug)] pub struct ProcessError { /// A detailed description to show to the user why the process failed. 
pub desc: String, /// The exit status of the process. /// /// This can be `None` if the process failed to launch (like process not /// found) or if the exit status wasn't a code but was instead something /// like termination via a signal. pub code: Option, /// The stdout from the process. /// /// This can be `None` if the process failed to launch, or the output was /// not captured. pub stdout: Option>, /// The stderr from the process. /// /// This can be `None` if the process failed to launch, or the output was /// not captured. pub stderr: Option>, } impl fmt::Display for ProcessError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.desc.fmt(f) } } impl std::error::Error for ProcessError {} impl ProcessError { /// Creates a new [`ProcessError`]. /// /// * `status` can be `None` if the process did not launch. /// * `output` can be `None` if the process did not launch, or output was not captured. pub fn new(msg: &str, status: Option, output: Option<&Output>) -> ProcessError { let exit = match status { Some(s) => exit_status_to_string(s), None => "never executed".to_string(), }; Self::new_raw( msg, status.and_then(|s| s.code()), &exit, output.map(|s| s.stdout.as_slice()), output.map(|s| s.stderr.as_slice()), ) } /// Creates a new [`ProcessError`] with the raw output data. /// /// * `code` can be `None` for situations like being killed by a signal on unix. pub fn new_raw( msg: &str, code: Option, status: &str, stdout: Option<&[u8]>, stderr: Option<&[u8]>, ) -> ProcessError { let mut desc = format!("{} ({})", msg, status); if let Some(out) = stdout { match str::from_utf8(out) { Ok(s) if !s.trim().is_empty() => { desc.push_str("\n--- stdout\n"); desc.push_str(s); } Ok(..) | Err(..) => {} } } if let Some(out) = stderr { match str::from_utf8(out) { Ok(s) if !s.trim().is_empty() => { desc.push_str("\n--- stderr\n"); desc.push_str(s); } Ok(..) | Err(..) 
=> {} } } ProcessError { desc, code, stdout: stdout.map(|s| s.to_vec()), stderr: stderr.map(|s| s.to_vec()), } } /// Creates a [`ProcessError`] with "could not execute process {cmd}". /// /// * `cmd` is usually but not limited to [`std::process::Command`]. pub fn could_not_execute(cmd: impl fmt::Display) -> ProcessError { ProcessError::new(&format!("could not execute process {cmd}"), None, None) } } /// Converts an [`ExitStatus`] to a human-readable string suitable for /// displaying to a user. pub fn exit_status_to_string(status: ExitStatus) -> String { return status_to_string(status); #[cfg(unix)] fn status_to_string(status: ExitStatus) -> String { use std::os::unix::process::*; if let Some(signal) = status.signal() { let name = match signal as libc::c_int { libc::SIGABRT => ", SIGABRT: process abort signal", libc::SIGALRM => ", SIGALRM: alarm clock", libc::SIGFPE => ", SIGFPE: erroneous arithmetic operation", libc::SIGHUP => ", SIGHUP: hangup", libc::SIGILL => ", SIGILL: illegal instruction", libc::SIGINT => ", SIGINT: terminal interrupt signal", libc::SIGKILL => ", SIGKILL: kill", libc::SIGPIPE => ", SIGPIPE: write on a pipe with no one to read", libc::SIGQUIT => ", SIGQUIT: terminal quit signal", libc::SIGSEGV => ", SIGSEGV: invalid memory reference", libc::SIGTERM => ", SIGTERM: termination signal", libc::SIGBUS => ", SIGBUS: access to undefined memory", #[cfg(not(target_os = "haiku"))] libc::SIGSYS => ", SIGSYS: bad system call", libc::SIGTRAP => ", SIGTRAP: trace/breakpoint trap", _ => "", }; format!("signal: {}{}", signal, name) } else { status.to_string() } } #[cfg(windows)] fn status_to_string(status: ExitStatus) -> String { use winapi::shared::minwindef::DWORD; use winapi::um::winnt::*; let mut base = status.to_string(); let extra = match status.code().unwrap() as DWORD { STATUS_ACCESS_VIOLATION => "STATUS_ACCESS_VIOLATION", STATUS_IN_PAGE_ERROR => "STATUS_IN_PAGE_ERROR", STATUS_INVALID_HANDLE => "STATUS_INVALID_HANDLE", STATUS_INVALID_PARAMETER => 
"STATUS_INVALID_PARAMETER", STATUS_NO_MEMORY => "STATUS_NO_MEMORY", STATUS_ILLEGAL_INSTRUCTION => "STATUS_ILLEGAL_INSTRUCTION", STATUS_NONCONTINUABLE_EXCEPTION => "STATUS_NONCONTINUABLE_EXCEPTION", STATUS_INVALID_DISPOSITION => "STATUS_INVALID_DISPOSITION", STATUS_ARRAY_BOUNDS_EXCEEDED => "STATUS_ARRAY_BOUNDS_EXCEEDED", STATUS_FLOAT_DENORMAL_OPERAND => "STATUS_FLOAT_DENORMAL_OPERAND", STATUS_FLOAT_DIVIDE_BY_ZERO => "STATUS_FLOAT_DIVIDE_BY_ZERO", STATUS_FLOAT_INEXACT_RESULT => "STATUS_FLOAT_INEXACT_RESULT", STATUS_FLOAT_INVALID_OPERATION => "STATUS_FLOAT_INVALID_OPERATION", STATUS_FLOAT_OVERFLOW => "STATUS_FLOAT_OVERFLOW", STATUS_FLOAT_STACK_CHECK => "STATUS_FLOAT_STACK_CHECK", STATUS_FLOAT_UNDERFLOW => "STATUS_FLOAT_UNDERFLOW", STATUS_INTEGER_DIVIDE_BY_ZERO => "STATUS_INTEGER_DIVIDE_BY_ZERO", STATUS_INTEGER_OVERFLOW => "STATUS_INTEGER_OVERFLOW", STATUS_PRIVILEGED_INSTRUCTION => "STATUS_PRIVILEGED_INSTRUCTION", STATUS_STACK_OVERFLOW => "STATUS_STACK_OVERFLOW", STATUS_DLL_NOT_FOUND => "STATUS_DLL_NOT_FOUND", STATUS_ORDINAL_NOT_FOUND => "STATUS_ORDINAL_NOT_FOUND", STATUS_ENTRYPOINT_NOT_FOUND => "STATUS_ENTRYPOINT_NOT_FOUND", STATUS_CONTROL_C_EXIT => "STATUS_CONTROL_C_EXIT", STATUS_DLL_INIT_FAILED => "STATUS_DLL_INIT_FAILED", STATUS_FLOAT_MULTIPLE_FAULTS => "STATUS_FLOAT_MULTIPLE_FAULTS", STATUS_FLOAT_MULTIPLE_TRAPS => "STATUS_FLOAT_MULTIPLE_TRAPS", STATUS_REG_NAT_CONSUMPTION => "STATUS_REG_NAT_CONSUMPTION", STATUS_HEAP_CORRUPTION => "STATUS_HEAP_CORRUPTION", STATUS_STACK_BUFFER_OVERRUN => "STATUS_STACK_BUFFER_OVERRUN", STATUS_ASSERTION_FAILURE => "STATUS_ASSERTION_FAILURE", _ => return base, }; base.push_str(", "); base.push_str(extra); base } } /// Returns `true` if the given process exit code is something a normal /// process would exit with. /// /// This helps differentiate from abnormal termination codes, such as /// segmentation faults or signals. pub fn is_simple_exit_code(code: i32) -> bool { // Typical unix exit codes are 0 to 127. 
// Windows doesn't have anything "typical", and is a // 32-bit number (which appears signed here, but is really // unsigned). However, most of the interesting NTSTATUS // codes are very large. This is just a rough // approximation of which codes are "normal" and which // ones are abnormal termination. code >= 0 && code <= 127 } cargo-0.66.0/crates/cargo-util/src/read2.rs000066400000000000000000000131661432416201200203610ustar00rootroot00000000000000pub use self::imp::read2; #[cfg(unix)] mod imp { use std::io; use std::io::prelude::*; use std::mem; use std::os::unix::prelude::*; use std::process::{ChildStderr, ChildStdout}; pub fn read2( mut out_pipe: ChildStdout, mut err_pipe: ChildStderr, data: &mut dyn FnMut(bool, &mut Vec, bool), ) -> io::Result<()> { unsafe { libc::fcntl(out_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK); libc::fcntl(err_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK); } let mut out_done = false; let mut err_done = false; let mut out = Vec::new(); let mut err = Vec::new(); let mut fds: [libc::pollfd; 2] = unsafe { mem::zeroed() }; fds[0].fd = out_pipe.as_raw_fd(); fds[0].events = libc::POLLIN; fds[1].fd = err_pipe.as_raw_fd(); fds[1].events = libc::POLLIN; let mut nfds = 2; let mut errfd = 1; while nfds > 0 { // wait for either pipe to become readable using `select` let r = unsafe { libc::poll(fds.as_mut_ptr(), nfds, -1) }; if r == -1 { let err = io::Error::last_os_error(); if err.kind() == io::ErrorKind::Interrupted { continue; } return Err(err); } // Read as much as we can from each pipe, ignoring EWOULDBLOCK or // EAGAIN. If we hit EOF, then this will happen because the underlying // reader will return Ok(0), in which case we'll see `Ok` ourselves. In // this case we flip the other fd back into blocking mode and read // whatever's leftover on that file descriptor. 
let handle = |res: io::Result<_>| match res { Ok(_) => Ok(true), Err(e) => { if e.kind() == io::ErrorKind::WouldBlock { Ok(false) } else { Err(e) } } }; if !err_done && fds[errfd].revents != 0 && handle(err_pipe.read_to_end(&mut err))? { err_done = true; nfds -= 1; } data(false, &mut err, err_done); if !out_done && fds[0].revents != 0 && handle(out_pipe.read_to_end(&mut out))? { out_done = true; fds[0].fd = err_pipe.as_raw_fd(); errfd = 0; nfds -= 1; } data(true, &mut out, out_done); } Ok(()) } } #[cfg(windows)] mod imp { use std::io; use std::os::windows::prelude::*; use std::process::{ChildStderr, ChildStdout}; use std::slice; use miow::iocp::{CompletionPort, CompletionStatus}; use miow::pipe::NamedPipe; use miow::Overlapped; use winapi::shared::winerror::ERROR_BROKEN_PIPE; struct Pipe<'a> { dst: &'a mut Vec, overlapped: Overlapped, pipe: NamedPipe, done: bool, } pub fn read2( out_pipe: ChildStdout, err_pipe: ChildStderr, data: &mut dyn FnMut(bool, &mut Vec, bool), ) -> io::Result<()> { let mut out = Vec::new(); let mut err = Vec::new(); let port = CompletionPort::new(1)?; port.add_handle(0, &out_pipe)?; port.add_handle(1, &err_pipe)?; unsafe { let mut out_pipe = Pipe::new(out_pipe, &mut out); let mut err_pipe = Pipe::new(err_pipe, &mut err); out_pipe.read()?; err_pipe.read()?; let mut status = [CompletionStatus::zero(), CompletionStatus::zero()]; while !out_pipe.done || !err_pipe.done { for status in port.get_many(&mut status, None)? 
{ if status.token() == 0 { out_pipe.complete(status); data(true, out_pipe.dst, out_pipe.done); out_pipe.read()?; } else { err_pipe.complete(status); data(false, err_pipe.dst, err_pipe.done); err_pipe.read()?; } } } Ok(()) } } impl<'a> Pipe<'a> { unsafe fn new(p: P, dst: &'a mut Vec) -> Pipe<'a> { Pipe { dst, pipe: NamedPipe::from_raw_handle(p.into_raw_handle()), overlapped: Overlapped::zero(), done: false, } } unsafe fn read(&mut self) -> io::Result<()> { let dst = slice_to_end(self.dst); match self.pipe.read_overlapped(dst, self.overlapped.raw()) { Ok(_) => Ok(()), Err(e) => { if e.raw_os_error() == Some(ERROR_BROKEN_PIPE as i32) { self.done = true; Ok(()) } else { Err(e) } } } } unsafe fn complete(&mut self, status: &CompletionStatus) { let prev = self.dst.len(); self.dst.set_len(prev + status.bytes_transferred() as usize); if status.bytes_transferred() == 0 { self.done = true; } } } unsafe fn slice_to_end(v: &mut Vec) -> &mut [u8] { if v.capacity() == 0 { v.reserve(16); } if v.capacity() == v.len() { v.reserve(1); } slice::from_raw_parts_mut(v.as_mut_ptr().add(v.len()), v.capacity() - v.len()) } } cargo-0.66.0/crates/cargo-util/src/registry.rs000066400000000000000000000030311432416201200212220ustar00rootroot00000000000000/// Make a path to a dependency, which aligns to /// /// - [index from of Cargo's index on filesystem][1], and /// - [index from Crates.io][2]. 
/// /// [1]: https://docs.rs/cargo/latest/cargo/sources/registry/index.html#the-format-of-the-index /// [2]: https://github.com/rust-lang/crates.io-index pub fn make_dep_path(dep_name: &str, prefix_only: bool) -> String { let (slash, name) = if prefix_only { ("", "") } else { ("/", dep_name) }; match dep_name.len() { 1 => format!("1{}{}", slash, name), 2 => format!("2{}{}", slash, name), 3 => format!("3/{}{}{}", &dep_name[..1], slash, name), _ => format!("{}/{}{}{}", &dep_name[0..2], &dep_name[2..4], slash, name), } } #[cfg(test)] mod tests { use super::make_dep_path; #[test] fn prefix_only() { assert_eq!(make_dep_path("a", true), "1"); assert_eq!(make_dep_path("ab", true), "2"); assert_eq!(make_dep_path("abc", true), "3/a"); assert_eq!(make_dep_path("Abc", true), "3/A"); assert_eq!(make_dep_path("AbCd", true), "Ab/Cd"); assert_eq!(make_dep_path("aBcDe", true), "aB/cD"); } #[test] fn full() { assert_eq!(make_dep_path("a", false), "1/a"); assert_eq!(make_dep_path("ab", false), "2/ab"); assert_eq!(make_dep_path("abc", false), "3/a/abc"); assert_eq!(make_dep_path("Abc", false), "3/A/Abc"); assert_eq!(make_dep_path("AbCd", false), "Ab/Cd/AbCd"); assert_eq!(make_dep_path("aBcDe", false), "aB/cD/aBcDe"); } } cargo-0.66.0/crates/cargo-util/src/sha256.rs000066400000000000000000000025301432416201200203650ustar00rootroot00000000000000use super::paths; use anyhow::{Context, Result}; use crypto_hash::{Algorithm, Hasher}; use std::fs::File; use std::io::{self, Read, Write}; use std::path::Path; pub struct Sha256(Hasher); impl Sha256 { pub fn new() -> Sha256 { let hasher = Hasher::new(Algorithm::SHA256); Sha256(hasher) } pub fn update(&mut self, bytes: &[u8]) -> &mut Sha256 { let _ = self.0.write_all(bytes); self } pub fn update_file(&mut self, mut file: &File) -> io::Result<&mut Sha256> { let mut buf = [0; 64 * 1024]; loop { let n = file.read(&mut buf)?; if n == 0 { break Ok(self); } self.update(&buf[..n]); } } pub fn update_path>(&mut self, path: P) -> Result<&mut Sha256> { 
let path = path.as_ref(); let file = paths::open(path)?; self.update_file(&file) .with_context(|| format!("failed to read `{}`", path.display()))?; Ok(self) } pub fn finish(&mut self) -> [u8; 32] { let mut ret = [0u8; 32]; let data = self.0.finish(); ret.copy_from_slice(&data[..]); ret } pub fn finish_hex(&mut self) -> String { hex::encode(self.finish()) } } impl Default for Sha256 { fn default() -> Self { Self::new() } } cargo-0.66.0/crates/crates-io/000077500000000000000000000000001432416201200160405ustar00rootroot00000000000000cargo-0.66.0/crates/crates-io/Cargo.toml000066400000000000000000000006221432416201200177700ustar00rootroot00000000000000[package] name = "crates-io" version = "0.34.0" edition = "2021" license = "MIT OR Apache-2.0" repository = "https://github.com/rust-lang/cargo" description = """ Helpers for interacting with crates.io """ [lib] name = "crates_io" path = "lib.rs" [dependencies] curl = "0.4" anyhow = "1.0.34" percent-encoding = "2.0" serde = { version = "1.0", features = ['derive'] } serde_json = "1.0" url = "2.0" cargo-0.66.0/crates/crates-io/LICENSE-APACHE000077700000000000000000000000001432416201200223312../../LICENSE-APACHEustar00rootroot00000000000000cargo-0.66.0/crates/crates-io/LICENSE-MIT000077700000000000000000000000001432416201200215512../../LICENSE-MITustar00rootroot00000000000000cargo-0.66.0/crates/crates-io/lib.rs000066400000000000000000000366461432416201200171730ustar00rootroot00000000000000#![allow(clippy::all)] use std::collections::BTreeMap; use std::fmt; use std::fs::File; use std::io::prelude::*; use std::io::{Cursor, SeekFrom}; use std::time::Instant; use anyhow::{bail, format_err, Context, Result}; use curl::easy::{Easy, List}; use percent_encoding::{percent_encode, NON_ALPHANUMERIC}; use serde::{Deserialize, Serialize}; use url::Url; pub struct Registry { /// The base URL for issuing API requests. host: String, /// Optional authorization token. /// If None, commands requiring authorization will fail. 
token: Option, /// Curl handle for issuing requests. handle: Easy, } #[derive(PartialEq, Clone, Copy)] pub enum Auth { Authorized, Unauthorized, } #[derive(Deserialize)] pub struct Crate { pub name: String, pub description: Option, pub max_version: String, } #[derive(Serialize)] pub struct NewCrate { pub name: String, pub vers: String, pub deps: Vec, pub features: BTreeMap>, pub authors: Vec, pub description: Option, pub documentation: Option, pub homepage: Option, pub readme: Option, pub readme_file: Option, pub keywords: Vec, pub categories: Vec, pub license: Option, pub license_file: Option, pub repository: Option, pub badges: BTreeMap>, pub links: Option, } #[derive(Serialize)] pub struct NewCrateDependency { pub optional: bool, pub default_features: bool, pub name: String, pub features: Vec, pub version_req: String, pub target: Option, pub kind: String, #[serde(skip_serializing_if = "Option::is_none")] pub registry: Option, #[serde(skip_serializing_if = "Option::is_none")] pub explicit_name_in_toml: Option, } #[derive(Deserialize)] pub struct User { pub id: u32, pub login: String, pub avatar: Option, pub email: Option, pub name: Option, } pub struct Warnings { pub invalid_categories: Vec, pub invalid_badges: Vec, pub other: Vec, } #[derive(Deserialize)] struct R { ok: bool, } #[derive(Deserialize)] struct OwnerResponse { ok: bool, msg: String, } #[derive(Deserialize)] struct ApiErrorList { errors: Vec, } #[derive(Deserialize)] struct ApiError { detail: String, } #[derive(Serialize)] struct OwnersReq<'a> { users: &'a [&'a str], } #[derive(Deserialize)] struct Users { users: Vec, } #[derive(Deserialize)] struct TotalCrates { total: u32, } #[derive(Deserialize)] struct Crates { crates: Vec, meta: TotalCrates, } #[derive(Debug)] pub enum ResponseError { Curl(curl::Error), Api { code: u32, errors: Vec, }, Code { code: u32, headers: Vec, body: String, }, Other(anyhow::Error), } impl std::error::Error for ResponseError { fn source(&self) -> Option<&(dyn 
std::error::Error + 'static)> { match self { ResponseError::Curl(..) => None, ResponseError::Api { .. } => None, ResponseError::Code { .. } => None, ResponseError::Other(e) => Some(e.as_ref()), } } } impl fmt::Display for ResponseError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { ResponseError::Curl(e) => write!(f, "{}", e), ResponseError::Api { code, errors } => { f.write_str("the remote server responded with an error")?; if *code != 200 { write!(f, " (status {} {})", code, reason(*code))?; }; write!(f, ": {}", errors.join(", ")) } ResponseError::Code { code, headers, body, } => write!( f, "failed to get a 200 OK response, got {}\n\ headers:\n\ \t{}\n\ body:\n\ {}", code, headers.join("\n\t"), body ), ResponseError::Other(..) => write!(f, "invalid response from server"), } } } impl From for ResponseError { fn from(error: curl::Error) -> Self { ResponseError::Curl(error) } } impl Registry { /// Creates a new `Registry`. /// /// ## Example /// /// ```rust /// use curl::easy::Easy; /// use crates_io::Registry; /// /// let mut handle = Easy::new(); /// // If connecting to crates.io, a user-agent is required. 
/// handle.useragent("my_crawler (example.com/info)"); /// let mut reg = Registry::new_handle(String::from("https://crates.io"), None, handle); /// ``` pub fn new_handle(host: String, token: Option, handle: Easy) -> Registry { Registry { host, token, handle, } } pub fn host(&self) -> &str { &self.host } pub fn host_is_crates_io(&self) -> bool { is_url_crates_io(&self.host) } pub fn add_owners(&mut self, krate: &str, owners: &[&str]) -> Result { let body = serde_json::to_string(&OwnersReq { users: owners })?; let body = self.put(&format!("/crates/{}/owners", krate), body.as_bytes())?; assert!(serde_json::from_str::(&body)?.ok); Ok(serde_json::from_str::(&body)?.msg) } pub fn remove_owners(&mut self, krate: &str, owners: &[&str]) -> Result<()> { let body = serde_json::to_string(&OwnersReq { users: owners })?; let body = self.delete(&format!("/crates/{}/owners", krate), Some(body.as_bytes()))?; assert!(serde_json::from_str::(&body)?.ok); Ok(()) } pub fn list_owners(&mut self, krate: &str) -> Result> { let body = self.get(&format!("/crates/{}/owners", krate))?; Ok(serde_json::from_str::(&body)?.users) } pub fn publish(&mut self, krate: &NewCrate, mut tarball: &File) -> Result { let json = serde_json::to_string(krate)?; // Prepare the body. The format of the upload request is: // // // (metadata for the package) // // // NOTE: This can be replaced with `stream_len` if it is ever stabilized. // // This checks the length using seeking instead of metadata, because // on some filesystems, getting the metadata will fail because // the file was renamed in ops::package. 
let tarball_len = tarball .seek(SeekFrom::End(0)) .with_context(|| "failed to seek tarball")?; tarball .seek(SeekFrom::Start(0)) .with_context(|| "failed to seek tarball")?; let header = { let mut w = Vec::new(); w.extend(&(json.len() as u32).to_le_bytes()); w.extend(json.as_bytes().iter().cloned()); w.extend(&(tarball_len as u32).to_le_bytes()); w }; let size = tarball_len as usize + header.len(); let mut body = Cursor::new(header).chain(tarball); let url = format!("{}/api/v1/crates/new", self.host); let token = match self.token.as_ref() { Some(s) => s, None => bail!("no upload token found, please run `cargo login`"), }; self.handle.put(true)?; self.handle.url(&url)?; self.handle.in_filesize(size as u64)?; let mut headers = List::new(); headers.append("Accept: application/json")?; headers.append(&format!("Authorization: {}", token))?; self.handle.http_headers(headers)?; let started = Instant::now(); let body = self .handle(&mut |buf| body.read(buf).unwrap_or(0)) .map_err(|e| match e { ResponseError::Code { code, .. } if code == 503 && started.elapsed().as_secs() >= 29 && self.host_is_crates_io() => { format_err!( "Request timed out after 30 seconds. If you're trying to \ upload a crate it may be too large. If the crate is under \ 10MB in size, you can email help@crates.io for assistance.\n\ Total size was {}.", tarball_len ) } _ => e.into(), })?; let response = if body.is_empty() { "{}".parse()? } else { body.parse::()? 
}; let invalid_categories: Vec = response .get("warnings") .and_then(|j| j.get("invalid_categories")) .and_then(|j| j.as_array()) .map(|x| x.iter().flat_map(|j| j.as_str()).map(Into::into).collect()) .unwrap_or_else(Vec::new); let invalid_badges: Vec = response .get("warnings") .and_then(|j| j.get("invalid_badges")) .and_then(|j| j.as_array()) .map(|x| x.iter().flat_map(|j| j.as_str()).map(Into::into).collect()) .unwrap_or_else(Vec::new); let other: Vec = response .get("warnings") .and_then(|j| j.get("other")) .and_then(|j| j.as_array()) .map(|x| x.iter().flat_map(|j| j.as_str()).map(Into::into).collect()) .unwrap_or_else(Vec::new); Ok(Warnings { invalid_categories, invalid_badges, other, }) } pub fn search(&mut self, query: &str, limit: u32) -> Result<(Vec, u32)> { let formatted_query = percent_encode(query.as_bytes(), NON_ALPHANUMERIC); let body = self.req( &format!("/crates?q={}&per_page={}", formatted_query, limit), None, Auth::Unauthorized, )?; let crates = serde_json::from_str::(&body)?; Ok((crates.crates, crates.meta.total)) } pub fn yank(&mut self, krate: &str, version: &str) -> Result<()> { let body = self.delete(&format!("/crates/{}/{}/yank", krate, version), None)?; assert!(serde_json::from_str::(&body)?.ok); Ok(()) } pub fn unyank(&mut self, krate: &str, version: &str) -> Result<()> { let body = self.put(&format!("/crates/{}/{}/unyank", krate, version), &[])?; assert!(serde_json::from_str::(&body)?.ok); Ok(()) } fn put(&mut self, path: &str, b: &[u8]) -> Result { self.handle.put(true)?; self.req(path, Some(b), Auth::Authorized) } fn get(&mut self, path: &str) -> Result { self.handle.get(true)?; self.req(path, None, Auth::Authorized) } fn delete(&mut self, path: &str, b: Option<&[u8]>) -> Result { self.handle.custom_request("DELETE")?; self.req(path, b, Auth::Authorized) } fn req(&mut self, path: &str, body: Option<&[u8]>, authorized: Auth) -> Result { self.handle.url(&format!("{}/api/v1{}", self.host, path))?; let mut headers = List::new(); 
headers.append("Accept: application/json")?; headers.append("Content-Type: application/json")?; if authorized == Auth::Authorized { let token = match self.token.as_ref() { Some(s) => s, None => bail!("no upload token found, please run `cargo login`"), }; headers.append(&format!("Authorization: {}", token))?; } self.handle.http_headers(headers)?; match body { Some(mut body) => { self.handle.upload(true)?; self.handle.in_filesize(body.len() as u64)?; self.handle(&mut |buf| body.read(buf).unwrap_or(0)) .map_err(|e| e.into()) } None => self.handle(&mut |_| 0).map_err(|e| e.into()), } } fn handle( &mut self, read: &mut dyn FnMut(&mut [u8]) -> usize, ) -> std::result::Result { let mut headers = Vec::new(); let mut body = Vec::new(); { let mut handle = self.handle.transfer(); handle.read_function(|buf| Ok(read(buf)))?; handle.write_function(|data| { body.extend_from_slice(data); Ok(data.len()) })?; handle.header_function(|data| { // Headers contain trailing \r\n, trim them to make it easier // to work with. let s = String::from_utf8_lossy(data).trim().to_string(); headers.push(s); true })?; handle.perform()?; } let body = match String::from_utf8(body) { Ok(body) => body, Err(..) 
=> { return Err(ResponseError::Other(format_err!( "response body was not valid utf-8" ))) } }; let errors = serde_json::from_str::(&body) .ok() .map(|s| s.errors.into_iter().map(|s| s.detail).collect::>()); match (self.handle.response_code()?, errors) { (0, None) | (200, None) => Ok(body), (code, Some(errors)) => Err(ResponseError::Api { code, errors }), (code, None) => Err(ResponseError::Code { code, headers, body, }), } } } fn reason(code: u32) -> &'static str { // Taken from https://developer.mozilla.org/en-US/docs/Web/HTTP/Status match code { 100 => "Continue", 101 => "Switching Protocol", 103 => "Early Hints", 200 => "OK", 201 => "Created", 202 => "Accepted", 203 => "Non-Authoritative Information", 204 => "No Content", 205 => "Reset Content", 206 => "Partial Content", 300 => "Multiple Choice", 301 => "Moved Permanently", 302 => "Found", 303 => "See Other", 304 => "Not Modified", 307 => "Temporary Redirect", 308 => "Permanent Redirect", 400 => "Bad Request", 401 => "Unauthorized", 402 => "Payment Required", 403 => "Forbidden", 404 => "Not Found", 405 => "Method Not Allowed", 406 => "Not Acceptable", 407 => "Proxy Authentication Required", 408 => "Request Timeout", 409 => "Conflict", 410 => "Gone", 411 => "Length Required", 412 => "Precondition Failed", 413 => "Payload Too Large", 414 => "URI Too Long", 415 => "Unsupported Media Type", 416 => "Request Range Not Satisfiable", 417 => "Expectation Failed", 429 => "Too Many Requests", 431 => "Request Header Fields Too Large", 500 => "Internal Server Error", 501 => "Not Implemented", 502 => "Bad Gateway", 503 => "Service Unavailable", 504 => "Gateway Timeout", _ => "", } } /// Returns `true` if the host of the given URL is "crates.io". 
pub fn is_url_crates_io(url: &str) -> bool { Url::parse(url) .map(|u| u.host_str() == Some("crates.io")) .unwrap_or(false) } cargo-0.66.0/crates/credential/000077500000000000000000000000001432416201200162645ustar00rootroot00000000000000cargo-0.66.0/crates/credential/README.md000066400000000000000000000004471432416201200175500ustar00rootroot00000000000000# Cargo Credential Packages This directory contains Cargo packages for handling storage of tokens in a secure manner. `cargo-credential` is a generic library to assist writing a credential process. The other directories contain implementations that integrate with specific credential systems. cargo-0.66.0/crates/credential/cargo-credential-1password/000077500000000000000000000000001432416201200234105ustar00rootroot00000000000000cargo-0.66.0/crates/credential/cargo-credential-1password/Cargo.toml000066400000000000000000000006311432416201200253400ustar00rootroot00000000000000[package] name = "cargo-credential-1password" version = "0.1.0" edition = "2021" license = "MIT OR Apache-2.0" repository = "https://github.com/rust-lang/cargo" description = "A Cargo credential process that stores tokens in a 1password vault." [dependencies] cargo-credential = { version = "0.1.0", path = "../cargo-credential" } serde = { version = "1.0.117", features = ["derive"] } serde_json = "1.0.59" cargo-0.66.0/crates/credential/cargo-credential-1password/src/000077500000000000000000000000001432416201200241775ustar00rootroot00000000000000cargo-0.66.0/crates/credential/cargo-credential-1password/src/main.rs000066400000000000000000000233511432416201200254750ustar00rootroot00000000000000//! Cargo registry 1password credential process. use cargo_credential::{Credential, Error}; use serde::Deserialize; use std::io::Read; use std::process::{Command, Stdio}; const CARGO_TAG: &str = "cargo-registry"; /// Implementation of 1password keychain access for Cargo registries. 
struct OnePasswordKeychain { account: Option, vault: Option, sign_in_address: Option, email: Option, } /// 1password Login item type, used for the JSON output of `op get item`. #[derive(Deserialize)] struct Login { details: Details, } #[derive(Deserialize)] struct Details { fields: Vec, } #[derive(Deserialize)] struct Field { designation: String, value: String, } /// 1password item from `op list items`. #[derive(Deserialize)] struct ListItem { uuid: String, overview: Overview, } #[derive(Deserialize)] struct Overview { title: String, } impl OnePasswordKeychain { fn new() -> Result { let mut args = std::env::args().skip(1); let mut action = false; let mut account = None; let mut vault = None; let mut sign_in_address = None; let mut email = None; while let Some(arg) = args.next() { match arg.as_str() { "--account" => { account = Some(args.next().ok_or("--account needs an arg")?); } "--vault" => { vault = Some(args.next().ok_or("--vault needs an arg")?); } "--sign-in-address" => { sign_in_address = Some(args.next().ok_or("--sign-in-address needs an arg")?); } "--email" => { email = Some(args.next().ok_or("--email needs an arg")?); } s if s.starts_with('-') => { return Err(format!("unknown option {}", s).into()); } _ => { if action { return Err("too many arguments".into()); } else { action = true; } } } } if sign_in_address.is_none() && email.is_some() { return Err("--email requires --sign-in-address".into()); } Ok(OnePasswordKeychain { account, vault, sign_in_address, email, }) } fn signin(&self) -> Result, Error> { // If there are any session env vars, we'll assume that this is the // correct account, and that the user knows what they are doing. 
if std::env::vars().any(|(name, _)| name.starts_with("OP_SESSION_")) { return Ok(None); } let mut cmd = Command::new("op"); cmd.arg("signin"); if let Some(addr) = &self.sign_in_address { cmd.arg(addr); if let Some(email) = &self.email { cmd.arg(email); } } cmd.arg("--raw"); cmd.stdout(Stdio::piped()); #[cfg(unix)] const IN_DEVICE: &str = "/dev/tty"; #[cfg(windows)] const IN_DEVICE: &str = "CONIN$"; let stdin = std::fs::OpenOptions::new() .read(true) .write(true) .open(IN_DEVICE)?; cmd.stdin(stdin); let mut child = cmd .spawn() .map_err(|e| format!("failed to spawn `op`: {}", e))?; let mut buffer = String::new(); child .stdout .as_mut() .unwrap() .read_to_string(&mut buffer) .map_err(|e| format!("failed to get session from `op`: {}", e))?; if let Some(end) = buffer.find('\n') { buffer.truncate(end); } let status = child .wait() .map_err(|e| format!("failed to wait for `op`: {}", e))?; if !status.success() { return Err(format!("failed to run `op signin`: {}", status).into()); } Ok(Some(buffer)) } fn make_cmd(&self, session: &Option, args: &[&str]) -> Command { let mut cmd = Command::new("op"); cmd.args(args); if let Some(account) = &self.account { cmd.arg("--account"); cmd.arg(account); } if let Some(vault) = &self.vault { cmd.arg("--vault"); cmd.arg(vault); } if let Some(session) = session { cmd.arg("--session"); cmd.arg(session); } cmd } fn run_cmd(&self, mut cmd: Command) -> Result { cmd.stdout(Stdio::piped()); let mut child = cmd .spawn() .map_err(|e| format!("failed to spawn `op`: {}", e))?; let mut buffer = String::new(); child .stdout .as_mut() .unwrap() .read_to_string(&mut buffer) .map_err(|e| format!("failed to read `op` output: {}", e))?; let status = child .wait() .map_err(|e| format!("failed to wait for `op`: {}", e))?; if !status.success() { return Err(format!("`op` command exit error: {}", status).into()); } Ok(buffer) } fn search( &self, session: &Option, registry_name: &str, ) -> Result, Error> { let cmd = self.make_cmd( session, &[ "list", "items", 
"--categories", "Login", "--tags", CARGO_TAG, ], ); let buffer = self.run_cmd(cmd)?; let items: Vec = serde_json::from_str(&buffer) .map_err(|e| format!("failed to deserialize JSON from 1password list: {}", e))?; let mut matches = items .into_iter() .filter(|item| item.overview.title == registry_name); match matches.next() { Some(login) => { // Should this maybe just sort on `updatedAt` and return the newest one? if matches.next().is_some() { return Err(format!( "too many 1password logins match registry name {}, \ consider deleting the excess entries", registry_name ) .into()); } Ok(Some(login.uuid)) } None => Ok(None), } } fn modify(&self, session: &Option, uuid: &str, token: &str) -> Result<(), Error> { let cmd = self.make_cmd( session, &["edit", "item", uuid, &format!("password={}", token)], ); self.run_cmd(cmd)?; Ok(()) } fn create( &self, session: &Option, registry_name: &str, api_url: &str, token: &str, ) -> Result<(), Error> { let cmd = self.make_cmd( session, &[ "create", "item", "Login", &format!("password={}", token), &format!("url={}", api_url), "--title", registry_name, "--tags", CARGO_TAG, ], ); self.run_cmd(cmd)?; Ok(()) } fn get_token(&self, session: &Option, uuid: &str) -> Result { let cmd = self.make_cmd(session, &["get", "item", uuid]); let buffer = self.run_cmd(cmd)?; let item: Login = serde_json::from_str(&buffer) .map_err(|e| format!("failed to deserialize JSON from 1password get: {}", e))?; let password = item .details .fields .into_iter() .find(|item| item.designation == "password"); match password { Some(password) => Ok(password.value), None => Err("could not find password field".into()), } } fn delete(&self, session: &Option, uuid: &str) -> Result<(), Error> { let cmd = self.make_cmd(session, &["delete", "item", uuid]); self.run_cmd(cmd)?; Ok(()) } } impl Credential for OnePasswordKeychain { fn name(&self) -> &'static str { env!("CARGO_PKG_NAME") } fn get(&self, registry_name: &str, _api_url: &str) -> Result { let session = self.signin()?; 
if let Some(uuid) = self.search(&session, registry_name)? { self.get_token(&session, &uuid) } else { return Err(format!( "no 1password entry found for registry `{}`, try `cargo login` to add a token", registry_name ) .into()); } } fn store(&self, registry_name: &str, api_url: &str, token: &str) -> Result<(), Error> { let session = self.signin()?; // Check if an item already exists. if let Some(uuid) = self.search(&session, registry_name)? { self.modify(&session, &uuid, token) } else { self.create(&session, registry_name, api_url, token) } } fn erase(&self, registry_name: &str, _api_url: &str) -> Result<(), Error> { let session = self.signin()?; // Check if an item already exists. if let Some(uuid) = self.search(&session, registry_name)? { self.delete(&session, &uuid)?; } else { eprintln!("not currently logged in to `{}`", registry_name); } Ok(()) } } fn main() { let op = match OnePasswordKeychain::new() { Ok(op) => op, Err(e) => { eprintln!("error: {}", e); std::process::exit(1); } }; cargo_credential::main(op); } cargo-0.66.0/crates/credential/cargo-credential-gnome-secret/000077500000000000000000000000001432416201200240555ustar00rootroot00000000000000cargo-0.66.0/crates/credential/cargo-credential-gnome-secret/Cargo.toml000066400000000000000000000005731432416201200260120ustar00rootroot00000000000000[package] name = "cargo-credential-gnome-secret" version = "0.1.0" edition = "2021" license = "MIT OR Apache-2.0" repository = "https://github.com/rust-lang/cargo" description = "A Cargo credential process that stores tokens with GNOME libsecret." 
[dependencies] cargo-credential = { version = "0.1.0", path = "../cargo-credential" } [build-dependencies] pkg-config = "0.3.19" cargo-0.66.0/crates/credential/cargo-credential-gnome-secret/build.rs000066400000000000000000000001051432416201200255160ustar00rootroot00000000000000fn main() { pkg_config::probe_library("libsecret-1").unwrap(); } cargo-0.66.0/crates/credential/cargo-credential-gnome-secret/src/000077500000000000000000000000001432416201200246445ustar00rootroot00000000000000cargo-0.66.0/crates/credential/cargo-credential-gnome-secret/src/main.rs000066400000000000000000000137001432416201200261370ustar00rootroot00000000000000//! Cargo registry gnome libsecret credential process. use cargo_credential::{Credential, Error}; use std::ffi::{CStr, CString}; use std::os::raw::{c_char, c_int}; use std::ptr::{null, null_mut}; #[allow(non_camel_case_types)] type gchar = c_char; #[allow(non_camel_case_types)] type gboolean = c_int; type GQuark = u32; #[repr(C)] struct GError { domain: GQuark, code: c_int, message: *mut gchar, } #[repr(C)] struct GCancellable { _private: [u8; 0], } #[repr(C)] struct SecretSchema { name: *const gchar, flags: SecretSchemaFlags, attributes: [SecretSchemaAttribute; 32], } #[repr(C)] #[derive(Copy, Clone)] struct SecretSchemaAttribute { name: *const gchar, attr_type: SecretSchemaAttributeType, } #[repr(C)] enum SecretSchemaFlags { None = 0, } #[repr(C)] #[derive(Copy, Clone)] enum SecretSchemaAttributeType { String = 0, } extern "C" { fn secret_password_store_sync( schema: *const SecretSchema, collection: *const gchar, label: *const gchar, password: *const gchar, cancellable: *mut GCancellable, error: *mut *mut GError, ... ) -> gboolean; fn secret_password_clear_sync( schema: *const SecretSchema, cancellable: *mut GCancellable, error: *mut *mut GError, ... ) -> gboolean; fn secret_password_lookup_sync( schema: *const SecretSchema, cancellable: *mut GCancellable, error: *mut *mut GError, ... 
) -> *mut gchar; } struct GnomeSecret; fn label(registry_name: &str) -> CString { CString::new(format!("cargo-registry:{}", registry_name)).unwrap() } fn schema() -> SecretSchema { let mut attributes = [SecretSchemaAttribute { name: null(), attr_type: SecretSchemaAttributeType::String, }; 32]; attributes[0] = SecretSchemaAttribute { name: b"registry\0".as_ptr() as *const gchar, attr_type: SecretSchemaAttributeType::String, }; attributes[1] = SecretSchemaAttribute { name: b"url\0".as_ptr() as *const gchar, attr_type: SecretSchemaAttributeType::String, }; SecretSchema { name: b"org.rust-lang.cargo.registry\0".as_ptr() as *const gchar, flags: SecretSchemaFlags::None, attributes, } } impl Credential for GnomeSecret { fn name(&self) -> &'static str { env!("CARGO_PKG_NAME") } fn get(&self, registry_name: &str, api_url: &str) -> Result { let mut error: *mut GError = null_mut(); let attr_registry = CString::new("registry").unwrap(); let attr_url = CString::new("url").unwrap(); let registry_name_c = CString::new(registry_name).unwrap(); let api_url_c = CString::new(api_url).unwrap(); let schema = schema(); unsafe { let token_c = secret_password_lookup_sync( &schema, null_mut(), &mut error, attr_registry.as_ptr(), registry_name_c.as_ptr(), attr_url.as_ptr(), api_url_c.as_ptr(), null() as *const gchar, ); if !error.is_null() { return Err(format!( "failed to get token: {}", CStr::from_ptr((*error).message).to_str()? ) .into()); } if token_c.is_null() { return Err(format!("cannot find token for {}", registry_name).into()); } let token = CStr::from_ptr(token_c) .to_str() .map_err(|e| format!("expected utf8 token: {}", e))? 
.to_string(); Ok(token) } } fn store(&self, registry_name: &str, api_url: &str, token: &str) -> Result<(), Error> { let label = label(registry_name); let token = CString::new(token).unwrap(); let mut error: *mut GError = null_mut(); let attr_registry = CString::new("registry").unwrap(); let attr_url = CString::new("url").unwrap(); let registry_name_c = CString::new(registry_name).unwrap(); let api_url_c = CString::new(api_url).unwrap(); let schema = schema(); unsafe { secret_password_store_sync( &schema, b"default\0".as_ptr() as *const gchar, label.as_ptr(), token.as_ptr(), null_mut(), &mut error, attr_registry.as_ptr(), registry_name_c.as_ptr(), attr_url.as_ptr(), api_url_c.as_ptr(), null() as *const gchar, ); if !error.is_null() { return Err(format!( "failed to store token: {}", CStr::from_ptr((*error).message).to_str()? ) .into()); } } Ok(()) } fn erase(&self, registry_name: &str, api_url: &str) -> Result<(), Error> { let schema = schema(); let mut error: *mut GError = null_mut(); let attr_registry = CString::new("registry").unwrap(); let attr_url = CString::new("url").unwrap(); let registry_name_c = CString::new(registry_name).unwrap(); let api_url_c = CString::new(api_url).unwrap(); unsafe { secret_password_clear_sync( &schema, null_mut(), &mut error, attr_registry.as_ptr(), registry_name_c.as_ptr(), attr_url.as_ptr(), api_url_c.as_ptr(), null() as *const gchar, ); if !error.is_null() { return Err(format!( "failed to erase token: {}", CStr::from_ptr((*error).message).to_str()? 
) .into()); } } Ok(()) } } fn main() { cargo_credential::main(GnomeSecret); } cargo-0.66.0/crates/credential/cargo-credential-macos-keychain/000077500000000000000000000000001432416201200243605ustar00rootroot00000000000000cargo-0.66.0/crates/credential/cargo-credential-macos-keychain/Cargo.toml000066400000000000000000000005551432416201200263150ustar00rootroot00000000000000[package] name = "cargo-credential-macos-keychain" version = "0.1.0" edition = "2021" license = "MIT OR Apache-2.0" repository = "https://github.com/rust-lang/cargo" description = "A Cargo credential process that stores tokens in a macOS keychain." [dependencies] cargo-credential = { version = "0.1.0", path = "../cargo-credential" } security-framework = "2.0.0" cargo-0.66.0/crates/credential/cargo-credential-macos-keychain/src/000077500000000000000000000000001432416201200251475ustar00rootroot00000000000000cargo-0.66.0/crates/credential/cargo-credential-macos-keychain/src/main.rs000066400000000000000000000032541432416201200264450ustar00rootroot00000000000000//! Cargo registry macos keychain credential process. use cargo_credential::{Credential, Error}; use security_framework::os::macos::keychain::SecKeychain; struct MacKeychain; /// The account name is not used. 
const ACCOUNT: &'static str = ""; fn registry(registry_name: &str) -> String { format!("cargo-registry:{}", registry_name) } impl Credential for MacKeychain { fn name(&self) -> &'static str { env!("CARGO_PKG_NAME") } fn get(&self, registry_name: &str, _api_url: &str) -> Result { let keychain = SecKeychain::default().unwrap(); let service_name = registry(registry_name); let (pass, _item) = keychain.find_generic_password(&service_name, ACCOUNT)?; String::from_utf8(pass.as_ref().to_vec()) .map_err(|_| "failed to convert token to UTF8".into()) } fn store(&self, registry_name: &str, _api_url: &str, token: &str) -> Result<(), Error> { let keychain = SecKeychain::default().unwrap(); let service_name = registry(registry_name); if let Ok((_pass, mut item)) = keychain.find_generic_password(&service_name, ACCOUNT) { item.set_password(token.as_bytes())?; } else { keychain.add_generic_password(&service_name, ACCOUNT, token.as_bytes())?; } Ok(()) } fn erase(&self, registry_name: &str, _api_url: &str) -> Result<(), Error> { let keychain = SecKeychain::default().unwrap(); let service_name = registry(registry_name); let (_pass, item) = keychain.find_generic_password(&service_name, ACCOUNT)?; item.delete(); Ok(()) } } fn main() { cargo_credential::main(MacKeychain); } cargo-0.66.0/crates/credential/cargo-credential-wincred/000077500000000000000000000000001432416201200231205ustar00rootroot00000000000000cargo-0.66.0/crates/credential/cargo-credential-wincred/Cargo.toml000066400000000000000000000006501432416201200250510ustar00rootroot00000000000000[package] name = "cargo-credential-wincred" version = "0.1.0" edition = "2021" license = "MIT OR Apache-2.0" repository = "https://github.com/rust-lang/cargo" description = "A Cargo credential process that stores tokens with Windows Credential Manager." 
[dependencies] cargo-credential = { version = "0.1.0", path = "../cargo-credential" } winapi = { version = "0.3.9", features = ["wincred", "winerror", "impl-default"] } cargo-0.66.0/crates/credential/cargo-credential-wincred/src/000077500000000000000000000000001432416201200237075ustar00rootroot00000000000000cargo-0.66.0/crates/credential/cargo-credential-wincred/src/main.rs000066400000000000000000000067301432416201200252070ustar00rootroot00000000000000//! Cargo registry windows credential process. use cargo_credential::{Credential, Error}; use std::ffi::OsStr; use std::os::windows::ffi::OsStrExt; use winapi::shared::minwindef::{DWORD, FILETIME, LPBYTE, TRUE}; use winapi::shared::winerror; use winapi::um::wincred; use winapi::um::winnt::LPWSTR; struct WindowsCredential; /// Converts a string to a nul-terminated wide UTF-16 byte sequence. fn wstr(s: &str) -> Vec { let mut wide: Vec = OsStr::new(s).encode_wide().collect(); if wide.iter().any(|b| *b == 0) { panic!("nul byte in wide string"); } wide.push(0); wide } fn target_name(registry_name: &str) -> Vec { wstr(&format!("cargo-registry:{}", registry_name)) } impl Credential for WindowsCredential { fn name(&self) -> &'static str { env!("CARGO_PKG_NAME") } fn get(&self, registry_name: &str, _api_url: &str) -> Result { let target_name = target_name(registry_name); let mut p_credential: wincred::PCREDENTIALW = std::ptr::null_mut(); unsafe { if wincred::CredReadW( target_name.as_ptr(), wincred::CRED_TYPE_GENERIC, 0, &mut p_credential, ) != TRUE { return Err( format!("failed to fetch token: {}", std::io::Error::last_os_error()).into(), ); } let bytes = std::slice::from_raw_parts( (*p_credential).CredentialBlob, (*p_credential).CredentialBlobSize as usize, ); String::from_utf8(bytes.to_vec()).map_err(|_| "failed to convert token to UTF8".into()) } } fn store(&self, registry_name: &str, _api_url: &str, token: &str) -> Result<(), Error> { let token = token.as_bytes(); let target_name = target_name(registry_name); let comment 
= wstr("Cargo registry token"); let mut credential = wincred::CREDENTIALW { Flags: 0, Type: wincred::CRED_TYPE_GENERIC, TargetName: target_name.as_ptr() as LPWSTR, Comment: comment.as_ptr() as LPWSTR, LastWritten: FILETIME::default(), CredentialBlobSize: token.len() as DWORD, CredentialBlob: token.as_ptr() as LPBYTE, Persist: wincred::CRED_PERSIST_LOCAL_MACHINE, AttributeCount: 0, Attributes: std::ptr::null_mut(), TargetAlias: std::ptr::null_mut(), UserName: std::ptr::null_mut(), }; let result = unsafe { wincred::CredWriteW(&mut credential, 0) }; if result != TRUE { let err = std::io::Error::last_os_error(); return Err(format!("failed to store token: {}", err).into()); } Ok(()) } fn erase(&self, registry_name: &str, _api_url: &str) -> Result<(), Error> { let target_name = target_name(registry_name); let result = unsafe { wincred::CredDeleteW(target_name.as_ptr(), wincred::CRED_TYPE_GENERIC, 0) }; if result != TRUE { let err = std::io::Error::last_os_error(); if err.raw_os_error() == Some(winerror::ERROR_NOT_FOUND as i32) { eprintln!("not currently logged in to `{}`", registry_name); return Ok(()); } return Err(format!("failed to remove token: {}", err).into()); } Ok(()) } } fn main() { cargo_credential::main(WindowsCredential); } cargo-0.66.0/crates/credential/cargo-credential/000077500000000000000000000000001432416201200214675ustar00rootroot00000000000000cargo-0.66.0/crates/credential/cargo-credential/Cargo.toml000066400000000000000000000003551432416201200234220ustar00rootroot00000000000000[package] name = "cargo-credential" version = "0.1.0" edition = "2021" license = "MIT OR Apache-2.0" repository = "https://github.com/rust-lang/cargo" description = "A library to assist writing Cargo credential helpers." 
[dependencies] cargo-0.66.0/crates/credential/cargo-credential/README.md000066400000000000000000000017271432416201200227550ustar00rootroot00000000000000# cargo-credential This package is a library to assist writing a Cargo credential helper, which provides an interface to store tokens for authorizing access to a registry such as https://crates.io/. Documentation about credential processes may be found at https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#credential-process Example implementations may be found at https://github.com/rust-lang/cargo/tree/master/crates/credential ## Usage Create a Cargo project with this as a dependency: ```toml # Add this to your Cargo.toml: [dependencies] cargo-credential = "0.1" ``` And then include a `main.rs` binary which implements the `Credential` trait, and calls the `main` function which will call the appropriate method of the trait: ```rust // src/main.rs use cargo_credential::{Credential, Error}; struct MyCredential; impl Credential for MyCredential { /// implement trait methods here... } fn main() { cargo_credential::main(MyCredential); } ``` cargo-0.66.0/crates/credential/cargo-credential/src/000077500000000000000000000000001432416201200222565ustar00rootroot00000000000000cargo-0.66.0/crates/credential/cargo-credential/src/lib.rs000066400000000000000000000056451432416201200234040ustar00rootroot00000000000000//! Helper library for writing Cargo credential processes. //! //! A credential process should have a `struct` that implements the `Credential` trait. //! The `main` function should be called with an instance of that struct, such as: //! //! ```rust,ignore //! fn main() { //! cargo_credential::main(MyCredential); //! } //! ``` //! //! This will determine the action to perform (get/store/erase) by looking at //! the CLI arguments for the first argument that does not start with `-`. It //! will then call the corresponding method of the trait to perform the //! requested action. 
pub type Error = Box; pub trait Credential { /// Returns the name of this credential process. fn name(&self) -> &'static str; /// Retrieves a token for the given registry. fn get(&self, registry_name: &str, api_url: &str) -> Result; /// Stores the given token for the given registry. fn store(&self, registry_name: &str, api_url: &str, token: &str) -> Result<(), Error>; /// Removes the token for the given registry. /// /// If the user is not logged in, this should print a message to stderr if /// possible indicating that the user is not currently logged in, and /// return `Ok`. fn erase(&self, registry_name: &str, api_url: &str) -> Result<(), Error>; } /// Runs the credential interaction by processing the command-line and /// environment variables. pub fn main(credential: impl Credential) { let name = credential.name(); if let Err(e) = doit(credential) { eprintln!("{} error: {}", name, e); std::process::exit(1); } } fn env(name: &str) -> Result { std::env::var(name).map_err(|_| format!("environment variable `{}` is not set", name).into()) } fn doit(credential: impl Credential) -> Result<(), Error> { let which = std::env::args() .skip(1) .skip_while(|arg| arg.starts_with('-')) .next() .ok_or_else(|| "first argument must be the {action}")?; let registry_name = env("CARGO_REGISTRY_NAME")?; let api_url = env("CARGO_REGISTRY_API_URL")?; let result = match which.as_ref() { "get" => credential.get(®istry_name, &api_url).and_then(|token| { println!("{}", token); Ok(()) }), "store" => { read_token().and_then(|token| credential.store(®istry_name, &api_url, &token)) } "erase" => credential.erase(®istry_name, &api_url), _ => { return Err(format!( "unexpected command-line argument `{}`, expected get/store/erase", which ) .into()) } }; result.map_err(|e| format!("failed to `{}` token: {}", which, e).into()) } fn read_token() -> Result { let mut buffer = String::new(); std::io::stdin().read_line(&mut buffer)?; if buffer.ends_with('\n') { buffer.pop(); } Ok(buffer) } 
cargo-0.66.0/crates/mdman/000077500000000000000000000000001432416201200152465ustar00rootroot00000000000000cargo-0.66.0/crates/mdman/Cargo.lock000066400000000000000000000274241432416201200171640ustar00rootroot00000000000000# This file is automatically @generated by Cargo. # It is not intended for manual editing. [[package]] name = "ansi_term" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b" dependencies = [ "winapi", ] [[package]] name = "anyhow" version = "1.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6b602bfe940d21c130f3895acd65221e8a61270debe89d628b9cb4e3ccb8569b" [[package]] name = "bitflags" version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" [[package]] name = "block-buffer" version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b" dependencies = [ "block-padding", "byte-tools", "byteorder", "generic-array", ] [[package]] name = "block-padding" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa79dedbb091f449f1f39e53edf88d5dbe95f895dae6135a8d7b881fb5af73f5" dependencies = [ "byte-tools", ] [[package]] name = "byte-tools" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" [[package]] name = "byteorder" version = "1.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" [[package]] name = "cfg-if" version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" 
[[package]] name = "ctor" version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39858aa5bac06462d4dd4b9164848eb81ffc4aa5c479746393598fd193afa227" dependencies = [ "quote", "syn", ] [[package]] name = "difference" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198" [[package]] name = "digest" version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3d0c8c8752312f9713efd397ff63acb9f85585afbf179282e720e7704954dd5" dependencies = [ "generic-array", ] [[package]] name = "fake-simd" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed" [[package]] name = "form_urlencoded" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5fc25a87fa4fd2094bffb06925852034d90a17f0d1e05197d4956d3555752191" dependencies = [ "matches", "percent-encoding", ] [[package]] name = "generic-array" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c68f0274ae0e023facc3c97b2e00f076be70e254bc851d972503b328db79b2ec" dependencies = [ "typenum", ] [[package]] name = "handlebars" version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "86dbc8a0746b08f363d2e00da48e6c9ceb75c198ac692d2715fcbb5bee74c87d" dependencies = [ "log", "pest", "pest_derive", "quick-error", "serde", "serde_json", "walkdir", ] [[package]] name = "idna" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "02e2673c30ee86b5b96a9cb52ad15718aa1f966f5ab9ad54a8b95d5ca33120a9" dependencies = [ "matches", "unicode-bidi", "unicode-normalization", ] [[package]] name = "itoa" version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"dc6f3ad7b9d11a0c00842ff8de1b60ee58661048eb8049ed33c73594f359d7e6" [[package]] name = "log" version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b" dependencies = [ "cfg-if", ] [[package]] name = "maplit" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" [[package]] name = "matches" version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08" [[package]] name = "mdman" version = "0.1.0" dependencies = [ "anyhow", "handlebars", "pretty_assertions", "pulldown-cmark", "same-file", "serde_json", "url", ] [[package]] name = "memchr" version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3728d817d99e5ac407411fa471ff9800a778d88a24685968b36824eaf4bee400" [[package]] name = "opaque-debug" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2839e79665f131bdb5782e51f2c6c9599c133c6098982a54c794358bf432529c" [[package]] name = "output_vt100" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53cdc5b785b7a58c5aad8216b3dfa114df64b0b06ae6e1501cef91df2fbdf8f9" dependencies = [ "winapi", ] [[package]] name = "percent-encoding" version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" [[package]] name = "pest" version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "10f4872ae94d7b90ae48754df22fd42ad52ce740b8f370b03da4835417403e53" dependencies = [ "ucd-trie", ] [[package]] name = "pest_derive" version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"833d1ae558dc601e9a60366421196a8d94bc0ac980476d0b67e1d0988d72b2d0" dependencies = [ "pest", "pest_generator", ] [[package]] name = "pest_generator" version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "99b8db626e31e5b81787b9783425769681b347011cc59471e33ea46d2ea0cf55" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", "syn", ] [[package]] name = "pest_meta" version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "54be6e404f5317079812fc8f9f5279de376d8856929e21c184ecf6bbd692a11d" dependencies = [ "maplit", "pest", "sha-1", ] [[package]] name = "pretty_assertions" version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f81e1644e1b54f5a68959a29aa86cde704219254669da328ecfdf6a1f09d427" dependencies = [ "ansi_term", "ctor", "difference", "output_vt100", ] [[package]] name = "proc-macro2" version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04f5f085b5d71e2188cb8271e5da0161ad52c3f227a661a3c135fdf28e258b12" dependencies = [ "unicode-xid", ] [[package]] name = "pulldown-cmark" version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca36dea94d187597e104a5c8e4b07576a8a45aa5db48a65e12940d3eb7461f55" dependencies = [ "bitflags", "memchr", "unicase", ] [[package]] name = "quick-error" version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] name = "quote" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37" dependencies = [ "proc-macro2", ] [[package]] name = "ryu" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e" [[package]] name = "same-file" 
version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" dependencies = [ "winapi-util", ] [[package]] name = "serde" version = "1.0.114" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5317f7588f0a5078ee60ef675ef96735a1442132dc645eb1d12c018620ed8cd3" [[package]] name = "serde_json" version = "1.0.57" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "164eacbdb13512ec2745fb09d51fd5b22b0d65ed294a1dcf7285a360c80a675c" dependencies = [ "itoa", "ryu", "serde", ] [[package]] name = "sha-1" version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f7d94d0bede923b3cea61f3f1ff57ff8cdfd77b400fb8f9998949e0cf04163df" dependencies = [ "block-buffer", "digest", "fake-simd", "opaque-debug", ] [[package]] name = "syn" version = "1.0.36" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4cdb98bcb1f9d81d07b536179c269ea15999b5d14ea958196413869445bb5250" dependencies = [ "proc-macro2", "quote", "unicode-xid", ] [[package]] name = "tinyvec" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53953d2d3a5ad81d9f844a32f14ebb121f50b650cd59d0ee2a07cf13c617efed" [[package]] name = "typenum" version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33" [[package]] name = "ucd-trie" version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c" [[package]] name = "unicase" version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" dependencies = [ "version_check", ] [[package]] name = "unicode-bidi" version = "0.3.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" dependencies = [ "matches", ] [[package]] name = "unicode-normalization" version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6fb19cf769fa8c6a80a162df694621ebeb4dafb606470b2b2fce0be40a98a977" dependencies = [ "tinyvec", ] [[package]] name = "unicode-xid" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564" [[package]] name = "url" version = "2.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a507c383b2d33b5fc35d1861e77e6b383d158b2da5e14fe51b83dfedf6fd578c" dependencies = [ "form_urlencoded", "idna", "matches", "percent-encoding", ] [[package]] name = "version_check" version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed" [[package]] name = "walkdir" version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "777182bc735b6424e1a57516d35ed72cb8019d85c8c9bf536dccb3445c1a2f7d" dependencies = [ "same-file", "winapi", "winapi-util", ] [[package]] name = "winapi" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" dependencies = [ "winapi-i686-pc-windows-gnu", "winapi-x86_64-pc-windows-gnu", ] [[package]] name = "winapi-i686-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" dependencies = [ "winapi", 
] [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" cargo-0.66.0/crates/mdman/Cargo.toml000066400000000000000000000006321432416201200171770ustar00rootroot00000000000000[package] name = "mdman" version = "0.1.0" edition = "2021" license = "MIT OR Apache-2.0" description = "Creates a man page page from markdown." [dependencies] anyhow = "1.0.31" handlebars = { version = "3.2.1", features = ["dir_source"] } pulldown-cmark = { version = "0.7.2", default-features = false } same-file = "1.0.6" serde_json = "1.0.56" url = "2.2.2" [dev-dependencies] pretty_assertions = "0.6.1" cargo-0.66.0/crates/mdman/README.md000066400000000000000000000002331432416201200165230ustar00rootroot00000000000000# mdman mdman is a small utility for creating man pages from markdown text files. ## Usage See the [man page](doc/out/mdman.md) generated by this tool. cargo-0.66.0/crates/mdman/build-man.sh000077500000000000000000000002141432416201200174520ustar00rootroot00000000000000#!/bin/bash set -e cargo run -- -t md -o doc/out doc/*.md cargo run -- -t txt -o doc/out doc/*.md cargo run -- -t man -o doc/out doc/*.md cargo-0.66.0/crates/mdman/doc/000077500000000000000000000000001432416201200160135ustar00rootroot00000000000000cargo-0.66.0/crates/mdman/doc/mdman.md000066400000000000000000000065171432416201200174420ustar00rootroot00000000000000# mdman(1) ## NAME mdman - Converts markdown to a man page ## SYNOPSIS `mdman` [_options_] `-t` _type_ `-o` _outdir_ _sources..._ ## DESCRIPTION Converts a markdown file to a man page. The source file is first processed as a [handlebars](https://handlebarsjs.com/) template. Then, it is processed as markdown into the target format. This supports different output formats, such as troff or plain text. Every man page should start with a level-1 header with the man name and section, such as `# mdman(1)`. 
The handlebars template has several special tags to assist with generating the man page: {{{{raw}}}} - Every block of command-line options must be wrapped between `{{#options}}` and `{{/options}}` tags. This tells the processor where the options start and end. - Each option must be expressed with a `{{#option}}` block. The parameters to the the block are a sequence of strings indicating the option. For example, ```{{#option "`-p` _spec_..." "`--package` _spec_..."}}``` is an option that has two different forms. The text within the string is processed as markdown. It is recommended to use formatting similar to this example. The content of the `{{#option}}` block should contain a detailed description of the option. Use the `{{/option}}` tag to end the option block. - References to other man pages should use the `{{man name section}}` expression. For example, `{{man "mdman" 1}}` will generate a reference to the `mdman(1)` man page. For non-troff output, the `--man` option will tell `mdman` how to create links to the man page. If there is no matching `--man` option, then it links to a file named _name_`.md` in the same directory. - Variables can be set with `{{*set name="value"}}`. These variables can then be referenced with `{{name}}` expressions. - Partial templates should be placed in a directory named `includes` next to the source file. Templates can be included with an expression like `{{> template-name}}`. - Other helpers include: - `{{lower value}}` Converts the given value to lowercase. {{{{/raw}}}} ## OPTIONS {{#options}} {{#option "`-t` _type_"}} Specifies the output type. The following output types are supported: - `man` β€” A troff-style man page. Outputs with a numbered extension (like `.1`) matching the man page section. - `md` β€” A markdown file, after all handlebars processing has been finished. Outputs with the `.md` extension. - `txt` β€” A text file, rendered for situations where a man page viewer isn't available. Outputs with the `.txt` extension. 
{{/option}} {{#option "`-o` _outdir_"}} Specifies the directory where to save the output. {{/option}} {{#option "`--url` _base_url_"}} Specifies a base URL to use for relative URLs within the document. Any relative URL will be joined with this URL. {{/option}} {{#option "`--man` _name_`:`_section_`=`_url_"}} Specifies a URL to use for the given man page. When the `\{{man name section}}` expression is used, the given URL will be inserted as a link. This may be specified multiple times. If a man page reference does not have a matching `--man` entry, then a relative link to a file named _name_`.md` will be used. {{/option}} {{#option "_sources..._"}} The source input filename, may be specified multiple times. {{/option}} {{/options}} ## EXAMPLES 1. Convert the given documents to man pages: mdman -t man -o doc doc/mdman.md cargo-0.66.0/crates/mdman/doc/out/000077500000000000000000000000001432416201200166225ustar00rootroot00000000000000cargo-0.66.0/crates/mdman/doc/out/mdman.1000066400000000000000000000074631432416201200200120ustar00rootroot00000000000000'\" t .TH "MDMAN" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" mdman \- Converts markdown to a man page .SH "SYNOPSIS" \fBmdman\fR [\fIoptions\fR] \fB\-t\fR \fItype\fR \fB\-o\fR \fIoutdir\fR \fIsources...\fR .SH "DESCRIPTION" Converts a markdown file to a man page. .sp The source file is first processed as a \fIhandlebars\fR template. Then, it is processed as markdown into the target format. This supports different output formats, such as troff or plain text. .sp Every man page should start with a level\-1 header with the man name and section, such as \fB# mdman(1)\fR\&. .sp The handlebars template has several special tags to assist with generating the man page: .sp .RS 4 \h'-04'\(bu\h'+02'Every block of command\-line options must be wrapped between \fB{{#options}}\fR and \fB{{/options}}\fR tags. This tells the processor where the options start and end. 
.RE .sp .RS 4 \h'-04'\(bu\h'+02'Each option must be expressed with a \fB{{#option}}\fR block. The parameters to the the block are a sequence of strings indicating the option. For example, \fB{{#option "`\-p` _spec_..." "`\-\-package` _spec_..."}}\fR is an option that has two different forms. The text within the string is processed as markdown. It is recommended to use formatting similar to this example. .sp The content of the \fB{{#option}}\fR block should contain a detailed description of the option. .sp Use the \fB{{/option}}\fR tag to end the option block. .RE .sp .RS 4 \h'-04'\(bu\h'+02'References to other man pages should use the \fB{{man name section}}\fR expression. For example, \fB{{man "mdman" 1}}\fR will generate a reference to the \fBmdman(1)\fR man page. For non\-troff output, the \fB\-\-man\fR option will tell \fBmdman\fR how to create links to the man page. If there is no matching \fB\-\-man\fR option, then it links to a file named \fIname\fR\fB\&.md\fR in the same directory. .RE .sp .RS 4 \h'-04'\(bu\h'+02'Variables can be set with \fB{{*set name="value"}}\fR\&. These variables can then be referenced with \fB{{name}}\fR expressions. .RE .sp .RS 4 \h'-04'\(bu\h'+02'Partial templates should be placed in a directory named \fBincludes\fR next to the source file. Templates can be included with an expression like \fB{{> template\-name}}\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'Other helpers include: .sp .RS 4 \h'-04'\(bu\h'+02'\fB{{lower value}}\fR Converts the given value to lowercase. .RE .RE .SH "OPTIONS" .sp \fB\-t\fR \fItype\fR .RS 4 Specifies the output type. The following output types are supported: .sp .RS 4 \h'-04'\(bu\h'+02'\fBman\fR \[em] A troff\-style man page. Outputs with a numbered extension (like \fB\&.1\fR) matching the man page section. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBmd\fR \[em] A markdown file, after all handlebars processing has been finished. Outputs with the \fB\&.md\fR extension. 
.RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBtxt\fR \[em] A text file, rendered for situations where a man page viewer isn't available. Outputs with the \fB\&.txt\fR extension. .RE .RE .sp \fB\-o\fR \fIoutdir\fR .RS 4 Specifies the directory where to save the output. .RE .sp \fB\-\-url\fR \fIbase_url\fR .RS 4 Specifies a base URL to use for relative URLs within the document. Any relative URL will be joined with this URL. .RE .sp \fB\-\-man\fR \fIname\fR\fB:\fR\fIsection\fR\fB=\fR\fIurl\fR .RS 4 Specifies a URL to use for the given man page. When the \fB{{man name section}}\fR expression is used, the given URL will be inserted as a link. This may be specified multiple times. If a man page reference does not have a matching \fB\-\-man\fR entry, then a relative link to a file named \fIname\fR\fB\&.md\fR will be used. .RE .sp \fIsources...\fR .RS 4 The source input filename, may be specified multiple times. .RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Convert the given documents to man pages: .sp .RS 4 .nf mdman \-t man \-o doc doc/mdman.md .fi .RE .RE cargo-0.66.0/crates/mdman/doc/out/mdman.md000066400000000000000000000101251432416201200202370ustar00rootroot00000000000000# mdman(1) ## NAME mdman - Converts markdown to a man page ## SYNOPSIS `mdman` [_options_] `-t` _type_ `-o` _outdir_ _sources..._ ## DESCRIPTION Converts a markdown file to a man page. The source file is first processed as a [handlebars](https://handlebarsjs.com/) template. Then, it is processed as markdown into the target format. This supports different output formats, such as troff or plain text. Every man page should start with a level-1 header with the man name and section, such as `# mdman(1)`. The handlebars template has several special tags to assist with generating the man page: - Every block of command-line options must be wrapped between `{{#options}}` and `{{/options}}` tags. This tells the processor where the options start and end. - Each option must be expressed with a `{{#option}}` block. 
The parameters to the the block are a sequence of strings indicating the option. For example, ```{{#option "`-p` _spec_..." "`--package` _spec_..."}}``` is an option that has two different forms. The text within the string is processed as markdown. It is recommended to use formatting similar to this example. The content of the `{{#option}}` block should contain a detailed description of the option. Use the `{{/option}}` tag to end the option block. - References to other man pages should use the `{{man name section}}` expression. For example, `{{man "mdman" 1}}` will generate a reference to the `mdman(1)` man page. For non-troff output, the `--man` option will tell `mdman` how to create links to the man page. If there is no matching `--man` option, then it links to a file named _name_`.md` in the same directory. - Variables can be set with `{{*set name="value"}}`. These variables can then be referenced with `{{name}}` expressions. - Partial templates should be placed in a directory named `includes` next to the source file. Templates can be included with an expression like `{{> template-name}}`. - Other helpers include: - `{{lower value}}` Converts the given value to lowercase. ## OPTIONS
-t type
Specifies the output type. The following output types are supported:

  • man β€” A troff-style man page. Outputs with a numbered extension (like .1) matching the man page section.
  • md β€” A markdown file, after all handlebars processing has been finished. Outputs with the .md extension.
  • txt β€” A text file, rendered for situations where a man page viewer isn't available. Outputs with the .txt extension.
-o outdir
Specifies the directory where to save the output.
--url base_url
Specifies a base URL to use for relative URLs within the document. Any relative URL will be joined with this URL.
--man name:section=url
Specifies a URL to use for the given man page. When the {{man name section}} expression is used, the given URL will be inserted as a link. This may be specified multiple times. If a man page reference does not have a matching --man entry, then a relative link to a file named name.md will be used.
sources...
The source input filename, may be specified multiple times.
## EXAMPLES 1. Convert the given documents to man pages: mdman -t man -o doc doc/mdman.md cargo-0.66.0/crates/mdman/doc/out/mdman.txt000066400000000000000000000070411432416201200204610ustar00rootroot00000000000000MDMAN(1) NAME mdman - Converts markdown to a man page SYNOPSIS mdman [options] -t type -o outdir sources... DESCRIPTION Converts a markdown file to a man page. The source file is first processed as a handlebars template. Then, it is processed as markdown into the target format. This supports different output formats, such as troff or plain text. Every man page should start with a level-1 header with the man name and section, such as # mdman(1). The handlebars template has several special tags to assist with generating the man page: o Every block of command-line options must be wrapped between {{#options}} and {{/options}} tags. This tells the processor where the options start and end. o Each option must be expressed with a {{#option}} block. The parameters to the the block are a sequence of strings indicating the option. For example, {{#option "`-p` _spec_..." "`--package` _spec_..."}} is an option that has two different forms. The text within the string is processed as markdown. It is recommended to use formatting similar to this example. The content of the {{#option}} block should contain a detailed description of the option. Use the {{/option}} tag to end the option block. o References to other man pages should use the {{man name section}} expression. For example, {{man "mdman" 1}} will generate a reference to the mdman(1) man page. For non-troff output, the --man option will tell mdman how to create links to the man page. If there is no matching --man option, then it links to a file named name.md in the same directory. o Variables can be set with {{*set name="value"}}. These variables can then be referenced with {{name}} expressions. o Partial templates should be placed in a directory named includes next to the source file. 
Templates can be included with an expression like {{> template-name}}. o Other helpers include: o {{lower value}} Converts the given value to lowercase. OPTIONS -t type Specifies the output type. The following output types are supported: o man β€” A troff-style man page. Outputs with a numbered extension (like .1) matching the man page section. o md β€” A markdown file, after all handlebars processing has been finished. Outputs with the .md extension. o txt β€” A text file, rendered for situations where a man page viewer isn't available. Outputs with the .txt extension. -o outdir Specifies the directory where to save the output. --url base_url Specifies a base URL to use for relative URLs within the document. Any relative URL will be joined with this URL. --man name:section=url Specifies a URL to use for the given man page. When the {{man name section}} expression is used, the given URL will be inserted as a link. This may be specified multiple times. If a man page reference does not have a matching --man entry, then a relative link to a file named name.md will be used. sources... The source input filename, may be specified multiple times. EXAMPLES 1. Convert the given documents to man pages: mdman -t man -o doc doc/mdman.md cargo-0.66.0/crates/mdman/src/000077500000000000000000000000001432416201200160355ustar00rootroot00000000000000cargo-0.66.0/crates/mdman/src/format.rs000066400000000000000000000015151432416201200176750ustar00rootroot00000000000000use anyhow::Error; pub mod man; pub mod md; pub mod text; pub trait Formatter { /// Renders the given markdown to the formatter's output. fn render(&self, input: &str) -> Result; /// Renders the start of a block of options (triggered by `{{#options}}`). fn render_options_start(&self) -> &'static str; /// Renders the end of a block of options (triggered by `{{/options}}`). fn render_options_end(&self) -> &'static str; /// Renders an option (triggered by `{{#option}}`). 
fn render_option(&self, params: &[&str], block: &str, man_name: &str) -> Result; /// Converts a man page reference into markdown that is appropriate for this format. /// /// Triggered by `{{man name section}}`. fn linkify_man_to_md(&self, name: &str, section: u8) -> Result; } cargo-0.66.0/crates/mdman/src/format/000077500000000000000000000000001432416201200173255ustar00rootroot00000000000000cargo-0.66.0/crates/mdman/src/format/man.rs000066400000000000000000000433431432416201200204550ustar00rootroot00000000000000//! Man-page formatter. use crate::util::{header_text, parse_name_and_section}; use crate::EventIter; use anyhow::{bail, Error}; use pulldown_cmark::{Alignment, Event, LinkType, Tag}; use std::fmt::Write; use url::Url; pub struct ManFormatter { url: Option, } impl ManFormatter { pub fn new(url: Option) -> ManFormatter { ManFormatter { url } } } impl super::Formatter for ManFormatter { fn render(&self, input: &str) -> Result { ManRenderer::render(input, self.url.clone()) } fn render_options_start(&self) -> &'static str { // Tell pulldown_cmark to ignore this. // This will be stripped out later. " &'static str { "]]>" } fn render_option( &self, params: &[&str], block: &str, _man_name: &str, ) -> Result { let rendered_options = params .iter() .map(|param| { let r = self.render(param)?; Ok(r.trim().trim_start_matches(".sp").to_string()) }) .collect::, Error>>()?; let rendered_block = self.render(block)?; let rendered_block = rendered_block.trim().trim_start_matches(".sp").trim(); // .RS = move left margin to right 4. // .RE = move margin back one level. 
Ok(format!( "\n.sp\n{}\n.RS 4\n{}\n.RE\n", rendered_options.join(", "), rendered_block )) } fn linkify_man_to_md(&self, name: &str, section: u8) -> Result { Ok(format!("`{}`({})", name, section)) } } #[derive(Copy, Clone)] enum Font { Bold, Italic, } impl Font { fn str_from_stack(font_stack: &[Font]) -> &'static str { let has_bold = font_stack.iter().any(|font| matches!(font, Font::Bold)); let has_italic = font_stack.iter().any(|font| matches!(font, Font::Italic)); match (has_bold, has_italic) { (false, false) => "\\fR", // roman (normal) (false, true) => "\\fI", // italic (true, false) => "\\fB", // bold (true, true) => "\\f(BI", // bold italic } } } struct ManRenderer<'e> { output: String, parser: EventIter<'e>, font_stack: Vec, } impl<'e> ManRenderer<'e> { fn render(input: &str, url: Option) -> Result { let parser = crate::md_parser(input, url); let output = String::with_capacity(input.len() * 3 / 2); let mut mr = ManRenderer { parser, output, font_stack: Vec::new(), }; mr.push_man()?; Ok(mr.output) } fn push_man(&mut self) -> Result<(), Error> { // If this is true, this is inside a cdata block used for hiding // content from pulldown_cmark. let mut in_cdata = false; // The current list stack. None if unordered, Some if ordered with the // given number as the current index. let mut list: Vec> = Vec::new(); // Used in some cases where spacing isn't desired. 
let mut suppress_paragraph = false; let mut table_cell_index = 0; while let Some((event, range)) = self.parser.next() { let this_suppress_paragraph = suppress_paragraph; suppress_paragraph = false; match event { Event::Start(tag) => { match tag { Tag::Paragraph => { if !this_suppress_paragraph { self.flush(); self.output.push_str(".sp\n"); } } Tag::Heading(n) => { if n == 1 { self.push_top_header()?; } else if n == 2 { // Section header let text = header_text(&mut self.parser)?; self.flush(); write!(self.output, ".SH \"{}\"\n", text)?; suppress_paragraph = true; } else { // Subsection header let text = header_text(&mut self.parser)?; self.flush(); write!(self.output, ".SS \"{}\"\n", text)?; suppress_paragraph = true; } } Tag::BlockQuote => { self.flush(); // .RS = move left margin over 3 // .ll = shrink line length self.output.push_str(".RS 3\n.ll -5\n.sp\n"); suppress_paragraph = true; } Tag::CodeBlock(_kind) => { // space down, indent 4, no-fill mode self.flush(); self.output.push_str(".sp\n.RS 4\n.nf\n"); } Tag::List(start) => list.push(start), Tag::Item => { // Note: This uses explicit movement instead of .IP // because the spacing on .IP looks weird to me. // space down, indent 4 self.flush(); self.output.push_str(".sp\n.RS 4\n"); match list.last_mut().expect("item must have list start") { // Ordered list. Some(n) => { // move left 4, output the list index number, move right 1. write!(self.output, "\\h'-04' {}.\\h'+01'", n)?; *n += 1; } // Unordered list. 
None => self.output.push_str("\\h'-04'\\(bu\\h'+02'"), } suppress_paragraph = true; } Tag::FootnoteDefinition(_label) => unimplemented!(), Tag::Table(alignment) => { // Table start // allbox = draw a box around all the cells // tab(:) = Use `:` to separate cell data (instead of tab) // ; = end of options self.output.push_str( "\n.TS\n\ allbox tab(:);\n", ); let alignments: Vec<_> = alignment .iter() .map(|a| match a { Alignment::Left | Alignment::None => "lt", Alignment::Center => "ct", Alignment::Right => "rt", }) .collect(); self.output.push_str(&alignments.join(" ")); self.output.push_str(".\n"); table_cell_index = 0; } Tag::TableHead => { table_cell_index = 0; } Tag::TableRow => { table_cell_index = 0; self.output.push('\n'); } Tag::TableCell => { if table_cell_index != 0 { // Separator between columns. self.output.push(':'); } // Start a text block. self.output.push_str("T{\n"); table_cell_index += 1 } Tag::Emphasis => self.push_font(Font::Italic), Tag::Strong => self.push_font(Font::Bold), // Strikethrough isn't usually supported for TTY. Tag::Strikethrough => self.output.push_str("~~"), Tag::Link(link_type, dest_url, _title) => { if dest_url.starts_with('#') { // In a man page, page-relative anchors don't // have much meaning. continue; } match link_type { LinkType::Autolink | LinkType::Email => { // The text is a copy of the URL, which is not needed. match self.parser.next() { Some((Event::Text(_), _range)) => {} _ => bail!("expected text after autolink"), } } LinkType::Inline | LinkType::Reference | LinkType::Collapsed | LinkType::Shortcut => { self.push_font(Font::Italic); } // This is currently unused. This is only // emitted with a broken link callback, but I // felt it is too annoying to escape `[` in // option descriptions. 
LinkType::ReferenceUnknown | LinkType::CollapsedUnknown | LinkType::ShortcutUnknown => { bail!( "link with missing reference `{}` located at offset {}", dest_url, range.start ); } } } Tag::Image(_link_type, _dest_url, _title) => { bail!("images are not currently supported") } } } Event::End(tag) => { match &tag { Tag::Paragraph => self.flush(), Tag::Heading(_n) => {} Tag::BlockQuote => { self.flush(); // restore left margin, restore line length self.output.push_str(".br\n.RE\n.ll\n"); } Tag::CodeBlock(_kind) => { self.flush(); // Restore fill mode, move margin back one level. self.output.push_str(".fi\n.RE\n"); } Tag::List(_) => { list.pop(); } Tag::Item => { self.flush(); // Move margin back one level. self.output.push_str(".RE\n"); } Tag::FootnoteDefinition(_label) => {} Tag::Table(_) => { // Table end // I don't know why, but the .sp is needed to provide // space with the following content. self.output.push_str("\n.TE\n.sp\n"); } Tag::TableHead => {} Tag::TableRow => {} Tag::TableCell => { // End text block. 
self.output.push_str("\nT}"); } Tag::Emphasis | Tag::Strong => self.pop_font(), Tag::Strikethrough => self.output.push_str("~~"), Tag::Link(link_type, dest_url, _title) => { if dest_url.starts_with('#') { continue; } match link_type { LinkType::Autolink | LinkType::Email => {} LinkType::Inline | LinkType::Reference | LinkType::Collapsed | LinkType::Shortcut => { self.pop_font(); self.output.push(' '); } _ => { panic!("unexpected tag {:?}", tag); } } write!(self.output, "<{}>", escape(&dest_url)?)?; } Tag::Image(_link_type, _dest_url, _title) => {} } } Event::Text(t) => { self.output.push_str(&escape(&t)?); } Event::Code(t) => { self.push_font(Font::Bold); self.output.push_str(&escape(&t)?); self.pop_font(); } Event::Html(t) => { if t.starts_with("") { in_cdata = false; } else if !t.trim().is_empty() { self.output.push_str(&t); } } else { self.output.push_str(&escape(&t)?); } } Event::FootnoteReference(_t) => {} Event::SoftBreak => self.output.push('\n'), Event::HardBreak => { self.flush(); self.output.push_str(".br\n"); } Event::Rule => { self.flush(); // \l' **length** ' Draw horizontal line (default underscore). // \n(.lu Gets value from register "lu" (current line length) self.output.push_str("\\l'\\n(.lu'\n"); } Event::TaskListMarker(_b) => unimplemented!(), } } Ok(()) } fn flush(&mut self) { if !self.output.ends_with('\n') { self.output.push('\n'); } } /// Switch to the given font. /// /// Because the troff sequence `\fP` for switching to the "previous" font /// doesn't support nesting, this needs to emulate it here. This is needed /// for situations like **hi _there_**. fn push_font(&mut self, font: Font) { self.font_stack.push(font); self.output.push_str(Font::str_from_stack(&self.font_stack)); } fn pop_font(&mut self) { self.font_stack.pop(); self.output.push_str(Font::str_from_stack(&self.font_stack)); } /// Parse and render the first top-level header of the document. 
fn push_top_header(&mut self) -> Result<(), Error> { // This enables the tbl preprocessor for tables. // This seems to be enabled by default on every modern system I could // find, but it doesn't seem to hurt to enable this. self.output.push_str("'\\\" t\n"); // Extract the name of the man page. let text = header_text(&mut self.parser)?; let (name, section) = parse_name_and_section(&text)?; // .TH = Table header // .nh = disable hyphenation // .ad l = Left-adjust mode (disable justified). // .ss sets sentence_space_size to 0 (prevents double spaces after . // if . is last on the line) write!( self.output, ".TH \"{}\" \"{}\"\n\ .nh\n\ .ad l\n\ .ss \\n[.ss] 0\n", escape(&name.to_uppercase())?, section )?; Ok(()) } } fn escape(s: &str) -> Result { let mut replaced = s .replace('\\', "\\(rs") .replace('-', "\\-") .replace('\u{00A0}', "\\ ") // non-breaking space (non-stretchable) .replace('–', "\\[en]") // \u{2013} en-dash .replace('β€”', "\\[em]") // \u{2014} em-dash .replace('β”‚', "|") // \u{2502} box drawing light vertical (could use \[br]) .replace('β”œ', "|") // \u{251C} box drawings light vertical and right .replace('β””', "`") // \u{2514} box drawings light up and right .replace('─', "\\-") // \u{2500} box drawing light horizontal ; if replaced.starts_with('.') { replaced = format!("\\&.{}", &replaced[1..]); } else if replaced.starts_with('\'') { replaced = format!("\\(aq{}", &replaced[1..]); } if let Some(ch) = replaced.chars().find(|ch| { !matches!(ch, '\n' | ' ' | '!'..='/' | '0'..='9' | ':'..='@' | 'A'..='Z' | '['..='`' | 'a'..='z' | '{'..='~') }) { bail!( "character {:?} is not allowed (update the translation table if needed)", ch ); } Ok(replaced) } cargo-0.66.0/crates/mdman/src/format/md.rs000066400000000000000000000064241432416201200203010ustar00rootroot00000000000000//! Markdown formatter. 
use crate::util::unwrap; use crate::ManMap; use anyhow::{bail, format_err, Error}; use std::fmt::Write; pub struct MdFormatter { man_map: ManMap, } impl MdFormatter { pub fn new(man_map: ManMap) -> MdFormatter { MdFormatter { man_map } } } impl MdFormatter { fn render_html(&self, input: &str) -> Result { let parser = crate::md_parser(input, None); let mut html_output: String = String::with_capacity(input.len() * 3 / 2); pulldown_cmark::html::push_html(&mut html_output, parser.map(|(e, _r)| e)); Ok(html_output) } } impl super::Formatter for MdFormatter { fn render(&self, input: &str) -> Result { Ok(input.replace("\r\n", "\n")) } fn render_options_start(&self) -> &'static str { "
" } fn render_options_end(&self) -> &'static str { "
" } fn render_option(&self, params: &[&str], block: &str, man_name: &str) -> Result { let mut result = String::new(); fn unwrap_p(t: &str) -> &str { unwrap(t, "

", "

") } for param in params { let rendered = self.render_html(param)?; let no_p = unwrap_p(&rendered); // split out first term to use as the id. let first = no_p .split_whitespace() .next() .ok_or_else(|| format_err!("did not expect option `{}` to be empty", param))?; let no_tags = trim_tags(first); if no_tags.is_empty() { bail!("unexpected empty option with no tags `{}`", param); } let id = format!("option-{}-{}", man_name, no_tags); write!( result, "
\ {OPTION}
\n", ID = id, OPTION = no_p )?; } let rendered_block = self.render_html(block)?; write!( result, "
{}
\n", unwrap_p(&rendered_block) )?; Ok(result) } fn linkify_man_to_md(&self, name: &str, section: u8) -> Result { let s = match self.man_map.get(&(name.to_string(), section)) { Some(link) => format!("[{}({})]({})", name, section, link), None => format!("[{}({})]({}.html)", name, section, name), }; Ok(s) } } fn trim_tags(s: &str) -> String { // This is a hack. It removes all HTML tags. let mut in_tag = false; let mut in_char_ref = false; s.chars() .filter(|&ch| match ch { '<' if in_tag => panic!("unexpected nested tag"), '&' if in_char_ref => panic!("unexpected nested char ref"), '<' => { in_tag = true; false } '&' => { in_char_ref = true; false } '>' if in_tag => { in_tag = false; false } ';' if in_char_ref => { in_char_ref = false; false } _ => !in_tag && !in_char_ref, }) .collect() } cargo-0.66.0/crates/mdman/src/format/text.rs000066400000000000000000000560761432416201200206750ustar00rootroot00000000000000//! Text formatter. use crate::util::{header_text, unwrap}; use crate::EventIter; use anyhow::{bail, Error}; use pulldown_cmark::{Alignment, Event, LinkType, Tag}; use std::fmt::Write; use std::mem; use url::Url; pub struct TextFormatter { url: Option, } impl TextFormatter { pub fn new(url: Option) -> TextFormatter { TextFormatter { url } } } impl super::Formatter for TextFormatter { fn render(&self, input: &str) -> Result { TextRenderer::render(input, self.url.clone(), 0) } fn render_options_start(&self) -> &'static str { // Tell pulldown_cmark to ignore this. // This will be stripped out later. " &'static str { "]]>" } fn render_option( &self, params: &[&str], block: &str, _man_name: &str, ) -> Result { let rendered_options = params .iter() .map(|param| TextRenderer::render(param, self.url.clone(), 0)) .collect::, Error>>()?; let trimmed: Vec<_> = rendered_options.iter().map(|o| o.trim()).collect(); // Wrap in HTML tags, they will be stripped out during rendering. Ok(format!( "
{}
\n
{}
\n
\n", trimmed.join(", "), block )) } fn linkify_man_to_md(&self, name: &str, section: u8) -> Result { Ok(format!("`{}`({})", name, section)) } } struct TextRenderer<'e> { output: String, indent: usize, /// The current line being written. Once a line break is encountered (such /// as starting a new paragraph), this will be written to `output` via /// `flush`. line: String, /// The current word being written. Once a break is encountered (such as a /// space) this will be written to `line` via `flush_word`. word: String, parser: EventIter<'e>, /// The base URL used for relative URLs. url: Option, table: Table, } impl<'e> TextRenderer<'e> { fn render(input: &str, url: Option, indent: usize) -> Result { let parser = crate::md_parser(input, url.clone()); let output = String::with_capacity(input.len() * 3 / 2); let mut mr = TextRenderer { output, indent, line: String::new(), word: String::new(), parser, url, table: Table::new(), }; mr.push_md()?; Ok(mr.output) } fn push_md(&mut self) -> Result<(), Error> { // If this is true, this is inside a cdata block used for hiding // content from pulldown_cmark. let mut in_cdata = false; // The current list stack. None if unordered, Some if ordered with the // given number as the current index. let mut list: Vec> = Vec::new(); // Used in some cases where spacing isn't desired. let mut suppress_paragraph = false; // Whether or not word-wrapping is enabled. let mut wrap_text = true; while let Some((event, range)) = self.parser.next() { let this_suppress_paragraph = suppress_paragraph; // Always reset suppression, even if the next event isn't a // paragraph. This is in essence, a 1-token lookahead where the // suppression is only enabled if the next event is a paragraph. 
suppress_paragraph = false; match event { Event::Start(tag) => { match tag { Tag::Paragraph => { if !this_suppress_paragraph { self.flush(); } } Tag::Heading(n) => { self.flush(); if n == 1 { let text = header_text(&mut self.parser)?; self.push_to_line(&text.to_uppercase()); self.hard_break(); self.hard_break(); } else if n == 2 { let text = header_text(&mut self.parser)?; self.push_to_line(&text.to_uppercase()); self.flush(); self.indent = 7; } else { let text = header_text(&mut self.parser)?; self.push_indent((n as usize - 2) * 3); self.push_to_line(&text); self.flush(); self.indent = (n as usize - 1) * 3 + 1; } } Tag::BlockQuote => { self.indent += 3; } Tag::CodeBlock(_kind) => { self.flush(); wrap_text = false; self.indent += 4; } Tag::List(start) => list.push(start), Tag::Item => { self.flush(); match list.last_mut().expect("item must have list start") { // Ordered list. Some(n) => { self.push_indent(self.indent); write!(self.line, "{}.", n)?; *n += 1; } // Unordered list. None => { self.push_indent(self.indent); self.push_to_line("o ") } } self.indent += 3; suppress_paragraph = true; } Tag::FootnoteDefinition(_label) => unimplemented!(), Tag::Table(alignment) => { assert!(self.table.alignment.is_empty()); self.flush(); self.table.alignment.extend(alignment); let table = self.table.process(&mut self.parser, self.indent)?; self.output.push_str(&table); self.hard_break(); self.table = Table::new(); } Tag::TableHead | Tag::TableRow | Tag::TableCell => { bail!("unexpected table element") } Tag::Emphasis => {} Tag::Strong => {} // Strikethrough isn't usually supported for TTY. Tag::Strikethrough => self.word.push_str("~~"), Tag::Link(link_type, dest_url, _title) => { if dest_url.starts_with('#') { // In a man page, page-relative anchors don't // have much meaning. continue; } match link_type { LinkType::Autolink | LinkType::Email => { // The text is a copy of the URL, which is not needed. 
match self.parser.next() { Some((Event::Text(_), _range)) => {} _ => bail!("expected text after autolink"), } } LinkType::Inline | LinkType::Reference | LinkType::Collapsed | LinkType::Shortcut => {} // This is currently unused. This is only // emitted with a broken link callback, but I // felt it is too annoying to escape `[` in // option descriptions. LinkType::ReferenceUnknown | LinkType::CollapsedUnknown | LinkType::ShortcutUnknown => { bail!( "link with missing reference `{}` located at offset {}", dest_url, range.start ); } } } Tag::Image(_link_type, _dest_url, _title) => { bail!("images are not currently supported") } } } Event::End(tag) => match &tag { Tag::Paragraph => { self.flush(); self.hard_break(); } Tag::Heading(_n) => {} Tag::BlockQuote => { self.indent -= 3; } Tag::CodeBlock(_kind) => { self.hard_break(); wrap_text = true; self.indent -= 4; } Tag::List(_) => { list.pop(); } Tag::Item => { self.flush(); self.indent -= 3; self.hard_break(); } Tag::FootnoteDefinition(_label) => {} Tag::Table(_) => {} Tag::TableHead => {} Tag::TableRow => {} Tag::TableCell => {} Tag::Emphasis => {} Tag::Strong => {} Tag::Strikethrough => self.word.push_str("~~"), Tag::Link(link_type, dest_url, _title) => { if dest_url.starts_with('#') { continue; } match link_type { LinkType::Autolink | LinkType::Email => {} LinkType::Inline | LinkType::Reference | LinkType::Collapsed | LinkType::Shortcut => self.flush_word(), _ => { panic!("unexpected tag {:?}", tag); } } self.flush_word(); write!(self.word, "<{}>", dest_url)?; } Tag::Image(_link_type, _dest_url, _title) => {} }, Event::Text(t) | Event::Code(t) => { if wrap_text { let chunks = split_chunks(&t); for chunk in chunks { if chunk == " " { self.flush_word(); } else { self.word.push_str(chunk); } } } else { for line in t.lines() { self.push_indent(self.indent); self.push_to_line(line); self.flush(); } } } Event::Html(t) => { if t.starts_with("") { in_cdata = false; } else { let trimmed = t.trim(); if trimmed.is_empty() { 
continue; } if trimmed == "
" { self.hard_break(); } else if trimmed.starts_with("
") { let opts = unwrap(trimmed, "
", "
"); self.push_indent(self.indent); self.push_to_line(opts); self.flush(); } else if trimmed.starts_with("
") { let mut def = String::new(); while let Some((Event::Html(t), _range)) = self.parser.next() { if t.starts_with("
") { break; } def.push_str(&t); } let rendered = TextRenderer::render(&def, self.url.clone(), self.indent + 4)?; self.push_to_line(rendered.trim_end()); self.flush(); } else { self.push_to_line(&t); self.flush(); } } } else { self.push_to_line(&t); self.flush(); } } Event::FootnoteReference(_t) => {} Event::SoftBreak => self.flush_word(), Event::HardBreak => self.flush(), Event::Rule => { self.flush(); self.push_indent(self.indent); self.push_to_line(&"_".repeat(79 - self.indent * 2)); self.flush(); } Event::TaskListMarker(_b) => unimplemented!(), } } Ok(()) } fn flush(&mut self) { self.flush_word(); if !self.line.is_empty() { self.output.push_str(&self.line); self.output.push('\n'); self.line.clear(); } } fn hard_break(&mut self) { self.flush(); if !self.output.ends_with("\n\n") { self.output.push('\n'); } } fn flush_word(&mut self) { if self.word.is_empty() { return; } if self.line.len() + self.word.len() >= 79 { self.output.push_str(&self.line); self.output.push('\n'); self.line.clear(); } if self.line.is_empty() { self.push_indent(self.indent); self.line.push_str(&self.word); } else { self.line.push(' '); self.line.push_str(&self.word); } self.word.clear(); } fn push_indent(&mut self, indent: usize) { for _ in 0..indent { self.line.push(' '); } } fn push_to_line(&mut self, text: &str) { self.flush_word(); self.line.push_str(text); } } /// Splits the text on whitespace. /// /// Consecutive whitespace is collapsed to a single ' ', and is included as a /// separate element in the result. fn split_chunks(text: &str) -> Vec<&str> { let mut result = Vec::new(); let mut start = 0; while start < text.len() { match text[start..].find(' ') { Some(i) => { if i != 0 { result.push(&text[start..start + i]); } result.push(" "); // Skip past whitespace. 
match text[start + i..].find(|c| c != ' ') { Some(n) => { start = start + i + n; } None => { break; } } } None => { result.push(&text[start..]); break; } } } result } struct Table { alignment: Vec, rows: Vec>, row: Vec, cell: String, } impl Table { fn new() -> Table { Table { alignment: Vec::new(), rows: Vec::new(), row: Vec::new(), cell: String::new(), } } /// Processes table events and generates a text table. fn process(&mut self, parser: &mut EventIter<'_>, indent: usize) -> Result { while let Some((event, _range)) = parser.next() { match event { Event::Start(tag) => match tag { Tag::TableHead | Tag::TableRow | Tag::TableCell | Tag::Emphasis | Tag::Strong => {} Tag::Strikethrough => self.cell.push_str("~~"), // Links not yet supported, they usually won't fit. Tag::Link(_, _, _) => {} _ => bail!("unexpected tag in table: {:?}", tag), }, Event::End(tag) => match tag { Tag::Table(_) => return self.render(indent), Tag::TableCell => { let cell = mem::replace(&mut self.cell, String::new()); self.row.push(cell); } Tag::TableHead | Tag::TableRow => { let row = mem::replace(&mut self.row, Vec::new()); self.rows.push(row); } Tag::Strikethrough => self.cell.push_str("~~"), _ => {} }, Event::Text(t) | Event::Code(t) => { self.cell.push_str(&t); } Event::Html(t) => bail!("html unsupported in tables: {:?}", t), _ => bail!("unexpected event in table: {:?}", event), } } bail!("table end not reached"); } fn render(&self, indent: usize) -> Result { // This is an extremely primitive layout routine. // First compute the potential maximum width of each cell. // 2 for 1 space margin on left and right. let width_acc = vec![2; self.alignment.len()]; let mut col_widths = self .rows .iter() .map(|row| row.iter().map(|cell| cell.len())) .fold(width_acc, |mut acc, row| { acc.iter_mut() .zip(row) // +3 for left/right margin and | symbol .for_each(|(a, b)| *a = (*a).max(b + 3)); acc }); // Shrink each column until it fits the total width, proportional to // the columns total percent width. 
let max_width = 78 - indent; // Include total len for | characters, and +1 for final |. let total_width = col_widths.iter().sum::() + col_widths.len() + 1; if total_width > max_width { let to_shrink = total_width - max_width; // Compute percentage widths, and shrink each column based on its // total percentage. for width in &mut col_widths { let percent = *width as f64 / total_width as f64; *width -= (to_shrink as f64 * percent).ceil() as usize; } } // Start rendering. let mut result = String::new(); // Draw the horizontal line separating each row. let mut row_line = String::new(); row_line.push_str(&" ".repeat(indent)); row_line.push('+'); let lines = col_widths .iter() .map(|width| "-".repeat(*width)) .collect::>(); row_line.push_str(&lines.join("+")); row_line.push('+'); row_line.push('\n'); // Draw top of the table. result.push_str(&row_line); // Draw each row. for row in &self.rows { // Word-wrap and fill each column as needed. let filled = fill_row(row, &col_widths, &self.alignment); // Need to transpose the cells across rows for cells that span // multiple rows. let height = filled.iter().map(|c| c.len()).max().unwrap(); for row_i in 0..height { result.push_str(&" ".repeat(indent)); result.push('|'); for filled_row in &filled { let cell = &filled_row[row_i]; result.push_str(cell); result.push('|'); } result.push('\n'); } result.push_str(&row_line); } Ok(result) } } /// Formats a row, filling cells with spaces and word-wrapping text. /// /// Returns a vec of cells, where each cell is split into multiple lines. fn fill_row(row: &[String], col_widths: &[usize], alignment: &[Alignment]) -> Vec> { let mut cell_lines = row .iter() .zip(col_widths) .zip(alignment) .map(|((cell, width), alignment)| fill_cell(cell, *width - 2, *alignment)) .collect::>(); // Fill each cell to match the maximum vertical height of the tallest cell. 
let max_lines = cell_lines.iter().map(|cell| cell.len()).max().unwrap(); for (cell, width) in cell_lines.iter_mut().zip(col_widths) { if cell.len() < max_lines { cell.extend(std::iter::repeat(" ".repeat(*width)).take(max_lines - cell.len())); } } cell_lines } /// Formats a cell. Word-wraps based on width, and adjusts based on alignment. /// /// Returns a vec of lines for the cell. fn fill_cell(text: &str, width: usize, alignment: Alignment) -> Vec { let fill_width = |text: &str| match alignment { Alignment::None | Alignment::Left => format!(" {: format!(" {:^width$} ", text, width = width), Alignment::Right => format!(" {:>width$} ", text, width = width), }; if text.len() < width { // No wrapping necessary, just format. vec![fill_width(text)] } else { // Word-wrap the cell. let mut result = Vec::new(); let mut line = String::new(); for word in text.split_whitespace() { if line.len() + word.len() >= width { // todo: word.len() > width result.push(fill_width(&line)); line.clear(); } if line.is_empty() { line.push_str(word); } else { line.push(' '); line.push_str(&word); } } if !line.is_empty() { result.push(fill_width(&line)); } result } } cargo-0.66.0/crates/mdman/src/hbs.rs000066400000000000000000000155711432416201200171700ustar00rootroot00000000000000//! Handlebars template processing. use crate::format::Formatter; use anyhow::Error; use handlebars::{ handlebars_helper, Context, Decorator, Handlebars, Helper, HelperDef, HelperResult, Output, RenderContext, RenderError, Renderable, }; use std::collections::HashMap; use std::path::Path; type FormatterRef<'a> = &'a (dyn Formatter + Send + Sync); /// Processes the handlebars template at the given file. 
pub fn expand(file: &Path, formatter: FormatterRef) -> Result { let mut handlebars = Handlebars::new(); handlebars.set_strict_mode(true); handlebars.register_helper("lower", Box::new(lower)); handlebars.register_helper("options", Box::new(OptionsHelper { formatter })); handlebars.register_helper("option", Box::new(OptionHelper { formatter })); handlebars.register_helper("man", Box::new(ManLinkHelper { formatter })); handlebars.register_decorator("set", Box::new(set_decorator)); handlebars.register_template_file("template", file)?; let includes = file.parent().unwrap().join("includes"); handlebars.register_templates_directory(".md", includes)?; let man_name = file .file_stem() .expect("expected filename") .to_str() .expect("utf8 filename") .to_string(); let data = HashMap::from([("man_name", man_name)]); let expanded = handlebars.render("template", &data)?; Ok(expanded) } /// Helper for `{{#options}}` block. struct OptionsHelper<'a> { formatter: FormatterRef<'a>, } impl HelperDef for OptionsHelper<'_> { fn call<'reg: 'rc, 'rc>( &self, h: &Helper<'reg, 'rc>, r: &'reg Handlebars<'reg>, ctx: &'rc Context, rc: &mut RenderContext<'reg, 'rc>, out: &mut dyn Output, ) -> HelperResult { if in_options(rc) { return Err(RenderError::new("options blocks cannot be nested")); } // Prevent nested {{#options}}. set_in_context(rc, "__MDMAN_IN_OPTIONS", serde_json::Value::Bool(true)); let s = self.formatter.render_options_start(); out.write(&s)?; let t = match h.template() { Some(t) => t, None => return Err(RenderError::new("options block must not be empty")), }; let block = t.renders(r, ctx, rc)?; out.write(&block)?; let s = self.formatter.render_options_end(); out.write(&s)?; remove_from_context(rc, "__MDMAN_IN_OPTIONS"); Ok(()) } } /// Whether or not the context is currently inside a `{{#options}}` block. fn in_options(rc: &RenderContext<'_, '_>) -> bool { rc.context() .map_or(false, |ctx| ctx.data().get("__MDMAN_IN_OPTIONS").is_some()) } /// Helper for `{{#option}}` block. 
struct OptionHelper<'a> { formatter: FormatterRef<'a>, } impl HelperDef for OptionHelper<'_> { fn call<'reg: 'rc, 'rc>( &self, h: &Helper<'reg, 'rc>, r: &'reg Handlebars<'reg>, ctx: &'rc Context, rc: &mut RenderContext<'reg, 'rc>, out: &mut dyn Output, ) -> HelperResult { if !in_options(rc) { return Err(RenderError::new("option must be in options block")); } let params = h.params(); if params.is_empty() { return Err(RenderError::new( "option block must have at least one param", )); } // Convert params to strings. let params = params .iter() .map(|param| { param .value() .as_str() .ok_or_else(|| RenderError::new("option params must be strings")) }) .collect::, RenderError>>()?; let t = match h.template() { Some(t) => t, None => return Err(RenderError::new("option block must not be empty")), }; // Render the block. let block = t.renders(r, ctx, rc)?; // Get the name of this page. let man_name = ctx .data() .get("man_name") .expect("expected man_name in context") .as_str() .expect("expect man_name str"); // Ask the formatter to convert this option to its format. let option = self .formatter .render_option(¶ms, &block, man_name) .map_err(|e| RenderError::new(format!("option render failed: {}", e)))?; out.write(&option)?; Ok(()) } } /// Helper for `{{man name section}}` expression. 
struct ManLinkHelper<'a> { formatter: FormatterRef<'a>, } impl HelperDef for ManLinkHelper<'_> { fn call<'reg: 'rc, 'rc>( &self, h: &Helper<'reg, 'rc>, _r: &'reg Handlebars<'reg>, _ctx: &'rc Context, _rc: &mut RenderContext<'reg, 'rc>, out: &mut dyn Output, ) -> HelperResult { let params = h.params(); if params.len() != 2 { return Err(RenderError::new("{{man}} must have two arguments")); } let name = params[0] .value() .as_str() .ok_or_else(|| RenderError::new("man link name must be a string"))?; let section = params[1] .value() .as_u64() .ok_or_else(|| RenderError::new("man link section must be an integer"))?; let section = u8::try_from(section).map_err(|_e| RenderError::new("section number too large"))?; let link = self .formatter .linkify_man_to_md(name, section) .map_err(|e| RenderError::new(format!("failed to linkify man: {}", e)))?; out.write(&link)?; Ok(()) } } /// `{{*set var=value}}` decorator. /// /// This sets a variable to a value within the template context. fn set_decorator( d: &Decorator, _: &Handlebars, _ctx: &Context, rc: &mut RenderContext, ) -> Result<(), RenderError> { let data_to_set = d.hash(); for (k, v) in data_to_set { set_in_context(rc, k, v.value().clone()); } Ok(()) } /// Sets a variable to a value within the context. fn set_in_context(rc: &mut RenderContext, key: &str, value: serde_json::Value) { let mut ctx = match rc.context() { Some(c) => (*c).clone(), None => Context::wraps(serde_json::Value::Object(serde_json::Map::new())).unwrap(), }; if let serde_json::Value::Object(m) = ctx.data_mut() { m.insert(key.to_string(), value); rc.set_context(ctx); } else { panic!("expected object in context"); } } /// Removes a variable from the context. 
fn remove_from_context(rc: &mut RenderContext, key: &str) { let ctx = rc.context().expect("cannot remove from null context"); let mut ctx = (*ctx).clone(); if let serde_json::Value::Object(m) = ctx.data_mut() { m.remove(key); rc.set_context(ctx); } else { panic!("expected object in context"); } } handlebars_helper!(lower: |s: str| s.to_lowercase()); cargo-0.66.0/crates/mdman/src/lib.rs000066400000000000000000000076371432416201200171660ustar00rootroot00000000000000//! mdman markdown to man converter. use anyhow::{bail, Context, Error}; use pulldown_cmark::{CowStr, Event, LinkType, Options, Parser, Tag}; use std::collections::HashMap; use std::fs; use std::io::{self, BufRead}; use std::ops::Range; use std::path::Path; use url::Url; mod format; mod hbs; mod util; use format::Formatter; /// Mapping of `(name, section)` of a man page to a URL. pub type ManMap = HashMap<(String, u8), String>; /// A man section. pub type Section = u8; /// The output formats supported by mdman. #[derive(Copy, Clone)] pub enum Format { Man, Md, Text, } impl Format { /// The filename extension for the format. pub fn extension(&self, section: Section) -> String { match self { Format::Man => section.to_string(), Format::Md => "md".to_string(), Format::Text => "txt".to_string(), } } } /// Converts the handlebars markdown file at the given path into the given /// format, returning the translated result. pub fn convert( file: &Path, format: Format, url: Option, man_map: ManMap, ) -> Result { let formatter: Box = match format { Format::Man => Box::new(format::man::ManFormatter::new(url)), Format::Md => Box::new(format::md::MdFormatter::new(man_map)), Format::Text => Box::new(format::text::TextFormatter::new(url)), }; let expanded = hbs::expand(file, &*formatter)?; // pulldown-cmark can behave a little differently with Windows newlines, // just normalize it. let expanded = expanded.replace("\r\n", "\n"); formatter.render(&expanded) } /// Pulldown-cmark iterator yielding an `(event, range)` tuple. 
type EventIter<'a> = Box, Range)> + 'a>; /// Creates a new markdown parser with the given input. pub(crate) fn md_parser(input: &str, url: Option) -> EventIter { let mut options = Options::empty(); options.insert(Options::ENABLE_TABLES); options.insert(Options::ENABLE_FOOTNOTES); options.insert(Options::ENABLE_STRIKETHROUGH); let parser = Parser::new_ext(input, options); let parser = parser.into_offset_iter(); // Translate all links to include the base url. let parser = parser.map(move |(event, range)| match event { Event::Start(Tag::Link(lt, dest_url, title)) if !matches!(lt, LinkType::Email) => ( Event::Start(Tag::Link(lt, join_url(url.as_ref(), dest_url), title)), range, ), Event::End(Tag::Link(lt, dest_url, title)) if !matches!(lt, LinkType::Email) => ( Event::End(Tag::Link(lt, join_url(url.as_ref(), dest_url), title)), range, ), _ => (event, range), }); Box::new(parser) } fn join_url<'a>(base: Option<&Url>, dest: CowStr<'a>) -> CowStr<'a> { match base { Some(base_url) => { // Absolute URL or page-relative anchor doesn't need to be translated. 
if dest.contains(':') || dest.starts_with('#') { dest } else { let joined = base_url.join(&dest).unwrap_or_else(|e| { panic!("failed to join URL `{}` to `{}`: {}", dest, base_url, e) }); String::from(joined).into() } } None => dest, } } pub fn extract_section(file: &Path) -> Result { let f = fs::File::open(file).with_context(|| format!("could not open `{}`", file.display()))?; let mut f = io::BufReader::new(f); let mut line = String::new(); f.read_line(&mut line)?; if !line.starts_with("# ") { bail!("expected input file to start with # header"); } let (_name, section) = util::parse_name_and_section(&line[2..].trim()).with_context(|| { format!( "expected input file to have header with the format `# command-name(1)`, found: `{}`", line ) })?; Ok(section) } cargo-0.66.0/crates/mdman/src/main.rs000066400000000000000000000107261432416201200173350ustar00rootroot00000000000000use anyhow::{bail, format_err, Context, Error}; use mdman::{Format, ManMap}; use std::collections::HashMap; use std::path::{Path, PathBuf}; use url::Url; /// Command-line options. 
struct Options { format: Format, output_dir: PathBuf, sources: Vec, url: Option, man_map: ManMap, } fn main() { if let Err(e) = run() { eprintln!("error: {}", e); for cause in e.chain().skip(1) { eprintln!("\nCaused by:"); for line in cause.to_string().lines() { if line.is_empty() { eprintln!(); } else { eprintln!(" {}", line); } } } std::process::exit(1); } } fn run() -> Result<(), Error> { let opts = process_args()?; if !opts.output_dir.exists() { std::fs::create_dir_all(&opts.output_dir).with_context(|| { format!( "failed to create output directory {}", opts.output_dir.display() ) })?; } for source in &opts.sources { let section = mdman::extract_section(source)?; let filename = Path::new(source.file_name().unwrap()).with_extension(opts.format.extension(section)); let out_path = opts.output_dir.join(filename); if same_file::is_same_file(source, &out_path).unwrap_or(false) { bail!("cannot output to the same file as the source"); } println!("Converting {} -> {}", source.display(), out_path.display()); let result = mdman::convert(&source, opts.format, opts.url.clone(), opts.man_map.clone()) .with_context(|| format!("failed to translate {}", source.display()))?; std::fs::write(out_path, result)?; } Ok(()) } fn process_args() -> Result { let mut format = None; let mut output = None; let mut url = None; let mut man_map: ManMap = HashMap::new(); let mut sources = Vec::new(); let mut args = std::env::args().skip(1); while let Some(arg) = args.next() { match arg.as_str() { "-t" => { format = match args.next().as_deref() { Some("man") => Some(Format::Man), Some("md") => Some(Format::Md), Some("txt") => Some(Format::Text), Some(s) => bail!("unknown output format: {}", s), None => bail!("-t requires a value (man, md, txt)"), }; } "-o" => { output = match args.next() { Some(s) => Some(PathBuf::from(s)), None => bail!("-o requires a value"), }; } "--url" => { url = match args.next() { Some(s) => { let url = Url::parse(&s) .with_context(|| format!("could not convert `{}` to a 
url", s))?; if !url.path().ends_with('/') { bail!("url `{}` should end with a /", url); } Some(url) } None => bail!("--url requires a value"), } } "--man" => { let man = args .next() .ok_or_else(|| format_err!("--man requires a value"))?; let parts: Vec<_> = man.splitn(2, '=').collect(); let key_parts: Vec<_> = parts[0].splitn(2, ':').collect(); if parts.len() != 2 || key_parts.len() != 2 { bail!("--man expected value with form name:1=link"); } let section: u8 = key_parts[1].parse().with_context(|| { format!("expected unsigned integer for section, got `{}`", parts[1]) })?; man_map.insert((key_parts[0].to_string(), section), parts[1].to_string()); } s => { sources.push(PathBuf::from(s)); } } } if format.is_none() { bail!("-t must be specified (man, md, txt)"); } if output.is_none() { bail!("-o must be specified (output directory)"); } if sources.is_empty() { bail!("at least one source must be specified"); } let opts = Options { format: format.unwrap(), output_dir: output.unwrap(), sources, url, man_map, }; Ok(opts) } cargo-0.66.0/crates/mdman/src/util.rs000066400000000000000000000030471432416201200173640ustar00rootroot00000000000000///! General utilities. use crate::EventIter; use anyhow::{bail, format_err, Context, Error}; use pulldown_cmark::{CowStr, Event, Tag}; /// Splits the text `foo(1)` into "foo" and `1`. pub fn parse_name_and_section(text: &str) -> Result<(&str, u8), Error> { let mut i = text.split_terminator(&['(', ')'][..]); let name = i .next() .ok_or_else(|| format_err!("man reference must have a name"))?; let section = i .next() .ok_or_else(|| format_err!("man reference must have a section such as mycommand(1)"))?; if let Some(s) = i.next() { bail!( "man reference must have the form mycommand(1), got extra part `{}`", s ); } let section: u8 = section .parse() .with_context(|| format!("section must be a number, got {}", section))?; Ok((name, section)) } /// Extracts the text from a header after Tag::Heading has been received. 
pub fn header_text<'e>(parser: &mut EventIter<'e>) -> Result, Error> { let text = match parser.next() { Some((Event::Text(t), _range)) => t, e => bail!("expected plain text in man header, got {:?}", e), }; match parser.next() { Some((Event::End(Tag::Heading(_)), _range)) => { return Ok(text); } e => bail!("expected plain text in man header, got {:?}", e), } } /// Removes tags from the front and back of a string. pub fn unwrap<'t>(text: &'t str, front: &str, back: &str) -> &'t str { text.trim().trim_start_matches(front).trim_end_matches(back) } cargo-0.66.0/crates/mdman/tests/000077500000000000000000000000001432416201200164105ustar00rootroot00000000000000cargo-0.66.0/crates/mdman/tests/compare.rs000066400000000000000000000027041432416201200204070ustar00rootroot00000000000000//! Compares input to expected output. //! //! Use the MDMAN_BLESS environment variable to automatically update the //! expected output. use mdman::{Format, ManMap}; use pretty_assertions::assert_eq; use std::path::PathBuf; use url::Url; fn run(name: &str) { let input = PathBuf::from(format!("tests/compare/{}.md", name)); let url = Some(Url::parse("https://example.org/").unwrap()); let mut map = ManMap::new(); map.insert( ("other-cmd".to_string(), 1), "https://example.org/commands/other-cmd.html".to_string(), ); for &format in &[Format::Man, Format::Md, Format::Text] { let section = mdman::extract_section(&input).unwrap(); let result = mdman::convert(&input, format, url.clone(), map.clone()).unwrap(); let expected_path = format!( "tests/compare/expected/{}.{}", name, format.extension(section) ); if std::env::var("MDMAN_BLESS").is_ok() { std::fs::write(&expected_path, result).unwrap(); } else { let expected = std::fs::read_to_string(&expected_path).unwrap(); // Fix if Windows checked out with autocrlf. let expected = expected.replace("\r\n", "\n"); assert_eq!(expected, result); } } } macro_rules! 
test( ($name:ident) => ( #[test] fn $name() { run(stringify!($name)); } ) ); test!(formatting); test!(links); test!(options); test!(tables); test!(vars); cargo-0.66.0/crates/mdman/tests/compare/000077500000000000000000000000001432416201200200365ustar00rootroot00000000000000cargo-0.66.0/crates/mdman/tests/compare/expected/000077500000000000000000000000001432416201200216375ustar00rootroot00000000000000cargo-0.66.0/crates/mdman/tests/compare/expected/formatting.1000066400000000000000000000037761432416201200241100ustar00rootroot00000000000000'\" t .TH "FORMATTING" "1" .nh .ad l .ss \n[.ss] 0 .sp This is \fBnested \f(BIformatting\fB \fBtext\fB\fR\&. .SH "SECOND HEADING" Some text at second level. .SS "Third heading" Some text at third level. .SS "Fourth heading" Some text at fourth level. .SH "Quotes and blocks." Here are some quotes and blocks. .RS 3 .ll -5 .sp This is a block quote. Ambidextrously koala apart that prudent blindly alas far amid dear goodness turgid so exact inside oh and alas much fanciful that dark on spoon\-fed adequately insolent walking crud. .br .RE .ll .sp .RS 4 .nf This is a code block. Groundhog watchfully sudden firefly some self\-consciously hotly jeepers satanic after that this parrot this at virtuous some mocking the leaned jeez nightingale as much mallard so because jeez turned dear crud grizzly strenuously. Indented and should be unmodified. .fi .RE .sp .RS 4 .nf This is an indented code block. Egregiously yikes animatedly since outside beseechingly a badger hey shakily giraffe a one wow one this goodness regarding reindeer so astride before. Doubly indented .fi .RE .SH "Lists" .sp .RS 4 \h'-04' 1.\h'+01'Ordered list .sp .RS 4 \h'-04'\(bu\h'+02'Unordered list .sp With a second paragraph inside it .sp .RS 4 \h'-04' 1.\h'+01'Inner ordered list .RE .sp .RS 4 \h'-04' 2.\h'+01'Another .RE .RE .sp .RS 4 \h'-04'\(bu\h'+02'Eggs .RE .sp .RS 4 \h'-04'\(bu\h'+02'Milk .sp .RS 4 \h'-04' 5.\h'+01'Don't start at one. 
.RE .sp .RS 4 \h'-04' 6.\h'+01'tamarind .RE .RE .RE .sp .RS 4 \h'-04' 2.\h'+01'Second element .RE .sp .RS 4 \h'-04' 3.\h'+01'Third element .RE .SH "Breaks" This has a .br hard break in it and a soft one. .SH "Horizontal rule" This should contain a line: \l'\n(.lu' .sp Nice! .SH "Strange characters" Handles escaping for characters .sp \&.dot at the start of a line. .sp \(rsfBnot really troff .sp Various characters \(rs \- \[en] \[em] \- | | ` .sp .RS 4 .nf tree `\-\- example |\-\- salamander | |\-\- honey | `\-\- some |\-\- fancifully `\-\- trout .fi .RE .sp \ \ \ \ non\-breaking space. cargo-0.66.0/crates/mdman/tests/compare/expected/formatting.md000066400000000000000000000032571432416201200243420ustar00rootroot00000000000000# formatting(1) This is **nested _formatting_ `text`**. ## SECOND HEADING Some text at second level. ### Third heading Some text at third level. #### Fourth heading Some text at fourth level. ## Quotes and blocks. Here are some quotes and blocks. > This is a block quote. Ambidextrously koala apart that prudent blindly alas > far amid dear goodness turgid so exact inside oh and alas much fanciful that > dark on spoon-fed adequately insolent walking crud. ``` This is a code block. Groundhog watchfully sudden firefly some self-consciously hotly jeepers satanic after that this parrot this at virtuous some mocking the leaned jeez nightingale as much mallard so because jeez turned dear crud grizzly strenuously. Indented and should be unmodified. ``` This is an indented code block. Egregiously yikes animatedly since outside beseechingly a badger hey shakily giraffe a one wow one this goodness regarding reindeer so astride before. Doubly indented ## Lists 1. Ordered list * Unordered list With a second paragraph inside it 1. Inner ordered list 1. Another * Eggs * Milk 5. Don't start at one. 6. tamarind 1. Second element 1. Third element ## Breaks This has a\ hard break in it and a soft one. ## Horizontal rule This should contain a line: --- Nice! 
## Strange characters Handles escaping for characters .dot at the start of a line. \fBnot really troff Various characters \ - – β€” ─ β”‚ β”œ β”” ``` tree └── example β”œβ”€β”€ salamander β”‚ β”œβ”€β”€ honey β”‚ └── some β”œβ”€β”€ fancifully └── trout ```     non-breaking space. cargo-0.66.0/crates/mdman/tests/compare/expected/formatting.txt000066400000000000000000000040321432416201200245510ustar00rootroot00000000000000FORMATTING(1) This is nested formatting text. SECOND HEADING Some text at second level. Third heading Some text at third level. Fourth heading Some text at fourth level. QUOTES AND BLOCKS. Here are some quotes and blocks. This is a block quote. Ambidextrously koala apart that prudent blindly alas far amid dear goodness turgid so exact inside oh and alas much fanciful that dark on spoon-fed adequately insolent walking crud. This is a code block. Groundhog watchfully sudden firefly some self-consciously hotly jeepers satanic after that this parrot this at virtuous some mocking the leaned jeez nightingale as much mallard so because jeez turned dear crud grizzly strenuously. Indented and should be unmodified. This is an indented code block. Egregiously yikes animatedly since outside beseechingly a badger hey shakily giraffe a one wow one this goodness regarding reindeer so astride before. Doubly indented LISTS 1. Ordered list o Unordered list With a second paragraph inside it 1. Inner ordered list 2. Another o Eggs o Milk 5. Don't start at one. 6. tamarind 2. Second element 3. Third element BREAKS This has a hard break in it and a soft one. HORIZONTAL RULE This should contain a line: _________________________________________________________________ Nice! STRANGE CHARACTERS Handles escaping for characters .dot at the start of a line. \fBnot really troff Various characters \ - – β€” ─ β”‚ β”œ β”” tree └── example β”œβ”€β”€ salamander β”‚ β”œβ”€β”€ honey β”‚ └── some β”œβ”€β”€ fancifully └── trout Β Β Β Β non-breaking space. 
cargo-0.66.0/crates/mdman/tests/compare/expected/links.1000066400000000000000000000017261432416201200230470ustar00rootroot00000000000000'\" t .TH "LINKS" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" links \- Test of different link kinds .SH "DESCRIPTION" Inline link: \fIinline link\fR .sp Reference link: \fIthis is a link\fR .sp Collapsed: \fIcollapsed\fR .sp Shortcut: \fIshortcut\fR .sp Autolink: .sp Email: .sp Relative link: \fIrelative link\fR .sp Collapsed unknown: [collapsed unknown][] .sp Reference unknown: [foo][unknown] .sp Shortcut unknown: [shortcut unknown] .sp \fBother\-cmd\fR(1) .sp \fBlocal\-cmd\fR(1) .sp \fISome link\fR .sp \fB\-\-include\fR .RS 4 Testing an \fIincluded link\fR \&. .RE .SH "OPTIONS" .sp \fB\-\-foo\-bar\fR .RS 4 Example \fIlink\fR \&. See \fBother\-cmd\fR(1), \fBlocal\-cmd\fR(1) .RE cargo-0.66.0/crates/mdman/tests/compare/expected/links.md000066400000000000000000000023641432416201200233060ustar00rootroot00000000000000# links(1) ## NAME links - Test of different link kinds ## DESCRIPTION Inline link: [inline link](https://example.com/inline) Reference link: [this is a link][bar] Collapsed: [collapsed][] Shortcut: [shortcut] Autolink: Email: Relative link: [relative link](foo/bar.html) Collapsed unknown: [collapsed unknown][] Reference unknown: [foo][unknown] Shortcut unknown: [shortcut unknown] [other-cmd(1)](https://example.org/commands/other-cmd.html) [local-cmd(1)](local-cmd.html) [Some link](foo.html)
Testing an included link.
## OPTIONS
Example link. See other-cmd(1), local-cmd(1)
[bar]: https://example.com/bar [collapsed]: https://example.com/collapsed [shortcut]: https://example.com/shortcut cargo-0.66.0/crates/mdman/tests/compare/expected/links.txt000066400000000000000000000016231432416201200235220ustar00rootroot00000000000000LINKS(1) NAME links - Test of different link kinds DESCRIPTION Inline link: inline link Reference link: this is a link Collapsed: collapsed Shortcut: shortcut Autolink: Email: Relative link: relative link Collapsed unknown: [collapsed unknown][] Reference unknown: [foo][unknown] Shortcut unknown: [shortcut unknown] other-cmd(1) local-cmd(1) Some link --include Testing an included link . OPTIONS --foo-bar Example link . See other-cmd(1), local-cmd(1) cargo-0.66.0/crates/mdman/tests/compare/expected/options.1000066400000000000000000000025421432416201200234170ustar00rootroot00000000000000'\" t .TH "MY\-COMMAND" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" my\-command \- A brief description .SH "SYNOPSIS" \fBmy\-command\fR [\fB\-\-abc\fR | \fB\-\-xyz\fR] \fIname\fR .br \fBmy\-command\fR [\fB\-f\fR \fIfile\fR] .br \fBmy\-command\fR (\fB\-m\fR | \fB\-M\fR) [\fIoldbranch\fR] \fInewbranch\fR .br \fBmy\-command\fR (\fB\-d\fR | \fB\-D\fR) [\fB\-r\fR] \fIbranchname\fR\&... .SH "DESCRIPTION" A description of the command. .sp .RS 4 \h'-04'\(bu\h'+02'One .sp .RS 4 \h'-04'\(bu\h'+02'Sub one .RE .sp .RS 4 \h'-04'\(bu\h'+02'Sub two .RE .RE .sp .RS 4 \h'-04'\(bu\h'+02'Two .RE .sp .RS 4 \h'-04'\(bu\h'+02'Three .RE .SH "OPTIONS" .SS "Command options" .sp \fB\-\-foo\-bar\fR .RS 4 Demo \fIemphasis\fR, \fBstrong\fR, ~~strike~~ .RE .sp \fB\-p\fR \fIspec\fR, \fB\-\-package\fR \fIspec\fR .RS 4 This has multiple flags. .RE .sp \fInamed\-arg...\fR .RS 4 A named argument. .RE .SS "Common Options" .sp \fB@\fR\fIfilename\fR .RS 4 Load from filename. .RE .sp \fB\-\-foo\fR [\fIbar\fR] .RS 4 Flag with optional value. .RE .sp \fB\-\-foo\fR[\fB=\fR\fIbar\fR] .RS 4 Alternate syntax for optional value (with required = for disambiguation). 
.RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'An example .sp .RS 4 .nf my\-command \-\-abc .fi .RE .RE .sp .RS 4 \h'-04' 2.\h'+01'Another example .sp .RS 4 .nf my\-command \-\-xyz .fi .RE .RE .SH "SEE ALSO" \fBother\-command\fR(1) \fBabc\fR(7) cargo-0.66.0/crates/mdman/tests/compare/expected/options.md000066400000000000000000000040351432416201200236560ustar00rootroot00000000000000# my-command(1) ## NAME my-command - A brief description ## SYNOPSIS `my-command` [`--abc` | `--xyz`] _name_\ `my-command` [`-f` _file_]\ `my-command` (`-m` | `-M`) [_oldbranch_] _newbranch_\ `my-command` (`-d` | `-D`) [`-r`] _branchname_... ## DESCRIPTION A description of the command. * One * Sub one * Sub two * Two * Three ## OPTIONS ### Command options
--foo-bar
Demo emphasis, strong, strike
-p spec
--package spec
This has multiple flags.
named-arg...
A named argument.
### Common Options
@filename
Load from filename.
--foo [bar]
Flag with optional value.
--foo[=bar]
Alternate syntax for optional value (with required = for disambiguation).
## EXAMPLES 1. An example ``` my-command --abc ``` 1. Another example my-command --xyz ## SEE ALSO [other-command(1)](other-command.html) [abc(7)](abc.html) cargo-0.66.0/crates/mdman/tests/compare/expected/options.txt000066400000000000000000000017321432416201200240760ustar00rootroot00000000000000MY-COMMAND(1) NAME my-command - A brief description SYNOPSIS my-command [--abc | --xyz] name my-command [-f file] my-command (-m | -M) [oldbranch] newbranch my-command (-d | -D) [-r] branchname... DESCRIPTION A description of the command. o One o Sub one o Sub two o Two o Three OPTIONS Command options --foo-bar Demo emphasis, strong, ~~strike~~ -p spec, --package spec This has multiple flags. named-arg... A named argument. Common Options @filename Load from filename. --foo [bar] Flag with optional value. --foo[=bar] Alternate syntax for optional value (with required = for disambiguation). EXAMPLES 1. An example my-command --abc 2. Another example my-command --xyz SEE ALSO other-command(1) abc(7) cargo-0.66.0/crates/mdman/tests/compare/expected/tables.1000066400000000000000000000015771432416201200232050ustar00rootroot00000000000000'\" t .TH "TABLES" "1" .nh .ad l .ss \n[.ss] 0 .SH "DESCRIPTION" Testing tables. .TS allbox tab(:); lt. T{ Single col T} T{ Hi! :) T} .TE .sp .TS allbox tab(:); lt lt lt. T{ Header content T}:T{ With \fBformat\fR \fItext\fR T}:T{ Another column T} T{ Some data T}:T{ More data T}:T{ T} T{ Extra long amount of text within a column T}:T{ hi T}:T{ there T} .TE .sp .TS allbox tab(:); lt ct rt. T{ Left aligned T}:T{ Center aligned T}:T{ Right aligned T} T{ abc T}:T{ def T}:T{ ghi T} .TE .sp .TS allbox tab(:); lt ct rt. T{ Left aligned T}:T{ Center aligned T}:T{ Right aligned T} T{ X T}:T{ X T}:T{ X T} T{ Extra long text 123456789012 with mixed widths. T}:T{ Extra long text 123456789012 with mixed widths. T}:T{ Extra long text 123456789012 with mixed widths. T} .TE .sp .TS allbox tab(:); lt. 
T{ Link check T} T{ \fIfoo\fR T} T{ T} .TE .sp cargo-0.66.0/crates/mdman/tests/compare/expected/tables.md000066400000000000000000000013661432416201200234410ustar00rootroot00000000000000# tables(1) ## DESCRIPTION Testing tables. | Single col | -------------- | Hi! :) | Header content | With `format` *text* | Another column ---------------|----------------------|---------------- Some data | More data | Extra long amount of text within a column | hi | there Left aligned | Center aligned | Right aligned -------------|:--------------:|--------------: abc | def | ghi Left aligned | Center aligned | Right aligned -------------|:--------------:|--------------: X | X | X Extra long text 123456789012 with mixed widths. | Extra long text 123456789012 with mixed widths. | Extra long text 123456789012 with mixed widths. | Link check | -------------- | [foo] | | | [foo]: https://example.com/ cargo-0.66.0/crates/mdman/tests/compare/expected/tables.txt000066400000000000000000000041321432416201200236520ustar00rootroot00000000000000TABLES(1) DESCRIPTION Testing tables. +-------------+ | Single col | +-------------+ | Hi! 
:) | +-------------+ +-------------------------------------+----------------+--------------+ | Header content | With format | Another | | | text | column | +-------------------------------------+----------------+--------------+ | Some data | More data | | +-------------------------------------+----------------+--------------+ | Extra long amount of text within a | hi | there | | column | | | +-------------------------------------+----------------+--------------+ +---------------+-----------------+----------------+ | Left aligned | Center aligned | Right aligned | +---------------+-----------------+----------------+ | abc | def | ghi | +---------------+-----------------+----------------+ +-----------------------+-----------------------+-----------------------+ | Left aligned | Center aligned | Right aligned | +-----------------------+-----------------------+-----------------------+ | X | X | X | +-----------------------+-----------------------+-----------------------+ | Extra long text | Extra long text | Extra long text | | 123456789012 with | 123456789012 with | 123456789012 with | | mixed widths. | mixed widths. | mixed widths. 
| +-----------------------+-----------------------+-----------------------+ +-----------------------+ | Link check | +-----------------------+ | foo | +-----------------------+ | https://example.com/ | +-----------------------+ cargo-0.66.0/crates/mdman/tests/compare/expected/vars.7000066400000000000000000000000751432416201200227040ustar00rootroot00000000000000'\" t .TH "VARS" "7" .nh .ad l .ss \n[.ss] 0 .sp Bar .sp bar cargo-0.66.0/crates/mdman/tests/compare/expected/vars.md000066400000000000000000000000261432416201200231320ustar00rootroot00000000000000# vars(7) Bar bar cargo-0.66.0/crates/mdman/tests/compare/expected/vars.txt000066400000000000000000000000231432416201200233460ustar00rootroot00000000000000VARS(7) Bar bar cargo-0.66.0/crates/mdman/tests/compare/formatting.md000066400000000000000000000032571432416201200225410ustar00rootroot00000000000000# formatting(1) This is **nested _formatting_ `text`**. ## SECOND HEADING Some text at second level. ### Third heading Some text at third level. #### Fourth heading Some text at fourth level. ## Quotes and blocks. Here are some quotes and blocks. > This is a block quote. Ambidextrously koala apart that prudent blindly alas > far amid dear goodness turgid so exact inside oh and alas much fanciful that > dark on spoon-fed adequately insolent walking crud. ``` This is a code block. Groundhog watchfully sudden firefly some self-consciously hotly jeepers satanic after that this parrot this at virtuous some mocking the leaned jeez nightingale as much mallard so because jeez turned dear crud grizzly strenuously. Indented and should be unmodified. ``` This is an indented code block. Egregiously yikes animatedly since outside beseechingly a badger hey shakily giraffe a one wow one this goodness regarding reindeer so astride before. Doubly indented ## Lists 1. Ordered list * Unordered list With a second paragraph inside it 1. Inner ordered list 1. Another * Eggs * Milk 5. Don't start at one. 6. tamarind 1. Second element 1. 
Third element ## Breaks This has a\ hard break in it and a soft one. ## Horizontal rule This should contain a line: --- Nice! ## Strange characters Handles escaping for characters .dot at the start of a line. \fBnot really troff Various characters \ - – β€” ─ β”‚ β”œ β”” ``` tree └── example β”œβ”€β”€ salamander β”‚ β”œβ”€β”€ honey β”‚ └── some β”œβ”€β”€ fancifully └── trout ```     non-breaking space. cargo-0.66.0/crates/mdman/tests/compare/includes/000077500000000000000000000000001432416201200216445ustar00rootroot00000000000000cargo-0.66.0/crates/mdman/tests/compare/includes/links-include.md000066400000000000000000000002071432416201200247260ustar00rootroot00000000000000[Some link](foo.html) {{#options}} {{#option "`--include`"}} Testing an [included link](included_link.html). {{/option}} {{/options}} cargo-0.66.0/crates/mdman/tests/compare/includes/options-common.md000066400000000000000000000004231432416201200251460ustar00rootroot00000000000000{{#options}} {{#option "`@`_filename_"}} Load from filename. {{/option}} {{#option "`--foo` [_bar_]"}} Flag with optional value. {{/option}} {{#option "`--foo`[`=`_bar_]"}} Alternate syntax for optional value (with required = for disambiguation). {{/option}} {{/options}} cargo-0.66.0/crates/mdman/tests/compare/links.md000066400000000000000000000014141432416201200215000ustar00rootroot00000000000000# links(1) ## NAME links - Test of different link kinds ## DESCRIPTION Inline link: [inline link](https://example.com/inline) Reference link: [this is a link][bar] Collapsed: [collapsed][] Shortcut: [shortcut] Autolink: Email: Relative link: [relative link](foo/bar.html) Collapsed unknown: [collapsed unknown][] Reference unknown: [foo][unknown] Shortcut unknown: [shortcut unknown] {{man "other-cmd" 1}} {{man "local-cmd" 1}} {{> links-include}} ## OPTIONS {{#options}} {{#option "`--foo-bar`"}} Example [link](bar.html). 
See {{man "other-cmd" 1}}, {{man "local-cmd" 1}} {{/option}} {{/options}} [bar]: https://example.com/bar [collapsed]: https://example.com/collapsed [shortcut]: https://example.com/shortcut cargo-0.66.0/crates/mdman/tests/compare/options.md000066400000000000000000000015011432416201200220500ustar00rootroot00000000000000# my-command(1) ## NAME my-command - A brief description ## SYNOPSIS `my-command` [`--abc` | `--xyz`] _name_\ `my-command` [`-f` _file_]\ `my-command` (`-m` | `-M`) [_oldbranch_] _newbranch_\ `my-command` (`-d` | `-D`) [`-r`] _branchname_... ## DESCRIPTION A description of the command. * One * Sub one * Sub two * Two * Three ## OPTIONS ### Command options {{#options}} {{#option "`--foo-bar`"}} Demo *emphasis*, **strong**, ~~strike~~ {{/option}} {{#option "`-p` _spec_" "`--package` _spec_"}} This has multiple flags. {{/option}} {{#option "_named-arg..._"}} A named argument. {{/option}} {{/options}} ### Common Options {{> options-common}} ## EXAMPLES 1. An example ``` my-command --abc ``` 1. Another example my-command --xyz ## SEE ALSO {{man "other-command" 1}} {{man "abc" 7}} cargo-0.66.0/crates/mdman/tests/compare/tables.md000066400000000000000000000013661432416201200216400ustar00rootroot00000000000000# tables(1) ## DESCRIPTION Testing tables. | Single col | -------------- | Hi! :) | Header content | With `format` *text* | Another column ---------------|----------------------|---------------- Some data | More data | Extra long amount of text within a column | hi | there Left aligned | Center aligned | Right aligned -------------|:--------------:|--------------: abc | def | ghi Left aligned | Center aligned | Right aligned -------------|:--------------:|--------------: X | X | X Extra long text 123456789012 with mixed widths. | Extra long text 123456789012 with mixed widths. | Extra long text 123456789012 with mixed widths. 
| Link check | -------------- | [foo] | | | [foo]: https://example.com/ cargo-0.66.0/crates/mdman/tests/compare/vars.md000066400000000000000000000000661432416201200213350ustar00rootroot00000000000000# vars(7) {{*set foo="Bar"}} {{foo}} {{lower foo}} cargo-0.66.0/crates/mdman/tests/invalid.rs000066400000000000000000000015541432416201200204110ustar00rootroot00000000000000//! Tests for errors and invalid input. use mdman::{Format, ManMap}; use pretty_assertions::assert_eq; use std::path::PathBuf; fn run(name: &str, expected_error: &str) { let input = PathBuf::from(format!("tests/invalid/{}", name)); match mdman::convert(&input, Format::Man, None, ManMap::new()) { Ok(_) => { panic!("expected {} to fail", name); } Err(e) => { assert_eq!(expected_error, e.to_string()); } } } macro_rules! test( ($name:ident, $file_name:expr, $error:expr) => ( #[test] fn $name() { run($file_name, $error); } ) ); test!( nested, "nested.md", "Error rendering \"template\" line 4, col 1: options blocks cannot be nested" ); test!( not_inside_options, "not-inside-options.md", "Error rendering \"template\" line 3, col 1: option must be in options block" ); cargo-0.66.0/crates/mdman/tests/invalid/000077500000000000000000000000001432416201200200365ustar00rootroot00000000000000cargo-0.66.0/crates/mdman/tests/invalid/nested.md000066400000000000000000000001011432416201200216320ustar00rootroot00000000000000# nested(1) {{#options}} {{#options}} {{/options}} {{/options}} cargo-0.66.0/crates/mdman/tests/invalid/not-inside-options.md000066400000000000000000000001271432416201200241220ustar00rootroot00000000000000# not-inside-options(1) {{#option "`-o`"}} Testing without options block. 
{{/option}} cargo-0.66.0/crates/resolver-tests/000077500000000000000000000000001432416201200171535ustar00rootroot00000000000000cargo-0.66.0/crates/resolver-tests/Cargo.toml000066400000000000000000000003431432416201200211030ustar00rootroot00000000000000[package] name = "resolver-tests" version = "0.1.0" edition = "2018" [dependencies] cargo = { path = "../.." } cargo-util = { path = "../cargo-util" } proptest = "0.9.1" lazy_static = "1.3.0" varisat = "0.2.1" atty = "0.2.11" cargo-0.66.0/crates/resolver-tests/src/000077500000000000000000000000001432416201200177425ustar00rootroot00000000000000cargo-0.66.0/crates/resolver-tests/src/lib.rs000066400000000000000000001050621432416201200210620ustar00rootroot00000000000000#![allow(clippy::all)] use std::cell::RefCell; use std::cmp::PartialEq; use std::cmp::{max, min}; use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; use std::fmt; use std::fmt::Write; use std::rc::Rc; use std::task::Poll; use std::time::Instant; use cargo::core::dependency::DepKind; use cargo::core::resolver::{self, ResolveOpts, VersionPreferences}; use cargo::core::source::{GitReference, QueryKind, SourceId}; use cargo::core::Resolve; use cargo::core::{Dependency, PackageId, Registry, Summary}; use cargo::util::{CargoResult, Config, Graph, IntoUrl}; use proptest::collection::{btree_map, vec}; use proptest::prelude::*; use proptest::sample::Index; use proptest::string::string_regex; use varisat::{self, ExtendFormula}; pub fn resolve(deps: Vec, registry: &[Summary]) -> CargoResult> { resolve_with_config(deps, registry, &Config::default().unwrap()) } pub fn resolve_and_validated( deps: Vec, registry: &[Summary], sat_resolve: Option, ) -> CargoResult> { let resolve = resolve_with_config_raw(deps.clone(), registry, &Config::default().unwrap()); match resolve { Err(e) => { let sat_resolve = sat_resolve.unwrap_or_else(|| SatResolve::new(registry)); if sat_resolve.sat_resolve(&deps) { panic!( "the resolve err but the sat_resolve thinks this will 
work:\n{}", sat_resolve.use_packages().unwrap() ); } Err(e) } Ok(resolve) => { let mut stack = vec![pkg_id("root")]; let mut used = HashSet::new(); let mut links = HashSet::new(); while let Some(p) = stack.pop() { assert!(resolve.contains(&p)); if used.insert(p) { // in the tests all `links` crates end in `-sys` if p.name().ends_with("-sys") { assert!(links.insert(p.name())); } stack.extend(resolve.deps(p).map(|(dp, deps)| { for d in deps { assert!(d.matches_id(dp)); } dp })); } } let out = resolve.sort(); assert_eq!(out.len(), used.len()); let mut pub_deps: HashMap> = HashMap::new(); for &p in out.iter() { // make the list of `p` public dependencies let mut self_pub_dep = HashSet::new(); self_pub_dep.insert(p); for (dp, deps) in resolve.deps(p) { if deps.iter().any(|d| d.is_public()) { self_pub_dep.extend(pub_deps[&dp].iter().cloned()) } } pub_deps.insert(p, self_pub_dep); // check if `p` has a public dependencies conflicts let seen_dep: BTreeSet<_> = resolve .deps(p) .flat_map(|(dp, _)| pub_deps[&dp].iter().cloned()) .collect(); let seen_dep: Vec<_> = seen_dep.iter().collect(); for a in seen_dep.windows(2) { if a[0].name() == a[1].name() { panic!( "the package {:?} can publicly see {:?} and {:?}", p, a[0], a[1] ) } } } let sat_resolve = sat_resolve.unwrap_or_else(|| SatResolve::new(registry)); if !sat_resolve.sat_is_valid_solution(&out) { panic!( "the sat_resolve err but the resolve thinks this will work:\n{:?}", resolve ); } Ok(out) } } } pub fn resolve_with_config( deps: Vec, registry: &[Summary], config: &Config, ) -> CargoResult> { let resolve = resolve_with_config_raw(deps, registry, config)?; Ok(resolve.sort()) } pub fn resolve_with_config_raw( deps: Vec, registry: &[Summary], config: &Config, ) -> CargoResult { struct MyRegistry<'a> { list: &'a [Summary], used: HashSet, } impl<'a> Registry for MyRegistry<'a> { fn query( &mut self, dep: &Dependency, kind: QueryKind, f: &mut dyn FnMut(Summary), ) -> Poll> { for summary in self.list.iter() { let matched = 
match kind { QueryKind::Exact => dep.matches(summary), QueryKind::Fuzzy => true, }; if matched { self.used.insert(summary.package_id()); f(summary.clone()); } } Poll::Ready(Ok(())) } fn describe_source(&self, _src: SourceId) -> String { String::new() } fn is_replaced(&self, _src: SourceId) -> bool { false } fn block_until_ready(&mut self) -> CargoResult<()> { Ok(()) } } impl<'a> Drop for MyRegistry<'a> { fn drop(&mut self) { if std::thread::panicking() && self.list.len() != self.used.len() { // we found a case that causes a panic and did not use all of the input. // lets print the part of the input that was used for minimization. println!( "{:?}", PrettyPrintRegistry( self.list .iter() .filter(|s| { self.used.contains(&s.package_id()) }) .cloned() .collect() ) ); } } } let mut registry = MyRegistry { list: registry, used: HashSet::new(), }; let summary = Summary::new( config, pkg_id("root"), deps, &BTreeMap::new(), None::<&String>, ) .unwrap(); let opts = ResolveOpts::everything(); let start = Instant::now(); let resolve = resolver::resolve( &[(summary, opts)], &[], &mut registry, &VersionPreferences::default(), Some(config), true, ); // The largest test in our suite takes less then 30 sec. // So lets fail the test if we have ben running for two long. 
assert!(start.elapsed().as_secs() < 60); resolve } const fn num_bits() -> usize { std::mem::size_of::() * 8 } fn log_bits(x: usize) -> usize { if x == 0 { return 0; } assert!(x > 0); (num_bits::() as u32 - x.leading_zeros()) as usize } fn sat_at_most_one(solver: &mut impl varisat::ExtendFormula, vars: &[varisat::Var]) { if vars.len() <= 1 { return; } else if vars.len() == 2 { solver.add_clause(&[vars[0].negative(), vars[1].negative()]); return; } else if vars.len() == 3 { solver.add_clause(&[vars[0].negative(), vars[1].negative()]); solver.add_clause(&[vars[0].negative(), vars[2].negative()]); solver.add_clause(&[vars[1].negative(), vars[2].negative()]); return; } // use the "Binary Encoding" from // https://www.it.uu.se/research/group/astra/ModRef10/papers/Alan%20M.%20Frisch%20and%20Paul%20A.%20Giannoros.%20SAT%20Encodings%20of%20the%20At-Most-k%20Constraint%20-%20ModRef%202010.pdf let bits: Vec = solver.new_var_iter(log_bits(vars.len())).collect(); for (i, p) in vars.iter().enumerate() { for b in 0..bits.len() { solver.add_clause(&[p.negative(), bits[b].lit(((1 << b) & i) > 0)]); } } } fn sat_at_most_one_by_key( cnf: &mut impl varisat::ExtendFormula, data: impl Iterator, ) -> HashMap> { // no two packages with the same links set let mut by_keys: HashMap> = HashMap::new(); for (p, v) in data { by_keys.entry(p).or_default().push(v) } for key in by_keys.values() { sat_at_most_one(cnf, key); } by_keys } /// Resolution can be reduced to the SAT problem. So this is an alternative implementation /// of the resolver that uses a SAT library for the hard work. This is intended to be easy to read, /// as compared to the real resolver. /// /// For the subset of functionality that are currently made by `registry_strategy` this will, /// find a valid resolution if one exists. The big thing that the real resolver does, /// that this one does not do is work with features and optional dependencies. 
/// /// The SAT library dose not optimize for the newer version, /// so the selected packages may not match the real resolver. #[derive(Clone)] pub struct SatResolve(Rc>); struct SatResolveInner { solver: varisat::Solver<'static>, var_for_is_packages_used: HashMap, by_name: HashMap<&'static str, Vec>, } impl SatResolve { pub fn new(registry: &[Summary]) -> Self { let mut cnf = varisat::CnfFormula::new(); let var_for_is_packages_used: HashMap = registry .iter() .map(|s| (s.package_id(), cnf.new_var())) .collect(); // no two packages with the same links set sat_at_most_one_by_key( &mut cnf, registry .iter() .map(|s| (s.links(), var_for_is_packages_used[&s.package_id()])) .filter(|(l, _)| l.is_some()), ); // no two semver compatible versions of the same package let by_activations_keys = sat_at_most_one_by_key( &mut cnf, var_for_is_packages_used .iter() .map(|(p, &v)| (p.as_activations_key(), v)), ); let mut by_name: HashMap<&'static str, Vec> = HashMap::new(); for p in registry.iter() { by_name .entry(p.name().as_str()) .or_default() .push(p.package_id()) } let empty_vec = vec![]; let mut graph: Graph = Graph::new(); let mut version_selected_for: HashMap< PackageId, HashMap>, > = HashMap::new(); // active packages need each of there `deps` to be satisfied for p in registry.iter() { graph.add(p.package_id()); for dep in p.dependencies() { // This can more easily be written as: // !is_active(p) or one of the things that match dep is_active // All the complexity, from here to the end, is to support public and private dependencies! 
let mut by_key: HashMap<_, Vec> = HashMap::new(); for &m in by_name .get(dep.package_name().as_str()) .unwrap_or(&empty_vec) .iter() .filter(|&p| dep.matches_id(*p)) { graph.link(p.package_id(), m); by_key .entry(m.as_activations_key()) .or_default() .push(var_for_is_packages_used[&m].positive()); } let keys: HashMap<_, _> = by_key.keys().map(|&k| (k, cnf.new_var())).collect(); // if `p` is active then we need to select one of the keys let matches: Vec<_> = keys .values() .map(|v| v.positive()) .chain(Some(var_for_is_packages_used[&p.package_id()].negative())) .collect(); cnf.add_clause(&matches); // if a key is active then we need to select one of the versions for (key, vars) in by_key.iter() { let mut matches = vars.clone(); matches.push(keys[key].negative()); cnf.add_clause(&matches); } version_selected_for .entry(p.package_id()) .or_default() .insert(dep.clone(), keys); } } let topological_order = graph.sort(); // we already ensure there is only one version for each `activations_key` so we can think of // `publicly_exports` as being in terms of a set of `activations_key`s let mut publicly_exports: HashMap<_, HashMap<_, varisat::Var>> = HashMap::new(); for &key in by_activations_keys.keys() { // everything publicly depends on itself let var = publicly_exports .entry(key) .or_default() .entry(key) .or_insert_with(|| cnf.new_var()); cnf.add_clause(&[var.positive()]); } // if a `dep` is public then `p` `publicly_exports` all the things that the selected version `publicly_exports` for &p in topological_order.iter() { if let Some(deps) = version_selected_for.get(&p) { let mut p_exports = publicly_exports.remove(&p.as_activations_key()).unwrap(); for (_, versions) in deps.iter().filter(|(d, _)| d.is_public()) { for (ver, sel) in versions { for (&export_pid, &export_var) in publicly_exports[ver].iter() { let our_var = p_exports.entry(export_pid).or_insert_with(|| cnf.new_var()); cnf.add_clause(&[ sel.negative(), export_var.negative(), our_var.positive(), ]); } } } 
publicly_exports.insert(p.as_activations_key(), p_exports); } } // we already ensure there is only one version for each `activations_key` so we can think of // `can_see` as being in terms of a set of `activations_key`s // and if `p` `publicly_exports` `export` then it `can_see` `export` let mut can_see: HashMap<_, HashMap<_, varisat::Var>> = HashMap::new(); // if `p` has a `dep` that selected `ver` then it `can_see` all the things that the selected version `publicly_exports` for (&p, deps) in version_selected_for.iter() { let p_can_see = can_see.entry(p).or_default(); for (_, versions) in deps.iter() { for (&ver, sel) in versions { for (&export_pid, &export_var) in publicly_exports[&ver].iter() { let our_var = p_can_see.entry(export_pid).or_insert_with(|| cnf.new_var()); cnf.add_clause(&[ sel.negative(), export_var.negative(), our_var.positive(), ]); } } } } // a package `can_see` only one version by each name for (_, see) in can_see.iter() { sat_at_most_one_by_key(&mut cnf, see.iter().map(|((name, _, _), &v)| (name, v))); } let mut solver = varisat::Solver::new(); solver.add_formula(&cnf); // We dont need to `solve` now. We know that "use nothing" will satisfy all the clauses so far. // But things run faster if we let it spend some time figuring out how the constraints interact before we add assumptions. 
solver .solve() .expect("docs say it can't error in default config"); SatResolve(Rc::new(RefCell::new(SatResolveInner { solver, var_for_is_packages_used, by_name, }))) } pub fn sat_resolve(&self, deps: &[Dependency]) -> bool { let mut s = self.0.borrow_mut(); let mut assumption = vec![]; let mut this_call = None; // the starting `deps` need to be satisfied for dep in deps.iter() { let empty_vec = vec![]; let matches: Vec = s .by_name .get(dep.package_name().as_str()) .unwrap_or(&empty_vec) .iter() .filter(|&p| dep.matches_id(*p)) .map(|p| s.var_for_is_packages_used[p].positive()) .collect(); if matches.is_empty() { return false; } else if matches.len() == 1 { assumption.extend_from_slice(&matches) } else { if this_call.is_none() { let new_var = s.solver.new_var(); this_call = Some(new_var); assumption.push(new_var.positive()); } let mut matches = matches; matches.push(this_call.unwrap().negative()); s.solver.add_clause(&matches); } } s.solver.assume(&assumption); s.solver .solve() .expect("docs say it can't error in default config") } pub fn sat_is_valid_solution(&self, pids: &[PackageId]) -> bool { let mut s = self.0.borrow_mut(); for p in pids { if p.name().as_str() != "root" && !s.var_for_is_packages_used.contains_key(p) { return false; } } let assumption: Vec<_> = s .var_for_is_packages_used .iter() .map(|(p, v)| v.lit(pids.contains(p))) .collect(); s.solver.assume(&assumption); s.solver .solve() .expect("docs say it can't error in default config") } fn use_packages(&self) -> Option { self.0.borrow().solver.model().map(|lits| { let lits: HashSet<_> = lits .iter() .filter(|l| l.is_positive()) .map(|l| l.var()) .collect(); let mut out = String::new(); out.push_str("used:\n"); for (p, v) in self.0.borrow().var_for_is_packages_used.iter() { if lits.contains(v) { writeln!(&mut out, " {}", p).unwrap(); } } out }) } } pub trait ToDep { fn to_dep(self) -> Dependency; } impl ToDep for &'static str { fn to_dep(self) -> Dependency { Dependency::parse(self, Some("1.0.0"), 
registry_loc()).unwrap() } } impl ToDep for Dependency { fn to_dep(self) -> Dependency { self } } pub trait ToPkgId { fn to_pkgid(&self) -> PackageId; } impl ToPkgId for PackageId { fn to_pkgid(&self) -> PackageId { *self } } impl<'a> ToPkgId for &'a str { fn to_pkgid(&self) -> PackageId { PackageId::new(*self, "1.0.0", registry_loc()).unwrap() } } impl, U: AsRef> ToPkgId for (T, U) { fn to_pkgid(&self) -> PackageId { let (name, vers) = self; PackageId::new(name.as_ref(), vers.as_ref(), registry_loc()).unwrap() } } #[macro_export] macro_rules! pkg { ($pkgid:expr => [$($deps:expr),+ $(,)* ]) => ({ let d: Vec = vec![$($deps.to_dep()),+]; $crate::pkg_dep($pkgid, d) }); ($pkgid:expr) => ({ $crate::pkg($pkgid) }) } fn registry_loc() -> SourceId { lazy_static::lazy_static! { static ref EXAMPLE_DOT_COM: SourceId = SourceId::for_registry(&"https://example.com".into_url().unwrap()).unwrap(); } *EXAMPLE_DOT_COM } pub fn pkg(name: T) -> Summary { pkg_dep(name, Vec::new()) } pub fn pkg_dep(name: T, dep: Vec) -> Summary { let pkgid = name.to_pkgid(); let link = if pkgid.name().ends_with("-sys") { Some(pkgid.name().as_str()) } else { None }; Summary::new( &Config::default().unwrap(), name.to_pkgid(), dep, &BTreeMap::new(), link, ) .unwrap() } pub fn pkg_id(name: &str) -> PackageId { PackageId::new(name, "1.0.0", registry_loc()).unwrap() } fn pkg_id_loc(name: &str, loc: &str) -> PackageId { let remote = loc.into_url(); let master = GitReference::Branch("master".to_string()); let source_id = SourceId::for_git(&remote.unwrap(), master).unwrap(); PackageId::new(name, "1.0.0", source_id).unwrap() } pub fn pkg_loc(name: &str, loc: &str) -> Summary { let link = if name.ends_with("-sys") { Some(name) } else { None }; Summary::new( &Config::default().unwrap(), pkg_id_loc(name, loc), Vec::new(), &BTreeMap::new(), link, ) .unwrap() } pub fn remove_dep(sum: &Summary, ind: usize) -> Summary { let mut deps = sum.dependencies().to_vec(); deps.remove(ind); // note: more things will need to be 
copied over in the future, but it works for now. Summary::new( &Config::default().unwrap(), sum.package_id(), deps, &BTreeMap::new(), sum.links().map(|a| a.as_str()), ) .unwrap() } pub fn dep(name: &str) -> Dependency { dep_req(name, "*") } pub fn dep_req(name: &str, req: &str) -> Dependency { Dependency::parse(name, Some(req), registry_loc()).unwrap() } pub fn dep_req_kind(name: &str, req: &str, kind: DepKind, public: bool) -> Dependency { let mut dep = dep_req(name, req); dep.set_kind(kind); dep.set_public(public); dep } pub fn dep_loc(name: &str, location: &str) -> Dependency { let url = location.into_url().unwrap(); let master = GitReference::Branch("master".to_string()); let source_id = SourceId::for_git(&url, master).unwrap(); Dependency::parse(name, Some("1.0.0"), source_id).unwrap() } pub fn dep_kind(name: &str, kind: DepKind) -> Dependency { dep(name).set_kind(kind).clone() } pub fn registry(pkgs: Vec) -> Vec { pkgs } pub fn names(names: &[P]) -> Vec { names.iter().map(|name| name.to_pkgid()).collect() } pub fn loc_names(names: &[(&'static str, &'static str)]) -> Vec { names .iter() .map(|&(name, loc)| pkg_id_loc(name, loc)) .collect() } /// By default `Summary` and `Dependency` have a very verbose `Debug` representation. /// This replaces with a representation that uses constructors from this file. /// /// If `registry_strategy` is improved to modify more fields /// then this needs to update to display the corresponding constructor. 
pub struct PrettyPrintRegistry(pub Vec); impl fmt::Debug for PrettyPrintRegistry { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "vec![")?; for s in &self.0 { if s.dependencies().is_empty() { write!(f, "pkg!((\"{}\", \"{}\")),", s.name(), s.version())?; } else { write!(f, "pkg!((\"{}\", \"{}\") => [", s.name(), s.version())?; for d in s.dependencies() { if d.kind() == DepKind::Normal && &d.version_req().to_string() == "*" && !d.is_public() { write!(f, "dep(\"{}\"),", d.name_in_toml())?; } else if d.kind() == DepKind::Normal && !d.is_public() { write!( f, "dep_req(\"{}\", \"{}\"),", d.name_in_toml(), d.version_req() )?; } else { write!( f, "dep_req_kind(\"{}\", \"{}\", {}, {}),", d.name_in_toml(), d.version_req(), match d.kind() { DepKind::Development => "DepKind::Development", DepKind::Build => "DepKind::Build", DepKind::Normal => "DepKind::Normal", }, d.is_public() )?; } } write!(f, "]),")?; } } write!(f, "]") } } #[test] fn meta_test_deep_pretty_print_registry() { assert_eq!( &format!( "{:?}", PrettyPrintRegistry(vec![ pkg!(("foo", "1.0.1") => [dep_req("bar", "1")]), pkg!(("foo", "1.0.0") => [dep_req("bar", "2")]), pkg!(("foo", "2.0.0") => [dep_req("bar", "*")]), pkg!(("bar", "1.0.0") => [dep_req("baz", "=1.0.2"), dep_req("other", "1")]), pkg!(("bar", "2.0.0") => [dep_req("baz", "=1.0.1")]), pkg!(("baz", "1.0.2") => [dep_req("other", "2")]), pkg!(("baz", "1.0.1")), pkg!(("cat", "1.0.2") => [dep_req_kind("other", "2", DepKind::Build, false)]), pkg!(("cat", "1.0.3") => [dep_req_kind("other", "2", DepKind::Development, false)]), pkg!(("dep_req", "1.0.0")), pkg!(("dep_req", "2.0.0")), ]) ), "vec![pkg!((\"foo\", \"1.0.1\") => [dep_req(\"bar\", \"^1\"),]),\ pkg!((\"foo\", \"1.0.0\") => [dep_req(\"bar\", \"^2\"),]),\ pkg!((\"foo\", \"2.0.0\") => [dep(\"bar\"),]),\ pkg!((\"bar\", \"1.0.0\") => [dep_req(\"baz\", \"=1.0.2\"),dep_req(\"other\", \"^1\"),]),\ pkg!((\"bar\", \"2.0.0\") => [dep_req(\"baz\", \"=1.0.1\"),]),\ pkg!((\"baz\", \"1.0.2\") => 
[dep_req(\"other\", \"^2\"),]),\ pkg!((\"baz\", \"1.0.1\")),\ pkg!((\"cat\", \"1.0.2\") => [dep_req_kind(\"other\", \"^2\", DepKind::Build, false),]),\ pkg!((\"cat\", \"1.0.3\") => [dep_req_kind(\"other\", \"^2\", DepKind::Development, false),]),\ pkg!((\"dep_req\", \"1.0.0\")),\ pkg!((\"dep_req\", \"2.0.0\")),]" ) } /// This generates a random registry index. /// Unlike vec((Name, Ver, vec((Name, VerRq), ..), ..) /// This strategy has a high probability of having valid dependencies pub fn registry_strategy( max_crates: usize, max_versions: usize, shrinkage: usize, ) -> impl Strategy { let name = string_regex("[A-Za-z][A-Za-z0-9_-]*(-sys)?").unwrap(); let raw_version = ..max_versions.pow(3); let version_from_raw = move |r: usize| { let major = ((r / max_versions) / max_versions) % max_versions; let minor = (r / max_versions) % max_versions; let patch = r % max_versions; format!("{}.{}.{}", major, minor, patch) }; // If this is false than the crate will depend on the nonexistent "bad" // instead of the complex set we generated for it. let allow_deps = prop::bool::weighted(0.99); let list_of_versions = btree_map(raw_version, allow_deps, 1..=max_versions).prop_map(move |ver| { ver.into_iter() .map(|a| (version_from_raw(a.0), a.1)) .collect::>() }); let list_of_crates_with_versions = btree_map(name, list_of_versions, 1..=max_crates).prop_map(|mut vers| { // root is the name of the thing being compiled // so it would be confusing to have it in the index vers.remove("root"); // bad is a name reserved for a dep that won't work vers.remove("bad"); vers }); // each version of each crate can depend on each crate smaller then it. // In theory shrinkage should be 2, but in practice we get better trees with a larger value. 
let max_deps = max_versions * (max_crates * (max_crates - 1)) / shrinkage; let raw_version_range = (any::(), any::()); let raw_dependency = ( any::(), any::(), raw_version_range, 0..=1, Just(false), // TODO: ^ this needs to be set back to `any::()` and work before public & private dependencies can stabilize ); fn order_index(a: Index, b: Index, size: usize) -> (usize, usize) { let (a, b) = (a.index(size), b.index(size)); (min(a, b), max(a, b)) } let list_of_raw_dependency = vec(raw_dependency, ..=max_deps); // By default a package depends only on other packages that have a smaller name, // this helps make sure that all things in the resulting index are DAGs. // If this is true then the DAG is maintained with grater instead. let reverse_alphabetical = any::().no_shrink(); ( list_of_crates_with_versions, list_of_raw_dependency, reverse_alphabetical, ) .prop_map( |(crate_vers_by_name, raw_dependencies, reverse_alphabetical)| { let list_of_pkgid: Vec<_> = crate_vers_by_name .iter() .flat_map(|(name, vers)| vers.iter().map(move |x| ((name.as_str(), &x.0), x.1))) .collect(); let len_all_pkgid = list_of_pkgid.len(); let mut dependency_by_pkgid = vec![vec![]; len_all_pkgid]; for (a, b, (c, d), k, p) in raw_dependencies { let (a, b) = order_index(a, b, len_all_pkgid); let (a, b) = if reverse_alphabetical { (b, a) } else { (a, b) }; let ((dep_name, _), _) = list_of_pkgid[a]; if (list_of_pkgid[b].0).0 == dep_name { continue; } let s = &crate_vers_by_name[dep_name]; let s_last_index = s.len() - 1; let (c, d) = order_index(c, d, s.len()); dependency_by_pkgid[b].push(dep_req_kind( dep_name, &if c == 0 && d == s_last_index { "*".to_string() } else if c == 0 { format!("<={}", s[d].0) } else if d == s_last_index { format!(">={}", s[c].0) } else if c == d { format!("={}", s[c].0) } else { format!(">={}, <={}", s[c].0, s[d].0) }, match k { 0 => DepKind::Normal, 1 => DepKind::Build, // => DepKind::Development, // Development has no impact so don't gen _ => panic!("bad index for 
DepKind"), }, p && k == 0, )) } let mut out: Vec = list_of_pkgid .into_iter() .zip(dependency_by_pkgid.into_iter()) .map(|(((name, ver), allow_deps), deps)| { pkg_dep( (name, ver).to_pkgid(), if !allow_deps { vec![dep_req("bad", "*")] } else { let mut deps = deps; deps.sort_by_key(|d| d.name_in_toml()); deps.dedup_by_key(|d| d.name_in_toml()); deps }, ) }) .collect(); if reverse_alphabetical { // make sure the complicated cases are at the end out.reverse(); } PrettyPrintRegistry(out) }, ) } /// This test is to test the generator to ensure /// that it makes registries with large dependency trees #[test] fn meta_test_deep_trees_from_strategy() { use proptest::strategy::ValueTree; use proptest::test_runner::TestRunner; let mut dis = [0; 21]; let strategy = registry_strategy(50, 20, 60); let mut test_runner = TestRunner::deterministic(); for _ in 0..128 { let PrettyPrintRegistry(input) = strategy .new_tree(&mut TestRunner::new_with_rng( Default::default(), test_runner.new_rng(), )) .unwrap() .current(); let reg = registry(input.clone()); for this in input.iter().rev().take(10) { let res = resolve( vec![dep_req(&this.name(), &format!("={}", this.version()))], ®, ); dis[res .as_ref() .map(|x| min(x.len(), dis.len()) - 1) .unwrap_or(0)] += 1; if dis.iter().all(|&x| x > 0) { return; } } } panic!( "In 1280 tries we did not see a wide enough distribution of dependency trees! 
dis: {:?}", dis ); } /// This test is to test the generator to ensure /// that it makes registries that include multiple versions of the same library #[test] fn meta_test_multiple_versions_strategy() { use proptest::strategy::ValueTree; use proptest::test_runner::TestRunner; let mut dis = [0; 10]; let strategy = registry_strategy(50, 20, 60); let mut test_runner = TestRunner::deterministic(); for _ in 0..128 { let PrettyPrintRegistry(input) = strategy .new_tree(&mut TestRunner::new_with_rng( Default::default(), test_runner.new_rng(), )) .unwrap() .current(); let reg = registry(input.clone()); for this in input.iter().rev().take(10) { let res = resolve( vec![dep_req(&this.name(), &format!("={}", this.version()))], ®, ); if let Ok(mut res) = res { let res_len = res.len(); res.sort_by_key(|s| s.name()); res.dedup_by_key(|s| s.name()); dis[min(res_len - res.len(), dis.len() - 1)] += 1; } if dis.iter().all(|&x| x > 0) { return; } } } panic!( "In 1280 tries we did not see a wide enough distribution of multiple versions of the same library! 
dis: {:?}", dis ); } /// Assert `xs` contains `elems` #[track_caller] pub fn assert_contains(xs: &[A], elems: &[A]) { for elem in elems { assert!(xs.contains(elem)); } } #[track_caller] pub fn assert_same(a: &[A], b: &[A]) { assert_eq!(a.len(), b.len()); assert_contains(b, a); } cargo-0.66.0/crates/resolver-tests/tests/000077500000000000000000000000001432416201200203155ustar00rootroot00000000000000cargo-0.66.0/crates/resolver-tests/tests/resolve.rs000066400000000000000000001441231432416201200223470ustar00rootroot00000000000000use cargo::core::dependency::DepKind; use cargo::core::Dependency; use cargo::util::Config; use cargo_util::is_ci; use resolver_tests::{ assert_contains, assert_same, dep, dep_kind, dep_loc, dep_req, dep_req_kind, loc_names, names, pkg, pkg_id, pkg_loc, registry, registry_strategy, remove_dep, resolve, resolve_and_validated, resolve_with_config, PrettyPrintRegistry, SatResolve, ToDep, ToPkgId, }; use proptest::prelude::*; // NOTE: proptest is a form of fuzz testing. It generates random input and makes sure that // certain universal truths are upheld. Therefore, it can pass when there is a problem, // but if it fails then there really is something wrong. When testing something as // complicated as the resolver, the problems can be very subtle and hard to generate. // We have had a history of these tests only failing on PRs long after a bug is introduced. // If you have one of these test fail please report it on #6258, // and if you did not change the resolver then feel free to retry without concern. proptest! { #![proptest_config(ProptestConfig { max_shrink_iters: if is_ci() || !atty::is(atty::Stream::Stderr) { // This attempts to make sure that CI will fail fast, 0 } else { // but that local builds will give a small clear test case. u32::MAX }, result_cache: prop::test_runner::basic_result_cache, .. ProptestConfig::default() })] /// NOTE: if you think this test has failed spuriously see the note at the top of this macro. 
#[test] fn prop_passes_validation( PrettyPrintRegistry(input) in registry_strategy(50, 20, 60) ) { let reg = registry(input.clone()); let sat_resolve = SatResolve::new(®); // there is only a small chance that any one // crate will be interesting. // So we try some of the most complicated. for this in input.iter().rev().take(20) { let _ = resolve_and_validated( vec![dep_req(&this.name(), &format!("={}", this.version()))], ®, Some(sat_resolve.clone()), ); } } /// NOTE: if you think this test has failed spuriously see the note at the top of this macro. #[test] fn prop_minimum_version_errors_the_same( PrettyPrintRegistry(input) in registry_strategy(50, 20, 60) ) { let mut config = Config::default().unwrap(); config.nightly_features_allowed = true; config .configure( 1, false, None, false, false, false, &None, &["minimal-versions".to_string()], &[], ) .unwrap(); let reg = registry(input.clone()); // there is only a small chance that any one // crate will be interesting. // So we try some of the most complicated. for this in input.iter().rev().take(10) { // minimal-versions change what order the candidates // are tried but not the existence of a solution let res = resolve( vec![dep_req(&this.name(), &format!("={}", this.version()))], ®, ); let mres = resolve_with_config( vec![dep_req(&this.name(), &format!("={}", this.version()))], ®, &config, ); prop_assert_eq!( res.is_ok(), mres.is_ok(), "minimal-versions and regular resolver disagree about whether `{} = \"={}\"` can resolve", this.name(), this.version() ) } } /// NOTE: if you think this test has failed spuriously see the note at the top of this macro. 
#[test] fn prop_removing_a_dep_cant_break( PrettyPrintRegistry(input) in registry_strategy(50, 20, 60), indexes_to_remove in prop::collection::vec((any::(), any::()), ..10) ) { let reg = registry(input.clone()); let mut removed_input = input.clone(); for (summary_idx, dep_idx) in indexes_to_remove { if !removed_input.is_empty() { let summary_idx = summary_idx.index(removed_input.len()); let deps = removed_input[summary_idx].dependencies(); if !deps.is_empty() { let new = remove_dep(&removed_input[summary_idx], dep_idx.index(deps.len())); removed_input[summary_idx] = new; } } } let removed_reg = registry(removed_input); // there is only a small chance that any one // crate will be interesting. // So we try some of the most complicated. for this in input.iter().rev().take(10) { if resolve( vec![dep_req(&this.name(), &format!("={}", this.version()))], ®, ).is_ok() { prop_assert!( resolve( vec![dep_req(&this.name(), &format!("={}", this.version()))], &removed_reg, ).is_ok(), "full index worked for `{} = \"={}\"` but removing some deps broke it!", this.name(), this.version(), ) } } } /// NOTE: if you think this test has failed spuriously see the note at the top of this macro. #[test] fn prop_limited_independence_of_irrelevant_alternatives( PrettyPrintRegistry(input) in registry_strategy(50, 20, 60), indexes_to_unpublish in prop::collection::vec(any::(), ..10) ) { let reg = registry(input.clone()); // there is only a small chance that any one // crate will be interesting. // So we try some of the most complicated. for this in input.iter().rev().take(10) { let res = resolve( vec![dep_req(&this.name(), &format!("={}", this.version()))], ®, ); match res { Ok(r) => { // If resolution was successful, then unpublishing a version of a crate // that was not selected should not change that. 
let not_selected: Vec<_> = input .iter() .cloned() .filter(|x| !r.contains(&x.package_id())) .collect(); if !not_selected.is_empty() { let indexes_to_unpublish: Vec<_> = indexes_to_unpublish.iter().map(|x| x.get(¬_selected)).collect(); let new_reg = registry( input .iter() .cloned() .filter(|x| !indexes_to_unpublish.contains(&x)) .collect(), ); let res = resolve( vec![dep_req(&this.name(), &format!("={}", this.version()))], &new_reg, ); // Note: that we can not assert that the two `res` are identical // as the resolver does depend on irrelevant alternatives. // It uses how constrained a dependency requirement is // to determine what order to evaluate requirements. prop_assert!( res.is_ok(), "unpublishing {:?} stopped `{} = \"={}\"` from working", indexes_to_unpublish.iter().map(|x| x.package_id()).collect::>(), this.name(), this.version() ) } } Err(_) => { // If resolution was unsuccessful, then it should stay unsuccessful // even if any version of a crate is unpublished. let indexes_to_unpublish: Vec<_> = indexes_to_unpublish.iter().map(|x| x.get(&input)).collect(); let new_reg = registry( input .iter() .cloned() .filter(|x| !indexes_to_unpublish.contains(&x)) .collect(), ); let res = resolve( vec![dep_req(&this.name(), &format!("={}", this.version()))], &new_reg, ); prop_assert!( res.is_err(), "full index did not work for `{} = \"={}\"` but unpublishing {:?} fixed it!", this.name(), this.version(), indexes_to_unpublish.iter().map(|x| x.package_id()).collect::>(), ) } } } } } #[test] #[should_panic(expected = "pub dep")] // The error handling is not yet implemented. 
fn pub_fail() { let input = vec![ pkg!(("a", "0.0.4")), pkg!(("a", "0.0.5")), pkg!(("e", "0.0.6") => [dep_req_kind("a", "<= 0.0.4", DepKind::Normal, true),]), pkg!(("kB", "0.0.3") => [dep_req("a", ">= 0.0.5"),dep("e"),]), ]; let reg = registry(input); assert!(resolve_and_validated(vec![dep("kB")], ®, None).is_err()); } #[test] fn basic_public_dependency() { let reg = registry(vec![ pkg!(("A", "0.1.0")), pkg!(("A", "0.2.0")), pkg!("B" => [dep_req_kind("A", "0.1", DepKind::Normal, true)]), pkg!("C" => [dep("A"), dep("B")]), ]); let res = resolve_and_validated(vec![dep("C")], ®, None).unwrap(); assert_same( &res, &names(&[ ("root", "1.0.0"), ("C", "1.0.0"), ("B", "1.0.0"), ("A", "0.1.0"), ]), ); } #[test] fn public_dependency_filling_in() { // The resolver has an optimization where if a candidate to resolve a dependency // has already bean activated then we skip looking at the candidates dependencies. // However, we have to be careful as the new path may make pub dependencies invalid. // Triggering this case requires dependencies to be resolved in a specific order. // Fuzzing found this unintuitive case, that triggers this unfortunate order of operations: // 1. `d`'s dep on `c` is resolved // 2. `d`'s dep on `a` is resolved with `0.1.1` // 3. `c`'s dep on `b` is resolved with `0.0.2` // 4. `b`'s dep on `a` is resolved with `0.0.6` no pub dev conflict as `b` is private to `c` // 5. `d`'s dep on `b` is resolved with `0.0.2` triggering the optimization. // Do we notice that `d` has a pub dep conflict on `a`? Lets try it and see. 
let reg = registry(vec![ pkg!(("a", "0.0.6")), pkg!(("a", "0.1.1")), pkg!(("b", "0.0.0") => [dep("bad")]), pkg!(("b", "0.0.1") => [dep("bad")]), pkg!(("b", "0.0.2") => [dep_req_kind("a", "=0.0.6", DepKind::Normal, true)]), pkg!("c" => [dep_req("b", ">=0.0.1")]), pkg!("d" => [dep("c"), dep("a"), dep("b")]), ]); let res = resolve_and_validated(vec![dep("d")], ®, None).unwrap(); assert_same( &res, &names(&[ ("root", "1.0.0"), ("d", "1.0.0"), ("c", "1.0.0"), ("b", "0.0.2"), ("a", "0.0.6"), ]), ); } #[test] fn public_dependency_filling_in_and_update() { // The resolver has an optimization where if a candidate to resolve a dependency // has already bean activated then we skip looking at the candidates dependencies. // However, we have to be careful as the new path may make pub dependencies invalid. // Triggering this case requires dependencies to be resolved in a specific order. // Fuzzing found this unintuitive case, that triggers this unfortunate order of operations: // 1. `D`'s dep on `B` is resolved // 2. `D`'s dep on `C` is resolved // 3. `B`'s dep on `A` is resolved with `0.0.0` // 4. `C`'s dep on `B` triggering the optimization. // So did we add `A 0.0.0` to the deps `C` can see? // Or are we going to resolve `C`'s dep on `A` with `0.0.2`? // Lets try it and see. let reg = registry(vec![ pkg!(("A", "0.0.0")), pkg!(("A", "0.0.2")), pkg!("B" => [dep_req_kind("A", "=0.0.0", DepKind::Normal, true),]), pkg!("C" => [dep("A"),dep("B")]), pkg!("D" => [dep("B"),dep("C")]), ]); let res = resolve_and_validated(vec![dep("D")], ®, None).unwrap(); assert_same( &res, &names(&[ ("root", "1.0.0"), ("D", "1.0.0"), ("C", "1.0.0"), ("B", "1.0.0"), ("A", "0.0.0"), ]), ); } #[test] fn public_dependency_skipping() { // When backtracking due to a failed dependency, if Cargo is // trying to be clever and skip irrelevant dependencies, care must // the effects of pub dep must be accounted for. 
let input = vec![ pkg!(("a", "0.2.0")), pkg!(("a", "2.0.0")), pkg!(("b", "0.0.0") => [dep("bad")]), pkg!(("b", "0.2.1") => [dep_req_kind("a", "0.2.0", DepKind::Normal, true)]), pkg!("c" => [dep("a"),dep("b")]), ]; let reg = registry(input); resolve_and_validated(vec![dep("c")], ®, None).unwrap(); } #[test] fn public_dependency_skipping_in_backtracking() { // When backtracking due to a failed dependency, if Cargo is // trying to be clever and skip irrelevant dependencies, care must // the effects of pub dep must be accounted for. let input = vec![ pkg!(("A", "0.0.0") => [dep("bad")]), pkg!(("A", "0.0.1") => [dep("bad")]), pkg!(("A", "0.0.2") => [dep("bad")]), pkg!(("A", "0.0.3") => [dep("bad")]), pkg!(("A", "0.0.4")), pkg!(("A", "0.0.5")), pkg!("B" => [dep_req_kind("A", ">= 0.0.3", DepKind::Normal, true)]), pkg!("C" => [dep_req("A", "<= 0.0.4"), dep("B")]), ]; let reg = registry(input); resolve_and_validated(vec![dep("C")], ®, None).unwrap(); } #[test] fn public_sat_topological_order() { let input = vec![ pkg!(("a", "0.0.1")), pkg!(("a", "0.0.0")), pkg!(("b", "0.0.1") => [dep_req_kind("a", "= 0.0.1", DepKind::Normal, true),]), pkg!(("b", "0.0.0") => [dep("bad"),]), pkg!("A" => [dep_req("a", "= 0.0.0"),dep_req_kind("b", "*", DepKind::Normal, true)]), ]; let reg = registry(input); assert!(resolve_and_validated(vec![dep("A")], ®, None).is_err()); } #[test] fn public_sat_unused_makes_things_pub() { let input = vec![ pkg!(("a", "0.0.1")), pkg!(("a", "0.0.0")), pkg!(("b", "8.0.1") => [dep_req_kind("a", "= 0.0.1", DepKind::Normal, true),]), pkg!(("b", "8.0.0") => [dep_req("a", "= 0.0.1"),]), pkg!("c" => [dep_req("b", "= 8.0.0"),dep_req("a", "= 0.0.0"),]), ]; let reg = registry(input); resolve_and_validated(vec![dep("c")], ®, None).unwrap(); } #[test] fn public_sat_unused_makes_things_pub_2() { let input = vec![ pkg!(("c", "0.0.2")), pkg!(("c", "0.0.1")), pkg!(("a-sys", "0.0.2")), pkg!(("a-sys", "0.0.1") => [dep_req_kind("c", "= 0.0.1", DepKind::Normal, true),]), pkg!("P" 
=> [dep_req_kind("a-sys", "*", DepKind::Normal, true),dep_req("c", "= 0.0.1"),]), pkg!("A" => [dep("P"),dep_req("c", "= 0.0.2"),]), ]; let reg = registry(input); resolve_and_validated(vec![dep("A")], ®, None).unwrap(); } #[test] #[should_panic(expected = "assertion failed: !name.is_empty()")] fn test_dependency_with_empty_name() { // Bug 5229, dependency-names must not be empty "".to_dep(); } #[test] fn test_resolving_empty_dependency_list() { let res = resolve(Vec::new(), ®istry(vec![])).unwrap(); assert_eq!(res, names(&["root"])); } #[test] fn test_resolving_only_package() { let reg = registry(vec![pkg!("foo")]); let res = resolve(vec![dep("foo")], ®).unwrap(); assert_same(&res, &names(&["root", "foo"])); } #[test] fn test_resolving_one_dep() { let reg = registry(vec![pkg!("foo"), pkg!("bar")]); let res = resolve(vec![dep("foo")], ®).unwrap(); assert_same(&res, &names(&["root", "foo"])); } #[test] fn test_resolving_multiple_deps() { let reg = registry(vec![pkg!("foo"), pkg!("bar"), pkg!("baz")]); let res = resolve(vec![dep("foo"), dep("baz")], ®).unwrap(); assert_same(&res, &names(&["root", "foo", "baz"])); } #[test] fn test_resolving_transitive_deps() { let reg = registry(vec![pkg!("foo"), pkg!("bar" => ["foo"])]); let res = resolve(vec![dep("bar")], ®).unwrap(); assert_same(&res, &names(&["root", "foo", "bar"])); } #[test] fn test_resolving_common_transitive_deps() { let reg = registry(vec![pkg!("foo" => ["bar"]), pkg!("bar")]); let res = resolve(vec![dep("foo"), dep("bar")], ®).unwrap(); assert_same(&res, &names(&["root", "foo", "bar"])); } #[test] fn test_resolving_with_same_name() { let list = vec![ pkg_loc("foo", "https://first.example.com"), pkg_loc("bar", "https://second.example.com"), ]; let reg = registry(list); let res = resolve( vec![ dep_loc("foo", "https://first.example.com"), dep_loc("bar", "https://second.example.com"), ], ®, ) .unwrap(); let mut names = loc_names(&[ ("foo", "https://first.example.com"), ("bar", "https://second.example.com"), ]); 
names.push(pkg_id("root")); assert_same(&res, &names); } #[test] fn test_resolving_with_dev_deps() { let reg = registry(vec![ pkg!("foo" => ["bar", dep_kind("baz", DepKind::Development)]), pkg!("baz" => ["bat", dep_kind("bam", DepKind::Development)]), pkg!("bar"), pkg!("bat"), ]); let res = resolve( vec![dep("foo"), dep_kind("baz", DepKind::Development)], ®, ) .unwrap(); assert_same(&res, &names(&["root", "foo", "bar", "baz", "bat"])); } #[test] fn resolving_with_many_versions() { let reg = registry(vec![pkg!(("foo", "1.0.1")), pkg!(("foo", "1.0.2"))]); let res = resolve(vec![dep("foo")], ®).unwrap(); assert_same(&res, &names(&[("root", "1.0.0"), ("foo", "1.0.2")])); } #[test] fn resolving_with_specific_version() { let reg = registry(vec![pkg!(("foo", "1.0.1")), pkg!(("foo", "1.0.2"))]); let res = resolve(vec![dep_req("foo", "=1.0.1")], ®).unwrap(); assert_same(&res, &names(&[("root", "1.0.0"), ("foo", "1.0.1")])); } #[test] fn test_resolving_maximum_version_with_transitive_deps() { let reg = registry(vec![ pkg!(("util", "1.2.2")), pkg!(("util", "1.0.0")), pkg!(("util", "1.1.1")), pkg!("foo" => [dep_req("util", "1.0.0")]), pkg!("bar" => [dep_req("util", ">=1.0.1")]), ]); let res = resolve(vec![dep_req("foo", "1.0.0"), dep_req("bar", "1.0.0")], ®).unwrap(); assert_contains( &res, &names(&[ ("root", "1.0.0"), ("foo", "1.0.0"), ("bar", "1.0.0"), ("util", "1.2.2"), ]), ); assert!(!res.contains(&("util", "1.0.1").to_pkgid())); assert!(!res.contains(&("util", "1.1.1").to_pkgid())); } #[test] fn test_resolving_minimum_version_with_transitive_deps() { let reg = registry(vec![ pkg!(("util", "1.2.2")), pkg!(("util", "1.0.0")), pkg!(("util", "1.1.1")), pkg!("foo" => [dep_req("util", "1.0.0")]), pkg!("bar" => [dep_req("util", ">=1.0.1")]), ]); let mut config = Config::default().unwrap(); // -Z minimal-versions // When the minimal-versions config option is specified then the lowest // possible version of a package should be selected. 
"util 1.0.0" can't be // selected because of the requirements of "bar", so the minimum version // must be 1.1.1. config.nightly_features_allowed = true; config .configure( 1, false, None, false, false, false, &None, &["minimal-versions".to_string()], &[], ) .unwrap(); let res = resolve_with_config( vec![dep_req("foo", "1.0.0"), dep_req("bar", "1.0.0")], ®, &config, ) .unwrap(); assert_contains( &res, &names(&[ ("root", "1.0.0"), ("foo", "1.0.0"), ("bar", "1.0.0"), ("util", "1.1.1"), ]), ); assert!(!res.contains(&("util", "1.2.2").to_pkgid())); assert!(!res.contains(&("util", "1.0.0").to_pkgid())); } #[test] fn resolving_incompat_versions() { let reg = registry(vec![ pkg!(("foo", "1.0.1")), pkg!(("foo", "1.0.2")), pkg!("bar" => [dep_req("foo", "=1.0.2")]), ]); assert!(resolve(vec![dep_req("foo", "=1.0.1"), dep("bar")], ®).is_err()); } #[test] fn resolving_wrong_case_from_registry() { // In the future we may #5678 allow this to happen. // For back compatibility reasons, we probably won't. // But we may want to future prove ourselves by understanding it. // This test documents the current behavior. let reg = registry(vec![pkg!(("foo", "1.0.0")), pkg!("bar" => ["Foo"])]); assert!(resolve(vec![dep("bar")], ®).is_err()); } #[test] fn resolving_mis_hyphenated_from_registry() { // In the future we may #2775 allow this to happen. // For back compatibility reasons, we probably won't. // But we may want to future prove ourselves by understanding it. // This test documents the current behavior. 
let reg = registry(vec![pkg!(("fo-o", "1.0.0")), pkg!("bar" => ["fo_o"])]); assert!(resolve(vec![dep("bar")], ®).is_err()); } #[test] fn resolving_backtrack() { let reg = registry(vec![ pkg!(("foo", "1.0.2") => [dep("bar")]), pkg!(("foo", "1.0.1") => [dep("baz")]), pkg!("bar" => [dep_req("foo", "=2.0.2")]), pkg!("baz"), ]); let res = resolve(vec![dep_req("foo", "^1")], ®).unwrap(); assert_contains( &res, &names(&[("root", "1.0.0"), ("foo", "1.0.1"), ("baz", "1.0.0")]), ); } #[test] fn resolving_backtrack_features() { // test for cargo/issues/4347 let mut bad = dep("bar"); bad.set_features(vec!["bad"]); let reg = registry(vec![ pkg!(("foo", "1.0.2") => [bad]), pkg!(("foo", "1.0.1") => [dep("bar")]), pkg!("bar"), ]); let res = resolve(vec![dep_req("foo", "^1")], ®).unwrap(); assert_contains( &res, &names(&[("root", "1.0.0"), ("foo", "1.0.1"), ("bar", "1.0.0")]), ); } #[test] fn resolving_allows_multiple_compatible_versions() { let reg = registry(vec![ pkg!(("foo", "1.0.0")), pkg!(("foo", "2.0.0")), pkg!(("foo", "0.1.0")), pkg!(("foo", "0.2.0")), pkg!("bar" => ["d1", "d2", "d3", "d4"]), pkg!("d1" => [dep_req("foo", "1")]), pkg!("d2" => [dep_req("foo", "2")]), pkg!("d3" => [dep_req("foo", "0.1")]), pkg!("d4" => [dep_req("foo", "0.2")]), ]); let res = resolve(vec![dep("bar")], ®).unwrap(); assert_same( &res, &names(&[ ("root", "1.0.0"), ("foo", "1.0.0"), ("foo", "2.0.0"), ("foo", "0.1.0"), ("foo", "0.2.0"), ("d1", "1.0.0"), ("d2", "1.0.0"), ("d3", "1.0.0"), ("d4", "1.0.0"), ("bar", "1.0.0"), ]), ); } #[test] fn resolving_with_deep_backtracking() { let reg = registry(vec![ pkg!(("foo", "1.0.1") => [dep_req("bar", "1")]), pkg!(("foo", "1.0.0") => [dep_req("bar", "2")]), pkg!(("bar", "1.0.0") => [dep_req("baz", "=1.0.2"), dep_req("other", "1")]), pkg!(("bar", "2.0.0") => [dep_req("baz", "=1.0.1")]), pkg!(("baz", "1.0.2") => [dep_req("other", "2")]), pkg!(("baz", "1.0.1")), pkg!(("dep_req", "1.0.0")), pkg!(("dep_req", "2.0.0")), ]); let res = resolve(vec![dep_req("foo", 
"1")], ®).unwrap(); assert_same( &res, &names(&[ ("root", "1.0.0"), ("foo", "1.0.0"), ("bar", "2.0.0"), ("baz", "1.0.1"), ]), ); } #[test] fn resolving_with_sys_crates() { // This is based on issues/4902 // With `l` a normal library we get 2copies so everyone gets the newest compatible. // But `l-sys` a library with a links attribute we make sure there is only one. let reg = registry(vec![ pkg!(("l-sys", "0.9.1")), pkg!(("l-sys", "0.10.0")), pkg!(("l", "0.9.1")), pkg!(("l", "0.10.0")), pkg!(("d", "1.0.0") => [dep_req("l-sys", ">=0.8.0, <=0.10.0"), dep_req("l", ">=0.8.0, <=0.10.0")]), pkg!(("r", "1.0.0") => [dep_req("l-sys", "0.9"), dep_req("l", "0.9")]), ]); let res = resolve(vec![dep_req("d", "1"), dep_req("r", "1")], ®).unwrap(); assert_same( &res, &names(&[ ("root", "1.0.0"), ("d", "1.0.0"), ("r", "1.0.0"), ("l-sys", "0.9.1"), ("l", "0.9.1"), ("l", "0.10.0"), ]), ); } #[test] fn resolving_with_constrained_sibling_backtrack_parent() { // There is no point in considering all of the backtrack_trap{1,2} // candidates since they can't change the result of failing to // resolve 'constrained'. Cargo should (ideally) skip past them and resume // resolution once the activation of the parent, 'bar', is rolled back. // Note that the traps are slightly more constrained to make sure they // get picked first. let mut reglist = vec![ pkg!(("foo", "1.0.0") => [dep_req("bar", "1.0"), dep_req("constrained", "=1.0.0")]), pkg!(("bar", "1.0.0") => [dep_req("backtrack_trap1", "1.0.2"), dep_req("backtrack_trap2", "1.0.2"), dep_req("constrained", "1.0.0")]), pkg!(("constrained", "1.0.0")), pkg!(("backtrack_trap1", "1.0.0")), pkg!(("backtrack_trap2", "1.0.0")), ]; // Bump this to make the test harder - it adds more versions of bar that will // fail to resolve, and more versions of the traps to consider. 
const NUM_BARS_AND_TRAPS: usize = 50; // minimum 2 for i in 1..NUM_BARS_AND_TRAPS { let vsn = format!("1.0.{}", i); reglist.push( pkg!(("bar", vsn.clone()) => [dep_req("backtrack_trap1", "1.0.2"), dep_req("backtrack_trap2", "1.0.2"), dep_req("constrained", "1.0.1")]), ); reglist.push(pkg!(("backtrack_trap1", vsn.clone()))); reglist.push(pkg!(("backtrack_trap2", vsn.clone()))); reglist.push(pkg!(("constrained", vsn.clone()))); } let reg = registry(reglist); let res = resolve(vec![dep_req("foo", "1")], ®).unwrap(); assert_contains( &res, &names(&[ ("root", "1.0.0"), ("foo", "1.0.0"), ("bar", "1.0.0"), ("constrained", "1.0.0"), ]), ); } #[test] fn resolving_with_many_equivalent_backtracking() { let mut reglist = Vec::new(); const DEPTH: usize = 200; const BRANCHING_FACTOR: usize = 100; // Each level depends on the next but the last level does not exist. // Without cashing we need to test every path to the last level O(BRANCHING_FACTOR ^ DEPTH) // and this test will time out. With cashing we need to discover that none of these // can be activated O(BRANCHING_FACTOR * DEPTH) for l in 0..DEPTH { let name = format!("level{}", l); let next = format!("level{}", l + 1); for i in 1..BRANCHING_FACTOR { let vsn = format!("1.0.{}", i); reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep(next.as_str())])); } } let reg = registry(reglist.clone()); let res = resolve(vec![dep("level0")], ®); assert!(res.is_err()); // It is easy to write code that quickly returns an error. // Lets make sure we can find a good answer if it is there. reglist.push(pkg!(("level0", "1.0.0"))); let reg = registry(reglist.clone()); let res = resolve(vec![dep("level0")], ®).unwrap(); assert_contains(&res, &names(&[("root", "1.0.0"), ("level0", "1.0.0")])); // Make sure we have not special case no candidates. 
reglist.push(pkg!(("constrained", "1.1.0"))); reglist.push(pkg!(("constrained", "1.0.0"))); reglist.push( pkg!((format!("level{}", DEPTH).as_str(), "1.0.0") => [dep_req("constrained", "=1.0.0")]), ); let reg = registry(reglist.clone()); let res = resolve(vec![dep("level0"), dep("constrained")], ®).unwrap(); assert_contains( &res, &names(&[ ("root", "1.0.0"), ("level0", "1.0.0"), ("constrained", "1.1.0"), ]), ); let reg = registry(reglist.clone()); let res = resolve(vec![dep_req("level0", "1.0.1"), dep("constrained")], ®).unwrap(); assert_contains( &res, &names(&[ ("root", "1.0.0"), (format!("level{}", DEPTH).as_str(), "1.0.0"), ("constrained", "1.0.0"), ]), ); let reg = registry(reglist); let res = resolve( vec![dep_req("level0", "1.0.1"), dep_req("constrained", "1.1.0")], ®, ); assert!(res.is_err()); } #[test] fn resolving_with_deep_traps() { let mut reglist = Vec::new(); const DEPTH: usize = 200; const BRANCHING_FACTOR: usize = 100; // Each backtrack_trap depends on the next, and adds a backtrack frame. // None of witch is going to help with `bad`. for l in 0..DEPTH { let name = format!("backtrack_trap{}", l); let next = format!("backtrack_trap{}", l + 1); for i in 1..BRANCHING_FACTOR { let vsn = format!("1.0.{}", i); reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep(next.as_str())])); } } { let name = format!("backtrack_trap{}", DEPTH); for i in 1..BRANCHING_FACTOR { let vsn = format!("1.0.{}", i); reglist.push(pkg!((name.as_str(), vsn.as_str()))); } } { // slightly less constrained to make sure `cloaking` gets picked last. 
for i in 1..(BRANCHING_FACTOR + 10) { let vsn = format!("1.0.{}", i); reglist.push(pkg!(("cloaking", vsn.as_str()) => [dep_req("bad", "1.0.1")])); } } let reg = registry(reglist); let res = resolve(vec![dep("backtrack_trap0"), dep("cloaking")], ®); assert!(res.is_err()); } #[test] fn resolving_with_constrained_cousins_backtrack() { let mut reglist = Vec::new(); const DEPTH: usize = 100; const BRANCHING_FACTOR: usize = 50; // Each backtrack_trap depends on the next. // The last depends on a specific ver of constrained. for l in 0..DEPTH { let name = format!("backtrack_trap{}", l); let next = format!("backtrack_trap{}", l + 1); for i in 1..BRANCHING_FACTOR { let vsn = format!("1.0.{}", i); reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep(next.as_str())])); } } { let name = format!("backtrack_trap{}", DEPTH); for i in 1..BRANCHING_FACTOR { let vsn = format!("1.0.{}", i); reglist.push( pkg!((name.as_str(), vsn.as_str()) => [dep_req("constrained", ">=1.1.0, <=2.0.0")]), ); } } { // slightly less constrained to make sure `constrained` gets picked last. for i in 0..(BRANCHING_FACTOR + 10) { let vsn = format!("1.0.{}", i); reglist.push(pkg!(("constrained", vsn.as_str()))); } reglist.push(pkg!(("constrained", "1.1.0"))); reglist.push(pkg!(("constrained", "2.0.0"))); reglist.push(pkg!(("constrained", "2.0.1"))); } reglist.push(pkg!(("cloaking", "1.0.0") => [dep_req("constrained", "~1.0.0")])); let reg = registry(reglist.clone()); // `backtrack_trap0 = "*"` is a lot of ways of saying `constrained = ">=1.1.0, <=2.0.0"` // but `constrained= "2.0.1"` is already picked. // Only then to try and solve `constrained= "~1.0.0"` which is incompatible. let res = resolve( vec![ dep("backtrack_trap0"), dep_req("constrained", "2.0.1"), dep("cloaking"), ], ®, ); assert!(res.is_err()); // Each level depends on the next but the last depends on incompatible deps. // Let's make sure that we can cache that a dep has incompatible deps. 
for l in 0..DEPTH { let name = format!("level{}", l); let next = format!("level{}", l + 1); for i in 1..BRANCHING_FACTOR { let vsn = format!("1.0.{}", i); reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep(next.as_str())])); } } reglist.push( pkg!((format!("level{}", DEPTH).as_str(), "1.0.0") => [dep("backtrack_trap0"), dep("cloaking") ]), ); let reg = registry(reglist); let res = resolve(vec![dep("level0"), dep_req("constrained", "2.0.1")], ®); assert!(res.is_err()); let res = resolve(vec![dep("level0"), dep_req("constrained", "2.0.0")], ®).unwrap(); assert_contains( &res, &names(&[("constrained", "2.0.0"), ("cloaking", "1.0.0")]), ); } #[test] fn resolving_with_constrained_sibling_backtrack_activation() { // It makes sense to resolve most-constrained deps first, but // with that logic the backtrack traps here come between the two // attempted resolutions of 'constrained'. When backtracking, // cargo should skip past them and resume resolution once the // number of activations for 'constrained' changes. let mut reglist = vec![ pkg!(("foo", "1.0.0") => [dep_req("bar", "=1.0.0"), dep_req("backtrack_trap1", "1.0"), dep_req("backtrack_trap2", "1.0"), dep_req("constrained", "<=1.0.60")]), pkg!(("bar", "1.0.0") => [dep_req("constrained", ">=1.0.60")]), ]; // Bump these to make the test harder, but you'll also need to // change the version constraints on `constrained` above. To correctly // exercise Cargo, the relationship between the values is: // NUM_CONSTRAINED - vsn < NUM_TRAPS < vsn // to make sure the traps are resolved between `constrained`. 
const NUM_TRAPS: usize = 45; // min 1 const NUM_CONSTRAINED: usize = 100; // min 1 for i in 0..NUM_TRAPS { let vsn = format!("1.0.{}", i); reglist.push(pkg!(("backtrack_trap1", vsn.clone()))); reglist.push(pkg!(("backtrack_trap2", vsn.clone()))); } for i in 0..NUM_CONSTRAINED { let vsn = format!("1.0.{}", i); reglist.push(pkg!(("constrained", vsn.clone()))); } let reg = registry(reglist); let res = resolve(vec![dep_req("foo", "1")], ®).unwrap(); assert_contains( &res, &names(&[ ("root", "1.0.0"), ("foo", "1.0.0"), ("bar", "1.0.0"), ("constrained", "1.0.60"), ]), ); } #[test] fn resolving_with_public_constrained_sibling() { // It makes sense to resolve most-constrained deps first, but // with that logic the backtrack traps here come between the two // attempted resolutions of 'constrained'. When backtracking, // cargo should skip past them and resume resolution once the // number of activations for 'constrained' changes. let mut reglist = vec![ pkg!(("foo", "1.0.0") => [dep_req("bar", "=1.0.0"), dep_req("backtrack_trap1", "1.0"), dep_req("backtrack_trap2", "1.0"), dep_req("constrained", "<=60")]), pkg!(("bar", "1.0.0") => [dep_req_kind("constrained", ">=60", DepKind::Normal, true)]), ]; // Bump these to make the test harder, but you'll also need to // change the version constraints on `constrained` above. To correctly // exercise Cargo, the relationship between the values is: // NUM_CONSTRAINED - vsn < NUM_TRAPS < vsn // to make sure the traps are resolved between `constrained`. 
const NUM_TRAPS: usize = 45; // min 1 const NUM_CONSTRAINED: usize = 100; // min 1 for i in 0..NUM_TRAPS { let vsn = format!("1.0.{}", i); reglist.push(pkg!(("backtrack_trap1", vsn.clone()))); reglist.push(pkg!(("backtrack_trap2", vsn.clone()))); } for i in 0..NUM_CONSTRAINED { let vsn = format!("{}.0.0", i); reglist.push(pkg!(("constrained", vsn.clone()))); } let reg = registry(reglist); let _ = resolve_and_validated(vec![dep_req("foo", "1")], ®, None); } #[test] fn resolving_with_constrained_sibling_transitive_dep_effects() { // When backtracking due to a failed dependency, if Cargo is // trying to be clever and skip irrelevant dependencies, care must // be taken to not miss the transitive effects of alternatives. E.g. // in the right-to-left resolution of the graph below, B may // affect whether D is successfully resolved. // // A // / | \ // B C D // | | // C D let reg = registry(vec![ pkg!(("A", "1.0.0") => [dep_req("B", "1.0"), dep_req("C", "1.0"), dep_req("D", "1.0.100")]), pkg!(("B", "1.0.0") => [dep_req("C", ">=1.0.0")]), pkg!(("B", "1.0.1") => [dep_req("C", ">=1.0.1")]), pkg!(("C", "1.0.0") => [dep_req("D", "1.0.0")]), pkg!(("C", "1.0.1") => [dep_req("D", ">=1.0.1,<1.0.100")]), pkg!(("C", "1.0.2") => [dep_req("D", ">=1.0.2,<1.0.100")]), pkg!(("D", "1.0.0")), pkg!(("D", "1.0.1")), pkg!(("D", "1.0.2")), pkg!(("D", "1.0.100")), pkg!(("D", "1.0.101")), pkg!(("D", "1.0.102")), pkg!(("D", "1.0.103")), pkg!(("D", "1.0.104")), pkg!(("D", "1.0.105")), ]); let res = resolve(vec![dep_req("A", "1")], ®).unwrap(); assert_same( &res, &names(&[ ("root", "1.0.0"), ("A", "1.0.0"), ("B", "1.0.0"), ("C", "1.0.0"), ("D", "1.0.105"), ]), ); } #[test] fn incomplete_information_skipping() { // When backtracking due to a failed dependency, if Cargo is // trying to be clever and skip irrelevant dependencies, care must // be taken to not miss the transitive effects of alternatives. 
// Fuzzing discovered that for some reason cargo was skipping based // on incomplete information in the following case: // minimized bug found in: // https://github.com/rust-lang/cargo/commit/003c29b0c71e5ea28fbe8e72c148c755c9f3f8d9 let input = vec![ pkg!(("a", "1.0.0")), pkg!(("a", "1.1.0")), pkg!("b" => [dep("a")]), pkg!(("c", "1.0.0")), pkg!(("c", "1.1.0")), pkg!("d" => [dep_req("c", "=1.0")]), pkg!(("e", "1.0.0")), pkg!(("e", "1.1.0") => [dep_req("c", "1.1")]), pkg!("to_yank"), pkg!(("f", "1.0.0") => [ dep("to_yank"), dep("d"), ]), pkg!(("f", "1.1.0") => [dep("d")]), pkg!("g" => [ dep("b"), dep("e"), dep("f"), ]), ]; let reg = registry(input.clone()); let res = resolve(vec![dep("g")], ®).unwrap(); let package_to_yank = "to_yank".to_pkgid(); // this package is not used in the resolution. assert!(!res.contains(&package_to_yank)); // so when we yank it let new_reg = registry( input .iter() .cloned() .filter(|x| package_to_yank != x.package_id()) .collect(), ); assert_eq!(input.len(), new_reg.len() + 1); // it should still build assert!(resolve(vec![dep("g")], &new_reg).is_ok()); } #[test] fn incomplete_information_skipping_2() { // When backtracking due to a failed dependency, if Cargo is // trying to be clever and skip irrelevant dependencies, care must // be taken to not miss the transitive effects of alternatives. 
// Fuzzing discovered that for some reason cargo was skipping based // on incomplete information in the following case: // https://github.com/rust-lang/cargo/commit/003c29b0c71e5ea28fbe8e72c148c755c9f3f8d9 let input = vec![ pkg!(("b", "3.8.10")), pkg!(("b", "8.7.4")), pkg!(("b", "9.4.6")), pkg!(("c", "1.8.8")), pkg!(("c", "10.2.5")), pkg!(("d", "4.1.2") => [ dep_req("bad", "=6.10.9"), ]), pkg!(("d", "5.5.6")), pkg!(("d", "5.6.10")), pkg!(("to_yank", "8.0.1")), pkg!(("to_yank", "8.8.1")), pkg!(("e", "4.7.8") => [ dep_req("d", ">=5.5.6, <=5.6.10"), dep_req("to_yank", "=8.0.1"), ]), pkg!(("e", "7.4.9") => [ dep_req("bad", "=4.7.5"), ]), pkg!("f" => [ dep_req("d", ">=4.1.2, <=5.5.6"), ]), pkg!("g" => [ dep("bad"), ]), pkg!(("h", "3.8.3") => [ dep("g"), ]), pkg!(("h", "6.8.3") => [ dep("f"), ]), pkg!(("h", "8.1.9") => [ dep_req("to_yank", "=8.8.1"), ]), pkg!("i" => [ dep("b"), dep("c"), dep("e"), dep("h"), ]), ]; let reg = registry(input.clone()); let res = resolve(vec![dep("i")], ®).unwrap(); let package_to_yank = ("to_yank", "8.8.1").to_pkgid(); // this package is not used in the resolution. assert!(!res.contains(&package_to_yank)); // so when we yank it let new_reg = registry( input .iter() .cloned() .filter(|x| package_to_yank != x.package_id()) .collect(), ); assert_eq!(input.len(), new_reg.len() + 1); // it should still build assert!(resolve(vec![dep("i")], &new_reg).is_ok()); } #[test] fn incomplete_information_skipping_3() { // When backtracking due to a failed dependency, if Cargo is // trying to be clever and skip irrelevant dependencies, care must // be taken to not miss the transitive effects of alternatives. // Fuzzing discovered that for some reason cargo was skipping based // on incomplete information in the following case: // minimized bug found in: // https://github.com/rust-lang/cargo/commit/003c29b0c71e5ea28fbe8e72c148c755c9f3f8d9 let input = vec![ pkg! {("to_yank", "3.0.3")}, pkg! {("to_yank", "3.3.0")}, pkg! {("to_yank", "3.3.1")}, pkg! 
{("a", "3.3.0") => [ dep_req("to_yank", "=3.0.3"), ] }, pkg! {("a", "3.3.2") => [ dep_req("to_yank", "<=3.3.0"), ] }, pkg! {("b", "0.1.3") => [ dep_req("a", "=3.3.0"), ] }, pkg! {("b", "2.0.2") => [ dep_req("to_yank", "3.3.0"), dep("a"), ] }, pkg! {("b", "2.3.3") => [ dep_req("to_yank", "3.3.0"), dep_req("a", "=3.3.0"), ] }, ]; let reg = registry(input.clone()); let res = resolve(vec![dep("b")], ®).unwrap(); let package_to_yank = ("to_yank", "3.0.3").to_pkgid(); // this package is not used in the resolution. assert!(!res.contains(&package_to_yank)); // so when we yank it let new_reg = registry( input .iter() .cloned() .filter(|x| package_to_yank != x.package_id()) .collect(), ); assert_eq!(input.len(), new_reg.len() + 1); // it should still build assert!(resolve(vec![dep("b")], &new_reg).is_ok()); } #[test] fn resolving_but_no_exists() { let reg = registry(vec![]); let res = resolve(vec![dep_req("foo", "1")], ®); assert!(res.is_err()); assert_eq!( res.err().unwrap().to_string(), "no matching package named `foo` found\n\ location searched: registry `https://example.com/`\n\ required by package `root v1.0.0 (registry `https://example.com/`)`\ " ); } #[test] fn resolving_cycle() { let reg = registry(vec![pkg!("foo" => ["foo"])]); let _ = resolve(vec![dep_req("foo", "1")], ®); } #[test] fn hard_equality() { let reg = registry(vec![ pkg!(("foo", "1.0.1")), pkg!(("foo", "1.0.0")), pkg!(("bar", "1.0.0") => [dep_req("foo", "1.0.0")]), ]); let res = resolve(vec![dep_req("bar", "1"), dep_req("foo", "=1.0.0")], ®).unwrap(); assert_same( &res, &names(&[("root", "1.0.0"), ("foo", "1.0.0"), ("bar", "1.0.0")]), ); } #[test] fn large_conflict_cache() { let mut input = vec![ pkg!(("last", "0.0.0") => [dep("bad")]), // just to make sure last is less constrained ]; let mut root_deps = vec![dep("last")]; const NUM_VERSIONS: u8 = 20; for name in 0..=NUM_VERSIONS { // a large number of conflicts can easily be generated by a sys crate. 
let sys_name = format!("{}-sys", (b'a' + name) as char); let in_len = input.len(); input.push(pkg!(("last", format!("{}.0.0", in_len)) => [dep_req(&sys_name, "=0.0.0")])); root_deps.push(dep_req(&sys_name, ">= 0.0.1")); // a large number of conflicts can also easily be generated by a major release version. let plane_name = format!("{}", (b'a' + name) as char); let in_len = input.len(); input.push(pkg!(("last", format!("{}.0.0", in_len)) => [dep_req(&plane_name, "=1.0.0")])); root_deps.push(dep_req(&plane_name, ">= 1.0.1")); for i in 0..=NUM_VERSIONS { input.push(pkg!((&sys_name, format!("{}.0.0", i)))); input.push(pkg!((&plane_name, format!("1.0.{}", i)))); } } let reg = registry(input); let _ = resolve(root_deps, ®); } #[test] fn off_by_one_bug() { let input = vec![ pkg!(("A-sys", "0.0.1")), pkg!(("A-sys", "0.0.4")), pkg!(("A-sys", "0.0.6")), pkg!(("A-sys", "0.0.7")), pkg!(("NA", "0.0.0") => [dep_req("A-sys", "<= 0.0.5"),]), pkg!(("NA", "0.0.1") => [dep_req("A-sys", ">= 0.0.6, <= 0.0.8"),]), pkg!(("a", "0.0.1")), pkg!(("a", "0.0.2")), pkg!(("aa", "0.0.0") => [dep_req("A-sys", ">= 0.0.4, <= 0.0.6"),dep_req("NA", "<= 0.0.0"),]), pkg!(("f", "0.0.3") => [dep("NA"),dep_req("a", "<= 0.0.2"),dep("aa"),]), ]; let reg = registry(input); let _ = resolve_and_validated(vec![dep("f")], ®, None); } #[test] fn conflict_store_bug() { let input = vec![ pkg!(("A", "0.0.3")), pkg!(("A", "0.0.5")), pkg!(("A", "0.0.9") => [dep("bad"),]), pkg!(("A", "0.0.10") => [dep("bad"),]), pkg!(("L-sys", "0.0.1") => [dep("bad"),]), pkg!(("L-sys", "0.0.5")), pkg!(("R", "0.0.4") => [ dep_req("L-sys", "= 0.0.5"), ]), pkg!(("R", "0.0.6")), pkg!(("a-sys", "0.0.5")), pkg!(("a-sys", "0.0.11")), pkg!(("c", "0.0.12") => [ dep_req("R", ">= 0.0.3, <= 0.0.4"), ]), pkg!(("c", "0.0.13") => [ dep_req("a-sys", ">= 0.0.8, <= 0.0.11"), ]), pkg!(("c0", "0.0.6") => [ dep_req("L-sys", "<= 0.0.2"), ]), pkg!(("c0", "0.0.10") => [ dep_req("A", ">= 0.0.9, <= 0.0.10"), dep_req("a-sys", "= 0.0.5"), ]), pkg!("j" => [ 
dep_req("A", ">= 0.0.3, <= 0.0.5"), dep_req("R", ">=0.0.4, <= 0.0.6"), dep_req("c", ">= 0.0.9"), dep_req("c0", ">= 0.0.6"), ]), ]; let reg = registry(input); let _ = resolve_and_validated(vec![dep("j")], ®, None); } #[test] fn conflict_store_more_then_one_match() { let input = vec![ pkg!(("A", "0.0.0")), pkg!(("A", "0.0.1")), pkg!(("A-sys", "0.0.0")), pkg!(("A-sys", "0.0.1")), pkg!(("A-sys", "0.0.2")), pkg!(("A-sys", "0.0.3")), pkg!(("A-sys", "0.0.12")), pkg!(("A-sys", "0.0.16")), pkg!(("B-sys", "0.0.0")), pkg!(("B-sys", "0.0.1")), pkg!(("B-sys", "0.0.2") => [dep_req("A-sys", "= 0.0.12"),]), pkg!(("BA-sys", "0.0.0") => [dep_req("A-sys","= 0.0.16"),]), pkg!(("BA-sys", "0.0.1") => [dep("bad"),]), pkg!(("BA-sys", "0.0.2") => [dep("bad"),]), pkg!("nA" => [ dep("A"), dep_req("A-sys", "<= 0.0.3"), dep("B-sys"), dep("BA-sys"), ]), ]; let reg = registry(input); let _ = resolve_and_validated(vec![dep("nA")], ®, None); } #[test] fn bad_lockfile_from_8249() { let input = vec![ pkg!(("a-sys", "0.2.0")), pkg!(("a-sys", "0.1.0")), pkg!(("b", "0.1.0") => [ dep_req("a-sys", "0.1"), // should be optional: true, but not deeded for now ]), pkg!(("c", "1.0.0") => [ dep_req("b", "=0.1.0"), ]), pkg!("foo" => [ dep_req("a-sys", "=0.2.0"), { let mut b = dep_req("b", "=0.1.0"); b.set_features(vec!["a-sys"]); b }, dep_req("c", "=1.0.0"), ]), ]; let reg = registry(input); let _ = resolve_and_validated(vec![dep("foo")], ®, None); } #[test] fn cyclic_good_error_message() { let input = vec![ pkg!(("A", "0.0.0") => [dep("C")]), pkg!(("B", "0.0.0") => [dep("C")]), pkg!(("C", "0.0.0") => [dep("A")]), ]; let reg = registry(input); let error = resolve(vec![dep("A"), dep("B")], ®).unwrap_err(); println!("{}", error); assert_eq!("\ cyclic package dependency: package `A v0.0.0 (registry `https://example.com/`)` depends on itself. Cycle: package `A v0.0.0 (registry `https://example.com/`)` ... which satisfies dependency `A = \"*\"` of package `C v0.0.0 (registry `https://example.com/`)` ... 
which satisfies dependency `C = \"*\"` of package `A v0.0.0 (registry `https://example.com/`)`\ ", error.to_string()); } cargo-0.66.0/publish.py000077500000000000000000000026241432416201200147200ustar00rootroot00000000000000#!/usr/bin/env python3 # This script is used to publish Cargo to crates.io. import os import re import subprocess import time import urllib.request from urllib.error import HTTPError TO_PUBLISH = [ 'crates/cargo-platform', 'crates/cargo-util', 'crates/crates-io', '.', ] def already_published(name, version): try: urllib.request.urlopen('https://crates.io/api/v1/crates/%s/%s/download' % (name, version)) except HTTPError as e: if e.code == 404: return False raise return True def maybe_publish(path): content = open(os.path.join(path, 'Cargo.toml')).read() name = re.search('^name = "([^"]+)"', content, re.M).group(1) version = re.search('^version = "([^"]+)"', content, re.M).group(1) if already_published(name, version): print('%s %s is already published, skipping' % (name, version)) return False subprocess.check_call(['cargo', 'publish', '--no-verify'], cwd=path) return True def main(): print('Starting publish...') for i, path in enumerate(TO_PUBLISH): if maybe_publish(path): if i < len(TO_PUBLISH)-1: # Sleep to allow the index to update. This should probably # check that the index is updated, or use a retry loop # instead. 
time.sleep(5) print('Publish complete!') if __name__ == '__main__': main() cargo-0.66.0/src/000077500000000000000000000000001432416201200134605ustar00rootroot00000000000000cargo-0.66.0/src/bin/000077500000000000000000000000001432416201200142305ustar00rootroot00000000000000cargo-0.66.0/src/bin/cargo/000077500000000000000000000000001432416201200153235ustar00rootroot00000000000000cargo-0.66.0/src/bin/cargo/cli.rs000066400000000000000000000434131432416201200164450ustar00rootroot00000000000000use anyhow::anyhow; use cargo::core::shell::Shell; use cargo::core::{features, CliUnstable}; use cargo::{self, drop_print, drop_println, CliResult, Config}; use clap::{AppSettings, Arg, ArgMatches}; use itertools::Itertools; use std::collections::HashMap; use std::fmt::Write; use super::commands; use super::list_commands; use crate::command_prelude::*; use cargo::core::features::HIDDEN; lazy_static::lazy_static! { // Maps from commonly known external commands (not builtin to cargo) to their // description, for the help page. Reserved for external subcommands that are // core within the rust ecosystem (esp ones that might become internal in the future). static ref KNOWN_EXTERNAL_COMMAND_DESCRIPTIONS: HashMap<&'static str, &'static str> = HashMap::from([ ("clippy", "Checks a package to catch common mistakes and improve your Rust code."), ("fmt", "Formats all bin and lib files of the current crate using rustfmt."), ]); } pub fn main(config: &mut LazyConfig) -> CliResult { let args = cli().try_get_matches()?; // CAUTION: Be careful with using `config` until it is configured below. // In general, try to avoid loading config values unless necessary (like // the [alias] table). let config = config.get_mut(); // Global args need to be extracted before expanding aliases because the // clap code for extracting a subcommand discards global options // (appearing before the subcommand). 
let (expanded_args, global_args) = expand_aliases(config, args, vec![])?; if expanded_args .get_one::("unstable-features") .map(String::as_str) == Some("help") { let options = CliUnstable::help(); let non_hidden_options: Vec<(String, String)> = options .iter() .filter(|(_, help_message)| *help_message != HIDDEN) .map(|(name, help)| (name.to_string(), help.to_string())) .collect(); let longest_option = non_hidden_options .iter() .map(|(option_name, _)| option_name.len()) .max() .unwrap_or(0); let help_lines: Vec = non_hidden_options .iter() .map(|(option_name, option_help_message)| { let option_name_kebab_case = option_name.replace("_", "-"); let padding = " ".repeat(longest_option - option_name.len()); // safe to subtract format!( " -Z {}{} -- {}", option_name_kebab_case, padding, option_help_message ) }) .collect(); let joined = help_lines.join("\n"); drop_println!( config, " Available unstable (nightly-only) flags: {} Run with 'cargo -Z [FLAG] [SUBCOMMAND]'", joined ); if !config.nightly_features_allowed { drop_println!( config, "\nUnstable flags are only available on the nightly channel \ of Cargo, but this is the `{}` channel.\n\ {}", features::channel(), features::SEE_CHANNELS ); } drop_println!( config, "\nSee https://doc.rust-lang.org/nightly/cargo/reference/unstable.html \ for more information about these flags." 
); return Ok(()); } let is_verbose = expanded_args.verbose() > 0; if expanded_args.flag("version") { let version = get_version_string(is_verbose); drop_print!(config, "{}", version); return Ok(()); } if let Some(code) = expanded_args.get_one::("explain") { let mut procss = config.load_global_rustc(None)?.process(); procss.arg("--explain").arg(code).exec()?; return Ok(()); } if expanded_args.flag("list") { drop_println!(config, "Installed Commands:"); for (name, command) in list_commands(config) { let known_external_desc = KNOWN_EXTERNAL_COMMAND_DESCRIPTIONS.get(name.as_str()); match command { CommandInfo::BuiltIn { about } => { assert!( known_external_desc.is_none(), "KNOWN_EXTERNAL_COMMANDS shouldn't contain builtin \"{}\"", name ); let summary = about.unwrap_or_default(); let summary = summary.lines().next().unwrap_or(&summary); // display only the first line drop_println!(config, " {:<20} {}", name, summary); } CommandInfo::External { path } => { if let Some(desc) = known_external_desc { drop_println!(config, " {:<20} {}", name, desc); } else if is_verbose { drop_println!(config, " {:<20} {}", name, path.display()); } else { drop_println!(config, " {}", name); } } CommandInfo::Alias { target } => { drop_println!( config, " {:<20} alias: {}", name, target.iter().join(" ") ); } } } return Ok(()); } let (cmd, subcommand_args) = match expanded_args.subcommand() { Some((cmd, args)) => (cmd, args), _ => { // No subcommand provided. 
cli().print_help()?; return Ok(()); } }; config_configure(config, &expanded_args, subcommand_args, global_args)?; super::init_git_transports(config); execute_subcommand(config, cmd, subcommand_args) } pub fn get_version_string(is_verbose: bool) -> String { let version = cargo::version(); let mut version_string = format!("cargo {}\n", version); if is_verbose { version_string.push_str(&format!("release: {}\n", version.version)); if let Some(ref ci) = version.commit_info { version_string.push_str(&format!("commit-hash: {}\n", ci.commit_hash)); version_string.push_str(&format!("commit-date: {}\n", ci.commit_date)); } writeln!(version_string, "host: {}", env!("RUST_HOST_TARGET")).unwrap(); add_libgit2(&mut version_string); add_curl(&mut version_string); add_ssl(&mut version_string); writeln!(version_string, "os: {}", os_info::get()).unwrap(); } version_string } fn add_libgit2(version_string: &mut String) { let git2_v = git2::Version::get(); let lib_v = git2_v.libgit2_version(); let vendored = if git2_v.vendored() { format!("vendored") } else { format!("system") }; writeln!( version_string, "libgit2: {}.{}.{} (sys:{} {})", lib_v.0, lib_v.1, lib_v.2, git2_v.crate_version(), vendored ) .unwrap(); } fn add_curl(version_string: &mut String) { let curl_v = curl::Version::get(); let vendored = if curl_v.vendored() { format!("vendored") } else { format!("system") }; writeln!( version_string, "libcurl: {} (sys:{} {} ssl:{})", curl_v.version(), curl_sys::rust_crate_version(), vendored, curl_v.ssl_version().unwrap_or("none") ) .unwrap(); } fn add_ssl(version_string: &mut String) { #[cfg(feature = "openssl")] { writeln!(version_string, "ssl: {}", openssl::version::version()).unwrap(); } #[cfg(not(feature = "openssl"))] { let _ = version_string; // Silence unused warning. 
} } fn expand_aliases( config: &mut Config, args: ArgMatches, mut already_expanded: Vec, ) -> Result<(ArgMatches, GlobalArgs), CliError> { if let Some((cmd, args)) = args.subcommand() { match ( commands::builtin_exec(cmd), super::aliased_command(config, cmd)?, ) { (Some(_), Some(_)) => { // User alias conflicts with a built-in subcommand config.shell().warn(format!( "user-defined alias `{}` is ignored, because it is shadowed by a built-in command", cmd, ))?; } (Some(_), None) => { // Command is built-in and is not conflicting with alias, but contains ignored values. if let Some(mut values) = args.get_many::("") { config.shell().warn(format!( "trailing arguments after built-in command `{}` are ignored: `{}`", cmd, values.join(" "), ))?; } } (None, None) => {} (_, Some(mut alias)) => { // Check if this alias is shadowing an external subcommand // (binary of the form `cargo-`) // Currently this is only a warning, but after a transition period this will become // a hard error. if let Some(path) = super::find_external_subcommand(config, cmd) { config.shell().warn(format!( "\ user-defined alias `{}` is shadowing an external subcommand found at: `{}` This was previously accepted but is being phased out; it will become a hard error in a future release. For more information, see issue #10049 .", cmd, path.display(), ))?; } alias.extend(args.get_many::("").unwrap_or_default().cloned()); // new_args strips out everything before the subcommand, so // capture those global options now. // Note that an alias to an external command will not receive // these arguments. That may be confusing, but such is life. 
let global_args = GlobalArgs::new(args); let new_args = cli().no_binary_name(true).try_get_matches_from(alias)?; let new_cmd = new_args.subcommand_name().expect("subcommand is required"); already_expanded.push(cmd.to_string()); if already_expanded.contains(&new_cmd.to_string()) { // Crash if the aliases are corecursive / unresolvable return Err(anyhow!( "alias {} has unresolvable recursive definition: {} -> {}", already_expanded[0], already_expanded.join(" -> "), new_cmd, ) .into()); } let (expanded_args, _) = expand_aliases(config, new_args, already_expanded)?; return Ok((expanded_args, global_args)); } } }; Ok((args, GlobalArgs::default())) } fn config_configure( config: &mut Config, args: &ArgMatches, subcommand_args: &ArgMatches, global_args: GlobalArgs, ) -> CliResult { let arg_target_dir = &subcommand_args.value_of_path("target-dir", config); let verbose = global_args.verbose + args.verbose(); // quiet is unusual because it is redefined in some subcommands in order // to provide custom help text. let quiet = args.flag("quiet") || subcommand_args.flag("quiet") || global_args.quiet; let global_color = global_args.color; // Extract so it can take reference. 
let color = args .get_one::("color") .map(String::as_str) .or_else(|| global_color.as_deref()); let frozen = args.flag("frozen") || global_args.frozen; let locked = args.flag("locked") || global_args.locked; let offline = args.flag("offline") || global_args.offline; let mut unstable_flags = global_args.unstable_flags; if let Some(values) = args.get_many::("unstable-features") { unstable_flags.extend(values.cloned()); } let mut config_args = global_args.config_args; if let Some(values) = args.get_many::("config") { config_args.extend(values.cloned()); } config.configure( verbose, quiet, color, frozen, locked, offline, arg_target_dir, &unstable_flags, &config_args, )?; Ok(()) } fn execute_subcommand(config: &mut Config, cmd: &str, subcommand_args: &ArgMatches) -> CliResult { if let Some(exec) = commands::builtin_exec(cmd) { return exec(config, subcommand_args); } let mut ext_args: Vec<&str> = vec![cmd]; ext_args.extend( subcommand_args .get_many::("") .unwrap_or_default() .map(String::as_str), ); super::execute_external_subcommand(config, cmd, &ext_args) } #[derive(Default)] struct GlobalArgs { verbose: u32, quiet: bool, color: Option, frozen: bool, locked: bool, offline: bool, unstable_flags: Vec, config_args: Vec, } impl GlobalArgs { fn new(args: &ArgMatches) -> GlobalArgs { GlobalArgs { verbose: args.verbose(), quiet: args.flag("quiet"), color: args.get_one::("color").cloned(), frozen: args.flag("frozen"), locked: args.flag("locked"), offline: args.flag("offline"), unstable_flags: args .get_many::("unstable-features") .unwrap_or_default() .cloned() .collect(), config_args: args .get_many::("config") .unwrap_or_default() .cloned() .collect(), } } } pub fn cli() -> App { let is_rustup = std::env::var_os("RUSTUP_HOME").is_some(); let usage = if is_rustup { "cargo [+toolchain] [OPTIONS] [SUBCOMMAND]" } else { "cargo [OPTIONS] [SUBCOMMAND]" }; App::new("cargo") .allow_external_subcommands(true) .setting(AppSettings::DeriveDisplayOrder) // Doesn't mix well with our list 
of common cargo commands. See clap-rs/clap#3108 for // opening clap up to allow us to style our help template .disable_colored_help(true) // Provide a custom help subcommand for calling into man pages .disable_help_subcommand(true) .override_usage(usage) .help_template( "\ Rust's package manager USAGE: {usage} OPTIONS: {options} Some common cargo commands are (see all commands with --list): build, b Compile the current package check, c Analyze the current package and report errors, but don't build object files clean Remove the target directory doc, d Build this package's and its dependencies' documentation new Create a new cargo package init Create a new cargo package in an existing directory add Add dependencies to a manifest file run, r Run a binary or example of the local package test, t Run the tests bench Run the benchmarks update Update dependencies listed in Cargo.lock search Search registry for crates publish Package and upload this package to the registry install Install a Rust binary. 
Default location is $HOME/.cargo/bin uninstall Uninstall a Rust binary See 'cargo help ' for more information on a specific command.\n", ) .arg(flag("version", "Print version info and exit").short('V')) .arg(flag("list", "List installed commands")) .arg(opt("explain", "Run `rustc --explain CODE`").value_name("CODE")) .arg( opt( "verbose", "Use verbose output (-vv very verbose/build.rs output)", ) .short('v') .action(ArgAction::Count) .global(true), ) .arg_quiet() .arg( opt("color", "Coloring: auto, always, never") .value_name("WHEN") .global(true), ) .arg(flag("frozen", "Require Cargo.lock and cache are up to date").global(true)) .arg(flag("locked", "Require Cargo.lock is up to date").global(true)) .arg(flag("offline", "Run without accessing the network").global(true)) .arg(multi_opt("config", "KEY=VALUE", "Override a configuration value").global(true)) .arg( Arg::new("unstable-features") .help("Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details") .short('Z') .value_name("FLAG") .action(ArgAction::Append) .global(true), ) .subcommands(commands::builtin()) } /// Delay loading [`Config`] until access. /// /// In the common path, the [`Config`] is dependent on CLI parsing and shouldn't be loaded until /// after that is done but some other paths (like fix or earlier errors) might need access to it, /// so this provides a way to share the instance and the implementation across these different /// accesses. 
pub struct LazyConfig { config: Option, } impl LazyConfig { pub fn new() -> Self { Self { config: None } } /// Get the config, loading it if needed /// /// On error, the process is terminated pub fn get(&mut self) -> &Config { self.get_mut() } /// Get the config, loading it if needed /// /// On error, the process is terminated pub fn get_mut(&mut self) -> &mut Config { self.config.get_or_insert_with(|| match Config::default() { Ok(cfg) => cfg, Err(e) => { let mut shell = Shell::new(); cargo::exit_with_error(e.into(), &mut shell) } }) } } #[test] fn verify_cli() { cli().debug_assert(); } cargo-0.66.0/src/bin/cargo/commands/000077500000000000000000000000001432416201200171245ustar00rootroot00000000000000cargo-0.66.0/src/bin/cargo/commands/add.rs000066400000000000000000000303621432416201200202260ustar00rootroot00000000000000use indexmap::IndexMap; use indexmap::IndexSet; use cargo::core::dependency::DepKind; use cargo::core::FeatureValue; use cargo::ops::cargo_add::add; use cargo::ops::cargo_add::AddOptions; use cargo::ops::cargo_add::DepOp; use cargo::ops::cargo_add::DepTable; use cargo::ops::resolve_ws; use cargo::util::command_prelude::*; use cargo::util::interning::InternedString; use cargo::CargoResult; pub fn cli() -> clap::Command<'static> { clap::Command::new("add") .setting(clap::AppSettings::DeriveDisplayOrder) .about("Add dependencies to a Cargo.toml manifest file") .override_usage( "\ cargo add [OPTIONS] [@] ... cargo add [OPTIONS] --path ... cargo add [OPTIONS] --git ..." 
) .after_help("Run `cargo help add` for more detailed information.\n") .group(clap::ArgGroup::new("selected").multiple(true).required(true)) .args([ clap::Arg::new("crates") .takes_value(true) .value_name("DEP_ID") .multiple_values(true) .help("Reference to a package to add as a dependency") .long_help( "Reference to a package to add as a dependency You can reference a package by: - ``, like `cargo add serde` (latest version will be used) - `@`, like `cargo add serde@1` or `cargo add serde@=1.0.38`" ) .group("selected"), flag("no-default-features", "Disable the default features"), flag("default-features", "Re-enable the default features") .overrides_with("no-default-features"), clap::Arg::new("features") .short('F') .long("features") .takes_value(true) .value_name("FEATURES") .action(ArgAction::Append) .help("Space or comma separated list of features to activate"), flag("optional", "Mark the dependency as optional") .long_help("Mark the dependency as optional The package name will be exposed as feature of your crate.") .conflicts_with("dev"), flag("no-optional", "Mark the dependency as required") .long_help("Mark the dependency as required The package will be removed from your features.") .conflicts_with("dev") .overrides_with("optional"), clap::Arg::new("rename") .long("rename") .takes_value(true) .value_name("NAME") .help("Rename the dependency") .long_help("Rename the dependency Example uses: - Depending on multiple versions of a crate - Depend on crates with the same name from different registries"), ]) .arg_manifest_path() .args([ clap::Arg::new("package") .short('p') .long("package") .takes_value(true) .value_name("SPEC") .help("Package to modify"), ]) .arg_quiet() .arg_dry_run("Don't actually write the manifest") .next_help_heading("SOURCE") .args([ clap::Arg::new("path") .long("path") .takes_value(true) .value_name("PATH") .help("Filesystem path to local crate to add") .group("selected") .conflicts_with("git"), clap::Arg::new("git") .long("git") 
.takes_value(true) .value_name("URI") .help("Git repository location") .long_help("Git repository location Without any other information, cargo will use latest commit on the main branch.") .group("selected"), clap::Arg::new("branch") .long("branch") .takes_value(true) .value_name("BRANCH") .help("Git branch to download the crate from") .requires("git") .group("git-ref"), clap::Arg::new("tag") .long("tag") .takes_value(true) .value_name("TAG") .help("Git tag to download the crate from") .requires("git") .group("git-ref"), clap::Arg::new("rev") .long("rev") .takes_value(true) .value_name("REV") .help("Git reference to download the crate from") .long_help("Git reference to download the crate from This is the catch all, handling hashes to named references in remote repositories.") .requires("git") .group("git-ref"), clap::Arg::new("registry") .long("registry") .takes_value(true) .value_name("NAME") .help("Package registry for this dependency"), ]) .next_help_heading("SECTION") .args([ flag("dev", "Add as development dependency") .long_help("Add as development dependency Dev-dependencies are not used when compiling a package for building, but are used for compiling tests, examples, and benchmarks. 
These dependencies are not propagated to other packages which depend on this package.") .group("section"), flag("build", "Add as build dependency") .long_help("Add as build dependency Build-dependencies are the only dependencies available for use by build scripts (`build.rs` files).") .group("section"), clap::Arg::new("target") .long("target") .takes_value(true) .value_name("TARGET") .value_parser(clap::builder::NonEmptyStringValueParser::new()) .help("Add as dependency to the given target platform") ]) } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let dry_run = args.dry_run(); let section = parse_section(args); let ws = args.workspace(config)?; let packages = args.packages_from_flags()?; let packages = packages.get_packages(&ws)?; let spec = match packages.len() { 0 => { return Err(CliError::new( anyhow::format_err!("no packages selected. Please specify one with `-p `"), 101, )); } 1 => packages[0], len => { return Err(CliError::new( anyhow::format_err!( "{len} packages selected. 
Please specify one with `-p `", ), 101, )); } }; let dependencies = parse_dependencies(config, args)?; let options = AddOptions { config, spec, dependencies, section, dry_run, }; add(&ws, &options)?; if !dry_run { // Reload the workspace since we've changed dependencies let ws = args.workspace(config)?; resolve_ws(&ws)?; } Ok(()) } fn parse_dependencies(config: &Config, matches: &ArgMatches) -> CargoResult> { let path = matches.get_one::("path"); let git = matches.get_one::("git"); let branch = matches.get_one::("branch"); let rev = matches.get_one::("rev"); let tag = matches.get_one::("tag"); let rename = matches.get_one::("rename"); let registry = matches.registry(config)?; let default_features = default_features(matches); let optional = optional(matches); let mut crates = matches .get_many::("crates") .into_iter() .flatten() .map(|c| (Some(c.clone()), None)) .collect::>(); let mut infer_crate_name = false; if crates.is_empty() { if path.is_some() || git.is_some() { crates.insert(None, None); infer_crate_name = true; } else { unreachable!("clap should ensure we have some source selected"); } } for feature in matches .get_many::("features") .into_iter() .flatten() .map(String::as_str) .flat_map(parse_feature) { let parsed_value = FeatureValue::new(InternedString::new(feature)); match parsed_value { FeatureValue::Feature(_) => { if 1 < crates.len() { let candidates = crates .keys() .map(|c| { format!( "`{}/{}`", c.as_deref().expect("only none when there is 1"), feature ) }) .collect::>(); anyhow::bail!("feature `{feature}` must be qualified by the dependency its being activated for, like {}", candidates.join(", ")); } crates .first_mut() .expect("always at least one crate") .1 .get_or_insert_with(IndexSet::new) .insert(feature.to_owned()); } FeatureValue::Dep { .. } => { anyhow::bail!("feature `{feature}` is not allowed to use explicit `dep:` syntax",) } FeatureValue::DepFeature { dep_name, dep_feature, .. 
} => { if infer_crate_name { anyhow::bail!("`{feature}` is unsupported when inferring the crate name, use `{dep_feature}`"); } if dep_feature.contains('/') { anyhow::bail!("multiple slashes in feature `{feature}` is not allowed"); } crates.get_mut(&Some(dep_name.as_str().to_owned())).ok_or_else(|| { anyhow::format_err!("feature `{dep_feature}` activated for crate `{dep_name}` but the crate wasn't specified") })? .get_or_insert_with(IndexSet::new) .insert(dep_feature.as_str().to_owned()); } } } let mut deps: Vec = Vec::new(); for (crate_spec, features) in crates { let dep = DepOp { crate_spec, rename: rename.map(String::from), features, default_features, optional, registry: registry.clone(), path: path.map(String::from), git: git.map(String::from), branch: branch.map(String::from), rev: rev.map(String::from), tag: tag.map(String::from), }; deps.push(dep); } if deps.len() > 1 && rename.is_some() { anyhow::bail!("cannot specify multiple crates with `--rename`"); } Ok(deps) } fn default_features(matches: &ArgMatches) -> Option { resolve_bool_arg( matches.flag("default-features"), matches.flag("no-default-features"), ) } fn optional(matches: &ArgMatches) -> Option { resolve_bool_arg(matches.flag("optional"), matches.flag("no-optional")) } fn resolve_bool_arg(yes: bool, no: bool) -> Option { match (yes, no) { (true, false) => Some(true), (false, true) => Some(false), (false, false) => None, (_, _) => unreachable!("clap should make this impossible"), } } fn parse_section(matches: &ArgMatches) -> DepTable { let kind = if matches.flag("dev") { DepKind::Development } else if matches.flag("build") { DepKind::Build } else { DepKind::Normal }; let mut table = DepTable::new().set_kind(kind); if let Some(target) = matches.get_one::("target") { assert!(!target.is_empty(), "Target specification may not be empty"); table = table.set_target(target); } table } /// Split feature flag list fn parse_feature(feature: &str) -> impl Iterator { // Not re-using `CliFeatures` because it uses a 
BTreeSet and loses user's ordering feature .split_whitespace() .flat_map(|s| s.split(',')) .filter(|s| !s.is_empty()) } cargo-0.66.0/src/bin/cargo/commands/bench.rs000066400000000000000000000051271432416201200205560ustar00rootroot00000000000000use crate::command_prelude::*; use cargo::ops::{self, TestOptions}; pub fn cli() -> App { subcommand("bench") .trailing_var_arg(true) .about("Execute all benchmarks of a local package") .arg_quiet() .arg( Arg::new("BENCHNAME") .help("If specified, only run benches containing this string in their names"), ) .arg( Arg::new("args") .help("Arguments for the bench binary") .multiple_values(true) .last(true), ) .arg_targets_all( "Benchmark only this package's library", "Benchmark only the specified binary", "Benchmark all binaries", "Benchmark only the specified example", "Benchmark all examples", "Benchmark only the specified test target", "Benchmark all tests", "Benchmark only the specified bench target", "Benchmark all benches", "Benchmark all targets", ) .arg(flag("no-run", "Compile, but don't run benchmarks")) .arg_package_spec( "Package to run benchmarks for", "Benchmark all packages in the workspace", "Exclude packages from the benchmark", ) .arg_jobs() .arg_profile("Build artifacts with the specified profile") .arg_features() .arg_target_triple("Build for the target triple") .arg_target_dir() .arg_manifest_path() .arg_ignore_rust_version() .arg_message_format() .arg(flag( "no-fail-fast", "Run all benchmarks regardless of failure", )) .arg_unit_graph() .arg_timings() .after_help("Run `cargo help bench` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let ws = args.workspace(config)?; let mut compile_opts = args.compile_options( config, CompileMode::Bench, Some(&ws), ProfileChecking::Custom, )?; compile_opts.build_config.requested_profile = args.get_profile_name(config, "bench", ProfileChecking::Custom)?; let ops = TestOptions { no_run: args.flag("no-run"), no_fail_fast: 
args.flag("no-fail-fast"), compile_opts, }; let bench_args = args.get_one::("BENCHNAME").into_iter(); let bench_args = bench_args.chain(args.get_many::("args").unwrap_or_default()); let bench_args = bench_args.map(String::as_str).collect::>(); ops::run_benches(&ws, &ops, &bench_args) } cargo-0.66.0/src/bin/cargo/commands/build.rs000066400000000000000000000046241432416201200205770ustar00rootroot00000000000000use crate::command_prelude::*; use cargo::ops; pub fn cli() -> App { subcommand("build") // subcommand aliases are handled in aliased_command() // .alias("b") .about("Compile a local package and all of its dependencies") .arg_quiet() .arg_package_spec( "Package to build (see `cargo help pkgid`)", "Build all packages in the workspace", "Exclude packages from the build", ) .arg_jobs() .arg_targets_all( "Build only this package's library", "Build only the specified binary", "Build all binaries", "Build only the specified example", "Build all examples", "Build only the specified test target", "Build all tests", "Build only the specified bench target", "Build all benches", "Build all targets", ) .arg_release("Build artifacts in release mode, with optimizations") .arg_profile("Build artifacts with the specified profile") .arg_features() .arg_target_triple("Build for the target triple") .arg_target_dir() .arg( opt( "out-dir", "Copy final artifacts to this directory (unstable)", ) .value_name("PATH"), ) .arg_manifest_path() .arg_ignore_rust_version() .arg_message_format() .arg_build_plan() .arg_unit_graph() .arg_future_incompat_report() .arg_timings() .after_help("Run `cargo help build` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let ws = args.workspace(config)?; let mut compile_opts = args.compile_options( config, CompileMode::Build, Some(&ws), ProfileChecking::Custom, )?; if let Some(out_dir) = args.value_of_path("out-dir", config) { compile_opts.build_config.export_dir = Some(out_dir); } else if let 
Some(out_dir) = config.build_config()?.out_dir.as_ref() { let out_dir = out_dir.resolve_path(config); compile_opts.build_config.export_dir = Some(out_dir); } if compile_opts.build_config.export_dir.is_some() { config .cli_unstable() .fail_if_stable_opt("--out-dir", 6790)?; } ops::compile(&ws, &compile_opts)?; Ok(()) } cargo-0.66.0/src/bin/cargo/commands/check.rs000066400000000000000000000036261432416201200205560ustar00rootroot00000000000000use crate::command_prelude::*; use cargo::ops; pub fn cli() -> App { subcommand("check") // subcommand aliases are handled in aliased_command() // .alias("c") .about("Check a local package and all of its dependencies for errors") .arg_quiet() .arg_package_spec( "Package(s) to check", "Check all packages in the workspace", "Exclude packages from the check", ) .arg_jobs() .arg_targets_all( "Check only this package's library", "Check only the specified binary", "Check all binaries", "Check only the specified example", "Check all examples", "Check only the specified test target", "Check all tests", "Check only the specified bench target", "Check all benches", "Check all targets", ) .arg_release("Check artifacts in release mode, with optimizations") .arg_profile("Check artifacts with the specified profile") .arg_features() .arg_target_triple("Check for the target triple") .arg_target_dir() .arg_manifest_path() .arg_ignore_rust_version() .arg_message_format() .arg_unit_graph() .arg_future_incompat_report() .arg_timings() .after_help("Run `cargo help check` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let ws = args.workspace(config)?; // This is a legacy behavior that causes `cargo check` to pass `--test`. 
let test = matches!( args.get_one::("profile").map(String::as_str), Some("test") ); let mode = CompileMode::Check { test }; let compile_opts = args.compile_options(config, mode, Some(&ws), ProfileChecking::LegacyTestOnly)?; ops::compile(&ws, &compile_opts)?; Ok(()) } cargo-0.66.0/src/bin/cargo/commands/clean.rs000066400000000000000000000024301432416201200205530ustar00rootroot00000000000000use crate::command_prelude::*; use cargo::ops::{self, CleanOptions}; use cargo::util::print_available_packages; pub fn cli() -> App { subcommand("clean") .about("Remove artifacts that cargo has generated in the past") .arg_quiet() .arg_package_spec_simple("Package to clean artifacts for") .arg_manifest_path() .arg_target_triple("Target triple to clean output for") .arg_target_dir() .arg_release("Whether or not to clean release artifacts") .arg_profile("Clean artifacts of the specified profile") .arg_doc("Whether or not to clean just the documentation directory") .after_help("Run `cargo help clean` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let ws = args.workspace(config)?; if args.is_present_with_zero_values("package") { print_available_packages(&ws)?; } let opts = CleanOptions { config, spec: values(args, "package"), targets: args.targets(), requested_profile: args.get_profile_name(config, "dev", ProfileChecking::Custom)?, profile_specified: args.contains_id("profile") || args.flag("release"), doc: args.flag("doc"), }; ops::clean(&ws, &opts)?; Ok(()) } cargo-0.66.0/src/bin/cargo/commands/config.rs000066400000000000000000000035021432416201200207370ustar00rootroot00000000000000use crate::command_prelude::*; use cargo::ops::cargo_config; pub fn cli() -> App { subcommand("config") .about("Inspect configuration values") .after_help("Run `cargo help config` for more detailed information.\n") .subcommand_required(true) .arg_required_else_help(true) .subcommand( subcommand("get") .arg(Arg::new("key").help("The config key to 
display")) .arg( opt("format", "Display format") .value_parser(cargo_config::ConfigFormat::POSSIBLE_VALUES) .default_value("toml"), ) .arg(flag( "show-origin", "Display where the config value is defined", )) .arg( opt("merged", "Whether or not to merge config values") .value_parser(["yes", "no"]) .default_value("yes"), ), ) } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { config .cli_unstable() .fail_if_stable_command(config, "config", 9301)?; match args.subcommand() { Some(("get", args)) => { let opts = cargo_config::GetOptions { key: args.get_one::("key").map(String::as_str), format: args.get_one::("format").unwrap().parse()?, show_origin: args.flag("show-origin"), merged: args.get_one::("merged").map(String::as_str) == Some("yes"), }; cargo_config::get(config, &opts)?; } Some((cmd, _)) => { unreachable!("unexpected command {}", cmd) } None => { unreachable!("unexpected command") } } Ok(()) } cargo-0.66.0/src/bin/cargo/commands/doc.rs000066400000000000000000000037571432416201200202530ustar00rootroot00000000000000use crate::command_prelude::*; use cargo::ops::{self, DocOptions}; pub fn cli() -> App { subcommand("doc") // subcommand aliases are handled in aliased_command() // .alias("d") .about("Build a package's documentation") .arg_quiet() .arg(flag( "open", "Opens the docs in a browser after the operation", )) .arg_package_spec( "Package to document", "Document all packages in the workspace", "Exclude packages from the build", ) .arg(flag( "no-deps", "Don't build documentation for dependencies", )) .arg(flag("document-private-items", "Document private items")) .arg_jobs() .arg_targets_lib_bin_example( "Document only this package's library", "Document only the specified binary", "Document all binaries", "Document only the specified example", "Document all examples", ) .arg_release("Build artifacts in release mode, with optimizations") .arg_profile("Build artifacts with the specified profile") .arg_features() .arg_target_triple("Build for the 
target triple") .arg_target_dir() .arg_manifest_path() .arg_message_format() .arg_ignore_rust_version() .arg_unit_graph() .arg_timings() .after_help("Run `cargo help doc` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let ws = args.workspace(config)?; let mode = CompileMode::Doc { deps: !args.flag("no-deps"), }; let mut compile_opts = args.compile_options(config, mode, Some(&ws), ProfileChecking::Custom)?; compile_opts.rustdoc_document_private_items = args.flag("document-private-items"); let doc_opts = DocOptions { open_result: args.flag("open"), compile_opts, }; ops::doc(&ws, &doc_opts)?; Ok(()) } cargo-0.66.0/src/bin/cargo/commands/fetch.rs000066400000000000000000000011751432416201200205670ustar00rootroot00000000000000use crate::command_prelude::*; use cargo::ops; use cargo::ops::FetchOptions; pub fn cli() -> App { subcommand("fetch") .about("Fetch dependencies of a package from the network") .arg_quiet() .arg_manifest_path() .arg_target_triple("Fetch dependencies for the target triple") .after_help("Run `cargo help fetch` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let ws = args.workspace(config)?; let opts = FetchOptions { config, targets: args.targets(), }; let _ = ops::fetch(&ws, &opts)?; Ok(()) } cargo-0.66.0/src/bin/cargo/commands/fix.rs000066400000000000000000000060151432416201200202620ustar00rootroot00000000000000use crate::command_prelude::*; use cargo::ops; pub fn cli() -> App { subcommand("fix") .about("Automatically fix lint warnings reported by rustc") .arg_quiet() .arg_package_spec( "Package(s) to fix", "Fix all packages in the workspace", "Exclude packages from the fixes", ) .arg_jobs() .arg_targets_all( "Fix only this package's library", "Fix only the specified binary", "Fix all binaries", "Fix only the specified example", "Fix all examples", "Fix only the specified test target", "Fix all tests", "Fix only the specified bench 
target", "Fix all benches", "Fix all targets (default)", ) .arg_release("Fix artifacts in release mode, with optimizations") .arg_profile("Build artifacts with the specified profile") .arg_features() .arg_target_triple("Fix for the target triple") .arg_target_dir() .arg_manifest_path() .arg_message_format() .arg(flag( "broken-code", "Fix code even if it already has compiler errors", )) .arg(flag("edition", "Fix in preparation for the next edition")) .arg(flag( "edition-idioms", "Fix warnings to migrate to the idioms of an edition", )) .arg(flag( "allow-no-vcs", "Fix code even if a VCS was not detected", )) .arg(flag( "allow-dirty", "Fix code even if the working directory is dirty", )) .arg(flag( "allow-staged", "Fix code even if the working directory has staged changes", )) .arg_ignore_rust_version() .arg_timings() .after_help("Run `cargo help fix` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let ws = args.workspace(config)?; // This is a legacy behavior that causes `cargo fix` to pass `--test`. let test = matches!( args.get_one::("profile").map(String::as_str), Some("test") ); let mode = CompileMode::Check { test }; // Unlike other commands default `cargo fix` to all targets to fix as much // code as we can. let mut opts = args.compile_options(config, mode, Some(&ws), ProfileChecking::LegacyTestOnly)?; if !opts.filter.is_specific() { // cargo fix with no target selection implies `--all-targets`. 
opts.filter = ops::CompileFilter::new_all_targets(); } ops::fix( &ws, &mut ops::FixOptions { edition: args.flag("edition"), idioms: args.flag("edition-idioms"), compile_opts: opts, allow_dirty: args.flag("allow-dirty"), allow_no_vcs: args.flag("allow-no-vcs"), allow_staged: args.flag("allow-staged"), broken_code: args.flag("broken-code"), }, )?; Ok(()) } cargo-0.66.0/src/bin/cargo/commands/generate_lockfile.rs000066400000000000000000000007111432416201200231330ustar00rootroot00000000000000use crate::command_prelude::*; use cargo::ops; pub fn cli() -> App { subcommand("generate-lockfile") .about("Generate the lockfile for a package") .arg_quiet() .arg_manifest_path() .after_help("Run `cargo help generate-lockfile` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let ws = args.workspace(config)?; ops::generate_lockfile(&ws)?; Ok(()) } cargo-0.66.0/src/bin/cargo/commands/git_checkout.rs000066400000000000000000000005751432416201200221510ustar00rootroot00000000000000use crate::command_prelude::*; const REMOVED: &str = "The `git-checkout` subcommand has been removed."; pub fn cli() -> App { subcommand("git-checkout") .about("This subcommand has been removed") .hide(true) .override_help(REMOVED) } pub fn exec(_config: &mut Config, _args: &ArgMatches) -> CliResult { Err(anyhow::format_err!(REMOVED).into()) } cargo-0.66.0/src/bin/cargo/commands/help.rs000066400000000000000000000111041432416201200204170ustar00rootroot00000000000000use crate::aliased_command; use crate::command_prelude::*; use cargo::util::errors::CargoResult; use cargo::{drop_println, Config}; use cargo_util::paths::resolve_executable; use flate2::read::GzDecoder; use std::ffi::OsString; use std::io::Read; use std::io::Write; use std::path::Path; const COMPRESSED_MAN: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/man.tgz")); pub fn cli() -> App { subcommand("help") .about("Displays help for a cargo subcommand") .arg(Arg::new("SUBCOMMAND")) } pub fn 
exec(config: &mut Config, args: &ArgMatches) -> CliResult { let subcommand = args.get_one::("SUBCOMMAND"); if let Some(subcommand) = subcommand { if !try_help(config, subcommand)? { crate::execute_external_subcommand(config, subcommand, &[subcommand, "--help"])?; } } else { let mut cmd = crate::cli::cli(); let _ = cmd.print_help(); } Ok(()) } fn try_help(config: &Config, subcommand: &str) -> CargoResult { let subcommand = match check_alias(config, subcommand) { // If this alias is more than a simple subcommand pass-through, show the alias. Some(argv) if argv.len() > 1 => { let alias = argv.join(" "); drop_println!(config, "`{}` is aliased to `{}`", subcommand, alias); return Ok(true); } // Otherwise, resolve the alias into its subcommand. Some(argv) => { // An alias with an empty argv can be created via `"empty-alias" = ""`. let first = argv.get(0).map(String::as_str).unwrap_or(subcommand); first.to_string() } None => subcommand.to_string(), }; let subcommand = match check_builtin(&subcommand) { Some(s) => s, None => return Ok(false), }; if resolve_executable(Path::new("man")).is_ok() { let man = match extract_man(subcommand, "1") { Some(man) => man, None => return Ok(false), }; write_and_spawn(subcommand, &man, "man")?; } else { let txt = match extract_man(subcommand, "txt") { Some(txt) => txt, None => return Ok(false), }; if resolve_executable(Path::new("less")).is_ok() { write_and_spawn(subcommand, &txt, "less")?; } else if resolve_executable(Path::new("more")).is_ok() { write_and_spawn(subcommand, &txt, "more")?; } else { drop(std::io::stdout().write_all(&txt)); } } Ok(true) } /// Checks if the given subcommand is an alias. /// /// Returns None if it is not an alias. fn check_alias(config: &Config, subcommand: &str) -> Option> { aliased_command(config, subcommand).ok().flatten() } /// Checks if the given subcommand is a built-in command (not via an alias). /// /// Returns None if it is not a built-in command. 
fn check_builtin(subcommand: &str) -> Option<&str> { super::builtin_exec(subcommand).map(|_| subcommand) } /// Extracts the given man page from the compressed archive. /// /// Returns None if the command wasn't found. fn extract_man(subcommand: &str, extension: &str) -> Option> { let extract_name = OsString::from(format!("cargo-{}.{}", subcommand, extension)); let gz = GzDecoder::new(COMPRESSED_MAN); let mut ar = tar::Archive::new(gz); // Unwraps should be safe here, since this is a static archive generated // by our build script. It should never be an invalid format! for entry in ar.entries().unwrap() { let mut entry = entry.unwrap(); let path = entry.path().unwrap(); if path.file_name().unwrap() != extract_name { continue; } let mut result = Vec::new(); entry.read_to_end(&mut result).unwrap(); return Some(result); } None } /// Write the contents of a man page to disk and spawn the given command to /// display it. fn write_and_spawn(name: &str, contents: &[u8], command: &str) -> CargoResult<()> { let prefix = format!("cargo-{}.", name); let mut tmp = tempfile::Builder::new().prefix(&prefix).tempfile()?; let f = tmp.as_file_mut(); f.write_all(contents)?; f.flush()?; let path = tmp.path(); // Use a path relative to the temp directory so that it can work on // cygwin/msys systems which don't handle windows-style paths. 
let mut relative_name = std::ffi::OsString::from("./"); relative_name.push(path.file_name().unwrap()); let mut cmd = std::process::Command::new(command) .arg(relative_name) .current_dir(path.parent().unwrap()) .spawn()?; drop(cmd.wait()); Ok(()) } cargo-0.66.0/src/bin/cargo/commands/init.rs000066400000000000000000000012461432416201200204400ustar00rootroot00000000000000use crate::command_prelude::*; use cargo::ops; pub fn cli() -> App { subcommand("init") .about("Create a new cargo package in an existing directory") .arg_quiet() .arg(Arg::new("path").default_value(".")) .arg(opt("registry", "Registry to use").value_name("REGISTRY")) .arg_new_opts() .after_help("Run `cargo help init` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let opts = args.new_options(config)?; let project_kind = ops::init(&opts, config)?; config .shell() .status("Created", format!("{} package", project_kind))?; Ok(()) } cargo-0.66.0/src/bin/cargo/commands/install.rs000066400000000000000000000152641432416201200211500ustar00rootroot00000000000000use crate::command_prelude::*; use cargo::core::{GitReference, SourceId, Workspace}; use cargo::ops; use cargo::util::IntoUrl; use cargo_util::paths; pub fn cli() -> App { subcommand("install") .about("Install a Rust binary. 
Default location is $HOME/.cargo/bin") .arg_quiet() .arg( Arg::new("crate") .value_parser(clap::builder::NonEmptyStringValueParser::new()) .multiple_values(true), ) .arg( opt("version", "Specify a version to install") .alias("vers") .value_name("VERSION") .requires("crate"), ) .arg( opt("git", "Git URL to install the specified crate from") .value_name("URL") .conflicts_with_all(&["path", "index", "registry"]), ) .arg( opt("branch", "Branch to use when installing from git") .value_name("BRANCH") .requires("git"), ) .arg( opt("tag", "Tag to use when installing from git") .value_name("TAG") .requires("git"), ) .arg( opt("rev", "Specific commit to use when installing from git") .value_name("SHA") .requires("git"), ) .arg( opt("path", "Filesystem path to local crate to install") .value_name("PATH") .conflicts_with_all(&["git", "index", "registry"]), ) .arg(flag( "list", "list all installed packages and their versions", )) .arg_jobs() .arg(flag("force", "Force overwriting existing crates or binaries").short('f')) .arg(flag("no-track", "Do not save tracking information")) .arg_features() .arg_profile("Install artifacts with the specified profile") .arg(flag( "debug", "Build in debug mode (with the 'dev' profile) instead of release mode", )) .arg_targets_bins_examples( "Install only the specified binary", "Install all binaries", "Install only the specified example", "Install all examples", ) .arg_target_triple("Build for the target triple") .arg_target_dir() .arg(opt("root", "Directory to install packages into").value_name("DIR")) .arg( opt("index", "Registry index to install from") .value_name("INDEX") .requires("crate") .conflicts_with_all(&["git", "path", "registry"]), ) .arg( opt("registry", "Registry to use") .value_name("REGISTRY") .requires("crate") .conflicts_with_all(&["git", "path", "index"]), ) .arg_message_format() .arg_timings() .after_help("Run `cargo help install` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches) -> 
CliResult { let path = args.value_of_path("path", config); if let Some(path) = &path { config.reload_rooted_at(path)?; } else { // TODO: Consider calling set_search_stop_path(home). config.reload_rooted_at(config.home().clone().into_path_unlocked())?; } // In general, we try to avoid normalizing paths in Cargo, // but in these particular cases we need it to fix rust-lang/cargo#10283. // (Handle `SourceId::for_path` and `Workspace::new`, // but not `Config::reload_rooted_at` which is always cwd) let path = path.map(|p| paths::normalize_path(&p)); let version = args.get_one::("version").map(String::as_str); let krates = args .get_many::("crate") .unwrap_or_default() .map(|k| resolve_crate(k, version)) .collect::>>()?; let mut from_cwd = false; let source = if let Some(url) = args.get_one::("git") { let url = url.into_url()?; let gitref = if let Some(branch) = args.get_one::("branch") { GitReference::Branch(branch.clone()) } else if let Some(tag) = args.get_one::("tag") { GitReference::Tag(tag.clone()) } else if let Some(rev) = args.get_one::("rev") { GitReference::Rev(rev.clone()) } else { GitReference::DefaultBranch }; SourceId::for_git(&url, gitref)? } else if let Some(path) = &path { SourceId::for_path(path)? } else if krates.is_empty() { from_cwd = true; SourceId::for_path(config.cwd())? } else if let Some(registry) = args.registry(config)? { SourceId::alt_registry(config, ®istry)? } else if let Some(index) = args.get_one::("index") { SourceId::for_registry(&index.into_url()?)? } else { SourceId::crates_io(config)? }; let root = args.get_one::("root").map(String::as_str); // We only provide workspace information for local crate installation from // one of the following sources: // - From current working directory (only work for edition 2015). // - From a specific local file path (from `--path` arg). // // This workspace information is for emitting helpful messages from // `ArgMatchesExt::compile_options` and won't affect the actual compilation. 
let workspace = if from_cwd { args.workspace(config).ok() } else if let Some(path) = &path { Workspace::new(&path.join("Cargo.toml"), config).ok() } else { None }; let mut compile_opts = args.compile_options( config, CompileMode::Build, workspace.as_ref(), ProfileChecking::Custom, )?; compile_opts.build_config.requested_profile = args.get_profile_name(config, "release", ProfileChecking::Custom)?; if args.flag("list") { ops::install_list(root, config)?; } else { ops::install( config, root, krates, source, from_cwd, &compile_opts, args.flag("force"), args.flag("no-track"), )?; } Ok(()) } fn resolve_crate<'k>( mut krate: &'k str, mut version: Option<&'k str>, ) -> crate::CargoResult<(&'k str, Option<&'k str>)> { if let Some((k, v)) = krate.split_once('@') { if version.is_some() { anyhow::bail!("cannot specify both `@{v}` and `--version`"); } if k.is_empty() { // by convention, arguments starting with `@` are response files anyhow::bail!("missing crate name for `@{v}`"); } krate = k; version = Some(v); } Ok((krate, version)) } cargo-0.66.0/src/bin/cargo/commands/locate_project.rs000066400000000000000000000046351432416201200224770ustar00rootroot00000000000000use crate::command_prelude::*; use anyhow::bail; use cargo::{drop_println, CargoResult}; use serde::Serialize; pub fn cli() -> App { subcommand("locate-project") .about("Print a JSON representation of a Cargo.toml file's location") .arg_quiet() .arg_manifest_path() .arg( opt( "message-format", "Output representation [possible values: json, plain]", ) .value_name("FMT"), ) .arg(flag("workspace", "Locate Cargo.toml of the workspace root")) .after_help("Run `cargo help locate-project` for more detailed information.\n") } #[derive(Serialize)] pub struct ProjectLocation<'a> { root: &'a str, } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let root_manifest; let workspace; let root = match WhatToFind::parse(args) { WhatToFind::CurrentManifest => { root_manifest = args.root_manifest(config)?; 
&root_manifest } WhatToFind::Workspace => { workspace = args.workspace(config)?; workspace.root_manifest() } }; let root = root .to_str() .ok_or_else(|| { anyhow::format_err!( "your package path contains characters \ not representable in Unicode" ) }) .map_err(|e| CliError::new(e, 1))?; let location = ProjectLocation { root }; match MessageFormat::parse(args)? { MessageFormat::Json => config.shell().print_json(&location)?, MessageFormat::Plain => drop_println!(config, "{}", location.root), } Ok(()) } enum WhatToFind { CurrentManifest, Workspace, } impl WhatToFind { fn parse(args: &ArgMatches) -> Self { if args.flag("workspace") { WhatToFind::Workspace } else { WhatToFind::CurrentManifest } } } enum MessageFormat { Json, Plain, } impl MessageFormat { fn parse(args: &ArgMatches) -> CargoResult { let fmt = match args.get_one::("message-format") { Some(fmt) => fmt, None => return Ok(MessageFormat::Json), }; match fmt.to_ascii_lowercase().as_str() { "json" => Ok(MessageFormat::Json), "plain" => Ok(MessageFormat::Plain), s => bail!("invalid message format specifier: `{}`", s), } } } cargo-0.66.0/src/bin/cargo/commands/login.rs000066400000000000000000000012621432416201200206030ustar00rootroot00000000000000use crate::command_prelude::*; use cargo::ops; pub fn cli() -> App { subcommand("login") .about( "Save an api token from the registry locally. 
\ If token is not specified, it will be read from stdin.", ) .arg_quiet() .arg(Arg::new("token")) .arg(opt("registry", "Registry to use").value_name("REGISTRY")) .after_help("Run `cargo help login` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { ops::registry_login( config, args.get_one::("token").cloned(), args.get_one::("registry").cloned(), )?; Ok(()) } cargo-0.66.0/src/bin/cargo/commands/logout.rs000066400000000000000000000012611432416201200210030ustar00rootroot00000000000000use crate::command_prelude::*; use cargo::ops; pub fn cli() -> App { subcommand("logout") .about("Remove an API token from the registry locally") .arg_quiet() .arg(opt("registry", "Registry to use").value_name("REGISTRY")) .after_help("Run `cargo help logout` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { if !config.cli_unstable().credential_process { config .cli_unstable() .fail_if_stable_command(config, "logout", 8933)?; } config.load_credentials()?; ops::registry_logout(config, args.get_one::("registry").cloned())?; Ok(()) } cargo-0.66.0/src/bin/cargo/commands/metadata.rs000066400000000000000000000033031432416201200212510ustar00rootroot00000000000000use crate::command_prelude::*; use cargo::ops::{self, OutputMetadataOptions}; pub fn cli() -> App { subcommand("metadata") .about( "Output the resolved dependencies of a package, \ the concrete used versions including overrides, \ in machine-readable format", ) .arg_quiet() .arg_features() .arg(multi_opt( "filter-platform", "TRIPLE", "Only include resolve dependencies matching the given target-triple", )) .arg(flag( "no-deps", "Output information only about the workspace members \ and don't fetch dependencies", )) .arg_manifest_path() .arg( opt("format-version", "Format version") .value_name("VERSION") .value_parser(["1"]), ) .after_help("Run `cargo help metadata` for more detailed information.\n") } pub fn exec(config: &mut 
Config, args: &ArgMatches) -> CliResult { let ws = args.workspace(config)?; let version = match args.get_one::("format-version") { None => { config.shell().warn( "please specify `--format-version` flag explicitly \ to avoid compatibility problems", )?; 1 } Some(version) => version.parse().unwrap(), }; let options = OutputMetadataOptions { cli_features: args.cli_features()?, no_deps: args.flag("no-deps"), filter_platforms: args._values_of("filter-platform"), version, }; let result = ops::output_metadata(&ws, &options)?; config.shell().print_json(&result)?; Ok(()) } cargo-0.66.0/src/bin/cargo/commands/mod.rs000066400000000000000000000056751432416201200202660ustar00rootroot00000000000000use crate::command_prelude::*; pub fn builtin() -> Vec { vec![ add::cli(), bench::cli(), build::cli(), check::cli(), clean::cli(), config::cli(), doc::cli(), fetch::cli(), fix::cli(), generate_lockfile::cli(), git_checkout::cli(), help::cli(), init::cli(), install::cli(), locate_project::cli(), login::cli(), logout::cli(), metadata::cli(), new::cli(), owner::cli(), package::cli(), pkgid::cli(), publish::cli(), read_manifest::cli(), report::cli(), run::cli(), rustc::cli(), rustdoc::cli(), search::cli(), test::cli(), tree::cli(), uninstall::cli(), update::cli(), vendor::cli(), verify_project::cli(), version::cli(), yank::cli(), ] } pub fn builtin_exec(cmd: &str) -> Option CliResult> { let f = match cmd { "add" => add::exec, "bench" => bench::exec, "build" => build::exec, "check" => check::exec, "clean" => clean::exec, "config" => config::exec, "doc" => doc::exec, "fetch" => fetch::exec, "fix" => fix::exec, "generate-lockfile" => generate_lockfile::exec, "git-checkout" => git_checkout::exec, "help" => help::exec, "init" => init::exec, "install" => install::exec, "locate-project" => locate_project::exec, "login" => login::exec, "logout" => logout::exec, "metadata" => metadata::exec, "new" => new::exec, "owner" => owner::exec, "package" => package::exec, "pkgid" => pkgid::exec, "publish" => 
publish::exec, "read-manifest" => read_manifest::exec, "report" => report::exec, "run" => run::exec, "rustc" => rustc::exec, "rustdoc" => rustdoc::exec, "search" => search::exec, "test" => test::exec, "tree" => tree::exec, "uninstall" => uninstall::exec, "update" => update::exec, "vendor" => vendor::exec, "verify-project" => verify_project::exec, "version" => version::exec, "yank" => yank::exec, _ => return None, }; Some(f) } pub mod add; pub mod bench; pub mod build; pub mod check; pub mod clean; pub mod config; pub mod doc; pub mod fetch; pub mod fix; pub mod generate_lockfile; pub mod git_checkout; pub mod help; pub mod init; pub mod install; pub mod locate_project; pub mod login; pub mod logout; pub mod metadata; pub mod new; pub mod owner; pub mod package; pub mod pkgid; pub mod publish; pub mod read_manifest; pub mod report; pub mod run; pub mod rustc; pub mod rustdoc; pub mod search; pub mod test; pub mod tree; pub mod uninstall; pub mod update; pub mod vendor; pub mod verify_project; pub mod version; pub mod yank; cargo-0.66.0/src/bin/cargo/commands/new.rs000066400000000000000000000015051432416201200202640ustar00rootroot00000000000000use crate::command_prelude::*; use cargo::ops; pub fn cli() -> App { subcommand("new") .about("Create a new cargo package at ") .arg_quiet() .arg(Arg::new("path").required(true)) .arg(opt("registry", "Registry to use").value_name("REGISTRY")) .arg_new_opts() .after_help("Run `cargo help new` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let opts = args.new_options(config)?; ops::new(&opts, config)?; let path = args.get_one::("path").unwrap(); let package_name = if let Some(name) = args.get_one::("name") { name } else { path }; config.shell().status( "Created", format!("{} `{}` package", opts.kind, package_name), )?; Ok(()) } cargo-0.66.0/src/bin/cargo/commands/owner.rs000066400000000000000000000032421432416201200206250ustar00rootroot00000000000000use 
crate::command_prelude::*; use cargo::ops::{self, OwnersOptions}; pub fn cli() -> App { subcommand("owner") .about("Manage the owners of a crate on the registry") .arg_quiet() .arg(Arg::new("crate")) .arg( multi_opt( "add", "LOGIN", "Name of a user or team to invite as an owner", ) .short('a'), ) .arg( multi_opt( "remove", "LOGIN", "Name of a user or team to remove as an owner", ) .short('r'), ) .arg(flag("list", "List owners of a crate").short('l')) .arg(opt("index", "Registry index to modify owners for").value_name("INDEX")) .arg(opt("token", "API token to use when authenticating").value_name("TOKEN")) .arg(opt("registry", "Registry to use").value_name("REGISTRY")) .after_help("Run `cargo help owner` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { config.load_credentials()?; let registry = args.registry(config)?; let opts = OwnersOptions { krate: args.get_one::("crate").cloned(), token: args.get_one::("token").cloned(), index: args.get_one::("index").cloned(), to_add: args .get_many::("add") .map(|xs| xs.cloned().collect()), to_remove: args .get_many::("remove") .map(|xs| xs.cloned().collect()), list: args.flag("list"), registry, }; ops::modify_owners(config, &opts)?; Ok(()) } cargo-0.66.0/src/bin/cargo/commands/package.rs000066400000000000000000000034231432416201200210670ustar00rootroot00000000000000use crate::command_prelude::*; use cargo::ops::{self, PackageOpts}; pub fn cli() -> App { subcommand("package") .about("Assemble the local package into a distributable tarball") .arg_quiet() .arg( flag( "list", "Print files included in a package without making one", ) .short('l'), ) .arg(flag( "no-verify", "Don't verify the contents by building them", )) .arg(flag( "no-metadata", "Ignore warnings about a lack of human-usable metadata", )) .arg(flag( "allow-dirty", "Allow dirty working directories to be packaged", )) .arg_target_triple("Build for the target triple") .arg_target_dir() .arg_features() 
.arg_package_spec_no_all( "Package(s) to assemble", "Assemble all packages in the workspace", "Don't assemble specified packages", ) .arg_manifest_path() .arg_jobs() .after_help("Run `cargo help package` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let ws = args.workspace(config)?; let specs = args.packages_from_flags()?; ops::package( &ws, &PackageOpts { config, verify: !args.flag("no-verify"), list: args.flag("list"), check_metadata: !args.flag("no-metadata"), allow_dirty: args.flag("allow-dirty"), to_package: specs, targets: args.targets(), jobs: args.jobs()?, keep_going: args.keep_going(), cli_features: args.cli_features()?, }, )?; Ok(()) } cargo-0.66.0/src/bin/cargo/commands/pkgid.rs000066400000000000000000000015531432416201200205740ustar00rootroot00000000000000use crate::command_prelude::*; use cargo::ops; use cargo::util::print_available_packages; pub fn cli() -> App { subcommand("pkgid") .about("Print a fully qualified package specification") .arg_quiet() .arg(Arg::new("spec")) .arg_package("Argument to get the package ID specifier for") .arg_manifest_path() .after_help("Run `cargo help pkgid` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let ws = args.workspace(config)?; if args.is_present_with_zero_values("package") { print_available_packages(&ws)? 
} let spec = args .get_one::("spec") .or_else(|| args.get_one::("package")) .map(String::as_str); let spec = ops::pkgid(&ws, spec)?; cargo::drop_println!(config, "{}", spec); Ok(()) } cargo-0.66.0/src/bin/cargo/commands/publish.rs000066400000000000000000000032761432416201200211500ustar00rootroot00000000000000use crate::command_prelude::*; use cargo::ops::{self, PublishOpts}; pub fn cli() -> App { subcommand("publish") .about("Upload a package to the registry") .arg_quiet() .arg_index() .arg(opt("token", "Token to use when uploading").value_name("TOKEN")) .arg(flag( "no-verify", "Don't verify the contents by building them", )) .arg(flag( "allow-dirty", "Allow dirty working directories to be packaged", )) .arg_target_triple("Build for the target triple") .arg_target_dir() .arg_package("Package to publish") .arg_manifest_path() .arg_features() .arg_jobs() .arg_dry_run("Perform all checks without uploading") .arg(opt("registry", "Registry to publish to").value_name("REGISTRY")) .after_help("Run `cargo help publish` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { config.load_credentials()?; let registry = args.registry(config)?; let ws = args.workspace(config)?; let index = args.index()?; ops::publish( &ws, &PublishOpts { config, token: args.get_one::("token").map(|s| s.to_string()), index, verify: !args.flag("no-verify"), allow_dirty: args.flag("allow-dirty"), to_publish: args.packages_from_flags()?, targets: args.targets(), jobs: args.jobs()?, keep_going: args.keep_going(), dry_run: args.dry_run(), registry, cli_features: args.cli_features()?, }, )?; Ok(()) } cargo-0.66.0/src/bin/cargo/commands/read_manifest.rs000066400000000000000000000007261432416201200223000ustar00rootroot00000000000000use crate::command_prelude::*; pub fn cli() -> App { subcommand("read-manifest") .about( "\ Print a JSON representation of a Cargo.toml manifest. 
Deprecated, use `cargo metadata --no-deps` instead.\ ", ) .arg_quiet() .arg_manifest_path() } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let ws = args.workspace(config)?; config.shell().print_json(&ws.current()?.serialized())?; Ok(()) } cargo-0.66.0/src/bin/cargo/commands/report.rs000066400000000000000000000034231432416201200210070ustar00rootroot00000000000000use crate::command_prelude::*; use cargo::core::compiler::future_incompat::{OnDiskReports, REPORT_PREAMBLE}; use cargo::drop_println; pub fn cli() -> App { subcommand("report") .about("Generate and display various kinds of reports") .after_help("Run `cargo help report` for more detailed information.\n") .subcommand_required(true) .arg_required_else_help(true) .subcommand( subcommand("future-incompatibilities") .alias("future-incompat") .about("Reports any crates which will eventually stop compiling") .arg( opt( "id", "identifier of the report generated by a Cargo command invocation", ) .value_name("id"), ) .arg_package("Package to display a report for"), ) } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { match args.subcommand() { Some(("future-incompatibilities", args)) => report_future_incompatibilities(config, args), Some((cmd, _)) => { unreachable!("unexpected command {}", cmd) } None => { unreachable!("unexpected command") } } } fn report_future_incompatibilities(config: &Config, args: &ArgMatches) -> CliResult { let ws = args.workspace(config)?; let reports = OnDiskReports::load(&ws)?; let id = args .value_of_u32("id")? 
.unwrap_or_else(|| reports.last_id()); let krate = args.get_one::("package").map(String::as_str); let report = reports.get_report(id, config, krate)?; drop_println!(config, "{}", REPORT_PREAMBLE); drop(config.shell().print_ansi_stdout(report.as_bytes())); Ok(()) } cargo-0.66.0/src/bin/cargo/commands/run.rs000066400000000000000000000071501432416201200203010ustar00rootroot00000000000000use crate::command_prelude::*; use crate::util::restricted_names::is_glob_pattern; use cargo::core::Verbosity; use cargo::ops::{self, CompileFilter, Packages}; use cargo_util::ProcessError; pub fn cli() -> App { subcommand("run") // subcommand aliases are handled in aliased_command() // .alias("r") .trailing_var_arg(true) .about("Run a binary or example of the local package") .arg_quiet() .arg( Arg::new("args") .value_parser(value_parser!(std::ffi::OsString)) .multiple_values(true), ) .arg_targets_bin_example( "Name of the bin target to run", "Name of the example target to run", ) .arg_package("Package with the target to run") .arg_jobs() .arg_release("Build artifacts in release mode, with optimizations") .arg_profile("Build artifacts with the specified profile") .arg_features() .arg_target_triple("Build for the target triple") .arg_target_dir() .arg_manifest_path() .arg_message_format() .arg_unit_graph() .arg_ignore_rust_version() .arg_timings() .after_help("Run `cargo help run` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let ws = args.workspace(config)?; let mut compile_opts = args.compile_options( config, CompileMode::Build, Some(&ws), ProfileChecking::Custom, )?; // Disallow `spec` to be an glob pattern if let Packages::Packages(opt_in) = &compile_opts.spec { if let Some(pattern) = opt_in.iter().find(|s| is_glob_pattern(s)) { return Err(anyhow::anyhow!( "`cargo run` does not support glob pattern `{}` on package selection", pattern, ) .into()); } } if !args.contains_id("example") && !args.contains_id("bin") { let 
default_runs: Vec<_> = compile_opts .spec .get_packages(&ws)? .iter() .filter_map(|pkg| pkg.manifest().default_run()) .collect(); if let [bin] = &default_runs[..] { compile_opts.filter = CompileFilter::single_bin(bin.to_string()); } else { // ops::run will take care of errors if len pkgs != 1. compile_opts.filter = CompileFilter::Default { // Force this to false because the code in ops::run is not // able to pre-check features before compilation starts to // enforce that only 1 binary is built. required_features_filterable: false, }; } }; ops::run(&ws, &compile_opts, &values_os(args, "args")).map_err(|err| { let proc_err = match err.downcast_ref::() { Some(e) => e, None => return CliError::new(err, 101), }; // If we never actually spawned the process then that sounds pretty // bad and we always want to forward that up. let exit_code = match proc_err.code { Some(exit) => exit, None => return CliError::new(err, 101), }; // If `-q` was passed then we suppress extra error information about // a failed process, we assume the process itself printed out enough // information about why it failed so we don't do so as well let is_quiet = config.shell().verbosity() == Verbosity::Quiet; if is_quiet { CliError::code(exit_code) } else { CliError::new(err, exit_code) } }) } cargo-0.66.0/src/bin/cargo/commands/rustc.rs000066400000000000000000000064331432416201200206400ustar00rootroot00000000000000use crate::command_prelude::*; use cargo::ops; use cargo::util::interning::InternedString; const PRINT_ARG_NAME: &str = "print"; const CRATE_TYPE_ARG_NAME: &str = "crate-type"; pub fn cli() -> App { subcommand("rustc") .trailing_var_arg(true) .about("Compile a package, and pass extra options to the compiler") .arg_quiet() .arg(Arg::new("args").multiple_values(true).help("Rustc flags")) .arg_package("Package to build") .arg_jobs() .arg_targets_all( "Build only this package's library", "Build only the specified binary", "Build all binaries", "Build only the specified example", "Build all 
examples", "Build only the specified test target", "Build all tests", "Build only the specified bench target", "Build all benches", "Build all targets", ) .arg_release("Build artifacts in release mode, with optimizations") .arg_profile("Build artifacts with the specified profile") .arg_features() .arg_target_triple("Target triple which compiles will be for") .arg( opt( PRINT_ARG_NAME, "Output compiler information without compiling", ) .value_name("INFO"), ) .arg(multi_opt( CRATE_TYPE_ARG_NAME, "CRATE-TYPE", "Comma separated list of types of crates for the compiler to emit", )) .arg_target_dir() .arg_manifest_path() .arg_message_format() .arg_unit_graph() .arg_ignore_rust_version() .arg_future_incompat_report() .arg_timings() .after_help("Run `cargo help rustc` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let ws = args.workspace(config)?; // This is a legacy behavior that changes the behavior based on the profile. // If we want to support this more formally, I think adding a --mode flag // would be warranted. 
let mode = match args.get_one::("profile").map(String::as_str) { Some("test") => CompileMode::Test, Some("bench") => CompileMode::Bench, Some("check") => CompileMode::Check { test: false }, _ => CompileMode::Build, }; let mut compile_opts = args.compile_options_for_single_package( config, mode, Some(&ws), ProfileChecking::LegacyRustc, )?; if compile_opts.build_config.requested_profile == "check" { compile_opts.build_config.requested_profile = InternedString::new("dev"); } let target_args = values(args, "args"); compile_opts.target_rustc_args = if target_args.is_empty() { None } else { Some(target_args) }; if let Some(opt_value) = args.get_one::(PRINT_ARG_NAME) { config .cli_unstable() .fail_if_stable_opt(PRINT_ARG_NAME, 9357)?; ops::print(&ws, &compile_opts, opt_value)?; return Ok(()); } let crate_types = values(args, CRATE_TYPE_ARG_NAME); compile_opts.target_rustc_crate_types = if crate_types.is_empty() { None } else { Some(crate_types) }; ops::compile(&ws, &compile_opts)?; Ok(()) } cargo-0.66.0/src/bin/cargo/commands/rustdoc.rs000066400000000000000000000037371432416201200211670ustar00rootroot00000000000000use cargo::ops::{self, DocOptions}; use crate::command_prelude::*; pub fn cli() -> App { subcommand("rustdoc") .trailing_var_arg(true) .about("Build a package's documentation, using specified custom flags.") .arg_quiet() .arg(Arg::new("args").multiple_values(true)) .arg(flag( "open", "Opens the docs in a browser after the operation", )) .arg_package("Package to document") .arg_jobs() .arg_targets_all( "Build only this package's library", "Build only the specified binary", "Build all binaries", "Build only the specified example", "Build all examples", "Build only the specified test target", "Build all tests", "Build only the specified bench target", "Build all benches", "Build all targets", ) .arg_release("Build artifacts in release mode, with optimizations") .arg_profile("Build artifacts with the specified profile") .arg_features() .arg_target_triple("Build for 
the target triple") .arg_target_dir() .arg_manifest_path() .arg_message_format() .arg_unit_graph() .arg_ignore_rust_version() .arg_timings() .after_help("Run `cargo help rustdoc` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let ws = args.workspace(config)?; let mut compile_opts = args.compile_options_for_single_package( config, CompileMode::Doc { deps: false }, Some(&ws), ProfileChecking::Custom, )?; let target_args = values(args, "args"); compile_opts.target_rustdoc_args = if target_args.is_empty() { None } else { Some(target_args) }; let doc_opts = DocOptions { open_result: args.flag("open"), compile_opts, }; ops::doc(&ws, &doc_opts)?; Ok(()) } cargo-0.66.0/src/bin/cargo/commands/search.rs000066400000000000000000000021031432416201200207330ustar00rootroot00000000000000use crate::command_prelude::*; use std::cmp::min; use cargo::ops; pub fn cli() -> App { subcommand("search") .about("Search packages in crates.io") .arg_quiet() .arg(Arg::new("query").multiple_values(true)) .arg_index() .arg( opt( "limit", "Limit the number of results (default: 10, max: 100)", ) .value_name("LIMIT"), ) .arg(opt("registry", "Registry to use").value_name("REGISTRY")) .after_help("Run `cargo help search` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let registry = args.registry(config)?; let index = args.index()?; let limit = args.value_of_u32("limit")?; let limit = min(100, limit.unwrap_or(10)); let query: Vec<&str> = args .get_many::("query") .unwrap_or_default() .map(String::as_str) .collect(); let query: String = query.join("+"); ops::search(&query, config, index, limit, registry)?; Ok(()) } cargo-0.66.0/src/bin/cargo/commands/test.rs000066400000000000000000000101471432416201200204540ustar00rootroot00000000000000use crate::command_prelude::*; use cargo::ops; pub fn cli() -> App { subcommand("test") // Subcommand aliases are handled in `aliased_command()`. 
// .alias("t") .trailing_var_arg(true) .about("Execute all unit and integration tests and build examples of a local package") .arg( Arg::new("TESTNAME") .help("If specified, only run tests containing this string in their names"), ) .arg( Arg::new("args") .help("Arguments for the test binary") .multiple_values(true) .last(true), ) .arg( flag( "quiet", "Display one character per test instead of one line", ) .short('q'), ) .arg_targets_all( "Test only this package's library unit tests", "Test only the specified binary", "Test all binaries", "Test only the specified example", "Test all examples", "Test only the specified test target", "Test all tests", "Test only the specified bench target", "Test all benches", "Test all targets", ) .arg(flag("doc", "Test only this library's documentation")) .arg(flag("no-run", "Compile, but don't run tests")) .arg(flag("no-fail-fast", "Run all tests regardless of failure")) .arg_package_spec( "Package to run tests for", "Test all packages in the workspace", "Exclude packages from the test", ) .arg_jobs() .arg_release("Build artifacts in release mode, with optimizations") .arg_profile("Build artifacts with the specified profile") .arg_features() .arg_target_triple("Build for the target triple") .arg_target_dir() .arg_manifest_path() .arg_ignore_rust_version() .arg_message_format() .arg_unit_graph() .arg_future_incompat_report() .arg_timings() .after_help( "Run `cargo help test` for more detailed information.\n\ Run `cargo test -- --help` for test binary options.\n", ) } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let ws = args.workspace(config)?; let mut compile_opts = args.compile_options( config, CompileMode::Test, Some(&ws), ProfileChecking::Custom, )?; compile_opts.build_config.requested_profile = args.get_profile_name(config, "test", ProfileChecking::Custom)?; // `TESTNAME` is actually an argument of the test binary, but it's // important, so we explicitly mention it and reconfigure. 
let test_name = args.get_one::("TESTNAME"); let test_args = args.get_one::("TESTNAME").into_iter(); let test_args = test_args.chain(args.get_many::("args").unwrap_or_default()); let test_args = test_args.map(String::as_str).collect::>(); let no_run = args.flag("no-run"); let doc = args.flag("doc"); if doc { if compile_opts.filter.is_specific() { return Err( anyhow::format_err!("Can't mix --doc with other target selecting options").into(), ); } if no_run { return Err(anyhow::format_err!("Can't skip running doc tests with --no-run").into()); } compile_opts.build_config.mode = CompileMode::Doctest; compile_opts.filter = ops::CompileFilter::lib_only(); } else if test_name.is_some() && !compile_opts.filter.is_specific() { // If arg `TESTNAME` is provided, assumed that the user knows what // exactly they wants to test, so we use `all_test_targets` to // avoid compiling unnecessary targets such as examples, which are // included by the logic of default target filter. compile_opts.filter = ops::CompileFilter::all_test_targets(); } let ops = ops::TestOptions { no_run, no_fail_fast: args.flag("no-fail-fast"), compile_opts, }; ops::run_tests(&ws, &ops, &test_args) } cargo-0.66.0/src/bin/cargo/commands/tree.rs000066400000000000000000000245711432416201200204420ustar00rootroot00000000000000use crate::cli; use crate::command_prelude::*; use anyhow::{bail, format_err}; use cargo::core::dependency::DepKind; use cargo::ops::tree::{self, EdgeKind}; use cargo::ops::Packages; use cargo::util::print_available_packages; use cargo::util::CargoResult; use std::collections::HashSet; use std::str::FromStr; pub fn cli() -> App { subcommand("tree") .about("Display a tree visualization of a dependency graph") .arg_quiet() .arg_manifest_path() .arg_package_spec_no_all( "Package to be used as the root of the tree", "Display the tree for all packages in the workspace", "Exclude specific workspace members", ) .arg( flag("all", "Deprecated, use --no-dedupe instead") .short('a') .hide(true), ) 
.arg(flag("all-targets", "Deprecated, use --target=all instead").hide(true)) .arg_features() .arg_target_triple( "Filter dependencies matching the given target-triple (default host platform). \ Pass `all` to include all targets.", ) .arg(flag("no-dev-dependencies", "Deprecated, use -e=no-dev instead").hide(true)) .arg( multi_opt( "edges", "KINDS", "The kinds of dependencies to display \ (features, normal, build, dev, all, \ no-normal, no-build, no-dev, no-proc-macro)", ) .short('e'), ) .arg( optional_multi_opt( "invert", "SPEC", "Invert the tree direction and focus on the given package", ) .short('i'), ) .arg(multi_opt( "prune", "SPEC", "Prune the given package from the display of the dependency tree", )) .arg(opt("depth", "Maximum display depth of the dependency tree").value_name("DEPTH")) .arg(flag("no-indent", "Deprecated, use --prefix=none instead").hide(true)) .arg(flag("prefix-depth", "Deprecated, use --prefix=depth instead").hide(true)) .arg( opt( "prefix", "Change the prefix (indentation) of how each entry is displayed", ) .value_name("PREFIX") .value_parser(["depth", "indent", "none"]) .default_value("indent"), ) .arg(flag( "no-dedupe", "Do not de-duplicate (repeats all shared dependencies)", )) .arg( flag( "duplicates", "Show only dependencies which come in multiple versions (implies -i)", ) .short('d') .alias("duplicate"), ) .arg( opt("charset", "Character set to use in output: utf8, ascii") .value_name("CHARSET") .value_parser(["utf8", "ascii"]) .default_value("utf8"), ) .arg( opt("format", "Format string used for printing dependencies") .value_name("FORMAT") .short('f') .default_value("{p}"), ) .arg( // Backwards compatibility with old cargo-tree. 
flag("version", "Print version info and exit") .short('V') .hide(true), ) .after_help("Run `cargo help tree` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { if args.flag("version") { let verbose = args.verbose() > 0; let version = cli::get_version_string(verbose); cargo::drop_print!(config, "{}", version); return Ok(()); } let prefix = if args.flag("no-indent") { config .shell() .warn("the --no-indent flag has been changed to --prefix=none")?; "none" } else if args.flag("prefix-depth") { config .shell() .warn("the --prefix-depth flag has been changed to --prefix=depth")?; "depth" } else { args.get_one::("prefix").unwrap().as_str() }; let prefix = tree::Prefix::from_str(prefix).map_err(|e| anyhow::anyhow!("{}", e))?; let no_dedupe = args.flag("no-dedupe") || args.flag("all"); if args.flag("all") { config.shell().warn( "The `cargo tree` --all flag has been changed to --no-dedupe, \ and may be removed in a future version.\n\ If you are looking to display all workspace members, use the --workspace flag.", )?; } let targets = if args.flag("all-targets") { config .shell() .warn("the --all-targets flag has been changed to --target=all")?; vec!["all".to_string()] } else { args._values_of("target") }; let target = tree::Target::from_cli(targets); let (edge_kinds, no_proc_macro) = parse_edge_kinds(config, args)?; let graph_features = edge_kinds.contains(&EdgeKind::Feature); let pkgs_to_prune = args._values_of("prune"); let packages = args.packages_from_flags()?; let mut invert = args .get_many::("invert") .map_or_else(|| Vec::new(), |is| is.map(|s| s.to_string()).collect()); if args.is_present_with_zero_values("invert") { match &packages { Packages::Packages(ps) => { // Backwards compatibility with old syntax of `cargo tree -i -p foo`. 
invert.extend(ps.clone()); } _ => { return Err(format_err!( "The `-i` flag requires a package name.\n\ \n\ The `-i` flag is used to inspect the reverse dependencies of a specific\n\ package. It will invert the tree and display the packages that depend on the\n\ given package.\n\ \n\ Note that in a workspace, by default it will only display the package's\n\ reverse dependencies inside the tree of the workspace member in the current\n\ directory. The --workspace flag can be used to extend it so that it will show\n\ the package's reverse dependencies across the entire workspace. The -p flag\n\ can be used to display the package's reverse dependencies only with the\n\ subtree of the package given to -p.\n\ " ) .into()); } } } let ws = args.workspace(config)?; if args.is_present_with_zero_values("package") { print_available_packages(&ws)?; } let charset = tree::Charset::from_str(args.get_one::("charset").unwrap()) .map_err(|e| anyhow::anyhow!("{}", e))?; let opts = tree::TreeOptions { cli_features: args.cli_features()?, packages, target, edge_kinds, invert, pkgs_to_prune, prefix, no_dedupe, duplicates: args.flag("duplicates"), charset, format: args.get_one::("format").cloned().unwrap(), graph_features, max_display_depth: args.value_of_u32("depth")?.unwrap_or(u32::MAX), no_proc_macro, }; if opts.graph_features && opts.duplicates { return Err(format_err!("the `-e features` flag does not support `--duplicates`").into()); } tree::build_and_print(&ws, &opts)?; Ok(()) } /// Parses `--edges` option. /// /// Returns a tuple of `EdgeKind` map and `no_proc_marco` flag. 
fn parse_edge_kinds(config: &Config, args: &ArgMatches) -> CargoResult<(HashSet, bool)> { let (kinds, no_proc_macro) = { let mut no_proc_macro = false; let mut kinds = args.get_many::("edges").map_or_else( || Vec::new(), |es| { es.flat_map(|e| e.split(',')) .filter(|e| { no_proc_macro = *e == "no-proc-macro"; !no_proc_macro }) .collect() }, ); if args.flag("no-dev-dependencies") { config .shell() .warn("the --no-dev-dependencies flag has changed to -e=no-dev")?; kinds.push("no-dev"); } if kinds.is_empty() { kinds.extend(&["normal", "build", "dev"]); } (kinds, no_proc_macro) }; let mut result = HashSet::new(); let insert_defaults = |result: &mut HashSet| { result.insert(EdgeKind::Dep(DepKind::Normal)); result.insert(EdgeKind::Dep(DepKind::Build)); result.insert(EdgeKind::Dep(DepKind::Development)); }; let unknown = |k| { bail!( "unknown edge kind `{}`, valid values are \ \"normal\", \"build\", \"dev\", \ \"no-normal\", \"no-build\", \"no-dev\", \"no-proc-macro\", \ \"features\", or \"all\"", k ) }; if kinds.iter().any(|k| k.starts_with("no-")) { insert_defaults(&mut result); for kind in &kinds { match *kind { "no-normal" => result.remove(&EdgeKind::Dep(DepKind::Normal)), "no-build" => result.remove(&EdgeKind::Dep(DepKind::Build)), "no-dev" => result.remove(&EdgeKind::Dep(DepKind::Development)), "features" => result.insert(EdgeKind::Feature), "normal" | "build" | "dev" | "all" => { bail!( "`{}` dependency kind cannot be mixed with \ \"no-normal\", \"no-build\", or \"no-dev\" \ dependency kinds", kind ) } k => return unknown(k), }; } return Ok((result, no_proc_macro)); } for kind in &kinds { match *kind { "all" => { insert_defaults(&mut result); result.insert(EdgeKind::Feature); } "features" => { result.insert(EdgeKind::Feature); } "normal" => { result.insert(EdgeKind::Dep(DepKind::Normal)); } "build" => { result.insert(EdgeKind::Dep(DepKind::Build)); } "dev" => { result.insert(EdgeKind::Dep(DepKind::Development)); } k => return unknown(k), } } if kinds.len() == 1 && 
kinds[0] == "features" { insert_defaults(&mut result); } Ok((result, no_proc_macro)) } cargo-0.66.0/src/bin/cargo/commands/uninstall.rs000066400000000000000000000022341432416201200215040ustar00rootroot00000000000000use crate::command_prelude::*; use cargo::ops; pub fn cli() -> App { subcommand("uninstall") .about("Remove a Rust binary") .arg_quiet() .arg(Arg::new("spec").multiple_values(true)) .arg_package_spec_simple("Package to uninstall") .arg(multi_opt("bin", "NAME", "Only uninstall the binary NAME")) .arg(opt("root", "Directory to uninstall packages from").value_name("DIR")) .after_help("Run `cargo help uninstall` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let root = args.get_one::("root").map(String::as_str); if args.is_present_with_zero_values("package") { return Err(anyhow::anyhow!( "\"--package \" requires a SPEC format value.\n\ Run `cargo help pkgid` for more information about SPEC format." ) .into()); } let specs = args .get_many::("spec") .unwrap_or_else(|| args.get_many::("package").unwrap_or_default()) .map(String::as_str) .collect(); ops::uninstall(root, specs, &values(args, "bin"), config)?; Ok(()) } cargo-0.66.0/src/bin/cargo/commands/update.rs000066400000000000000000000026661432416201200207660ustar00rootroot00000000000000use crate::command_prelude::*; use cargo::ops::{self, UpdateOptions}; use cargo::util::print_available_packages; pub fn cli() -> App { subcommand("update") .about("Update dependencies as recorded in the local lock file") .arg_quiet() .arg(flag("workspace", "Only update the workspace packages").short('w')) .arg_package_spec_simple("Package to update") .arg(flag( "aggressive", "Force updating all dependencies of SPEC as well when used with -p", )) .arg_dry_run("Don't actually write the lockfile") .arg( opt( "precise", "Update a single dependency to exactly PRECISE when used with -p", ) .value_name("PRECISE"), ) .arg_manifest_path() .after_help("Run `cargo help update` for 
more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let ws = args.workspace(config)?; if args.is_present_with_zero_values("package") { print_available_packages(&ws)?; } let update_opts = UpdateOptions { aggressive: args.flag("aggressive"), precise: args.get_one::("precise").map(String::as_str), to_update: values(args, "package"), dry_run: args.dry_run(), workspace: args.flag("workspace"), config, }; ops::update_lockfile(&ws, &update_opts)?; Ok(()) } cargo-0.66.0/src/bin/cargo/commands/vendor.rs000066400000000000000000000067451432416201200210030ustar00rootroot00000000000000use crate::command_prelude::*; use cargo::ops; use std::path::PathBuf; pub fn cli() -> App { subcommand("vendor") .about("Vendor all dependencies for a project locally") .arg_quiet() .arg_manifest_path() .arg( Arg::new("path") .value_parser(clap::value_parser!(PathBuf)) .help("Where to vendor crates (`vendor` by default)"), ) .arg(flag( "no-delete", "Don't delete older crates in the vendor directory", )) .arg( Arg::new("tomls") .short('s') .long("sync") .help("Additional `Cargo.toml` to sync and vendor") .value_name("TOML") .value_parser(clap::value_parser!(PathBuf)) .action(clap::ArgAction::Append), ) .arg(flag( "respect-source-config", "Respect `[source]` config in `.cargo/config`", )) .arg(flag( "versioned-dirs", "Always include version in subdir name", )) .arg(flag("no-merge-sources", "Not supported").hide(true)) .arg(flag("relative-path", "Not supported").hide(true)) .arg(flag("only-git-deps", "Not supported").hide(true)) .arg(flag("disallow-duplicates", "Not supported").hide(true)) .after_help("Run `cargo help vendor` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { // We're doing the vendoring operation ourselves, so we don't actually want // to respect any of the `source` configuration in Cargo itself. 
That's // intended for other consumers of Cargo, but we want to go straight to the // source, e.g. crates.io, to fetch crates. if !args.flag("respect-source-config") { config.values_mut()?.remove("source"); } // When we moved `cargo vendor` into Cargo itself we didn't stabilize a few // flags, so try to provide a helpful error message in that case to ensure // that users currently using the flag aren't tripped up. let crates_io_cargo_vendor_flag = if args.flag("no-merge-sources") { Some("--no-merge-sources") } else if args.flag("relative-path") { Some("--relative-path") } else if args.flag("only-git-deps") { Some("--only-git-deps") } else if args.flag("disallow-duplicates") { Some("--disallow-duplicates") } else { None }; if let Some(flag) = crates_io_cargo_vendor_flag { return Err(anyhow::format_err!( "\ the crates.io `cargo vendor` command has now been merged into Cargo itself and does not support the flag `{}` currently; to continue using the flag you can execute `cargo-vendor vendor ...`, and if you would like to see this flag supported in Cargo itself please feel free to file an issue at https://github.com/rust-lang/cargo/issues/new ", flag ) .into()); } let ws = args.workspace(config)?; let path = args .get_one::("path") .cloned() .unwrap_or_else(|| PathBuf::from("vendor")); ops::vendor( &ws, &ops::VendorOptions { no_delete: args.flag("no-delete"), destination: &path, versioned_dirs: args.flag("versioned-dirs"), extra: args .get_many::("tomls") .unwrap_or_default() .cloned() .collect(), }, )?; Ok(()) } cargo-0.66.0/src/bin/cargo/commands/verify_project.rs000066400000000000000000000012561432416201200225300ustar00rootroot00000000000000use crate::command_prelude::*; use std::collections::HashMap; use std::process; pub fn cli() -> App { subcommand("verify-project") .about("Check correctness of crate manifest") .arg_quiet() .arg_manifest_path() .after_help("Run `cargo help verify-project` for more detailed information.\n") } pub fn exec(config: &mut Config, args: 
&ArgMatches) -> CliResult { if let Err(e) = args.workspace(config) { config .shell() .print_json(&HashMap::from([("invalid", e.to_string())]))?; process::exit(1) } config .shell() .print_json(&HashMap::from([("success", "true")]))?; Ok(()) } cargo-0.66.0/src/bin/cargo/commands/version.rs000066400000000000000000000007151432416201200211620ustar00rootroot00000000000000use crate::cli; use crate::command_prelude::*; pub fn cli() -> App { subcommand("version") .about("Show version information") .arg_quiet() .after_help("Run `cargo help version` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let verbose = args.verbose() > 0; let version = cli::get_version_string(verbose); cargo::drop_print!(config, "{}", version); Ok(()) } cargo-0.66.0/src/bin/cargo/commands/yank.rs000066400000000000000000000040431432416201200204350ustar00rootroot00000000000000use crate::command_prelude::*; use cargo::ops; pub fn cli() -> App { subcommand("yank") .about("Remove a pushed crate from the index") .arg_quiet() .arg(Arg::new("crate")) .arg( opt("version", "The version to yank or un-yank") .alias("vers") .value_name("VERSION"), ) .arg(flag( "undo", "Undo a yank, putting a version back into the index", )) .arg(opt("index", "Registry index to yank from").value_name("INDEX")) .arg(opt("token", "API token to use when authenticating").value_name("TOKEN")) .arg(opt("registry", "Registry to use").value_name("REGISTRY")) .after_help("Run `cargo help yank` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { config.load_credentials()?; let registry = args.registry(config)?; let (krate, version) = resolve_crate( args.get_one::("crate").map(String::as_str), args.get_one::("version").map(String::as_str), )?; if version.is_none() { return Err(anyhow::format_err!("`--version` is required").into()); } ops::yank( config, krate.map(|s| s.to_string()), version.map(|s| s.to_string()), 
args.get_one::("token").cloned(), args.get_one::("index").cloned(), args.flag("undo"), registry, )?; Ok(()) } fn resolve_crate<'k>( mut krate: Option<&'k str>, mut version: Option<&'k str>, ) -> crate::CargoResult<(Option<&'k str>, Option<&'k str>)> { if let Some((k, v)) = krate.and_then(|k| k.split_once('@')) { if version.is_some() { anyhow::bail!("cannot specify both `@{v}` and `--version`"); } if k.is_empty() { // by convention, arguments starting with `@` are response files anyhow::bail!("missing crate name for `@{v}`"); } krate = Some(k); version = Some(v); } Ok((krate, version)) } cargo-0.66.0/src/bin/cargo/main.rs000066400000000000000000000230621432416201200166200ustar00rootroot00000000000000#![warn(rust_2018_idioms)] // while we're getting used to 2018 #![allow(clippy::all)] use cargo::util::toml::StringOrVec; use cargo::util::CliError; use cargo::util::{self, closest_msg, command_prelude, CargoResult, CliResult, Config}; use cargo_util::{ProcessBuilder, ProcessError}; use std::collections::BTreeMap; use std::env; use std::fs; use std::path::{Path, PathBuf}; mod cli; mod commands; use crate::command_prelude::*; fn main() { #[cfg(feature = "pretty-env-logger")] pretty_env_logger::init_custom_env("CARGO_LOG"); #[cfg(not(feature = "pretty-env-logger"))] env_logger::init_from_env("CARGO_LOG"); let mut config = cli::LazyConfig::new(); let result = if let Some(lock_addr) = cargo::ops::fix_get_proxy_lock_addr() { cargo::ops::fix_exec_rustc(config.get(), &lock_addr).map_err(|e| CliError::from(e)) } else { let _token = cargo::util::job::setup(); cli::main(&mut config) }; match result { Err(e) => cargo::exit_with_error(e, &mut config.get_mut().shell()), Ok(()) => {} } } /// Table for defining the aliases which come builtin in `Cargo`. /// The contents are structured as: `(alias, aliased_command, description)`. 
const BUILTIN_ALIASES: [(&str, &str, &str); 5] = [ ("b", "build", "alias: build"), ("c", "check", "alias: check"), ("d", "doc", "alias: doc"), ("r", "run", "alias: run"), ("t", "test", "alias: test"), ]; /// Function which contains the list of all of the builtin aliases and it's /// corresponding execs represented as &str. fn builtin_aliases_execs(cmd: &str) -> Option<&(&str, &str, &str)> { BUILTIN_ALIASES.iter().find(|alias| alias.0 == cmd) } fn aliased_command(config: &Config, command: &str) -> CargoResult>> { let alias_name = format!("alias.{}", command); let user_alias = match config.get_string(&alias_name) { Ok(Some(record)) => Some( record .val .split_whitespace() .map(|s| s.to_string()) .collect(), ), Ok(None) => None, Err(_) => config.get::>>(&alias_name)?, }; let result = user_alias.or_else(|| { builtin_aliases_execs(command).map(|command_str| vec![command_str.1.to_string()]) }); Ok(result) } /// List all runnable commands fn list_commands(config: &Config) -> BTreeMap { let prefix = "cargo-"; let suffix = env::consts::EXE_SUFFIX; let mut commands = BTreeMap::new(); for dir in search_directories(config) { let entries = match fs::read_dir(dir) { Ok(entries) => entries, _ => continue, }; for entry in entries.filter_map(|e| e.ok()) { let path = entry.path(); let filename = match path.file_name().and_then(|s| s.to_str()) { Some(filename) => filename, _ => continue, }; if !filename.starts_with(prefix) || !filename.ends_with(suffix) { continue; } if is_executable(entry.path()) { let end = filename.len() - suffix.len(); commands.insert( filename[prefix.len()..end].to_string(), CommandInfo::External { path: path.clone() }, ); } } } for cmd in commands::builtin() { commands.insert( cmd.get_name().to_string(), CommandInfo::BuiltIn { about: cmd.get_about().map(|s| s.to_string()), }, ); } // Add the builtin_aliases and them descriptions to the // `commands` `BTreeMap`. 
for command in &BUILTIN_ALIASES { commands.insert( command.0.to_string(), CommandInfo::BuiltIn { about: Some(command.2.to_string()), }, ); } // Add the user-defined aliases if let Ok(aliases) = config.get::>("alias") { for (name, target) in aliases.iter() { commands.insert( name.to_string(), CommandInfo::Alias { target: target.clone(), }, ); } } // `help` is special, so it needs to be inserted separately. commands.insert( "help".to_string(), CommandInfo::BuiltIn { about: Some("Displays help for a cargo subcommand".to_string()), }, ); commands } fn find_external_subcommand(config: &Config, cmd: &str) -> Option { let command_exe = format!("cargo-{}{}", cmd, env::consts::EXE_SUFFIX); search_directories(config) .iter() .map(|dir| dir.join(&command_exe)) .find(|file| is_executable(file)) } fn execute_external_subcommand(config: &Config, cmd: &str, args: &[&str]) -> CliResult { let path = find_external_subcommand(config, cmd); let command = match path { Some(command) => command, None => { let err = if cmd.starts_with('+') { anyhow::format_err!( "no such subcommand: `{}`\n\n\t\ Cargo does not handle `+toolchain` directives.\n\t\ Did you mean to invoke `cargo` through `rustup` instead?", cmd ) } else { let suggestions = list_commands(config); let did_you_mean = closest_msg(cmd, suggestions.keys(), |c| c); anyhow::format_err!( "no such subcommand: `{}`{}\n\n\t\ View all installed commands with `cargo --list`", cmd, did_you_mean ) }; return Err(CliError::new(err, 101)); } }; let cargo_exe = config.cargo_exe()?; let mut cmd = ProcessBuilder::new(&command); cmd.env(cargo::CARGO_ENV, cargo_exe).args(args); if let Some(client) = config.jobserver_from_env() { cmd.inherit_jobserver(client); } let err = match cmd.exec_replace() { Ok(()) => return Ok(()), Err(e) => e, }; if let Some(perr) = err.downcast_ref::() { if let Some(code) = perr.code { return Err(CliError::code(code)); } } Err(CliError::new(err, 101)) } #[cfg(unix)] fn is_executable>(path: P) -> bool { use 
std::os::unix::prelude::*; fs::metadata(path) .map(|metadata| metadata.is_file() && metadata.permissions().mode() & 0o111 != 0) .unwrap_or(false) } #[cfg(windows)] fn is_executable>(path: P) -> bool { path.as_ref().is_file() } fn search_directories(config: &Config) -> Vec { let mut path_dirs = if let Some(val) = env::var_os("PATH") { env::split_paths(&val).collect() } else { vec![] }; let home_bin = config.home().clone().into_path_unlocked().join("bin"); // If any of that PATH elements contains `home_bin`, do not // add it again. This is so that the users can control priority // of it using PATH, while preserving the historical // behavior of preferring it over system global directories even // when not in PATH at all. // See https://github.com/rust-lang/cargo/issues/11020 for details. // // Note: `p == home_bin` will ignore trailing slash, but we don't // `canonicalize` the paths. if !path_dirs.iter().any(|p| p == &home_bin) { path_dirs.insert(0, home_bin); }; path_dirs } fn init_git_transports(config: &Config) { // Only use a custom transport if any HTTP options are specified, // such as proxies or custom certificate authorities. The custom // transport, however, is not as well battle-tested. match cargo::ops::needs_custom_http_transport(config) { Ok(true) => {} _ => return, } let handle = match cargo::ops::http_handle(config) { Ok(handle) => handle, Err(..) => return, }; // The unsafety of the registration function derives from two aspects: // // 1. This call must be synchronized with all other registration calls as // well as construction of new transports. // 2. The argument is leaked. // // We're clear on point (1) because this is only called at the start of this // binary (we know what the state of the world looks like) and we're mostly // clear on point (2) because we'd only free it after everything is done // anyway unsafe { git2_curl::register(handle); } // Disabling the owner validation in git can, in theory, lead to code execution // vulnerabilities. 
However, libgit2 does not launch executables, which is the foundation of // the original security issue. Meanwhile, issues with refusing to load git repos in // `CARGO_HOME` for example will likely be very frustrating for users. So, we disable the // validation. // // For further discussion of Cargo's current interactions with git, see // // https://github.com/rust-lang/rfcs/pull/3279 // // and in particular the subsection on "Git support". // // Note that we only disable this when Cargo is run as a binary. If Cargo is used as a library, // this code won't be invoked. Instead, developers will need to explicitly disable the // validation in their code. This is inconvenient, but won't accidentally open consuming // applications up to security issues if they use git2 to open repositories elsewhere in their // code. unsafe { if git2::opts::set_verify_owner_validation(false).is_err() { return; } } } cargo-0.66.0/src/cargo/000077500000000000000000000000001432416201200145535ustar00rootroot00000000000000cargo-0.66.0/src/cargo/core/000077500000000000000000000000001432416201200155035ustar00rootroot00000000000000cargo-0.66.0/src/cargo/core/compiler/000077500000000000000000000000001432416201200173155ustar00rootroot00000000000000cargo-0.66.0/src/cargo/core/compiler/artifact.rs000066400000000000000000000044021432416201200214600ustar00rootroot00000000000000/// Generate artifact information from unit dependencies for configuring the compiler environment. use crate::core::compiler::unit_graph::UnitDep; use crate::core::compiler::{Context, CrateType, FileFlavor, Unit}; use crate::core::TargetKind; use crate::CargoResult; use std::collections::HashMap; use std::ffi::OsString; /// Return all environment variables for the given unit-dependencies /// if artifacts are present. 
pub fn get_env( cx: &Context<'_, '_>, dependencies: &[UnitDep], ) -> CargoResult> { let mut env = HashMap::new(); for unit_dep in dependencies.iter().filter(|d| d.unit.artifact.is_true()) { for artifact_path in cx .outputs(&unit_dep.unit)? .iter() .filter_map(|f| (f.flavor == FileFlavor::Normal).then(|| &f.path)) { let artifact_type_upper = unit_artifact_type_name_upper(&unit_dep.unit); let dep_name = unit_dep.dep_name.unwrap_or(unit_dep.unit.pkg.name()); let dep_name_upper = dep_name.to_uppercase().replace("-", "_"); let var = format!("CARGO_{}_DIR_{}", artifact_type_upper, dep_name_upper); let path = artifact_path.parent().expect("parent dir for artifacts"); env.insert(var, path.to_owned().into()); let var = format!( "CARGO_{}_FILE_{}_{}", artifact_type_upper, dep_name_upper, unit_dep.unit.target.name() ); env.insert(var, artifact_path.to_owned().into()); if unit_dep.unit.target.name() == dep_name.as_str() { let var = format!("CARGO_{}_FILE_{}", artifact_type_upper, dep_name_upper,); env.insert(var, artifact_path.to_owned().into()); } } } Ok(env) } fn unit_artifact_type_name_upper(unit: &Unit) -> &'static str { match unit.target.kind() { TargetKind::Lib(kinds) => match kinds.as_slice() { &[CrateType::Cdylib] => "CDYLIB", &[CrateType::Staticlib] => "STATICLIB", invalid => unreachable!("BUG: artifacts cannot be of type {:?}", invalid), }, TargetKind::Bin => "BIN", invalid => unreachable!("BUG: artifacts cannot be of type {:?}", invalid), } } cargo-0.66.0/src/cargo/core/compiler/build_config.rs000066400000000000000000000222641432416201200223150ustar00rootroot00000000000000use crate::core::compiler::CompileKind; use crate::util::interning::InternedString; use crate::util::{CargoResult, Config, RustfixDiagnosticServer}; use anyhow::{bail, Context as _}; use cargo_util::ProcessBuilder; use serde::ser; use std::cell::RefCell; use std::path::PathBuf; use std::thread::available_parallelism; /// Configuration information for a rustc build. 
#[derive(Debug)] pub struct BuildConfig { /// The requested kind of compilation for this session pub requested_kinds: Vec, /// Number of rustc jobs to run in parallel. pub jobs: u32, /// Do not abort the build as soon as there is an error. pub keep_going: bool, /// Build profile pub requested_profile: InternedString, /// The mode we are compiling in. pub mode: CompileMode, /// `true` to print stdout in JSON format (for machine reading). pub message_format: MessageFormat, /// Force Cargo to do a full rebuild and treat each target as changed. pub force_rebuild: bool, /// Output a build plan to stdout instead of actually compiling. pub build_plan: bool, /// Output the unit graph to stdout instead of actually compiling. pub unit_graph: bool, /// An optional override of the rustc process for primary units pub primary_unit_rustc: Option, /// A thread used by `cargo fix` to receive messages on a socket regarding /// the success/failure of applying fixes. pub rustfix_diagnostic_server: RefCell>, /// The directory to copy final artifacts to. Note that even if `out_dir` is /// set, a copy of artifacts still could be found a `target/(debug\release)` /// as usual. // Note that, although the cmd-line flag name is `out-dir`, in code we use // `export_dir`, to avoid confusion with out dir at `target/debug/deps`. pub export_dir: Option, /// `true` to output a future incompatibility report at the end of the build pub future_incompat_report: bool, /// Which kinds of build timings to output (empty if none). pub timing_outputs: Vec, } fn default_parallelism() -> CargoResult { Ok(available_parallelism() .context("failed to determine the amount of parallelism available")? .get() as u32) } impl BuildConfig { /// Parses all config files to learn about build configuration. 
Currently /// configured options are: /// /// * `build.jobs` /// * `build.target` /// * `target.$target.ar` /// * `target.$target.linker` /// * `target.$target.libfoo.metadata` pub fn new( config: &Config, jobs: Option, keep_going: bool, requested_targets: &[String], mode: CompileMode, ) -> CargoResult { let cfg = config.build_config()?; let requested_kinds = CompileKind::from_requested_targets(config, requested_targets)?; if jobs.is_some() && config.jobserver_from_env().is_some() { config.shell().warn( "a `-j` argument was passed to Cargo but Cargo is \ also configured with an external jobserver in \ its environment, ignoring the `-j` parameter", )?; } let jobs = match jobs.or(cfg.jobs) { None => default_parallelism()?, Some(0) => anyhow::bail!("jobs may not be 0"), Some(j) if j < 0 => (default_parallelism()? as i32 + j).max(1) as u32, Some(j) => j as u32, }; if config.cli_unstable().build_std.is_some() && requested_kinds[0].is_host() { // TODO: This should eventually be fixed. anyhow::bail!("-Zbuild-std requires --target"); } Ok(BuildConfig { requested_kinds, jobs, keep_going, requested_profile: InternedString::new("dev"), mode, message_format: MessageFormat::Human, force_rebuild: false, build_plan: false, unit_graph: false, primary_unit_rustc: None, rustfix_diagnostic_server: RefCell::new(None), export_dir: None, future_incompat_report: false, timing_outputs: Vec::new(), }) } /// Whether or not the *user* wants JSON output. Whether or not rustc /// actually uses JSON is decided in `add_error_format`. pub fn emit_json(&self) -> bool { matches!(self.message_format, MessageFormat::Json { .. 
}) } pub fn test(&self) -> bool { self.mode == CompileMode::Test || self.mode == CompileMode::Bench } pub fn single_requested_kind(&self) -> CargoResult { match self.requested_kinds.len() { 1 => Ok(self.requested_kinds[0]), _ => bail!("only one `--target` argument is supported"), } } } #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum MessageFormat { Human, Json { /// Whether rustc diagnostics are rendered by cargo or included into the /// output stream. render_diagnostics: bool, /// Whether the `rendered` field of rustc diagnostics are using the /// "short" rendering. short: bool, /// Whether the `rendered` field of rustc diagnostics embed ansi color /// codes. ansi: bool, }, Short, } /// The general "mode" for what to do. /// This is used for two purposes. The commands themselves pass this in to /// `compile_ws` to tell it the general execution strategy. This influences /// the default targets selected. The other use is in the `Unit` struct /// to indicate what is being done with a specific target. #[derive(Clone, Copy, PartialEq, Debug, Eq, Hash, PartialOrd, Ord)] pub enum CompileMode { /// A target being built for a test. Test, /// Building a target with `rustc` (lib or bin). Build, /// Building a target with `rustc` to emit `rmeta` metadata only. If /// `test` is true, then it is also compiled with `--test` to check it like /// a test. Check { test: bool }, /// Used to indicate benchmarks should be built. This is not used in /// `Unit`, because it is essentially the same as `Test` (indicating /// `--test` should be passed to rustc) and by using `Test` instead it /// allows some de-duping of Units to occur. Bench, /// A target that will be documented with `rustdoc`. /// If `deps` is true, then it will also document all dependencies. Doc { deps: bool }, /// A target that will be tested with `rustdoc`. Doctest, /// An example or library that will be scraped for function calls by `rustdoc`. 
Docscrape, /// A marker for Units that represent the execution of a `build.rs` script. RunCustomBuild, } impl ser::Serialize for CompileMode { fn serialize(&self, s: S) -> Result where S: ser::Serializer, { use self::CompileMode::*; match *self { Test => "test".serialize(s), Build => "build".serialize(s), Check { .. } => "check".serialize(s), Bench => "bench".serialize(s), Doc { .. } => "doc".serialize(s), Doctest => "doctest".serialize(s), Docscrape => "docscrape".serialize(s), RunCustomBuild => "run-custom-build".serialize(s), } } } impl CompileMode { /// Returns `true` if the unit is being checked. pub fn is_check(self) -> bool { matches!(self, CompileMode::Check { .. }) } /// Returns `true` if this is generating documentation. pub fn is_doc(self) -> bool { matches!(self, CompileMode::Doc { .. }) } /// Returns `true` if this a doc test. pub fn is_doc_test(self) -> bool { self == CompileMode::Doctest } /// Returns `true` if this is scraping examples for documentation. pub fn is_doc_scrape(self) -> bool { self == CompileMode::Docscrape } /// Returns `true` if this is any type of test (test, benchmark, doc test, or /// check test). pub fn is_any_test(self) -> bool { matches!( self, CompileMode::Test | CompileMode::Bench | CompileMode::Check { test: true } | CompileMode::Doctest ) } /// Returns `true` if this is something that passes `--test` to rustc. pub fn is_rustc_test(self) -> bool { matches!( self, CompileMode::Test | CompileMode::Bench | CompileMode::Check { test: true } ) } /// Returns `true` if this is the *execution* of a `build.rs` script. pub fn is_run_custom_build(self) -> bool { self == CompileMode::RunCustomBuild } /// Returns `true` if this mode may generate an executable. /// /// Note that this also returns `true` for building libraries, so you also /// have to check the target. pub fn generates_executable(self) -> bool { matches!( self, CompileMode::Test | CompileMode::Bench | CompileMode::Build ) } } /// Kinds of build timings we can output. 
#[derive(Clone, Copy, PartialEq, Debug, Eq, Hash, PartialOrd, Ord)] pub enum TimingOutput { /// Human-readable HTML report Html, /// Machine-readable JSON (unstable) Json, } cargo-0.66.0/src/cargo/core/compiler/build_context/000077500000000000000000000000001432416201200221605ustar00rootroot00000000000000cargo-0.66.0/src/cargo/core/compiler/build_context/mod.rs000066400000000000000000000076751432416201200233240ustar00rootroot00000000000000use crate::core::compiler::unit_graph::UnitGraph; use crate::core::compiler::{BuildConfig, CompileKind, Unit}; use crate::core::profiles::Profiles; use crate::core::PackageSet; use crate::core::Workspace; use crate::util::config::Config; use crate::util::errors::CargoResult; use crate::util::interning::InternedString; use crate::util::Rustc; use std::collections::{HashMap, HashSet}; use std::path::PathBuf; mod target_info; pub use self::target_info::{ FileFlavor, FileType, RustDocFingerprint, RustcTargetData, TargetInfo, }; /// The build context, containing all information about a build task. /// /// It is intended that this is mostly static information. Stuff that mutates /// during the build can be found in the parent `Context`. (I say mostly, /// because this has internal caching, but nothing that should be observable /// or require &mut.) pub struct BuildContext<'a, 'cfg> { /// The workspace the build is for. pub ws: &'a Workspace<'cfg>, /// The cargo configuration. pub config: &'cfg Config, pub profiles: Profiles, pub build_config: &'a BuildConfig, /// Extra compiler args for either `rustc` or `rustdoc`. pub extra_compiler_args: HashMap>, /// Package downloader. /// /// This holds ownership of the `Package` objects. pub packages: PackageSet<'cfg>, /// Information about rustc and the target platform. pub target_data: RustcTargetData<'cfg>, /// The root units of `unit_graph` (units requested on the command-line). pub roots: Vec, /// The dependency graph of units to compile. 
pub unit_graph: UnitGraph, /// Reverse-dependencies of documented units, used by the rustdoc --scrape-examples flag. pub scrape_units: Vec, /// The list of all kinds that are involved in this build pub all_kinds: HashSet, } impl<'a, 'cfg> BuildContext<'a, 'cfg> { pub fn new( ws: &'a Workspace<'cfg>, packages: PackageSet<'cfg>, build_config: &'a BuildConfig, profiles: Profiles, extra_compiler_args: HashMap>, target_data: RustcTargetData<'cfg>, roots: Vec, unit_graph: UnitGraph, scrape_units: Vec, ) -> CargoResult> { let all_kinds = unit_graph .keys() .map(|u| u.kind) .chain(build_config.requested_kinds.iter().copied()) .chain(std::iter::once(CompileKind::Host)) .collect(); Ok(BuildContext { ws, config: ws.config(), packages, build_config, profiles, extra_compiler_args, target_data, roots, unit_graph, scrape_units, all_kinds, }) } pub fn rustc(&self) -> &Rustc { &self.target_data.rustc } /// Gets the user-specified linker for a particular host or target. pub fn linker(&self, kind: CompileKind) -> Option { self.target_data .target_config(kind) .linker .as_ref() .map(|l| l.val.clone().resolve_program(self.config)) } /// Gets the host architecture triple. /// /// For example, x86_64-unknown-linux-gnu, would be /// - machine: x86_64, /// - hardware-platform: unknown, /// - operating system: linux-gnu. pub fn host_triple(&self) -> InternedString { self.target_data.rustc.host } /// Gets the number of jobs specified for this build. 
pub fn jobs(&self) -> u32 { self.build_config.jobs } pub fn rustflags_args(&self, unit: &Unit) -> &[String] { &self.target_data.info(unit.kind).rustflags } pub fn rustdocflags_args(&self, unit: &Unit) -> &[String] { &self.target_data.info(unit.kind).rustdocflags } pub fn extra_args_for(&self, unit: &Unit) -> Option<&Vec> { self.extra_compiler_args.get(unit) } } cargo-0.66.0/src/cargo/core/compiler/build_context/target_info.rs000066400000000000000000001103701432416201200250310ustar00rootroot00000000000000use crate::core::compiler::{ BuildOutput, CompileKind, CompileMode, CompileTarget, Context, CrateType, }; use crate::core::{Dependency, Package, Target, TargetKind, Workspace}; use crate::util::config::{Config, StringList, TargetConfig}; use crate::util::{CargoResult, Rustc}; use anyhow::Context as _; use cargo_platform::{Cfg, CfgExpr}; use cargo_util::{paths, ProcessBuilder}; use serde::{Deserialize, Serialize}; use std::cell::RefCell; use std::collections::hash_map::{Entry, HashMap}; use std::env; use std::path::{Path, PathBuf}; use std::str::{self, FromStr}; /// Information about the platform target gleaned from querying rustc. /// /// `RustcTargetData` keeps two of these, one for the host and one for the /// target. If no target is specified, it uses a clone from the host. #[derive(Clone)] pub struct TargetInfo { /// A base process builder for discovering crate type information. In /// particular, this is used to determine the output filename prefix and /// suffix for a crate type. crate_type_process: ProcessBuilder, /// Cache of output filename prefixes and suffixes. /// /// The key is the crate type name (like `cdylib`) and the value is /// `Some((prefix, suffix))`, for example `libcargo.so` would be /// `Some(("lib", ".so")). The value is `None` if the crate type is not /// supported. crate_types: RefCell>>, /// `cfg` information extracted from `rustc --print=cfg`. cfg: Vec, /// Path to the sysroot. 
pub sysroot: PathBuf, /// Path to the "lib" or "bin" directory that rustc uses for its dynamic /// libraries. pub sysroot_host_libdir: PathBuf, /// Path to the "lib" directory in the sysroot which rustc uses for linking /// target libraries. pub sysroot_target_libdir: PathBuf, /// Extra flags to pass to `rustc`, see `env_args`. pub rustflags: Vec, /// Extra flags to pass to `rustdoc`, see `env_args`. pub rustdocflags: Vec, /// Whether or not rustc supports the `-Csplit-debuginfo` flag. pub supports_split_debuginfo: bool, } /// Kind of each file generated by a Unit, part of `FileType`. #[derive(Clone, PartialEq, Eq, Debug)] pub enum FileFlavor { /// Not a special file type. Normal, /// Like `Normal`, but not directly executable. /// For example, a `.wasm` file paired with the "normal" `.js` file. Auxiliary, /// Something you can link against (e.g., a library). Linkable, /// An `.rmeta` Rust metadata file. Rmeta, /// Piece of external debug information (e.g., `.dSYM`/`.pdb` file). DebugInfo, } /// Type of each file generated by a Unit. #[derive(Debug)] pub struct FileType { /// The kind of file. pub flavor: FileFlavor, /// The crate-type that generates this file. /// /// `None` for things that aren't associated with a specific crate type, /// for example `rmeta` files. pub crate_type: Option, /// The suffix for the file (for example, `.rlib`). /// This is an empty string for executables on Unix-like platforms. suffix: String, /// The prefix for the file (for example, `lib`). /// This is an empty string for things like executables. prefix: String, /// Flag to convert hyphen to underscore when uplifting. should_replace_hyphens: bool, } impl FileType { /// The filename for this FileType crated by rustc. 
pub fn output_filename(&self, target: &Target, metadata: Option<&str>) -> String { match metadata { Some(metadata) => format!( "{}{}-{}{}", self.prefix, target.crate_name(), metadata, self.suffix ), None => format!("{}{}{}", self.prefix, target.crate_name(), self.suffix), } } /// The filename for this FileType that Cargo should use when "uplifting" /// it to the destination directory. pub fn uplift_filename(&self, target: &Target) -> String { let name = match target.binary_filename() { Some(name) => name, None => { // For binary crate type, `should_replace_hyphens` will always be false. if self.should_replace_hyphens { target.crate_name() } else { target.name().to_string() } } }; format!("{}{}{}", self.prefix, name, self.suffix) } /// Creates a new instance representing a `.rmeta` file. pub fn new_rmeta() -> FileType { // Note that even binaries use the `lib` prefix. FileType { flavor: FileFlavor::Rmeta, crate_type: None, suffix: ".rmeta".to_string(), prefix: "lib".to_string(), should_replace_hyphens: true, } } } impl TargetInfo { pub fn new( config: &Config, requested_kinds: &[CompileKind], rustc: &Rustc, kind: CompileKind, ) -> CargoResult { let rustflags = env_args( config, requested_kinds, &rustc.host, None, kind, Flags::Rust, )?; let extra_fingerprint = kind.fingerprint_hash(); let mut process = rustc.workspace_process(); process .arg("-") .arg("--crate-name") .arg("___") .arg("--print=file-names") .args(&rustflags) .env_remove("RUSTC_LOG"); if let CompileKind::Target(target) = kind { process.arg("--target").arg(target.rustc_target()); } let crate_type_process = process.clone(); const KNOWN_CRATE_TYPES: &[CrateType] = &[ CrateType::Bin, CrateType::Rlib, CrateType::Dylib, CrateType::Cdylib, CrateType::Staticlib, CrateType::ProcMacro, ]; for crate_type in KNOWN_CRATE_TYPES.iter() { process.arg("--crate-type").arg(crate_type.as_str()); } let supports_split_debuginfo = rustc .cached_output( process.clone().arg("-Csplit-debuginfo=packed"), extra_fingerprint, ) 
.is_ok(); process.arg("--print=sysroot"); process.arg("--print=cfg"); let (output, error) = rustc .cached_output(&process, extra_fingerprint) .with_context(|| "failed to run `rustc` to learn about target-specific information")?; let mut lines = output.lines(); let mut map = HashMap::new(); for crate_type in KNOWN_CRATE_TYPES { let out = parse_crate_type(crate_type, &process, &output, &error, &mut lines)?; map.insert(crate_type.clone(), out); } let line = match lines.next() { Some(line) => line, None => anyhow::bail!( "output of --print=sysroot missing when learning about \ target-specific information from rustc\n{}", output_err_info(&process, &output, &error) ), }; let sysroot = PathBuf::from(line); let sysroot_host_libdir = if cfg!(windows) { sysroot.join("bin") } else { sysroot.join("lib") }; let mut sysroot_target_libdir = sysroot.clone(); sysroot_target_libdir.push("lib"); sysroot_target_libdir.push("rustlib"); sysroot_target_libdir.push(match &kind { CompileKind::Host => rustc.host.as_str(), CompileKind::Target(target) => target.short_name(), }); sysroot_target_libdir.push("lib"); let cfg = lines .map(|line| Ok(Cfg::from_str(line)?)) .filter(TargetInfo::not_user_specific_cfg) .collect::>>() .with_context(|| { format!( "failed to parse the cfg from `rustc --print=cfg`, got:\n{}", output ) })?; Ok(TargetInfo { crate_type_process, crate_types: RefCell::new(map), sysroot, sysroot_host_libdir, sysroot_target_libdir, // recalculate `rustflags` from above now that we have `cfg` // information rustflags: env_args( config, requested_kinds, &rustc.host, Some(&cfg), kind, Flags::Rust, )?, rustdocflags: env_args( config, requested_kinds, &rustc.host, Some(&cfg), kind, Flags::Rustdoc, )?, cfg, supports_split_debuginfo, }) } fn not_user_specific_cfg(cfg: &CargoResult) -> bool { if let Ok(Cfg::Name(cfg_name)) = cfg { // This should also include "debug_assertions", but it causes // regressions. 
Maybe some day in the distant future it can be // added (and possibly change the warning to an error). if cfg_name == "proc_macro" { return false; } } true } /// All the target `cfg` settings. pub fn cfg(&self) -> &[Cfg] { &self.cfg } /// Returns the list of file types generated by the given crate type. /// /// Returns `None` if the target does not support the given crate type. fn file_types( &self, crate_type: &CrateType, flavor: FileFlavor, target_triple: &str, ) -> CargoResult>> { let crate_type = if *crate_type == CrateType::Lib { CrateType::Rlib } else { crate_type.clone() }; let mut crate_types = self.crate_types.borrow_mut(); let entry = crate_types.entry(crate_type.clone()); let crate_type_info = match entry { Entry::Occupied(o) => &*o.into_mut(), Entry::Vacant(v) => { let value = self.discover_crate_type(v.key())?; &*v.insert(value) } }; let (prefix, suffix) = match *crate_type_info { Some((ref prefix, ref suffix)) => (prefix, suffix), None => return Ok(None), }; let mut ret = vec![FileType { suffix: suffix.clone(), prefix: prefix.clone(), flavor, crate_type: Some(crate_type.clone()), should_replace_hyphens: crate_type != CrateType::Bin, }]; // Window shared library import/export files. if crate_type.is_dynamic() { // Note: Custom JSON specs can alter the suffix. For now, we'll // just ignore non-DLL suffixes. if target_triple.ends_with("-windows-msvc") && suffix == ".dll" { // See https://docs.microsoft.com/en-us/cpp/build/reference/working-with-import-libraries-and-export-files // for more information about DLL import/export files. 
ret.push(FileType { suffix: ".dll.lib".to_string(), prefix: prefix.clone(), flavor: FileFlavor::Auxiliary, crate_type: Some(crate_type.clone()), should_replace_hyphens: true, }); // NOTE: lld does not produce these ret.push(FileType { suffix: ".dll.exp".to_string(), prefix: prefix.clone(), flavor: FileFlavor::Auxiliary, crate_type: Some(crate_type.clone()), should_replace_hyphens: true, }); } else if target_triple.ends_with("windows-gnu") && suffix == ".dll" { // See https://cygwin.com/cygwin-ug-net/dll.html for more // information about GNU import libraries. // LD can link DLL directly, but LLD requires the import library. ret.push(FileType { suffix: ".dll.a".to_string(), prefix: "lib".to_string(), flavor: FileFlavor::Auxiliary, crate_type: Some(crate_type.clone()), should_replace_hyphens: true, }) } } if target_triple.starts_with("wasm32-") && crate_type == CrateType::Bin && suffix == ".js" { // emscripten binaries generate a .js file, which loads a .wasm // file. ret.push(FileType { suffix: ".wasm".to_string(), prefix: prefix.clone(), flavor: FileFlavor::Auxiliary, crate_type: Some(crate_type.clone()), // Name `foo-bar` will generate a `foo_bar.js` and // `foo_bar.wasm`. Cargo will translate the underscore and // copy `foo_bar.js` to `foo-bar.js`. However, the wasm // filename is embedded in the .js file with an underscore, so // it should not contain hyphens. should_replace_hyphens: true, }); // And a map file for debugging. This is only emitted with debug=2 // (-g4 for emcc). ret.push(FileType { suffix: ".wasm.map".to_string(), prefix: prefix.clone(), flavor: FileFlavor::DebugInfo, crate_type: Some(crate_type.clone()), should_replace_hyphens: true, }); } // Handle separate debug files. 
let is_apple = target_triple.contains("-apple-"); if matches!( crate_type, CrateType::Bin | CrateType::Dylib | CrateType::Cdylib | CrateType::ProcMacro ) { if is_apple { let suffix = if crate_type == CrateType::Bin { ".dSYM".to_string() } else { ".dylib.dSYM".to_string() }; ret.push(FileType { suffix, prefix: prefix.clone(), flavor: FileFlavor::DebugInfo, crate_type: Some(crate_type), // macOS tools like lldb use all sorts of magic to locate // dSYM files. See https://lldb.llvm.org/use/symbols.html // for some details. It seems like a `.dSYM` located next // to the executable with the same name is one method. The // dSYM should have the same hyphens as the executable for // the names to match. should_replace_hyphens: false, }) } else if target_triple.ends_with("-msvc") { ret.push(FileType { suffix: ".pdb".to_string(), prefix: prefix.clone(), flavor: FileFlavor::DebugInfo, crate_type: Some(crate_type), // The absolute path to the pdb file is embedded in the // executable. If the exe/pdb pair is moved to another // machine, then debuggers will look in the same directory // of the exe with the original pdb filename. Since the // original name contains underscores, they need to be // preserved. should_replace_hyphens: true, }) } } Ok(Some(ret)) } fn discover_crate_type(&self, crate_type: &CrateType) -> CargoResult> { let mut process = self.crate_type_process.clone(); process.arg("--crate-type").arg(crate_type.as_str()); let output = process.exec_with_output().with_context(|| { format!( "failed to run `rustc` to learn about crate-type {} information", crate_type ) })?; let error = str::from_utf8(&output.stderr).unwrap(); let output = str::from_utf8(&output.stdout).unwrap(); parse_crate_type(crate_type, &process, output, error, &mut output.lines()) } /// Returns all the file types generated by rustc for the given mode/target_kind. 
/// /// The first value is a Vec of file types generated, the second value is /// a list of CrateTypes that are not supported by the given target. pub fn rustc_outputs( &self, mode: CompileMode, target_kind: &TargetKind, target_triple: &str, ) -> CargoResult<(Vec, Vec)> { match mode { CompileMode::Build => self.calc_rustc_outputs(target_kind, target_triple), CompileMode::Test | CompileMode::Bench => { match self.file_types(&CrateType::Bin, FileFlavor::Normal, target_triple)? { Some(fts) => Ok((fts, Vec::new())), None => Ok((Vec::new(), vec![CrateType::Bin])), } } CompileMode::Check { .. } => Ok((vec![FileType::new_rmeta()], Vec::new())), CompileMode::Doc { .. } | CompileMode::Doctest | CompileMode::Docscrape | CompileMode::RunCustomBuild => { panic!("asked for rustc output for non-rustc mode") } } } fn calc_rustc_outputs( &self, target_kind: &TargetKind, target_triple: &str, ) -> CargoResult<(Vec, Vec)> { let mut unsupported = Vec::new(); let mut result = Vec::new(); let crate_types = target_kind.rustc_crate_types(); for crate_type in &crate_types { let flavor = if crate_type.is_linkable() { FileFlavor::Linkable } else { FileFlavor::Normal }; let file_types = self.file_types(crate_type, flavor, target_triple)?; match file_types { Some(types) => { result.extend(types); } None => { unsupported.push(crate_type.clone()); } } } if !result.is_empty() && !crate_types.iter().any(|ct| ct.requires_upstream_objects()) { // Only add rmeta if pipelining. result.push(FileType::new_rmeta()); } Ok((result, unsupported)) } } /// Takes rustc output (using specialized command line args), and calculates the file prefix and /// suffix for the given crate type, or returns `None` if the type is not supported. (e.g., for a /// Rust library like `libcargo.rlib`, we have prefix "lib" and suffix "rlib"). /// /// The caller needs to ensure that the lines object is at the correct line for the given crate /// type: this is not checked. 
/// /// This function can not handle more than one file per type (with wasm32-unknown-emscripten, there /// are two files for bin (`.wasm` and `.js`)). fn parse_crate_type( crate_type: &CrateType, cmd: &ProcessBuilder, output: &str, error: &str, lines: &mut str::Lines<'_>, ) -> CargoResult> { let not_supported = error.lines().any(|line| { (line.contains("unsupported crate type") || line.contains("unknown crate type")) && line.contains(&format!("crate type `{}`", crate_type)) }); if not_supported { return Ok(None); } let line = match lines.next() { Some(line) => line, None => anyhow::bail!( "malformed output when learning about crate-type {} information\n{}", crate_type, output_err_info(cmd, output, error) ), }; let mut parts = line.trim().split("___"); let prefix = parts.next().unwrap(); let suffix = match parts.next() { Some(part) => part, None => anyhow::bail!( "output of --print=file-names has changed in the compiler, cannot parse\n{}", output_err_info(cmd, output, error) ), }; Ok(Some((prefix.to_string(), suffix.to_string()))) } /// Helper for creating an error message when parsing rustc output fails. fn output_err_info(cmd: &ProcessBuilder, stdout: &str, stderr: &str) -> String { let mut result = format!("command was: {}\n", cmd); if !stdout.is_empty() { result.push_str("\n--- stdout\n"); result.push_str(stdout); } if !stderr.is_empty() { result.push_str("\n--- stderr\n"); result.push_str(stderr); } if stdout.is_empty() && stderr.is_empty() { result.push_str("(no output received)"); } result } #[derive(Debug, Copy, Clone)] enum Flags { Rust, Rustdoc, } impl Flags { fn as_key(self) -> &'static str { match self { Flags::Rust => "rustflags", Flags::Rustdoc => "rustdocflags", } } fn as_env(self) -> &'static str { match self { Flags::Rust => "RUSTFLAGS", Flags::Rustdoc => "RUSTDOCFLAGS", } } } /// Acquire extra flags to pass to the compiler from various locations. 
/// /// The locations are: /// /// - the `CARGO_ENCODED_RUSTFLAGS` environment variable /// - the `RUSTFLAGS` environment variable /// /// then if none of those were found /// /// - `target.*.rustflags` from the config (.cargo/config) /// - `target.cfg(..).rustflags` from the config /// - `host.*.rustflags` from the config if compiling a host artifact or without `--target` /// /// then if none of those were found /// /// - `build.rustflags` from the config /// /// The behavior differs slightly when cross-compiling (or, specifically, when `--target` is /// provided) for artifacts that are always built for the host (plugins, build scripts, ...). /// For those artifacts, _only_ `host.*.rustflags` is respected, and no other configuration /// sources, _regardless of the value of `target-applies-to-host`_. This is counterintuitive, but /// necessary to retain backwards compatibility with older versions of Cargo. fn env_args( config: &Config, requested_kinds: &[CompileKind], host_triple: &str, target_cfg: Option<&[Cfg]>, kind: CompileKind, flags: Flags, ) -> CargoResult> { let target_applies_to_host = config.target_applies_to_host()?; // Host artifacts should not generally pick up rustflags from anywhere except [host]. // // The one exception to this is if `target-applies-to-host = true`, which opts into a // particular (inconsistent) past Cargo behavior where host artifacts _do_ pick up rustflags // set elsewhere when `--target` isn't passed. if kind.is_host() { if target_applies_to_host && requested_kinds == [CompileKind::Host] { // This is the past Cargo behavior where we fall back to the same logic as for other // artifacts without --target. } else { // In all other cases, host artifacts just get flags from [host], regardless of // --target. Or, phrased differently, no `--target` behaves the same as `--target // `, and host artifacts are always "special" (they don't pick up `RUSTFLAGS` for // example). 
return Ok(rustflags_from_host(config, flags, host_triple)?.unwrap_or_else(Vec::new)); } } // All other artifacts pick up the RUSTFLAGS, [target.*], and [build], in that order. // NOTE: It is impossible to have a [host] section and reach this logic with kind.is_host(), // since [host] implies `target-applies-to-host = false`, which always early-returns above. if let Some(rustflags) = rustflags_from_env(flags) { Ok(rustflags) } else if let Some(rustflags) = rustflags_from_target(config, host_triple, target_cfg, kind, flags)? { Ok(rustflags) } else if let Some(rustflags) = rustflags_from_build(config, flags)? { Ok(rustflags) } else { Ok(Vec::new()) } } fn rustflags_from_env(flags: Flags) -> Option> { // First try CARGO_ENCODED_RUSTFLAGS from the environment. // Prefer this over RUSTFLAGS since it's less prone to encoding errors. if let Ok(a) = env::var(format!("CARGO_ENCODED_{}", flags.as_env())) { if a.is_empty() { return Some(Vec::new()); } return Some(a.split('\x1f').map(str::to_string).collect()); } // Then try RUSTFLAGS from the environment if let Ok(a) = env::var(flags.as_env()) { let args = a .split(' ') .map(str::trim) .filter(|s| !s.is_empty()) .map(str::to_string); return Some(args.collect()); } // No rustflags to be collected from the environment None } fn rustflags_from_target( config: &Config, host_triple: &str, target_cfg: Option<&[Cfg]>, kind: CompileKind, flag: Flags, ) -> CargoResult>> { let mut rustflags = Vec::new(); // Then the target.*.rustflags value... let target = match &kind { CompileKind::Host => host_triple, CompileKind::Target(target) => target.short_name(), }; let key = format!("target.{}.{}", target, flag.as_key()); if let Some(args) = config.get::>(&key)? { rustflags.extend(args.as_slice().iter().cloned()); } // ...including target.'cfg(...)'.rustflags if let Some(target_cfg) = target_cfg { config .target_cfgs()? 
.iter() .filter_map(|(key, cfg)| { cfg.rustflags .as_ref() .map(|rustflags| (key, &rustflags.val)) }) .filter(|(key, _rustflags)| CfgExpr::matches_key(key, target_cfg)) .for_each(|(_key, cfg_rustflags)| { rustflags.extend(cfg_rustflags.as_slice().iter().cloned()); }); } if rustflags.is_empty() { Ok(None) } else { Ok(Some(rustflags)) } } fn rustflags_from_host( config: &Config, flag: Flags, host_triple: &str, ) -> CargoResult>> { let target_cfg = config.host_cfg_triple(host_triple)?; let list = match flag { Flags::Rust => &target_cfg.rustflags, Flags::Rustdoc => { // host.rustdocflags is not a thing, since it does not make sense return Ok(None); } }; Ok(list.as_ref().map(|l| l.val.as_slice().to_vec())) } fn rustflags_from_build(config: &Config, flag: Flags) -> CargoResult>> { // Then the `build.rustflags` value. let build = config.build_config()?; let list = match flag { Flags::Rust => &build.rustflags, Flags::Rustdoc => &build.rustdocflags, }; Ok(list.as_ref().map(|l| l.as_slice().to_vec())) } /// Collection of information about `rustc` and the host and target. pub struct RustcTargetData<'cfg> { /// Information about `rustc` itself. pub rustc: Rustc, /// Config config: &'cfg Config, requested_kinds: Vec, /// Build information for the "host", which is information about when /// `rustc` is invoked without a `--target` flag. This is used for /// procedural macros, build scripts, etc. host_config: TargetConfig, host_info: TargetInfo, /// Build information for targets that we're building for. This will be /// empty if the `--target` flag is not passed. 
target_config: HashMap, target_info: HashMap, } impl<'cfg> RustcTargetData<'cfg> { pub fn new( ws: &Workspace<'cfg>, requested_kinds: &[CompileKind], ) -> CargoResult> { let config = ws.config(); let rustc = config.load_global_rustc(Some(ws))?; let mut target_config = HashMap::new(); let mut target_info = HashMap::new(); let target_applies_to_host = config.target_applies_to_host()?; let host_info = TargetInfo::new(config, requested_kinds, &rustc, CompileKind::Host)?; let host_config = if target_applies_to_host { config.target_cfg_triple(&rustc.host)? } else { config.host_cfg_triple(&rustc.host)? }; // This is a hack. The unit_dependency graph builder "pretends" that // `CompileKind::Host` is `CompileKind::Target(host)` if the // `--target` flag is not specified. Since the unit_dependency code // needs access to the target config data, create a copy so that it // can be found. See `rebuild_unit_graph_shared` for why this is done. if requested_kinds.iter().any(CompileKind::is_host) { let ct = CompileTarget::new(&rustc.host)?; target_info.insert(ct, host_info.clone()); target_config.insert(ct, config.target_cfg_triple(&rustc.host)?); }; let mut res = RustcTargetData { rustc, config, requested_kinds: requested_kinds.into(), host_config, host_info, target_config, target_info, }; // Get all kinds we currently know about. // // For now, targets can only ever come from the root workspace // units and artifact dependencies, so this // correctly represents all the kinds that can happen. When we have // other ways for targets to appear at places that are not the root units, // we may have to revisit this. 
fn artifact_targets(package: &Package) -> impl Iterator + '_ { package .manifest() .dependencies() .iter() .filter_map(|d| d.artifact()?.target()?.to_compile_kind()) } let all_kinds = requested_kinds .iter() .copied() .chain(ws.members().flat_map(|p| { p.manifest() .default_kind() .into_iter() .chain(p.manifest().forced_kind()) .chain(artifact_targets(p)) })); for kind in all_kinds { res.merge_compile_kind(kind)?; } Ok(res) } /// Insert `kind` into our `target_info` and `target_config` members if it isn't present yet. fn merge_compile_kind(&mut self, kind: CompileKind) -> CargoResult<()> { if let CompileKind::Target(target) = kind { if !self.target_config.contains_key(&target) { self.target_config .insert(target, self.config.target_cfg_triple(target.short_name())?); } if !self.target_info.contains_key(&target) { self.target_info.insert( target, TargetInfo::new(self.config, &self.requested_kinds, &self.rustc, kind)?, ); } } Ok(()) } /// Returns a "short" name for the given kind, suitable for keying off /// configuration in Cargo or presenting to users. pub fn short_name<'a>(&'a self, kind: &'a CompileKind) -> &'a str { match kind { CompileKind::Host => &self.rustc.host, CompileKind::Target(target) => target.short_name(), } } /// Whether a dependency should be compiled for the host or target platform, /// specified by `CompileKind`. pub fn dep_platform_activated(&self, dep: &Dependency, kind: CompileKind) -> bool { // If this dependency is only available for certain platforms, // make sure we're only enabling it for that platform. let platform = match dep.platform() { Some(p) => p, None => return true, }; let name = self.short_name(&kind); platform.matches(name, self.cfg(kind)) } /// Gets the list of `cfg`s printed out from the compiler for the specified kind. pub fn cfg(&self, kind: CompileKind) -> &[Cfg] { self.info(kind).cfg() } /// Information about the given target platform, learned by querying rustc. 
pub fn info(&self, kind: CompileKind) -> &TargetInfo { match kind { CompileKind::Host => &self.host_info, CompileKind::Target(s) => &self.target_info[&s], } } /// Gets the target configuration for a particular host or target. pub fn target_config(&self, kind: CompileKind) -> &TargetConfig { match kind { CompileKind::Host => &self.host_config, CompileKind::Target(s) => &self.target_config[&s], } } /// If a build script is overridden, this returns the `BuildOutput` to use. /// /// `lib_name` is the `links` library name and `kind` is whether it is for /// Host or Target. pub fn script_override(&self, lib_name: &str, kind: CompileKind) -> Option<&BuildOutput> { self.target_config(kind).links_overrides.get(lib_name) } } /// Structure used to deal with Rustdoc fingerprinting #[derive(Debug, Serialize, Deserialize)] pub struct RustDocFingerprint { pub rustc_vv: String, } impl RustDocFingerprint { /// This function checks whether the latest version of `Rustc` used to compile this /// `Workspace`'s docs was the same as the one is currently being used in this `cargo doc` /// call. /// /// In case it's not, it takes care of removing the `doc/` folder as well as overwriting /// the rustdoc fingerprint info in order to guarantee that we won't end up with mixed /// versions of the `js/html/css` files that `rustdoc` autogenerates which do not have /// any versioning. 
pub fn check_rustdoc_fingerprint(cx: &Context<'_, '_>) -> CargoResult<()> { if cx.bcx.config.cli_unstable().skip_rustdoc_fingerprint { return Ok(()); } let actual_rustdoc_target_data = RustDocFingerprint { rustc_vv: cx.bcx.rustc().verbose_version.clone(), }; let fingerprint_path = cx.files().host_root().join(".rustdoc_fingerprint.json"); let write_fingerprint = || -> CargoResult<()> { paths::write( &fingerprint_path, serde_json::to_string(&actual_rustdoc_target_data)?, ) }; let rustdoc_data = match paths::read(&fingerprint_path) { Ok(rustdoc_data) => rustdoc_data, // If the fingerprint does not exist, do not clear out the doc // directories. Otherwise this ran into problems where projects // like rustbuild were creating the doc directory before running // `cargo doc` in a way that deleting it would break it. Err(_) => return write_fingerprint(), }; match serde_json::from_str::(&rustdoc_data) { Ok(fingerprint) => { if fingerprint.rustc_vv == actual_rustdoc_target_data.rustc_vv { return Ok(()); } else { log::debug!( "doc fingerprint changed:\noriginal:\n{}\nnew:\n{}", fingerprint.rustc_vv, actual_rustdoc_target_data.rustc_vv ); } } Err(e) => { log::debug!("could not deserialize {:?}: {}", fingerprint_path, e); } }; // Fingerprint does not match, delete the doc directories and write a new fingerprint. log::debug!( "fingerprint {:?} mismatch, clearing doc directories", fingerprint_path ); cx.bcx .all_kinds .iter() .map(|kind| cx.files().layout(*kind).doc()) .filter(|path| path.exists()) .try_for_each(|path| clean_doc(path))?; write_fingerprint()?; return Ok(()); fn clean_doc(path: &Path) -> CargoResult<()> { let entries = path .read_dir() .with_context(|| format!("failed to read directory `{}`", path.display()))?; for entry in entries { let entry = entry?; // Don't remove hidden files. Rustdoc does not create them, // but the user might have. 
if entry .file_name() .to_str() .map_or(false, |name| name.starts_with('.')) { continue; } let path = entry.path(); if entry.file_type()?.is_dir() { paths::remove_dir_all(path)?; } else { paths::remove_file(path)?; } } Ok(()) } } } cargo-0.66.0/src/cargo/core/compiler/build_plan.rs000066400000000000000000000114621432416201200220000ustar00rootroot00000000000000//! A graph-like structure used to represent the rustc commands to build the package and the //! interdependencies between them. //! //! The BuildPlan structure is used to store the dependency graph of a dry run so that it can be //! shared with an external build system. Each Invocation in the BuildPlan comprises a single //! subprocess and defines the build environment, the outputs produced by the subprocess, and the //! dependencies on other Invocations. use std::collections::BTreeMap; use std::path::{Path, PathBuf}; use serde::Serialize; use super::context::OutputFile; use super::{CompileKind, CompileMode, Context, Unit}; use crate::core::TargetKind; use crate::util::{internal, CargoResult, Config}; use cargo_util::ProcessBuilder; #[derive(Debug, Serialize)] struct Invocation { package_name: String, package_version: semver::Version, target_kind: TargetKind, kind: CompileKind, compile_mode: CompileMode, deps: Vec, outputs: Vec, links: BTreeMap, program: String, args: Vec, env: BTreeMap, cwd: Option, } #[derive(Debug)] pub struct BuildPlan { invocation_map: BTreeMap, plan: SerializedBuildPlan, } #[derive(Debug, Serialize)] struct SerializedBuildPlan { invocations: Vec, inputs: Vec, } impl Invocation { pub fn new(unit: &Unit, deps: Vec) -> Invocation { let id = unit.pkg.package_id(); Invocation { package_name: id.name().to_string(), package_version: id.version().clone(), kind: unit.kind, target_kind: unit.target.kind().clone(), compile_mode: unit.mode, deps, outputs: Vec::new(), links: BTreeMap::new(), program: String::new(), args: Vec::new(), env: BTreeMap::new(), cwd: None, } } pub fn add_output(&mut self, 
path: &Path, link: &Option) { self.outputs.push(path.to_path_buf()); if let Some(ref link) = *link { self.links.insert(link.clone(), path.to_path_buf()); } } pub fn update_cmd(&mut self, cmd: &ProcessBuilder) -> CargoResult<()> { self.program = cmd .get_program() .to_str() .ok_or_else(|| anyhow::format_err!("unicode program string required"))? .to_string(); self.cwd = Some(cmd.get_cwd().unwrap().to_path_buf()); for arg in cmd.get_args() { self.args.push( arg.to_str() .ok_or_else(|| anyhow::format_err!("unicode argument string required"))? .to_string(), ); } for (var, value) in cmd.get_envs() { let value = match value { Some(s) => s, None => continue, }; self.env.insert( var.clone(), value .to_str() .ok_or_else(|| anyhow::format_err!("unicode environment value required"))? .to_string(), ); } Ok(()) } } impl BuildPlan { pub fn new() -> BuildPlan { BuildPlan { invocation_map: BTreeMap::new(), plan: SerializedBuildPlan::new(), } } pub fn add(&mut self, cx: &Context<'_, '_>, unit: &Unit) -> CargoResult<()> { let id = self.plan.invocations.len(); self.invocation_map.insert(unit.buildkey(), id); let deps = cx .unit_deps(unit) .iter() .map(|dep| self.invocation_map[&dep.unit.buildkey()]) .collect(); let invocation = Invocation::new(unit, deps); self.plan.invocations.push(invocation); Ok(()) } pub fn update( &mut self, invocation_name: &str, cmd: &ProcessBuilder, outputs: &[OutputFile], ) -> CargoResult<()> { let id = self.invocation_map[invocation_name]; let invocation = self.plan.invocations.get_mut(id).ok_or_else(|| { internal(format!("couldn't find invocation for {}", invocation_name)) })?; invocation.update_cmd(cmd)?; for output in outputs.iter() { invocation.add_output(&output.path, &output.hardlink); } Ok(()) } pub fn set_inputs(&mut self, inputs: Vec) { self.plan.inputs = inputs; } pub fn output_plan(self, config: &Config) { let encoded = serde_json::to_string(&self.plan).unwrap(); crate::drop_println!(config, "{}", encoded); } } impl SerializedBuildPlan { pub fn 
new() -> SerializedBuildPlan { SerializedBuildPlan { invocations: Vec::new(), inputs: Vec::new(), } } } cargo-0.66.0/src/cargo/core/compiler/compilation.rs000066400000000000000000000362741432416201200222150ustar00rootroot00000000000000use std::collections::{BTreeSet, HashMap}; use std::env; use std::ffi::{OsStr, OsString}; use std::path::PathBuf; use cargo_platform::CfgExpr; use cargo_util::{paths, ProcessBuilder}; use super::BuildContext; use crate::core::compiler::{CompileKind, Metadata, Unit}; use crate::core::Package; use crate::util::{config, CargoResult, Config}; /// Structure with enough information to run `rustdoc --test`. pub struct Doctest { /// What's being doctested pub unit: Unit, /// Arguments needed to pass to rustdoc to run this test. pub args: Vec, /// Whether or not -Zunstable-options is needed. pub unstable_opts: bool, /// The -Clinker value to use. pub linker: Option, /// The script metadata, if this unit's package has a build script. /// /// This is used for indexing [`Compilation::extra_env`]. pub script_meta: Option, /// Environment variables to set in the rustdoc process. pub env: HashMap, } /// Information about the output of a unit. #[derive(Ord, PartialOrd, Eq, PartialEq)] pub struct UnitOutput { /// The unit that generated this output. pub unit: Unit, /// Path to the unit's primary output (an executable or cdylib). pub path: PathBuf, /// The script metadata, if this unit's package has a build script. /// /// This is used for indexing [`Compilation::extra_env`]. pub script_meta: Option, } /// A structure returning the result of a compilation. pub struct Compilation<'cfg> { /// An array of all tests created during this compilation. pub tests: Vec, /// An array of all binaries created. pub binaries: Vec, /// An array of all cdylibs created. pub cdylibs: Vec, /// The crate names of the root units specified on the command-line. pub root_crate_names: Vec, /// All directories for the output of native build commands. 
/// /// This is currently used to drive some entries which are added to the /// LD_LIBRARY_PATH as appropriate. /// /// The order should be deterministic. pub native_dirs: BTreeSet, /// Root output directory (for the local package's artifacts) pub root_output: HashMap, /// Output directory for rust dependencies. /// May be for the host or for a specific target. pub deps_output: HashMap, /// The path to the host libdir for the compiler used sysroot_host_libdir: PathBuf, /// The path to libstd for each target sysroot_target_libdir: HashMap, /// Extra environment variables that were passed to compilations and should /// be passed to future invocations of programs. /// /// The key is the build script metadata for uniquely identifying the /// `RunCustomBuild` unit that generated these env vars. pub extra_env: HashMap>, /// Libraries to test with rustdoc. pub to_doc_test: Vec, /// The target host triple. pub host: String, config: &'cfg Config, /// Rustc process to be used by default rustc_process: ProcessBuilder, /// Rustc process to be used for workspace crates instead of rustc_process rustc_workspace_wrapper_process: ProcessBuilder, /// Optional rustc process to be used for primary crates instead of either rustc_process or /// rustc_workspace_wrapper_process primary_rustc_process: Option, target_runners: HashMap)>>, } impl<'cfg> Compilation<'cfg> { pub fn new<'a>(bcx: &BuildContext<'a, 'cfg>) -> CargoResult> { let mut rustc = bcx.rustc().process(); let mut primary_rustc_process = bcx.build_config.primary_unit_rustc.clone(); let mut rustc_workspace_wrapper_process = bcx.rustc().workspace_process(); if bcx.config.extra_verbose() { rustc.display_env_vars(); rustc_workspace_wrapper_process.display_env_vars(); if let Some(rustc) = primary_rustc_process.as_mut() { rustc.display_env_vars(); } } Ok(Compilation { // TODO: deprecated; remove. 
native_dirs: BTreeSet::new(), root_output: HashMap::new(), deps_output: HashMap::new(), sysroot_host_libdir: bcx .target_data .info(CompileKind::Host) .sysroot_host_libdir .clone(), sysroot_target_libdir: bcx .all_kinds .iter() .map(|&kind| { ( kind, bcx.target_data.info(kind).sysroot_target_libdir.clone(), ) }) .collect(), tests: Vec::new(), binaries: Vec::new(), cdylibs: Vec::new(), root_crate_names: Vec::new(), extra_env: HashMap::new(), to_doc_test: Vec::new(), config: bcx.config, host: bcx.host_triple().to_string(), rustc_process: rustc, rustc_workspace_wrapper_process, primary_rustc_process, target_runners: bcx .build_config .requested_kinds .iter() .chain(Some(&CompileKind::Host)) .map(|kind| Ok((*kind, target_runner(bcx, *kind)?))) .collect::>>()?, }) } /// Returns a [`ProcessBuilder`] for running `rustc`. /// /// `is_primary` is true if this is a "primary package", which means it /// was selected by the user on the command-line (such as with a `-p` /// flag), see [`crate::core::compiler::Context::primary_packages`]. /// /// `is_workspace` is true if this is a workspace member. pub fn rustc_process( &self, unit: &Unit, is_primary: bool, is_workspace: bool, ) -> CargoResult { let rustc = if is_primary && self.primary_rustc_process.is_some() { self.primary_rustc_process.clone().unwrap() } else if is_workspace { self.rustc_workspace_wrapper_process.clone() } else { self.rustc_process.clone() }; let cmd = fill_rustc_tool_env(rustc, unit); self.fill_env(cmd, &unit.pkg, None, unit.kind, true) } /// Returns a [`ProcessBuilder`] for running `rustdoc`. 
pub fn rustdoc_process( &self, unit: &Unit, script_meta: Option, ) -> CargoResult { let rustdoc = ProcessBuilder::new(&*self.config.rustdoc()?); let cmd = fill_rustc_tool_env(rustdoc, unit); let mut cmd = self.fill_env(cmd, &unit.pkg, script_meta, unit.kind, true)?; cmd.retry_with_argfile(true); unit.target.edition().cmd_edition_arg(&mut cmd); for crate_type in unit.target.rustc_crate_types() { cmd.arg("--crate-type").arg(crate_type.as_str()); } Ok(cmd) } /// Returns a [`ProcessBuilder`] appropriate for running a process for the /// host platform. /// /// This is currently only used for running build scripts. If you use this /// for anything else, please be extra careful on how environment /// variables are set! pub fn host_process>( &self, cmd: T, pkg: &Package, ) -> CargoResult { self.fill_env( ProcessBuilder::new(cmd), pkg, None, CompileKind::Host, false, ) } pub fn target_runner(&self, kind: CompileKind) -> Option<&(PathBuf, Vec)> { self.target_runners.get(&kind).and_then(|x| x.as_ref()) } /// Returns a [`ProcessBuilder`] appropriate for running a process for the /// target platform. This is typically used for `cargo run` and `cargo /// test`. /// /// `script_meta` is the metadata for the `RunCustomBuild` unit that this /// unit used for its build script. Use `None` if the package did not have /// a build script. pub fn target_process>( &self, cmd: T, kind: CompileKind, pkg: &Package, script_meta: Option, ) -> CargoResult { let builder = if let Some((runner, args)) = self.target_runner(kind) { let mut builder = ProcessBuilder::new(runner); builder.args(args); builder.arg(cmd); builder } else { ProcessBuilder::new(cmd) }; self.fill_env(builder, pkg, script_meta, kind, false) } /// Prepares a new process with an appropriate environment to run against /// the artifacts produced by the build process. /// /// The package argument is also used to configure environment variables as /// well as the working directory of the child process. 
fn fill_env( &self, mut cmd: ProcessBuilder, pkg: &Package, script_meta: Option, kind: CompileKind, is_rustc_tool: bool, ) -> CargoResult { let mut search_path = Vec::new(); if is_rustc_tool { search_path.push(self.deps_output[&CompileKind::Host].clone()); search_path.push(self.sysroot_host_libdir.clone()); } else { search_path.extend(super::filter_dynamic_search_path( self.native_dirs.iter(), &self.root_output[&kind], )); search_path.push(self.deps_output[&kind].clone()); search_path.push(self.root_output[&kind].clone()); // For build-std, we don't want to accidentally pull in any shared // libs from the sysroot that ships with rustc. This may not be // required (at least I cannot craft a situation where it // matters), but is here to be safe. if self.config.cli_unstable().build_std.is_none() { search_path.push(self.sysroot_target_libdir[&kind].clone()); } } let dylib_path = paths::dylib_path(); let dylib_path_is_empty = dylib_path.is_empty(); search_path.extend(dylib_path.into_iter()); if cfg!(target_os = "macos") && dylib_path_is_empty { // These are the defaults when DYLD_FALLBACK_LIBRARY_PATH isn't // set or set to an empty string. Since Cargo is explicitly setting // the value, make sure the defaults still work. 
if let Some(home) = env::var_os("HOME") { search_path.push(PathBuf::from(home).join("lib")); } search_path.push(PathBuf::from("/usr/local/lib")); search_path.push(PathBuf::from("/usr/lib")); } let search_path = paths::join_paths(&search_path, paths::dylib_path_envvar())?; cmd.env(paths::dylib_path_envvar(), &search_path); if let Some(meta) = script_meta { if let Some(env) = self.extra_env.get(&meta) { for (k, v) in env { cmd.env(k, v); } } } let metadata = pkg.manifest().metadata(); let cargo_exe = self.config.cargo_exe()?; cmd.env(crate::CARGO_ENV, cargo_exe); // When adding new environment variables depending on // crate properties which might require rebuild upon change // consider adding the corresponding properties to the hash // in BuildContext::target_metadata() cmd.env("CARGO_MANIFEST_DIR", pkg.root()) .env("CARGO_PKG_VERSION_MAJOR", &pkg.version().major.to_string()) .env("CARGO_PKG_VERSION_MINOR", &pkg.version().minor.to_string()) .env("CARGO_PKG_VERSION_PATCH", &pkg.version().patch.to_string()) .env("CARGO_PKG_VERSION_PRE", pkg.version().pre.as_str()) .env("CARGO_PKG_VERSION", &pkg.version().to_string()) .env("CARGO_PKG_NAME", &*pkg.name()) .env( "CARGO_PKG_DESCRIPTION", metadata.description.as_ref().unwrap_or(&String::new()), ) .env( "CARGO_PKG_HOMEPAGE", metadata.homepage.as_ref().unwrap_or(&String::new()), ) .env( "CARGO_PKG_REPOSITORY", metadata.repository.as_ref().unwrap_or(&String::new()), ) .env( "CARGO_PKG_LICENSE", metadata.license.as_ref().unwrap_or(&String::new()), ) .env( "CARGO_PKG_LICENSE_FILE", metadata.license_file.as_ref().unwrap_or(&String::new()), ) .env("CARGO_PKG_AUTHORS", &pkg.authors().join(":")) .env( "CARGO_PKG_RUST_VERSION", &pkg.rust_version().unwrap_or(&String::new()), ) .cwd(pkg.root()); // Apply any environment variables from the config for (key, value) in self.config.env_config()?.iter() { // never override a value that has already been set by cargo if cmd.get_envs().contains_key(key) { continue; } if value.is_force() || 
env::var_os(key).is_none() { cmd.env(key, value.resolve(self.config)); } } Ok(cmd) } } /// Prepares a rustc_tool process with additional environment variables /// that are only relevant in a context that has a unit fn fill_rustc_tool_env(mut cmd: ProcessBuilder, unit: &Unit) -> ProcessBuilder { if unit.target.is_bin() { let name = unit .target .binary_filename() .unwrap_or(unit.target.name().to_string()); cmd.env("CARGO_BIN_NAME", name); } cmd.env("CARGO_CRATE_NAME", unit.target.crate_name()); cmd } fn target_runner( bcx: &BuildContext<'_, '_>, kind: CompileKind, ) -> CargoResult)>> { let target = bcx.target_data.short_name(&kind); // try target.{}.runner let key = format!("target.{}.runner", target); if let Some(v) = bcx.config.get::>(&key)? { let path = v.path.resolve_program(bcx.config); return Ok(Some((path, v.args))); } // try target.'cfg(...)'.runner let target_cfg = bcx.target_data.info(kind).cfg(); let mut cfgs = bcx .config .target_cfgs()? .iter() .filter_map(|(key, cfg)| cfg.runner.as_ref().map(|runner| (key, runner))) .filter(|(key, _runner)| CfgExpr::matches_key(key, target_cfg)); let matching_runner = cfgs.next(); if let Some((key, runner)) = cfgs.next() { anyhow::bail!( "several matching instances of `target.'cfg(..)'.runner` in configurations\n\ first match `{}` located in {}\n\ second match `{}` located in {}", matching_runner.unwrap().0, matching_runner.unwrap().1.definition, key, runner.definition ); } Ok(matching_runner.map(|(_k, runner)| { ( runner.val.path.clone().resolve_program(bcx.config), runner.val.args.clone(), ) })) } cargo-0.66.0/src/cargo/core/compiler/compile_kind.rs000066400000000000000000000165001432416201200223220ustar00rootroot00000000000000use crate::core::Target; use crate::util::errors::CargoResult; use crate::util::interning::InternedString; use crate::util::{Config, StableHasher}; use anyhow::Context as _; use serde::Serialize; use std::collections::BTreeSet; use std::fs; use std::hash::{Hash, Hasher}; use std::path::Path; 
/// Indicator for how a unit is being compiled. /// /// This is used primarily for organizing cross compilations vs host /// compilations, where cross compilations happen at the request of `--target` /// and host compilations happen for things like build scripts and procedural /// macros. #[derive(PartialEq, Eq, Hash, Debug, Clone, Copy, PartialOrd, Ord)] pub enum CompileKind { /// Attached to a unit that is compiled for the "host" system or otherwise /// is compiled without a `--target` flag. This is used for procedural /// macros and build scripts, or if the `--target` flag isn't passed. Host, /// Attached to a unit to be compiled for a particular target. This is used /// for units when the `--target` flag is passed. Target(CompileTarget), } impl CompileKind { pub fn is_host(&self) -> bool { matches!(self, CompileKind::Host) } pub fn for_target(self, target: &Target) -> CompileKind { // Once we start compiling for the `Host` kind we continue doing so, but // if we are a `Target` kind and then we start compiling for a target // that needs to be on the host we lift ourselves up to `Host`. match self { CompileKind::Host => CompileKind::Host, CompileKind::Target(_) if target.for_host() => CompileKind::Host, CompileKind::Target(n) => CompileKind::Target(n), } } /// Creates a new list of `CompileKind` based on the requested list of /// targets. /// /// If no targets are given then this returns a single-element vector with /// `CompileKind::Host`. pub fn from_requested_targets( config: &Config, targets: &[String], ) -> CargoResult> { let dedup = |targets: &[String]| { Ok(targets .iter() .map(|value| Ok(CompileKind::Target(CompileTarget::new(value)?))) // First collect into a set to deduplicate any `--target` passed // more than once... .collect::>>()? // ... then generate a flat list for everything else to use. 
.into_iter() .collect()) }; if !targets.is_empty() { return dedup(targets); } let kinds = match &config.build_config()?.target { None => Ok(vec![CompileKind::Host]), Some(build_target_config) => dedup(&build_target_config.values(config)?), }; kinds } /// Hash used for fingerprinting. /// /// Metadata hashing uses the normal Hash trait, which does not /// differentiate on `.json` file contents. The fingerprint hash does /// check the contents. pub fn fingerprint_hash(&self) -> u64 { match self { CompileKind::Host => 0, CompileKind::Target(target) => target.fingerprint_hash(), } } } impl serde::ser::Serialize for CompileKind { fn serialize(&self, s: S) -> Result where S: serde::ser::Serializer, { match self { CompileKind::Host => None::<&str>.serialize(s), CompileKind::Target(t) => Some(t.name).serialize(s), } } } /// Abstraction for the representation of a compilation target that Cargo has. /// /// Compilation targets are one of two things right now: /// /// 1. A raw target string, like `x86_64-unknown-linux-gnu`. /// 2. The path to a JSON file, such as `/path/to/my-target.json`. /// /// Raw target strings are typically dictated by `rustc` itself and represent /// built-in targets. Custom JSON files are somewhat unstable, but supported /// here in Cargo. Note that for JSON target files this `CompileTarget` stores a /// full canonicalized path to the target. /// /// The main reason for this existence is to handle JSON target files where when /// we call rustc we pass full paths but when we use it for Cargo's purposes /// like naming directories or looking up configuration keys we only check the /// file stem of JSON target files. For built-in rustc targets this is just an /// uninterpreted string basically. 
#[derive(PartialEq, Eq, Hash, Debug, Clone, Copy, PartialOrd, Ord, Serialize)] pub struct CompileTarget { name: InternedString, } impl CompileTarget { pub fn new(name: &str) -> CargoResult { let name = name.trim(); if name.is_empty() { anyhow::bail!("target was empty"); } if !name.ends_with(".json") { return Ok(CompileTarget { name: name.into() }); } // If `name` ends in `.json` then it's likely a custom target // specification. Canonicalize the path to ensure that different builds // with different paths always produce the same result. let path = Path::new(name) .canonicalize() .with_context(|| format!("target path {:?} is not a valid file", name))?; let name = path .into_os_string() .into_string() .map_err(|_| anyhow::format_err!("target path is not valid unicode"))?; Ok(CompileTarget { name: name.into() }) } /// Returns the full unqualified name of this target, suitable for passing /// to `rustc` directly. /// /// Typically this is pretty much the same as `short_name`, but for the case /// of JSON target files this will be a full canonicalized path name for the /// current filesystem. pub fn rustc_target(&self) -> InternedString { self.name } /// Returns a "short" version of the target name suitable for usage within /// Cargo for configuration and such. /// /// This is typically the same as `rustc_target`, or the full name, but for /// JSON target files this returns just the file stem (e.g. `foo` out of /// `foo.json`) instead of the full path. pub fn short_name(&self) -> &str { // Flexible target specifications often point at json files, so if it // looks like we've got one of those just use the file stem (the file // name without ".json") as a short name for this target. Note that the // `unwrap()` here should never trigger since we have a nonempty name // and it starts as utf-8 so it's always utf-8 if self.name.ends_with(".json") { Path::new(&self.name).file_stem().unwrap().to_str().unwrap() } else { &self.name } } /// See [`CompileKind::fingerprint_hash`]. 
pub fn fingerprint_hash(&self) -> u64 { let mut hasher = StableHasher::new(); match self .name .ends_with(".json") .then(|| fs::read_to_string(self.name)) { Some(Ok(contents)) => { // This may have some performance concerns, since it is called // fairly often. If that ever seems worth fixing, consider // embedding this in `CompileTarget`. contents.hash(&mut hasher); } _ => { self.name.hash(&mut hasher); } } hasher.finish() } } cargo-0.66.0/src/cargo/core/compiler/context/000077500000000000000000000000001432416201200210015ustar00rootroot00000000000000cargo-0.66.0/src/cargo/core/compiler/context/compilation_files.rs000066400000000000000000000666551432416201200250710ustar00rootroot00000000000000use std::collections::HashMap; use std::env; use std::fmt; use std::hash::{Hash, Hasher}; use std::path::{Path, PathBuf}; use std::sync::Arc; use lazycell::LazyCell; use log::debug; use super::{BuildContext, CompileKind, Context, FileFlavor, Layout}; use crate::core::compiler::{CompileMode, CompileTarget, CrateType, FileType, Unit}; use crate::core::{Target, TargetKind, Workspace}; use crate::util::{self, CargoResult, StableHasher}; /// This is a generic version number that can be changed to make /// backwards-incompatible changes to any file structures in the output /// directory. For example, the fingerprint files or the build-script /// output files. Normally cargo updates ship with rustc updates which will /// cause a new hash due to the rustc version changing, but this allows /// cargo to be extra careful to deal with different versions of cargo that /// use the same rustc version. const METADATA_VERSION: u8 = 2; /// The `Metadata` is a hash used to make unique file names for each unit in a /// build. It is also use for symbol mangling. /// /// For example: /// - A project may depend on crate `A` and crate `B`, so the package name must be in the file name. /// - Similarly a project may depend on two versions of `A`, so the version must be in the file name. 
/// /// In general this must include all things that need to be distinguished in different parts of /// the same build. This is absolutely required or we override things before /// we get chance to use them. /// /// It is also used for symbol mangling, because if you have two versions of /// the same crate linked together, their symbols need to be differentiated. /// /// We use a hash because it is an easy way to guarantee /// that all the inputs can be converted to a valid path. /// /// This also acts as the main layer of caching provided by Cargo. /// For example, we want to cache `cargo build` and `cargo doc` separately, so that running one /// does not invalidate the artifacts for the other. We do this by including `CompileMode` in the /// hash, thus the artifacts go in different folders and do not override each other. /// If we don't add something that we should have, for this reason, we get the /// correct output but rebuild more than is needed. /// /// Some things that need to be tracked to ensure the correct output should definitely *not* /// go in the `Metadata`. For example, the modification time of a file, should be tracked to make a /// rebuild when the file changes. However, it would be wasteful to include in the `Metadata`. The /// old artifacts are never going to be needed again. We can save space by just overwriting them. /// If we add something that we should not have, for this reason, we get the correct output but take /// more space than needed. This makes not including something in `Metadata` /// a form of cache invalidation. /// /// You should also avoid anything that would interfere with reproducible /// builds. For example, *any* absolute path should be avoided. This is one /// reason that `RUSTFLAGS` is not in `Metadata`, because it often has /// absolute paths (like `--remap-path-prefix` which is fundamentally used for /// reproducible builds and has absolute paths in it). 
Also, in some cases the /// mangled symbols need to be stable between different builds with different /// settings. For example, profile-guided optimizations need to swap /// `RUSTFLAGS` between runs, but needs to keep the same symbol names. /// /// Note that the `Fingerprint` is in charge of tracking everything needed to determine if a /// rebuild is needed. #[derive(Copy, Clone, Hash, Eq, PartialEq, Ord, PartialOrd)] pub struct Metadata(u64); impl fmt::Display for Metadata { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{:016x}", self.0) } } impl fmt::Debug for Metadata { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Metadata({:016x})", self.0) } } /// Information about the metadata hashes used for a `Unit`. struct MetaInfo { /// The symbol hash to use. meta_hash: Metadata, /// Whether or not the `-C extra-filename` flag is used to generate unique /// output filenames for this `Unit`. /// /// If this is `true`, the `meta_hash` is used for the filename. use_extra_filename: bool, } /// Collection of information about the files emitted by the compiler, and the /// output directory structure. pub struct CompilationFiles<'a, 'cfg> { /// The target directory layout for the host (and target if it is the same as host). pub(super) host: Layout, /// The target directory layout for the target (if different from then host). pub(super) target: HashMap, /// Additional directory to include a copy of the outputs. export_dir: Option, /// The root targets requested by the user on the command line (does not /// include dependencies). roots: Vec, ws: &'a Workspace<'cfg>, /// Metadata hash to use for each unit. metas: HashMap, /// For each Unit, a list all files produced. outputs: HashMap>>>, } /// Info about a single file emitted by the compiler. #[derive(Debug)] pub struct OutputFile { /// Absolute path to the file that will be produced by the build process. 
pub path: PathBuf, /// If it should be linked into `target`, and what it should be called /// (e.g., without metadata). pub hardlink: Option, /// If `--out-dir` is specified, the absolute path to the exported file. pub export_path: Option, /// Type of the file (library / debug symbol / else). pub flavor: FileFlavor, } impl OutputFile { /// Gets the hard link if present; otherwise, returns the path. pub fn bin_dst(&self) -> &PathBuf { match self.hardlink { Some(ref link_dst) => link_dst, None => &self.path, } } } impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { pub(super) fn new( cx: &Context<'a, 'cfg>, host: Layout, target: HashMap, ) -> CompilationFiles<'a, 'cfg> { let mut metas = HashMap::new(); for unit in &cx.bcx.roots { metadata_of(unit, cx, &mut metas); } let outputs = metas .keys() .cloned() .map(|unit| (unit, LazyCell::new())) .collect(); CompilationFiles { ws: cx.bcx.ws, host, target, export_dir: cx.bcx.build_config.export_dir.clone(), roots: cx.bcx.roots.clone(), metas, outputs, } } /// Returns the appropriate directory layout for either a plugin or not. pub fn layout(&self, kind: CompileKind) -> &Layout { match kind { CompileKind::Host => &self.host, CompileKind::Target(target) => &self.target[&target], } } /// Gets the metadata for the given unit. /// /// See module docs for more details. pub fn metadata(&self, unit: &Unit) -> Metadata { self.metas[unit].meta_hash } /// Returns whether or not `-C extra-filename` is used to extend the /// output filenames to make them unique. pub fn use_extra_filename(&self, unit: &Unit) -> bool { self.metas[unit].use_extra_filename } /// Gets the short hash based only on the `PackageId`. /// Used for the metadata when `metadata` returns `None`. pub fn target_short_hash(&self, unit: &Unit) -> String { let hashable = unit.pkg.package_id().stable_hash(self.ws.root()); util::short_hash(&(METADATA_VERSION, hashable)) } /// Returns the directory where the artifacts for the given unit are /// initially created. 
pub fn out_dir(&self, unit: &Unit) -> PathBuf { // Docscrape units need to have doc/ set as the out_dir so sources for reverse-dependencies // will be put into doc/ and not into deps/ where the *.examples files are stored. if unit.mode.is_doc() || unit.mode.is_doc_scrape() { self.layout(unit.kind).doc().to_path_buf() } else if unit.mode.is_doc_test() { panic!("doc tests do not have an out dir"); } else if unit.target.is_custom_build() { self.build_script_dir(unit) } else if unit.target.is_example() { self.layout(unit.kind).examples().to_path_buf() } else if unit.artifact.is_true() { self.artifact_dir(unit) } else { self.deps_dir(unit).to_path_buf() } } /// Additional export directory from `--out-dir`. pub fn export_dir(&self) -> Option { self.export_dir.clone() } /// Directory name to use for a package in the form `NAME-HASH`. /// /// Note that some units may share the same directory, so care should be /// taken in those cases! fn pkg_dir(&self, unit: &Unit) -> String { let name = unit.pkg.package_id().name(); let meta = &self.metas[unit]; if meta.use_extra_filename { format!("{}-{}", name, meta.meta_hash) } else { format!("{}-{}", name, self.target_short_hash(unit)) } } /// Returns the final artifact path for the host (`/…/target/debug`) pub fn host_dest(&self) -> &Path { self.host.dest() } /// Returns the root of the build output tree for the host (`/…/target`) pub fn host_root(&self) -> &Path { self.host.root() } /// Returns the host `deps` directory path. pub fn host_deps(&self) -> &Path { self.host.deps() } /// Returns the directories where Rust crate dependencies are found for the /// specified unit. pub fn deps_dir(&self, unit: &Unit) -> &Path { self.layout(unit.kind).deps() } /// Directory where the fingerprint for the given unit should go. pub fn fingerprint_dir(&self, unit: &Unit) -> PathBuf { let dir = self.pkg_dir(unit); self.layout(unit.kind).fingerprint().join(dir) } /// Returns the path for a file in the fingerprint directory. 
/// /// The "prefix" should be something to distinguish the file from other /// files in the fingerprint directory. pub fn fingerprint_file_path(&self, unit: &Unit, prefix: &str) -> PathBuf { // Different targets need to be distinguished in the let kind = unit.target.kind().description(); let flavor = if unit.mode.is_any_test() { "test-" } else if unit.mode.is_doc() { "doc-" } else if unit.mode.is_run_custom_build() { "run-" } else { "" }; let name = format!("{}{}{}-{}", prefix, flavor, kind, unit.target.name()); self.fingerprint_dir(unit).join(name) } /// Path where compiler output is cached. pub fn message_cache_path(&self, unit: &Unit) -> PathBuf { self.fingerprint_file_path(unit, "output-") } /// Returns the directory where a compiled build script is stored. /// `/path/to/target/{debug,release}/build/PKG-HASH` pub fn build_script_dir(&self, unit: &Unit) -> PathBuf { assert!(unit.target.is_custom_build()); assert!(!unit.mode.is_run_custom_build()); assert!(self.metas.contains_key(unit)); let dir = self.pkg_dir(unit); self.layout(CompileKind::Host).build().join(dir) } /// Returns the directory for compiled artifacts files. /// `/path/to/target/{debug,release}/deps/artifact/KIND/PKG-HASH` fn artifact_dir(&self, unit: &Unit) -> PathBuf { assert!(self.metas.contains_key(unit)); assert!(unit.artifact.is_true()); let dir = self.pkg_dir(unit); let kind = match unit.target.kind() { TargetKind::Bin => "bin", TargetKind::Lib(lib_kinds) => match lib_kinds.as_slice() { &[CrateType::Cdylib] => "cdylib", &[CrateType::Staticlib] => "staticlib", invalid => unreachable!( "BUG: unexpected artifact library type(s): {:?} - these should have been split", invalid ), }, invalid => unreachable!( "BUG: {:?} are not supposed to be used as artifacts", invalid ), }; self.layout(unit.kind).artifact().join(dir).join(kind) } /// Returns the directory where information about running a build script /// is stored. 
/// `/path/to/target/{debug,release}/build/PKG-HASH` pub fn build_script_run_dir(&self, unit: &Unit) -> PathBuf { assert!(unit.target.is_custom_build()); assert!(unit.mode.is_run_custom_build()); let dir = self.pkg_dir(unit); self.layout(unit.kind).build().join(dir) } /// Returns the "OUT_DIR" directory for running a build script. /// `/path/to/target/{debug,release}/build/PKG-HASH/out` pub fn build_script_out_dir(&self, unit: &Unit) -> PathBuf { self.build_script_run_dir(unit).join("out") } /// Returns the path to the executable binary for the given bin target. /// /// This should only to be used when a `Unit` is not available. pub fn bin_link_for_target( &self, target: &Target, kind: CompileKind, bcx: &BuildContext<'_, '_>, ) -> CargoResult { assert!(target.is_bin()); let dest = self.layout(kind).dest(); let info = bcx.target_data.info(kind); let (file_types, _) = info .rustc_outputs( CompileMode::Build, &TargetKind::Bin, bcx.target_data.short_name(&kind), ) .expect("target must support `bin`"); let file_type = file_types .iter() .find(|file_type| file_type.flavor == FileFlavor::Normal) .expect("target must support `bin`"); Ok(dest.join(file_type.uplift_filename(target))) } /// Returns the filenames that the given unit will generate. /// /// Note: It is not guaranteed that all of the files will be generated. pub(super) fn outputs( &self, unit: &Unit, bcx: &BuildContext<'a, 'cfg>, ) -> CargoResult>> { self.outputs[unit] .try_borrow_with(|| self.calc_outputs(unit, bcx)) .map(Arc::clone) } /// Returns the path where the output for the given unit and FileType /// should be uplifted to. /// /// Returns `None` if the unit shouldn't be uplifted (for example, a /// dependent rlib). fn uplift_to(&self, unit: &Unit, file_type: &FileType, from_path: &Path) -> Option { // Tests, check, doc, etc. should not be uplifted. if unit.mode != CompileMode::Build || file_type.flavor == FileFlavor::Rmeta { return None; } // Artifact dependencies are never uplifted. 
if unit.artifact.is_true() { return None; } // - Binaries: The user always wants to see these, even if they are // implicitly built (for example for integration tests). // - dylibs: This ensures that the dynamic linker pulls in all the // latest copies (even if the dylib was built from a previous cargo // build). There are complex reasons for this, see #8139, #6167, #6162. // - Things directly requested from the command-line (the "roots"). // This one is a little questionable for rlibs (see #6131), but is // historically how Cargo has operated. This is primarily useful to // give the user access to staticlibs and cdylibs. if !unit.target.is_bin() && !unit.target.is_custom_build() && file_type.crate_type != Some(CrateType::Dylib) && !self.roots.contains(unit) { return None; } let filename = file_type.uplift_filename(&unit.target); let uplift_path = if unit.target.is_example() { // Examples live in their own little world. self.layout(unit.kind).examples().join(filename) } else if unit.target.is_custom_build() { self.build_script_dir(unit).join(filename) } else { self.layout(unit.kind).dest().join(filename) }; if from_path == uplift_path { // This can happen with things like examples that reside in the // same directory, do not have a metadata hash (like on Windows), // and do not have hyphens. return None; } Some(uplift_path) } fn calc_outputs( &self, unit: &Unit, bcx: &BuildContext<'a, 'cfg>, ) -> CargoResult>> { let ret = match unit.mode { CompileMode::Doc { .. } => { let path = self .out_dir(unit) .join(unit.target.crate_name()) .join("index.html"); vec![OutputFile { path, hardlink: None, export_path: None, flavor: FileFlavor::Normal, }] } CompileMode::RunCustomBuild => { // At this time, this code path does not handle build script // outputs. vec![] } CompileMode::Doctest => { // Doctests are built in a temporary directory and then // deleted. There is the `--persist-doctests` unstable flag, // but Cargo does not know about that. 
vec![] } CompileMode::Docscrape => { let path = self .deps_dir(unit) .join(format!("{}.examples", unit.buildkey())); vec![OutputFile { path, hardlink: None, export_path: None, flavor: FileFlavor::Normal, }] } CompileMode::Test | CompileMode::Build | CompileMode::Bench | CompileMode::Check { .. } => self.calc_outputs_rustc(unit, bcx)?, }; debug!("Target filenames: {:?}", ret); Ok(Arc::new(ret)) } /// Computes the actual, full pathnames for all the files generated by rustc. /// /// The `OutputFile` also contains the paths where those files should be /// "uplifted" to. fn calc_outputs_rustc( &self, unit: &Unit, bcx: &BuildContext<'a, 'cfg>, ) -> CargoResult> { let out_dir = self.out_dir(unit); let info = bcx.target_data.info(unit.kind); let triple = bcx.target_data.short_name(&unit.kind); let (file_types, unsupported) = info.rustc_outputs(unit.mode, unit.target.kind(), triple)?; if file_types.is_empty() { if !unsupported.is_empty() { let unsupported_strs: Vec<_> = unsupported.iter().map(|ct| ct.as_str()).collect(); anyhow::bail!( "cannot produce {} for `{}` as the target `{}` \ does not support these crate types", unsupported_strs.join(", "), unit.pkg, triple, ) } anyhow::bail!( "cannot compile `{}` as the target `{}` does not \ support any of the output crate types", unit.pkg, triple, ); } // Convert FileType to OutputFile. let mut outputs = Vec::new(); for file_type in file_types { let meta = &self.metas[unit]; let meta_opt = meta.use_extra_filename.then(|| meta.meta_hash.to_string()); let path = out_dir.join(file_type.output_filename(&unit.target, meta_opt.as_deref())); // If, the `different_binary_name` feature is enabled, the name of the hardlink will // be the name of the binary provided by the user in `Cargo.toml`. 
let hardlink = self.uplift_to(unit, &file_type, &path); let export_path = if unit.target.is_custom_build() { None } else { self.export_dir.as_ref().and_then(|export_dir| { hardlink .as_ref() .map(|hardlink| export_dir.join(hardlink.file_name().unwrap())) }) }; outputs.push(OutputFile { path, hardlink, export_path, flavor: file_type.flavor, }); } Ok(outputs) } } fn metadata_of<'a>( unit: &Unit, cx: &Context<'_, '_>, metas: &'a mut HashMap, ) -> &'a MetaInfo { if !metas.contains_key(unit) { let meta = compute_metadata(unit, cx, metas); metas.insert(unit.clone(), meta); for dep in cx.unit_deps(unit) { metadata_of(&dep.unit, cx, metas); } } &metas[unit] } fn compute_metadata( unit: &Unit, cx: &Context<'_, '_>, metas: &mut HashMap, ) -> MetaInfo { let bcx = &cx.bcx; let mut hasher = StableHasher::new(); METADATA_VERSION.hash(&mut hasher); // Unique metadata per (name, source, version) triple. This'll allow us // to pull crates from anywhere without worrying about conflicts. unit.pkg .package_id() .stable_hash(bcx.ws.root()) .hash(&mut hasher); // Also mix in enabled features to our metadata. This'll ensure that // when changing feature sets each lib is separately cached. unit.features.hash(&mut hasher); // Mix in the target-metadata of all the dependencies of this target. let mut deps_metadata = cx .unit_deps(unit) .iter() .map(|dep| metadata_of(&dep.unit, cx, metas).meta_hash) .collect::>(); deps_metadata.sort(); deps_metadata.hash(&mut hasher); // Throw in the profile we're compiling with. This helps caching // `panic=abort` and `panic=unwind` artifacts, additionally with various // settings like debuginfo and whatnot. unit.profile.hash(&mut hasher); unit.mode.hash(&mut hasher); cx.lto[unit].hash(&mut hasher); // Artifacts compiled for the host should have a different // metadata piece than those compiled for the target, so make sure // we throw in the unit's `kind` as well. 
Use `fingerprint_hash` // so that the StableHash doesn't change based on the pathnames // of the custom target JSON spec files. unit.kind.fingerprint_hash().hash(&mut hasher); // Finally throw in the target name/kind. This ensures that concurrent // compiles of targets in the same crate don't collide. unit.target.name().hash(&mut hasher); unit.target.kind().hash(&mut hasher); hash_rustc_version(bcx, &mut hasher); if cx.bcx.ws.is_member(&unit.pkg) { // This is primarily here for clippy. This ensures that the clippy // artifacts are separate from the `check` ones. if let Some(path) = &cx.bcx.rustc().workspace_wrapper { path.hash(&mut hasher); } } // Seed the contents of `__CARGO_DEFAULT_LIB_METADATA` to the hasher if present. // This should be the release channel, to get a different hash for each channel. if let Ok(ref channel) = env::var("__CARGO_DEFAULT_LIB_METADATA") { channel.hash(&mut hasher); } // std units need to be kept separate from user dependencies. std crates // are differentiated in the Unit with `is_std` (for things like // `-Zforce-unstable-if-unmarked`), so they are always built separately. // This isn't strictly necessary for build dependencies which probably // don't need unstable support. A future experiment might be to set // `is_std` to false for build dependencies so that they can be shared // with user dependencies. unit.is_std.hash(&mut hasher); MetaInfo { meta_hash: Metadata(hasher.finish()), use_extra_filename: should_use_metadata(bcx, unit), } } fn hash_rustc_version(bcx: &BuildContext<'_, '_>, hasher: &mut StableHasher) { let vers = &bcx.rustc().version; if vers.pre.is_empty() || bcx.config.cli_unstable().separate_nightlies { // For stable, keep the artifacts separate. This helps if someone is // testing multiple versions, to avoid recompiles. bcx.rustc().verbose_version.hash(hasher); return; } // On "nightly"/"beta"/"dev"/etc, keep each "channel" separate. 
Don't hash // the date/git information, so that whenever someone updates "nightly", // they won't have a bunch of stale artifacts in the target directory. // // This assumes that the first segment is the important bit ("nightly", // "beta", "dev", etc.). Skip other parts like the `.3` in `-beta.3`. vers.pre.split('.').next().hash(hasher); // Keep "host" since some people switch hosts to implicitly change // targets, (like gnu vs musl or gnu vs msvc). In the future, we may want // to consider hashing `unit.kind.short_name()` instead. bcx.rustc().host.hash(hasher); // None of the other lines are important. Currently they are: // binary: rustc <-- or "rustdoc" // commit-hash: 38114ff16e7856f98b2b4be7ab4cd29b38bed59a // commit-date: 2020-03-21 // host: x86_64-apple-darwin // release: 1.44.0-nightly // LLVM version: 9.0 // // The backend version ("LLVM version") might become more relevant in // the future when cranelift sees more use, and people want to switch // between different backends without recompiling. } /// Returns whether or not this unit should use a metadata hash. fn should_use_metadata(bcx: &BuildContext<'_, '_>, unit: &Unit) -> bool { if unit.mode.is_doc_test() || unit.mode.is_doc() { // Doc tests do not have metadata. return false; } if unit.mode.is_any_test() || unit.mode.is_check() { // These always use metadata. return true; } // No metadata in these cases: // // - dylibs: // - if any dylib names are encoded in executables, so they can't be renamed. // - TODO: Maybe use `-install-name` on macOS or `-soname` on other UNIX systems // to specify the dylib name to be used by the linker instead of the filename. // - Windows MSVC executables: The path to the PDB is embedded in the // executable, and we don't want the PDB path to include the hash in it. // - wasm32-unknown-emscripten executables: When using emscripten, the path to the // .wasm file is embedded in the .js file, so we don't want the hash in there. 
// // This is only done for local packages, as we don't expect to export // dependencies. // // The __CARGO_DEFAULT_LIB_METADATA env var is used to override this to // force metadata in the hash. This is only used for building libstd. For // example, if libstd is placed in a common location, we don't want a file // named /usr/lib/libstd.so which could conflict with other rustc // installs. In addition it prevents accidentally loading a libstd of a // different compiler at runtime. // See https://github.com/rust-lang/cargo/issues/3005 let short_name = bcx.target_data.short_name(&unit.kind); if (unit.target.is_dylib() || unit.target.is_cdylib() || (unit.target.is_executable() && short_name == "wasm32-unknown-emscripten") || (unit.target.is_executable() && short_name.contains("msvc"))) && unit.pkg.package_id().source_id().is_path() && env::var("__CARGO_DEFAULT_LIB_METADATA").is_err() { return false; } true } cargo-0.66.0/src/cargo/core/compiler/context/mod.rs000066400000000000000000000667271432416201200221500ustar00rootroot00000000000000use std::collections::{BTreeSet, HashMap, HashSet}; use std::path::{Path, PathBuf}; use std::sync::{Arc, Mutex}; use crate::core::compiler::compilation::{self, UnitOutput}; use crate::core::compiler::{self, artifact, Unit}; use crate::core::PackageId; use crate::util::errors::CargoResult; use crate::util::profile; use anyhow::{bail, Context as _}; use filetime::FileTime; use jobserver::Client; use super::build_plan::BuildPlan; use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts}; use super::fingerprint::Fingerprint; use super::job_queue::JobQueue; use super::layout::Layout; use super::lto::Lto; use super::unit_graph::UnitDep; use super::{ BuildContext, Compilation, CompileKind, CompileMode, Executor, FileFlavor, RustDocFingerprint, }; mod compilation_files; use self::compilation_files::CompilationFiles; pub use self::compilation_files::{Metadata, OutputFile}; /// Collection of all the stuff that is needed to 
perform a build. pub struct Context<'a, 'cfg> { /// Mostly static information about the build task. pub bcx: &'a BuildContext<'a, 'cfg>, /// A large collection of information about the result of the entire compilation. pub compilation: Compilation<'cfg>, /// Output from build scripts, updated after each build script runs. pub build_script_outputs: Arc>, /// Dependencies (like rerun-if-changed) declared by a build script. /// This is *only* populated from the output from previous runs. /// If the build script hasn't ever been run, then it must be run. pub build_explicit_deps: HashMap, /// Fingerprints used to detect if a unit is out-of-date. pub fingerprints: HashMap>, /// Cache of file mtimes to reduce filesystem hits. pub mtime_cache: HashMap, /// A set used to track which units have been compiled. /// A unit may appear in the job graph multiple times as a dependency of /// multiple packages, but it only needs to run once. pub compiled: HashSet, /// Linking information for each `Unit`. /// See `build_map` for details. pub build_scripts: HashMap>, /// Job server client to manage concurrency with other processes. pub jobserver: Client, /// "Primary" packages are the ones the user selected on the command-line /// with `-p` flags. If no flags are specified, then it is the defaults /// based on the current directory and the default workspace members. primary_packages: HashSet, /// An abstraction of the files and directories that will be generated by /// the compilation. This is `None` until after `unit_dependencies` has /// been computed. files: Option>, /// A set of units which are compiling rlibs and are expected to produce /// metadata files in addition to the rlib itself. rmeta_required: HashSet, /// When we're in jobserver-per-rustc process mode, this keeps those /// jobserver clients for each Unit (which eventually becomes a rustc /// process). pub rustc_clients: HashMap, /// Map of the LTO-status of each unit. 
This indicates what sort of /// compilation is happening (only object, only bitcode, both, etc), and is /// precalculated early on. pub lto: HashMap, /// Map of Doc/Docscrape units to metadata for their -Cmetadata flag. /// See Context::find_metadata_units for more details. pub metadata_for_doc_units: HashMap, } impl<'a, 'cfg> Context<'a, 'cfg> { pub fn new(bcx: &'a BuildContext<'a, 'cfg>) -> CargoResult { // Load up the jobserver that we'll use to manage our parallelism. This // is the same as the GNU make implementation of a jobserver, and // intentionally so! It's hoped that we can interact with GNU make and // all share the same jobserver. // // Note that if we don't have a jobserver in our environment then we // create our own, and we create it with `n` tokens, but immediately // acquire one, because one token is ourself, a running process. let jobserver = match bcx.config.jobserver_from_env() { Some(c) => c.clone(), None => { let client = Client::new(bcx.jobs() as usize) .with_context(|| "failed to create jobserver")?; client.acquire_raw()?; client } }; Ok(Self { bcx, compilation: Compilation::new(bcx)?, build_script_outputs: Arc::new(Mutex::new(BuildScriptOutputs::default())), fingerprints: HashMap::new(), mtime_cache: HashMap::new(), compiled: HashSet::new(), build_scripts: HashMap::new(), build_explicit_deps: HashMap::new(), jobserver, primary_packages: HashSet::new(), files: None, rmeta_required: HashSet::new(), rustc_clients: HashMap::new(), lto: HashMap::new(), metadata_for_doc_units: HashMap::new(), }) } /// Starts compilation, waits for it to finish, and returns information /// about the result of compilation. 
pub fn compile(mut self, exec: &Arc) -> CargoResult> { let mut queue = JobQueue::new(self.bcx); let mut plan = BuildPlan::new(); let build_plan = self.bcx.build_config.build_plan; self.lto = super::lto::generate(self.bcx)?; self.prepare_units()?; self.prepare()?; custom_build::build_map(&mut self)?; self.check_collisions()?; self.compute_metadata_for_doc_units(); // We need to make sure that if there were any previous docs // already compiled, they were compiled with the same Rustc version that we're currently // using. Otherwise we must remove the `doc/` folder and compile again forcing a rebuild. // // This is important because the `.js`/`.html` & `.css` files that are generated by Rustc don't have // any versioning (See https://github.com/rust-lang/cargo/issues/8461). // Therefore, we can end up with weird bugs and behaviours if we mix different // versions of these files. if self.bcx.build_config.mode.is_doc() { RustDocFingerprint::check_rustdoc_fingerprint(&self)? } for unit in &self.bcx.roots { // Build up a list of pending jobs, each of which represent // compiling a particular package. No actual work is executed as // part of this, that's all done next as part of the `execute` // function which will run everything in order with proper // parallelism. let force_rebuild = self.bcx.build_config.force_rebuild; super::compile(&mut self, &mut queue, &mut plan, unit, exec, force_rebuild)?; } // Now that we've got the full job queue and we've done all our // fingerprint analysis to determine what to run, bust all the memoized // fingerprint hashes to ensure that during the build they all get the // most up-to-date values. In theory we only need to bust hashes that // transitively depend on a dirty build script, but it shouldn't matter // that much for performance anyway. for fingerprint in self.fingerprints.values() { fingerprint.clear_memoized(); } // Now that we've figured out everything that we're going to do, do it! 
queue.execute(&mut self, &mut plan)?; if build_plan { plan.set_inputs(self.build_plan_inputs()?); plan.output_plan(self.bcx.config); } // Collect the result of the build into `self.compilation`. for unit in &self.bcx.roots { // Collect tests and executables. for output in self.outputs(unit)?.iter() { if output.flavor == FileFlavor::DebugInfo || output.flavor == FileFlavor::Auxiliary { continue; } let bindst = output.bin_dst(); if unit.mode == CompileMode::Test { self.compilation .tests .push(self.unit_output(unit, &output.path)); } else if unit.target.is_executable() { self.compilation .binaries .push(self.unit_output(unit, bindst)); } else if unit.target.is_cdylib() && !self.compilation.cdylibs.iter().any(|uo| uo.unit == *unit) { self.compilation .cdylibs .push(self.unit_output(unit, bindst)); } } // If the unit has a build script, add `OUT_DIR` to the // environment variables. if unit.target.is_lib() { for dep in &self.bcx.unit_graph[unit] { if dep.unit.mode.is_run_custom_build() { let out_dir = self .files() .build_script_out_dir(&dep.unit) .display() .to_string(); let script_meta = self.get_run_build_script_metadata(&dep.unit); self.compilation .extra_env .entry(script_meta) .or_insert_with(Vec::new) .push(("OUT_DIR".to_string(), out_dir)); } } } // Collect information for `rustdoc --test`. 
if unit.mode.is_doc_test() { let mut unstable_opts = false; let mut args = compiler::extern_args(&self, unit, &mut unstable_opts)?; args.extend(compiler::lto_args(&self, unit)); args.extend(compiler::features_args(unit)); args.extend(compiler::check_cfg_args(&self, unit)); let script_meta = self.find_build_script_metadata(unit); if let Some(meta) = script_meta { if let Some(output) = self.build_script_outputs.lock().unwrap().get(meta) { for cfg in &output.cfgs { args.push("--cfg".into()); args.push(cfg.into()); } if !output.check_cfgs.is_empty() { args.push("-Zunstable-options".into()); for check_cfg in &output.check_cfgs { args.push("--check-cfg".into()); args.push(check_cfg.into()); } } for (lt, arg) in &output.linker_args { if lt.applies_to(&unit.target) { args.push("-C".into()); args.push(format!("link-arg={}", arg).into()); } } } } args.extend(self.bcx.rustdocflags_args(unit).iter().map(Into::into)); use super::MessageFormat; let format = match self.bcx.build_config.message_format { MessageFormat::Short => "short", MessageFormat::Human => "human", MessageFormat::Json { .. } => "json", }; args.push("--error-format".into()); args.push(format.into()); self.compilation.to_doc_test.push(compilation::Doctest { unit: unit.clone(), args, unstable_opts, linker: self.bcx.linker(unit.kind), script_meta, env: artifact::get_env(&self, self.unit_deps(unit))?, }); } super::output_depinfo(&mut self, unit)?; } for (script_meta, output) in self.build_script_outputs.lock().unwrap().iter() { self.compilation .extra_env .entry(*script_meta) .or_insert_with(Vec::new) .extend(output.env.iter().cloned()); for dir in output.library_paths.iter() { self.compilation.native_dirs.insert(dir.clone()); } } Ok(self.compilation) } /// Returns the executable for the specified unit (if any). 
pub fn get_executable(&mut self, unit: &Unit) -> CargoResult> { let is_binary = unit.target.is_executable(); let is_test = unit.mode.is_any_test(); if !unit.mode.generates_executable() || !(is_binary || is_test) { return Ok(None); } Ok(self .outputs(unit)? .iter() .find(|o| o.flavor == FileFlavor::Normal) .map(|output| output.bin_dst().clone())) } pub fn prepare_units(&mut self) -> CargoResult<()> { let dest = self.bcx.profiles.get_dir_name(); let host_layout = Layout::new(self.bcx.ws, None, &dest)?; let mut targets = HashMap::new(); for kind in self.bcx.all_kinds.iter() { if let CompileKind::Target(target) = *kind { let layout = Layout::new(self.bcx.ws, Some(target), &dest)?; targets.insert(target, layout); } } self.primary_packages .extend(self.bcx.roots.iter().map(|u| u.pkg.package_id())); self.compilation .root_crate_names .extend(self.bcx.roots.iter().map(|u| u.target.crate_name())); self.record_units_requiring_metadata(); let files = CompilationFiles::new(self, host_layout, targets); self.files = Some(files); Ok(()) } /// Prepare this context, ensuring that all filesystem directories are in /// place. pub fn prepare(&mut self) -> CargoResult<()> { let _p = profile::start("preparing layout"); self.files_mut() .host .prepare() .with_context(|| "couldn't prepare build directories")?; for target in self.files.as_mut().unwrap().target.values_mut() { target .prepare() .with_context(|| "couldn't prepare build directories")?; } let files = self.files.as_ref().unwrap(); for &kind in self.bcx.all_kinds.iter() { let layout = files.layout(kind); self.compilation .root_output .insert(kind, layout.dest().to_path_buf()); self.compilation .deps_output .insert(kind, layout.deps().to_path_buf()); } Ok(()) } pub fn files(&self) -> &CompilationFiles<'a, 'cfg> { self.files.as_ref().unwrap() } fn files_mut(&mut self) -> &mut CompilationFiles<'a, 'cfg> { self.files.as_mut().unwrap() } /// Returns the filenames that the given unit will generate. 
pub fn outputs(&self, unit: &Unit) -> CargoResult>> { self.files.as_ref().unwrap().outputs(unit, self.bcx) } /// Direct dependencies for the given unit. pub fn unit_deps(&self, unit: &Unit) -> &[UnitDep] { &self.bcx.unit_graph[unit] } /// Returns the RunCustomBuild Unit associated with the given Unit. /// /// If the package does not have a build script, this returns None. pub fn find_build_script_unit(&self, unit: &Unit) -> Option { if unit.mode.is_run_custom_build() { return Some(unit.clone()); } self.bcx.unit_graph[unit] .iter() .find(|unit_dep| { unit_dep.unit.mode.is_run_custom_build() && unit_dep.unit.pkg.package_id() == unit.pkg.package_id() }) .map(|unit_dep| unit_dep.unit.clone()) } /// Returns the metadata hash for the RunCustomBuild Unit associated with /// the given unit. /// /// If the package does not have a build script, this returns None. pub fn find_build_script_metadata(&self, unit: &Unit) -> Option { let script_unit = self.find_build_script_unit(unit)?; Some(self.get_run_build_script_metadata(&script_unit)) } /// Returns the metadata hash for a RunCustomBuild unit. pub fn get_run_build_script_metadata(&self, unit: &Unit) -> Metadata { assert!(unit.mode.is_run_custom_build()); self.files().metadata(unit) } pub fn is_primary_package(&self, unit: &Unit) -> bool { self.primary_packages.contains(&unit.pkg.package_id()) } /// Returns the list of filenames read by cargo to generate the `BuildContext` /// (all `Cargo.toml`, etc.). pub fn build_plan_inputs(&self) -> CargoResult> { // Keep sorted for consistency. let mut inputs = BTreeSet::new(); // Note: dev-deps are skipped if they are not present in the unit graph. for unit in self.bcx.unit_graph.keys() { inputs.insert(unit.pkg.manifest_path().to_path_buf()); } Ok(inputs.into_iter().collect()) } /// Returns a [`UnitOutput`] which represents some information about the /// output of a unit. 
pub fn unit_output(&self, unit: &Unit, path: &Path) -> UnitOutput { let script_meta = self.find_build_script_metadata(unit); UnitOutput { unit: unit.clone(), path: path.to_path_buf(), script_meta, } } fn check_collisions(&self) -> CargoResult<()> { let mut output_collisions = HashMap::new(); let describe_collision = |unit: &Unit, other_unit: &Unit, path: &PathBuf| -> String { format!( "The {} target `{}` in package `{}` has the same output \ filename as the {} target `{}` in package `{}`.\n\ Colliding filename is: {}\n", unit.target.kind().description(), unit.target.name(), unit.pkg.package_id(), other_unit.target.kind().description(), other_unit.target.name(), other_unit.pkg.package_id(), path.display() ) }; let suggestion = "Consider changing their names to be unique or compiling them separately.\n\ This may become a hard error in the future; see \ ."; let rustdoc_suggestion = "This is a known bug where multiple crates with the same name use\n\ the same path; see ."; let report_collision = |unit: &Unit, other_unit: &Unit, path: &PathBuf, suggestion: &str| -> CargoResult<()> { if unit.target.name() == other_unit.target.name() { self.bcx.config.shell().warn(format!( "output filename collision.\n\ {}\ The targets should have unique names.\n\ {}", describe_collision(unit, other_unit, path), suggestion )) } else { self.bcx.config.shell().warn(format!( "output filename collision.\n\ {}\ The output filenames should be unique.\n\ {}\n\ If this looks unexpected, it may be a bug in Cargo. 
Please file a bug report at\n\ https://github.com/rust-lang/cargo/issues/ with as much information as you\n\ can provide.\n\ cargo {} running on `{}` target `{}`\n\ First unit: {:?}\n\ Second unit: {:?}", describe_collision(unit, other_unit, path), suggestion, crate::version(), self.bcx.host_triple(), self.bcx.target_data.short_name(&unit.kind), unit, other_unit)) } }; fn doc_collision_error(unit: &Unit, other_unit: &Unit) -> CargoResult<()> { bail!( "document output filename collision\n\ The {} `{}` in package `{}` has the same name as the {} `{}` in package `{}`.\n\ Only one may be documented at once since they output to the same path.\n\ Consider documenting only one, renaming one, \ or marking one with `doc = false` in Cargo.toml.", unit.target.kind().description(), unit.target.name(), unit.pkg, other_unit.target.kind().description(), other_unit.target.name(), other_unit.pkg, ); } let mut keys = self .bcx .unit_graph .keys() .filter(|unit| !unit.mode.is_run_custom_build()) .collect::>(); // Sort for consistent error messages. keys.sort_unstable(); // These are kept separate to retain compatibility with older // versions, which generated an error when there was a duplicate lib // or bin (but the old code did not check bin<->lib collisions). To // retain backwards compatibility, this only generates an error for // duplicate libs or duplicate bins (but not both). Ideally this // shouldn't be here, but since there isn't a complete workaround, // yet, this retains the old behavior. let mut doc_libs = HashMap::new(); let mut doc_bins = HashMap::new(); for unit in keys { if unit.mode.is_doc() && self.is_primary_package(unit) { // These situations have been an error since before 1.0, so it // is not a warning like the other situations. 
if unit.target.is_lib() { if let Some(prev) = doc_libs.insert((unit.target.crate_name(), unit.kind), unit) { doc_collision_error(unit, prev)?; } } else if let Some(prev) = doc_bins.insert((unit.target.crate_name(), unit.kind), unit) { doc_collision_error(unit, prev)?; } } for output in self.outputs(unit)?.iter() { if let Some(other_unit) = output_collisions.insert(output.path.clone(), unit) { if unit.mode.is_doc() { // See https://github.com/rust-lang/rust/issues/56169 // and https://github.com/rust-lang/rust/issues/61378 report_collision(unit, other_unit, &output.path, rustdoc_suggestion)?; } else { report_collision(unit, other_unit, &output.path, suggestion)?; } } if let Some(hardlink) = output.hardlink.as_ref() { if let Some(other_unit) = output_collisions.insert(hardlink.clone(), unit) { report_collision(unit, other_unit, hardlink, suggestion)?; } } if let Some(ref export_path) = output.export_path { if let Some(other_unit) = output_collisions.insert(export_path.clone(), unit) { self.bcx.config.shell().warn(format!( "`--out-dir` filename collision.\n\ {}\ The exported filenames should be unique.\n\ {}", describe_collision(unit, other_unit, export_path), suggestion ))?; } } } } Ok(()) } /// Records the list of units which are required to emit metadata. /// /// Units which depend only on the metadata of others requires the others to /// actually produce metadata, so we'll record that here. fn record_units_requiring_metadata(&mut self) { for (key, deps) in self.bcx.unit_graph.iter() { for dep in deps { if self.only_requires_rmeta(key, &dep.unit) { self.rmeta_required.insert(dep.unit.clone()); } } } } /// Returns whether when `parent` depends on `dep` if it only requires the /// metadata file from `dep`. 
pub fn only_requires_rmeta(&self, parent: &Unit, dep: &Unit) -> bool { // We're only a candidate for requiring an `rmeta` file if we // ourselves are building an rlib, !parent.requires_upstream_objects() && parent.mode == CompileMode::Build // Our dependency must also be built as an rlib, otherwise the // object code must be useful in some fashion && !dep.requires_upstream_objects() && dep.mode == CompileMode::Build } /// Returns whether when `unit` is built whether it should emit metadata as /// well because some compilations rely on that. pub fn rmeta_required(&self, unit: &Unit) -> bool { self.rmeta_required.contains(unit) } pub fn new_jobserver(&mut self) -> CargoResult { let tokens = self.bcx.jobs() as usize; let client = Client::new(tokens).with_context(|| "failed to create jobserver")?; // Drain the client fully for i in 0..tokens { client.acquire_raw().with_context(|| { format!( "failed to fully drain {}/{} token from jobserver at startup", i, tokens, ) })?; } Ok(client) } /// Finds metadata for Doc/Docscrape units. /// /// rustdoc needs a -Cmetadata flag in order to recognize StableCrateIds that refer to /// items in the crate being documented. The -Cmetadata flag used by reverse-dependencies /// will be the metadata of the Cargo unit that generated the current library's rmeta file, /// which should be a Check unit. /// /// If the current crate has reverse-dependencies, such a Check unit should exist, and so /// we use that crate's metadata. If not, we use the crate's Doc unit so at least examples /// scraped from the current crate can be used when documenting the current crate. 
pub fn compute_metadata_for_doc_units(&mut self) { for unit in self.bcx.unit_graph.keys() { if !unit.mode.is_doc() && !unit.mode.is_doc_scrape() { continue; } let matching_units = self .bcx .unit_graph .keys() .filter(|other| { unit.pkg == other.pkg && unit.target == other.target && !other.mode.is_doc_scrape() }) .collect::>(); let metadata_unit = matching_units .iter() .find(|other| other.mode.is_check()) .or_else(|| matching_units.iter().find(|other| other.mode.is_doc())) .unwrap_or(&unit); self.metadata_for_doc_units .insert(unit.clone(), self.files().metadata(metadata_unit)); } } } cargo-0.66.0/src/cargo/core/compiler/crate_type.rs000066400000000000000000000056321432416201200220300ustar00rootroot00000000000000use std::fmt; #[derive(Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub enum CrateType { Bin, Lib, Rlib, Dylib, Cdylib, Staticlib, ProcMacro, Other(String), } impl CrateType { pub fn as_str(&self) -> &str { match self { CrateType::Bin => "bin", CrateType::Lib => "lib", CrateType::Rlib => "rlib", CrateType::Dylib => "dylib", CrateType::Cdylib => "cdylib", CrateType::Staticlib => "staticlib", CrateType::ProcMacro => "proc-macro", CrateType::Other(s) => s, } } pub fn can_lto(&self) -> bool { match self { CrateType::Bin | CrateType::Staticlib | CrateType::Cdylib => true, CrateType::Lib | CrateType::Rlib | CrateType::Dylib | CrateType::ProcMacro | CrateType::Other(..) => false, } } pub fn is_linkable(&self) -> bool { match self { CrateType::Lib | CrateType::Rlib | CrateType::Dylib | CrateType::ProcMacro => true, CrateType::Bin | CrateType::Cdylib | CrateType::Staticlib | CrateType::Other(..) => { false } } } pub fn is_dynamic(&self) -> bool { match self { CrateType::Dylib | CrateType::Cdylib | CrateType::ProcMacro => true, CrateType::Lib | CrateType::Rlib | CrateType::Bin | CrateType::Staticlib | CrateType::Other(..) 
=> false, } } pub fn requires_upstream_objects(&self) -> bool { // "lib" == "rlib" and is a compilation that doesn't actually // require upstream object files to exist, only upstream metadata // files. As a result, it doesn't require upstream artifacts !matches!(self, CrateType::Lib | CrateType::Rlib) // Everything else, however, is some form of "linkable output" or // something that requires upstream object files. } } impl fmt::Display for CrateType { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.as_str().fmt(f) } } impl<'a> From<&'a String> for CrateType { fn from(s: &'a String) -> Self { match s.as_str() { "bin" => CrateType::Bin, "lib" => CrateType::Lib, "rlib" => CrateType::Rlib, "dylib" => CrateType::Dylib, "cdylib" => CrateType::Cdylib, "staticlib" => CrateType::Staticlib, "procmacro" => CrateType::ProcMacro, _ => CrateType::Other(s.clone()), } } } impl fmt::Debug for CrateType { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.to_string().fmt(f) } } impl serde::Serialize for CrateType { fn serialize(&self, s: S) -> Result where S: serde::ser::Serializer, { self.to_string().serialize(s) } } cargo-0.66.0/src/cargo/core/compiler/custom_build.rs000066400000000000000000001206301432416201200223560ustar00rootroot00000000000000use super::job::{Freshness, Job, Work}; use super::{fingerprint, Context, LinkType, Unit}; use crate::core::compiler::artifact; use crate::core::compiler::context::Metadata; use crate::core::compiler::job_queue::JobState; use crate::core::{profiles::ProfileRoot, PackageId, Target}; use crate::util::errors::CargoResult; use crate::util::machine_message::{self, Message}; use crate::util::{internal, profile}; use anyhow::{bail, Context as _}; use cargo_platform::Cfg; use cargo_util::paths; use std::collections::hash_map::{Entry, HashMap}; use std::collections::{BTreeSet, HashSet}; use std::path::{Path, PathBuf}; use std::str; use std::sync::{Arc, Mutex}; const CARGO_WARNING: &str = "cargo:warning="; /// Contains 
the parsed output of a custom build script. #[derive(Clone, Debug, Hash, Default)] pub struct BuildOutput { /// Paths to pass to rustc with the `-L` flag. pub library_paths: Vec, /// Names and link kinds of libraries, suitable for the `-l` flag. pub library_links: Vec, /// Linker arguments suitable to be passed to `-C link-arg=` pub linker_args: Vec<(LinkType, String)>, /// Various `--cfg` flags to pass to the compiler. pub cfgs: Vec, /// Various `--check-cfg` flags to pass to the compiler. pub check_cfgs: Vec, /// Additional environment variables to run the compiler with. pub env: Vec<(String, String)>, /// Metadata to pass to the immediate dependencies. pub metadata: Vec<(String, String)>, /// Paths to trigger a rerun of this build script. /// May be absolute or relative paths (relative to package root). pub rerun_if_changed: Vec, /// Environment variables which, when changed, will cause a rebuild. pub rerun_if_env_changed: Vec, /// Warnings generated by this build. /// /// These are only displayed if this is a "local" package, `-vv` is used, /// or there is a build error for any target in this package. pub warnings: Vec, } /// Map of packages to build script output. /// /// This initially starts out as empty. Overridden build scripts get /// inserted during `build_map`. The rest of the entries are added /// immediately after each build script runs. /// /// The `Metadata` is the unique metadata hash for the RunCustomBuild Unit of /// the package. It needs a unique key, since the build script can be run /// multiple times with different profiles or features. We can't embed a /// `Unit` because this structure needs to be shareable between threads. #[derive(Default)] pub struct BuildScriptOutputs { outputs: HashMap, } /// Linking information for a `Unit`. /// /// See `build_map` for more details. #[derive(Default)] pub struct BuildScripts { /// List of build script outputs this Unit needs to include for linking. 
Each /// element is an index into `BuildScriptOutputs`. /// /// Cargo will use this `to_link` vector to add `-L` flags to compiles as we /// propagate them upwards towards the final build. Note, however, that we /// need to preserve the ordering of `to_link` to be topologically sorted. /// This will ensure that build scripts which print their paths properly will /// correctly pick up the files they generated (if there are duplicates /// elsewhere). /// /// To preserve this ordering, the (id, metadata) is stored in two places, once /// in the `Vec` and once in `seen_to_link` for a fast lookup. We maintain /// this as we're building interactively below to ensure that the memory /// usage here doesn't blow up too much. /// /// For more information, see #2354. pub to_link: Vec<(PackageId, Metadata)>, /// This is only used while constructing `to_link` to avoid duplicates. seen_to_link: HashSet<(PackageId, Metadata)>, /// Host-only dependencies that have build scripts. Each element is an /// index into `BuildScriptOutputs`. /// /// This is the set of transitive dependencies that are host-only /// (proc-macro, plugin, build-dependency) that contain a build script. /// Any `BuildOutput::library_paths` path relative to `target` will be /// added to LD_LIBRARY_PATH so that the compiler can find any dynamic /// libraries a build script may have generated. pub plugins: BTreeSet<(PackageId, Metadata)>, } /// Dependency information as declared by a build script. #[derive(Debug)] pub struct BuildDeps { /// Absolute path to the file in the target directory that stores the /// output of the build script. pub build_script_output: PathBuf, /// Files that trigger a rebuild if they change. pub rerun_if_changed: Vec, /// Environment variables that trigger a rebuild if they change. pub rerun_if_env_changed: Vec, } /// Prepares a `Work` that executes the target as a custom build script. 
pub fn prepare(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult { let _p = profile::start(format!( "build script prepare: {}/{}", unit.pkg, unit.target.name() )); let metadata = cx.get_run_build_script_metadata(unit); if cx .build_script_outputs .lock() .unwrap() .contains_key(metadata) { // The output is already set, thus the build script is overridden. fingerprint::prepare_target(cx, unit, false) } else { build_work(cx, unit) } } fn emit_build_output( state: &JobState<'_, '_>, output: &BuildOutput, out_dir: &Path, package_id: PackageId, ) -> CargoResult<()> { let library_paths = output .library_paths .iter() .map(|l| l.display().to_string()) .collect::>(); let msg = machine_message::BuildScript { package_id, linked_libs: &output.library_links, linked_paths: &library_paths, cfgs: &output.cfgs, env: &output.env, out_dir, } .to_json_string(); state.stdout(msg)?; Ok(()) } fn build_work(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult { assert!(unit.mode.is_run_custom_build()); let bcx = &cx.bcx; let dependencies = cx.unit_deps(unit); let build_script_unit = dependencies .iter() .find(|d| !d.unit.mode.is_run_custom_build() && d.unit.target.is_custom_build()) .map(|d| &d.unit) .expect("running a script not depending on an actual script"); let script_dir = cx.files().build_script_dir(build_script_unit); let script_out_dir = cx.files().build_script_out_dir(unit); let script_run_dir = cx.files().build_script_run_dir(unit); let build_plan = bcx.build_config.build_plan; let invocation_name = unit.buildkey(); if let Some(deps) = unit.pkg.manifest().metabuild() { prepare_metabuild(cx, build_script_unit, deps)?; } // Building the command to execute let to_exec = script_dir.join(unit.target.name()); // Start preparing the process to execute, starting out with some // environment variables. Note that the profile-related environment // variables are not set with this the build script's profile but rather the // package's library profile. 
// NOTE: if you add any profile flags, be sure to update // `Profiles::get_profile_run_custom_build` so that those flags get // carried over. let to_exec = to_exec.into_os_string(); let mut cmd = cx.compilation.host_process(to_exec, &unit.pkg)?; let debug = unit.profile.debuginfo.unwrap_or(0) != 0; cmd.env("OUT_DIR", &script_out_dir) .env("CARGO_MANIFEST_DIR", unit.pkg.root()) .env("NUM_JOBS", &bcx.jobs().to_string()) .env("TARGET", bcx.target_data.short_name(&unit.kind)) .env("DEBUG", debug.to_string()) .env("OPT_LEVEL", &unit.profile.opt_level.to_string()) .env( "PROFILE", match unit.profile.root { ProfileRoot::Release => "release", ProfileRoot::Debug => "debug", }, ) .env("HOST", &bcx.host_triple()) .env("RUSTC", &bcx.rustc().path) .env("RUSTDOC", &*bcx.config.rustdoc()?) .inherit_jobserver(&cx.jobserver); // Find all artifact dependencies and make their file and containing directory discoverable using environment variables. for (var, value) in artifact::get_env(cx, dependencies)? { cmd.env(&var, value); } if let Some(linker) = &bcx.target_data.target_config(unit.kind).linker { cmd.env( "RUSTC_LINKER", linker.val.clone().resolve_program(bcx.config), ); } if let Some(links) = unit.pkg.manifest().links() { cmd.env("CARGO_MANIFEST_LINKS", links); } // Be sure to pass along all enabled features for this package, this is the // last piece of statically known information that we have. for feat in &unit.features { cmd.env(&format!("CARGO_FEATURE_{}", super::envify(feat)), "1"); } let mut cfg_map = HashMap::new(); for cfg in bcx.target_data.cfg(unit.kind) { match *cfg { Cfg::Name(ref n) => { cfg_map.insert(n.clone(), None); } Cfg::KeyPair(ref k, ref v) => { if let Some(ref mut values) = *cfg_map.entry(k.clone()).or_insert_with(|| Some(Vec::new())) { values.push(v.clone()) } } } } for (k, v) in cfg_map { if k == "debug_assertions" { // This cfg is always true and misleading, so avoid setting it. // That is because Cargo queries rustc without any profile settings. 
continue; } let k = format!("CARGO_CFG_{}", super::envify(&k)); match v { Some(list) => { cmd.env(&k, list.join(",")); } None => { cmd.env(&k, ""); } } } // Also inform the build script of the rustc compiler context. if let Some(wrapper) = bcx.rustc().wrapper.as_ref() { cmd.env("RUSTC_WRAPPER", wrapper); } else { cmd.env_remove("RUSTC_WRAPPER"); } cmd.env_remove("RUSTC_WORKSPACE_WRAPPER"); if cx.bcx.ws.is_member(&unit.pkg) { if let Some(wrapper) = bcx.rustc().workspace_wrapper.as_ref() { cmd.env("RUSTC_WORKSPACE_WRAPPER", wrapper); } } cmd.env( "CARGO_ENCODED_RUSTFLAGS", bcx.rustflags_args(unit).join("\x1f"), ); cmd.env_remove("RUSTFLAGS"); // Gather the set of native dependencies that this package has along with // some other variables to close over. // // This information will be used at build-time later on to figure out which // sorts of variables need to be discovered at that time. let lib_deps = dependencies .iter() .filter_map(|dep| { if dep.unit.mode.is_run_custom_build() { let dep_metadata = cx.get_run_build_script_metadata(&dep.unit); Some(( dep.unit.pkg.manifest().links().unwrap().to_string(), dep.unit.pkg.package_id(), dep_metadata, )) } else { None } }) .collect::>(); let library_name = unit.pkg.library().map(|t| t.crate_name()); let pkg_descr = unit.pkg.to_string(); let build_script_outputs = Arc::clone(&cx.build_script_outputs); let id = unit.pkg.package_id(); let output_file = script_run_dir.join("output"); let err_file = script_run_dir.join("stderr"); let root_output_file = script_run_dir.join("root-output"); let host_target_root = cx.files().host_dest().to_path_buf(); let all = ( id, library_name.clone(), pkg_descr.clone(), Arc::clone(&build_script_outputs), output_file.clone(), script_out_dir.clone(), ); let build_scripts = cx.build_scripts.get(unit).cloned(); let json_messages = bcx.build_config.emit_json(); let extra_verbose = bcx.config.extra_verbose(); let (prev_output, prev_script_out_dir) = prev_build_output(cx, unit); let metadata_hash = 
cx.get_run_build_script_metadata(unit); paths::create_dir_all(&script_dir)?; paths::create_dir_all(&script_out_dir)?; let nightly_features_allowed = cx.bcx.config.nightly_features_allowed; let extra_check_cfg = match cx.bcx.config.cli_unstable().check_cfg { Some((_, _, _, output)) => output, None => false, }; let targets: Vec = unit.pkg.targets().to_vec(); // Need a separate copy for the fresh closure. let targets_fresh = targets.clone(); // Prepare the unit of "dirty work" which will actually run the custom build // command. // // Note that this has to do some extra work just before running the command // to determine extra environment variables and such. let dirty = Work::new(move |state| { // Make sure that OUT_DIR exists. // // If we have an old build directory, then just move it into place, // otherwise create it! paths::create_dir_all(&script_out_dir) .with_context(|| "failed to create script output directory for build command")?; // For all our native lib dependencies, pick up their metadata to pass // along to this custom build command. We're also careful to augment our // dynamic library search path in case the build script depended on any // native dynamic libraries. if !build_plan { let build_script_outputs = build_script_outputs.lock().unwrap(); for (name, dep_id, dep_metadata) in lib_deps { let script_output = build_script_outputs.get(dep_metadata).ok_or_else(|| { internal(format!( "failed to locate build state for env vars: {}/{}", dep_id, dep_metadata )) })?; let data = &script_output.metadata; for &(ref key, ref value) in data.iter() { cmd.env( &format!("DEP_{}_{}", super::envify(&name), super::envify(key)), value, ); } } if let Some(build_scripts) = build_scripts { super::add_plugin_deps( &mut cmd, &build_script_outputs, &build_scripts, &host_target_root, )?; } } if build_plan { state.build_plan(invocation_name, cmd.clone(), Arc::new(Vec::new())); return Ok(()); } // And now finally, run the build command itself! 
state.running(&cmd); let timestamp = paths::set_invocation_time(&script_run_dir)?; let prefix = format!("[{} {}] ", id.name(), id.version()); let mut warnings_in_case_of_panic = Vec::new(); let output = cmd .exec_with_streaming( &mut |stdout| { if let Some(warning) = stdout.strip_prefix(CARGO_WARNING) { warnings_in_case_of_panic.push(warning.to_owned()); } if extra_verbose { state.stdout(format!("{}{}", prefix, stdout))?; } Ok(()) }, &mut |stderr| { if extra_verbose { state.stderr(format!("{}{}", prefix, stderr))?; } Ok(()) }, true, ) .with_context(|| format!("failed to run custom build command for `{}`", pkg_descr)); if let Err(error) = output { insert_warnings_in_build_outputs( build_script_outputs, id, metadata_hash, warnings_in_case_of_panic, ); return Err(error); } let output = output.unwrap(); // After the build command has finished running, we need to be sure to // remember all of its output so we can later discover precisely what it // was, even if we don't run the build command again (due to freshness). // // This is also the location where we provide feedback into the build // state informing what variables were discovered via our script as // well. paths::write(&output_file, &output.stdout)?; // This mtime shift allows Cargo to detect if a source file was // modified in the middle of the build. 
paths::set_file_time_no_err(output_file, timestamp); paths::write(&err_file, &output.stderr)?; paths::write(&root_output_file, paths::path2bytes(&script_out_dir)?)?; let parsed_output = BuildOutput::parse( &output.stdout, library_name, &pkg_descr, &script_out_dir, &script_out_dir, extra_check_cfg, nightly_features_allowed, &targets, )?; if json_messages { emit_build_output(state, &parsed_output, script_out_dir.as_path(), id)?; } build_script_outputs .lock() .unwrap() .insert(id, metadata_hash, parsed_output); Ok(()) }); // Now that we've prepared our work-to-do, we need to prepare the fresh work // itself to run when we actually end up just discarding what we calculated // above. let fresh = Work::new(move |state| { let (id, library_name, pkg_descr, build_script_outputs, output_file, script_out_dir) = all; let output = match prev_output { Some(output) => output, None => BuildOutput::parse_file( &output_file, library_name, &pkg_descr, &prev_script_out_dir, &script_out_dir, extra_check_cfg, nightly_features_allowed, &targets_fresh, )?, }; if json_messages { emit_build_output(state, &output, script_out_dir.as_path(), id)?; } build_script_outputs .lock() .unwrap() .insert(id, metadata_hash, output); Ok(()) }); let mut job = if cx.bcx.build_config.build_plan { Job::new_dirty(Work::noop()) } else { fingerprint::prepare_target(cx, unit, false)? 
}; if job.freshness() == Freshness::Dirty { job.before(dirty); } else { job.before(fresh); } Ok(job) } fn insert_warnings_in_build_outputs( build_script_outputs: Arc>, id: PackageId, metadata_hash: Metadata, warnings: Vec, ) { let build_output_with_only_warnings = BuildOutput { warnings, ..BuildOutput::default() }; build_script_outputs .lock() .unwrap() .insert(id, metadata_hash, build_output_with_only_warnings); } impl BuildOutput { pub fn parse_file( path: &Path, library_name: Option, pkg_descr: &str, script_out_dir_when_generated: &Path, script_out_dir: &Path, extra_check_cfg: bool, nightly_features_allowed: bool, targets: &[Target], ) -> CargoResult { let contents = paths::read_bytes(path)?; BuildOutput::parse( &contents, library_name, pkg_descr, script_out_dir_when_generated, script_out_dir, extra_check_cfg, nightly_features_allowed, targets, ) } // Parses the output of a script. // The `pkg_descr` is used for error messages. // The `library_name` is used for determining if RUSTC_BOOTSTRAP should be allowed. pub fn parse( input: &[u8], // Takes String instead of InternedString so passing `unit.pkg.name()` will give a compile error. library_name: Option, pkg_descr: &str, script_out_dir_when_generated: &Path, script_out_dir: &Path, extra_check_cfg: bool, nightly_features_allowed: bool, targets: &[Target], ) -> CargoResult { let mut library_paths = Vec::new(); let mut library_links = Vec::new(); let mut linker_args = Vec::new(); let mut cfgs = Vec::new(); let mut check_cfgs = Vec::new(); let mut env = Vec::new(); let mut metadata = Vec::new(); let mut rerun_if_changed = Vec::new(); let mut rerun_if_env_changed = Vec::new(); let mut warnings = Vec::new(); let whence = format!("build script of `{}`", pkg_descr); for line in input.split(|b| *b == b'\n') { let line = match str::from_utf8(line) { Ok(line) => line.trim(), Err(..) 
=> continue, }; let mut iter = line.splitn(2, ':'); if iter.next() != Some("cargo") { // skip this line since it doesn't start with "cargo:" continue; } let data = match iter.next() { Some(val) => val, None => continue, }; // getting the `key=value` part of the line let mut iter = data.splitn(2, '='); let key = iter.next(); let value = iter.next(); let (key, value) = match (key, value) { (Some(a), Some(b)) => (a, b.trim_end()), // Line started with `cargo:` but didn't match `key=value`. _ => bail!("invalid output in {}: `{}`\n\ Expected a line with `cargo:key=value` with an `=` character, \ but none was found.\n\ See https://doc.rust-lang.org/cargo/reference/build-scripts.html#outputs-of-the-build-script \ for more information about build script outputs.", whence, line), }; // This will rewrite paths if the target directory has been moved. let value = value.replace( script_out_dir_when_generated.to_str().unwrap(), script_out_dir.to_str().unwrap(), ); macro_rules! check_and_add_target { ($target_kind: expr, $is_target_kind: expr, $link_type: expr) => { if !targets.iter().any(|target| $is_target_kind(target)) { bail!( "invalid instruction `cargo:{}` from {}\n\ The package {} does not have a {} target.", key, whence, pkg_descr, $target_kind ); } linker_args.push(($link_type, value)); }; } // Keep in sync with TargetConfig::parse_links_overrides. 
match key { "rustc-flags" => { let (paths, links) = BuildOutput::parse_rustc_flags(&value, &whence)?; library_links.extend(links.into_iter()); library_paths.extend(paths.into_iter()); } "rustc-link-lib" => library_links.push(value.to_string()), "rustc-link-search" => library_paths.push(PathBuf::from(value)), "rustc-link-arg-cdylib" | "rustc-cdylib-link-arg" => { if !targets.iter().any(|target| target.is_cdylib()) { warnings.push(format!( "cargo:{} was specified in the build script of {}, \ but that package does not contain a cdylib target\n\ \n\ Allowing this was an unintended change in the 1.50 \ release, and may become an error in the future. \ For more information, see \ .", key, pkg_descr )); } linker_args.push((LinkType::Cdylib, value)) } "rustc-link-arg-bins" => { check_and_add_target!("bin", Target::is_bin, LinkType::Bin); } "rustc-link-arg-bin" => { let mut parts = value.splitn(2, '='); let bin_name = parts.next().unwrap().to_string(); let arg = parts.next().ok_or_else(|| { anyhow::format_err!( "invalid instruction `cargo:{}={}` from {}\n\ The instruction should have the form cargo:{}=BIN=ARG", key, value, whence, key ) })?; if !targets .iter() .any(|target| target.is_bin() && target.name() == bin_name) { bail!( "invalid instruction `cargo:{}` from {}\n\ The package {} does not have a bin target with the name `{}`.", key, whence, pkg_descr, bin_name ); } linker_args.push((LinkType::SingleBin(bin_name), arg.to_string())); } "rustc-link-arg-tests" => { check_and_add_target!("test", Target::is_test, LinkType::Test); } "rustc-link-arg-benches" => { check_and_add_target!("benchmark", Target::is_bench, LinkType::Bench); } "rustc-link-arg-examples" => { check_and_add_target!("example", Target::is_example, LinkType::Example); } "rustc-link-arg" => { linker_args.push((LinkType::All, value)); } "rustc-cfg" => cfgs.push(value.to_string()), "rustc-check-cfg" => { if extra_check_cfg { check_cfgs.push(value.to_string()); } else { warnings.push(format!("cargo:{} requires 
-Zcheck-cfg=output flag", key)); } } "rustc-env" => { let (key, val) = BuildOutput::parse_rustc_env(&value, &whence)?; // Build scripts aren't allowed to set RUSTC_BOOTSTRAP. // See https://github.com/rust-lang/cargo/issues/7088. if key == "RUSTC_BOOTSTRAP" { // If RUSTC_BOOTSTRAP is already set, the user of Cargo knows about // bootstrap and still wants to override the channel. Give them a way to do // so, but still emit a warning that the current crate shouldn't be trying // to set RUSTC_BOOTSTRAP. // If this is a nightly build, setting RUSTC_BOOTSTRAP wouldn't affect the // behavior, so still only give a warning. // NOTE: cargo only allows nightly features on RUSTC_BOOTSTRAP=1, but we // want setting any value of RUSTC_BOOTSTRAP to downgrade this to a warning // (so that `RUSTC_BOOTSTRAP=library_name` will work) let rustc_bootstrap_allows = |name: Option<&str>| { let name = match name { // as of 2021, no binaries on crates.io use RUSTC_BOOTSTRAP, so // fine-grained opt-outs aren't needed. end-users can always use // RUSTC_BOOTSTRAP=1 from the top-level if it's really a problem. None => return false, Some(n) => n, }; std::env::var("RUSTC_BOOTSTRAP") .map_or(false, |var| var.split(',').any(|s| s == name)) }; if nightly_features_allowed || rustc_bootstrap_allows(library_name.as_deref()) { warnings.push(format!("Cannot set `RUSTC_BOOTSTRAP={}` from {}.\n\ note: Crates cannot set `RUSTC_BOOTSTRAP` themselves, as doing so would subvert the stability guarantees of Rust for your project.", val, whence )); } else { // Setting RUSTC_BOOTSTRAP would change the behavior of the crate. // Abort with an error. 
bail!("Cannot set `RUSTC_BOOTSTRAP={}` from {}.\n\ note: Crates cannot set `RUSTC_BOOTSTRAP` themselves, as doing so would subvert the stability guarantees of Rust for your project.\n\ help: If you're sure you want to do this in your project, set the environment variable `RUSTC_BOOTSTRAP={}` before running cargo instead.", val, whence, library_name.as_deref().unwrap_or("1"), ); } } else { env.push((key, val)); } } "warning" => warnings.push(value.to_string()), "rerun-if-changed" => rerun_if_changed.push(PathBuf::from(value)), "rerun-if-env-changed" => rerun_if_env_changed.push(value.to_string()), _ => metadata.push((key.to_string(), value.to_string())), } } Ok(BuildOutput { library_paths, library_links, linker_args, cfgs, check_cfgs, env, metadata, rerun_if_changed, rerun_if_env_changed, warnings, }) } pub fn parse_rustc_flags( value: &str, whence: &str, ) -> CargoResult<(Vec, Vec)> { let value = value.trim(); let mut flags_iter = value .split(|c: char| c.is_whitespace()) .filter(|w| w.chars().any(|c| !c.is_whitespace())); let (mut library_paths, mut library_links) = (Vec::new(), Vec::new()); while let Some(flag) = flags_iter.next() { if flag.starts_with("-l") || flag.starts_with("-L") { // Check if this flag has no space before the value as is // common with tools like pkg-config // e.g. -L/some/dir/local/lib or -licui18n let (flag, mut value) = flag.split_at(2); if value.is_empty() { value = match flags_iter.next() { Some(v) => v, None => bail! 
{ "Flag in rustc-flags has no value in {}: {}", whence, value }, } } match flag { "-l" => library_links.push(value.to_string()), "-L" => library_paths.push(PathBuf::from(value)), // This was already checked above _ => unreachable!(), }; } else { bail!( "Only `-l` and `-L` flags are allowed in {}: `{}`", whence, value ) } } Ok((library_paths, library_links)) } pub fn parse_rustc_env(value: &str, whence: &str) -> CargoResult<(String, String)> { let mut iter = value.splitn(2, '='); let name = iter.next(); let val = iter.next(); match (name, val) { (Some(n), Some(v)) => Ok((n.to_owned(), v.to_owned())), _ => bail!("Variable rustc-env has no value in {}: {}", whence, value), } } } fn prepare_metabuild(cx: &Context<'_, '_>, unit: &Unit, deps: &[String]) -> CargoResult<()> { let mut output = Vec::new(); let available_deps = cx.unit_deps(unit); // Filter out optional dependencies, and look up the actual lib name. let meta_deps: Vec<_> = deps .iter() .filter_map(|name| { available_deps .iter() .find(|d| d.unit.pkg.name().as_str() == name.as_str()) .map(|d| d.unit.target.crate_name()) }) .collect(); for dep in &meta_deps { output.push(format!("use {};\n", dep)); } output.push("fn main() {\n".to_string()); for dep in &meta_deps { output.push(format!(" {}::metabuild();\n", dep)); } output.push("}\n".to_string()); let output = output.join(""); let path = unit.pkg.manifest().metabuild_path(cx.bcx.ws.target_dir()); paths::create_dir_all(path.parent().unwrap())?; paths::write_if_changed(path, &output)?; Ok(()) } impl BuildDeps { pub fn new(output_file: &Path, output: Option<&BuildOutput>) -> BuildDeps { BuildDeps { build_script_output: output_file.to_path_buf(), rerun_if_changed: output .map(|p| &p.rerun_if_changed) .cloned() .unwrap_or_default(), rerun_if_env_changed: output .map(|p| &p.rerun_if_env_changed) .cloned() .unwrap_or_default(), } } } /// Computes several maps in `Context`: /// - `build_scripts`: A map that tracks which build scripts each package /// depends on. 
/// - `build_explicit_deps`: Dependency statements emitted by build scripts
///   from a previous run.
/// - `build_script_outputs`: Pre-populates this with any overridden build
///   scripts.
///
/// The important one here is `build_scripts`, which for each `(package,
/// metadata)` stores a `BuildScripts` object which contains a list of
/// dependencies with build scripts that the unit should consider when
/// linking. For example this lists all dependencies' `-L` flags which need to
/// be propagated transitively.
///
/// The given set of units to this function is the initial set of
/// targets/profiles which are being built.
pub fn build_map(cx: &mut Context<'_, '_>) -> CargoResult<()> {
    let mut ret = HashMap::new();
    for unit in &cx.bcx.roots {
        build(&mut ret, cx, unit)?;
    }
    cx.build_scripts
        .extend(ret.into_iter().map(|(k, v)| (k, Arc::new(v))));
    return Ok(());

    // Recursive function to build up the map we're constructing. This function
    // memoizes all of its return values as it goes along.
    fn build<'a>(
        out: &'a mut HashMap<Unit, BuildScripts>,
        cx: &mut Context<'_, '_>,
        unit: &Unit,
    ) -> CargoResult<&'a BuildScripts> {
        // Do a quick pre-flight check to see if we've already calculated the
        // set of dependencies.
        if out.contains_key(unit) {
            return Ok(&out[unit]);
        }

        // If there is a build script override, pre-fill the build output.
        if unit.mode.is_run_custom_build() {
            if let Some(links) = unit.pkg.manifest().links() {
                if let Some(output) = cx.bcx.target_data.script_override(links, unit.kind) {
                    let metadata = cx.get_run_build_script_metadata(unit);
                    cx.build_script_outputs.lock().unwrap().insert(
                        unit.pkg.package_id(),
                        metadata,
                        output.clone(),
                    );
                }
            }
        }

        let mut ret = BuildScripts::default();

        // If a package has a build script, add itself as something to inspect for linking.
        if !unit.target.is_custom_build() && unit.pkg.has_custom_build() {
            let script_meta = cx
                .find_build_script_metadata(unit)
                .expect("has_custom_build should have RunCustomBuild");
            add_to_link(&mut ret, unit.pkg.package_id(), script_meta);
        }

        // Load any dependency declarations from a previous run.
        if unit.mode.is_run_custom_build() {
            parse_previous_explicit_deps(cx, unit);
        }

        // We want to invoke the compiler deterministically to be cache-friendly
        // to rustc invocation caching schemes, so be sure to generate the same
        // set of build script dependency orderings via sorting the targets that
        // come out of the `Context`.
        let mut dependencies: Vec<Unit> =
            cx.unit_deps(unit).iter().map(|d| d.unit.clone()).collect();
        dependencies.sort_by_key(|u| u.pkg.package_id());

        for dep_unit in dependencies.iter() {
            let dep_scripts = build(out, cx, dep_unit)?;

            if dep_unit.target.for_host() {
                ret.plugins.extend(dep_scripts.to_link.iter().cloned());
            } else if dep_unit.target.is_linkable() {
                for &(pkg, metadata) in dep_scripts.to_link.iter() {
                    add_to_link(&mut ret, pkg, metadata);
                }
            }
        }

        match out.entry(unit.clone()) {
            Entry::Vacant(entry) => Ok(entry.insert(ret)),
            Entry::Occupied(_) => panic!("cyclic dependencies in `build_map`"),
        }
    }

    // When adding an entry to 'to_link' we only actually push it on if the
    // script hasn't seen it yet (e.g., we don't push on duplicates).
    fn add_to_link(scripts: &mut BuildScripts, pkg: PackageId, metadata: Metadata) {
        if scripts.seen_to_link.insert((pkg, metadata)) {
            scripts.to_link.push((pkg, metadata));
        }
    }

    fn parse_previous_explicit_deps(cx: &mut Context<'_, '_>, unit: &Unit) {
        let script_run_dir = cx.files().build_script_run_dir(unit);
        let output_file = script_run_dir.join("output");
        let (prev_output, _) = prev_build_output(cx, unit);
        let deps = BuildDeps::new(&output_file, prev_output.as_ref());
        cx.build_explicit_deps.insert(unit.clone(), deps);
    }
}

/// Returns the previous parsed `BuildOutput`, if any, from a previous
/// execution.
///
/// Also returns the directory containing the output, typically used later in
/// processing.
fn prev_build_output(cx: &mut Context<'_, '_>, unit: &Unit) -> (Option, PathBuf) {
    let script_out_dir = cx.files().build_script_out_dir(unit);
    let script_run_dir = cx.files().build_script_run_dir(unit);
    let root_output_file = script_run_dir.join("root-output");
    let output_file = script_run_dir.join("output");

    // `root-output` records where the build script's out dir was on the
    // previous run; if it is missing or unreadable, fall back to the current
    // out dir.
    let prev_script_out_dir = paths::read_bytes(&root_output_file)
        .and_then(|bytes| paths::bytes2path(&bytes))
        .unwrap_or_else(|_| script_out_dir.clone());

    (
        BuildOutput::parse_file(
            &output_file,
            unit.pkg.library().map(|t| t.crate_name()),
            &unit.pkg.to_string(),
            &prev_script_out_dir,
            &script_out_dir,
            // Whether `-Zcheck-cfg` output is requested (the fourth element of
            // the unstable `check_cfg` option tuple).
            match cx.bcx.config.cli_unstable().check_cfg {
                Some((_, _, _, output)) => output,
                None => false,
            },
            cx.bcx.config.nightly_features_allowed,
            unit.pkg.targets(),
        )
        .ok(),
        prev_script_out_dir,
    )
}

impl BuildScriptOutputs {
    /// Inserts a new entry into the map.
    ///
    /// Panics if an output was already recorded for the same `metadata` key,
    /// since that indicates two units collided on the same metadata hash.
    fn insert(&mut self, pkg_id: PackageId, metadata: Metadata, parsed_output: BuildOutput) {
        match self.outputs.entry(metadata) {
            Entry::Vacant(entry) => {
                entry.insert(parsed_output);
            }
            Entry::Occupied(entry) => panic!(
                "build script output collision for {}/{}\n\
                 old={:?}\nnew={:?}",
                pkg_id,
                metadata,
                entry.get(),
                parsed_output
            ),
        }
    }

    /// Returns `true` if the given key already exists.
    fn contains_key(&self, metadata: Metadata) -> bool {
        self.outputs.contains_key(&metadata)
    }

    /// Gets the build output for the given key.
    pub fn get(&self, meta: Metadata) -> Option<&BuildOutput> {
        self.outputs.get(&meta)
    }

    /// Returns an iterator over all entries.
    pub fn iter(&self) -> impl Iterator {
        self.outputs.iter()
    }
}
cargo-0.66.0/src/cargo/core/compiler/fingerprint.rs000066400000000000000000002515121432416201200222200ustar00rootroot00000000000000
//! # Fingerprints
//!
//! This module implements change-tracking so that Cargo can know whether or
//! not something needs to be recompiled. A Cargo `Unit` can be either "dirty"
//!
(needs to be recompiled) or "fresh" (it does not need to be recompiled). //! There are several mechanisms that influence a Unit's freshness: //! //! - The `Fingerprint` is a hash, saved to the filesystem in the //! `.fingerprint` directory, that tracks information about the Unit. If the //! fingerprint is missing (such as the first time the unit is being //! compiled), then the unit is dirty. If any of the fingerprint fields //! change (like the name of the source file), then the Unit is considered //! dirty. //! //! The `Fingerprint` also tracks the fingerprints of all its dependencies, //! so a change in a dependency will propagate the "dirty" status up. //! //! - Filesystem mtime tracking is also used to check if a unit is dirty. //! See the section below on "Mtime comparison" for more details. There //! are essentially two parts to mtime tracking: //! //! 1. The mtime of a Unit's output files is compared to the mtime of all //! its dependencies' output file mtimes (see `check_filesystem`). If any //! output is missing, or is older than a dependency's output, then the //! unit is dirty. //! 2. The mtime of a Unit's source files is compared to the mtime of its //! dep-info file in the fingerprint directory (see `find_stale_file`). //! The dep-info file is used as an anchor to know when the last build of //! the unit was done. See the "dep-info files" section below for more //! details. If any input files are missing, or are newer than the //! dep-info, then the unit is dirty. //! //! Note: Fingerprinting is not a perfect solution. Filesystem mtime tracking //! is notoriously imprecise and problematic. Only a small part of the //! environment is captured. This is a balance of performance, simplicity, and //! completeness. Sandboxing, hashing file contents, tracking every file //! access, environment variable, and network operation would ensure more //! reliable and reproducible builds at the cost of being complex, slow, and //! platform-dependent. //! //! 
## Fingerprints and Metadata //! //! The `Metadata` hash is a hash added to the output filenames to isolate //! each unit. See the documentation in the `compilation_files` module for //! more details. NOTE: Not all output files are isolated via filename hashes //! (like dylibs). The fingerprint directory uses a hash, but sometimes units //! share the same fingerprint directory (when they don't have Metadata) so //! care should be taken to handle this! //! //! Fingerprints and Metadata are similar, and track some of the same things. //! The Metadata contains information that is required to keep Units separate. //! The Fingerprint includes additional information that should cause a //! recompile, but it is desired to reuse the same filenames. A comparison //! of what is tracked: //! //! Value | Fingerprint | Metadata //! -------------------------------------------|-------------|---------- //! rustc | βœ“ | βœ“ //! Profile | βœ“ | βœ“ //! `cargo rustc` extra args | βœ“ | βœ“ //! CompileMode | βœ“ | βœ“ //! Target Name | βœ“ | βœ“ //! TargetKind (bin/lib/etc.) | βœ“ | βœ“ //! Enabled Features | βœ“ | βœ“ //! Immediate dependency’s hashes | βœ“[^1] | βœ“ //! CompileKind (host/target) | βœ“ | βœ“ //! __CARGO_DEFAULT_LIB_METADATA[^4] | | βœ“ //! package_id | | βœ“ //! authors, description, homepage, repo | βœ“ | //! Target src path relative to ws | βœ“ | //! Target flags (test/bench/for_host/edition) | βœ“ | //! -C incremental=… flag | βœ“ | //! mtime of sources | βœ“[^3] | //! RUSTFLAGS/RUSTDOCFLAGS | βœ“ | //! LTO flags | βœ“ | βœ“ //! config settings[^5] | βœ“ | //! is_std | | βœ“ //! //! [^1]: Build script and bin dependencies are not included. //! //! [^3]: See below for details on mtime tracking. //! //! [^4]: `__CARGO_DEFAULT_LIB_METADATA` is set by rustbuild to embed the //! release channel (bootstrap/stable/beta/nightly) in libstd. //! //! [^5]: Config settings that are not otherwise captured anywhere else. //! Currently, this is only `doc.extern-map`. //! //! 
When deciding what should go in the Metadata vs the Fingerprint, consider //! that some files (like dylibs) do not have a hash in their filename. Thus, //! if a value changes, only the fingerprint will detect the change (consider, //! for example, swapping between different features). Fields that are only in //! Metadata generally aren't relevant to the fingerprint because they //! fundamentally change the output (like target vs host changes the directory //! where it is emitted). //! //! ## Fingerprint files //! //! Fingerprint information is stored in the //! `target/{debug,release}/.fingerprint/` directory. Each Unit is stored in a //! separate directory. Each Unit directory contains: //! //! - A file with a 16 hex-digit hash. This is the Fingerprint hash, used for //! quick loading and comparison. //! - A `.json` file that contains details about the Fingerprint. This is only //! used to log details about *why* a fingerprint is considered dirty. //! `CARGO_LOG=cargo::core::compiler::fingerprint=trace cargo build` can be //! used to display this log information. //! - A "dep-info" file which is a translation of rustc's `*.d` dep-info files //! to a Cargo-specific format that tweaks file names and is optimized for //! reading quickly. //! - An `invoked.timestamp` file whose filesystem mtime is updated every time //! the Unit is built. This is used for capturing the time when the build //! starts, to detect if files are changed in the middle of the build. See //! below for more details. //! //! Note that some units are a little different. A Unit for *running* a build //! script or for `rustdoc` does not have a dep-info file (it's not //! applicable). Build script `invoked.timestamp` files are in the build //! output directory. //! //! ## Fingerprint calculation //! //! After the list of Units has been calculated, the Units are added to the //! `JobQueue`. As each one is added, the fingerprint is calculated, and the //! dirty/fresh status is recorded. 
A closure is used to update the fingerprint //! on-disk when the Unit successfully finishes. The closure will recompute the //! Fingerprint based on the updated information. If the Unit fails to compile, //! the fingerprint is not updated. //! //! Fingerprints are cached in the `Context`. This makes computing //! Fingerprints faster, but also is necessary for properly updating //! dependency information. Since a Fingerprint includes the Fingerprints of //! all dependencies, when it is updated, by using `Arc` clones, it //! automatically picks up the updates to its dependencies. //! //! ### dep-info files //! //! Cargo passes the `--emit=dep-info` flag to `rustc` so that `rustc` will //! generate a "dep info" file (with the `.d` extension). This is a //! Makefile-like syntax that includes all of the source files used to build //! the crate. This file is used by Cargo to know which files to check to see //! if the crate will need to be rebuilt. //! //! After `rustc` exits successfully, Cargo will read the dep info file and //! translate it into a binary format that is stored in the fingerprint //! directory (`translate_dep_info`). The mtime of the fingerprint dep-info //! file itself is used as the reference for comparing the source files to //! determine if any of the source files have been modified (see below for //! more detail). Note that Cargo parses the special `# env-var:...` comments in //! dep-info files to learn about environment variables that the rustc compile //! depends on. Cargo then later uses this to trigger a recompile if a //! referenced env var changes (even if the source didn't change). //! //! There is also a third dep-info file. Cargo will extend the file created by //! rustc with some additional information and saves this into the output //! directory. This is intended for build system integration. See the //! `output_depinfo` module for more detail. //! //! #### -Zbinary-dep-depinfo //! //! 
`rustc` has an experimental flag `-Zbinary-dep-depinfo`. This causes //! `rustc` to include binary files (like rlibs) in the dep-info file. This is //! primarily to support rustc development, so that Cargo can check the //! implicit dependency to the standard library (which lives in the sysroot). //! We want Cargo to recompile whenever the standard library rlib/dylibs //! change, and this is a generic mechanism to make that work. //! //! ### Mtime comparison //! //! The use of modification timestamps is the most common way a unit will be //! determined to be dirty or fresh between builds. There are many subtle //! issues and edge cases with mtime comparisons. This gives a high-level //! overview, but you'll need to read the code for the gritty details. Mtime //! handling is different for different unit kinds. The different styles are //! driven by the `Fingerprint.local` field, which is set based on the unit //! kind. //! //! The status of whether or not the mtime is "stale" or "up-to-date" is //! stored in `Fingerprint.fs_status`. //! //! All units will compare the mtime of its newest output file with the mtimes //! of the outputs of all its dependencies. If any output file is missing, //! then the unit is stale. If any dependency is newer, the unit is stale. //! //! #### Normal package mtime handling //! //! `LocalFingerprint::CheckDepinfo` is used for checking the mtime of //! packages. It compares the mtime of the input files (the source files) to //! the mtime of the dep-info file (which is written last after a build is //! finished). If the dep-info is missing, the unit is stale (it has never //! been built). The list of input files comes from the dep-info file. See the //! section above for details on dep-info files. //! //! Also note that although registry and git packages use `CheckDepInfo`, none //! of their source files are included in the dep-info (see //! `translate_dep_info`), so for those kinds no mtime checking is done //! 
//! (unless `-Zbinary-dep-depinfo` is used). Registry and git packages are
//! static, so there is no need to check anything.
//!
//! When a build is complete, the mtime of the dep-info file in the
//! fingerprint directory is modified to rewind it to the time when the build
//! started. This is done by creating an `invoked.timestamp` file when the
//! build starts to capture the start time. The mtime is rewound to the start
//! to handle the case where the user modifies a source file while a build is
//! running. Cargo can't know whether or not the file was included in the
//! build, so it takes a conservative approach of assuming the file was *not*
//! included, and it should be rebuilt during the next build.
//!
//! #### Rustdoc mtime handling
//!
//! Rustdoc does not emit a dep-info file, so Cargo currently has a relatively
//! simple system for detecting rebuilds. `LocalFingerprint::Precalculated` is
//! used for rustdoc units. For registry packages, this is the package
//! version. For git packages, it is the git hash. For path packages, it is
//! a string of the mtime of the newest file in the package.
//!
//! There are some known bugs with how this works, so it should be improved at
//! some point.
//!
//! #### Build script mtime handling
//!
//! Build script mtime handling runs in different modes. There is the "old
//! style" where the build script does not emit any `rerun-if` directives. In
//! this mode, Cargo will use `LocalFingerprint::Precalculated`. See the
//! "rustdoc" section above how it works.
//!
//! In the new-style, each `rerun-if` directive is translated to the
//! corresponding `LocalFingerprint` variant. The `RerunIfChanged` variant
//! compares the mtime of the given filenames against the mtime of the
//! "output" file.
//!
//! Similar to normal units, the build script "output" file mtime is rewound
//! to the time just before the build script is executed to handle mid-build
//! modifications.
//!
//!
## Considerations for inclusion in a fingerprint //! //! Over time we've realized a few items which historically were included in //! fingerprint hashings should not actually be included. Examples are: //! //! * Modification time values. We strive to never include a modification time //! inside a `Fingerprint` to get hashed into an actual value. While //! theoretically fine to do, in practice this causes issues with common //! applications like Docker. Docker, after a layer is built, will zero out //! the nanosecond part of all filesystem modification times. This means that //! the actual modification time is different for all build artifacts, which //! if we tracked the actual values of modification times would cause //! unnecessary recompiles. To fix this we instead only track paths which are //! relevant. These paths are checked dynamically to see if they're up to //! date, and the modification time doesn't make its way into the fingerprint //! hash. //! //! * Absolute path names. We strive to maintain a property where if you rename //! a project directory Cargo will continue to preserve all build artifacts //! and reuse the cache. This means that we can't ever hash an absolute path //! name. Instead we always hash relative path names and the "root" is passed //! in at runtime dynamically. Some of this is best effort, but the general //! idea is that we assume all accesses within a crate stay within that //! crate. //! //! These are pretty tricky to test for unfortunately, but we should have a good //! test suite nowadays and lord knows Cargo gets enough testing in the wild! //! //! ## Build scripts //! //! The *running* of a build script (`CompileMode::RunCustomBuild`) is treated //! significantly different than all other Unit kinds. It has its own function //! for calculating the Fingerprint (`calculate_run_custom_build`) and has some //! unique considerations. It does not track the same information as a normal //! Unit. 
The information tracked depends on the `rerun-if-changed` and //! `rerun-if-env-changed` statements produced by the build script. If the //! script does not emit either of these statements, the Fingerprint runs in //! "old style" mode where an mtime change of *any* file in the package will //! cause the build script to be re-run. Otherwise, the fingerprint *only* //! tracks the individual "rerun-if" items listed by the build script. //! //! The "rerun-if" statements from a *previous* build are stored in the build //! output directory in a file called `output`. Cargo parses this file when //! the Unit for that build script is prepared for the `JobQueue`. The //! Fingerprint code can then use that information to compute the Fingerprint //! and compare against the old fingerprint hash. //! //! Care must be taken with build script Fingerprints because the //! `Fingerprint::local` value may be changed after the build script runs //! (such as if the build script adds or removes "rerun-if" items). //! //! Another complication is if a build script is overridden. In that case, the //! fingerprint is the hash of the output of the override. //! //! ## Special considerations //! //! Registry dependencies do not track the mtime of files. This is because //! registry dependencies are not expected to change (if a new version is //! used, the Package ID will change, causing a rebuild). Cargo currently //! partially works with Docker caching. When a Docker image is built, it has //! normal mtime information. However, when a step is cached, the nanosecond //! portions of all files is zeroed out. Currently this works, but care must //! be taken for situations like these. //! //! HFS on macOS only supports 1 second timestamps. This causes a significant //! number of problems, particularly with Cargo's testsuite which does rapid //! builds in succession. Other filesystems have various degrees of //! resolution. //! //! 
Various weird filesystems (such as network filesystems) also can cause //! complications. Network filesystems may track the time on the server //! (except when the time is set manually such as with //! `filetime::set_file_times`). Not all filesystems support modifying the //! mtime. //! //! See the `A-rebuild-detection` flag on the issue tracker for more: //! use std::collections::hash_map::{Entry, HashMap}; use std::env; use std::hash::{self, Hash, Hasher}; use std::io; use std::path::{Path, PathBuf}; use std::str; use std::sync::{Arc, Mutex}; use std::time::SystemTime; use anyhow::{bail, format_err, Context as _}; use cargo_util::{paths, ProcessBuilder}; use filetime::FileTime; use log::{debug, info}; use serde::de; use serde::ser; use serde::{Deserialize, Serialize}; use crate::core::compiler::unit_graph::UnitDep; use crate::core::Package; use crate::util; use crate::util::errors::CargoResult; use crate::util::interning::InternedString; use crate::util::{internal, path_args, profile, StableHasher}; use crate::CARGO_ENV; use super::custom_build::BuildDeps; use super::job::{Job, Work}; use super::{BuildContext, Context, FileFlavor, Unit}; /// Determines if a `unit` is up-to-date, and if not prepares necessary work to /// update the persisted fingerprint. /// /// This function will inspect `unit`, calculate a fingerprint for it, and then /// return an appropriate `Job` to run. The returned `Job` will be a noop if /// `unit` is considered "fresh", or if it was previously built and cached. /// Otherwise the `Job` returned will write out the true fingerprint to the /// filesystem, to be executed after the unit's work has completed. /// /// The `force` flag is a way to force the `Job` to be "dirty", or always /// update the fingerprint. 
/// **Beware using this flag** because it does not
/// transitively propagate throughout the dependency graph, it only forces this
/// one unit which is very unlikely to be what you want unless you're
/// exclusively talking about top-level units.
pub fn prepare_target(cx: &mut Context<'_, '_>, unit: &Unit, force: bool) -> CargoResult {
    let _p = profile::start(format!(
        "fingerprint: {} / {}",
        unit.pkg.package_id(),
        unit.target.name()
    ));
    let bcx = cx.bcx;
    // Location of this unit's fingerprint hash file on disk.
    let loc = cx.files().fingerprint_file_path(unit, "");
    debug!("fingerprint at: {}", loc.display());

    // Figure out if this unit is up to date. After calculating the fingerprint
    // compare it to an old version, if any, and attempt to print diagnostic
    // information about failed comparisons to aid in debugging.
    let fingerprint = calculate(cx, unit)?;
    let mtime_on_use = cx.bcx.config.cli_unstable().mtime_on_use;
    let compare = compare_old_fingerprint(&loc, &*fingerprint, mtime_on_use);
    log_compare(unit, &compare);

    // If our comparison failed (e.g., we're going to trigger a rebuild of this
    // crate), then we also ensure the source of the crate passes all
    // verification checks before we build it.
    //
    // The `Source::verify` method is intended to allow sources to execute
    // pre-build checks to ensure that the relevant source code is all
    // up-to-date and as expected. This is currently used primarily for
    // directory sources which will use this hook to perform an integrity check
    // on all files in the source to ensure they haven't changed. If they have
    // changed then an error is issued.
    if compare.is_err() {
        let source_id = unit.pkg.package_id().source_id();
        let sources = bcx.packages.sources();
        let source = sources
            .get(source_id)
            .ok_or_else(|| internal("missing package source"))?;
        source.verify(unit.pkg.package_id())?;
    }

    if compare.is_ok() && !force {
        return Ok(Job::new_fresh());
    }

    // Clear out the old fingerprint file if it exists. This protects when
    // compilation is interrupted leaving a corrupt file. For example, a
    // project with a lib.rs and integration test (two units):
    //
    // 1. Build the library and integration test.
    // 2. Make a change to lib.rs (NOT the integration test).
    // 3. Build the integration test, hit Ctrl-C while linking. With gcc, this
    //    will leave behind an incomplete executable (zero size, or partially
    //    written). NOTE: The library builds successfully, it is the linking
    //    of the integration test that we are interrupting.
    // 4. Build the integration test again.
    //
    // Without the following line, then step 3 will leave a valid fingerprint
    // on the disk. Then step 4 will think the integration test is "fresh"
    // because:
    //
    // - There is a valid fingerprint hash on disk (written in step 1).
    // - The mtime of the output file (the corrupt integration executable
    //   written in step 3) is newer than all of its dependencies.
    // - The mtime of the integration test fingerprint dep-info file (written
    //   in step 1) is newer than the integration test's source files, because
    //   we haven't modified any of its source files.
    //
    // But the executable is corrupt and needs to be rebuilt. Clearing the
    // fingerprint at step 3 ensures that Cargo never mistakes a partially
    // written output as up-to-date.
    if loc.exists() {
        // Truncate instead of delete so that compare_old_fingerprint will
        // still log the reason for the fingerprint failure instead of just
        // reporting "failed to read fingerprint" during the next build if
        // this build fails.
        paths::write(&loc, b"")?;
    }

    let write_fingerprint = if unit.mode.is_run_custom_build() {
        // For build scripts the `local` field of the fingerprint may change
        // while we're executing it. For example it could be in the legacy
        // "consider everything a dependency mode" and then we switch to "deps
        // are explicitly specified" mode.
        //
        // To handle this movement we need to regenerate the `local` field of a
        // build script's fingerprint after it's executed. We do this by
        // using the `build_script_local_fingerprints` function which returns a
        // thunk we can invoke on a foreign thread to calculate this.
        let build_script_outputs = Arc::clone(&cx.build_script_outputs);
        let metadata = cx.get_run_build_script_metadata(unit);
        let (gen_local, _overridden) = build_script_local_fingerprints(cx, unit);
        let output_path = cx.build_explicit_deps[unit].build_script_output.clone();
        Work::new(move |_| {
            let outputs = build_script_outputs.lock().unwrap();
            let output = outputs
                .get(metadata)
                .expect("output must exist after running");
            let deps = BuildDeps::new(&output_path, Some(output));

            // FIXME: it's basically buggy that we pass `None` to `call_box`
            // here. See documentation on `build_script_local_fingerprints`
            // below for more information. Despite this just try to proceed and
            // hobble along if it happens to return `Some`.
            if let Some(new_local) = (gen_local)(&deps, None)? {
                *fingerprint.local.lock().unwrap() = new_local;
            }

            write_fingerprint(&loc, &fingerprint)
        })
    } else {
        Work::new(move |_| write_fingerprint(&loc, &fingerprint))
    };

    Ok(Job::new_dirty(write_fingerprint))
}

/// Dependency edge information for fingerprints. This is generated for each
/// dependency and is stored in a `Fingerprint` below.
#[derive(Clone)]
struct DepFingerprint {
    /// The hash of the package id that this dependency points to
    pkg_id: u64,
    /// The crate name we're using for this dependency, which if we change we'll
    /// need to recompile!
    name: InternedString,
    /// Whether or not this dependency is flagged as a public dependency or not.
    public: bool,
    /// Whether or not this dependency is an rmeta dependency or a "full"
    /// dependency. In the case of an rmeta dependency our dependency edge only
    /// actually requires the rmeta from what we depend on, so when checking
    /// mtime information all files other than the rmeta can be ignored.
only_requires_rmeta: bool,
    /// The dependency's fingerprint we recursively point to, containing all the
    /// other hash information we'd otherwise need.
    fingerprint: Arc,
}

/// A fingerprint can be considered to be a "short string" representing the
/// state of a world for a package.
///
/// If a fingerprint ever changes, then the package itself needs to be
/// recompiled. Inputs to the fingerprint include source code modifications,
/// compiler flags, compiler version, etc. This structure is not simply a
/// `String` due to the fact that some fingerprints cannot be calculated lazily.
///
/// Path sources, for example, use the mtime of the corresponding dep-info file
/// as a fingerprint (all source files must be modified *before* this mtime).
/// This dep-info file is not generated, however, until after the crate is
/// compiled. As a result, this structure can be thought of as a fingerprint
/// to-be. The actual value can be calculated via `hash_u64()`, but the operation
/// may fail as some files may not have been generated.
///
/// Note that dependencies are taken into account for fingerprints because rustc
/// requires that whenever an upstream crate is recompiled that all downstream
/// dependents are also recompiled. This is typically tracked through
/// `DependencyQueue`, but it also needs to be retained here because Cargo can
/// be interrupted while executing, losing the state of the `DependencyQueue`
/// graph.
#[derive(Serialize, Deserialize)]
pub struct Fingerprint {
    /// Hash of the version of `rustc` used.
    rustc: u64,
    /// Sorted list of cfg features enabled.
    features: String,
    /// Hash of the `Target` struct, including the target name,
    /// package-relative source path, edition, etc.
    target: u64,
    /// Hash of the `Profile`, `CompileMode`, and any extra flags passed via
    /// `cargo rustc` or `cargo rustdoc`.
    profile: u64,
    /// Hash of the path to the base source file. This is relative to the
    /// workspace root for path members, or absolute for other sources.
    path: u64,
    /// Fingerprints of dependencies.
    deps: Vec,
    /// Information about the inputs that affect this Unit (such as source
    /// file mtimes or build script environment variables).
    local: Mutex>,
    /// Cached hash of the `Fingerprint` struct. Used to improve performance
    /// for hashing.
    #[serde(skip)]
    memoized_hash: Mutex>,
    /// RUSTFLAGS/RUSTDOCFLAGS environment variable value (or config value).
    rustflags: Vec,
    /// Hash of some metadata from the manifest, such as "authors", or
    /// "description", which are exposed as environment variables during
    /// compilation.
    metadata: u64,
    /// Hash of various config settings that change how things are compiled.
    config: u64,
    /// The rustc target. This is only relevant for `.json` files, otherwise
    /// the metadata hash segregates the units.
    compile_kind: u64,
    /// Description of whether the filesystem status for this unit is up to date
    /// or should be considered stale.
    #[serde(skip)]
    fs_status: FsStatus,
    /// Files, relative to `target_root`, that are produced by the step that
    /// this `Fingerprint` represents. This is used to detect when the whole
    /// fingerprint is out of date if this is missing, or if previous
    /// fingerprints output files are regenerated and look newer than this one.
    #[serde(skip)]
    outputs: Vec,
}

/// Indication of the status on the filesystem for a particular unit.
enum FsStatus {
    /// This unit is to be considered stale, even if hash information all
    /// matches. The filesystem inputs have changed (or are missing) and the
    /// unit needs to subsequently be recompiled.
    Stale,

    /// This unit is up-to-date. All outputs and their corresponding mtime are
    /// listed in the payload here for other dependencies to compare against.
    UpToDate { mtimes: HashMap },
}

impl FsStatus {
    fn up_to_date(&self) -> bool {
        match self {
            FsStatus::UpToDate { .. } => true,
            FsStatus::Stale => false,
        }
    }
}

impl Default for FsStatus {
    fn default() -> FsStatus {
        FsStatus::Stale
    }
}

// Serialized as a tuple; the full recursive dependency fingerprint is
// collapsed to its u64 hash on disk.
impl Serialize for DepFingerprint {
    fn serialize(&self, ser: S) -> Result
    where
        S: ser::Serializer,
    {
        (
            &self.pkg_id,
            &self.name,
            &self.public,
            &self.fingerprint.hash_u64(),
        )
            .serialize(ser)
    }
}

impl<'de> Deserialize<'de> for DepFingerprint {
    fn deserialize(d: D) -> Result
    where
        D: de::Deserializer<'de>,
    {
        let (pkg_id, name, public, hash) = <(u64, String, bool, u64)>::deserialize(d)?;
        Ok(DepFingerprint {
            pkg_id,
            name: InternedString::new(&name),
            public,
            // Rehydrate only the memoized hash; the rest of the recursive
            // fingerprint is not stored on disk.
            fingerprint: Arc::new(Fingerprint {
                memoized_hash: Mutex::new(Some(hash)),
                ..Fingerprint::new()
            }),
            // This field is never read since it's only used in
            // `check_filesystem` which isn't used by fingerprints loaded from
            // disk.
            only_requires_rmeta: false,
        })
    }
}

/// A `LocalFingerprint` represents something that we use to detect direct
/// changes to a `Fingerprint`.
///
/// This is where we track file information, env vars, etc. This
/// `LocalFingerprint` struct is hashed and if the hash changes will force a
/// recompile of any fingerprint it's included into. Note that the "local"
/// terminology comes from the fact that it only has to do with one crate, and
/// `Fingerprint` tracks the transitive propagation of fingerprint changes.
///
/// Note that because this is hashed its contents are carefully managed. Like
/// mentioned in the above module docs, we don't want to hash absolute paths or
/// mtime information.
///
/// Also note that a `LocalFingerprint` is used in `check_filesystem` to detect
/// when the filesystem contains stale information (based on mtime currently).
/// The paths here don't change much between compilations but they're used as
/// inputs when we probe the filesystem looking at information.
#[derive(Debug, Serialize, Deserialize, Hash)]
enum LocalFingerprint {
    /// This is a precalculated fingerprint which has an opaque string we just
    /// hash as usual.
This variant is primarily used for rustdoc where we /// don't have a dep-info file to compare against. /// /// This is also used for build scripts with no `rerun-if-*` statements, but /// that's overall a mistake and causes bugs in Cargo. We shouldn't use this /// for build scripts. Precalculated(String), /// This is used for crate compilations. The `dep_info` file is a relative /// path anchored at `target_root(...)` to the dep-info file that Cargo /// generates (which is a custom serialization after parsing rustc's own /// `dep-info` output). /// /// The `dep_info` file, when present, also lists a number of other files /// for us to look at. If any of those files are newer than this file then /// we need to recompile. CheckDepInfo { dep_info: PathBuf }, /// This represents a nonempty set of `rerun-if-changed` annotations printed /// out by a build script. The `output` file is a relative file anchored at /// `target_root(...)` which is the actual output of the build script. That /// output has already been parsed and the paths printed out via /// `rerun-if-changed` are listed in `paths`. The `paths` field is relative /// to `pkg.root()` /// /// This is considered up-to-date if all of the `paths` are older than /// `output`, otherwise we need to recompile. RerunIfChanged { output: PathBuf, paths: Vec, }, /// This represents a single `rerun-if-env-changed` annotation printed by a /// build script. The exact env var and value are hashed here. There's no /// filesystem dependence here, and if the values are changed the hash will /// change forcing a recompile. RerunIfEnvChanged { var: String, val: Option }, } enum StaleItem { MissingFile(PathBuf), ChangedFile { reference: PathBuf, reference_mtime: FileTime, stale: PathBuf, stale_mtime: FileTime, }, ChangedEnv { var: String, previous: Option, current: Option, }, } impl LocalFingerprint { /// Checks dynamically at runtime if this `LocalFingerprint` has a stale /// item inside of it. 
    ///
    /// The main purpose of this function is to handle two different ways
    /// fingerprints can be invalidated:
    ///
    /// * One is a dependency listed in rustc's dep-info files is invalid. Note
    ///   that these could either be env vars or files. We check both here.
    ///
    /// * Another is the `rerun-if-changed` directive from build scripts. This
    ///   is where we'll find whether files have actually changed
    fn find_stale_item(
        &self,
        mtime_cache: &mut HashMap<PathBuf, FileTime>,
        pkg_root: &Path,
        target_root: &Path,
        cargo_exe: &Path,
    ) -> CargoResult<Option<StaleItem>> {
        match self {
            // We need to parse `dep_info`, learn about the crate's dependencies.
            //
            // For each env var we see if our current process's env var still
            // matches, and for each file we see if any of them are newer than
            // the `dep_info` file itself whose mtime represents the start of
            // rustc.
            LocalFingerprint::CheckDepInfo { dep_info } => {
                // `dep_info` is stored relative to the target root; rebuild the
                // absolute path before reading it.
                let dep_info = target_root.join(dep_info);
                let info = match parse_dep_info(pkg_root, target_root, &dep_info)? {
                    Some(info) => info,
                    // A missing or corrupt dep-info file means we can't prove
                    // freshness, so report the file itself as the stale item.
                    None => return Ok(Some(StaleItem::MissingFile(dep_info))),
                };
                for (key, previous) in info.env.iter() {
                    let current = if key == CARGO_ENV {
                        // The `CARGO` env var is special: it's compared against
                        // the path of the currently running `cargo` executable
                        // rather than the process environment.
                        Some(
                            cargo_exe
                                .to_str()
                                .ok_or_else(|| {
                                    format_err!(
                                        "cargo exe path {} must be valid UTF-8",
                                        cargo_exe.display()
                                    )
                                })?
                                .to_string(),
                        )
                    } else {
                        env::var(key).ok()
                    };
                    if current == *previous {
                        continue;
                    }
                    return Ok(Some(StaleItem::ChangedEnv {
                        var: key.clone(),
                        previous: previous.clone(),
                        current,
                    }));
                }
                Ok(find_stale_file(mtime_cache, &dep_info, info.files.iter()))
            }

            // We need to verify that no paths listed in `paths` are newer than
            // the `output` path itself, or the last time the build script ran.
            LocalFingerprint::RerunIfChanged { output, paths } => Ok(find_stale_file(
                mtime_cache,
                &target_root.join(output),
                paths.iter().map(|p| pkg_root.join(p)),
            )),

            // These have no dependencies on the filesystem, and their values
            // are included natively in the `Fingerprint` hash so nothing
            // to check for here.
            LocalFingerprint::RerunIfEnvChanged { .. } => Ok(None),
            LocalFingerprint::Precalculated(..) => Ok(None),
        }
    }

    /// A short human-readable tag naming this variant; used by
    /// `Fingerprint::compare` diagnostics when the variant kind itself differs
    /// between the old and new fingerprint.
    fn kind(&self) -> &'static str {
        match self {
            LocalFingerprint::Precalculated(..) => "precalculated",
            LocalFingerprint::CheckDepInfo { .. } => "dep-info",
            LocalFingerprint::RerunIfChanged { .. } => "rerun-if-changed",
            LocalFingerprint::RerunIfEnvChanged { .. } => "rerun-if-env-changed",
        }
    }
}

impl Fingerprint {
    /// Creates a blank fingerprint: all hashes zeroed, no locals/deps/outputs,
    /// and `fs_status` starting out `Stale`. Note that `rustc == 0` is used as
    /// a "never serialize this" sentinel by `write_fingerprint`.
    fn new() -> Fingerprint {
        Fingerprint {
            rustc: 0,
            target: 0,
            profile: 0,
            path: 0,
            features: String::new(),
            deps: Vec::new(),
            local: Mutex::new(Vec::new()),
            memoized_hash: Mutex::new(None),
            rustflags: Vec::new(),
            metadata: 0,
            config: 0,
            compile_kind: 0,
            fs_status: FsStatus::Stale,
            outputs: Vec::new(),
        }
    }

    /// For performance reasons fingerprints will memoize their own hash, but
    /// there's also internal mutability with its `local` field which can
    /// change, for example with build scripts, during a build.
    ///
    /// This method can be used to bust all memoized hashes just before a build
    /// to ensure that after a build completes everything is up-to-date.
    pub fn clear_memoized(&self) {
        *self.memoized_hash.lock().unwrap() = None;
    }

    /// Returns the hash of this fingerprint, computing it on first use and
    /// memoizing it in `memoized_hash` for subsequent calls (see
    /// `clear_memoized` for invalidation).
    fn hash_u64(&self) -> u64 {
        if let Some(s) = *self.memoized_hash.lock().unwrap() {
            return s;
        }
        let ret = util::hash_u64(self);
        *self.memoized_hash.lock().unwrap() = Some(ret);
        ret
    }

    /// Compares this fingerprint with an old version which was previously
    /// serialized to filesystem.
    ///
    /// The purpose of this is exclusively to produce a diagnostic message
    /// indicating why we're recompiling something. This function always returns
    /// an error, it will never return success.
    fn compare(&self, old: &Fingerprint) -> CargoResult<()> {
        // Cheap scalar comparisons first; each mismatch produces a targeted
        // diagnostic naming the component that changed.
        if self.rustc != old.rustc {
            bail!("rust compiler has changed")
        }
        if self.features != old.features {
            bail!(
                "features have changed: previously {}, now {}",
                old.features,
                self.features
            )
        }
        if self.target != old.target {
            bail!("target configuration has changed")
        }
        if self.path != old.path {
            bail!("path to the source has changed")
        }
        if self.profile != old.profile {
            bail!("profile configuration has changed")
        }
        if self.rustflags != old.rustflags {
            bail!(
                "RUSTFLAGS has changed: previously {:?}, now {:?}",
                old.rustflags,
                self.rustflags
            )
        }
        if self.metadata != old.metadata {
            bail!("metadata changed")
        }
        if self.config != old.config {
            bail!("configuration settings have changed")
        }
        if self.compile_kind != old.compile_kind {
            bail!("compile kind (rustc target) changed")
        }
        // Compare the `LocalFingerprint` lists pairwise; the lists are
        // positional, so a length mismatch alone is enough to be stale.
        let my_local = self.local.lock().unwrap();
        let old_local = old.local.lock().unwrap();
        if my_local.len() != old_local.len() {
            bail!("local lens changed");
        }
        for (new, old) in my_local.iter().zip(old_local.iter()) {
            match (new, old) {
                (LocalFingerprint::Precalculated(a), LocalFingerprint::Precalculated(b)) => {
                    if a != b {
                        bail!(
                            "precalculated components have changed: previously {}, now {}",
                            b,
                            a
                        )
                    }
                }
                (
                    LocalFingerprint::CheckDepInfo { dep_info: adep },
                    LocalFingerprint::CheckDepInfo { dep_info: bdep },
                ) => {
                    if adep != bdep {
                        bail!(
                            "dep info output changed: previously {:?}, now {:?}",
                            bdep,
                            adep
                        )
                    }
                }
                (
                    LocalFingerprint::RerunIfChanged {
                        output: aout,
                        paths: apaths,
                    },
                    LocalFingerprint::RerunIfChanged {
                        output: bout,
                        paths: bpaths,
                    },
                ) => {
                    if aout != bout {
                        bail!(
                            "rerun-if-changed output changed: previously {:?}, now {:?}",
                            bout,
                            aout
                        )
                    }
                    if apaths != bpaths {
                        bail!(
                            "rerun-if-changed output changed: previously {:?}, now {:?}",
                            bpaths,
                            apaths,
                        )
                    }
                }
                (
                    LocalFingerprint::RerunIfEnvChanged {
                        var: akey,
                        val: avalue,
                    },
                    LocalFingerprint::RerunIfEnvChanged {
                        var: bkey,
                        val: bvalue,
                    },
                ) => {
                    if *akey != *bkey {
                        bail!("env vars changed: previously {}, now {}", bkey, akey);
                    }
                    if *avalue != *bvalue {
                        bail!(
                            "env var `{}` changed: previously {:?}, now {:?}",
                            akey,
                            bvalue,
                            avalue
                        )
                    }
                }
                // Variant kinds differ at the same position in the list.
                (a, b) => bail!(
                    "local fingerprint type has changed ({} => {})",
                    b.kind(),
                    a.kind()
                ),
            }
        }

        if self.deps.len() != old.deps.len() {
            bail!("number of dependencies has changed")
        }
        for (a, b) in self.deps.iter().zip(old.deps.iter()) {
            if a.name != b.name {
                let e = format_err!("`{}` != `{}`", a.name, b.name)
                    .context("unit dependency name changed");
                return Err(e);
            }

            if a.fingerprint.hash_u64() != b.fingerprint.hash_u64() {
                let e = format_err!(
                    "new ({}/{:x}) != old ({}/{:x})",
                    a.name,
                    a.fingerprint.hash_u64(),
                    b.name,
                    b.fingerprint.hash_u64()
                )
                .context("unit dependency information changed");
                return Err(e);
            }
        }

        if !self.fs_status.up_to_date() {
            bail!("current filesystem status shows we're outdated");
        }

        // This typically means some filesystem modifications happened or
        // something transitive was odd. In general we should strive to provide
        // a better error message than this, so if you see this message a lot it
        // likely means this method needs to be updated!
        bail!("two fingerprint comparison turned up nothing obvious");
    }

    /// Dynamically inspect the local filesystem to update the `fs_status` field
    /// of this `Fingerprint`.
    ///
    /// This function is used just after a `Fingerprint` is constructed to check
    /// the local state of the filesystem and propagate any dirtiness from
    /// dependencies up to this unit as well. This function assumes that the
    /// unit starts out as `FsStatus::Stale` and then it will optionally switch
    /// it to `UpToDate` if it can.
    fn check_filesystem(
        &mut self,
        mtime_cache: &mut HashMap<PathBuf, FileTime>,
        pkg_root: &Path,
        target_root: &Path,
        cargo_exe: &Path,
    ) -> CargoResult<()> {
        assert!(!self.fs_status.up_to_date());

        let mut mtimes = HashMap::new();

        // Get the `mtime` of all outputs. Optionally update their mtime
        // afterwards based on the `mtime_on_use` flag. Afterwards we want the
        // minimum mtime as it's the one we'll be comparing to inputs and
        // dependencies.
        for output in self.outputs.iter() {
            let mtime = match paths::mtime(output) {
                Ok(mtime) => mtime,

                // This path failed to report its `mtime`. It probably doesn't
                // exist, so leave ourselves as stale and bail out.
                Err(e) => {
                    debug!("failed to get mtime of {:?}: {}", output, e);
                    return Ok(());
                }
            };
            assert!(mtimes.insert(output.clone(), mtime).is_none());
        }

        let opt_max = mtimes.iter().max_by_key(|kv| kv.1);
        let (max_path, max_mtime) = match opt_max {
            Some(mtime) => mtime,

            // We had no output files. This means we're an overridden build
            // script and we're just always up to date because we aren't
            // watching the filesystem.
            None => {
                self.fs_status = FsStatus::UpToDate { mtimes };
                return Ok(());
            }
        };
        debug!(
            "max output mtime for {:?} is {:?} {}",
            pkg_root, max_path, max_mtime
        );

        for dep in self.deps.iter() {
            let dep_mtimes = match &dep.fingerprint.fs_status {
                FsStatus::UpToDate { mtimes } => mtimes,
                // If our dependency is stale, so are we, so bail out.
                FsStatus::Stale => return Ok(()),
            };

            // If our dependency edge only requires the rmeta file to be present
            // then we only need to look at that one output file, otherwise we
            // need to consider all output files to see if we're out of date.
            let (dep_path, dep_mtime) = if dep.only_requires_rmeta {
                dep_mtimes
                    .iter()
                    .find(|(path, _mtime)| {
                        path.extension().and_then(|s| s.to_str()) == Some("rmeta")
                    })
                    .expect("failed to find rmeta")
            } else {
                match dep_mtimes.iter().max_by_key(|kv| kv.1) {
                    Some(dep_mtime) => dep_mtime,
                    // If our dependency is up to date and has no filesystem
                    // interactions, then we can move on to the next dependency.
                    None => continue,
                }
            };
            debug!(
                "max dep mtime for {:?} is {:?} {}",
                pkg_root, dep_path, dep_mtime
            );

            // If the dependency is newer than our own output then it was
            // recompiled previously. We transitively become stale ourselves in
            // that case, so bail out.
            //
            // Note that this comparison should probably be `>=`, not `>`, but
            // for a discussion of why it's `>` see the discussion about #5918
            // below in `find_stale`.
            if dep_mtime > max_mtime {
                info!(
                    "dependency on `{}` is newer than we are {} > {} {:?}",
                    dep.name, dep_mtime, max_mtime, pkg_root
                );
                return Ok(());
            }
        }

        // If we reached this far then all dependencies are up to date. Check
        // all our `LocalFingerprint` information to see if we have any stale
        // files for this package itself. If we do find something log a helpful
        // message and bail out so we stay stale.
        for local in self.local.get_mut().unwrap().iter() {
            if let Some(item) =
                local.find_stale_item(mtime_cache, pkg_root, target_root, cargo_exe)?
            {
                item.log();
                return Ok(());
            }
        }

        // Everything was up to date! Record such.
        self.fs_status = FsStatus::UpToDate { mtimes };
        debug!("filesystem up-to-date {:?}", pkg_root);

        Ok(())
    }
}

impl hash::Hash for Fingerprint {
    fn hash<H: Hasher>(&self, h: &mut H) {
        // Destructure explicitly so that adding a field to `Fingerprint`
        // forces a decision here about whether it participates in the hash.
        // `fs_status`, `outputs`, and `memoized_hash` are deliberately
        // excluded via `..`.
        let Fingerprint {
            rustc,
            ref features,
            target,
            path,
            profile,
            ref deps,
            ref local,
            metadata,
            config,
            compile_kind,
            ref rustflags,
            ..
        } = *self;
        let local = local.lock().unwrap();
        (
            rustc,
            features,
            target,
            path,
            profile,
            &*local,
            metadata,
            config,
            compile_kind,
            rustflags,
        )
            .hash(h);

        h.write_usize(deps.len());
        for DepFingerprint {
            pkg_id,
            name,
            public,
            fingerprint,
            only_requires_rmeta: _, // static property, no need to hash
        } in deps
        {
            pkg_id.hash(h);
            name.hash(h);
            public.hash(h);
            // use memoized dep hashes to avoid exponential blowup
            h.write_u64(fingerprint.hash_u64());
        }
    }
}

impl DepFingerprint {
    /// Builds the fingerprint for the dependency edge from `parent` to `dep`,
    /// recursively calculating the dependency unit's own `Fingerprint`.
    fn new(cx: &mut Context<'_, '_>, parent: &Unit, dep: &UnitDep) -> CargoResult<DepFingerprint> {
        let fingerprint = calculate(cx, &dep.unit)?;
        // We need to be careful about what we hash here. We have a goal of
        // supporting renaming a project directory and not rebuilding
        // everything. To do that, however, we need to make sure that the cwd
        // doesn't make its way into any hashes, and one source of that is the
        // `SourceId` for `path` packages.
        //
        // We already have a requirement that `path` packages all have unique
        // names (sort of for this same reason), so if the package source is a
        // `path` then we just hash the name, but otherwise we hash the full
        // id as it won't change when the directory is renamed.
        let pkg_id = if dep.unit.pkg.package_id().source_id().is_path() {
            util::hash_u64(dep.unit.pkg.package_id().name())
        } else {
            util::hash_u64(dep.unit.pkg.package_id())
        };

        Ok(DepFingerprint {
            pkg_id,
            name: dep.extern_crate_name,
            public: dep.public,
            fingerprint,
            only_requires_rmeta: cx.only_requires_rmeta(parent, &dep.unit),
        })
    }
}

impl StaleItem {
    /// Use the `log` crate to log a hopefully helpful message in diagnosing
    /// what file is considered stale and why. This is intended to be used in
    /// conjunction with `CARGO_LOG` to determine why Cargo is recompiling
    /// something. Currently there's no user-facing usage of this other than
    /// that.
    fn log(&self) {
        match self {
            StaleItem::MissingFile(path) => {
                info!("stale: missing {:?}", path);
            }
            StaleItem::ChangedFile {
                reference,
                reference_mtime,
                stale,
                stale_mtime,
            } => {
                info!("stale: changed {:?}", stale);
                info!("          (vs) {:?}", reference);
                info!("               {:?} != {:?}", reference_mtime, stale_mtime);
            }
            StaleItem::ChangedEnv {
                var,
                previous,
                current,
            } => {
                info!("stale: changed env {:?}", var);
                info!("       {:?} != {:?}", previous, current);
            }
        }
    }
}

/// Calculates the fingerprint for a `unit`.
///
/// This fingerprint is used by Cargo to learn about when information such as:
///
/// * A non-path package changes (changes version, changes revision, etc).
/// * Any dependency changes
/// * The compiler changes
/// * The set of features a package is built with changes
/// * The profile a target is compiled with changes (e.g., opt-level changes)
/// * Any other compiler flags change that will affect the result
///
/// Information like file modification time is only calculated for path
/// dependencies.
fn calculate(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<Arc<Fingerprint>> {
    // This function is slammed quite a lot, so the result is memoized.
    if let Some(s) = cx.fingerprints.get(unit) {
        return Ok(Arc::clone(s));
    }
    let mut fingerprint = if unit.mode.is_run_custom_build() {
        calculate_run_custom_build(cx, unit)?
    } else if unit.mode.is_doc_test() {
        panic!("doc tests do not fingerprint");
    } else {
        calculate_normal(cx, unit)?
    };

    // After we built the initial `Fingerprint` be sure to update the
    // `fs_status` field of it.
    let target_root = target_root(cx);
    let cargo_exe = cx.bcx.config.cargo_exe()?;
    fingerprint.check_filesystem(
        &mut cx.mtime_cache,
        unit.pkg.root(),
        &target_root,
        cargo_exe,
    )?;

    let fingerprint = Arc::new(fingerprint);
    cx.fingerprints
        .insert(unit.clone(), Arc::clone(&fingerprint));
    Ok(fingerprint)
}

/// Calculate a fingerprint for a "normal" unit, or anything that's not a build
/// script. This is an internal helper of `calculate`, don't call directly.
fn calculate_normal(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<Fingerprint> {
    // Recursively calculate the fingerprint for all of our dependencies.
    //
    // Skip fingerprints of binaries because they don't actually induce a
    // recompile, they're just dependencies in the sense that they need to be
    // built.
    //
    // Create Vec since mutable cx is needed in closure.
    let deps = Vec::from(cx.unit_deps(unit));
    let mut deps = deps
        .into_iter()
        .filter(|dep| !dep.unit.target.is_bin())
        .map(|dep| DepFingerprint::new(cx, unit, &dep))
        .collect::<CargoResult<Vec<_>>>()?;
    // Sort for a deterministic hash regardless of dependency iteration order.
    deps.sort_by(|a, b| a.pkg_id.cmp(&b.pkg_id));

    // Afterwards calculate our own fingerprint information.
    let target_root = target_root(cx);
    let local = if unit.mode.is_doc() {
        // rustdoc does not have dep-info files.
        let fingerprint = pkg_fingerprint(cx.bcx, &unit.pkg).with_context(|| {
            format!(
                "failed to determine package fingerprint for documenting {}",
                unit.pkg
            )
        })?;
        vec![LocalFingerprint::Precalculated(fingerprint)]
    } else {
        // Store the dep-info path relative to the target root; see the module
        // docs about never recording absolute paths.
        let dep_info = dep_info_loc(cx, unit);
        let dep_info = dep_info.strip_prefix(&target_root).unwrap().to_path_buf();
        vec![LocalFingerprint::CheckDepInfo { dep_info }]
    };

    // Figure out what the outputs of our unit are, and we'll be storing them
    // into the fingerprint as well.
    let outputs = cx
        .outputs(unit)?
        .iter()
        .filter(|output| !matches!(output.flavor, FileFlavor::DebugInfo | FileFlavor::Auxiliary))
        .map(|output| output.path.clone())
        .collect();

    // Fill out a bunch more information that we'll be tracking typically
    // hashed to take up less space on disk as we just need to know when things
    // change.
    let extra_flags = if unit.mode.is_doc() {
        cx.bcx.rustdocflags_args(unit)
    } else {
        cx.bcx.rustflags_args(unit)
    }
    .to_vec();

    let profile_hash = util::hash_u64((
        &unit.profile,
        unit.mode,
        cx.bcx.extra_args_for(unit),
        cx.lto[unit],
    ));
    // Include metadata since it is exposed as environment variables.
    let m = unit.pkg.manifest().metadata();
    let metadata = util::hash_u64((&m.authors, &m.description, &m.homepage, &m.repository));
    let mut config = StableHasher::new();
    if let Some(linker) = cx.bcx.linker(unit.kind) {
        linker.hash(&mut config);
    }
    if unit.mode.is_doc() && cx.bcx.config.cli_unstable().rustdoc_map {
        if let Ok(map) = cx.bcx.config.doc_extern_map() {
            map.hash(&mut config);
        }
    }
    if let Some(allow_features) = &cx.bcx.config.cli_unstable().allow_features {
        allow_features.hash(&mut config);
    }
    let compile_kind = unit.kind.fingerprint_hash();
    Ok(Fingerprint {
        rustc: util::hash_u64(&cx.bcx.rustc().verbose_version),
        target: util::hash_u64(&unit.target),
        profile: profile_hash,
        // Note that .0 is hashed here, not .1 which is the cwd. That doesn't
        // actually affect the output artifact so there's no need to hash it.
        path: util::hash_u64(path_args(cx.bcx.ws, unit).0),
        features: format!("{:?}", unit.features),
        deps,
        local: Mutex::new(local),
        memoized_hash: Mutex::new(None),
        metadata,
        config: config.finish(),
        compile_kind,
        rustflags: extra_flags,
        fs_status: FsStatus::Stale,
        outputs,
    })
}

/// Calculate a fingerprint for an "execute a build script" unit. This is an
/// internal helper of `calculate`, don't call directly.
fn calculate_run_custom_build(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<Fingerprint> {
    assert!(unit.mode.is_run_custom_build());
    // Using the `BuildDeps` information we'll have previously parsed and
    // inserted into `build_explicit_deps` build an initial snapshot of the
    // `LocalFingerprint` list for this build script. If we previously executed
    // the build script this means we'll be watching files and env vars.
    // Otherwise if we haven't previously executed it we'll just start watching
    // the whole crate.
    let (gen_local, overridden) = build_script_local_fingerprints(cx, unit);
    let deps = &cx.build_explicit_deps[unit];
    let local = (gen_local)(
        deps,
        Some(&|| {
            const IO_ERR_MESSAGE: &str = "\
An I/O error happened. Please make sure you can access the file.

By default, if your project contains a build script, cargo scans all files in \
it to determine whether a rebuild is needed. If you don't expect to access the \
file, specify `rerun-if-changed` in your build script.

See https://doc.rust-lang.org/cargo/reference/build-scripts.html#rerun-if-changed for more information.";
            pkg_fingerprint(cx.bcx, &unit.pkg).map_err(|err| {
                let mut message = format!(
                    "failed to determine package fingerprint for build script for {}",
                    unit.pkg
                );
                if err.root_cause().is::<io::Error>() {
                    message = format!("{}\n{}", message, IO_ERR_MESSAGE)
                }
                err.context(message)
            })
        }),
    )?
    .unwrap();
    let output = deps.build_script_output.clone();

    // Include any dependencies of our execution, which is typically just the
    // compilation of the build script itself. (if the build script changes we
    // should be rerun!). Note though that if we're an overridden build script
    // we have no dependencies so no need to recurse in that case.
    let deps = if overridden {
        // Overridden build scripts don't need to track deps.
        vec![]
    } else {
        // Create Vec since mutable cx is needed in closure.
        let deps = Vec::from(cx.unit_deps(unit));
        deps.into_iter()
            .map(|dep| DepFingerprint::new(cx, unit, &dep))
            .collect::<CargoResult<Vec<_>>>()?
    };

    Ok(Fingerprint {
        local: Mutex::new(local),
        rustc: util::hash_u64(&cx.bcx.rustc().verbose_version),
        deps,
        outputs: if overridden { Vec::new() } else { vec![output] },

        // Most of the other info is blank here as we don't really include it
        // in the execution of the build script, but... this may be a latent
        // bug in Cargo.
        ..Fingerprint::new()
    })
}

/// Get ready to compute the `LocalFingerprint` values for a `RunCustomBuild`
/// unit.
///
/// This function has, what's on the surface, a seriously wonky interface.
/// You'll call this function and it'll return a closure and a boolean. The
/// boolean is pretty simple in that it indicates whether the `unit` has been
/// overridden via `.cargo/config`. The closure is much more complicated.
///
/// This closure is intended to capture any local state necessary to compute
/// the `LocalFingerprint` values for this unit. It is `Send` and `'static` to
/// be sent to other threads as well (such as when we're executing build
/// scripts). That deduplication is the rationale for the closure at least.
///
/// The arguments to the closure are a bit weirder, though, and I'll apologize
/// in advance for the weirdness too. The first argument to the closure is a
/// `&BuildDeps`. This is the parsed version of a build script, and when Cargo
/// starts up this is cached from previous runs of a build script. After a
/// build script executes the output file is reparsed and passed in here.
///
/// The second argument is the weirdest, it's *optionally* a closure to
/// call `pkg_fingerprint` below. The `pkg_fingerprint` below requires access
/// to "source map" located in `Context`. That's very non-`'static` and
/// non-`Send`, so it can't be used on other threads, such as when we invoke
/// this after a build script has finished. The `Option` allows us to for sure
/// calculate it on the main thread at the beginning, and then swallow the bug
/// for now where a worker thread after a build script has finished doesn't
/// have access. Ideally there would be no second argument or it would be more
/// "first class" and not an `Option` but something that can be sent between
/// threads. In any case, it's a bug for now.
///
/// This isn't the greatest of interfaces, and if there's suggestions to
/// improve please do so!
///
/// FIXME(#6779) - see all the words above
fn build_script_local_fingerprints(
    cx: &mut Context<'_, '_>,
    unit: &Unit,
) -> (
    Box<
        dyn FnOnce(
                &BuildDeps,
                Option<&dyn Fn() -> CargoResult<String>>,
            ) -> CargoResult<Option<Vec<LocalFingerprint>>>
            + Send,
    >,
    bool,
) {
    assert!(unit.mode.is_run_custom_build());
    // First up, if this build script is entirely overridden, then we just
    // return the hash of what we overrode it with. This is the easy case!
    if let Some(fingerprint) = build_script_override_fingerprint(cx, unit) {
        debug!("override local fingerprints deps {}", unit.pkg);
        return (
            Box::new(
                move |_: &BuildDeps, _: Option<&dyn Fn() -> CargoResult<String>>| {
                    Ok(Some(vec![fingerprint]))
                },
            ),
            true, // this is an overridden build script
        );
    }

    // ... Otherwise this is a "real" build script and we need to return a real
    // closure. Our returned closure classifies the build script based on
    // whether it prints `rerun-if-*`. If it *doesn't* print this it's where the
    // magical second argument comes into play, which fingerprints a whole
    // package. Remember that the fact that this is an `Option` is a bug, but a
    // longstanding bug, in Cargo. Recent refactorings just made it painfully
    // obvious.
    let pkg_root = unit.pkg.root().to_path_buf();
    let target_dir = target_root(cx);
    let calculate =
        move |deps: &BuildDeps, pkg_fingerprint: Option<&dyn Fn() -> CargoResult<String>>| {
            if deps.rerun_if_changed.is_empty() && deps.rerun_if_env_changed.is_empty() {
                match pkg_fingerprint {
                    // FIXME: this is somewhat buggy with respect to docker and
                    // weird filesystems. The `Precalculated` variant
                    // constructed below will, for `path` dependencies, contain
                    // a stringified version of the mtime for the local crate.
                    // This violates one of the things we describe in this
                    // module's doc comment, never hashing mtimes. We should
                    // figure out a better scheme where a package fingerprint
                    // may be a string (like for a registry) or a list of files
                    // (like for a path dependency). Those list of files would
                    // be stored here rather than the mtime of them.
                    Some(f) => {
                        let s = f()?;
                        debug!(
                            "old local fingerprints deps {:?} precalculated={:?}",
                            pkg_root, s
                        );
                        return Ok(Some(vec![LocalFingerprint::Precalculated(s)]));
                    }
                    None => return Ok(None),
                }
            }

            // Ok so now we're in "new mode" where we can have files listed as
            // dependencies as well as env vars listed as dependencies. Process
            // them all here.
            Ok(Some(local_fingerprints_deps(deps, &target_dir, &pkg_root)))
        };

    // Note that `false` == "not overridden"
    (Box::new(calculate), false)
}

/// Create a `LocalFingerprint` for an overridden build script.
/// Returns None if it is not overridden.
fn build_script_override_fingerprint(
    cx: &mut Context<'_, '_>,
    unit: &Unit,
) -> Option<LocalFingerprint> {
    // Build script output is only populated at this stage when it is
    // overridden.
    let build_script_outputs = cx.build_script_outputs.lock().unwrap();
    let metadata = cx.get_run_build_script_metadata(unit);
    // Returns None if it is not overridden.
    let output = build_script_outputs.get(metadata)?;
    let s = format!(
        "overridden build state with hash: {}",
        util::hash_u64(output)
    );
    Some(LocalFingerprint::Precalculated(s))
}

/// Compute the `LocalFingerprint` values for a `RunCustomBuild` unit for
/// non-overridden new-style build scripts only. This is only used when `deps`
/// is already known to have a nonempty `rerun-if-*` somewhere.
fn local_fingerprints_deps(
    deps: &BuildDeps,
    target_root: &Path,
    pkg_root: &Path,
) -> Vec<LocalFingerprint> {
    debug!("new local fingerprints deps {:?}", pkg_root);
    let mut local = Vec::new();

    if !deps.rerun_if_changed.is_empty() {
        // Note that like the module comment above says we are careful to never
        // store an absolute path in `LocalFingerprint`, so ensure that we strip
        // absolute prefixes from them.
        let output = deps
            .build_script_output
            .strip_prefix(target_root)
            .unwrap()
            .to_path_buf();
        let paths = deps
            .rerun_if_changed
            .iter()
            .map(|p| p.strip_prefix(pkg_root).unwrap_or(p).to_path_buf())
            .collect();
        local.push(LocalFingerprint::RerunIfChanged { output, paths });
    }

    local.extend(
        deps.rerun_if_env_changed
            .iter()
            .map(|var| LocalFingerprint::RerunIfEnvChanged {
                var: var.clone(),
                val: env::var(var).ok(),
            }),
    );

    local
}

/// Writes `fingerprint` to `loc` as both a short hex hash and (at
/// `loc.json`) a full JSON serialization used for diagnostics on mismatch.
fn write_fingerprint(loc: &Path, fingerprint: &Fingerprint) -> CargoResult<()> {
    debug_assert_ne!(fingerprint.rustc, 0);
    // fingerprint::new().rustc == 0, make sure it doesn't make it to the file system.
    // This is mostly so outside tools can reliably find out what rust version this file is for,
    // as we can use the full hash.
    let hash = fingerprint.hash_u64();
    debug!("write fingerprint ({:x}) : {}", hash, loc.display());
    paths::write(loc, util::to_hex(hash).as_bytes())?;

    let json = serde_json::to_string(fingerprint).unwrap();
    if cfg!(debug_assertions) {
        // Sanity check: the JSON round-trip must preserve the hash.
        let f: Fingerprint = serde_json::from_str(&json).unwrap();
        assert_eq!(f.hash_u64(), hash);
    }
    paths::write(&loc.with_extension("json"), json.as_bytes())?;
    Ok(())
}

/// Prepare for work when a package starts to build
pub fn prepare_init(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<()> {
    let new1 = cx.files().fingerprint_dir(unit);

    // Doc tests have no output, thus no fingerprint.
    if !new1.exists() && !unit.mode.is_doc_test() {
        paths::create_dir_all(&new1)?;
    }

    Ok(())
}

/// Returns the location that the dep-info file will show up at for the `unit`
/// specified.
pub fn dep_info_loc(cx: &mut Context<'_, '_>, unit: &Unit) -> PathBuf {
    cx.files().fingerprint_file_path(unit, "dep-")
}

/// Returns an absolute path that target directory.
/// All paths are rewritten to be relative to this.
fn target_root(cx: &Context<'_, '_>) -> PathBuf {
    cx.bcx.ws.target_dir().into_path_unlocked()
}

/// Reads the previously-written fingerprint at `loc` and compares it against
/// `new_fingerprint`, returning `Ok(())` only when the short hash matches and
/// the filesystem status is up-to-date; otherwise returns the diagnostic error
/// from `Fingerprint::compare` explaining why a rebuild is needed.
fn compare_old_fingerprint(
    loc: &Path,
    new_fingerprint: &Fingerprint,
    mtime_on_use: bool,
) -> CargoResult<()> {
    let old_fingerprint_short = paths::read(loc)?;

    if mtime_on_use {
        // update the mtime so other cleaners know we used it
        let t = FileTime::from_system_time(SystemTime::now());
        debug!("mtime-on-use forcing {:?} to {}", loc, t);
        paths::set_file_time_no_err(loc, t);
    }

    let new_hash = new_fingerprint.hash_u64();

    if util::to_hex(new_hash) == old_fingerprint_short && new_fingerprint.fs_status.up_to_date() {
        return Ok(());
    }

    let old_fingerprint_json = paths::read(&loc.with_extension("json"))?;
    let old_fingerprint: Fingerprint = serde_json::from_str(&old_fingerprint_json)
        .with_context(|| internal("failed to deserialize json"))?;
    // Fingerprint can be empty after a failed rebuild (see comment in prepare_target).
    if !old_fingerprint_short.is_empty() {
        debug_assert_eq!(
            util::to_hex(old_fingerprint.hash_u64()),
            old_fingerprint_short
        );
    }
    let result = new_fingerprint.compare(&old_fingerprint);
    assert!(result.is_err());
    result
}

/// Logs (at info level) the reason a fingerprint comparison failed; a no-op
/// for `Ok` results.
fn log_compare(unit: &Unit, compare: &CargoResult<()>) {
    let ce = match compare {
        Ok(..) => return,
        Err(e) => e,
    };
    info!(
        "fingerprint error for {}/{:?}/{:?}",
        unit.pkg, unit.mode, unit.target,
    );
    info!("    err: {:?}", ce);
}

/// Parses Cargo's internal `EncodedDepInfo` structure that was previously
/// serialized to disk.
///
/// Note that this is not rustc's `*.d` files.
///
/// Also note that rustc's `*.d` files are translated to Cargo-specific
/// `EncodedDepInfo` files after compilations have finished in
/// `translate_dep_info`.
///
/// Returns `None` if the file is corrupt or couldn't be read from disk. This
/// indicates that the crate should likely be rebuilt.
pub fn parse_dep_info(
    pkg_root: &Path,
    target_root: &Path,
    dep_info: &Path,
) -> CargoResult<Option<RustcDepInfo>> {
    let data = match paths::read_bytes(dep_info) {
        Ok(data) => data,
        Err(_) => return Ok(None),
    };
    let info = match EncodedDepInfo::parse(&data) {
        Some(info) => info,
        None => {
            log::warn!("failed to parse cargo's dep-info at {:?}", dep_info);
            return Ok(None);
        }
    };
    let mut ret = RustcDepInfo::default();
    ret.env = info.env;
    ret.files.extend(info.files.into_iter().map(|(ty, path)| {
        match ty {
            DepInfoPathType::PackageRootRelative => pkg_root.join(path),
            // N.B. path might be absolute here in which case the join will have no effect
            DepInfoPathType::TargetRootRelative => target_root.join(path),
        }
    }));
    Ok(Some(ret))
}

/// Asks the package's source (registry, git, path, ...) for an opaque
/// fingerprint string of `pkg`; used for the `Precalculated` variant.
fn pkg_fingerprint(bcx: &BuildContext<'_, '_>, pkg: &Package) -> CargoResult<String> {
    let source_id = pkg.package_id().source_id();
    let sources = bcx.packages.sources();

    let source = sources
        .get(source_id)
        .ok_or_else(|| internal("missing package source"))?;
    source.fingerprint(pkg)
}

/// Returns the first item in `paths` that is missing or strictly newer than
/// `reference`, caching looked-up mtimes in `mtime_cache`; `None` means
/// everything is up-to-date relative to `reference`.
fn find_stale_file<I>(
    mtime_cache: &mut HashMap<PathBuf, FileTime>,
    reference: &Path,
    paths: I,
) -> Option<StaleItem>
where
    I: IntoIterator,
    I::Item: AsRef<Path>,
{
    let reference_mtime = match paths::mtime(reference) {
        Ok(mtime) => mtime,
        Err(..) => return Some(StaleItem::MissingFile(reference.to_path_buf())),
    };

    for path in paths {
        let path = path.as_ref();
        let path_mtime = match mtime_cache.entry(path.to_path_buf()) {
            Entry::Occupied(o) => *o.get(),
            Entry::Vacant(v) => {
                let mtime = match paths::mtime_recursive(path) {
                    Ok(mtime) => mtime,
                    Err(..) => return Some(StaleItem::MissingFile(path.to_path_buf())),
                };
                *v.insert(mtime)
            }
        };

        // TODO: fix #5918.
        // Note that equal mtimes should be considered "stale". For filesystems with
        // not much timestamp precision like 1s this would be a conservative approximation
        // to handle the case where a file is modified within the same second after
        // a build starts. We want to make sure that incremental rebuilds pick that up!
        //
        // For filesystems with nanosecond precision it's been seen in the wild that
        // its "nanosecond precision" isn't really nanosecond-accurate. It turns out that
        // kernels may cache the current time so files created at different times actually
        // list the same nanosecond precision. Some digging on #5919 picked up that the
        // kernel caches the current time between timer ticks, which could mean that if
        // a file is updated at most 10ms after a build starts then Cargo may not
        // pick up the build changes.
// // All in all, an equality check here would be a conservative assumption that, // if equal, files were changed just after a previous build finished. // Unfortunately this became problematic when (in #6484) cargo switch to more accurately // measuring the start time of builds. if path_mtime <= reference_mtime { continue; } return Some(StaleItem::ChangedFile { reference: reference.to_path_buf(), reference_mtime, stale: path.to_path_buf(), stale_mtime: path_mtime, }); } debug!( "all paths up-to-date relative to {:?} mtime={}", reference, reference_mtime ); None } enum DepInfoPathType { // src/, e.g. src/lib.rs PackageRootRelative, // target/debug/deps/lib... // or an absolute path /.../sysroot/... TargetRootRelative, } /// Parses the dep-info file coming out of rustc into a Cargo-specific format. /// /// This function will parse `rustc_dep_info` as a makefile-style dep info to /// learn about the all files which a crate depends on. This is then /// re-serialized into the `cargo_dep_info` path in a Cargo-specific format. /// /// The `pkg_root` argument here is the absolute path to the directory /// containing `Cargo.toml` for this crate that was compiled. The paths listed /// in the rustc dep-info file may or may not be absolute but we'll want to /// consider all of them relative to the `root` specified. /// /// The `rustc_cwd` argument is the absolute path to the cwd of the compiler /// when it was invoked. /// /// If the `allow_package` argument is true, then package-relative paths are /// included. If it is false, then package-relative paths are skipped and /// ignored (typically used for registry or git dependencies where we assume /// the source never changes, and we don't want the cost of running `stat` on /// all those files). See the module-level docs for the note about /// `-Zbinary-dep-depinfo` for more details on why this is done. /// /// The serialized Cargo format will contain a list of files, all of which are /// relative if they're under `root`. 
or absolute if they're elsewhere. pub fn translate_dep_info( rustc_dep_info: &Path, cargo_dep_info: &Path, rustc_cwd: &Path, pkg_root: &Path, target_root: &Path, rustc_cmd: &ProcessBuilder, allow_package: bool, ) -> CargoResult<()> { let depinfo = parse_rustc_dep_info(rustc_dep_info)?; let target_root = target_root.canonicalize()?; let pkg_root = pkg_root.canonicalize()?; let mut on_disk_info = EncodedDepInfo::default(); on_disk_info.env = depinfo.env; // This is a bit of a tricky statement, but here we're *removing* the // dependency on environment variables that were defined specifically for // the command itself. Environment variables returned by `get_envs` includes // environment variables like: // // * `OUT_DIR` if applicable // * env vars added by a build script, if any // // The general idea here is that the dep info file tells us what, when // changed, should cause us to rebuild the crate. These environment // variables are synthesized by Cargo and/or the build script, and the // intention is that their values are tracked elsewhere for whether the // crate needs to be rebuilt. // // For example a build script says when it needs to be rerun and otherwise // it's assumed to produce the same output, so we're guaranteed that env // vars defined by the build script will always be the same unless the build // script itself reruns, in which case the crate will rerun anyway. // // For things like `OUT_DIR` it's a bit sketchy for now. Most of the time // that's used for code generation but this is technically buggy where if // you write a binary that does `println!("{}", env!("OUT_DIR"))` we won't // recompile that if you move the target directory. Hopefully that's not too // bad of an issue for now... // // This also includes `CARGO` since if the code is explicitly wanting to // know that path, it should be rebuilt if it changes. The CARGO path is // not tracked elsewhere in the fingerprint. 
on_disk_info .env .retain(|(key, _)| !rustc_cmd.get_envs().contains_key(key) || key == CARGO_ENV); for file in depinfo.files { // The path may be absolute or relative, canonical or not. Make sure // it is canonicalized so we are comparing the same kinds of paths. let abs_file = rustc_cwd.join(file); // If canonicalization fails, just use the abs path. There is currently // a bug where --remap-path-prefix is affecting .d files, causing them // to point to non-existent paths. let canon_file = abs_file.canonicalize().unwrap_or_else(|_| abs_file.clone()); let (ty, path) = if let Ok(stripped) = canon_file.strip_prefix(&target_root) { (DepInfoPathType::TargetRootRelative, stripped) } else if let Ok(stripped) = canon_file.strip_prefix(&pkg_root) { if !allow_package { continue; } (DepInfoPathType::PackageRootRelative, stripped) } else { // It's definitely not target root relative, but this is an absolute path (since it was // joined to rustc_cwd) and as such re-joining it later to the target root will have no // effect. (DepInfoPathType::TargetRootRelative, &*abs_file) }; on_disk_info.files.push((ty, path.to_owned())); } paths::write(cargo_dep_info, on_disk_info.serialize()?)?; Ok(()) } #[derive(Default)] pub struct RustcDepInfo { /// The list of files that the main target in the dep-info file depends on. pub files: Vec, /// The list of environment variables we found that the rustc compilation /// depends on. /// /// The first element of the pair is the name of the env var and the second /// item is the value. `Some` means that the env var was set, and `None` /// means that the env var wasn't actually set and the compilation depends /// on it not being set. pub env: Vec<(String, Option)>, } // Same as `RustcDepInfo` except avoids absolute paths as much as possible to // allow moving around the target directory. // // This is also stored in an optimized format to make parsing it fast because // Cargo will read it for crates on all future compilations. 
#[derive(Default)] struct EncodedDepInfo { files: Vec<(DepInfoPathType, PathBuf)>, env: Vec<(String, Option)>, } impl EncodedDepInfo { fn parse(mut bytes: &[u8]) -> Option { let bytes = &mut bytes; let nfiles = read_usize(bytes)?; let mut files = Vec::with_capacity(nfiles as usize); for _ in 0..nfiles { let ty = match read_u8(bytes)? { 0 => DepInfoPathType::PackageRootRelative, 1 => DepInfoPathType::TargetRootRelative, _ => return None, }; let bytes = read_bytes(bytes)?; files.push((ty, paths::bytes2path(bytes).ok()?)); } let nenv = read_usize(bytes)?; let mut env = Vec::with_capacity(nenv as usize); for _ in 0..nenv { let key = str::from_utf8(read_bytes(bytes)?).ok()?.to_string(); let val = match read_u8(bytes)? { 0 => None, 1 => Some(str::from_utf8(read_bytes(bytes)?).ok()?.to_string()), _ => return None, }; env.push((key, val)); } return Some(EncodedDepInfo { files, env }); fn read_usize(bytes: &mut &[u8]) -> Option { let ret = bytes.get(..4)?; *bytes = &bytes[4..]; Some(u32::from_le_bytes(ret.try_into().unwrap()) as usize) } fn read_u8(bytes: &mut &[u8]) -> Option { let ret = *bytes.get(0)?; *bytes = &bytes[1..]; Some(ret) } fn read_bytes<'a>(bytes: &mut &'a [u8]) -> Option<&'a [u8]> { let n = read_usize(bytes)? 
as usize; let ret = bytes.get(..n)?; *bytes = &bytes[n..]; Some(ret) } } fn serialize(&self) -> CargoResult> { let mut ret = Vec::new(); let dst = &mut ret; write_usize(dst, self.files.len()); for (ty, file) in self.files.iter() { match ty { DepInfoPathType::PackageRootRelative => dst.push(0), DepInfoPathType::TargetRootRelative => dst.push(1), } write_bytes(dst, paths::path2bytes(file)?); } write_usize(dst, self.env.len()); for (key, val) in self.env.iter() { write_bytes(dst, key); match val { None => dst.push(0), Some(val) => { dst.push(1); write_bytes(dst, val); } } } return Ok(ret); fn write_bytes(dst: &mut Vec, val: impl AsRef<[u8]>) { let val = val.as_ref(); write_usize(dst, val.len()); dst.extend_from_slice(val); } fn write_usize(dst: &mut Vec, val: usize) { dst.extend(&u32::to_le_bytes(val as u32)); } } } /// Parse the `.d` dep-info file generated by rustc. pub fn parse_rustc_dep_info(rustc_dep_info: &Path) -> CargoResult { let contents = paths::read(rustc_dep_info)?; let mut ret = RustcDepInfo::default(); let mut found_deps = false; for line in contents.lines() { if let Some(rest) = line.strip_prefix("# env-dep:") { let mut parts = rest.splitn(2, '='); let env_var = match parts.next() { Some(s) => s, None => continue, }; let env_val = match parts.next() { Some(s) => Some(unescape_env(s)?), None => None, }; ret.env.push((unescape_env(env_var)?, env_val)); } else if let Some(pos) = line.find(": ") { if found_deps { continue; } found_deps = true; let mut deps = line[pos + 2..].split_whitespace(); while let Some(s) = deps.next() { let mut file = s.to_string(); while file.ends_with('\\') { file.pop(); file.push(' '); file.push_str(deps.next().ok_or_else(|| { internal("malformed dep-info format, trailing \\".to_string()) })?); } ret.files.push(file.into()); } } } return Ok(ret); // rustc tries to fit env var names and values all on a single line, which // means it needs to escape `\r` and `\n`. 
The escape syntax used is "\n" // which means that `\` also needs to be escaped. fn unescape_env(s: &str) -> CargoResult { let mut ret = String::with_capacity(s.len()); let mut chars = s.chars(); while let Some(c) = chars.next() { if c != '\\' { ret.push(c); continue; } match chars.next() { Some('\\') => ret.push('\\'), Some('n') => ret.push('\n'), Some('r') => ret.push('\r'), Some(c) => bail!("unknown escape character `{}`", c), None => bail!("unterminated escape character"), } } Ok(ret) } } cargo-0.66.0/src/cargo/core/compiler/future_incompat.rs000066400000000000000000000372641432416201200231030ustar00rootroot00000000000000//! Support for future-incompatible warning reporting. use crate::core::compiler::BuildContext; use crate::core::{Dependency, PackageId, QueryKind, Workspace}; use crate::sources::SourceConfigMap; use crate::util::{iter_join, CargoResult, Config}; use anyhow::{bail, format_err, Context}; use serde::{Deserialize, Serialize}; use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; use std::fmt::Write as _; use std::io::{Read, Write}; use std::task::Poll; pub const REPORT_PREAMBLE: &str = "\ The following warnings were discovered during the build. These warnings are an indication that the packages contain code that will become an error in a future release of Rust. These warnings typically cover changes to close soundness problems, unintended or undocumented behavior, or critical problems that cannot be fixed in a backwards-compatible fashion, and are not expected to be in wide use. Each warning should contain a link for more information on what the warning means and how to resolve it. "; /// Current version of the on-disk format. const ON_DISK_VERSION: u32 = 0; /// The future incompatibility report, emitted by the compiler as a JSON message. #[derive(serde::Deserialize)] pub struct FutureIncompatReport { pub future_incompat_report: Vec, } /// Structure used for collecting reports in-memory. 
pub struct FutureIncompatReportPackage { pub package_id: PackageId, pub items: Vec, } /// A single future-incompatible warning emitted by rustc. #[derive(Serialize, Deserialize)] pub struct FutureBreakageItem { /// The date at which this lint will become an error. /// Currently unused pub future_breakage_date: Option, /// The original diagnostic emitted by the compiler pub diagnostic: Diagnostic, } /// A diagnostic emitted by the compiler as a JSON message. /// We only care about the 'rendered' field #[derive(Serialize, Deserialize)] pub struct Diagnostic { pub rendered: String, pub level: String, } /// The filename in the top-level `target` directory where we store /// the report const FUTURE_INCOMPAT_FILE: &str = ".future-incompat-report.json"; /// Max number of reports to save on disk. const MAX_REPORTS: usize = 5; /// The structure saved to disk containing the reports. #[derive(Serialize, Deserialize)] pub struct OnDiskReports { /// A schema version number, to handle older cargo's from trying to read /// something that they don't understand. version: u32, /// The report ID to use for the next report to save. next_id: u32, /// Available reports. reports: Vec, } /// A single report for a given compilation session. #[derive(Serialize, Deserialize)] struct OnDiskReport { /// Unique reference to the report for the `--id` CLI flag. id: u32, /// A message describing suggestions for fixing the /// reported issues suggestion_message: String, /// Report, suitable for printing to the console. /// Maps package names to the corresponding report /// We use a `BTreeMap` so that the iteration order /// is stable across multiple runs of `cargo` per_package: BTreeMap, } impl Default for OnDiskReports { fn default() -> OnDiskReports { OnDiskReports { version: ON_DISK_VERSION, next_id: 1, reports: Vec::new(), } } } impl OnDiskReports { /// Saves a new report. 
pub fn save_report( mut self, ws: &Workspace<'_>, suggestion_message: String, per_package_reports: &[FutureIncompatReportPackage], ) { let report = OnDiskReport { id: self.next_id, suggestion_message, per_package: render_report(per_package_reports), }; self.next_id += 1; self.reports.push(report); if self.reports.len() > MAX_REPORTS { self.reports.remove(0); } let on_disk = serde_json::to_vec(&self).unwrap(); if let Err(e) = ws .target_dir() .open_rw( FUTURE_INCOMPAT_FILE, ws.config(), "Future incompatibility report", ) .and_then(|file| { let mut file = file.file(); file.set_len(0)?; file.write_all(&on_disk)?; Ok(()) }) { crate::display_warning_with_error( "failed to write on-disk future incompatible report", &e, &mut ws.config().shell(), ); } } /// Loads the on-disk reports. pub fn load(ws: &Workspace<'_>) -> CargoResult { let report_file = match ws.target_dir().open_ro( FUTURE_INCOMPAT_FILE, ws.config(), "Future incompatible report", ) { Ok(r) => r, Err(e) => { if let Some(io_err) = e.downcast_ref::() { if io_err.kind() == std::io::ErrorKind::NotFound { bail!("no reports are currently available"); } } return Err(e); } }; let mut file_contents = String::new(); report_file .file() .read_to_string(&mut file_contents) .with_context(|| "failed to read report")?; let on_disk_reports: OnDiskReports = serde_json::from_str(&file_contents).with_context(|| "failed to load report")?; if on_disk_reports.version != ON_DISK_VERSION { bail!("unable to read reports; reports were saved from a future version of Cargo"); } Ok(on_disk_reports) } /// Returns the most recent report ID. 
pub fn last_id(&self) -> u32 { self.reports.last().map(|r| r.id).unwrap() } pub fn get_report( &self, id: u32, config: &Config, package: Option<&str>, ) -> CargoResult { let report = self.reports.iter().find(|r| r.id == id).ok_or_else(|| { let available = iter_join(self.reports.iter().map(|r| r.id.to_string()), ", "); format_err!( "could not find report with ID {}\n\ Available IDs are: {}", id, available ) })?; let mut to_display = report.suggestion_message.clone(); to_display += "\n"; let package_report = if let Some(package) = package { report .per_package .get(package) .ok_or_else(|| { format_err!( "could not find package with ID `{}`\n Available packages are: {}\n Omit the `--package` flag to display a report for all packages", package, iter_join(report.per_package.keys(), ", ") ) })? .to_string() } else { report .per_package .values() .cloned() .collect::>() .join("\n") }; to_display += &package_report; let shell = config.shell(); let to_display = if shell.err_supports_color() && shell.out_supports_color() { to_display } else { strip_ansi_escapes::strip(&to_display) .map(|v| String::from_utf8(v).expect("utf8")) .expect("strip should never fail") }; Ok(to_display) } } fn render_report(per_package_reports: &[FutureIncompatReportPackage]) -> BTreeMap { let mut report: BTreeMap = BTreeMap::new(); for per_package in per_package_reports { let package_spec = format!( "{}@{}", per_package.package_id.name(), per_package.package_id.version() ); let rendered = report.entry(package_spec).or_default(); rendered.push_str(&format!( "The package `{}` currently triggers the following future incompatibility lints:\n", per_package.package_id )); for item in &per_package.items { rendered.extend( item.diagnostic .rendered .lines() .map(|l| format!("> {}\n", l)), ); } } report } /// Returns a user-readable message explaining which of /// the packages in `package_ids` have updates available. /// This is best-effort - if an error occurs, `None` will be returned. 
fn get_updates(ws: &Workspace<'_>, package_ids: &BTreeSet) -> Option { // This in general ignores all errors since this is opportunistic. let _lock = ws.config().acquire_package_cache_lock().ok()?; // Create a set of updated registry sources. let map = SourceConfigMap::new(ws.config()).ok()?; let mut package_ids: BTreeSet<_> = package_ids .iter() .filter(|pkg_id| pkg_id.source_id().is_registry()) .collect(); let source_ids: HashSet<_> = package_ids .iter() .map(|pkg_id| pkg_id.source_id()) .collect(); let mut sources: HashMap<_, _> = source_ids .into_iter() .filter_map(|sid| { let source = map.load(sid, &HashSet::new()).ok()?; Some((sid, source)) }) .collect(); // Query the sources for new versions, mapping `package_ids` into `summaries`. let mut summaries = Vec::new(); while !package_ids.is_empty() { package_ids.retain(|&pkg_id| { let source = match sources.get_mut(&pkg_id.source_id()) { Some(s) => s, None => return false, }; let dep = match Dependency::parse(pkg_id.name(), None, pkg_id.source_id()) { Ok(dep) => dep, Err(_) => return false, }; match source.query_vec(&dep, QueryKind::Exact) { Poll::Ready(Ok(sum)) => { summaries.push((pkg_id, sum)); false } Poll::Ready(Err(_)) => false, Poll::Pending => true, } }); for (_, source) in sources.iter_mut() { source.block_until_ready().ok()?; } } let mut updates = String::new(); for (pkg_id, summaries) in summaries { let mut updated_versions: Vec<_> = summaries .iter() .map(|summary| summary.version()) .filter(|version| *version > pkg_id.version()) .collect(); updated_versions.sort(); let updated_versions = iter_join( updated_versions .into_iter() .map(|version| version.to_string()), ", ", ); if !updated_versions.is_empty() { writeln!( updates, "{} has the following newer versions available: {}", pkg_id, updated_versions ) .unwrap(); } } Some(updates) } /// Writes a future-incompat report to disk, using the per-package /// reports gathered during the build. 
If requested by the user, /// a message is also displayed in the build output. pub fn save_and_display_report( bcx: &BuildContext<'_, '_>, per_package_future_incompat_reports: &[FutureIncompatReportPackage], ) { let should_display_message = match bcx.config.future_incompat_config() { Ok(config) => config.should_display_message(), Err(e) => { crate::display_warning_with_error( "failed to read future-incompat config from disk", &e, &mut bcx.config.shell(), ); true } }; if per_package_future_incompat_reports.is_empty() { // Explicitly passing a command-line flag overrides // `should_display_message` from the config file if bcx.build_config.future_incompat_report { drop( bcx.config .shell() .note("0 dependencies had future-incompatible warnings"), ); } return; } let current_reports = match OnDiskReports::load(bcx.ws) { Ok(r) => r, Err(e) => { log::debug!( "saving future-incompatible reports failed to load current reports: {:?}", e ); OnDiskReports::default() } }; let report_id = current_reports.next_id; // Get a list of unique and sorted package name/versions. let package_ids: BTreeSet<_> = per_package_future_incompat_reports .iter() .map(|r| r.package_id) .collect(); let package_vers: Vec<_> = package_ids.iter().map(|pid| pid.to_string()).collect(); if should_display_message || bcx.build_config.future_incompat_report { drop(bcx.config.shell().warn(&format!( "the following packages contain code that will be rejected by a future \ version of Rust: {}", package_vers.join(", ") ))); } let updated_versions = get_updates(bcx.ws, &package_ids).unwrap_or(String::new()); let update_message = if !updated_versions.is_empty() { format!( " - Some affected dependencies have newer versions available. You may want to consider updating them to a newer version to see if the issue has been fixed. 
{updated_versions}\n", updated_versions = updated_versions ) } else { String::new() }; let upstream_info = package_ids .iter() .map(|package_id| { let manifest = bcx.packages.get_one(*package_id).unwrap().manifest(); format!( " - {package_spec} - Repository: {url} - Detailed warning command: `cargo report future-incompatibilities --id {id} --package {package_spec}`", package_spec = format!("{}@{}", package_id.name(), package_id.version()), url = manifest .metadata() .repository .as_deref() .unwrap_or(""), id = report_id, ) }) .collect::>() .join("\n"); let suggestion_message = format!( " To solve this problem, you can try the following approaches: {update_message} - If the issue is not solved by updating the dependencies, a fix has to be implemented by those dependencies. You can help with that by notifying the maintainers of this problem (e.g. by creating a bug report) or by proposing a fix to the maintainers (e.g. by creating a pull request): {upstream_info} - If waiting for an upstream fix is not an option, you can use the `[patch]` section in `Cargo.toml` to use your own version of the dependency. 
For more information, see: https://doc.rust-lang.org/cargo/reference/overriding-dependencies.html#the-patch-section ", upstream_info = upstream_info, update_message = update_message, ); current_reports.save_report( bcx.ws, suggestion_message.clone(), per_package_future_incompat_reports, ); if bcx.build_config.future_incompat_report { drop(bcx.config.shell().note(&suggestion_message)); drop(bcx.config.shell().note(&format!( "this report can be shown with `cargo report \ future-incompatibilities --id {}`", report_id ))); } else if should_display_message { drop(bcx.config.shell().note(&format!( "to see what the problems were, use the option \ `--future-incompat-report`, or run `cargo report \ future-incompatibilities --id {}`", report_id ))); } } cargo-0.66.0/src/cargo/core/compiler/job.rs000066400000000000000000000043371432416201200204440ustar00rootroot00000000000000use std::fmt; use std::mem; use super::job_queue::JobState; use crate::util::CargoResult; pub struct Job { work: Work, fresh: Freshness, } /// Each proc should send its description before starting. /// It should send either once or close immediately. pub struct Work { inner: Box) -> CargoResult<()> + Send>, } impl Work { pub fn new(f: F) -> Work where F: FnOnce(&JobState<'_, '_>) -> CargoResult<()> + Send + 'static, { Work { inner: Box::new(f) } } pub fn noop() -> Work { Work::new(|_| Ok(())) } pub fn call(self, tx: &JobState<'_, '_>) -> CargoResult<()> { (self.inner)(tx) } pub fn then(self, next: Work) -> Work { Work::new(move |state| { self.call(state)?; next.call(state) }) } } impl Job { /// Creates a new job that does nothing. pub fn new_fresh() -> Job { Job { work: Work::noop(), fresh: Freshness::Fresh, } } /// Creates a new job representing a unit of work. pub fn new_dirty(work: Work) -> Job { Job { work, fresh: Freshness::Dirty, } } /// Consumes this job by running it, returning the result of the /// computation. 
pub fn run(self, state: &JobState<'_, '_>) -> CargoResult<()> { self.work.call(state) } /// Returns whether this job was fresh/dirty, where "fresh" means we're /// likely to perform just some small bookkeeping where "dirty" means we'll /// probably do something slow like invoke rustc. pub fn freshness(&self) -> Freshness { self.fresh } pub fn before(&mut self, next: Work) { let prev = mem::replace(&mut self.work, Work::noop()); self.work = next.then(prev); } } impl fmt::Debug for Job { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Job {{ ... }}") } } /// Indication of the freshness of a package. /// /// A fresh package does not necessarily need to be rebuilt (unless a dependency /// was also rebuilt), and a dirty package must always be rebuilt. #[derive(PartialEq, Eq, Debug, Clone, Copy)] pub enum Freshness { Fresh, Dirty, } cargo-0.66.0/src/cargo/core/compiler/job_queue.rs000066400000000000000000001453641432416201200216560ustar00rootroot00000000000000//! This module implements the job queue which determines the ordering in which //! rustc is spawned off. It also manages the allocation of jobserver tokens to //! rustc beyond the implicit token each rustc owns (i.e., the ones used for //! parallel LLVM work and parallel rustc threads). //! //! Cargo and rustc have a somewhat non-trivial jobserver relationship with each //! other, which is due to scaling issues with sharing a single jobserver //! amongst what is potentially hundreds of threads of work on many-cored //! systems on (at least) linux, and likely other platforms as well. //! //! The details of this algorithm are (also) written out in //! src/librustc_jobserver/lib.rs. What follows is a description focusing on the //! Cargo side of things. //! //! Cargo wants to complete the build as quickly as possible, fully saturating //! all cores (as constrained by the -j=N) parameter. Cargo also must not spawn //! more than N threads of work: the total amount of tokens we have floating //! 
around must always be limited to N. //! //! It is not really possible to optimally choose which crate should build first //! or last; nor is it possible to decide whether to give an additional token to //! rustc first or rather spawn a new crate of work. For now, the algorithm we //! implement prioritizes spawning as many crates (i.e., rustc processes) as //! possible, and then filling each rustc with tokens on demand. //! //! The primary loop is in `drain_the_queue` below. //! //! We integrate with the jobserver, originating from GNU make, to make sure //! that build scripts which use make to build C code can cooperate with us on //! the number of used tokens and avoid overfilling the system we're on. //! //! The jobserver is unfortunately a very simple protocol, so we enhance it a //! little when we know that there is a rustc on the other end. Via the stderr //! pipe we have to rustc, we get messages such as "NeedsToken" and //! "ReleaseToken" from rustc. //! //! "NeedsToken" indicates that a rustc is interested in acquiring a token, but //! never that it would be impossible to make progress without one (i.e., it //! would be incorrect for rustc to not terminate due to an unfulfilled //! NeedsToken request); we do not usually fulfill all NeedsToken requests for a //! given rustc. //! //! "ReleaseToken" indicates that a rustc is done with one of its tokens and is //! ready for us to re-acquire ownership -- we will either release that token //! back into the general pool or reuse it ourselves. Note that rustc will //! inform us that it is releasing a token even if it itself is also requesting //! tokens; is is up to us whether to return the token to that same rustc. //! //! The current scheduling algorithm is relatively primitive and could likely be //! improved. 
use std::cell::{Cell, RefCell}; use std::collections::{BTreeMap, HashMap, HashSet}; use std::fmt::Write as _; use std::io; use std::marker; use std::sync::Arc; use std::thread::{self, Scope}; use std::time::Duration; use anyhow::{format_err, Context as _}; use cargo_util::ProcessBuilder; use jobserver::{Acquired, Client, HelperThread}; use log::{debug, trace}; use semver::Version; use super::context::OutputFile; use super::job::{ Freshness::{self, Dirty, Fresh}, Job, }; use super::timings::Timings; use super::{BuildContext, BuildPlan, CompileMode, Context, Unit}; use crate::core::compiler::future_incompat::{ self, FutureBreakageItem, FutureIncompatReportPackage, }; use crate::core::resolver::ResolveBehavior; use crate::core::{PackageId, Shell, TargetKind}; use crate::util::diagnostic_server::{self, DiagnosticPrinter}; use crate::util::errors::AlreadyPrintedError; use crate::util::machine_message::{self, Message as _}; use crate::util::CargoResult; use crate::util::{self, internal, profile}; use crate::util::{Config, DependencyQueue, Progress, ProgressStyle, Queue}; /// This structure is backed by the `DependencyQueue` type and manages the /// queueing of compilation steps for each package. Packages enqueue units of /// work and then later on the entire graph is converted to DrainState and /// executed. pub struct JobQueue<'cfg> { queue: DependencyQueue, counts: HashMap, timings: Timings<'cfg>, } /// This structure is backed by the `DependencyQueue` type and manages the /// actual compilation step of each package. Packages enqueue units of work and /// then later on the entire graph is processed and compiled. /// /// It is created from JobQueue when we have fully assembled the crate graph /// (i.e., all package dependencies are known). /// /// # Message queue /// /// Each thread running a process uses the message queue to send messages back /// to the main thread. The main thread coordinates everything, and handles /// printing output. 
/// /// It is important to be careful which messages use `push` vs `push_bounded`. /// `push` is for priority messages (like tokens, or "finished") where the /// sender shouldn't block. We want to handle those so real work can proceed /// ASAP. /// /// `push_bounded` is only for messages being printed to stdout/stderr. Being /// bounded prevents a flood of messages causing a large amount of memory /// being used. /// /// `push` also avoids blocking which helps avoid deadlocks. For example, when /// the diagnostic server thread is dropped, it waits for the thread to exit. /// But if the thread is blocked on a full queue, and there is a critical /// error, the drop will deadlock. This should be fixed at some point in the /// future. The jobserver thread has a similar problem, though it will time /// out after 1 second. struct DrainState<'cfg> { // This is the length of the DependencyQueue when starting out total_units: usize, queue: DependencyQueue, messages: Arc>, /// Diagnostic deduplication support. diag_dedupe: DiagDedupe<'cfg>, /// Count of warnings, used to print a summary after the job succeeds. /// /// First value is the total number of warnings, and the second value is /// the number that were suppressed because they were duplicates of a /// previous warning. warning_count: HashMap, active: HashMap, compiled: HashSet, documented: HashSet, counts: HashMap, progress: Progress<'cfg>, next_id: u32, timings: Timings<'cfg>, /// Tokens that are currently owned by this Cargo, and may be "associated" /// with a rustc process. They may also be unused, though if so will be /// dropped on the next loop iteration. /// /// Note that the length of this may be zero, but we will still spawn work, /// as we share the implicit token given to this Cargo process with a /// single rustc process. tokens: Vec, /// rustc per-thread tokens, when in jobserver-per-rustc mode. 
rustc_tokens: HashMap>, /// This represents the list of rustc jobs (processes) and associated /// clients that are interested in receiving a token. to_send_clients: BTreeMap>, /// The list of jobs that we have not yet started executing, but have /// retrieved from the `queue`. We eagerly pull jobs off the main queue to /// allow us to request jobserver tokens pretty early. pending_queue: Vec<(Unit, Job, usize)>, print: DiagnosticPrinter<'cfg>, /// How many jobs we've finished finished: usize, per_package_future_incompat_reports: Vec, } pub struct ErrorsDuringDrain { pub count: usize, } struct ErrorToHandle { error: anyhow::Error, /// This field is true for "interesting" errors and false for "mundane" /// errors. If false, we print the above error only if it's the first one /// encountered so far while draining the job queue. /// /// At most places that an error is propagated, we set this to false to /// avoid scenarios where Cargo might end up spewing tons of redundant error /// messages. For example if an i/o stream got closed somewhere, we don't /// care about individually reporting every thread that it broke; just the /// first is enough. /// /// The exception where print_always is true is that we do report every /// instance of a rustc invocation that failed with diagnostics. This /// corresponds to errors from Message::Finish. print_always: bool, } impl From for ErrorToHandle where anyhow::Error: From, { fn from(error: E) -> Self { ErrorToHandle { error: anyhow::Error::from(error), print_always: false, } } } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct JobId(pub u32); impl std::fmt::Display for JobId { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.0) } } /// A `JobState` is constructed by `JobQueue::run` and passed to `Job::run`. It includes everything /// necessary to communicate between the main thread and the execution of the job. 
/// /// The job may execute on either a dedicated thread or the main thread. If the job executes on the /// main thread, the `output` field must be set to prevent a deadlock. pub struct JobState<'a, 'cfg> { /// Channel back to the main thread to coordinate messages and such. /// /// When the `output` field is `Some`, care must be taken to avoid calling `push_bounded` on /// the message queue to prevent a deadlock. messages: Arc>, /// Normally output is sent to the job queue with backpressure. When the job is fresh /// however we need to immediately display the output to prevent a deadlock as the /// output messages are processed on the same thread as they are sent from. `output` /// defines where to output in this case. /// /// Currently the `Shell` inside `Config` is wrapped in a `RefCell` and thus can't be passed /// between threads. This means that it isn't possible for multiple output messages to be /// interleaved. In the future, it may be wrapped in a `Mutex` instead. In this case /// interleaving is still prevented as the lock would be held for the whole printing of an /// output message. output: Option<&'a DiagDedupe<'cfg>>, /// The job id that this state is associated with, used when sending /// messages back to the main thread. id: JobId, /// Whether or not we're expected to have a call to `rmeta_produced`. Once /// that method is called this is dynamically set to `false` to prevent /// sending a double message later on. rmeta_required: Cell, // Historical versions of Cargo made use of the `'a` argument here, so to // leave the door open to future refactorings keep it here. _marker: marker::PhantomData<&'a ()>, } /// Handler for deduplicating diagnostics. struct DiagDedupe<'cfg> { seen: RefCell>, config: &'cfg Config, } impl<'cfg> DiagDedupe<'cfg> { fn new(config: &'cfg Config) -> Self { DiagDedupe { seen: RefCell::new(HashSet::new()), config, } } /// Emits a diagnostic message. 
/// /// Returns `true` if the message was emitted, or `false` if it was /// suppressed for being a duplicate. fn emit_diag(&self, diag: &str) -> CargoResult { let h = util::hash_u64(diag); if !self.seen.borrow_mut().insert(h) { return Ok(false); } let mut shell = self.config.shell(); shell.print_ansi_stderr(diag.as_bytes())?; shell.err().write_all(b"\n")?; Ok(true) } } /// Possible artifacts that can be produced by compilations, used as edge values /// in the dependency graph. /// /// As edge values we can have multiple kinds of edges depending on one node, /// for example some units may only depend on the metadata for an rlib while /// others depend on the full rlib. This `Artifact` enum is used to distinguish /// this case and track the progress of compilations as they proceed. #[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)] enum Artifact { /// A generic placeholder for "depends on everything run by a step" and /// means that we can't start the next compilation until the previous has /// finished entirely. All, /// A node indicating that we only depend on the metadata of a compilation, /// but the compilation is typically also producing an rlib. We can start /// our step, however, before the full rlib is available. Metadata, } enum Message { Run(JobId, String), BuildPlanMsg(String, ProcessBuilder, Arc>), Stdout(String), Stderr(String), Diagnostic { id: JobId, level: String, diag: String, }, WarningCount { id: JobId, emitted: bool, }, FixDiagnostic(diagnostic_server::Message), Token(io::Result), Finish(JobId, Artifact, CargoResult<()>), FutureIncompatReport(JobId, Vec), // This client should get release_raw called on it with one of our tokens NeedsToken(JobId), // A token previously passed to a NeedsToken client is being released. 
ReleaseToken(JobId), } impl<'a, 'cfg> JobState<'a, 'cfg> { pub fn running(&self, cmd: &ProcessBuilder) { self.messages.push(Message::Run(self.id, cmd.to_string())); } pub fn build_plan( &self, module_name: String, cmd: ProcessBuilder, filenames: Arc>, ) { self.messages .push(Message::BuildPlanMsg(module_name, cmd, filenames)); } pub fn stdout(&self, stdout: String) -> CargoResult<()> { if let Some(dedupe) = self.output { writeln!(dedupe.config.shell().out(), "{}", stdout)?; } else { self.messages.push_bounded(Message::Stdout(stdout)); } Ok(()) } pub fn stderr(&self, stderr: String) -> CargoResult<()> { if let Some(dedupe) = self.output { let mut shell = dedupe.config.shell(); shell.print_ansi_stderr(stderr.as_bytes())?; shell.err().write_all(b"\n")?; } else { self.messages.push_bounded(Message::Stderr(stderr)); } Ok(()) } pub fn emit_diag(&self, level: String, diag: String) -> CargoResult<()> { if let Some(dedupe) = self.output { let emitted = dedupe.emit_diag(&diag)?; if level == "warning" { self.messages.push(Message::WarningCount { id: self.id, emitted, }); } } else { self.messages.push_bounded(Message::Diagnostic { id: self.id, level, diag, }); } Ok(()) } /// A method used to signal to the coordinator thread that the rmeta file /// for an rlib has been produced. This is only called for some rmeta /// builds when required, and can be called at any time before a job ends. /// This should only be called once because a metadata file can only be /// produced once! pub fn rmeta_produced(&self) { self.rmeta_required.set(false); self.messages .push(Message::Finish(self.id, Artifact::Metadata, Ok(()))); } pub fn future_incompat_report(&self, report: Vec) { self.messages .push(Message::FutureIncompatReport(self.id, report)); } /// The rustc underlying this Job is about to acquire a jobserver token (i.e., block) /// on the passed client. /// /// This should arrange for the associated client to eventually get a token via /// `client.release_raw()`. 
pub fn will_acquire(&self) { self.messages.push(Message::NeedsToken(self.id)); } /// The rustc underlying this Job is informing us that it is done with a jobserver token. /// /// Note that it does *not* write that token back anywhere. pub fn release_token(&self) { self.messages.push(Message::ReleaseToken(self.id)); } } impl<'cfg> JobQueue<'cfg> { pub fn new(bcx: &BuildContext<'_, 'cfg>) -> JobQueue<'cfg> { JobQueue { queue: DependencyQueue::new(), counts: HashMap::new(), timings: Timings::new(bcx, &bcx.roots), } } pub fn enqueue(&mut self, cx: &Context<'_, 'cfg>, unit: &Unit, job: Job) -> CargoResult<()> { let dependencies = cx.unit_deps(unit); let mut queue_deps = dependencies .iter() .filter(|dep| { // Binaries aren't actually needed to *compile* tests, just to run // them, so we don't include this dependency edge in the job graph. (!dep.unit.target.is_test() && !dep.unit.target.is_bin()) || dep.unit.artifact.is_true() }) .map(|dep| { // Handle the case here where our `unit -> dep` dependency may // only require the metadata, not the full compilation to // finish. Use the tables in `cx` to figure out what kind // of artifact is associated with this dependency. let artifact = if cx.only_requires_rmeta(unit, &dep.unit) { Artifact::Metadata } else { Artifact::All }; (dep.unit.clone(), artifact) }) .collect::>(); // This is somewhat tricky, but we may need to synthesize some // dependencies for this target if it requires full upstream // compilations to have completed. Because of pipelining, some // dependency edges may be `Metadata` due to the above clause (as // opposed to everything being `All`). For example consider: // // a (binary) // β”” b (lib) // β”” c (lib) // // Here the dependency edge from B to C will be `Metadata`, and the // dependency edge from A to B will be `All`. For A to be compiled, // however, it currently actually needs the full rlib of C. This means // that we need to synthesize a dependency edge for the dependency graph // from A to C. 
That's done here. // // This will walk all dependencies of the current target, and if any of // *their* dependencies are `Metadata` then we depend on the `All` of // the target as well. This should ensure that edges changed to // `Metadata` propagate upwards `All` dependencies to anything that // transitively contains the `Metadata` edge. if unit.requires_upstream_objects() { for dep in dependencies { depend_on_deps_of_deps(cx, &mut queue_deps, dep.unit.clone()); } fn depend_on_deps_of_deps( cx: &Context<'_, '_>, deps: &mut HashMap, unit: Unit, ) { for dep in cx.unit_deps(&unit) { if deps.insert(dep.unit.clone(), Artifact::All).is_none() { depend_on_deps_of_deps(cx, deps, dep.unit.clone()); } } } } // For now we use a fixed placeholder value for the cost of each unit, but // in the future this could be used to allow users to provide hints about // relative expected costs of units, or this could be automatically set in // a smarter way using timing data from a previous compilation. self.queue.queue(unit.clone(), job, queue_deps, 100); *self.counts.entry(unit.pkg.package_id()).or_insert(0) += 1; Ok(()) } /// Executes all jobs necessary to build the dependency graph. /// /// This function will spawn off `config.jobs()` workers to build all of the /// necessary dependencies, in order. Freshness is propagated as far as /// possible along each dependency chain. pub fn execute(mut self, cx: &mut Context<'_, '_>, plan: &mut BuildPlan) -> CargoResult<()> { let _p = profile::start("executing the job graph"); self.queue.queue_finished(); let progress = Progress::with_style("Building", ProgressStyle::Ratio, cx.bcx.config); let state = DrainState { total_units: self.queue.len(), queue: self.queue, // 100 here is somewhat arbitrary. It is a few screenfulls of // output, and hopefully at most a few megabytes of memory for // typical messages. If you change this, please update the test // caching_large_output, too. 
messages: Arc::new(Queue::new(100)), diag_dedupe: DiagDedupe::new(cx.bcx.config), warning_count: HashMap::new(), active: HashMap::new(), compiled: HashSet::new(), documented: HashSet::new(), counts: self.counts, progress, next_id: 0, timings: self.timings, tokens: Vec::new(), rustc_tokens: HashMap::new(), to_send_clients: BTreeMap::new(), pending_queue: Vec::new(), print: DiagnosticPrinter::new(cx.bcx.config), finished: 0, per_package_future_incompat_reports: Vec::new(), }; // Create a helper thread for acquiring jobserver tokens let messages = state.messages.clone(); let helper = cx .jobserver .clone() .into_helper_thread(move |token| { messages.push(Message::Token(token)); }) .with_context(|| "failed to create helper thread for jobserver management")?; // Create a helper thread to manage the diagnostics for rustfix if // necessary. let messages = state.messages.clone(); // It is important that this uses `push` instead of `push_bounded` for // now. If someone wants to fix this to be bounded, the `drop` // implementation needs to be changed to avoid possible deadlocks. let _diagnostic_server = cx .bcx .build_config .rustfix_diagnostic_server .borrow_mut() .take() .map(move |srv| srv.start(move |msg| messages.push(Message::FixDiagnostic(msg)))); thread::scope( move |scope| match state.drain_the_queue(cx, plan, scope, &helper) { Some(err) => Err(err), None => Ok(()), }, ) } } impl<'cfg> DrainState<'cfg> { fn spawn_work_if_possible<'s>( &mut self, cx: &mut Context<'_, '_>, jobserver_helper: &HelperThread, scope: &'s Scope<'s, '_>, ) -> CargoResult<()> { // Dequeue as much work as we can, learning about everything // possible that can run. Note that this is also the point where we // start requesting job tokens. Each job after the first needs to // request a token. 
while let Some((unit, job, priority)) = self.queue.dequeue() { // We want to keep the pieces of work in the `pending_queue` sorted // by their priorities, and insert the current job at its correctly // sorted position: following the lower priority jobs, and the ones // with the same priority (since they were dequeued before the // current one, we also keep that relation). let idx = self .pending_queue .partition_point(|&(_, _, p)| p <= priority); self.pending_queue.insert(idx, (unit, job, priority)); if self.active.len() + self.pending_queue.len() > 1 { jobserver_helper.request_token(); } } // Now that we've learned of all possible work that we can execute // try to spawn it so long as we've got a jobserver token which says // we're able to perform some parallel work. // The `pending_queue` is sorted in ascending priority order, and we // remove items from its end to schedule the highest priority items // sooner. while self.has_extra_tokens() && !self.pending_queue.is_empty() { let (unit, job, _) = self.pending_queue.pop().unwrap(); *self.counts.get_mut(&unit.pkg.package_id()).unwrap() -= 1; if !cx.bcx.build_config.build_plan { // Print out some nice progress information. // NOTE: An error here will drop the job without starting it. // That should be OK, since we want to exit as soon as // possible during an error. self.note_working_on(cx.bcx.config, &unit, job.freshness())?; } self.run(&unit, job, cx, scope); } Ok(()) } fn has_extra_tokens(&self) -> bool { self.active.len() < self.tokens.len() + 1 } // The oldest job (i.e., least job ID) is the one we grant tokens to first. fn pop_waiting_client(&mut self) -> (JobId, Client) { // FIXME: replace this with BTreeMap::first_entry when that stabilizes. 
let key = *self .to_send_clients .keys() .next() .expect("at least one waiter"); let clients = self.to_send_clients.get_mut(&key).unwrap(); let client = clients.pop().unwrap(); if clients.is_empty() { self.to_send_clients.remove(&key); } (key, client) } // If we managed to acquire some extra tokens, send them off to a waiting rustc. fn grant_rustc_token_requests(&mut self) -> CargoResult<()> { while !self.to_send_clients.is_empty() && self.has_extra_tokens() { let (id, client) = self.pop_waiting_client(); // This unwrap is guaranteed to succeed. `active` must be at least // length 1, as otherwise there can't be a client waiting to be sent // on, so tokens.len() must also be at least one. let token = self.tokens.pop().unwrap(); self.rustc_tokens .entry(id) .or_insert_with(Vec::new) .push(token); client .release_raw() .with_context(|| "failed to release jobserver token")?; } Ok(()) } fn handle_event( &mut self, cx: &mut Context<'_, '_>, jobserver_helper: &HelperThread, plan: &mut BuildPlan, event: Message, ) -> Result<(), ErrorToHandle> { match event { Message::Run(id, cmd) => { cx.bcx .config .shell() .verbose(|c| c.status("Running", &cmd))?; self.timings.unit_start(id, self.active[&id].clone()); } Message::BuildPlanMsg(module_name, cmd, filenames) => { plan.update(&module_name, &cmd, &filenames)?; } Message::Stdout(out) => { writeln!(cx.bcx.config.shell().out(), "{}", out)?; } Message::Stderr(err) => { let mut shell = cx.bcx.config.shell(); shell.print_ansi_stderr(err.as_bytes())?; shell.err().write_all(b"\n")?; } Message::Diagnostic { id, level, diag } => { let emitted = self.diag_dedupe.emit_diag(&diag)?; if level == "warning" { self.bump_warning_count(id, emitted); } } Message::WarningCount { id, emitted } => { self.bump_warning_count(id, emitted); } Message::FixDiagnostic(msg) => { self.print.print(&msg)?; } Message::Finish(id, artifact, result) => { let unit = match artifact { // If `id` has completely finished we remove it // from the `active` map ... 
Artifact::All => { trace!("end: {:?}", id); self.finished += 1; if let Some(rustc_tokens) = self.rustc_tokens.remove(&id) { // This puts back the tokens that this rustc // acquired into our primary token list. // // This represents a rustc bug: it did not // release all of its thread tokens but finished // completely. But we want to make Cargo resilient // to such rustc bugs, as they're generally not // fatal in nature (i.e., Cargo can make progress // still, and the build might not even fail). self.tokens.extend(rustc_tokens); } self.to_send_clients.remove(&id); self.report_warning_count(cx.bcx.config, id); self.active.remove(&id).unwrap() } // ... otherwise if it hasn't finished we leave it // in there as we'll get another `Finish` later on. Artifact::Metadata => { trace!("end (meta): {:?}", id); self.active[&id].clone() } }; debug!("end ({:?}): {:?}", unit, result); match result { Ok(()) => self.finish(id, &unit, artifact, cx)?, Err(error) => { let msg = "The following warnings were emitted during compilation:"; self.emit_warnings(Some(msg), &unit, cx)?; self.back_compat_notice(cx, &unit)?; return Err(ErrorToHandle { error, print_always: true, }); } } } Message::FutureIncompatReport(id, items) => { let package_id = self.active[&id].pkg.package_id(); self.per_package_future_incompat_reports .push(FutureIncompatReportPackage { package_id, items }); } Message::Token(acquired_token) => { let token = acquired_token.with_context(|| "failed to acquire jobserver token")?; self.tokens.push(token); } Message::NeedsToken(id) => { trace!("queue token request"); jobserver_helper.request_token(); let client = cx.rustc_clients[&self.active[&id]].clone(); self.to_send_clients .entry(id) .or_insert_with(Vec::new) .push(client); } Message::ReleaseToken(id) => { // Note that this pops off potentially a completely // different token, but all tokens of the same job are // conceptually the same so that's fine. 
// // self.tokens is a "pool" -- the order doesn't matter -- and // this transfers ownership of the token into that pool. If we // end up using it on the next go around, then this token will // be truncated, same as tokens obtained through Message::Token. let rustc_tokens = self .rustc_tokens .get_mut(&id) .expect("no tokens associated"); self.tokens .push(rustc_tokens.pop().expect("rustc releases token it has")); } } Ok(()) } // This will also tick the progress bar as appropriate fn wait_for_events(&mut self) -> Vec { // Drain all events at once to avoid displaying the progress bar // unnecessarily. If there's no events we actually block waiting for // an event, but we keep a "heartbeat" going to allow `record_cpu` // to run above to calculate CPU usage over time. To do this we // listen for a message with a timeout, and on timeout we run the // previous parts of the loop again. let mut events = self.messages.try_pop_all(); trace!( "tokens in use: {}, rustc_tokens: {:?}, waiting_rustcs: {:?} (events this tick: {})", self.tokens.len(), self.rustc_tokens .iter() .map(|(k, j)| (k, j.len())) .collect::>(), self.to_send_clients .iter() .map(|(k, j)| (k, j.len())) .collect::>(), events.len(), ); if events.is_empty() { loop { self.tick_progress(); self.tokens.truncate(self.active.len() - 1); match self.messages.pop(Duration::from_millis(500)) { Some(message) => { events.push(message); break; } None => continue, } } } events } /// This is the "main" loop, where Cargo does all work to run the /// compiler. /// /// This returns an Option to prevent the use of `?` on `Result` types /// because it is important for the loop to carefully handle errors. fn drain_the_queue<'s>( mut self, cx: &mut Context<'_, '_>, plan: &mut BuildPlan, scope: &'s Scope<'s, '_>, jobserver_helper: &HelperThread, ) -> Option { trace!("queue: {:#?}", self.queue); // Iteratively execute the entire dependency graph. 
Each turn of the // loop starts out by scheduling as much work as possible (up to the // maximum number of parallel jobs we have tokens for). A local queue // is maintained separately from the main dependency queue as one // dequeue may actually dequeue quite a bit of work (e.g., 10 binaries // in one package). // // After a job has finished we update our internal state if it was // successful and otherwise wait for pending work to finish if it failed // and then immediately return (or keep going, if requested by the build // config). let mut errors = ErrorsDuringDrain { count: 0 }; // CAUTION! Do not use `?` or break out of the loop early. Every error // must be handled in such a way that the loop is still allowed to // drain event messages. loop { if errors.count == 0 || cx.bcx.build_config.keep_going { if let Err(e) = self.spawn_work_if_possible(cx, jobserver_helper, scope) { self.handle_error(&mut cx.bcx.config.shell(), &mut errors, e); } } // If after all that we're not actually running anything then we're // done! if self.active.is_empty() { break; } if let Err(e) = self.grant_rustc_token_requests() { self.handle_error(&mut cx.bcx.config.shell(), &mut errors, e); } // And finally, before we block waiting for the next event, drop any // excess tokens we may have accidentally acquired. Due to how our // jobserver interface is architected we may acquire a token that we // don't actually use, and if this happens just relinquish it back // to the jobserver itself. for event in self.wait_for_events() { if let Err(event_err) = self.handle_event(cx, jobserver_helper, plan, event) { self.handle_error(&mut cx.bcx.config.shell(), &mut errors, event_err); } } } self.progress.clear(); let profile_name = cx.bcx.build_config.requested_profile; // NOTE: this may be a bit inaccurate, since this may not display the // profile for what was actually built. Profile overrides can change // these settings, and in some cases different targets are built with // different profiles. 
To be accurate, it would need to collect a // list of Units built, and maybe display a list of the different // profiles used. However, to keep it simple and compatible with old // behavior, we just display what the base profile is. let profile = cx.bcx.profiles.base_profile(); let mut opt_type = String::from(if profile.opt_level.as_str() == "0" { "unoptimized" } else { "optimized" }); if profile.debuginfo.unwrap_or(0) != 0 { opt_type += " + debuginfo"; } let time_elapsed = util::elapsed(cx.bcx.config.creation_time().elapsed()); if let Err(e) = self.timings.finished(cx, &errors.to_error()) { self.handle_error(&mut cx.bcx.config.shell(), &mut errors, e); } if cx.bcx.build_config.emit_json() { let mut shell = cx.bcx.config.shell(); let msg = machine_message::BuildFinished { success: errors.count == 0, } .to_json_string(); if let Err(e) = writeln!(shell.out(), "{}", msg) { self.handle_error(&mut shell, &mut errors, e); } } if let Some(error) = errors.to_error() { // Any errors up to this point have already been printed via the // `display_error` inside `handle_error`. Some(anyhow::Error::new(AlreadyPrintedError::new(error))) } else if self.queue.is_empty() && self.pending_queue.is_empty() { let message = format!( "{} [{}] target(s) in {}", profile_name, opt_type, time_elapsed ); if !cx.bcx.build_config.build_plan { // It doesn't really matter if this fails. 
drop(cx.bcx.config.shell().status("Finished", message)); future_incompat::save_and_display_report( cx.bcx, &self.per_package_future_incompat_reports, ); } None } else { debug!("queue: {:#?}", self.queue); Some(internal("finished with jobs still left in the queue")) } } fn handle_error( &self, shell: &mut Shell, err_state: &mut ErrorsDuringDrain, new_err: impl Into, ) { let new_err = new_err.into(); if new_err.print_always || err_state.count == 0 { crate::display_error(&new_err.error, shell); if err_state.count == 0 && !self.active.is_empty() { drop(shell.warn("build failed, waiting for other jobs to finish...")); } err_state.count += 1; } else { log::warn!("{:?}", new_err.error); } } // This also records CPU usage and marks concurrency; we roughly want to do // this as often as we spin on the events receiver (at least every 500ms or // so). fn tick_progress(&mut self) { // Record some timing information if `--timings` is enabled, and // this'll end up being a noop if we're not recording this // information. self.timings.mark_concurrency( self.active.len(), self.pending_queue.len(), self.queue.len(), self.rustc_tokens.len(), ); self.timings.record_cpu(); let active_names = self .active .values() .map(|u| self.name_for_progress(u)) .collect::>(); drop(self.progress.tick_now( self.finished, self.total_units, &format!(": {}", active_names.join(", ")), )); } fn name_for_progress(&self, unit: &Unit) -> String { let pkg_name = unit.pkg.name(); let target_name = unit.target.name(); match unit.mode { CompileMode::Doc { .. 
} => format!("{}(doc)", pkg_name), CompileMode::RunCustomBuild => format!("{}(build)", pkg_name), CompileMode::Test | CompileMode::Check { test: true } => match unit.target.kind() { TargetKind::Lib(_) => format!("{}(test)", target_name), TargetKind::CustomBuild => panic!("cannot test build script"), TargetKind::Bin => format!("{}(bin test)", target_name), TargetKind::Test => format!("{}(test)", target_name), TargetKind::Bench => format!("{}(bench)", target_name), TargetKind::ExampleBin | TargetKind::ExampleLib(_) => { format!("{}(example test)", target_name) } }, _ => match unit.target.kind() { TargetKind::Lib(_) => pkg_name.to_string(), TargetKind::CustomBuild => format!("{}(build.rs)", pkg_name), TargetKind::Bin => format!("{}(bin)", target_name), TargetKind::Test => format!("{}(test)", target_name), TargetKind::Bench => format!("{}(bench)", target_name), TargetKind::ExampleBin | TargetKind::ExampleLib(_) => { format!("{}(example)", target_name) } }, } } /// Executes a job. /// /// Fresh jobs block until finished (which should be very fast!), Dirty /// jobs will spawn a thread in the background and return immediately. fn run<'s>(&mut self, unit: &Unit, job: Job, cx: &Context<'_, '_>, scope: &'s Scope<'s, '_>) { let id = JobId(self.next_id); self.next_id = self.next_id.checked_add(1).unwrap(); debug!("start {}: {:?}", id, unit); assert!(self.active.insert(id, unit.clone()).is_none()); let messages = self.messages.clone(); let fresh = job.freshness(); let rmeta_required = cx.rmeta_required(unit); let doit = move |state: JobState<'_, '_>| { let mut sender = FinishOnDrop { messages: &state.messages, id, result: None, }; sender.result = Some(job.run(&state)); // If the `rmeta_required` wasn't consumed but it was set // previously, then we either have: // // 1. The `job` didn't do anything because it was "fresh". // 2. The `job` returned an error and didn't reach the point where // it called `rmeta_produced`. // 3. 
We forgot to call `rmeta_produced` and there's a bug in Cargo. // // Ruling out the third, the other two are pretty common for 2 // we'll just naturally abort the compilation operation but for 1 // we need to make sure that the metadata is flagged as produced so // send a synthetic message here. if state.rmeta_required.get() && sender.result.as_ref().unwrap().is_ok() { state .messages .push(Message::Finish(state.id, Artifact::Metadata, Ok(()))); } // Use a helper struct with a `Drop` implementation to guarantee // that a `Finish` message is sent even if our job panics. We // shouldn't panic unless there's a bug in Cargo, so we just need // to make sure nothing hangs by accident. struct FinishOnDrop<'a> { messages: &'a Queue, id: JobId, result: Option>, } impl Drop for FinishOnDrop<'_> { fn drop(&mut self) { let result = self .result .take() .unwrap_or_else(|| Err(format_err!("worker panicked"))); self.messages .push(Message::Finish(self.id, Artifact::All, result)); } } }; match fresh { Freshness::Fresh => { self.timings.add_fresh(); // Running a fresh job on the same thread is often much faster than spawning a new // thread to run the job. 
doit(JobState { id, messages, output: Some(&self.diag_dedupe), rmeta_required: Cell::new(rmeta_required), _marker: marker::PhantomData, }); } Freshness::Dirty => { self.timings.add_dirty(); scope.spawn(move || { doit(JobState { id, messages: messages.clone(), output: None, rmeta_required: Cell::new(rmeta_required), _marker: marker::PhantomData, }) }); } } } fn emit_warnings( &mut self, msg: Option<&str>, unit: &Unit, cx: &mut Context<'_, '_>, ) -> CargoResult<()> { let outputs = cx.build_script_outputs.lock().unwrap(); let metadata = match cx.find_build_script_metadata(unit) { Some(metadata) => metadata, None => return Ok(()), }; let bcx = &mut cx.bcx; if let Some(output) = outputs.get(metadata) { if !output.warnings.is_empty() { if let Some(msg) = msg { writeln!(bcx.config.shell().err(), "{}\n", msg)?; } for warning in output.warnings.iter() { bcx.config.shell().warn(warning)?; } if msg.is_some() { // Output an empty line. writeln!(bcx.config.shell().err())?; } } } Ok(()) } fn bump_warning_count(&mut self, id: JobId, emitted: bool) { let cnts = self.warning_count.entry(id).or_default(); cnts.0 += 1; if !emitted { cnts.1 += 1; } } /// Displays a final report of the warnings emitted by a particular job. 
fn report_warning_count(&mut self, config: &Config, id: JobId) { let count = match self.warning_count.remove(&id) { Some(count) => count, None => return, }; let unit = &self.active[&id]; let mut message = format!("`{}` ({}", unit.pkg.name(), unit.target.description_named()); if unit.mode.is_rustc_test() && !(unit.target.is_test() || unit.target.is_bench()) { message.push_str(" test"); } else if unit.mode.is_doc_test() { message.push_str(" doctest"); } else if unit.mode.is_doc() { message.push_str(" doc"); } message.push_str(") generated "); match count.0 { 1 => message.push_str("1 warning"), n => drop(write!(message, "{} warnings", n)), }; match count.1 { 0 => {} 1 => message.push_str(" (1 duplicate)"), n => drop(write!(message, " ({} duplicates)", n)), } // Errors are ignored here because it is tricky to handle them // correctly, and they aren't important. drop(config.shell().warn(message)); } fn finish( &mut self, id: JobId, unit: &Unit, artifact: Artifact, cx: &mut Context<'_, '_>, ) -> CargoResult<()> { if unit.mode.is_run_custom_build() && unit.show_warnings(cx.bcx.config) { self.emit_warnings(None, unit, cx)?; } let unlocked = self.queue.finish(unit, &artifact); match artifact { Artifact::All => self.timings.unit_finished(id, unlocked), Artifact::Metadata => self.timings.unit_rmeta_finished(id, unlocked), } Ok(()) } // This isn't super trivial because we don't want to print loads and // loads of information to the console, but we also want to produce a // faithful representation of what's happening. This is somewhat nuanced // as a package can start compiling *very* early on because of custom // build commands and such. // // In general, we try to print "Compiling" for the first nontrivial task // run for a package, regardless of when that is. We then don't print // out any more information for a package after we've printed it once. 
fn note_working_on( &mut self, config: &Config, unit: &Unit, fresh: Freshness, ) -> CargoResult<()> { if (self.compiled.contains(&unit.pkg.package_id()) && !unit.mode.is_doc()) || (self.documented.contains(&unit.pkg.package_id()) && unit.mode.is_doc()) { return Ok(()); } match fresh { // Any dirty stage which runs at least one command gets printed as // being a compiled package. Dirty => { if unit.mode.is_doc() { self.documented.insert(unit.pkg.package_id()); config.shell().status("Documenting", &unit.pkg)?; } else if unit.mode.is_doc_test() { // Skip doc test. } else { self.compiled.insert(unit.pkg.package_id()); if unit.mode.is_check() { config.shell().status("Checking", &unit.pkg)?; } else { config.shell().status("Compiling", &unit.pkg)?; } } } Fresh => { // If doc test are last, only print "Fresh" if nothing has been printed. if self.counts[&unit.pkg.package_id()] == 0 && !(unit.mode.is_doc_test() && self.compiled.contains(&unit.pkg.package_id())) { self.compiled.insert(unit.pkg.package_id()); config.shell().verbose(|c| c.status("Fresh", &unit.pkg))?; } } } Ok(()) } fn back_compat_notice(&self, cx: &Context<'_, '_>, unit: &Unit) -> CargoResult<()> { if unit.pkg.name() != "diesel" || unit.pkg.version() >= &Version::new(1, 4, 8) || cx.bcx.ws.resolve_behavior() == ResolveBehavior::V1 || !unit.pkg.package_id().source_id().is_registry() || !unit.features.is_empty() { return Ok(()); } if !cx .bcx .unit_graph .keys() .any(|unit| unit.pkg.name() == "diesel" && !unit.features.is_empty()) { return Ok(()); } cx.bcx.config.shell().note( "\ This error may be due to an interaction between diesel and Cargo's new feature resolver. Try updating to diesel 1.4.8 to fix this error. 
", )?; Ok(()) } } impl ErrorsDuringDrain { fn to_error(&self) -> Option { match self.count { 0 => None, 1 => Some(format_err!("1 job failed")), n => Some(format_err!("{} jobs failed", n)), } } } cargo-0.66.0/src/cargo/core/compiler/layout.rs000066400000000000000000000217471432416201200212130ustar00rootroot00000000000000//! Management of the directory layout of a build //! //! The directory layout is a little tricky at times, hence a separate file to //! house this logic. The current layout looks like this: //! //! ```text //! # This is the root directory for all output, the top-level package //! # places all of its output here. //! target/ //! //! # Cache of `rustc -Vv` output for performance. //! .rustc-info.json //! //! # All final artifacts are linked into this directory from `deps`. //! # Note that named profiles will soon be included as separate directories //! # here. They have a restricted format, similar to Rust identifiers, so //! # Cargo-specific directories added in the future should use some prefix //! # like `.` to avoid name collisions. //! debug/ # or release/ //! //! # File used to lock the directory to prevent multiple cargo processes //! # from using it at the same time. //! .cargo-lock //! //! # Hidden directory that holds all of the fingerprint files for all //! # packages //! .fingerprint/ //! # Each package is in a separate directory. //! # Note that different target kinds have different filename prefixes. //! $pkgname-$META/ //! # Set of source filenames for this package. //! dep-lib-$targetname //! # Timestamp when this package was last built. //! invoked.timestamp //! # The fingerprint hash. //! lib-$targetname //! # Detailed information used for logging the reason why //! # something is being recompiled. //! lib-$targetname.json //! # The console output from the compiler. This is cached //! # so that warnings can be redisplayed for "fresh" units. //! output-lib-$targetname //! //! 
# This is the root directory for all rustc artifacts except build //! # scripts, examples, and test and bench executables. Almost every //! # artifact should have a metadata hash added to its filename to //! # prevent collisions. One notable exception is dynamic libraries. //! deps/ //! //! # Each artifact dependency gets in its own directory. //! /artifact/$pkgname-$META/$kind //! //! # Root directory for all compiled examples. //! examples/ //! //! # Directory used to store incremental data for the compiler (when //! # incremental is enabled. //! incremental/ //! //! # This is the location at which the output of all custom build //! # commands are rooted. //! build/ //! //! # Each package gets its own directory where its build script and //! # script output are placed //! $pkgname-$META/ # For the build script itself. //! # The build script executable (name may be changed by user). //! build-script-build-$META //! # Hard link to build-script-build-$META. //! build-script-build //! # Dependency information generated by rustc. //! build-script-build-$META.d //! # Debug information, depending on platform and profile //! # settings. //! //! //! # The package shows up twice with two different metadata hashes. //! $pkgname-$META/ # For the output of the build script. //! # Timestamp when the build script was last executed. //! invoked.timestamp //! # Directory where script can output files ($OUT_DIR). //! out/ //! # Output from the build script. //! output //! # Path to `out`, used to help when the target directory is //! # moved. //! root-output //! # Stderr output from the build script. //! stderr //! //! # Output from rustdoc //! doc/ //! //! # Used by `cargo package` and `cargo publish` to build a `.crate` file. //! package/ //! //! # Experimental feature for generated build scripts. //! .metabuild/ //! ``` //! //! When cross-compiling, the layout is the same, except it appears in //! `target/$TRIPLE`. 
use crate::core::compiler::CompileTarget; use crate::core::Workspace; use crate::util::{CargoResult, FileLock}; use cargo_util::paths; use std::path::{Path, PathBuf}; /// Contains the paths of all target output locations. /// /// See module docs for more information. pub struct Layout { /// The root directory: `/path/to/target`. /// If cross compiling: `/path/to/target/$TRIPLE`. root: PathBuf, /// The final artifact destination: `$root/debug` (or `release`). dest: PathBuf, /// The directory with rustc artifacts: `$dest/deps` deps: PathBuf, /// The directory for build scripts: `$dest/build` build: PathBuf, /// The directory for artifacts, i.e. binaries, cdylibs, staticlibs: `$dest/deps/artifact` artifact: PathBuf, /// The directory for incremental files: `$dest/incremental` incremental: PathBuf, /// The directory for fingerprints: `$dest/.fingerprint` fingerprint: PathBuf, /// The directory for examples: `$dest/examples` examples: PathBuf, /// The directory for rustdoc output: `$root/doc` doc: PathBuf, /// The directory for temporary data of integration tests and benches: `$dest/tmp` tmp: PathBuf, /// The lockfile for a build (`.cargo-lock`). Will be unlocked when this /// struct is `drop`ped. _lock: FileLock, } impl Layout { /// Calculate the paths for build output, lock the build directory, and return as a Layout. /// /// This function will block if the directory is already locked. /// /// `dest` should be the final artifact directory name. Currently either /// "debug" or "release". pub fn new( ws: &Workspace<'_>, target: Option, dest: &str, ) -> CargoResult { let mut root = ws.target_dir(); if let Some(target) = target { root.push(target.short_name()); } let dest = root.join(dest); // If the root directory doesn't already exist go ahead and create it // here. Use this opportunity to exclude it from backups as well if the // system supports it since this is a freshly created folder. 
// paths::create_dir_all_excluded_from_backups_atomic(root.as_path_unlocked())?; // Now that the excluded from backups target root is created we can create the // actual destination (sub)subdirectory. paths::create_dir_all(dest.as_path_unlocked())?; // For now we don't do any more finer-grained locking on the artifact // directory, so just lock the entire thing for the duration of this // compile. let lock = dest.open_rw(".cargo-lock", ws.config(), "build directory")?; let root = root.into_path_unlocked(); let dest = dest.into_path_unlocked(); let deps = dest.join("deps"); let artifact = deps.join("artifact"); Ok(Layout { deps, build: dest.join("build"), artifact, incremental: dest.join("incremental"), fingerprint: dest.join(".fingerprint"), examples: dest.join("examples"), doc: root.join("doc"), tmp: root.join("tmp"), root, dest, _lock: lock, }) } /// Makes sure all directories stored in the Layout exist on the filesystem. pub fn prepare(&mut self) -> CargoResult<()> { paths::create_dir_all(&self.deps)?; paths::create_dir_all(&self.incremental)?; paths::create_dir_all(&self.fingerprint)?; paths::create_dir_all(&self.examples)?; paths::create_dir_all(&self.build)?; Ok(()) } /// Fetch the destination path for final artifacts (`/…/target/debug`). pub fn dest(&self) -> &Path { &self.dest } /// Fetch the deps path. pub fn deps(&self) -> &Path { &self.deps } /// Fetch the examples path. pub fn examples(&self) -> &Path { &self.examples } /// Fetch the doc path. pub fn doc(&self) -> &Path { &self.doc } /// Fetch the root path (`/…/target`). pub fn root(&self) -> &Path { &self.root } /// Fetch the incremental path. pub fn incremental(&self) -> &Path { &self.incremental } /// Fetch the fingerprint path. pub fn fingerprint(&self) -> &Path { &self.fingerprint } /// Fetch the build script path. pub fn build(&self) -> &Path { &self.build } /// Fetch the artifact path. pub fn artifact(&self) -> &Path { &self.artifact } /// Create and return the tmp path. 
pub fn prepare_tmp(&self) -> CargoResult<&Path> { paths::create_dir_all(&self.tmp)?; Ok(&self.tmp) } } cargo-0.66.0/src/cargo/core/compiler/links.rs000066400000000000000000000045151432416201200210100ustar00rootroot00000000000000use super::unit_graph::UnitGraph; use crate::core::resolver::errors::describe_path; use crate::core::{PackageId, Resolve}; use crate::util::errors::CargoResult; use std::collections::{HashMap, HashSet}; /// Validate `links` field does not conflict between packages. pub fn validate_links(resolve: &Resolve, unit_graph: &UnitGraph) -> CargoResult<()> { // NOTE: This is the *old* links validator. Links are usually validated in // the resolver. However, the `links` field was added to the index in // early 2018 (see https://github.com/rust-lang/cargo/pull/4978). However, // `links` has been around since 2014, so there are still many crates in // the index that don't have `links` properly set in the index (over 600 // at the time of this writing in 2019). This can probably be removed at // some point in the future, though it might be worth considering fixing // the index. let mut validated: HashSet = HashSet::new(); let mut links: HashMap = HashMap::new(); let mut units: Vec<_> = unit_graph.keys().collect(); // Sort primarily to make testing easier. 
units.sort_unstable(); for unit in units { if !validated.insert(unit.pkg.package_id()) { continue; } let lib = match unit.pkg.manifest().links() { Some(lib) => lib, None => continue, }; if let Some(&prev) = links.get(lib) { let prev_path = resolve .path_to_top(&prev) .into_iter() .map(|(p, d)| (p, d.and_then(|d| d.iter().next()))); let pkg = unit.pkg.package_id(); let path = resolve .path_to_top(&pkg) .into_iter() .map(|(p, d)| (p, d.and_then(|d| d.iter().next()))); anyhow::bail!( "multiple packages link to native library `{}`, \ but a native library can be linked only once\n\ \n\ {}\nlinks to native library `{}`\n\ \n\ {}\nalso links to native library `{}`", lib, describe_path(prev_path), lib, describe_path(path), lib ) } links.insert(lib.to_string(), unit.pkg.package_id()); } Ok(()) } cargo-0.66.0/src/cargo/core/compiler/lto.rs000066400000000000000000000205461432416201200204700ustar00rootroot00000000000000use crate::core::compiler::{BuildContext, CompileMode, CrateType, Unit}; use crate::core::profiles; use crate::util::interning::InternedString; use crate::util::errors::CargoResult; use std::collections::hash_map::{Entry, HashMap}; /// Possible ways to run rustc and request various parts of LTO. /// /// Variant | Flag | Object Code | Bitcode /// -------------------|------------------------|-------------|-------- /// `Run` | `-C lto=foo` | n/a | n/a /// `Off` | `-C lto=off` | n/a | n/a /// `OnlyBitcode` | `-C linker-plugin-lto` | | βœ“ /// `ObjectAndBitcode` | | βœ“ | βœ“ /// `OnlyObject` | `-C embed-bitcode=no` | βœ“ | #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum Lto { /// LTO is run for this rustc, and it's `-Clto=foo`. If the given value is /// None, that corresponds to `-Clto` with no argument, which means do /// "fat" LTO. Run(Option), /// LTO has been explicitly listed as "off". This means no thin-local-LTO, /// no LTO anywhere, I really mean it! 
Off, /// This rustc invocation only needs to produce bitcode (it is *only* used /// for LTO), there's no need to produce object files, so we can pass /// `-Clinker-plugin-lto` OnlyBitcode, /// This rustc invocation needs to embed bitcode in object files. This means /// that object files may be used for a normal link, and the crate may be /// loaded for LTO later, so both are required. ObjectAndBitcode, /// This should not include bitcode. This is primarily to reduce disk /// space usage. OnlyObject, } pub fn generate(bcx: &BuildContext<'_, '_>) -> CargoResult> { let mut map = HashMap::new(); for unit in bcx.roots.iter() { let root_lto = match unit.profile.lto { // LTO not requested, no need for bitcode. profiles::Lto::Bool(false) => Lto::OnlyObject, profiles::Lto::Off => Lto::Off, _ => { let crate_types = unit.target.rustc_crate_types(); if unit.target.for_host() { Lto::OnlyObject } else if needs_object(&crate_types) { lto_when_needs_object(&crate_types) } else { // This may or may not participate in LTO, let's start // with the minimum requirements. This may be expanded in // `calculate` below if necessary. Lto::OnlyBitcode } } }; calculate(bcx, &mut map, unit, root_lto)?; } Ok(map) } /// Whether or not any of these crate types need object code. fn needs_object(crate_types: &[CrateType]) -> bool { crate_types.iter().any(|k| k.can_lto() || k.is_dynamic()) } /// Lto setting to use when this unit needs object code. fn lto_when_needs_object(crate_types: &[CrateType]) -> Lto { if crate_types.iter().all(|ct| *ct == CrateType::Dylib) { // A dylib whose parent is running LTO. rustc currently // doesn't support LTO with dylibs, so bitcode is not // needed. Lto::OnlyObject } else { // Mixed rlib with a dylib or cdylib whose parent is running LTO. This // needs both: bitcode for the rlib (for LTO) and object code for the // dylib. 
Lto::ObjectAndBitcode } } fn calculate( bcx: &BuildContext<'_, '_>, map: &mut HashMap, unit: &Unit, parent_lto: Lto, ) -> CargoResult<()> { let crate_types = match unit.mode { // Note: Doctest ignores LTO, but for now we'll compute it as-if it is // a Bin, in case it is ever supported in the future. CompileMode::Test | CompileMode::Bench | CompileMode::Doctest => vec![CrateType::Bin], // Notes on other modes: // - Check: Treat as the underlying type, it doesn't really matter. // - Doc: LTO is N/A for the Doc unit itself since rustdoc does not // support codegen flags. We still compute the dependencies, which // are mostly `Check`. // - RunCustomBuild is ignored because it is always "for_host". _ => unit.target.rustc_crate_types(), }; // LTO can only be performed if *all* of the crate types support it. // For example, a cdylib/rlib combination won't allow LTO. let all_lto_types = crate_types.iter().all(CrateType::can_lto); // Compute the LTO based on the profile, and what our parent requires. let lto = if unit.target.for_host() { // Disable LTO for host builds since we only really want to perform LTO // for the final binary, and LTO on plugins/build scripts/proc macros is // largely not desired. Lto::OnlyObject } else if all_lto_types { // Note that this ignores the `parent_lto` because this isn't a // linkable crate type; this unit is not being embedded in the parent. match unit.profile.lto { profiles::Lto::Named(s) => Lto::Run(Some(s)), profiles::Lto::Off => Lto::Off, profiles::Lto::Bool(true) => Lto::Run(None), profiles::Lto::Bool(false) => Lto::OnlyObject, } } else { match (parent_lto, needs_object(&crate_types)) { // An rlib whose parent is running LTO, we only need bitcode. (Lto::Run(_), false) => Lto::OnlyBitcode, // LTO when something needs object code. (Lto::Run(_), true) | (Lto::OnlyBitcode, true) => lto_when_needs_object(&crate_types), // LTO is disabled, continue to disable it. 
(Lto::Off, _) => Lto::Off, // If this doesn't have any requirements, or the requirements are // already satisfied, then stay with our parent. (_, false) | (Lto::OnlyObject, true) | (Lto::ObjectAndBitcode, true) => parent_lto, } }; // Merge the computed LTO. If this unit appears multiple times in the // graph, the merge may expand the requirements. let merged_lto = match map.entry(unit.clone()) { // If we haven't seen this unit before then insert our value and keep // going. Entry::Vacant(v) => *v.insert(lto), Entry::Occupied(mut v) => { let result = match (lto, v.get()) { // No change in requirements. (Lto::OnlyBitcode, Lto::OnlyBitcode) => Lto::OnlyBitcode, (Lto::OnlyObject, Lto::OnlyObject) => Lto::OnlyObject, // Once we're running LTO we keep running LTO. We should always // calculate the same thing here each iteration because if we // see this twice then it means, for example, two unit tests // depend on a binary, which is normal. (Lto::Run(s), _) | (_, &Lto::Run(s)) => Lto::Run(s), // Off means off! This has the same reasoning as `Lto::Run`. (Lto::Off, _) | (_, Lto::Off) => Lto::Off, // Once a target has requested both, that's the maximal amount // of work that can be done, so we just keep doing that work. (Lto::ObjectAndBitcode, _) | (_, Lto::ObjectAndBitcode) => Lto::ObjectAndBitcode, // Upgrade so that both requirements can be met. // // This is where the trickiness happens. This unit needs // bitcode and the previously calculated value for this unit // says it didn't need bitcode (or vice versa). This means that // we're a shared dependency between some targets which require // LTO and some which don't. This means that instead of being // either only-objects or only-bitcode we have to embed both in // rlibs (used for different compilations), so we switch to // including both. (Lto::OnlyObject, Lto::OnlyBitcode) | (Lto::OnlyBitcode, Lto::OnlyObject) => { Lto::ObjectAndBitcode } }; // No need to recurse if we calculated the same value as before. 
if result == *v.get() { return Ok(()); } v.insert(result); result } }; for dep in &bcx.unit_graph[unit] { calculate(bcx, map, &dep.unit, merged_lto)?; } Ok(()) } cargo-0.66.0/src/cargo/core/compiler/mod.rs000066400000000000000000001536151432416201200204550ustar00rootroot00000000000000pub mod artifact; mod build_config; mod build_context; mod build_plan; mod compilation; mod compile_kind; mod context; mod crate_type; mod custom_build; mod fingerprint; pub mod future_incompat; mod job; mod job_queue; mod layout; mod links; mod lto; mod output_depinfo; pub mod rustdoc; pub mod standard_lib; mod timings; mod unit; pub mod unit_dependencies; pub mod unit_graph; use std::collections::HashSet; use std::env; use std::ffi::{OsStr, OsString}; use std::fs::{self, File}; use std::io::{BufRead, Write}; use std::path::{Path, PathBuf}; use std::sync::Arc; use anyhow::{Context as _, Error}; use lazycell::LazyCell; use log::{debug, trace}; pub use self::build_config::{BuildConfig, CompileMode, MessageFormat, TimingOutput}; pub use self::build_context::{ BuildContext, FileFlavor, FileType, RustDocFingerprint, RustcTargetData, TargetInfo, }; use self::build_plan::BuildPlan; pub use self::compilation::{Compilation, Doctest, UnitOutput}; pub use self::compile_kind::{CompileKind, CompileTarget}; pub use self::context::{Context, Metadata}; pub use self::crate_type::CrateType; pub use self::custom_build::{BuildOutput, BuildScriptOutputs, BuildScripts}; pub use self::job::Freshness; use self::job::{Job, Work}; use self::job_queue::{JobQueue, JobState}; pub(crate) use self::layout::Layout; pub use self::lto::Lto; use self::output_depinfo::output_depinfo; use self::unit_graph::UnitDep; use crate::core::compiler::future_incompat::FutureIncompatReport; pub use crate::core::compiler::unit::{Unit, UnitInterner}; use crate::core::manifest::TargetSourcePath; use crate::core::profiles::{PanicStrategy, Profile, Strip}; use crate::core::{Feature, PackageId, Target}; use 
crate::util::errors::{CargoResult, VerboseError}; use crate::util::interning::InternedString; use crate::util::machine_message::{self, Message}; use crate::util::{add_path_args, internal, iter_join_onto, profile}; use cargo_util::{paths, ProcessBuilder, ProcessError}; const RUSTDOC_CRATE_VERSION_FLAG: &str = "--crate-version"; #[derive(Clone, Hash, Debug, PartialEq, Eq)] pub enum LinkType { All, Cdylib, Bin, SingleBin(String), Test, Bench, Example, } impl LinkType { pub fn applies_to(&self, target: &Target) -> bool { match self { LinkType::All => true, LinkType::Cdylib => target.is_cdylib(), LinkType::Bin => target.is_bin(), LinkType::SingleBin(name) => target.is_bin() && target.name() == name, LinkType::Test => target.is_test(), LinkType::Bench => target.is_bench(), LinkType::Example => target.is_exe_example(), } } } /// A glorified callback for executing calls to rustc. Rather than calling rustc /// directly, we'll use an `Executor`, giving clients an opportunity to intercept /// the build calls. pub trait Executor: Send + Sync + 'static { /// Called after a rustc process invocation is prepared up-front for a given /// unit of work (may still be modified for runtime-known dependencies, when /// the work is actually executed). fn init(&self, _cx: &Context<'_, '_>, _unit: &Unit) {} /// In case of an `Err`, Cargo will not continue with the build process for /// this package. fn exec( &self, cmd: &ProcessBuilder, id: PackageId, target: &Target, mode: CompileMode, on_stdout_line: &mut dyn FnMut(&str) -> CargoResult<()>, on_stderr_line: &mut dyn FnMut(&str) -> CargoResult<()>, ) -> CargoResult<()>; /// Queried when queuing each unit of work. If it returns true, then the /// unit will always be rebuilt, independent of whether it needs to be. fn force_rebuild(&self, _unit: &Unit) -> bool { false } } /// A `DefaultExecutor` calls rustc without doing anything else. It is Cargo's /// default behaviour. 
#[derive(Copy, Clone)] pub struct DefaultExecutor; impl Executor for DefaultExecutor { fn exec( &self, cmd: &ProcessBuilder, _id: PackageId, _target: &Target, _mode: CompileMode, on_stdout_line: &mut dyn FnMut(&str) -> CargoResult<()>, on_stderr_line: &mut dyn FnMut(&str) -> CargoResult<()>, ) -> CargoResult<()> { cmd.exec_with_streaming(on_stdout_line, on_stderr_line, false) .map(drop) } } fn compile<'cfg>( cx: &mut Context<'_, 'cfg>, jobs: &mut JobQueue<'cfg>, plan: &mut BuildPlan, unit: &Unit, exec: &Arc, force_rebuild: bool, ) -> CargoResult<()> { let bcx = cx.bcx; let build_plan = bcx.build_config.build_plan; if !cx.compiled.insert(unit.clone()) { return Ok(()); } // Build up the work to be done to compile this unit, enqueuing it once // we've got everything constructed. let p = profile::start(format!("preparing: {}/{}", unit.pkg, unit.target.name())); fingerprint::prepare_init(cx, unit)?; let job = if unit.mode.is_run_custom_build() { custom_build::prepare(cx, unit)? } else if unit.mode.is_doc_test() { // We run these targets later, so this is just a no-op for now. Job::new_fresh() } else if build_plan { Job::new_dirty(rustc(cx, unit, &exec.clone())?) } else { let force = exec.force_rebuild(unit) || force_rebuild; let mut job = fingerprint::prepare_target(cx, unit, force)?; job.before(if job.freshness() == Freshness::Dirty { let work = if unit.mode.is_doc() || unit.mode.is_doc_scrape() { rustdoc(cx, unit)? } else { rustc(cx, unit, exec)? }; work.then(link_targets(cx, unit, false)?) } else { // We always replay the output cache, // since it might contain future-incompat-report messages let work = replay_output_cache( unit.pkg.package_id(), PathBuf::from(unit.pkg.manifest_path()), &unit.target, cx.files().message_cache_path(unit), cx.bcx.build_config.message_format, cx.bcx.config.shell().err_supports_color(), unit.show_warnings(bcx.config), ); // Need to link targets on both the dirty and fresh. work.then(link_targets(cx, unit, true)?) 
}); job }; jobs.enqueue(cx, unit, job)?; drop(p); // Be sure to compile all dependencies of this target as well. let deps = Vec::from(cx.unit_deps(unit)); // Create vec due to mutable borrow. for dep in deps { compile(cx, jobs, plan, &dep.unit, exec, false)?; } if build_plan { plan.add(cx, unit)?; } Ok(()) } fn rustc(cx: &mut Context<'_, '_>, unit: &Unit, exec: &Arc) -> CargoResult { let mut rustc = prepare_rustc(cx, &unit.target.rustc_crate_types(), unit)?; let build_plan = cx.bcx.build_config.build_plan; let name = unit.pkg.name().to_string(); let buildkey = unit.buildkey(); add_cap_lints(cx.bcx, unit, &mut rustc); let outputs = cx.outputs(unit)?; let root = cx.files().out_dir(unit); // Prepare the native lib state (extra `-L` and `-l` flags). let build_script_outputs = Arc::clone(&cx.build_script_outputs); let current_id = unit.pkg.package_id(); let manifest_path = PathBuf::from(unit.pkg.manifest_path()); let build_scripts = cx.build_scripts.get(unit).cloned(); // If we are a binary and the package also contains a library, then we // don't pass the `-l` flags. 
let pass_l_flag = unit.target.is_lib() || !unit.pkg.targets().iter().any(|t| t.is_lib()); let dep_info_name = if cx.files().use_extra_filename(unit) { format!( "{}-{}.d", unit.target.crate_name(), cx.files().metadata(unit) ) } else { format!("{}.d", unit.target.crate_name()) }; let rustc_dep_info_loc = root.join(dep_info_name); let dep_info_loc = fingerprint::dep_info_loc(cx, unit); rustc.args(cx.bcx.rustflags_args(unit)); if cx.bcx.config.cli_unstable().binary_dep_depinfo { rustc.arg("-Z").arg("binary-dep-depinfo"); } let mut output_options = OutputOptions::new(cx, unit); let package_id = unit.pkg.package_id(); let target = Target::clone(&unit.target); let mode = unit.mode; exec.init(cx, unit); let exec = exec.clone(); let root_output = cx.files().host_dest().to_path_buf(); let target_dir = cx.bcx.ws.target_dir().into_path_unlocked(); let pkg_root = unit.pkg.root().to_path_buf(); let cwd = rustc .get_cwd() .unwrap_or_else(|| cx.bcx.config.cwd()) .to_path_buf(); let fingerprint_dir = cx.files().fingerprint_dir(unit); let script_metadata = cx.find_build_script_metadata(unit); let is_local = unit.is_local(); let artifact = unit.artifact; return Ok(Work::new(move |state| { // Artifacts are in a different location than typical units, // hence we must assure the crate- and target-dependent // directory is present. if artifact.is_true() { paths::create_dir_all(&root)?; } // Only at runtime have we discovered what the extra -L and -l // arguments are for native libraries, so we process those here. We // also need to be sure to add any -L paths for our plugins to the // dynamic library load path as a plugin's dynamic library may be // located somewhere in there. // Finally, if custom environment variables have been produced by // previous build scripts, we include them in the rustc invocation. 
if let Some(build_scripts) = build_scripts { let script_outputs = build_script_outputs.lock().unwrap(); if !build_plan { add_native_deps( &mut rustc, &script_outputs, &build_scripts, pass_l_flag, &target, current_id, )?; add_plugin_deps(&mut rustc, &script_outputs, &build_scripts, &root_output)?; } add_custom_flags(&mut rustc, &script_outputs, script_metadata)?; } for output in outputs.iter() { // If there is both an rmeta and rlib, rustc will prefer to use the // rlib, even if it is older. Therefore, we must delete the rlib to // force using the new rmeta. if output.path.extension() == Some(OsStr::new("rmeta")) { let dst = root.join(&output.path).with_extension("rlib"); if dst.exists() { paths::remove_file(&dst)?; } } } fn verbose_if_simple_exit_code(err: Error) -> Error { // If a signal on unix (`code == None`) or an abnormal termination // on Windows (codes like `0xC0000409`), don't hide the error details. match err .downcast_ref::() .as_ref() .and_then(|perr| perr.code) { Some(n) if cargo_util::is_simple_exit_code(n) => VerboseError::new(err).into(), _ => err, } } state.running(&rustc); let timestamp = paths::set_invocation_time(&fingerprint_dir)?; if build_plan { state.build_plan(buildkey, rustc.clone(), outputs.clone()); } else { exec.exec( &rustc, package_id, &target, mode, &mut |line| on_stdout_line(state, line, package_id, &target), &mut |line| { on_stderr_line( state, line, package_id, &manifest_path, &target, &mut output_options, ) }, ) .map_err(verbose_if_simple_exit_code) .with_context(|| { // adapted from rustc_errors/src/lib.rs let warnings = match output_options.warnings_seen { 0 => String::new(), 1 => "; 1 warning emitted".to_string(), count => format!("; {} warnings emitted", count), }; let errors = match output_options.errors_seen { 0 => String::new(), 1 => " due to previous error".to_string(), count => format!(" due to {} previous errors", count), }; format!("could not compile `{}`{}{}", name, errors, warnings) })?; // Exec should never return 
with success *and* generate an error. debug_assert_eq!(output_options.errors_seen, 0); } if rustc_dep_info_loc.exists() { fingerprint::translate_dep_info( &rustc_dep_info_loc, &dep_info_loc, &cwd, &pkg_root, &target_dir, &rustc, // Do not track source files in the fingerprint for registry dependencies. is_local, ) .with_context(|| { internal(format!( "could not parse/generate dep info at: {}", rustc_dep_info_loc.display() )) })?; // This mtime shift allows Cargo to detect if a source file was // modified in the middle of the build. paths::set_file_time_no_err(dep_info_loc, timestamp); } Ok(()) })); // Add all relevant `-L` and `-l` flags from dependencies (now calculated and // present in `state`) to the command provided. fn add_native_deps( rustc: &mut ProcessBuilder, build_script_outputs: &BuildScriptOutputs, build_scripts: &BuildScripts, pass_l_flag: bool, target: &Target, current_id: PackageId, ) -> CargoResult<()> { for key in build_scripts.to_link.iter() { let output = build_script_outputs.get(key.1).ok_or_else(|| { internal(format!( "couldn't find build script output for {}/{}", key.0, key.1 )) })?; for path in output.library_paths.iter() { rustc.arg("-L").arg(path); } if key.0 == current_id { if pass_l_flag { for name in output.library_links.iter() { rustc.arg("-l").arg(name); } } } for (lt, arg) in &output.linker_args { // There was an unintentional change where cdylibs were // allowed to be passed via transitive dependencies. This // clause should have been kept in the `if` block above. For // now, continue allowing it for cdylib only. // See https://github.com/rust-lang/cargo/issues/9562 if lt.applies_to(target) && (key.0 == current_id || *lt == LinkType::Cdylib) { rustc.arg("-C").arg(format!("link-arg={}", arg)); } } } Ok(()) } } /// Link the compiled target (often of form `foo-{metadata_hash}`) to the /// final target. This must happen during both "Fresh" and "Compile". 
fn link_targets(cx: &mut Context<'_, '_>, unit: &Unit, fresh: bool) -> CargoResult { let bcx = cx.bcx; let outputs = cx.outputs(unit)?; let export_dir = cx.files().export_dir(); let package_id = unit.pkg.package_id(); let manifest_path = PathBuf::from(unit.pkg.manifest_path()); let profile = unit.profile.clone(); let unit_mode = unit.mode; let features = unit.features.iter().map(|s| s.to_string()).collect(); let json_messages = bcx.build_config.emit_json(); let executable = cx.get_executable(unit)?; let mut target = Target::clone(&unit.target); if let TargetSourcePath::Metabuild = target.src_path() { // Give it something to serialize. let path = unit.pkg.manifest().metabuild_path(cx.bcx.ws.target_dir()); target.set_src_path(TargetSourcePath::Path(path)); } Ok(Work::new(move |state| { // If we're a "root crate", e.g., the target of this compilation, then we // hard link our outputs out of the `deps` directory into the directory // above. This means that `cargo build` will produce binaries in // `target/debug` which one probably expects. let mut destinations = vec![]; for output in outputs.iter() { let src = &output.path; // This may have been a `cargo rustc` command which changes the // output, so the source may not actually exist. 
if !src.exists() { continue; } let dst = match output.hardlink.as_ref() { Some(dst) => dst, None => { destinations.push(src.clone()); continue; } }; destinations.push(dst.clone()); paths::link_or_copy(src, dst)?; if let Some(ref path) = output.export_path { let export_dir = export_dir.as_ref().unwrap(); paths::create_dir_all(export_dir)?; paths::link_or_copy(src, path)?; } } if json_messages { let art_profile = machine_message::ArtifactProfile { opt_level: profile.opt_level.as_str(), debuginfo: profile.debuginfo, debug_assertions: profile.debug_assertions, overflow_checks: profile.overflow_checks, test: unit_mode.is_any_test(), }; let msg = machine_message::Artifact { package_id, manifest_path, target: &target, profile: art_profile, features, filenames: destinations, executable, fresh, } .to_json_string(); state.stdout(msg)?; } Ok(()) })) } // For all plugin dependencies, add their -L paths (now calculated and present // in `build_script_outputs`) to the dynamic library load path for the command // to execute. 
fn add_plugin_deps( rustc: &mut ProcessBuilder, build_script_outputs: &BuildScriptOutputs, build_scripts: &BuildScripts, root_output: &Path, ) -> CargoResult<()> { let var = paths::dylib_path_envvar(); let search_path = rustc.get_env(var).unwrap_or_default(); let mut search_path = env::split_paths(&search_path).collect::>(); for (pkg_id, metadata) in &build_scripts.plugins { let output = build_script_outputs .get(*metadata) .ok_or_else(|| internal(format!("couldn't find libs for plugin dep {}", pkg_id)))?; search_path.append(&mut filter_dynamic_search_path( output.library_paths.iter(), root_output, )); } let search_path = paths::join_paths(&search_path, var)?; rustc.env(var, &search_path); Ok(()) } // Determine paths to add to the dynamic search path from -L entries // // Strip off prefixes like "native=" or "framework=" and filter out directories // **not** inside our output directory since they are likely spurious and can cause // clashes with system shared libraries (issue #3366). fn filter_dynamic_search_path<'a, I>(paths: I, root_output: &Path) -> Vec where I: Iterator, { let mut search_path = vec![]; for dir in paths { let dir = match dir.to_str() { Some(s) => { let mut parts = s.splitn(2, '='); match (parts.next(), parts.next()) { (Some("native"), Some(path)) | (Some("crate"), Some(path)) | (Some("dependency"), Some(path)) | (Some("framework"), Some(path)) | (Some("all"), Some(path)) => path.into(), _ => dir.clone(), } } None => dir.clone(), }; if dir.starts_with(&root_output) { search_path.push(dir); } else { debug!( "Not including path {} in runtime library search path because it is \ outside target root {}", dir.display(), root_output.display() ); } } search_path } fn prepare_rustc( cx: &mut Context<'_, '_>, crate_types: &[CrateType], unit: &Unit, ) -> CargoResult { let is_primary = cx.is_primary_package(unit); let is_workspace = cx.bcx.ws.is_member(&unit.pkg); let mut base = cx .compilation .rustc_process(unit, is_primary, is_workspace)?; if is_primary { 
base.env("CARGO_PRIMARY_PACKAGE", "1"); } if unit.target.is_test() || unit.target.is_bench() { let tmp = cx.files().layout(unit.kind).prepare_tmp()?; base.env("CARGO_TARGET_TMPDIR", tmp.display().to_string()); } if cx.bcx.config.cli_unstable().jobserver_per_rustc { let client = cx.new_jobserver()?; base.inherit_jobserver(&client); base.arg("-Z").arg("jobserver-token-requests"); assert!(cx.rustc_clients.insert(unit.clone(), client).is_none()); } else { base.inherit_jobserver(&cx.jobserver); } build_base_args(cx, &mut base, unit, crate_types)?; build_deps_args(&mut base, cx, unit)?; Ok(base) } fn rustdoc(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult { let bcx = cx.bcx; // script_metadata is not needed here, it is only for tests. let mut rustdoc = cx.compilation.rustdoc_process(unit, None)?; rustdoc.inherit_jobserver(&cx.jobserver); let crate_name = unit.target.crate_name(); rustdoc.arg("--crate-name").arg(&crate_name); add_path_args(bcx.ws, unit, &mut rustdoc); add_cap_lints(bcx, unit, &mut rustdoc); if let CompileKind::Target(target) = unit.kind { rustdoc.arg("--target").arg(target.rustc_target()); } let doc_dir = cx.files().out_dir(unit); // Create the documentation directory ahead of time as rustdoc currently has // a bug where concurrent invocations will race to create this directory if // it doesn't already exist. 
paths::create_dir_all(&doc_dir)?; rustdoc.arg("-o").arg(&doc_dir); rustdoc.args(&features_args(unit)); rustdoc.args(&check_cfg_args(cx, unit)); add_error_format_and_color(cx, &mut rustdoc); add_allow_features(cx, &mut rustdoc); if let Some(args) = cx.bcx.extra_args_for(unit) { rustdoc.args(args); } let metadata = cx.metadata_for_doc_units[unit]; rustdoc.arg("-C").arg(format!("metadata={}", metadata)); let scrape_output_path = |unit: &Unit| -> CargoResult { let output_dir = cx.files().deps_dir(unit); Ok(output_dir.join(format!("{}.examples", unit.buildkey()))) }; if unit.mode.is_doc_scrape() { debug_assert!(cx.bcx.scrape_units.contains(unit)); rustdoc.arg("-Zunstable-options"); rustdoc .arg("--scrape-examples-output-path") .arg(scrape_output_path(unit)?); // Only scrape example for items from crates in the workspace, to reduce generated file size for pkg in cx.bcx.ws.members() { let names = pkg .targets() .iter() .map(|target| target.crate_name()) .collect::>(); for name in names { rustdoc.arg("--scrape-examples-target-crate").arg(name); } } } else if cx.bcx.scrape_units.len() > 0 && cx.bcx.ws.unit_needs_doc_scrape(unit) { // We only pass scraped examples to packages in the workspace // since examples are only coming from reverse-dependencies of workspace packages rustdoc.arg("-Zunstable-options"); for scrape_unit in &cx.bcx.scrape_units { rustdoc .arg("--with-examples") .arg(scrape_output_path(scrape_unit)?); } } build_deps_args(&mut rustdoc, cx, unit)?; rustdoc::add_root_urls(cx, unit, &mut rustdoc)?; rustdoc.args(bcx.rustdocflags_args(unit)); if !crate_version_flag_already_present(&rustdoc) { append_crate_version_flag(unit, &mut rustdoc); } let name = unit.pkg.name().to_string(); let build_script_outputs = Arc::clone(&cx.build_script_outputs); let package_id = unit.pkg.package_id(); let manifest_path = PathBuf::from(unit.pkg.manifest_path()); let target = Target::clone(&unit.target); let mut output_options = OutputOptions::new(cx, unit); let script_metadata = 
cx.find_build_script_metadata(unit); Ok(Work::new(move |state| { add_custom_flags( &mut rustdoc, &build_script_outputs.lock().unwrap(), script_metadata, )?; let crate_dir = doc_dir.join(&crate_name); if crate_dir.exists() { // Remove output from a previous build. This ensures that stale // files for removed items are removed. debug!("removing pre-existing doc directory {:?}", crate_dir); paths::remove_dir_all(crate_dir)?; } state.running(&rustdoc); rustdoc .exec_with_streaming( &mut |line| on_stdout_line(state, line, package_id, &target), &mut |line| { on_stderr_line( state, line, package_id, &manifest_path, &target, &mut output_options, ) }, false, ) .with_context(|| format!("could not document `{}`", name))?; Ok(()) })) } // The --crate-version flag could have already been passed in RUSTDOCFLAGS // or as an extra compiler argument for rustdoc fn crate_version_flag_already_present(rustdoc: &ProcessBuilder) -> bool { rustdoc.get_args().any(|flag| { flag.to_str() .map_or(false, |flag| flag.starts_with(RUSTDOC_CRATE_VERSION_FLAG)) }) } fn append_crate_version_flag(unit: &Unit, rustdoc: &mut ProcessBuilder) { rustdoc .arg(RUSTDOC_CRATE_VERSION_FLAG) .arg(unit.pkg.version().to_string()); } fn add_cap_lints(bcx: &BuildContext<'_, '_>, unit: &Unit, cmd: &mut ProcessBuilder) { // If this is an upstream dep we don't want warnings from, turn off all // lints. if !unit.show_warnings(bcx.config) { cmd.arg("--cap-lints").arg("allow"); // If this is an upstream dep but we *do* want warnings, make sure that they // don't fail compilation. } else if !unit.is_local() { cmd.arg("--cap-lints").arg("warn"); } } /// Forward -Zallow-features if it is set for cargo. fn add_allow_features(cx: &Context<'_, '_>, cmd: &mut ProcessBuilder) { if let Some(allow) = &cx.bcx.config.cli_unstable().allow_features { let mut arg = String::from("-Zallow-features="); let _ = iter_join_onto(&mut arg, allow, ","); cmd.arg(&arg); } } /// Add error-format flags to the command. 
/// /// Cargo always uses JSON output. This has several benefits, such as being /// easier to parse, handles changing formats (for replaying cached messages), /// ensures atomic output (so messages aren't interleaved), allows for /// intercepting messages like rmeta artifacts, etc. rustc includes a /// "rendered" field in the JSON message with the message properly formatted, /// which Cargo will extract and display to the user. fn add_error_format_and_color(cx: &Context<'_, '_>, cmd: &mut ProcessBuilder) { cmd.arg("--error-format=json"); let mut json = String::from("--json=diagnostic-rendered-ansi,artifacts,future-incompat"); match cx.bcx.build_config.message_format { MessageFormat::Short | MessageFormat::Json { short: true, .. } => { json.push_str(",diagnostic-short"); } _ => {} } cmd.arg(json); let config = cx.bcx.config; if config.nightly_features_allowed { match ( config.cli_unstable().terminal_width, config.shell().err_width().diagnostic_terminal_width(), ) { // Terminal width explicitly provided - only useful for testing. (Some(Some(width)), _) => { cmd.arg(format!("--diagnostic-width={}", width)); } // Terminal width was not explicitly provided but flag was provided - common case. (Some(None), Some(width)) => { cmd.arg(format!("--diagnostic-width={}", width)); } // User didn't opt-in. _ => (), } } } fn build_base_args( cx: &mut Context<'_, '_>, cmd: &mut ProcessBuilder, unit: &Unit, crate_types: &[CrateType], ) -> CargoResult<()> { assert!(!unit.mode.is_run_custom_build()); let bcx = cx.bcx; let Profile { ref opt_level, codegen_backend, codegen_units, debuginfo, debug_assertions, split_debuginfo, overflow_checks, rpath, ref panic, incremental, strip, rustflags, .. 
} = unit.profile.clone(); let test = unit.mode.is_any_test(); cmd.arg("--crate-name").arg(&unit.target.crate_name()); let edition = unit.target.edition(); edition.cmd_edition_arg(cmd); add_path_args(bcx.ws, unit, cmd); add_error_format_and_color(cx, cmd); add_allow_features(cx, cmd); let mut contains_dy_lib = false; if !test { for crate_type in crate_types { cmd.arg("--crate-type").arg(crate_type.as_str()); contains_dy_lib |= crate_type == &CrateType::Dylib; } } if unit.mode.is_check() { cmd.arg("--emit=dep-info,metadata"); } else if !unit.requires_upstream_objects() { // Always produce metadata files for rlib outputs. Metadata may be used // in this session for a pipelined compilation, or it may be used in a // future Cargo session as part of a pipelined compile. cmd.arg("--emit=dep-info,metadata,link"); } else { cmd.arg("--emit=dep-info,link"); } let prefer_dynamic = (unit.target.for_host() && !unit.target.is_custom_build()) || (contains_dy_lib && !cx.is_primary_package(unit)); if prefer_dynamic { cmd.arg("-C").arg("prefer-dynamic"); } if opt_level.as_str() != "0" { cmd.arg("-C").arg(&format!("opt-level={}", opt_level)); } if !rustflags.is_empty() { cmd.args(&rustflags); } if *panic != PanicStrategy::Unwind { cmd.arg("-C").arg(format!("panic={}", panic)); } cmd.args(<o_args(cx, unit)); // This is generally just an optimization on build time so if we don't pass // it then it's ok. As of the time of this writing it's a very new flag, so // we need to dynamically check if it's available. 
if cx.bcx.target_data.info(unit.kind).supports_split_debuginfo { if let Some(split) = split_debuginfo { cmd.arg("-C").arg(format!("split-debuginfo={}", split)); } } if let Some(backend) = codegen_backend { cmd.arg("-Z").arg(&format!("codegen-backend={}", backend)); } if let Some(n) = codegen_units { cmd.arg("-C").arg(&format!("codegen-units={}", n)); } if let Some(debuginfo) = debuginfo { cmd.arg("-C").arg(format!("debuginfo={}", debuginfo)); } if let Some(args) = cx.bcx.extra_args_for(unit) { cmd.args(args); } // `-C overflow-checks` is implied by the setting of `-C debug-assertions`, // so we only need to provide `-C overflow-checks` if it differs from // the value of `-C debug-assertions` we would provide. if opt_level.as_str() != "0" { if debug_assertions { cmd.args(&["-C", "debug-assertions=on"]); if !overflow_checks { cmd.args(&["-C", "overflow-checks=off"]); } } else if overflow_checks { cmd.args(&["-C", "overflow-checks=on"]); } } else if !debug_assertions { cmd.args(&["-C", "debug-assertions=off"]); if overflow_checks { cmd.args(&["-C", "overflow-checks=on"]); } } else if !overflow_checks { cmd.args(&["-C", "overflow-checks=off"]); } if test && unit.target.harness() { cmd.arg("--test"); // Cargo has historically never compiled `--test` binaries with // `panic=abort` because the `test` crate itself didn't support it. // Support is now upstream, however, but requires an unstable flag to be // passed when compiling the test. We require, in Cargo, an unstable // flag to pass to rustc, so register that here. Eventually this flag // will simply not be needed when the behavior is stabilized in the Rust // compiler itself. 
if *panic == PanicStrategy::Abort { cmd.arg("-Z").arg("panic-abort-tests"); } } else if test { cmd.arg("--cfg").arg("test"); } cmd.args(&features_args(unit)); cmd.args(&check_cfg_args(cx, unit)); let meta = cx.files().metadata(unit); cmd.arg("-C").arg(&format!("metadata={}", meta)); if cx.files().use_extra_filename(unit) { cmd.arg("-C").arg(&format!("extra-filename=-{}", meta)); } if rpath { cmd.arg("-C").arg("rpath"); } cmd.arg("--out-dir").arg(&cx.files().out_dir(unit)); fn opt(cmd: &mut ProcessBuilder, key: &str, prefix: &str, val: Option<&OsStr>) { if let Some(val) = val { let mut joined = OsString::from(prefix); joined.push(val); cmd.arg(key).arg(joined); } } if let CompileKind::Target(n) = unit.kind { cmd.arg("--target").arg(n.rustc_target()); } opt( cmd, "-C", "linker=", bcx.linker(unit.kind).as_ref().map(|s| s.as_ref()), ); if incremental { let dir = cx.files().layout(unit.kind).incremental().as_os_str(); opt(cmd, "-C", "incremental=", Some(dir)); } if strip != Strip::None { cmd.arg("-C").arg(format!("strip={}", strip)); } if unit.is_std { // -Zforce-unstable-if-unmarked prevents the accidental use of // unstable crates within the sysroot (such as "extern crate libc" or // any non-public crate in the sysroot). // // RUSTC_BOOTSTRAP allows unstable features on stable. cmd.arg("-Z") .arg("force-unstable-if-unmarked") .env("RUSTC_BOOTSTRAP", "1"); } // Add `CARGO_BIN_` environment variables for building tests. 
if unit.target.is_test() || unit.target.is_bench() { for bin_target in unit .pkg .manifest() .targets() .iter() .filter(|target| target.is_bin()) { let exe_path = cx .files() .bin_link_for_target(bin_target, unit.kind, cx.bcx)?; let name = bin_target .binary_filename() .unwrap_or(bin_target.name().to_string()); let key = format!("CARGO_BIN_EXE_{}", name); cmd.env(&key, exe_path); } } Ok(()) } /// All active features for the unit passed as --cfg fn features_args(unit: &Unit) -> Vec { let mut args = Vec::with_capacity(unit.features.len() * 2); for feat in &unit.features { args.push(OsString::from("--cfg")); args.push(OsString::from(format!("feature=\"{}\"", feat))); } args } /// Generate the --check-cfg arguments for the unit fn check_cfg_args(cx: &Context<'_, '_>, unit: &Unit) -> Vec { if let Some((features, well_known_names, well_known_values, _output)) = cx.bcx.config.cli_unstable().check_cfg { let mut args = Vec::with_capacity(unit.pkg.summary().features().len() * 2 + 4); args.push(OsString::from("-Zunstable-options")); if features { // This generate something like this: // - values(feature) // - values(feature, "foo", "bar") let mut arg = OsString::from("values(feature"); for (&feat, _) in unit.pkg.summary().features() { arg.push(", \""); arg.push(&feat); arg.push("\""); } arg.push(")"); args.push(OsString::from("--check-cfg")); args.push(arg); } if well_known_names { args.push(OsString::from("--check-cfg")); args.push(OsString::from("names()")); } if well_known_values { args.push(OsString::from("--check-cfg")); args.push(OsString::from("values()")); } args } else { Vec::new() } } fn lto_args(cx: &Context<'_, '_>, unit: &Unit) -> Vec { let mut result = Vec::new(); let mut push = |arg: &str| { result.push(OsString::from("-C")); result.push(OsString::from(arg)); }; match cx.lto[unit] { lto::Lto::Run(None) => push("lto"), lto::Lto::Run(Some(s)) => push(&format!("lto={}", s)), lto::Lto::Off => { push("lto=off"); push("embed-bitcode=no"); } lto::Lto::ObjectAndBitcode 
=> {} // this is rustc's default lto::Lto::OnlyBitcode => push("linker-plugin-lto"), lto::Lto::OnlyObject => push("embed-bitcode=no"), } result } fn build_deps_args( cmd: &mut ProcessBuilder, cx: &mut Context<'_, '_>, unit: &Unit, ) -> CargoResult<()> { let bcx = cx.bcx; cmd.arg("-L").arg(&{ let mut deps = OsString::from("dependency="); deps.push(cx.files().deps_dir(unit)); deps }); // Be sure that the host path is also listed. This'll ensure that proc macro // dependencies are correctly found (for reexported macros). if !unit.kind.is_host() { cmd.arg("-L").arg(&{ let mut deps = OsString::from("dependency="); deps.push(cx.files().host_deps()); deps }); } let deps = cx.unit_deps(unit); // If there is not one linkable target but should, rustc fails later // on if there is an `extern crate` for it. This may turn into a hard // error in the future (see PR #4797). if !deps .iter() .any(|dep| !dep.unit.mode.is_doc() && dep.unit.target.is_linkable()) { if let Some(dep) = deps.iter().find(|dep| { !dep.unit.mode.is_doc() && dep.unit.target.is_lib() && !dep.unit.artifact.is_true() }) { bcx.config.shell().warn(format!( "The package `{}` \ provides no linkable target. The compiler might raise an error while compiling \ `{}`. Consider adding 'dylib' or 'rlib' to key `crate-type` in `{}`'s \ Cargo.toml. This warning might turn into a hard error in the future.", dep.unit.target.crate_name(), unit.target.crate_name(), dep.unit.target.crate_name() ))?; } } let mut unstable_opts = false; for dep in deps { if dep.unit.mode.is_run_custom_build() { cmd.env("OUT_DIR", &cx.files().build_script_out_dir(&dep.unit)); } } for arg in extern_args(cx, unit, &mut unstable_opts)? { cmd.arg(arg); } for (var, env) in artifact::get_env(cx, deps)? 
{ cmd.env(&var, env); } // This will only be set if we're already using a feature // requiring nightly rust if unstable_opts { cmd.arg("-Z").arg("unstable-options"); } Ok(()) } /// Add custom flags from the output a of build-script to a `ProcessBuilder` fn add_custom_flags( cmd: &mut ProcessBuilder, build_script_outputs: &BuildScriptOutputs, metadata: Option, ) -> CargoResult<()> { if let Some(metadata) = metadata { if let Some(output) = build_script_outputs.get(metadata) { for cfg in output.cfgs.iter() { cmd.arg("--cfg").arg(cfg); } if !output.check_cfgs.is_empty() { cmd.arg("-Zunstable-options"); for check_cfg in &output.check_cfgs { cmd.arg("--check-cfg").arg(check_cfg); } } for &(ref name, ref value) in output.env.iter() { cmd.env(name, value); } } } Ok(()) } /// Generates a list of `--extern` arguments. pub fn extern_args( cx: &Context<'_, '_>, unit: &Unit, unstable_opts: &mut bool, ) -> CargoResult> { let mut result = Vec::new(); let deps = cx.unit_deps(unit); // Closure to add one dependency to `result`. let mut link_to = |dep: &UnitDep, extern_crate_name: InternedString, noprelude: bool| -> CargoResult<()> { let mut value = OsString::new(); let mut opts = Vec::new(); if unit .pkg .manifest() .unstable_features() .require(Feature::public_dependency()) .is_ok() && !dep.public { opts.push("priv"); *unstable_opts = true; } if noprelude { opts.push("noprelude"); *unstable_opts = true; } if !opts.is_empty() { value.push(opts.join(",")); value.push(":"); } value.push(extern_crate_name.as_str()); value.push("="); let mut pass = |file| { let mut value = value.clone(); value.push(file); result.push(OsString::from("--extern")); result.push(value); }; let outputs = cx.outputs(&dep.unit)?; if cx.only_requires_rmeta(unit, &dep.unit) || dep.unit.mode.is_check() { // Example: rlib dependency for an rlib, rmeta is all that is required. 
let output = outputs .iter() .find(|output| output.flavor == FileFlavor::Rmeta) .expect("failed to find rmeta dep for pipelined dep"); pass(&output.path); } else { // Example: a bin needs `rlib` for dependencies, it cannot use rmeta. for output in outputs.iter() { if output.flavor == FileFlavor::Linkable { pass(&output.path); } } } Ok(()) }; for dep in deps { if dep.unit.target.is_linkable() && !dep.unit.mode.is_doc() { link_to(dep, dep.extern_crate_name, dep.noprelude)?; } } if unit.target.proc_macro() { // Automatically import `proc_macro`. result.push(OsString::from("--extern")); result.push(OsString::from("proc_macro")); } Ok(result) } fn envify(s: &str) -> String { s.chars() .flat_map(|c| c.to_uppercase()) .map(|c| if c == '-' { '_' } else { c }) .collect() } struct OutputOptions { /// What format we're emitting from Cargo itself. format: MessageFormat, /// Whether or not to display messages in color. color: bool, /// Where to write the JSON messages to support playback later if the unit /// is fresh. The file is created lazily so that in the normal case, lots /// of empty files are not created. If this is None, the output will not /// be cached (such as when replaying cached messages). cache_cell: Option<(PathBuf, LazyCell)>, /// If `true`, display any diagnostics. /// Other types of JSON messages are processed regardless /// of the value of this flag. /// /// This is used primarily for cache replay. If you build with `-vv`, the /// cache will be filled with diagnostics from dependencies. When the /// cache is replayed without `-vv`, we don't want to show them. show_diagnostics: bool, warnings_seen: usize, errors_seen: usize, } impl OutputOptions { fn new(cx: &Context<'_, '_>, unit: &Unit) -> OutputOptions { let color = cx.bcx.config.shell().err_supports_color(); let path = cx.files().message_cache_path(unit); // Remove old cache, ignore ENOENT, which is the common case. 
drop(fs::remove_file(&path)); let cache_cell = Some((path, LazyCell::new())); OutputOptions { format: cx.bcx.build_config.message_format, color, cache_cell, show_diagnostics: true, warnings_seen: 0, errors_seen: 0, } } } fn on_stdout_line( state: &JobState<'_, '_>, line: &str, _package_id: PackageId, _target: &Target, ) -> CargoResult<()> { state.stdout(line.to_string())?; Ok(()) } fn on_stderr_line( state: &JobState<'_, '_>, line: &str, package_id: PackageId, manifest_path: &std::path::Path, target: &Target, options: &mut OutputOptions, ) -> CargoResult<()> { if on_stderr_line_inner(state, line, package_id, manifest_path, target, options)? { // Check if caching is enabled. if let Some((path, cell)) = &mut options.cache_cell { // Cache the output, which will be replayed later when Fresh. let f = cell.try_borrow_mut_with(|| paths::create(path))?; debug_assert!(!line.contains('\n')); f.write_all(line.as_bytes())?; f.write_all(&[b'\n'])?; } } Ok(()) } /// Returns true if the line should be cached. fn on_stderr_line_inner( state: &JobState<'_, '_>, line: &str, package_id: PackageId, manifest_path: &std::path::Path, target: &Target, options: &mut OutputOptions, ) -> CargoResult { // We primarily want to use this function to process JSON messages from // rustc. The compiler should always print one JSON message per line, and // otherwise it may have other output intermingled (think RUST_LOG or // something like that), so skip over everything that doesn't look like a // JSON message. if !line.starts_with('{') { state.stderr(line.to_string())?; return Ok(true); } let mut compiler_message: Box = match serde_json::from_str(line) { Ok(msg) => msg, // If the compiler produced a line that started with `{` but it wasn't // valid JSON, maybe it wasn't JSON in the first place! Forward it along // to stderr. 
Err(e) => { debug!("failed to parse json: {:?}", e); state.stderr(line.to_string())?; return Ok(true); } }; let count_diagnostic = |level, options: &mut OutputOptions| { if level == "warning" { options.warnings_seen += 1; } else if level == "error" { options.errors_seen += 1; } }; if let Ok(report) = serde_json::from_str::(compiler_message.get()) { for item in &report.future_incompat_report { count_diagnostic(&*item.diagnostic.level, options); } state.future_incompat_report(report.future_incompat_report); return Ok(true); } // Depending on what we're emitting from Cargo itself, we figure out what to // do with this JSON message. match options.format { // In the "human" output formats (human/short) or if diagnostic messages // from rustc aren't being included in the output of Cargo's JSON // messages then we extract the diagnostic (if present) here and handle // it ourselves. MessageFormat::Human | MessageFormat::Short | MessageFormat::Json { render_diagnostics: true, .. } => { #[derive(serde::Deserialize)] struct CompilerMessage { rendered: String, message: String, level: String, } if let Ok(mut msg) = serde_json::from_str::(compiler_message.get()) { if msg.message.starts_with("aborting due to") || msg.message.ends_with("warning emitted") || msg.message.ends_with("warnings emitted") { // Skip this line; we'll print our own summary at the end. return Ok(true); } // state.stderr will add a newline if msg.rendered.ends_with('\n') { msg.rendered.pop(); } let rendered = if options.color { msg.rendered } else { // Strip only fails if the the Writer fails, which is Cursor // on a Vec, which should never fail. strip_ansi_escapes::strip(&msg.rendered) .map(|v| String::from_utf8(v).expect("utf8")) .expect("strip should never fail") }; if options.show_diagnostics { count_diagnostic(&msg.level, options); state.emit_diag(msg.level, rendered)?; } return Ok(true); } } // Remove color information from the rendered string if color is not // enabled. 
Cargo always asks for ANSI colors from rustc. This allows // cached replay to enable/disable colors without re-invoking rustc. MessageFormat::Json { ansi: false, .. } => { #[derive(serde::Deserialize, serde::Serialize)] struct CompilerMessage { rendered: String, #[serde(flatten)] other: std::collections::BTreeMap, } if let Ok(mut error) = serde_json::from_str::(compiler_message.get()) { error.rendered = strip_ansi_escapes::strip(&error.rendered) .map(|v| String::from_utf8(v).expect("utf8")) .unwrap_or(error.rendered); let new_line = serde_json::to_string(&error)?; let new_msg: Box = serde_json::from_str(&new_line)?; compiler_message = new_msg; } } // If ansi colors are desired then we should be good to go! We can just // pass through this message as-is. MessageFormat::Json { ansi: true, .. } => {} } // We always tell rustc to emit messages about artifacts being produced. // These messages feed into pipelined compilation, as well as timing // information. // // Look for a matching directive and inform Cargo internally that a // metadata file has been produced. 
#[derive(serde::Deserialize)] struct ArtifactNotification { artifact: String, } if let Ok(artifact) = serde_json::from_str::(compiler_message.get()) { trace!("found directive from rustc: `{}`", artifact.artifact); if artifact.artifact.ends_with(".rmeta") { debug!("looks like metadata finished early!"); state.rmeta_produced(); } return Ok(false); } #[derive(serde::Deserialize)] struct JobserverNotification { jobserver_event: Event, } #[derive(Debug, serde::Deserialize)] enum Event { WillAcquire, Release, } if let Ok(JobserverNotification { jobserver_event }) = serde_json::from_str::(compiler_message.get()) { trace!( "found jobserver directive from rustc: `{:?}`", jobserver_event ); match jobserver_event { Event::WillAcquire => state.will_acquire(), Event::Release => state.release_token(), } return Ok(false); } // And failing all that above we should have a legitimate JSON diagnostic // from the compiler, so wrap it in an external Cargo JSON message // indicating which package it came from and then emit it. if !options.show_diagnostics { return Ok(true); } #[derive(serde::Deserialize)] struct CompilerMessage { level: String, } if let Ok(message) = serde_json::from_str::(compiler_message.get()) { count_diagnostic(&message.level, options); } let msg = machine_message::FromCompiler { package_id, manifest_path, target, message: compiler_message, } .to_json_string(); // Switch json lines from rustc/rustdoc that appear on stderr to stdout // instead. We want the stdout of Cargo to always be machine parseable as // stderr has our colorized human-readable messages. 
state.stdout(msg)?; Ok(true) } fn replay_output_cache( package_id: PackageId, manifest_path: PathBuf, target: &Target, path: PathBuf, format: MessageFormat, color: bool, show_diagnostics: bool, ) -> Work { let target = target.clone(); let mut options = OutputOptions { format, color, cache_cell: None, show_diagnostics, warnings_seen: 0, errors_seen: 0, }; Work::new(move |state| { if !path.exists() { // No cached output, probably didn't emit anything. return Ok(()); } // We sometimes have gigabytes of output from the compiler, so avoid // loading it all into memory at once, as that can cause OOM where // otherwise there would be none. let file = paths::open(&path)?; let mut reader = std::io::BufReader::new(file); let mut line = String::new(); loop { let length = reader.read_line(&mut line)?; if length == 0 { break; } let trimmed = line.trim_end_matches(&['\n', '\r'][..]); on_stderr_line( state, trimmed, package_id, &manifest_path, &target, &mut options, )?; line.clear(); } Ok(()) }) } cargo-0.66.0/src/cargo/core/compiler/output_depinfo.rs000066400000000000000000000142601432416201200227320ustar00rootroot00000000000000//! Module for generating dep-info files. //! //! `rustc` generates a dep-info file with a `.d` extension at the same //! location of the output artifacts as a result of using `--emit=dep-info`. //! This dep-info file is a Makefile-like syntax that indicates the //! dependencies needed to build the artifact. Example: //! //! ```makefile //! /path/to/target/debug/deps/cargo-b6219d178925203d: src/bin/main.rs src/bin/cargo/cli.rs # … etc. //! ``` //! //! The fingerprint module has code to parse these files, and stores them as //! binary format in the fingerprint directory. These are used to quickly scan //! for any changed files. //! //! On top of all this, Cargo emits its own dep-info files in the output //! directory. This is done for every "uplifted" artifact. These are intended //! 
to be used with external build systems so that they can detect if Cargo //! needs to be re-executed. It includes all the entries from the `rustc` //! dep-info file, and extends it with any `rerun-if-changed` entries from //! build scripts. It also includes sources from any path dependencies. Registry //! dependencies are not included under the assumption that changes to them can //! be detected via changes to `Cargo.lock`. use cargo_util::paths::normalize_path; use std::collections::{BTreeSet, HashSet}; use std::io::{BufWriter, Write}; use std::path::{Path, PathBuf}; use super::{fingerprint, Context, FileFlavor, Unit}; use crate::util::{internal, CargoResult}; use cargo_util::paths; use log::debug; fn render_filename>(path: P, basedir: Option<&str>) -> CargoResult { let path = path.as_ref(); if let Some(basedir) = basedir { let norm_path = normalize_path(path); let norm_basedir = normalize_path(basedir.as_ref()); match norm_path.strip_prefix(norm_basedir) { Ok(relpath) => wrap_path(relpath), _ => wrap_path(path), } } else { wrap_path(path) } } fn wrap_path(path: &Path) -> CargoResult { path.to_str() .ok_or_else(|| internal(format!("path `{:?}` not utf-8", path))) .map(|f| f.replace(" ", "\\ ")) } fn add_deps_for_unit( deps: &mut BTreeSet, cx: &mut Context<'_, '_>, unit: &Unit, visited: &mut HashSet, ) -> CargoResult<()> { if !visited.insert(unit.clone()) { return Ok(()); } // units representing the execution of a build script don't actually // generate a dep info file, so we just keep on going below if !unit.mode.is_run_custom_build() { // Add dependencies from rustc dep-info output (stored in fingerprint directory) let dep_info_loc = fingerprint::dep_info_loc(cx, unit); if let Some(paths) = fingerprint::parse_dep_info(unit.pkg.root(), cx.files().host_root(), &dep_info_loc)? 
{ for path in paths.files { deps.insert(path); } } else { debug!( "can't find dep_info for {:?} {}", unit.pkg.package_id(), unit.target ); return Err(internal("dep_info missing")); } } // Add rerun-if-changed dependencies if let Some(metadata) = cx.find_build_script_metadata(unit) { if let Some(output) = cx.build_script_outputs.lock().unwrap().get(metadata) { for path in &output.rerun_if_changed { // The paths we have saved from the unit are of arbitrary relativeness and may be // relative to the crate root of the dependency. let path = unit.pkg.root().join(path); deps.insert(path); } } } // Recursively traverse all transitive dependencies let unit_deps = Vec::from(cx.unit_deps(unit)); // Create vec due to mutable borrow. for dep in unit_deps { if dep.unit.is_local() { add_deps_for_unit(deps, cx, &dep.unit, visited)?; } } Ok(()) } /// Save a `.d` dep-info file for the given unit. /// /// This only saves files for uplifted artifacts. pub fn output_depinfo(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<()> { let bcx = cx.bcx; let mut deps = BTreeSet::new(); let mut visited = HashSet::new(); let success = add_deps_for_unit(&mut deps, cx, unit, &mut visited).is_ok(); let basedir_string; let basedir = match bcx.config.build_config()?.dep_info_basedir.clone() { Some(value) => { basedir_string = value .resolve_path(bcx.config) .as_os_str() .to_str() .ok_or_else(|| anyhow::format_err!("build.dep-info-basedir path not utf-8"))? .to_string(); Some(basedir_string.as_str()) } None => None, }; let deps = deps .iter() .map(|f| render_filename(f, basedir)) .collect::>>()?; for output in cx .outputs(unit)? 
.iter() .filter(|o| !matches!(o.flavor, FileFlavor::DebugInfo | FileFlavor::Auxiliary)) { if let Some(ref link_dst) = output.hardlink { let output_path = link_dst.with_extension("d"); if success { let target_fn = render_filename(link_dst, basedir)?; // If nothing changed don't recreate the file which could alter // its mtime if let Ok(previous) = fingerprint::parse_rustc_dep_info(&output_path) { if previous.files.iter().eq(deps.iter().map(Path::new)) { continue; } } // Otherwise write it all out let mut outfile = BufWriter::new(paths::create(output_path)?); write!(outfile, "{}:", target_fn)?; for dep in &deps { write!(outfile, " {}", dep)?; } writeln!(outfile)?; // dep-info generation failed, so delete output file. This will // usually cause the build system to always rerun the build // rule, which is correct if inefficient. } else if output_path.exists() { paths::remove_file(output_path)?; } } } Ok(()) } cargo-0.66.0/src/cargo/core/compiler/rustdoc.rs000066400000000000000000000142121432416201200213460ustar00rootroot00000000000000//! Utilities for building with rustdoc. use crate::core::compiler::context::Context; use crate::core::compiler::unit::Unit; use crate::core::compiler::CompileKind; use crate::sources::CRATES_IO_REGISTRY; use crate::util::errors::{internal, CargoResult}; use cargo_util::ProcessBuilder; use std::collections::HashMap; use std::fmt; use std::hash; use url::Url; const DOCS_RS_URL: &'static str = "https://docs.rs/"; /// Mode used for `std`. #[derive(Debug, Hash)] pub enum RustdocExternMode { /// Use a local `file://` URL. Local, /// Use a remote URL to (default). Remote, /// An arbitrary URL. 
Url(String), } impl From for RustdocExternMode { fn from(s: String) -> RustdocExternMode { match s.as_ref() { "local" => RustdocExternMode::Local, "remote" => RustdocExternMode::Remote, _ => RustdocExternMode::Url(s), } } } impl fmt::Display for RustdocExternMode { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { RustdocExternMode::Local => "local".fmt(f), RustdocExternMode::Remote => "remote".fmt(f), RustdocExternMode::Url(s) => s.fmt(f), } } } impl<'de> serde::de::Deserialize<'de> for RustdocExternMode { fn deserialize(deserializer: D) -> Result where D: serde::de::Deserializer<'de>, { let s = String::deserialize(deserializer)?; Ok(s.into()) } } #[derive(serde::Deserialize, Debug)] #[serde(default)] pub struct RustdocExternMap { #[serde(deserialize_with = "default_crates_io_to_docs_rs")] pub(crate) registries: HashMap, std: Option, } impl Default for RustdocExternMap { fn default() -> Self { Self { registries: HashMap::from([(CRATES_IO_REGISTRY.into(), DOCS_RS_URL.into())]), std: None, } } } fn default_crates_io_to_docs_rs<'de, D: serde::Deserializer<'de>>( de: D, ) -> Result, D::Error> { use serde::Deserialize; let mut registries = HashMap::deserialize(de)?; if !registries.contains_key(CRATES_IO_REGISTRY) { registries.insert(CRATES_IO_REGISTRY.into(), DOCS_RS_URL.into()); } Ok(registries) } impl hash::Hash for RustdocExternMap { fn hash(&self, into: &mut H) { self.std.hash(into); for (key, value) in &self.registries { key.hash(into); value.hash(into); } } } pub fn add_root_urls( cx: &Context<'_, '_>, unit: &Unit, rustdoc: &mut ProcessBuilder, ) -> CargoResult<()> { let config = cx.bcx.config; if !config.cli_unstable().rustdoc_map { log::debug!("`doc.extern-map` ignored, requires -Zrustdoc-map flag"); return Ok(()); } let map = config.doc_extern_map()?; let mut unstable_opts = false; // Collect mapping of registry name -> index url. 
let name2url: HashMap<&String, Url> = map .registries .keys() .filter_map(|name| { if let Ok(index_url) = config.get_registry_index(name) { Some((name, index_url)) } else { log::warn!( "`doc.extern-map.{}` specifies a registry that is not defined", name ); None } }) .collect(); for dep in cx.unit_deps(unit) { if dep.unit.target.is_linkable() && !dep.unit.mode.is_doc() { for (registry, location) in &map.registries { let sid = dep.unit.pkg.package_id().source_id(); let matches_registry = || -> bool { if !sid.is_registry() { return false; } if sid.is_default_registry() { return registry == CRATES_IO_REGISTRY; } if let Some(index_url) = name2url.get(registry) { return index_url == sid.url(); } false }; if matches_registry() { let mut url = location.clone(); if !url.contains("{pkg_name}") && !url.contains("{version}") { if !url.ends_with('/') { url.push('/'); } url.push_str("{pkg_name}/{version}/"); } let url = url .replace("{pkg_name}", &dep.unit.pkg.name()) .replace("{version}", &dep.unit.pkg.version().to_string()); rustdoc.arg("--extern-html-root-url"); rustdoc.arg(format!("{}={}", dep.unit.target.crate_name(), url)); unstable_opts = true; } } } } let std_url = match &map.std { None | Some(RustdocExternMode::Remote) => None, Some(RustdocExternMode::Local) => { let sysroot = &cx.bcx.target_data.info(CompileKind::Host).sysroot; let html_root = sysroot.join("share").join("doc").join("rust").join("html"); if html_root.exists() { let url = Url::from_file_path(&html_root).map_err(|()| { internal(format!( "`{}` failed to convert to URL", html_root.display() )) })?; Some(url.to_string()) } else { log::warn!( "`doc.extern-map.std` is \"local\", but local docs don't appear to exist at {}", html_root.display() ); None } } Some(RustdocExternMode::Url(s)) => Some(s.to_string()), }; if let Some(url) = std_url { for name in &["std", "core", "alloc", "proc_macro"] { rustdoc.arg("--extern-html-root-url"); rustdoc.arg(format!("{}={}", name, url)); unstable_opts = true; } } if 
unstable_opts { rustdoc.arg("-Zunstable-options"); } Ok(()) } cargo-0.66.0/src/cargo/core/compiler/standard_lib.rs000066400000000000000000000221461432416201200223160ustar00rootroot00000000000000//! Code for building the standard library. use crate::core::compiler::unit_dependencies::IsArtifact; use crate::core::compiler::UnitInterner; use crate::core::compiler::{CompileKind, CompileMode, RustcTargetData, Unit}; use crate::core::profiles::{Profiles, UnitFor}; use crate::core::resolver::features::{CliFeatures, FeaturesFor, ResolvedFeatures}; use crate::core::resolver::HasDevUnits; use crate::core::{Dependency, PackageId, PackageSet, Resolve, SourceId, Workspace}; use crate::ops::{self, Packages}; use crate::util::errors::CargoResult; use crate::Config; use std::collections::{HashMap, HashSet}; use std::env; use std::path::PathBuf; use super::BuildConfig; /// Parse the `-Zbuild-std` flag. pub fn parse_unstable_flag(value: Option<&str>) -> Vec { // This is a temporary hack until there is a more principled way to // declare dependencies in Cargo.toml. let value = value.unwrap_or("std"); let mut crates: HashSet<&str> = value.split(',').collect(); if crates.contains("std") { crates.insert("core"); crates.insert("alloc"); crates.insert("proc_macro"); crates.insert("panic_unwind"); crates.insert("compiler_builtins"); } else if crates.contains("core") { crates.insert("compiler_builtins"); } crates.into_iter().map(|s| s.to_string()).collect() } pub(crate) fn std_crates(config: &Config, units: Option<&[Unit]>) -> Option> { let crates = config.cli_unstable().build_std.as_ref()?.clone(); // Only build libtest if it looks like it is needed. let mut crates = crates.clone(); // If we know what units we're building, we can filter for libtest depending on the jobs. 
if let Some(units) = units { if units .iter() .any(|unit| unit.mode.is_rustc_test() && unit.target.harness()) { // Only build libtest when libstd is built (libtest depends on libstd) if crates.iter().any(|c| c == "std") && !crates.iter().any(|c| c == "test") { crates.push("test".to_string()); } } } else { // We don't know what jobs are going to be run, so download libtest just in case. if !crates.iter().any(|c| c == "test") { crates.push("test".to_string()) } } Some(crates) } /// Resolve the standard library dependencies. pub fn resolve_std<'cfg>( ws: &Workspace<'cfg>, target_data: &RustcTargetData<'cfg>, build_config: &BuildConfig, crates: &[String], ) -> CargoResult<(PackageSet<'cfg>, Resolve, ResolvedFeatures)> { if build_config.build_plan { ws.config() .shell() .warn("-Zbuild-std does not currently fully support --build-plan")?; } let src_path = detect_sysroot_src_path(target_data)?; let to_patch = [ "rustc-std-workspace-core", "rustc-std-workspace-alloc", "rustc-std-workspace-std", ]; let patches = to_patch .iter() .map(|&name| { let source_path = SourceId::for_path(&src_path.join("library").join(name))?; let dep = Dependency::parse(name, None, source_path)?; Ok(dep) }) .collect::>>()?; let crates_io_url = crate::sources::CRATES_IO_INDEX.parse().unwrap(); let patch = HashMap::from([(crates_io_url, patches)]); let members = vec![ String::from("library/std"), String::from("library/core"), String::from("library/alloc"), String::from("library/test"), ]; let ws_config = crate::core::WorkspaceConfig::Root(crate::core::WorkspaceRootConfig::new( &src_path, &Some(members), /*default_members*/ &None, /*exclude*/ &None, /*inheritable*/ &None, /*custom_metadata*/ &None, )); let virtual_manifest = crate::core::VirtualManifest::new( /*replace*/ Vec::new(), patch, ws_config, /*profiles*/ None, crate::core::Features::default(), None, ); let config = ws.config(); // This is a delicate hack. 
In order for features to resolve correctly, // the resolver needs to run a specific "current" member of the workspace. // Thus, in order to set the features for `std`, we need to set `libtest` // to be the "current" member. `libtest` is the root, and all other // standard library crates are dependencies from there. Since none of the // other crates need to alter their features, this should be fine, for // now. Perhaps in the future features will be decoupled from the resolver // and it will be easier to control feature selection. let current_manifest = src_path.join("library/test/Cargo.toml"); // TODO: Consider doing something to enforce --locked? Or to prevent the // lock file from being written, such as setting ephemeral. let mut std_ws = Workspace::new_virtual(src_path, current_manifest, virtual_manifest, config)?; // Don't require optional dependencies in this workspace, aka std's own // `[dev-dependencies]`. No need for us to generate a `Resolve` which has // those included because we'll never use them anyway. std_ws.set_require_optional_deps(false); // `test` is not in the default set because it is optional, but it needs // to be part of the resolve in case we do need it. 
let mut spec_pkgs = Vec::from(crates); spec_pkgs.push("test".to_string()); let spec = Packages::Packages(spec_pkgs); let specs = spec.to_package_id_specs(&std_ws)?; let features = match &config.cli_unstable().build_std_features { Some(list) => list.clone(), None => vec![ "panic-unwind".to_string(), "backtrace".to_string(), "default".to_string(), ], }; let cli_features = CliFeatures::from_command_line( &features, /*all_features*/ false, /*uses_default_features*/ false, )?; let resolve = ops::resolve_ws_with_opts( &std_ws, target_data, &build_config.requested_kinds, &cli_features, &specs, HasDevUnits::No, crate::core::resolver::features::ForceAllTargets::No, )?; Ok(( resolve.pkg_set, resolve.targeted_resolve, resolve.resolved_features, )) } /// Generate a list of root `Unit`s for the standard library. /// /// The given slice of crate names is the root set. pub fn generate_std_roots( crates: &[String], std_resolve: &Resolve, std_features: &ResolvedFeatures, kinds: &[CompileKind], package_set: &PackageSet<'_>, interner: &UnitInterner, profiles: &Profiles, ) -> CargoResult>> { // Generate the root Units for the standard library. let std_ids = crates .iter() .map(|crate_name| std_resolve.query(crate_name)) .collect::>>()?; // Convert PackageId to Package. let std_pkgs = package_set.get_many(std_ids)?; // Generate a map of Units for each kind requested. let mut ret = HashMap::new(); for pkg in std_pkgs { let lib = pkg .targets() .iter() .find(|t| t.is_lib()) .expect("std has a lib"); // I don't think we need to bother with Check here, the difference // in time is minimal, and the difference in caching is // significant. 
let mode = CompileMode::Build; let features = std_features.activated_features( pkg.package_id(), FeaturesFor::NormalOrDevOrArtifactTarget(None), ); for kind in kinds { let list = ret.entry(*kind).or_insert_with(Vec::new); let unit_for = UnitFor::new_normal(*kind); let profile = profiles.get_profile( pkg.package_id(), /*is_member*/ false, /*is_local*/ false, unit_for, *kind, ); list.push(interner.intern( pkg, lib, profile, *kind, mode, features.clone(), /*is_std*/ true, /*dep_hash*/ 0, IsArtifact::No, )); } } Ok(ret) } fn detect_sysroot_src_path(target_data: &RustcTargetData<'_>) -> CargoResult { if let Some(s) = env::var_os("__CARGO_TESTS_ONLY_SRC_ROOT") { return Ok(s.into()); } // NOTE: This is temporary until we figure out how to acquire the source. let src_path = target_data .info(CompileKind::Host) .sysroot .join("lib") .join("rustlib") .join("src") .join("rust"); let lock = src_path.join("Cargo.lock"); if !lock.exists() { let msg = format!( "{:?} does not exist, unable to build with the standard \ library, try:\n rustup component add rust-src", lock ); match env::var("RUSTUP_TOOLCHAIN") { Ok(rustup_toolchain) => { anyhow::bail!("{} --toolchain {}", msg, rustup_toolchain); } Err(_) => { anyhow::bail!(msg); } } } Ok(src_path) } cargo-0.66.0/src/cargo/core/compiler/timings.js000066400000000000000000000332311432416201200213270ustar00rootroot00000000000000// Position of the vertical axis. const X_LINE = 50; // General-use margin size. const MARGIN = 5; // Position of the horizontal axis, relative to the bottom. const Y_LINE = 35; // Minimum distance between time tick labels. const MIN_TICK_DIST = 50; // Radius for rounded rectangle corners. const RADIUS = 3; // Height of unit boxes. const BOX_HEIGHT = 25; // Distance between Y tick marks on the unit graph. const Y_TICK_DIST = BOX_HEIGHT + 2; // Rects used for mouseover detection. // Objects of {x, y, x2, y2, i} where `i` is the index into UNIT_DATA. 
let HIT_BOXES = []; // Index into UNIT_DATA of the last unit hovered over by mouse. let LAST_HOVER = null; // Key is unit index, value is {x, y, width, rmeta_x} of the box. let UNIT_COORDS = {}; // Map of unit index to the index it was unlocked by. let REVERSE_UNIT_DEPS = {}; let REVERSE_UNIT_RMETA_DEPS = {}; for (let n=0; n unit.duration >= min_time); const graph_height = Y_TICK_DIST * units.length; const {ctx, graph_width, canvas_width, canvas_height, px_per_sec} = draw_graph_axes('pipeline-graph', graph_height); const container = document.getElementById('pipeline-container'); container.style.width = canvas_width; container.style.height = canvas_height; // Canvas for hover highlights. This is a separate layer to improve performance. const linectx = setup_canvas('pipeline-graph-lines', canvas_width, canvas_height); linectx.clearRect(0, 0, canvas_width, canvas_height); // Draw Y tick marks. for (let n=1; n 1) { ctx.beginPath(); ctx.fillStyle = cpuFillStyle; let bottomLeft = coord(CPU_USAGE[0][0], 0); ctx.moveTo(bottomLeft.x, bottomLeft.y); for (let i=0; i < CPU_USAGE.length; i++) { let [time, usage] = CPU_USAGE[i]; let {x, y} = coord(time, usage / 100.0 * max_v); ctx.lineTo(x, y); } let bottomRight = coord(CPU_USAGE[CPU_USAGE.length - 1][0], 0); ctx.lineTo(bottomRight.x, bottomRight.y); ctx.fill(); } function draw_line(style, key) { let first = CONCURRENCY_DATA[0]; let last = coord(first.t, key(first)); ctx.strokeStyle = style; ctx.beginPath(); ctx.moveTo(last.x, last.y); for (let i=1; i 100) { throw Error("tick loop too long"); } count += 1; if (max_value <= max_ticks * step) { break; } step += 10; } } const tick_dist = px_per_v * step; const num_ticks = Math.floor(max_value / step); return {step, tick_dist, num_ticks}; } function codegen_time(unit) { if (unit.rmeta_time == null) { return null; } let ctime = unit.duration - unit.rmeta_time; return [unit.rmeta_time, ctime]; } function roundedRect(ctx, x, y, width, height, r) { r = Math.min(r, width, height); 
ctx.beginPath(); ctx.moveTo(x+r, y); ctx.lineTo(x+width-r, y); ctx.arc(x+width-r, y+r, r, 3*Math.PI/2, 0); ctx.lineTo(x+width, y+height-r); ctx.arc(x+width-r, y+height-r, r, 0, Math.PI/2); ctx.lineTo(x+r, y+height); ctx.arc(x+r, y+height-r, r, Math.PI/2, Math.PI); ctx.lineTo(x, y-r); ctx.arc(x+r, y+r, r, Math.PI, 3*Math.PI/2); ctx.closePath(); } function pipeline_mouse_hit(event) { // This brute-force method can be optimized if needed. for (let box of HIT_BOXES) { if (event.offsetX >= box.x && event.offsetX <= box.x2 && event.offsetY >= box.y && event.offsetY <= box.y2) { return box; } } } function pipeline_mousemove(event) { // Highlight dependency lines on mouse hover. let box = pipeline_mouse_hit(event); if (box) { if (box.i != LAST_HOVER) { LAST_HOVER = box.i; let g = document.getElementById('pipeline-graph-lines'); let ctx = g.getContext('2d'); ctx.clearRect(0, 0, g.width, g.height); ctx.save(); ctx.translate(X_LINE, MARGIN); ctx.lineWidth = 2; draw_dep_lines(ctx, box.i, true); if (box.i in REVERSE_UNIT_DEPS) { const dep_unit = REVERSE_UNIT_DEPS[box.i]; if (dep_unit in UNIT_COORDS) { const {x, y, rmeta_x} = UNIT_COORDS[dep_unit]; draw_one_dep_line(ctx, x, y, box.i, true); } } if (box.i in REVERSE_UNIT_RMETA_DEPS) { const dep_unit = REVERSE_UNIT_RMETA_DEPS[box.i]; if (dep_unit in UNIT_COORDS) { const {x, y, rmeta_x} = UNIT_COORDS[dep_unit]; draw_one_dep_line(ctx, rmeta_x, y, box.i, true); } } ctx.restore(); } } } render_pipeline_graph(); render_timing_graph(); // Set up and handle controls. 
{ const range = document.getElementById('min-unit-time'); const time_output = document.getElementById('min-unit-time-output'); time_output.innerHTML = `${range.value}s`; range.oninput = event => { time_output.innerHTML = `${range.value}s`; render_pipeline_graph(); }; const scale = document.getElementById('scale'); const scale_output = document.getElementById('scale-output'); scale_output.innerHTML = `${scale.value}`; scale.oninput = event => { scale_output.innerHTML = `${scale.value}`; render_pipeline_graph(); render_timing_graph(); }; } cargo-0.66.0/src/cargo/core/compiler/timings.rs000066400000000000000000000546261432416201200213520ustar00rootroot00000000000000//! Timing tracking. //! //! This module implements some simple tracking information for timing of how //! long it takes for different units to compile. use super::{CompileMode, Unit}; use crate::core::compiler::job_queue::JobId; use crate::core::compiler::{BuildContext, Context, TimingOutput}; use crate::core::PackageId; use crate::util::cpu::State; use crate::util::machine_message::{self, Message}; use crate::util::{CargoResult, Config}; use anyhow::Context as _; use cargo_util::paths; use std::collections::HashMap; use std::io::{BufWriter, Write}; use std::thread::available_parallelism; use std::time::{Duration, Instant, SystemTime}; pub struct Timings<'cfg> { config: &'cfg Config, /// Whether or not timings should be captured. enabled: bool, /// If true, saves an HTML report to disk. report_html: bool, /// If true, emits JSON information with timing information. report_json: bool, /// When Cargo started. start: Instant, /// A rendered string of when compilation started. start_str: String, /// A summary of the root units. /// /// Tuples of `(package_description, target_descriptions)`. root_targets: Vec<(String, Vec)>, /// The build profile. profile: String, /// Total number of fresh units. total_fresh: u32, /// Total number of dirty units. total_dirty: u32, /// Time tracking for each individual unit. 
unit_times: Vec, /// Units that are in the process of being built. /// When they finished, they are moved to `unit_times`. active: HashMap, /// Concurrency-tracking information. This is periodically updated while /// compilation progresses. concurrency: Vec, /// Last recorded state of the system's CPUs and when it happened last_cpu_state: Option, last_cpu_recording: Instant, /// Recorded CPU states, stored as tuples. First element is when the /// recording was taken and second element is percentage usage of the /// system. cpu_usage: Vec<(f64, f64)>, } /// Tracking information for an individual unit. struct UnitTime { unit: Unit, /// A string describing the cargo target. target: String, /// The time when this unit started as an offset in seconds from `Timings::start`. start: f64, /// Total time to build this unit in seconds. duration: f64, /// The time when the `.rmeta` file was generated, an offset in seconds /// from `start`. rmeta_time: Option, /// Reverse deps that are freed to run after this unit finished. unlocked_units: Vec, /// Same as `unlocked_units`, but unlocked by rmeta. unlocked_rmeta_units: Vec, } /// Periodic concurrency tracking information. #[derive(serde::Serialize)] struct Concurrency { /// Time as an offset in seconds from `Timings::start`. t: f64, /// Number of units currently running. active: usize, /// Number of units that could run, but are waiting for a jobserver token. waiting: usize, /// Number of units that are not yet ready, because they are waiting for /// dependencies to finish. inactive: usize, /// Number of rustc "extra" threads -- i.e., how many tokens have been /// provided across all current rustc instances that are not the main thread /// tokens. 
rustc_parallelism: usize, } impl<'cfg> Timings<'cfg> { pub fn new(bcx: &BuildContext<'_, 'cfg>, root_units: &[Unit]) -> Timings<'cfg> { let has_report = |what| bcx.build_config.timing_outputs.contains(&what); let report_html = has_report(TimingOutput::Html); let report_json = has_report(TimingOutput::Json); let enabled = report_html | report_json; let mut root_map: HashMap> = HashMap::new(); for unit in root_units { let target_desc = unit.target.description_named(); root_map .entry(unit.pkg.package_id()) .or_default() .push(target_desc); } let root_targets = root_map .into_iter() .map(|(pkg_id, targets)| { let pkg_desc = format!("{} {}", pkg_id.name(), pkg_id.version()); (pkg_desc, targets) }) .collect(); let start_str = humantime::format_rfc3339_seconds(SystemTime::now()).to_string(); let profile = bcx.build_config.requested_profile.to_string(); let last_cpu_state = if enabled { match State::current() { Ok(state) => Some(state), Err(e) => { log::info!("failed to get CPU state, CPU tracking disabled: {:?}", e); None } } } else { None }; Timings { config: bcx.config, enabled, report_html, report_json, start: bcx.config.creation_time(), start_str, root_targets, profile, total_fresh: 0, total_dirty: 0, unit_times: Vec::new(), active: HashMap::new(), concurrency: Vec::new(), last_cpu_state, last_cpu_recording: Instant::now(), cpu_usage: Vec::new(), } } /// Mark that a unit has started running. pub fn unit_start(&mut self, id: JobId, unit: Unit) { if !self.enabled { return; } let mut target = if unit.target.is_lib() && unit.mode == CompileMode::Build { // Special case for brevity, since most dependencies hit // this path. 
"".to_string() } else { format!(" {}", unit.target.description_named()) }; match unit.mode { CompileMode::Test => target.push_str(" (test)"), CompileMode::Build => {} CompileMode::Check { test: true } => target.push_str(" (check-test)"), CompileMode::Check { test: false } => target.push_str(" (check)"), CompileMode::Bench => target.push_str(" (bench)"), CompileMode::Doc { .. } => target.push_str(" (doc)"), CompileMode::Doctest => target.push_str(" (doc test)"), CompileMode::Docscrape => target.push_str(" (doc scrape)"), CompileMode::RunCustomBuild => target.push_str(" (run)"), } let unit_time = UnitTime { unit, target, start: self.start.elapsed().as_secs_f64(), duration: 0.0, rmeta_time: None, unlocked_units: Vec::new(), unlocked_rmeta_units: Vec::new(), }; assert!(self.active.insert(id, unit_time).is_none()); } /// Mark that the `.rmeta` file as generated. pub fn unit_rmeta_finished(&mut self, id: JobId, unlocked: Vec<&Unit>) { if !self.enabled { return; } // `id` may not always be active. "fresh" units unconditionally // generate `Message::Finish`, but this active map only tracks dirty // units. let unit_time = match self.active.get_mut(&id) { Some(ut) => ut, None => return, }; let t = self.start.elapsed().as_secs_f64(); unit_time.rmeta_time = Some(t - unit_time.start); assert!(unit_time.unlocked_rmeta_units.is_empty()); unit_time .unlocked_rmeta_units .extend(unlocked.iter().cloned().cloned()); } /// Mark that a unit has finished running. pub fn unit_finished(&mut self, id: JobId, unlocked: Vec<&Unit>) { if !self.enabled { return; } // See note above in `unit_rmeta_finished`, this may not always be active. 
let mut unit_time = match self.active.remove(&id) { Some(ut) => ut, None => return, }; let t = self.start.elapsed().as_secs_f64(); unit_time.duration = t - unit_time.start; assert!(unit_time.unlocked_units.is_empty()); unit_time .unlocked_units .extend(unlocked.iter().cloned().cloned()); if self.report_json { let msg = machine_message::TimingInfo { package_id: unit_time.unit.pkg.package_id(), target: &unit_time.unit.target, mode: unit_time.unit.mode, duration: unit_time.duration, rmeta_time: unit_time.rmeta_time, } .to_json_string(); crate::drop_println!(self.config, "{}", msg); } self.unit_times.push(unit_time); } /// This is called periodically to mark the concurrency of internal structures. pub fn mark_concurrency( &mut self, active: usize, waiting: usize, inactive: usize, rustc_parallelism: usize, ) { if !self.enabled { return; } let c = Concurrency { t: self.start.elapsed().as_secs_f64(), active, waiting, inactive, rustc_parallelism, }; self.concurrency.push(c); } /// Mark that a fresh unit was encountered. pub fn add_fresh(&mut self) { self.total_fresh += 1; } /// Mark that a dirty unit was encountered. pub fn add_dirty(&mut self) { self.total_dirty += 1; } /// Take a sample of CPU usage pub fn record_cpu(&mut self) { if !self.enabled { return; } let prev = match &mut self.last_cpu_state { Some(state) => state, None => return, }; // Don't take samples too too frequently, even if requested. let now = Instant::now(); if self.last_cpu_recording.elapsed() < Duration::from_millis(100) { return; } let current = match State::current() { Ok(s) => s, Err(e) => { log::info!("failed to get CPU state: {:?}", e); return; } }; let pct_idle = current.idle_since(prev); *prev = current; self.last_cpu_recording = now; let dur = now.duration_since(self.start).as_secs_f64(); self.cpu_usage.push((dur, 100.0 - pct_idle)); } /// Call this when all units are finished. 
pub fn finished( &mut self, cx: &Context<'_, '_>, error: &Option, ) -> CargoResult<()> { if !self.enabled { return Ok(()); } self.mark_concurrency(0, 0, 0, 0); self.unit_times .sort_unstable_by(|a, b| a.start.partial_cmp(&b.start).unwrap()); if self.report_html { self.report_html(cx, error) .with_context(|| "failed to save timing report")?; } Ok(()) } /// Save HTML report to disk. fn report_html(&self, cx: &Context<'_, '_>, error: &Option) -> CargoResult<()> { let duration = self.start.elapsed().as_secs_f64(); let timestamp = self.start_str.replace(&['-', ':'][..], ""); let timings_path = cx.files().host_root().join("cargo-timings"); paths::create_dir_all(&timings_path)?; let filename = timings_path.join(format!("cargo-timing-{}.html", timestamp)); let mut f = BufWriter::new(paths::create(&filename)?); let roots: Vec<&str> = self .root_targets .iter() .map(|(name, _targets)| name.as_str()) .collect(); f.write_all(HTML_TMPL.replace("{ROOTS}", &roots.join(", ")).as_bytes())?; self.write_summary_table(&mut f, duration, cx.bcx, error)?; f.write_all(HTML_CANVAS.as_bytes())?; self.write_unit_table(&mut f)?; // It helps with pixel alignment to use whole numbers. writeln!( f, "\n\ \n\ \n\ ", include_str!("timings.js") )?; drop(f); let msg = format!( "report saved to {}", std::env::current_dir() .unwrap_or_default() .join(&filename) .display() ); let unstamped_filename = timings_path.join("cargo-timing.html"); paths::link_or_copy(&filename, &unstamped_filename)?; self.config .shell() .status_with_color("Timing", msg, termcolor::Color::Cyan)?; Ok(()) } /// Render the summary table. fn write_summary_table( &self, f: &mut impl Write, duration: f64, bcx: &BuildContext<'_, '_>, error: &Option, ) -> CargoResult<()> { let targets: Vec = self .root_targets .iter() .map(|(name, targets)| format!("{} ({})", name, targets.join(", "))) .collect(); let targets = targets.join("
"); let time_human = if duration > 60.0 { format!(" ({}m {:.1}s)", duration as u32 / 60, duration % 60.0) } else { "".to_string() }; let total_time = format!("{:.1}s{}", duration, time_human); let max_concurrency = self.concurrency.iter().map(|c| c.active).max().unwrap(); let num_cpus = available_parallelism() .map(|x| x.get().to_string()) .unwrap_or_else(|_| "n/a".into()); let max_rustc_concurrency = self .concurrency .iter() .map(|c| c.rustc_parallelism) .max() .unwrap(); let rustc_info = render_rustc_info(bcx); let error_msg = match error { Some(e) => format!( r#"\ Error:{} "#, e ), None => "".to_string(), }; write!( f, r#" {}
Targets:{}
Profile:{}
Fresh units:{}
Dirty units:{}
Total units:{}
Max concurrency:{} (jobs={} ncpu={})
Build start:{}
Total time:{}
rustc:{}
Max (global) rustc threads concurrency:{}
"#, targets, self.profile, self.total_fresh, self.total_dirty, self.total_fresh + self.total_dirty, max_concurrency, bcx.jobs(), num_cpus, self.start_str, total_time, rustc_info, max_rustc_concurrency, error_msg, )?; Ok(()) } fn write_js_data(&self, f: &mut impl Write) -> CargoResult<()> { // Create a map to link indices of unlocked units. let unit_map: HashMap = self .unit_times .iter() .enumerate() .map(|(i, ut)| (ut.unit.clone(), i)) .collect(); #[derive(serde::Serialize)] struct UnitData { i: usize, name: String, version: String, mode: String, target: String, start: f64, duration: f64, rmeta_time: Option, unlocked_units: Vec, unlocked_rmeta_units: Vec, } let round = |x: f64| (x * 100.0).round() / 100.0; let unit_data: Vec = self .unit_times .iter() .enumerate() .map(|(i, ut)| { let mode = if ut.unit.mode.is_run_custom_build() { "run-custom-build" } else { "todo" } .to_string(); // These filter on the unlocked units because not all unlocked // units are actually "built". For example, Doctest mode units // don't actually generate artifacts. let unlocked_units: Vec = ut .unlocked_units .iter() .filter_map(|unit| unit_map.get(unit).copied()) .collect(); let unlocked_rmeta_units: Vec = ut .unlocked_rmeta_units .iter() .filter_map(|unit| unit_map.get(unit).copied()) .collect(); UnitData { i, name: ut.unit.pkg.name().to_string(), version: ut.unit.pkg.version().to_string(), mode, target: ut.target.clone(), start: round(ut.start), duration: round(ut.duration), rmeta_time: ut.rmeta_time.map(round), unlocked_units, unlocked_rmeta_units, } }) .collect(); writeln!( f, "const UNIT_DATA = {};", serde_json::to_string_pretty(&unit_data)? )?; writeln!( f, "const CONCURRENCY_DATA = {};", serde_json::to_string_pretty(&self.concurrency)? )?; writeln!( f, "const CPU_USAGE = {};", serde_json::to_string_pretty(&self.cpu_usage)? )?; Ok(()) } /// Render the table of all units. 
fn write_unit_table(&self, f: &mut impl Write) -> CargoResult<()> { write!( f, r#" "# )?; let mut units: Vec<&UnitTime> = self.unit_times.iter().collect(); units.sort_unstable_by(|a, b| b.duration.partial_cmp(&a.duration).unwrap()); for (i, unit) in units.iter().enumerate() { let codegen = match unit.codegen_time() { None => "".to_string(), Some((_rt, ctime, cent)) => format!("{:.1}s ({:.0}%)", ctime, cent), }; let features = unit.unit.features.join(", "); write!( f, r#" "#, i + 1, unit.name_ver(), unit.target, unit.duration, codegen, features, )?; } write!(f, "\n
Unit Total Codegen Features
{}. {}{} {:.1}s {} {}
\n")?; Ok(()) } } impl UnitTime { /// Returns the codegen time as (rmeta_time, codegen_time, percent of total) fn codegen_time(&self) -> Option<(f64, f64, f64)> { self.rmeta_time.map(|rmeta_time| { let ctime = self.duration - rmeta_time; let cent = (ctime / self.duration) * 100.0; (rmeta_time, ctime, cent) }) } fn name_ver(&self) -> String { format!("{} v{}", self.unit.pkg.name(), self.unit.pkg.version()) } } fn render_rustc_info(bcx: &BuildContext<'_, '_>) -> String { let version = bcx .rustc() .verbose_version .lines() .next() .expect("rustc version"); let requested_target = bcx .build_config .requested_kinds .iter() .map(|kind| bcx.target_data.short_name(kind)) .collect::>() .join(", "); format!( "{}
Host: {}
Target: {}", version, bcx.rustc().host, requested_target ) } static HTML_TMPL: &str = r#" Cargo Build Timings β€” {ROOTS}

Cargo Build Timings

See Documentation "#; static HTML_CANVAS: &str = r#"
"#; cargo-0.66.0/src/cargo/core/compiler/unit.rs000066400000000000000000000206421432416201200206460ustar00rootroot00000000000000use crate::core::compiler::{unit_dependencies::IsArtifact, CompileKind, CompileMode, CrateType}; use crate::core::manifest::{Target, TargetKind}; use crate::core::{profiles::Profile, Package}; use crate::util::hex::short_hash; use crate::util::interning::InternedString; use crate::util::Config; use std::cell::RefCell; use std::collections::HashSet; use std::fmt; use std::hash::{Hash, Hasher}; use std::ops::Deref; use std::rc::Rc; /// All information needed to define a unit. /// /// A unit is an object that has enough information so that cargo knows how to build it. /// For example, if your package has dependencies, then every dependency will be built as a library /// unit. If your package is a library, then it will be built as a library unit as well, or if it /// is a binary with `main.rs`, then a binary will be output. There are also separate unit types /// for `test`ing and `check`ing, amongst others. /// /// The unit also holds information about all possible metadata about the package in `pkg`. /// /// A unit needs to know extra information in addition to the type and root source file. For /// example, it needs to know the target architecture (OS, chip arch etc.) and it needs to know /// whether you want a debug or release build. There is enough information in this struct to figure /// all that out. #[derive(Clone, PartialOrd, Ord)] pub struct Unit { inner: Rc, } /// Internal fields of `Unit` which `Unit` will dereference to. #[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct UnitInner { /// Information about available targets, which files to include/exclude, etc. Basically stuff in /// `Cargo.toml`. pub pkg: Package, /// Information about the specific target to build, out of the possible targets in `pkg`. Not /// to be confused with *target-triple* (or *target architecture* ...), the target arch for a /// build. 
pub target: Target, /// The profile contains information about *how* the build should be run, including debug /// level, etc. pub profile: Profile, /// Whether this compilation unit is for the host or target architecture. /// /// For example, when /// cross compiling and using a custom build script, the build script needs to be compiled for /// the host architecture so the host rustc can use it (when compiling to the target /// architecture). pub kind: CompileKind, /// The "mode" this unit is being compiled for. See [`CompileMode`] for more details. pub mode: CompileMode, /// The `cfg` features to enable for this unit. /// This must be sorted. pub features: Vec, // if `true`, the dependency is an artifact dependency, requiring special handling when // calculating output directories, linkage and environment variables provided to builds. pub artifact: IsArtifact, /// Whether this is a standard library unit. pub is_std: bool, /// A hash of all dependencies of this unit. /// /// This is used to keep the `Unit` unique in the situation where two /// otherwise identical units need to link to different dependencies. This /// can happen, for example, when there are shared dependencies that need /// to be built with different features between normal and build /// dependencies. See `rebuild_unit_graph_shared` for more on why this is /// done. /// /// This value initially starts as 0, and then is filled in via a /// second-pass after all the unit dependencies have been computed. pub dep_hash: u64, } impl UnitInner { /// Returns whether compilation of this unit requires all upstream artifacts /// to be available. /// /// This effectively means that this unit is a synchronization point (if the /// return value is `true`) that all previously pipelined units need to /// finish in their entirety before this one is started. 
pub fn requires_upstream_objects(&self) -> bool { self.mode.is_any_test() || self.target.kind().requires_upstream_objects() } /// Returns whether or not this is a "local" package. /// /// A "local" package is one that the user can likely edit, or otherwise /// wants warnings, etc. pub fn is_local(&self) -> bool { self.pkg.package_id().source_id().is_path() && !self.is_std } /// Returns whether or not warnings should be displayed for this unit. pub fn show_warnings(&self, config: &Config) -> bool { self.is_local() || config.extra_verbose() } } impl Unit { pub fn buildkey(&self) -> String { format!("{}-{}", self.pkg.name(), short_hash(self)) } } // Just hash the pointer for fast hashing impl Hash for Unit { fn hash(&self, hasher: &mut H) { std::ptr::hash(&*self.inner, hasher) } } // Just equate the pointer since these are interned impl PartialEq for Unit { fn eq(&self, other: &Unit) -> bool { std::ptr::eq(&*self.inner, &*other.inner) } } impl Eq for Unit {} impl Deref for Unit { type Target = UnitInner; fn deref(&self) -> &UnitInner { &*self.inner } } impl fmt::Debug for Unit { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Unit") .field("pkg", &self.pkg) .field("target", &self.target) .field("profile", &self.profile) .field("kind", &self.kind) .field("mode", &self.mode) .field("features", &self.features) .field("artifact", &self.artifact.is_true()) .field("is_std", &self.is_std) .field("dep_hash", &self.dep_hash) .finish() } } /// A small structure used to "intern" `Unit` values. /// /// A `Unit` is just a thin pointer to an internal `UnitInner`. This is done to /// ensure that `Unit` itself is quite small as well as enabling a very /// efficient hash/equality implementation for `Unit`. All units are /// manufactured through an interner which guarantees that each equivalent value /// is only produced once. 
pub struct UnitInterner { state: RefCell, } struct InternerState { cache: HashSet>, } impl UnitInterner { /// Creates a new blank interner pub fn new() -> UnitInterner { UnitInterner { state: RefCell::new(InternerState { cache: HashSet::new(), }), } } /// Creates a new `unit` from its components. The returned `Unit`'s fields /// will all be equivalent to the provided arguments, although they may not /// be the exact same instance. pub fn intern( &self, pkg: &Package, target: &Target, profile: Profile, kind: CompileKind, mode: CompileMode, features: Vec, is_std: bool, dep_hash: u64, artifact: IsArtifact, ) -> Unit { let target = match (is_std, target.kind()) { // This is a horrible hack to support build-std. `libstd` declares // itself with both rlib and dylib. We don't want the dylib for a // few reasons: // // - dylibs don't have a hash in the filename. If you do something // (like switch rustc versions), it will stomp on the dylib // file, invalidating the entire cache (because std is a dep of // everything). // - We don't want to publicize the presence of dylib for the // standard library. // // At some point in the future, it would be nice to have a // first-class way of overriding or specifying crate-types. (true, TargetKind::Lib(crate_types)) if crate_types.contains(&CrateType::Dylib) => { let mut new_target = Target::clone(target); new_target.set_kind(TargetKind::Lib(vec![CrateType::Rlib])); new_target } _ => target.clone(), }; let inner = self.intern_inner(&UnitInner { pkg: pkg.clone(), target, profile, kind, mode, features, is_std, dep_hash, artifact, }); Unit { inner } } fn intern_inner(&self, item: &UnitInner) -> Rc { let mut me = self.state.borrow_mut(); if let Some(item) = me.cache.get(item) { return item.clone(); } let item = Rc::new(item.clone()); me.cache.insert(item.clone()); item } } cargo-0.66.0/src/cargo/core/compiler/unit_dependencies.rs000066400000000000000000001223261432416201200233560ustar00rootroot00000000000000//! 
Constructs the dependency graph for compilation. //! //! Rust code is typically organized as a set of Cargo packages. The //! dependencies between the packages themselves are stored in the //! `Resolve` struct. However, we can't use that information as is for //! compilation! A package typically contains several targets, or crates, //! and these targets has inter-dependencies. For example, you need to //! compile the `lib` target before the `bin` one, and you need to compile //! `build.rs` before either of those. //! //! So, we need to lower the `Resolve`, which specifies dependencies between //! *packages*, to a graph of dependencies between their *targets*, and this //! is exactly what this module is doing! Well, almost exactly: another //! complication is that we might want to compile the same target several times //! (for example, with and without tests), so we actually build a dependency //! graph of `Unit`s, which capture these properties. use std::collections::{HashMap, HashSet}; use log::trace; use crate::core::compiler::unit_graph::{UnitDep, UnitGraph}; use crate::core::compiler::{ CompileKind, CompileMode, CrateType, RustcTargetData, Unit, UnitInterner, }; use crate::core::dependency::{Artifact, ArtifactKind, ArtifactTarget, DepKind}; use crate::core::profiles::{Profile, Profiles, UnitFor}; use crate::core::resolver::features::{FeaturesFor, ResolvedFeatures}; use crate::core::resolver::Resolve; use crate::core::{Dependency, Package, PackageId, PackageSet, Target, TargetKind, Workspace}; use crate::ops::resolve_all_features; use crate::util::interning::InternedString; use crate::util::Config; use crate::CargoResult; const IS_NO_ARTIFACT_DEP: Option<&'static Artifact> = None; /// Collection of stuff used while creating the `UnitGraph`. 
struct State<'a, 'cfg> { ws: &'a Workspace<'cfg>, config: &'cfg Config, unit_dependencies: UnitGraph, package_set: &'a PackageSet<'cfg>, usr_resolve: &'a Resolve, usr_features: &'a ResolvedFeatures, std_resolve: Option<&'a Resolve>, std_features: Option<&'a ResolvedFeatures>, /// This flag is `true` while generating the dependencies for the standard /// library. is_std: bool, global_mode: CompileMode, target_data: &'a RustcTargetData<'cfg>, profiles: &'a Profiles, interner: &'a UnitInterner, scrape_units: &'a [Unit], /// A set of edges in `unit_dependencies` where (a, b) means that the /// dependency from a to b was added purely because it was a dev-dependency. /// This is used during `connect_run_custom_build_deps`. dev_dependency_edges: HashSet<(Unit, Unit)>, } /// A boolean-like to indicate if a `Unit` is an artifact or not. #[derive(Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] pub enum IsArtifact { Yes, No, } impl IsArtifact { pub fn is_true(&self) -> bool { matches!(self, IsArtifact::Yes) } } pub fn build_unit_dependencies<'a, 'cfg>( ws: &'a Workspace<'cfg>, package_set: &'a PackageSet<'cfg>, resolve: &'a Resolve, features: &'a ResolvedFeatures, std_resolve: Option<&'a (Resolve, ResolvedFeatures)>, roots: &[Unit], scrape_units: &[Unit], std_roots: &HashMap>, global_mode: CompileMode, target_data: &'a RustcTargetData<'cfg>, profiles: &'a Profiles, interner: &'a UnitInterner, ) -> CargoResult { if roots.is_empty() { // If -Zbuild-std, don't attach units if there is nothing to build. // Otherwise, other parts of the code may be confused by seeing units // in the dep graph without a root. 
return Ok(HashMap::new()); } let (std_resolve, std_features) = match std_resolve { Some((r, f)) => (Some(r), Some(f)), None => (None, None), }; let mut state = State { ws, config: ws.config(), unit_dependencies: HashMap::new(), package_set, usr_resolve: resolve, usr_features: features, std_resolve, std_features, is_std: false, global_mode, target_data, profiles, interner, scrape_units, dev_dependency_edges: HashSet::new(), }; let std_unit_deps = calc_deps_of_std(&mut state, std_roots)?; deps_of_roots(roots, &mut state)?; super::links::validate_links(state.resolve(), &state.unit_dependencies)?; // Hopefully there aren't any links conflicts with the standard library? if let Some(std_unit_deps) = std_unit_deps { attach_std_deps(&mut state, std_roots, std_unit_deps); } connect_run_custom_build_deps(&mut state); // Dependencies are used in tons of places throughout the backend, many of // which affect the determinism of the build itself. As a result be sure // that dependency lists are always sorted to ensure we've always got a // deterministic output. for list in state.unit_dependencies.values_mut() { list.sort(); } trace!("ALL UNIT DEPENDENCIES {:#?}", state.unit_dependencies); Ok(state.unit_dependencies) } /// Compute all the dependencies for the standard library. fn calc_deps_of_std( mut state: &mut State<'_, '_>, std_roots: &HashMap>, ) -> CargoResult> { if std_roots.is_empty() { return Ok(None); } // Compute dependencies for the standard library. state.is_std = true; for roots in std_roots.values() { deps_of_roots(roots, state)?; } state.is_std = false; Ok(Some(std::mem::take(&mut state.unit_dependencies))) } /// Add the standard library units to the `unit_dependencies`. fn attach_std_deps( state: &mut State<'_, '_>, std_roots: &HashMap>, std_unit_deps: UnitGraph, ) { // Attach the standard library as a dependency of every target unit. 
let mut found = false; for (unit, deps) in state.unit_dependencies.iter_mut() { if !unit.kind.is_host() && !unit.mode.is_run_custom_build() { deps.extend(std_roots[&unit.kind].iter().map(|unit| UnitDep { unit: unit.clone(), unit_for: UnitFor::new_normal(unit.kind), extern_crate_name: unit.pkg.name(), dep_name: None, // TODO: Does this `public` make sense? public: true, noprelude: true, })); found = true; } } // And also include the dependencies of the standard library itself. Don't // include these if no units actually needed the standard library. if found { for (unit, deps) in std_unit_deps.into_iter() { if let Some(other_unit) = state.unit_dependencies.insert(unit, deps) { panic!("std unit collision with existing unit: {:?}", other_unit); } } } } /// Compute all the dependencies of the given root units. /// The result is stored in state.unit_dependencies. fn deps_of_roots(roots: &[Unit], state: &mut State<'_, '_>) -> CargoResult<()> { for unit in roots.iter() { // Dependencies of tests/benches should not have `panic` set. // We check the global test mode to see if we are running in `cargo // test` in which case we ensure all dependencies have `panic` // cleared, and avoid building the lib thrice (once with `panic`, once // without, once for `--test`). In particular, the lib included for // Doc tests and examples are `Build` mode here. let root_compile_kind = unit.kind; let unit_for = if unit.mode.is_any_test() || state.global_mode.is_rustc_test() { if unit.target.proc_macro() { // Special-case for proc-macros, which are forced to for-host // since they need to link with the proc_macro crate. UnitFor::new_host_test(state.config, root_compile_kind) } else { UnitFor::new_test(state.config, root_compile_kind) } } else if unit.target.is_custom_build() { // This normally doesn't happen, except `clean` aggressively // generates all units. 
UnitFor::new_host(false, root_compile_kind) } else if unit.target.proc_macro() { UnitFor::new_host(true, root_compile_kind) } else if unit.target.for_host() { // Plugin should never have panic set. UnitFor::new_compiler(root_compile_kind) } else { UnitFor::new_normal(root_compile_kind) }; deps_of(unit, state, unit_for)?; } Ok(()) } /// Compute the dependencies of a single unit, recursively computing all /// transitive dependencies. /// /// The result is stored in `state.unit_dependencies`. fn deps_of(unit: &Unit, state: &mut State<'_, '_>, unit_for: UnitFor) -> CargoResult<()> { // Currently the `unit_dependencies` map does not include `unit_for`. This should // be safe for now. `TestDependency` only exists to clear the `panic` // flag, and you'll never ask for a `unit` with `panic` set as a // `TestDependency`. `CustomBuild` should also be fine since if the // requested unit's settings are the same as `Any`, `CustomBuild` can't // affect anything else in the hierarchy. if !state.unit_dependencies.contains_key(unit) { let unit_deps = compute_deps(unit, state, unit_for)?; state .unit_dependencies .insert(unit.clone(), unit_deps.clone()); for unit_dep in unit_deps { deps_of(&unit_dep.unit, state, unit_dep.unit_for)?; } } Ok(()) } /// Returns the direct unit dependencies for the given `Unit`. fn compute_deps( unit: &Unit, state: &mut State<'_, '_>, unit_for: UnitFor, ) -> CargoResult> { if unit.mode.is_run_custom_build() { return compute_deps_custom_build(unit, unit_for, state); } else if unit.mode.is_doc() { // Note: this does not include doc test. return compute_deps_doc(unit, state, unit_for); } let mut ret = Vec::new(); let mut dev_deps = Vec::new(); for (dep_pkg_id, deps) in state.deps(unit, unit_for) { let dep_lib = match calc_artifact_deps(unit, unit_for, dep_pkg_id, &deps, state, &mut ret)? 
{ Some(lib) => lib, None => continue, }; let dep_pkg = state.get(dep_pkg_id); let mode = check_or_build_mode(unit.mode, dep_lib); let dep_unit_for = unit_for.with_dependency(unit, dep_lib, unit_for.root_compile_kind()); let start = ret.len(); if state.config.cli_unstable().dual_proc_macros && dep_lib.proc_macro() && !unit.kind.is_host() { let unit_dep = new_unit_dep( state, unit, dep_pkg, dep_lib, dep_unit_for, unit.kind, mode, IS_NO_ARTIFACT_DEP, )?; ret.push(unit_dep); let unit_dep = new_unit_dep( state, unit, dep_pkg, dep_lib, dep_unit_for, CompileKind::Host, mode, IS_NO_ARTIFACT_DEP, )?; ret.push(unit_dep); } else { let unit_dep = new_unit_dep( state, unit, dep_pkg, dep_lib, dep_unit_for, unit.kind.for_target(dep_lib), mode, IS_NO_ARTIFACT_DEP, )?; ret.push(unit_dep); } // If the unit added was a dev-dependency unit, then record that in the // dev-dependencies array. We'll add this to // `state.dev_dependency_edges` at the end and process it later in // `connect_run_custom_build_deps`. if deps.iter().all(|d| !d.is_transitive()) { for dep in ret[start..].iter() { dev_deps.push((unit.clone(), dep.unit.clone())); } } } state.dev_dependency_edges.extend(dev_deps); // If this target is a build script, then what we've collected so far is // all we need. If this isn't a build script, then it depends on the // build script if there is one. if unit.target.is_custom_build() { return Ok(ret); } ret.extend(dep_build_script(unit, unit_for, state)?); // If this target is a binary, test, example, etc, then it depends on // the library of the same package. The call to `resolve.deps` above // didn't include `pkg` in the return values, so we need to special case // it here and see if we need to push `(pkg, pkg_lib_target)`. if unit.target.is_lib() && unit.mode != CompileMode::Doctest { return Ok(ret); } ret.extend(maybe_lib(unit, state, unit_for)?); // If any integration tests/benches are being run, make sure that // binaries are built as well. 
if !unit.mode.is_check() && unit.mode.is_any_test() && (unit.target.is_test() || unit.target.is_bench()) { let id = unit.pkg.package_id(); ret.extend( unit.pkg .targets() .iter() .filter(|t| { // Skip binaries with required features that have not been selected. match t.required_features() { Some(rf) if t.is_bin() => { let features = resolve_all_features( state.resolve(), state.features(), state.package_set, id, ); rf.iter().all(|f| features.contains(f)) } None if t.is_bin() => true, _ => false, } }) .map(|t| { new_unit_dep( state, unit, &unit.pkg, t, UnitFor::new_normal(unit_for.root_compile_kind()), unit.kind.for_target(t), CompileMode::Build, IS_NO_ARTIFACT_DEP, ) }) .collect::>>()?, ); } Ok(ret) } /// Find artifacts for all `deps` of `unit` and add units that build these artifacts /// to `ret`. fn calc_artifact_deps<'a>( unit: &Unit, unit_for: UnitFor, dep_id: PackageId, deps: &[&Dependency], state: &State<'a, '_>, ret: &mut Vec, ) -> CargoResult> { let mut has_artifact_lib = false; let mut maybe_non_artifact_lib = false; let artifact_pkg = state.get(dep_id); for dep in deps { let artifact = match dep.artifact() { Some(a) => a, None => { maybe_non_artifact_lib = true; continue; } }; has_artifact_lib |= artifact.is_lib(); // Custom build scripts (build/compile) never get artifact dependencies, // but the run-build-script step does (where it is handled). 
if !unit.target.is_custom_build() { debug_assert!( !unit.mode.is_run_custom_build(), "BUG: This should be handled in a separate branch" ); ret.extend(artifact_targets_to_unit_deps( unit, unit_for.with_artifact_features(artifact), state, artifact .target() .and_then(|t| match t { ArtifactTarget::BuildDependencyAssumeTarget => None, ArtifactTarget::Force(kind) => Some(CompileKind::Target(kind)), }) .unwrap_or(unit.kind), artifact_pkg, dep, )?); } } if has_artifact_lib || maybe_non_artifact_lib { Ok(artifact_pkg.targets().iter().find(|t| t.is_lib())) } else { Ok(None) } } /// Returns the dependencies needed to run a build script. /// /// The `unit` provided must represent an execution of a build script, and /// the returned set of units must all be run before `unit` is run. fn compute_deps_custom_build( unit: &Unit, unit_for: UnitFor, state: &State<'_, '_>, ) -> CargoResult> { if let Some(links) = unit.pkg.manifest().links() { if state .target_data .script_override(links, unit.kind) .is_some() { // Overridden build scripts don't have any dependencies. return Ok(Vec::new()); } } // All dependencies of this unit should use profiles for custom builds. // If this is a build script of a proc macro, make sure it uses host // features. let script_unit_for = unit_for.for_custom_build(); // When not overridden, then the dependencies to run a build script are: // // 1. Compiling the build script itself. // 2. For each immediate dependency of our package which has a `links` // key, the execution of that build script. // // We don't have a great way of handling (2) here right now so this is // deferred until after the graph of all unit dependencies has been // constructed. let compile_script_unit = new_unit_dep( state, unit, &unit.pkg, &unit.target, script_unit_for, // Build scripts always compiled for the host. CompileKind::Host, CompileMode::Build, IS_NO_ARTIFACT_DEP, )?; let mut result = vec![compile_script_unit]; // Include any artifact dependencies. 
// // This is essentially the same as `calc_artifact_deps`, but there are some // subtle differences that require this to be implemented differently. // // Produce units that build all required artifact kinds (like binaries, // static libraries, etc) with the correct compile target. // // Computing the compile target for artifact units is more involved as it has to handle // various target configurations specific to artifacts, like `target = "target"` and // `target = ""`, which makes knowing the root units compile target // `root_unit_compile_target` necessary. let root_unit_compile_target = unit_for.root_compile_kind(); let unit_for = UnitFor::new_host(/*host_features*/ true, root_unit_compile_target); for (dep_pkg_id, deps) in state.deps(unit, script_unit_for) { for dep in deps { if dep.kind() != DepKind::Build || dep.artifact().is_none() { continue; } let artifact_pkg = state.get(dep_pkg_id); let artifact = dep.artifact().expect("artifact dep"); let resolved_artifact_compile_kind = artifact .target() .map(|target| target.to_resolved_compile_kind(root_unit_compile_target)); result.extend(artifact_targets_to_unit_deps( unit, unit_for.with_artifact_features_from_resolved_compile_kind( resolved_artifact_compile_kind, ), state, resolved_artifact_compile_kind.unwrap_or(CompileKind::Host), artifact_pkg, dep, )?); } } Ok(result) } /// Given a `parent` unit containing a dependency `dep` whose package is `artifact_pkg`, /// find all targets in `artifact_pkg` which refer to the `dep`s artifact declaration /// and turn them into units. /// Due to the nature of artifact dependencies, a single dependency in a manifest can /// cause one or more targets to be build, for instance with /// `artifact = ["bin:a", "bin:b", "staticlib"]`, which is very different from normal /// dependencies which cause only a single unit to be created. /// /// `compile_kind` is the computed kind for the future artifact unit /// dependency, only the caller can pick the correct one. 
fn artifact_targets_to_unit_deps( parent: &Unit, parent_unit_for: UnitFor, state: &State<'_, '_>, compile_kind: CompileKind, artifact_pkg: &Package, dep: &Dependency, ) -> CargoResult> { let ret = match_artifacts_kind_with_targets(dep, artifact_pkg.targets(), parent.pkg.name().as_str())? .into_iter() .flat_map(|target| { // We split target libraries into individual units, even though rustc is able // to produce multiple kinds in an single invocation for the sole reason that // each artifact kind has its own output directory, something we can't easily // teach rustc for now. match target.kind() { TargetKind::Lib(kinds) => Box::new( kinds .iter() .filter(|tk| matches!(tk, CrateType::Cdylib | CrateType::Staticlib)) .map(|target_kind| { new_unit_dep( state, parent, artifact_pkg, target .clone() .set_kind(TargetKind::Lib(vec![target_kind.clone()])), parent_unit_for, compile_kind, CompileMode::Build, dep.artifact(), ) }), ) as Box>, _ => Box::new(std::iter::once(new_unit_dep( state, parent, artifact_pkg, target, parent_unit_for, compile_kind, CompileMode::Build, dep.artifact(), ))), } }) .collect::, _>>()?; Ok(ret) } /// Given a dependency with an artifact `artifact_dep` and a set of available `targets` /// of its package, find a target for each kind of artifacts that are to be built. /// /// Failure to match any target results in an error mentioning the parent manifests /// `parent_package` name. 
fn match_artifacts_kind_with_targets<'a>( artifact_dep: &Dependency, targets: &'a [Target], parent_package: &str, ) -> CargoResult> { let mut out = HashSet::new(); let artifact_requirements = artifact_dep.artifact().expect("artifact present"); for artifact_kind in artifact_requirements.kinds() { let mut extend = |filter: &dyn Fn(&&Target) -> bool| { let mut iter = targets.iter().filter(filter).peekable(); let found = iter.peek().is_some(); out.extend(iter); found }; let found = match artifact_kind { ArtifactKind::Cdylib => extend(&|t| t.is_cdylib()), ArtifactKind::Staticlib => extend(&|t| t.is_staticlib()), ArtifactKind::AllBinaries => extend(&|t| t.is_bin()), ArtifactKind::SelectedBinary(bin_name) => { extend(&|t| t.is_bin() && t.name() == bin_name.as_str()) } }; if !found { anyhow::bail!( "dependency `{}` in package `{}` requires a `{}` artifact to be present.", artifact_dep.name_in_toml(), parent_package, artifact_kind ); } } Ok(out) } /// Returns the dependencies necessary to document a package. fn compute_deps_doc( unit: &Unit, state: &mut State<'_, '_>, unit_for: UnitFor, ) -> CargoResult> { // To document a library, we depend on dependencies actually being // built. If we're documenting *all* libraries, then we also depend on // the documentation of the library being built. let mut ret = Vec::new(); for (id, deps) in state.deps(unit, unit_for) { let dep_lib = match calc_artifact_deps(unit, unit_for, id, &deps, state, &mut ret)? { Some(lib) => lib, None => continue, }; let dep_pkg = state.get(id); // Rustdoc only needs rmeta files for regular dependencies. // However, for plugins/proc macros, deps should be built like normal. 
let mode = check_or_build_mode(unit.mode, dep_lib); let dep_unit_for = unit_for.with_dependency(unit, dep_lib, unit_for.root_compile_kind()); let lib_unit_dep = new_unit_dep( state, unit, dep_pkg, dep_lib, dep_unit_for, unit.kind.for_target(dep_lib), mode, IS_NO_ARTIFACT_DEP, )?; ret.push(lib_unit_dep); if dep_lib.documented() { if let CompileMode::Doc { deps: true } = unit.mode { // Document this lib as well. let doc_unit_dep = new_unit_dep( state, unit, dep_pkg, dep_lib, dep_unit_for, unit.kind.for_target(dep_lib), unit.mode, IS_NO_ARTIFACT_DEP, )?; ret.push(doc_unit_dep); } } } // Be sure to build/run the build script for documented libraries. ret.extend(dep_build_script(unit, unit_for, state)?); // If we document a binary/example, we need the library available. if unit.target.is_bin() || unit.target.is_example() { // build the lib ret.extend(maybe_lib(unit, state, unit_for)?); // and also the lib docs for intra-doc links if let Some(lib) = unit .pkg .targets() .iter() .find(|t| t.is_linkable() && t.documented()) { let dep_unit_for = unit_for.with_dependency(unit, lib, unit_for.root_compile_kind()); let lib_doc_unit = new_unit_dep( state, unit, &unit.pkg, lib, dep_unit_for, unit.kind.for_target(lib), unit.mode, IS_NO_ARTIFACT_DEP, )?; ret.push(lib_doc_unit); } } // Add all units being scraped for examples as a dependency of top-level Doc units. 
if state.ws.unit_needs_doc_scrape(unit) { for scrape_unit in state.scrape_units.iter() { deps_of(scrape_unit, state, unit_for)?; ret.push(new_unit_dep( state, scrape_unit, &scrape_unit.pkg, &scrape_unit.target, unit_for, scrape_unit.kind, scrape_unit.mode, IS_NO_ARTIFACT_DEP, )?); } } Ok(ret) } fn maybe_lib( unit: &Unit, state: &mut State<'_, '_>, unit_for: UnitFor, ) -> CargoResult> { unit.pkg .targets() .iter() .find(|t| t.is_linkable()) .map(|t| { let mode = check_or_build_mode(unit.mode, t); let dep_unit_for = unit_for.with_dependency(unit, t, unit_for.root_compile_kind()); new_unit_dep( state, unit, &unit.pkg, t, dep_unit_for, unit.kind.for_target(t), mode, IS_NO_ARTIFACT_DEP, ) }) .transpose() } /// If a build script is scheduled to be run for the package specified by /// `unit`, this function will return the unit to run that build script. /// /// Overriding a build script simply means that the running of the build /// script itself doesn't have any dependencies, so even in that case a unit /// of work is still returned. `None` is only returned if the package has no /// build script. fn dep_build_script( unit: &Unit, unit_for: UnitFor, state: &State<'_, '_>, ) -> CargoResult> { unit.pkg .targets() .iter() .find(|t| t.is_custom_build()) .map(|t| { // The profile stored in the Unit is the profile for the thing // the custom build script is running for. let profile = state.profiles.get_profile_run_custom_build(&unit.profile); // UnitFor::for_custom_build is used because we want the `host` flag set // for all of our build dependencies (so they all get // build-override profiles), including compiling the build.rs // script itself. // // If `is_for_host_features` here is `false`, that means we are a // build.rs script for a normal dependency and we want to set the // CARGO_FEATURE_* environment variables to the features as a // normal dep. 
// // If `is_for_host_features` here is `true`, that means that this // package is being used as a build dependency or proc-macro, and // so we only want to set CARGO_FEATURE_* variables for the host // side of the graph. // // Keep in mind that the RunCustomBuild unit and the Compile // build.rs unit use the same features. This is because some // people use `cfg!` and `#[cfg]` expressions to check for enabled // features instead of just checking `CARGO_FEATURE_*` at runtime. // In the case with the new feature resolver (decoupled host // deps), and a shared dependency has different features enabled // for normal vs. build, then the build.rs script will get // compiled twice. I believe it is not feasible to only build it // once because it would break a large number of scripts (they // would think they have the wrong set of features enabled). let script_unit_for = unit_for.for_custom_build(); new_unit_dep_with_profile( state, unit, &unit.pkg, t, script_unit_for, unit.kind, CompileMode::RunCustomBuild, profile, IS_NO_ARTIFACT_DEP, ) }) .transpose() } /// Choose the correct mode for dependencies. fn check_or_build_mode(mode: CompileMode, target: &Target) -> CompileMode { match mode { CompileMode::Check { .. } | CompileMode::Doc { .. } | CompileMode::Docscrape => { if target.for_host() { // Plugin and proc macro targets should be compiled like // normal. CompileMode::Build } else { // Regular dependencies should not be checked with --test. // Regular dependencies of doc targets should emit rmeta only. CompileMode::Check { test: false } } } _ => CompileMode::Build, } } /// Create a new Unit for a dependency from `parent` to `pkg` and `target`. 
fn new_unit_dep( state: &State<'_, '_>, parent: &Unit, pkg: &Package, target: &Target, unit_for: UnitFor, kind: CompileKind, mode: CompileMode, artifact: Option<&Artifact>, ) -> CargoResult { let is_local = pkg.package_id().source_id().is_path() && !state.is_std; let profile = state.profiles.get_profile( pkg.package_id(), state.ws.is_member(pkg), is_local, unit_for, kind, ); new_unit_dep_with_profile( state, parent, pkg, target, unit_for, kind, mode, profile, artifact, ) } fn new_unit_dep_with_profile( state: &State<'_, '_>, parent: &Unit, pkg: &Package, target: &Target, unit_for: UnitFor, kind: CompileKind, mode: CompileMode, profile: Profile, artifact: Option<&Artifact>, ) -> CargoResult { let (extern_crate_name, dep_name) = state.resolve().extern_crate_name_and_dep_name( parent.pkg.package_id(), pkg.package_id(), target, )?; let public = state .resolve() .is_public_dep(parent.pkg.package_id(), pkg.package_id()); let features_for = unit_for.map_to_features_for(artifact); let features = state.activated_features(pkg.package_id(), features_for); let unit = state.interner.intern( pkg, target, profile, kind, mode, features, state.is_std, /*dep_hash*/ 0, artifact.map_or(IsArtifact::No, |_| IsArtifact::Yes), ); Ok(UnitDep { unit, unit_for, extern_crate_name, dep_name, public, noprelude: false, }) } /// Fill in missing dependencies for units of the `RunCustomBuild` /// /// As mentioned above in `compute_deps_custom_build` each build script /// execution has two dependencies. The first is compiling the build script /// itself (already added) and the second is that all crates the package of the /// build script depends on with `links` keys, their build script execution. (a /// bit confusing eh?) /// /// Here we take the entire `deps` map and add more dependencies from execution /// of one build script to execution of another build script. 
fn connect_run_custom_build_deps(state: &mut State<'_, '_>) { let mut new_deps = Vec::new(); { let state = &*state; // First up build a reverse dependency map. This is a mapping of all // `RunCustomBuild` known steps to the unit which depends on them. For // example a library might depend on a build script, so this map will // have the build script as the key and the library would be in the // value's set. let mut reverse_deps_map = HashMap::new(); for (unit, deps) in state.unit_dependencies.iter() { for dep in deps { if dep.unit.mode == CompileMode::RunCustomBuild { reverse_deps_map .entry(dep.unit.clone()) .or_insert_with(HashSet::new) .insert(unit); } } } // Next, we take a look at all build scripts executions listed in the // dependency map. Our job here is to take everything that depends on // this build script (from our reverse map above) and look at the other // package dependencies of these parents. // // If we depend on a linkable target and the build script mentions // `links`, then we depend on that package's build script! Here we use // `dep_build_script` to manufacture an appropriate build script unit to // depend on. for unit in state .unit_dependencies .keys() .filter(|k| k.mode == CompileMode::RunCustomBuild) { // This list of dependencies all depend on `unit`, an execution of // the build script. let reverse_deps = match reverse_deps_map.get(unit) { Some(set) => set, None => continue, }; let to_add = reverse_deps .iter() // Get all sibling dependencies of `unit` .flat_map(|reverse_dep| { state.unit_dependencies[reverse_dep] .iter() .map(move |a| (reverse_dep, a)) }) // Only deps with `links`. .filter(|(_parent, other)| { other.unit.pkg != unit.pkg && other.unit.target.is_linkable() && other.unit.pkg.manifest().links().is_some() }) // Avoid cycles when using the doc --scrape-examples feature: // Say a workspace has crates A and B where A has a build-dependency on B. 
// The Doc units for A and B will have a dependency on the Docscrape for both A and B. // So this would add a dependency from B-build to A-build, causing a cycle: // B (build) -> A (build) -> B(build) // See the test scrape_examples_avoid_build_script_cycle for a concrete example. // To avoid this cycle, we filter out the B -> A (docscrape) dependency. .filter(|(_parent, other)| !other.unit.mode.is_doc_scrape()) // Skip dependencies induced via dev-dependencies since // connections between `links` and build scripts only happens // via normal dependencies. Otherwise since dev-dependencies can // be cyclic we could have cyclic build-script executions. .filter_map(move |(parent, other)| { if state .dev_dependency_edges .contains(&((*parent).clone(), other.unit.clone())) { None } else { Some(other) } }) // Get the RunCustomBuild for other lib. .filter_map(|other| { state.unit_dependencies[&other.unit] .iter() .find(|other_dep| other_dep.unit.mode == CompileMode::RunCustomBuild) .cloned() }) .collect::>(); if !to_add.is_empty() { // (RunCustomBuild, set(other RunCustomBuild)) new_deps.push((unit.clone(), to_add)); } } } // And finally, add in all the missing dependencies! 
for (unit, new_deps) in new_deps { state .unit_dependencies .get_mut(&unit) .unwrap() .extend(new_deps); } } impl<'a, 'cfg> State<'a, 'cfg> { fn resolve(&self) -> &'a Resolve { if self.is_std { self.std_resolve.unwrap() } else { self.usr_resolve } } fn features(&self) -> &'a ResolvedFeatures { if self.is_std { self.std_features.unwrap() } else { self.usr_features } } fn activated_features( &self, pkg_id: PackageId, features_for: FeaturesFor, ) -> Vec { let features = self.features(); features.activated_features(pkg_id, features_for) } fn is_dep_activated( &self, pkg_id: PackageId, features_for: FeaturesFor, dep_name: InternedString, ) -> bool { self.features() .is_dep_activated(pkg_id, features_for, dep_name) } fn get(&self, id: PackageId) -> &'a Package { self.package_set .get_one(id) .unwrap_or_else(|_| panic!("expected {} to be downloaded", id)) } /// Returns a filtered set of dependencies for the given unit. fn deps(&self, unit: &Unit, unit_for: UnitFor) -> Vec<(PackageId, Vec<&Dependency>)> { let pkg_id = unit.pkg.package_id(); let kind = unit.kind; self.resolve() .deps(pkg_id) .filter_map(|(id, deps)| { assert!(!deps.is_empty()); let deps: Vec<_> = deps .iter() .filter(|dep| { // If this target is a build command, then we only want build // dependencies, otherwise we want everything *other than* build // dependencies. if unit.target.is_custom_build() != dep.is_build() { return false; } // If this dependency is **not** a transitive dependency, then it // only applies to test/example targets. if !dep.is_transitive() && !unit.target.is_test() && !unit.target.is_example() && !unit.mode.is_doc_scrape() && !unit.mode.is_any_test() { return false; } // If this dependency is only available for certain platforms, // make sure we're only enabling it for that platform. if !self.target_data.dep_platform_activated(dep, kind) { return false; } // If this is an optional dependency, and the new feature resolver // did not enable it, don't include it. 
if dep.is_optional() { let features_for = unit_for.map_to_features_for(dep.artifact()); if !self.is_dep_activated(pkg_id, features_for, dep.name_in_toml()) { return false; } } // If we've gotten past all that, then this dependency is // actually used! true }) .collect(); if deps.is_empty() { None } else { Some((id, deps)) } }) .collect() } } cargo-0.66.0/src/cargo/core/compiler/unit_graph.rs000066400000000000000000000105271432416201200220300ustar00rootroot00000000000000use crate::core::compiler::Unit; use crate::core::compiler::{CompileKind, CompileMode}; use crate::core::profiles::{Profile, UnitFor}; use crate::core::{PackageId, Target}; use crate::util::interning::InternedString; use crate::util::CargoResult; use crate::Config; use std::collections::HashMap; use std::io::Write; /// The dependency graph of Units. pub type UnitGraph = HashMap>; /// A unit dependency. #[derive(Debug, Clone, Hash, Eq, PartialEq, PartialOrd, Ord)] pub struct UnitDep { /// The dependency unit. pub unit: Unit, /// The purpose of this dependency (a dependency for a test, or a build /// script, etc.). Do not use this after the unit graph has been built. pub unit_for: UnitFor, /// The name the parent uses to refer to this dependency. pub extern_crate_name: InternedString, /// If `Some`, the name of the dependency if renamed in toml. /// It's particularly interesting to artifact dependencies which rely on it /// for naming their environment variables. Note that the `extern_crate_name` /// cannot be used for this as it also may be the build target itself, /// which isn't always the renamed dependency name. pub dep_name: Option, /// Whether or not this is a public dependency. pub public: bool, /// If `true`, the dependency should not be added to Rust's prelude. 
pub noprelude: bool, } const VERSION: u32 = 1; #[derive(serde::Serialize)] struct SerializedUnitGraph<'a> { version: u32, units: Vec>, roots: Vec, } #[derive(serde::Serialize)] struct SerializedUnit<'a> { pkg_id: PackageId, target: &'a Target, profile: &'a Profile, platform: CompileKind, mode: CompileMode, features: &'a Vec, #[serde(skip_serializing_if = "std::ops::Not::not")] // hide for unstable build-std is_std: bool, dependencies: Vec, } #[derive(serde::Serialize)] struct SerializedUnitDep { index: usize, extern_crate_name: InternedString, // This is only set on nightly since it is unstable. #[serde(skip_serializing_if = "Option::is_none")] public: Option, // This is only set on nightly since it is unstable. #[serde(skip_serializing_if = "Option::is_none")] noprelude: Option, // Intentionally not including `unit_for` because it is a low-level // internal detail that is mostly used for building the graph. } pub fn emit_serialized_unit_graph( root_units: &[Unit], unit_graph: &UnitGraph, config: &Config, ) -> CargoResult<()> { let mut units: Vec<(&Unit, &Vec)> = unit_graph.iter().collect(); units.sort_unstable(); // Create a map for quick lookup for dependencies. let indices: HashMap<&Unit, usize> = units .iter() .enumerate() .map(|(i, val)| (val.0, i)) .collect(); let roots = root_units.iter().map(|root| indices[root]).collect(); let ser_units = units .iter() .map(|(unit, unit_deps)| { let dependencies = unit_deps .iter() .map(|unit_dep| { // https://github.com/rust-lang/rust/issues/64260 when stabilized. 
let (public, noprelude) = if config.nightly_features_allowed { (Some(unit_dep.public), Some(unit_dep.noprelude)) } else { (None, None) }; SerializedUnitDep { index: indices[&unit_dep.unit], extern_crate_name: unit_dep.extern_crate_name, public, noprelude, } }) .collect(); SerializedUnit { pkg_id: unit.pkg.package_id(), target: &unit.target, profile: &unit.profile, platform: unit.kind, mode: unit.mode, features: &unit.features, is_std: unit.is_std, dependencies, } }) .collect(); let s = SerializedUnitGraph { version: VERSION, units: ser_units, roots, }; let stdout = std::io::stdout(); let mut lock = stdout.lock(); serde_json::to_writer(&mut lock, &s)?; drop(writeln!(lock)); Ok(()) } cargo-0.66.0/src/cargo/core/dependency.rs000066400000000000000000000473211432416201200201760ustar00rootroot00000000000000use cargo_platform::Platform; use log::trace; use semver::VersionReq; use serde::ser; use serde::Serialize; use std::borrow::Cow; use std::fmt; use std::path::PathBuf; use std::rc::Rc; use crate::core::compiler::{CompileKind, CompileTarget}; use crate::core::{PackageId, SourceId, Summary}; use crate::util::errors::CargoResult; use crate::util::interning::InternedString; use crate::util::toml::StringOrVec; use crate::util::OptVersionReq; /// Information about a dependency requested by a Cargo manifest. /// Cheap to copy. #[derive(PartialEq, Eq, Hash, Clone, Debug)] pub struct Dependency { inner: Rc, } /// The data underlying a `Dependency`. #[derive(PartialEq, Eq, Hash, Clone, Debug)] struct Inner { name: InternedString, source_id: SourceId, /// Source ID for the registry as specified in the manifest. /// /// This will be None if it is not specified (crates.io dependency). /// This is different from `source_id` for example when both a `path` and /// `registry` is specified. Or in the case of a crates.io dependency, /// `source_id` will be crates.io and this will be None. 
registry_id: Option, req: OptVersionReq, specified_req: bool, kind: DepKind, only_match_name: bool, explicit_name_in_toml: Option, optional: bool, public: bool, default_features: bool, features: Vec, // The presence of this information turns a dependency into an artifact dependency. artifact: Option, // This dependency should be used only for this platform. // `None` means *all platforms*. platform: Option, } #[derive(Serialize)] struct SerializedDependency<'a> { name: &'a str, source: SourceId, req: String, kind: DepKind, rename: Option<&'a str>, optional: bool, uses_default_features: bool, features: &'a [InternedString], #[serde(skip_serializing_if = "Option::is_none")] artifact: Option<&'a Artifact>, target: Option<&'a Platform>, /// The registry URL this dependency is from. /// If None, then it comes from the default registry (crates.io). registry: Option<&'a str>, /// The file system path for a local path dependency. #[serde(skip_serializing_if = "Option::is_none")] path: Option, } impl ser::Serialize for Dependency { fn serialize(&self, s: S) -> Result where S: ser::Serializer, { let registry_id = self.registry_id(); SerializedDependency { name: &*self.package_name(), source: self.source_id(), req: self.version_req().to_string(), kind: self.kind(), optional: self.is_optional(), uses_default_features: self.uses_default_features(), features: self.features(), target: self.platform(), rename: self.explicit_name_in_toml().map(|s| s.as_str()), registry: registry_id.as_ref().map(|sid| sid.url().as_str()), path: self.source_id().local_path(), artifact: self.artifact(), } .serialize(s) } } #[derive(PartialEq, Eq, Hash, Ord, PartialOrd, Clone, Debug, Copy)] pub enum DepKind { Normal, Development, Build, } impl ser::Serialize for DepKind { fn serialize(&self, s: S) -> Result where S: ser::Serializer, { match *self { DepKind::Normal => None, DepKind::Development => Some("dev"), DepKind::Build => Some("build"), } .serialize(s) } } impl Dependency { /// Attempt to create a 
`Dependency` from an entry in the manifest. pub fn parse( name: impl Into, version: Option<&str>, source_id: SourceId, ) -> CargoResult { let name = name.into(); let (specified_req, version_req) = match version { Some(v) => match VersionReq::parse(v) { Ok(req) => (true, OptVersionReq::Req(req)), Err(err) => { return Err(anyhow::Error::new(err).context(format!( "failed to parse the version requirement `{}` for dependency `{}`", v, name, ))) } }, None => (false, OptVersionReq::Any), }; let mut ret = Dependency::new_override(name, source_id); { let ptr = Rc::make_mut(&mut ret.inner); ptr.only_match_name = false; ptr.req = version_req; ptr.specified_req = specified_req; } Ok(ret) } pub fn new_override(name: InternedString, source_id: SourceId) -> Dependency { assert!(!name.is_empty()); Dependency { inner: Rc::new(Inner { name, source_id, registry_id: None, req: OptVersionReq::Any, kind: DepKind::Normal, only_match_name: true, optional: false, public: false, features: Vec::new(), default_features: true, specified_req: false, platform: None, explicit_name_in_toml: None, artifact: None, }), } } pub fn version_req(&self) -> &OptVersionReq { &self.inner.req } /// This is the name of this `Dependency` as listed in `Cargo.toml`. /// /// Or in other words, this is what shows up in the `[dependencies]` section /// on the left hand side. This is *not* the name of the package that's /// being depended on as the dependency can be renamed. For that use /// `package_name` below. /// /// Both of the dependencies below return `foo` for `name_in_toml`: /// /// ```toml /// [dependencies] /// foo = "0.1" /// ``` /// /// and ... /// /// ```toml /// [dependencies] /// foo = { version = "0.1", package = 'bar' } /// ``` pub fn name_in_toml(&self) -> InternedString { self.explicit_name_in_toml().unwrap_or(self.inner.name) } /// The name of the package that this `Dependency` depends on. 
/// /// Usually this is what's written on the left hand side of a dependencies /// section, but it can also be renamed via the `package` key. /// /// Both of the dependencies below return `foo` for `package_name`: /// /// ```toml /// [dependencies] /// foo = "0.1" /// ``` /// /// and ... /// /// ```toml /// [dependencies] /// bar = { version = "0.1", package = 'foo' } /// ``` pub fn package_name(&self) -> InternedString { self.inner.name } pub fn source_id(&self) -> SourceId { self.inner.source_id } pub fn registry_id(&self) -> Option { self.inner.registry_id } pub fn set_registry_id(&mut self, registry_id: SourceId) -> &mut Dependency { Rc::make_mut(&mut self.inner).registry_id = Some(registry_id); self } pub fn kind(&self) -> DepKind { self.inner.kind } pub fn is_public(&self) -> bool { self.inner.public } /// Sets whether the dependency is public. pub fn set_public(&mut self, public: bool) -> &mut Dependency { if public { // Setting 'public' only makes sense for normal dependencies assert_eq!(self.kind(), DepKind::Normal); } Rc::make_mut(&mut self.inner).public = public; self } pub fn specified_req(&self) -> bool { self.inner.specified_req } /// If none, this dependencies must be built for all platforms. /// If some, it must only be built for the specified platform. pub fn platform(&self) -> Option<&Platform> { self.inner.platform.as_ref() } /// The renamed name of this dependency, if any. /// /// If the `package` key is used in `Cargo.toml` then this returns the same /// value as `name_in_toml`. pub fn explicit_name_in_toml(&self) -> Option { self.inner.explicit_name_in_toml } pub fn set_kind(&mut self, kind: DepKind) -> &mut Dependency { if self.is_public() { // Setting 'public' only makes sense for normal dependencies assert_eq!(kind, DepKind::Normal); } Rc::make_mut(&mut self.inner).kind = kind; self } /// Sets the list of features requested for the package. 
pub fn set_features( &mut self, features: impl IntoIterator>, ) -> &mut Dependency { Rc::make_mut(&mut self.inner).features = features.into_iter().map(|s| s.into()).collect(); self } /// Sets whether the dependency requests default features of the package. pub fn set_default_features(&mut self, default_features: bool) -> &mut Dependency { Rc::make_mut(&mut self.inner).default_features = default_features; self } /// Sets whether the dependency is optional. pub fn set_optional(&mut self, optional: bool) -> &mut Dependency { Rc::make_mut(&mut self.inner).optional = optional; self } /// Sets the source ID for this dependency. pub fn set_source_id(&mut self, id: SourceId) -> &mut Dependency { Rc::make_mut(&mut self.inner).source_id = id; self } /// Sets the version requirement for this dependency. pub fn set_version_req(&mut self, req: VersionReq) -> &mut Dependency { Rc::make_mut(&mut self.inner).req = OptVersionReq::Req(req); self } pub fn set_platform(&mut self, platform: Option) -> &mut Dependency { Rc::make_mut(&mut self.inner).platform = platform; self } pub fn set_explicit_name_in_toml( &mut self, name: impl Into, ) -> &mut Dependency { Rc::make_mut(&mut self.inner).explicit_name_in_toml = Some(name.into()); self } /// Locks this dependency to depending on the specified package ID. pub fn lock_to(&mut self, id: PackageId) -> &mut Dependency { assert_eq!(self.inner.source_id, id.source_id()); trace!( "locking dep from `{}` with `{}` at {} to {}", self.package_name(), self.version_req(), self.source_id(), id ); let me = Rc::make_mut(&mut self.inner); me.req.lock_to(id.version()); // Only update the `precise` of this source to preserve other // information about dependency's source which may not otherwise be // tested during equality/hashing. me.source_id = me .source_id .with_precise(id.source_id().precise().map(|s| s.to_string())); self } /// Locks this dependency to a specified version. 
/// /// Mainly used in dependency patching like `[patch]` or `[replace]`, which /// doesn't need to lock the entire dependency to a specific [`PackageId`]. pub fn lock_version(&mut self, version: &semver::Version) -> &mut Dependency { let me = Rc::make_mut(&mut self.inner); me.req.lock_to(version); self } /// Returns `true` if this is a "locked" dependency. Basically a locked /// dependency has an exact version req, but not vice versa. pub fn is_locked(&self) -> bool { self.inner.req.is_locked() } /// Returns `false` if the dependency is only used to build the local package. pub fn is_transitive(&self) -> bool { match self.inner.kind { DepKind::Normal | DepKind::Build => true, DepKind::Development => false, } } pub fn is_build(&self) -> bool { matches!(self.inner.kind, DepKind::Build) } pub fn is_optional(&self) -> bool { self.inner.optional } /// Returns `true` if the default features of the dependency are requested. pub fn uses_default_features(&self) -> bool { self.inner.default_features } /// Returns the list of features that are requested by the dependency. pub fn features(&self) -> &[InternedString] { &self.inner.features } /// Returns `true` if the package (`sum`) can fulfill this dependency request. pub fn matches(&self, sum: &Summary) -> bool { self.matches_id(sum.package_id()) } /// Returns `true` if the package (`id`) can fulfill this dependency request. pub fn matches_ignoring_source(&self, id: PackageId) -> bool { self.package_name() == id.name() && self.version_req().matches(id.version()) } /// Returns `true` if the package (`id`) can fulfill this dependency request. 
pub fn matches_id(&self, id: PackageId) -> bool { self.inner.name == id.name() && (self.inner.only_match_name || (self.inner.req.matches(id.version()) && self.inner.source_id == id.source_id())) } pub fn map_source(mut self, to_replace: SourceId, replace_with: SourceId) -> Dependency { if self.source_id() == to_replace { self.set_source_id(replace_with); } self } pub(crate) fn set_artifact(&mut self, artifact: Artifact) { Rc::make_mut(&mut self.inner).artifact = Some(artifact); } pub(crate) fn artifact(&self) -> Option<&Artifact> { self.inner.artifact.as_ref() } /// Dependencies are potential rust libs if they are not artifacts or they are an /// artifact which allows to be seen as library. /// Previously, every dependency was potentially seen as library. pub(crate) fn maybe_lib(&self) -> bool { self.artifact().map(|a| a.is_lib).unwrap_or(true) } } /// The presence of an artifact turns an ordinary dependency into an Artifact dependency. /// As such, it will build one or more different artifacts of possibly various kinds /// for making them available at build time for rustc invocations or runtime /// for build scripts. /// /// This information represents a requirement in the package this dependency refers to. 
#[derive(PartialEq, Eq, Hash, Clone, Debug)] pub struct Artifact { inner: Rc>, is_lib: bool, target: Option, } #[derive(Serialize)] pub struct SerializedArtifact<'a> { kinds: &'a [ArtifactKind], lib: bool, target: Option<&'a str>, } impl ser::Serialize for Artifact { fn serialize(&self, s: S) -> Result where S: ser::Serializer, { SerializedArtifact { kinds: self.kinds(), lib: self.is_lib, target: self.target.as_ref().map(|t| match t { ArtifactTarget::BuildDependencyAssumeTarget => "target", ArtifactTarget::Force(target) => target.rustc_target().as_str(), }), } .serialize(s) } } impl Artifact { pub(crate) fn parse( artifacts: &StringOrVec, is_lib: bool, target: Option<&str>, ) -> CargoResult { let kinds = ArtifactKind::validate( artifacts .iter() .map(|s| ArtifactKind::parse(s)) .collect::, _>>()?, )?; Ok(Artifact { inner: Rc::new(kinds), is_lib, target: target.map(ArtifactTarget::parse).transpose()?, }) } pub(crate) fn kinds(&self) -> &[ArtifactKind] { &self.inner } pub(crate) fn is_lib(&self) -> bool { self.is_lib } pub(crate) fn target(&self) -> Option { self.target } } #[derive(PartialEq, Eq, Hash, Copy, Clone, Ord, PartialOrd, Debug)] pub enum ArtifactTarget { /// Only applicable to build-dependencies, causing them to be built /// for the given target (i.e. via `--target `) instead of for the host. /// Has no effect on non-build dependencies. BuildDependencyAssumeTarget, /// The name of the platform triple, like `x86_64-apple-darwin`, that this /// artifact will always be built for, no matter if it is a build, /// normal or dev dependency. 
Force(CompileTarget), } impl ArtifactTarget { pub fn parse(target: &str) -> CargoResult { Ok(match target { "target" => ArtifactTarget::BuildDependencyAssumeTarget, name => ArtifactTarget::Force(CompileTarget::new(name)?), }) } pub fn to_compile_kind(&self) -> Option { self.to_compile_target().map(CompileKind::Target) } pub fn to_compile_target(&self) -> Option { match self { ArtifactTarget::BuildDependencyAssumeTarget => None, ArtifactTarget::Force(target) => Some(*target), } } pub(crate) fn to_resolved_compile_kind( &self, root_unit_compile_kind: CompileKind, ) -> CompileKind { match self { ArtifactTarget::Force(target) => CompileKind::Target(*target), ArtifactTarget::BuildDependencyAssumeTarget => root_unit_compile_kind, } } pub(crate) fn to_resolved_compile_target( &self, root_unit_compile_kind: CompileKind, ) -> Option { match self.to_resolved_compile_kind(root_unit_compile_kind) { CompileKind::Host => None, CompileKind::Target(target) => Some(target), } } } #[derive(PartialEq, Eq, Hash, Copy, Clone, Ord, PartialOrd, Debug)] pub enum ArtifactKind { /// We represent all binaries in this dependency AllBinaries, /// We represent a single binary SelectedBinary(InternedString), Cdylib, Staticlib, } impl ser::Serialize for ArtifactKind { fn serialize(&self, s: S) -> Result where S: ser::Serializer, { let out: Cow<'_, str> = match *self { ArtifactKind::AllBinaries => "bin".into(), ArtifactKind::Staticlib => "staticlib".into(), ArtifactKind::Cdylib => "cdylib".into(), ArtifactKind::SelectedBinary(name) => format!("bin:{}", name.as_str()).into(), }; out.serialize(s) } } impl fmt::Display for ArtifactKind { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(match self { ArtifactKind::Cdylib => "cdylib", ArtifactKind::Staticlib => "staticlib", ArtifactKind::AllBinaries => "bin", ArtifactKind::SelectedBinary(bin_name) => return write!(f, "bin:{}", bin_name), }) } } impl ArtifactKind { fn parse(kind: &str) -> CargoResult { Ok(match kind { "bin" => 
ArtifactKind::AllBinaries, "cdylib" => ArtifactKind::Cdylib, "staticlib" => ArtifactKind::Staticlib, _ => { return kind .strip_prefix("bin:") .map(|bin_name| ArtifactKind::SelectedBinary(InternedString::new(bin_name))) .ok_or_else(|| anyhow::anyhow!("'{}' is not a valid artifact specifier", kind)) } }) } fn validate(kinds: Vec) -> CargoResult> { if kinds.iter().any(|k| matches!(k, ArtifactKind::AllBinaries)) && kinds .iter() .any(|k| matches!(k, ArtifactKind::SelectedBinary(_))) { anyhow::bail!("Cannot specify both 'bin' and 'bin:' binary artifacts, as 'bin' selects all available binaries."); } let mut kinds_without_dupes = kinds.clone(); kinds_without_dupes.sort(); kinds_without_dupes.dedup(); let num_dupes = kinds.len() - kinds_without_dupes.len(); if num_dupes != 0 { anyhow::bail!( "Found {} duplicate binary artifact{}", num_dupes, (num_dupes > 1).then(|| "s").unwrap_or("") ); } Ok(kinds) } } cargo-0.66.0/src/cargo/core/features.rs000066400000000000000000001210121432416201200176640ustar00rootroot00000000000000//! Support for nightly features in Cargo itself. //! //! This file is the version of `feature_gate.rs` in upstream Rust for Cargo //! itself and is intended to be the avenue for which new features in Cargo are //! gated by default and then eventually stabilized. All known stable and //! unstable features are tracked in this file. //! //! If you're reading this then you're likely interested in adding a feature to //! Cargo, and the good news is that it shouldn't be too hard! First determine //! how the feature should be gated: //! //! * New syntax in Cargo.toml should use `cargo-features`. //! * New CLI options should use `-Z unstable-options`. //! * New functionality that may not have an interface, or the interface has //! not yet been designed, or for more complex features that affect multiple //! parts of Cargo should use a new `-Z` flag. //! //! See below for more details. //! //! 
When adding new tests for your feature, usually the tests should go into a //! new module of the testsuite. See //! for more information on //! writing tests. Particularly, check out the "Testing Nightly Features" //! section for testing unstable features. //! //! After you have added your feature, be sure to update the unstable //! documentation at `src/doc/src/reference/unstable.md` to include a short //! description of how to use your new feature. //! //! And hopefully that's it! //! //! ## New Cargo.toml syntax //! //! The steps for adding new Cargo.toml syntax are: //! //! 1. Add the cargo-features unstable gate. Search below for "look here" to //! find the `features!` macro and add your feature to the list. //! //! 2. Update the Cargo.toml parsing code to handle your new feature. //! //! 3. Wherever you added the new parsing code, call //! `features.require(Feature::my_feature_name())?` if the new syntax is //! used. This will return an error if the user hasn't listed the feature //! in `cargo-features` or this is not the nightly channel. //! //! ## `-Z unstable-options` //! //! `-Z unstable-options` is intended to force the user to opt-in to new CLI //! flags, options, and new subcommands. //! //! The steps to add a new command-line option are: //! //! 1. Add the option to the CLI parsing code. In the help text, be sure to //! include `(unstable)` to note that this is an unstable option. //! 2. Where the CLI option is loaded, be sure to call //! [`CliUnstable::fail_if_stable_opt`]. This will return an error if `-Z //! unstable options` was not passed. //! //! ## `-Z` options //! //! The steps to add a new `-Z` option are: //! //! 1. Add the option to the [`CliUnstable`] struct below. Flags can take an //! optional value if you want. //! 2. Update the [`CliUnstable::add`][CliUnstable] function to parse the flag. //! 3. Wherever the new functionality is implemented, call //! [`Config::cli_unstable`][crate::util::config::Config::cli_unstable] to //! 
get an instance of `CliUnstable` and check if the option has been //! enabled on the `CliUnstable` instance. Nightly gating is already //! handled, so no need to worry about that. //! //! ## Stabilization //! //! For the stabilization process, see //! . //! //! The steps for stabilizing are roughly: //! //! 1. Update the feature to be stable, based on the kind of feature: //! 1. `cargo-features`: Change the feature to `stable` in the `features!` //! macro below, and include the version and a URL for the documentation. //! 2. `-Z unstable-options`: Find the call to `fail_if_stable_opt` and //! remove it. Be sure to update the man pages if necessary. //! 3. `-Z` flag: Change the parsing code in [`CliUnstable::add`][CliUnstable] //! to call `stabilized_warn` or `stabilized_err` and remove the field from //! `CliUnstable. Remove the `(unstable)` note in the clap help text if //! necessary. //! 2. Remove `masquerade_as_nightly_cargo` from any tests, and remove //! `cargo-features` from `Cargo.toml` test files if any. You can //! quickly find what needs to be removed by searching for the name //! of the feature, e.g. `print_im_a_teapot` //! 3. Update the docs in unstable.md to move the section to the bottom //! and summarize it similar to the other entries. Update the rest of the //! documentation to add the new feature. 
use std::collections::BTreeSet; use std::env; use std::fmt::{self, Write}; use std::str::FromStr; use anyhow::{bail, Error}; use cargo_util::ProcessBuilder; use serde::{Deserialize, Serialize}; use crate::core::resolver::ResolveBehavior; use crate::util::errors::CargoResult; use crate::util::{indented_lines, iter_join}; use crate::Config; pub const HIDDEN: &str = ""; pub const SEE_CHANNELS: &str = "See https://doc.rust-lang.org/book/appendix-07-nightly-rust.html for more information \ about Rust release channels."; /// The edition of the compiler (RFC 2052) #[derive(Clone, Copy, Debug, Hash, PartialOrd, Ord, Eq, PartialEq, Serialize, Deserialize)] pub enum Edition { /// The 2015 edition Edition2015, /// The 2018 edition Edition2018, /// The 2021 edition Edition2021, } // Adding a new edition: // - Add the next edition to the enum. // - Update every match expression that now fails to compile. // - Update the `FromStr` impl. // - Update CLI_VALUES to include the new edition. // - Set LATEST_UNSTABLE to Some with the new edition. // - Add an unstable feature to the features! macro below for the new edition. // - Gate on that new feature in TomlManifest::to_real_manifest. // - Update the shell completion files. // - Update any failing tests (hopefully there are very few). // - Update unstable.md to add a new section for this new edition (see // https://github.com/rust-lang/cargo/blob/3ebb5f15a940810f250b68821149387af583a79e/src/doc/src/reference/unstable.md?plain=1#L1238-L1264 // as an example). // // Stabilization instructions: // - Set LATEST_UNSTABLE to None. // - Set LATEST_STABLE to the new version. // - Update `is_stable` to `true`. // - Set the editionNNNN feature to stable in the features macro below. // - Update any tests that are affected. // - Update the man page for the --edition flag. // - Update unstable.md to move the edition section to the bottom. // - Update the documentation: // - Update any features impacted by the edition. 
// - Update manifest.md#the-edition-field. // - Update the --edition flag (options-new.md). // - Rebuild man pages. impl Edition { /// The latest edition that is unstable. /// /// This is `None` if there is no next unstable edition. pub const LATEST_UNSTABLE: Option = None; /// The latest stable edition. pub const LATEST_STABLE: Edition = Edition::Edition2021; /// Possible values allowed for the `--edition` CLI flag. /// /// This requires a static value due to the way clap works, otherwise I /// would have built this dynamically. pub const CLI_VALUES: [&'static str; 3] = ["2015", "2018", "2021"]; /// Returns the first version that a particular edition was released on /// stable. pub(crate) fn first_version(&self) -> Option { use Edition::*; match self { Edition2015 => None, Edition2018 => Some(semver::Version::new(1, 31, 0)), Edition2021 => Some(semver::Version::new(1, 56, 0)), } } /// Returns `true` if this edition is stable in this release. pub fn is_stable(&self) -> bool { use Edition::*; match self { Edition2015 => true, Edition2018 => true, Edition2021 => true, } } /// Returns the previous edition from this edition. /// /// Returns `None` for 2015. pub fn previous(&self) -> Option { use Edition::*; match self { Edition2015 => None, Edition2018 => Some(Edition2015), Edition2021 => Some(Edition2018), } } /// Returns the next edition from this edition, returning the last edition /// if this is already the last one. pub fn saturating_next(&self) -> Edition { use Edition::*; match self { Edition2015 => Edition2018, Edition2018 => Edition2021, Edition2021 => Edition2021, } } /// Updates the given [`ProcessBuilder`] to include the appropriate flags /// for setting the edition. pub(crate) fn cmd_edition_arg(&self, cmd: &mut ProcessBuilder) { if *self != Edition::Edition2015 { cmd.arg(format!("--edition={}", self)); } if !self.is_stable() { cmd.arg("-Z").arg("unstable-options"); } } /// Whether or not this edition supports the `rust_*_compatibility` lint. 
/// /// Ideally this would not be necessary, but editions may not have any /// lints, and thus `rustc` doesn't recognize it. Perhaps `rustc` could /// create an empty group instead? pub(crate) fn supports_compat_lint(&self) -> bool { use Edition::*; match self { Edition2015 => false, Edition2018 => true, Edition2021 => true, } } /// Whether or not this edition supports the `rust_*_idioms` lint. /// /// Ideally this would not be necessary... pub(crate) fn supports_idiom_lint(&self) -> bool { use Edition::*; match self { Edition2015 => false, Edition2018 => true, Edition2021 => false, } } pub(crate) fn default_resolve_behavior(&self) -> ResolveBehavior { if *self >= Edition::Edition2021 { ResolveBehavior::V2 } else { ResolveBehavior::V1 } } } impl fmt::Display for Edition { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { Edition::Edition2015 => f.write_str("2015"), Edition::Edition2018 => f.write_str("2018"), Edition::Edition2021 => f.write_str("2021"), } } } impl FromStr for Edition { type Err = Error; fn from_str(s: &str) -> Result { match s { "2015" => Ok(Edition::Edition2015), "2018" => Ok(Edition::Edition2018), "2021" => Ok(Edition::Edition2021), s if s.parse().map_or(false, |y: u16| y > 2021 && y < 2050) => bail!( "this version of Cargo is older than the `{}` edition, \ and only supports `2015`, `2018`, and `2021` editions.", s ), s => bail!( "supported edition values are `2015`, `2018`, or `2021`, \ but `{}` is unknown", s ), } } } #[derive(PartialEq)] enum Status { Stable, Unstable, Removed, } macro_rules! 
features { ( $(($stab:ident, $feature:ident, $version:expr, $docs:expr),)* ) => ( #[derive(Default, Clone, Debug)] pub struct Features { $($feature: bool,)* activated: Vec, nightly_features_allowed: bool, is_local: bool, } impl Feature { $( pub fn $feature() -> &'static Feature { fn get(features: &Features) -> bool { stab!($stab) == Status::Stable || features.$feature } static FEAT: Feature = Feature { name: stringify!($feature), stability: stab!($stab), version: $version, docs: $docs, get, }; &FEAT } )* fn is_enabled(&self, features: &Features) -> bool { (self.get)(features) } } impl Features { fn status(&mut self, feature: &str) -> Option<(&mut bool, &'static Feature)> { if feature.contains("_") { return None } let feature = feature.replace("-", "_"); $( if feature == stringify!($feature) { return Some((&mut self.$feature, Feature::$feature())) } )* None } } ) } macro_rules! stab { (stable) => { Status::Stable }; (unstable) => { Status::Unstable }; (removed) => { Status::Removed }; } // A listing of all features in Cargo. // // "look here" // // This is the macro that lists all stable and unstable features in Cargo. // You'll want to add to this macro whenever you add a feature to Cargo, also // following the directions above. // // Note that all feature names here are valid Rust identifiers, but the `_` // character is translated to `-` when specified in the `cargo-features` // manifest entry in `Cargo.toml`. features! { // A dummy feature that doesn't actually gate anything, but it's used in // testing to ensure that we can enable stable features. (stable, test_dummy_stable, "1.0", ""), // A dummy feature that gates the usage of the `im-a-teapot` manifest // entry. This is basically just intended for tests. (unstable, test_dummy_unstable, "", "reference/unstable.html"), // Downloading packages from alternative registry indexes. 
(stable, alternative_registries, "1.34", "reference/registries.html"), // Using editions (stable, edition, "1.31", "reference/manifest.html#the-edition-field"), // Renaming a package in the manifest via the `package` key (stable, rename_dependency, "1.31", "reference/specifying-dependencies.html#renaming-dependencies-in-cargotoml"), // Whether a lock file is published with this crate (removed, publish_lockfile, "1.37", "reference/unstable.html#publish-lockfile"), // Overriding profiles for dependencies. (stable, profile_overrides, "1.41", "reference/profiles.html#overrides"), // "default-run" manifest option, (stable, default_run, "1.37", "reference/manifest.html#the-default-run-field"), // Declarative build scripts. (unstable, metabuild, "", "reference/unstable.html#metabuild"), // Specifying the 'public' attribute on dependencies (unstable, public_dependency, "", "reference/unstable.html#public-dependency"), // Allow to specify profiles other than 'dev', 'release', 'test', etc. (stable, named_profiles, "1.57", "reference/profiles.html#custom-profiles"), // Opt-in new-resolver behavior. (stable, resolver, "1.51", "reference/resolver.html#resolver-versions"), // Allow to specify whether binaries should be stripped. (stable, strip, "1.58", "reference/profiles.html#strip-option"), // Specifying a minimal 'rust-version' attribute for crates (stable, rust_version, "1.56", "reference/manifest.html#the-rust-version-field"), // Support for 2021 edition. (stable, edition2021, "1.56", "reference/manifest.html#the-edition-field"), // Allow to specify per-package targets (compile kinds) (unstable, per_package_target, "", "reference/unstable.html#per-package-target"), // Allow to specify which codegen backend should be used. 
(unstable, codegen_backend, "", "reference/unstable.html#codegen-backend"), // Allow specifying different binary name apart from the crate name (unstable, different_binary_name, "", "reference/unstable.html#different-binary-name"), // Allow specifying rustflags directly in a profile (unstable, profile_rustflags, "", "reference/unstable.html#profile-rustflags-option"), // Allow specifying rustflags directly in a profile (stable, workspace_inheritance, "1.64", "reference/unstable.html#workspace-inheritance"), } pub struct Feature { name: &'static str, stability: Status, version: &'static str, docs: &'static str, get: fn(&Features) -> bool, } impl Features { pub fn new( features: &[String], config: &Config, warnings: &mut Vec, is_local: bool, ) -> CargoResult { let mut ret = Features::default(); ret.nightly_features_allowed = config.nightly_features_allowed; ret.is_local = is_local; for feature in features { ret.add(feature, config, warnings)?; ret.activated.push(feature.to_string()); } Ok(ret) } fn add( &mut self, feature_name: &str, config: &Config, warnings: &mut Vec, ) -> CargoResult<()> { let nightly_features_allowed = self.nightly_features_allowed; let is_local = self.is_local; let (slot, feature) = match self.status(feature_name) { Some(p) => p, None => bail!("unknown cargo feature `{}`", feature_name), }; if *slot { bail!( "the cargo feature `{}` has already been activated", feature_name ); } let see_docs = || { let url_channel = match channel().as_str() { "dev" | "nightly" => "nightly/", "beta" => "beta/", _ => "", }; format!( "See https://doc.rust-lang.org/{}cargo/{} for more information \ about using this feature.", url_channel, feature.docs ) }; match feature.stability { Status::Stable => { // The user can't do anything about non-local packages. // Warnings are usually suppressed, but just being cautious here. 
if is_local { let warning = format!( "the cargo feature `{}` has been stabilized in the {} \ release and is no longer necessary to be listed in the \ manifest\n {}", feature_name, feature.version, see_docs() ); warnings.push(warning); } } Status::Unstable if !nightly_features_allowed => bail!( "the cargo feature `{}` requires a nightly version of \ Cargo, but this is the `{}` channel\n\ {}\n{}", feature_name, channel(), SEE_CHANNELS, see_docs() ), Status::Unstable => { if let Some(allow) = &config.cli_unstable().allow_features { if !allow.contains(feature_name) { bail!( "the feature `{}` is not in the list of allowed features: [{}]", feature_name, iter_join(allow, ", "), ); } } } Status::Removed => { let mut msg = format!( "the cargo feature `{}` has been removed in the {} release\n\n", feature_name, feature.version ); if self.is_local { drop(writeln!( msg, "Remove the feature from Cargo.toml to remove this error." )); } else { drop(writeln!( msg, "This package cannot be used with this version of Cargo, \ as the unstable feature `{}` is no longer supported.", feature_name )); } drop(writeln!(msg, "{}", see_docs())); bail!(msg); } } *slot = true; Ok(()) } pub fn activated(&self) -> &[String] { &self.activated } pub fn require(&self, feature: &Feature) -> CargoResult<()> { if feature.is_enabled(self) { return Ok(()); } let feature_name = feature.name.replace("_", "-"); let mut msg = format!( "feature `{}` is required\n\ \n\ The package requires the Cargo feature called `{}`, but \ that feature is not stabilized in this version of Cargo ({}).\n\ ", feature_name, feature_name, crate::version(), ); if self.nightly_features_allowed { if self.is_local { drop(writeln!( msg, "Consider adding `cargo-features = [\"{}\"]` \ to the top of Cargo.toml (above the [package] table) \ to tell Cargo you are opting in to use this unstable feature.", feature_name )); } else { drop(writeln!( msg, "Consider trying a more recent nightly release." 
)); } } else { drop(writeln!( msg, "Consider trying a newer version of Cargo \ (this may require the nightly release)." )); } drop(writeln!( msg, "See https://doc.rust-lang.org/nightly/cargo/{} for more information \ about the status of this feature.", feature.docs )); bail!("{}", msg); } pub fn is_enabled(&self, feature: &Feature) -> bool { feature.is_enabled(self) } } macro_rules! unstable_cli_options { ( $( $(#[$meta:meta])? $element: ident: $ty: ty = ($help: expr ), )* ) => { /// A parsed representation of all unstable flags that Cargo accepts. /// /// Cargo, like `rustc`, accepts a suite of `-Z` flags which are intended for /// gating unstable functionality to Cargo. These flags are only available on /// the nightly channel of Cargo. #[derive(Default, Debug, Deserialize)] #[serde(default, rename_all = "kebab-case")] pub struct CliUnstable { $( $(#[$meta])? pub $element: $ty ),* } impl CliUnstable { pub fn help() -> Vec<(&'static str, &'static str)> { let fields = vec![$((stringify!($element), $help)),*]; fields } } } } unstable_cli_options!( // Permanently unstable features: allow_features: Option> = ("Allow *only* the listed unstable features"), print_im_a_teapot: bool = (HIDDEN), // All other unstable features. // Please keep this list lexiographically ordered. 
advanced_env: bool = (HIDDEN), avoid_dev_deps: bool = ("Avoid installing dev-dependencies if possible"), binary_dep_depinfo: bool = ("Track changes to dependency artifacts"), bindeps: bool = ("Allow Cargo packages to depend on bin, cdylib, and staticlib crates, and use the artifacts built by those crates"), #[serde(deserialize_with = "deserialize_build_std")] build_std: Option> = ("Enable Cargo to compile the standard library itself as part of a crate graph compilation"), build_std_features: Option> = ("Configure features enabled for the standard library itself when building the standard library"), config_include: bool = ("Enable the `include` key in config files"), credential_process: bool = ("Add a config setting to fetch registry authentication tokens by calling an external process"), #[serde(deserialize_with = "deserialize_check_cfg")] check_cfg: Option<(/*features:*/ bool, /*well_known_names:*/ bool, /*well_known_values:*/ bool, /*output:*/ bool)> = ("Specify scope of compile-time checking of `cfg` names/values"), doctest_in_workspace: bool = ("Compile doctests with paths relative to the workspace root"), doctest_xcompile: bool = ("Compile and run doctests for non-host target using runner config"), dual_proc_macros: bool = ("Build proc-macros for both the host and the target"), features: Option> = (HIDDEN), jobserver_per_rustc: bool = (HIDDEN), minimal_versions: bool = ("Resolve minimal dependency versions instead of maximum"), mtime_on_use: bool = ("Configure Cargo to update the mtime of used files"), multitarget: bool = ("Allow passing multiple `--target` flags to the cargo subcommand selected"), no_index_update: bool = ("Do not update the registry index even if the cache is outdated"), panic_abort_tests: bool = ("Enable support to run tests with -Cpanic=abort"), host_config: bool = ("Enable the [host] section in the .cargo/config.toml file"), sparse_registry: bool = ("Support plain-HTTP-based crate registries"), target_applies_to_host: bool = ("Enable the 
`target-applies-to-host` key in the .cargo/config.toml file"), rustdoc_map: bool = ("Allow passing external documentation mappings to rustdoc"), separate_nightlies: bool = (HIDDEN), terminal_width: Option> = ("Provide a terminal width to rustc for error truncation"), unstable_options: bool = ("Allow the usage of unstable options"), // TODO(wcrichto): move scrape example configuration into Cargo.toml before stabilization // See: https://github.com/rust-lang/cargo/pull/9525#discussion_r728470927 rustdoc_scrape_examples: Option = ("Allow rustdoc to scrape examples from reverse-dependencies for documentation"), skip_rustdoc_fingerprint: bool = (HIDDEN), ); const STABILIZED_COMPILE_PROGRESS: &str = "The progress bar is now always \ enabled when used on an interactive console.\n\ See https://doc.rust-lang.org/cargo/reference/config.html#termprogresswhen \ for information on controlling the progress bar."; const STABILIZED_OFFLINE: &str = "Offline mode is now available via the \ --offline CLI option"; const STABILIZED_CACHE_MESSAGES: &str = "Message caching is now always enabled."; const STABILIZED_INSTALL_UPGRADE: &str = "Packages are now always upgraded if \ they appear out of date.\n\ See https://doc.rust-lang.org/cargo/commands/cargo-install.html for more \ information on how upgrading works."; const STABILIZED_CONFIG_PROFILE: &str = "See \ https://doc.rust-lang.org/cargo/reference/config.html#profile for more \ information about specifying profiles in config."; const STABILIZED_CRATE_VERSIONS: &str = "The crate version is now \ automatically added to the documentation."; const STABILIZED_PACKAGE_FEATURES: &str = "Enhanced feature flag behavior is now \ available in virtual workspaces, and `member/feature-name` syntax is also \ always available. 
Other extensions require setting `resolver = \"2\"` in \ Cargo.toml.\n\ See https://doc.rust-lang.org/nightly/cargo/reference/features.html#resolver-version-2-command-line-flags \ for more information."; const STABILIZED_FEATURES: &str = "The new feature resolver is now available \ by specifying `resolver = \"2\"` in Cargo.toml.\n\ See https://doc.rust-lang.org/nightly/cargo/reference/features.html#feature-resolver-version-2 \ for more information."; const STABILIZED_EXTRA_LINK_ARG: &str = "Additional linker arguments are now \ supported without passing this flag."; const STABILIZED_CONFIGURABLE_ENV: &str = "The [env] section is now always enabled."; const STABILIZED_PATCH_IN_CONFIG: &str = "The patch-in-config feature is now always enabled."; const STABILIZED_NAMED_PROFILES: &str = "The named-profiles feature is now always enabled.\n\ See https://doc.rust-lang.org/nightly/cargo/reference/profiles.html#custom-profiles \ for more information"; const STABILIZED_FUTURE_INCOMPAT_REPORT: &str = "The future-incompat-report feature is now always enabled."; const STABILIZED_WEAK_DEP_FEATURES: &str = "Weak dependency features are now always available."; const STABILISED_NAMESPACED_FEATURES: &str = "Namespaced features are now always available."; const STABILIZED_TIMINGS: &str = "The -Ztimings option has been stabilized as --timings."; const STABILISED_MULTITARGET: &str = "Multiple `--target` options are now always available."; fn deserialize_build_std<'de, D>(deserializer: D) -> Result>, D::Error> where D: serde::Deserializer<'de>, { let crates = match >>::deserialize(deserializer)? { Some(list) => list, None => return Ok(None), }; let v = crates.join(","); Ok(Some( crate::core::compiler::standard_lib::parse_unstable_flag(Some(&v)), )) } fn deserialize_check_cfg<'de, D>( deserializer: D, ) -> Result, D::Error> where D: serde::Deserializer<'de>, { use serde::de::Error; let crates = match >>::deserialize(deserializer)? 
{ Some(list) => list, None => return Ok(None), }; parse_check_cfg(crates.into_iter()).map_err(D::Error::custom) } fn parse_check_cfg( it: impl Iterator>, ) -> CargoResult> { let mut features = false; let mut well_known_names = false; let mut well_known_values = false; let mut output = false; for e in it { match e.as_ref() { "features" => features = true, "names" => well_known_names = true, "values" => well_known_values = true, "output" => output = true, _ => bail!("unstable check-cfg only takes `features`, `names`, `values` or `output` as valid inputs"), } } Ok(Some(( features, well_known_names, well_known_values, output, ))) } impl CliUnstable { pub fn parse( &mut self, flags: &[String], nightly_features_allowed: bool, ) -> CargoResult> { if !flags.is_empty() && !nightly_features_allowed { bail!( "the `-Z` flag is only accepted on the nightly channel of Cargo, \ but this is the `{}` channel\n\ {}", channel(), SEE_CHANNELS ); } let mut warnings = Vec::new(); // We read flags twice, first to get allowed-features (if specified), // and then to read the remaining unstable flags. for flag in flags { if flag.starts_with("allow-features=") { self.add(flag, &mut warnings)?; } } for flag in flags { self.add(flag, &mut warnings)?; } Ok(warnings) } fn add(&mut self, flag: &str, warnings: &mut Vec) -> CargoResult<()> { let mut parts = flag.splitn(2, '='); let k = parts.next().unwrap(); let v = parts.next(); fn parse_bool(key: &str, value: Option<&str>) -> CargoResult { match value { None | Some("yes") => Ok(true), Some("no") => Ok(false), Some(s) => bail!("flag -Z{} expected `no` or `yes`, found: `{}`", key, s), } } fn parse_features(value: Option<&str>) -> Vec { match value { None => Vec::new(), Some("") => Vec::new(), Some(v) => v.split(',').map(|s| s.to_string()).collect(), } } // Asserts that there is no argument to the flag. 
fn parse_empty(key: &str, value: Option<&str>) -> CargoResult { if let Some(v) = value { bail!("flag -Z{} does not take a value, found: `{}`", key, v); } Ok(true) } fn parse_usize_opt(value: Option<&str>) -> CargoResult> { Ok(match value { Some(value) => match value.parse::() { Ok(value) => Some(value), Err(e) => bail!("expected a number, found: {}", e), }, None => None, }) } let mut stabilized_warn = |key: &str, version: &str, message: &str| { warnings.push(format!( "flag `-Z {}` has been stabilized in the {} release, \ and is no longer necessary\n{}", key, version, indented_lines(message) )); }; // Use this if the behavior now requires another mechanism to enable. let stabilized_err = |key: &str, version: &str, message: &str| { Err(anyhow::format_err!( "flag `-Z {}` has been stabilized in the {} release\n{}", key, version, indented_lines(message) )) }; if let Some(allowed) = &self.allow_features { if k != "allow-features" && !allowed.contains(k) { bail!( "the feature `{}` is not in the list of allowed features: [{}]", k, iter_join(allowed, ", ") ); } } match k { "print-im-a-teapot" => self.print_im_a_teapot = parse_bool(k, v)?, "allow-features" => self.allow_features = Some(parse_features(v).into_iter().collect()), "unstable-options" => self.unstable_options = parse_empty(k, v)?, "no-index-update" => self.no_index_update = parse_empty(k, v)?, "avoid-dev-deps" => self.avoid_dev_deps = parse_empty(k, v)?, "minimal-versions" => self.minimal_versions = parse_empty(k, v)?, "advanced-env" => self.advanced_env = parse_empty(k, v)?, "config-include" => self.config_include = parse_empty(k, v)?, "check-cfg" => { self.check_cfg = v.map_or(Ok(None), |v| parse_check_cfg(v.split(',')))? 
} "dual-proc-macros" => self.dual_proc_macros = parse_empty(k, v)?, // can also be set in .cargo/config or with and ENV "mtime-on-use" => self.mtime_on_use = parse_empty(k, v)?, "named-profiles" => stabilized_warn(k, "1.57", STABILIZED_NAMED_PROFILES), "binary-dep-depinfo" => self.binary_dep_depinfo = parse_empty(k, v)?, "bindeps" => self.bindeps = parse_empty(k, v)?, "build-std" => { self.build_std = Some(crate::core::compiler::standard_lib::parse_unstable_flag(v)) } "build-std-features" => self.build_std_features = Some(parse_features(v)), "doctest-xcompile" => self.doctest_xcompile = parse_empty(k, v)?, "doctest-in-workspace" => self.doctest_in_workspace = parse_empty(k, v)?, "panic-abort-tests" => self.panic_abort_tests = parse_empty(k, v)?, "jobserver-per-rustc" => self.jobserver_per_rustc = parse_empty(k, v)?, "host-config" => self.host_config = parse_empty(k, v)?, "target-applies-to-host" => self.target_applies_to_host = parse_empty(k, v)?, "features" => { // For now this is still allowed (there are still some // unstable options like "compare"). This should be removed at // some point, and migrate to a new -Z flag for any future // things. let feats = parse_features(v); let stab_is_not_empty = feats.iter().any(|feat| { matches!( feat.as_str(), "build_dep" | "host_dep" | "dev_dep" | "itarget" | "all" ) }); if stab_is_not_empty || feats.is_empty() { // Make this stabilized_err once -Zfeature support is removed. 
stabilized_warn(k, "1.51", STABILIZED_FEATURES); } self.features = Some(feats); } "separate-nightlies" => self.separate_nightlies = parse_empty(k, v)?, "multitarget" => stabilized_warn(k, "1.64", STABILISED_MULTITARGET), "rustdoc-map" => self.rustdoc_map = parse_empty(k, v)?, "terminal-width" => self.terminal_width = Some(parse_usize_opt(v)?), "sparse-registry" => self.sparse_registry = parse_empty(k, v)?, "namespaced-features" => stabilized_warn(k, "1.60", STABILISED_NAMESPACED_FEATURES), "weak-dep-features" => stabilized_warn(k, "1.60", STABILIZED_WEAK_DEP_FEATURES), "credential-process" => self.credential_process = parse_empty(k, v)?, "rustdoc-scrape-examples" => { if let Some(s) = v { self.rustdoc_scrape_examples = Some(s.to_string()) } else { bail!( r#"-Z rustdoc-scrape-examples must take "all" or "examples" as an argument"# ) } } "skip-rustdoc-fingerprint" => self.skip_rustdoc_fingerprint = parse_empty(k, v)?, "compile-progress" => stabilized_warn(k, "1.30", STABILIZED_COMPILE_PROGRESS), "offline" => stabilized_err(k, "1.36", STABILIZED_OFFLINE)?, "cache-messages" => stabilized_warn(k, "1.40", STABILIZED_CACHE_MESSAGES), "install-upgrade" => stabilized_warn(k, "1.41", STABILIZED_INSTALL_UPGRADE), "config-profile" => stabilized_warn(k, "1.43", STABILIZED_CONFIG_PROFILE), "crate-versions" => stabilized_warn(k, "1.47", STABILIZED_CRATE_VERSIONS), "package-features" => stabilized_warn(k, "1.51", STABILIZED_PACKAGE_FEATURES), "extra-link-arg" => stabilized_warn(k, "1.56", STABILIZED_EXTRA_LINK_ARG), "configurable-env" => stabilized_warn(k, "1.56", STABILIZED_CONFIGURABLE_ENV), "patch-in-config" => stabilized_warn(k, "1.56", STABILIZED_PATCH_IN_CONFIG), "future-incompat-report" => { stabilized_warn(k, "1.59.0", STABILIZED_FUTURE_INCOMPAT_REPORT) } "timings" => stabilized_warn(k, "1.60", STABILIZED_TIMINGS), _ => bail!("unknown `-Z` flag specified: {}", k), } Ok(()) } /// Generates an error if `-Z unstable-options` was not used for a new, /// unstable command-line 
flag. pub fn fail_if_stable_opt(&self, flag: &str, issue: u32) -> CargoResult<()> { if !self.unstable_options { let see = format!( "See https://github.com/rust-lang/cargo/issues/{} for more \ information about the `{}` flag.", issue, flag ); // NOTE: a `config` isn't available here, check the channel directly let channel = channel(); if channel == "nightly" || channel == "dev" { bail!( "the `{}` flag is unstable, pass `-Z unstable-options` to enable it\n\ {}", flag, see ); } else { bail!( "the `{}` flag is unstable, and only available on the nightly channel \ of Cargo, but this is the `{}` channel\n\ {}\n\ {}", flag, channel, SEE_CHANNELS, see ); } } Ok(()) } /// Generates an error if `-Z unstable-options` was not used for a new, /// unstable subcommand. pub fn fail_if_stable_command( &self, config: &Config, command: &str, issue: u32, ) -> CargoResult<()> { if self.unstable_options { return Ok(()); } let see = format!( "See https://github.com/rust-lang/cargo/issues/{} for more \ information about the `cargo {}` command.", issue, command ); if config.nightly_features_allowed { bail!( "the `cargo {}` command is unstable, pass `-Z unstable-options` to enable it\n\ {}", command, see ); } else { bail!( "the `cargo {}` command is unstable, and only available on the \ nightly channel of Cargo, but this is the `{}` channel\n\ {}\n\ {}", command, channel(), SEE_CHANNELS, see ); } } } /// Returns the current release channel ("stable", "beta", "nightly", "dev"). 
pub fn channel() -> String { if let Ok(override_channel) = env::var("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS") { return override_channel; } if let Ok(staging) = env::var("RUSTC_BOOTSTRAP") { if staging == "1" { return "dev".to_string(); } } crate::version() .release_channel .unwrap_or_else(|| String::from("dev")) } cargo-0.66.0/src/cargo/core/manifest.rs000066400000000000000000000740531432416201200176700ustar00rootroot00000000000000use std::collections::{BTreeMap, HashMap}; use std::fmt; use std::hash::{Hash, Hasher}; use std::path::{Path, PathBuf}; use std::rc::Rc; use std::sync::Arc; use anyhow::Context as _; use semver::Version; use serde::ser; use serde::Serialize; use toml_edit::easy as toml; use url::Url; use crate::core::compiler::{CompileKind, CrateType}; use crate::core::resolver::ResolveBehavior; use crate::core::{Dependency, PackageId, PackageIdSpec, SourceId, Summary}; use crate::core::{Edition, Feature, Features, WorkspaceConfig}; use crate::util::errors::*; use crate::util::interning::InternedString; use crate::util::toml::{TomlManifest, TomlProfiles}; use crate::util::{short_hash, Config, Filesystem}; pub enum EitherManifest { Real(Manifest), Virtual(VirtualManifest), } impl EitherManifest { pub(crate) fn workspace_config(&self) -> &WorkspaceConfig { match *self { EitherManifest::Real(ref r) => r.workspace_config(), EitherManifest::Virtual(ref v) => v.workspace_config(), } } } /// Contains all the information about a package, as loaded from a `Cargo.toml`. /// /// This is deserialized using the [`TomlManifest`] type. 
#[derive(Clone, Debug)] pub struct Manifest { summary: Summary, targets: Vec, default_kind: Option, forced_kind: Option, links: Option, warnings: Warnings, exclude: Vec, include: Vec, metadata: ManifestMetadata, custom_metadata: Option, profiles: Option, publish: Option>, replace: Vec<(PackageIdSpec, Dependency)>, patch: HashMap>, workspace: WorkspaceConfig, original: Rc, unstable_features: Features, edition: Edition, rust_version: Option, im_a_teapot: Option, default_run: Option, metabuild: Option>, resolve_behavior: Option, } /// When parsing `Cargo.toml`, some warnings should silenced /// if the manifest comes from a dependency. `ManifestWarning` /// allows this delayed emission of warnings. #[derive(Clone, Debug)] pub struct DelayedWarning { pub message: String, pub is_critical: bool, } #[derive(Clone, Debug)] pub struct Warnings(Vec); #[derive(Clone, Debug)] pub struct VirtualManifest { replace: Vec<(PackageIdSpec, Dependency)>, patch: HashMap>, workspace: WorkspaceConfig, profiles: Option, warnings: Warnings, features: Features, resolve_behavior: Option, } /// General metadata about a package which is just blindly uploaded to the /// registry. /// /// Note that many of these fields can contain invalid values such as the /// homepage, repository, documentation, or license. These fields are not /// validated by cargo itself, but rather it is up to the registry when uploaded /// to validate these fields. Cargo will itself accept any valid TOML /// specification for these values. 
#[derive(PartialEq, Clone, Debug)] pub struct ManifestMetadata { pub authors: Vec, pub keywords: Vec, pub categories: Vec, pub license: Option, pub license_file: Option, pub description: Option, // Not in Markdown pub readme: Option, // File, not contents pub homepage: Option, // URL pub repository: Option, // URL pub documentation: Option, // URL pub badges: BTreeMap>, pub links: Option, } #[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] pub enum TargetKind { Lib(Vec), Bin, Test, Bench, ExampleLib(Vec), ExampleBin, CustomBuild, } impl ser::Serialize for TargetKind { fn serialize(&self, s: S) -> Result where S: ser::Serializer, { use self::TargetKind::*; match self { Lib(kinds) => s.collect_seq(kinds.iter().map(|t| t.to_string())), Bin => ["bin"].serialize(s), ExampleBin | ExampleLib(_) => ["example"].serialize(s), Test => ["test"].serialize(s), CustomBuild => ["custom-build"].serialize(s), Bench => ["bench"].serialize(s), } } } impl fmt::Debug for TargetKind { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { use self::TargetKind::*; match *self { Lib(ref kinds) => kinds.fmt(f), Bin => "bin".fmt(f), ExampleBin | ExampleLib(_) => "example".fmt(f), Test => "test".fmt(f), CustomBuild => "custom-build".fmt(f), Bench => "bench".fmt(f), } } } impl TargetKind { pub fn description(&self) -> &'static str { match self { TargetKind::Lib(..) => "lib", TargetKind::Bin => "bin", TargetKind::Test => "integration-test", TargetKind::ExampleBin | TargetKind::ExampleLib(..) => "example", TargetKind::Bench => "bench", TargetKind::CustomBuild => "build-script", } } /// Returns whether production of this artifact requires the object files /// from dependencies to be available. /// /// This only returns `false` when all we're producing is an rlib, otherwise /// it will return `true`. 
pub fn requires_upstream_objects(&self) -> bool { match self { TargetKind::Lib(kinds) | TargetKind::ExampleLib(kinds) => { kinds.iter().any(|k| k.requires_upstream_objects()) } _ => true, } } /// Returns the arguments suitable for `--crate-type` to pass to rustc. pub fn rustc_crate_types(&self) -> Vec { match self { TargetKind::Lib(kinds) | TargetKind::ExampleLib(kinds) => kinds.clone(), TargetKind::CustomBuild | TargetKind::Bench | TargetKind::Test | TargetKind::ExampleBin | TargetKind::Bin => vec![CrateType::Bin], } } } /// Information about a binary, a library, an example, etc. that is part of the /// package. #[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct Target { inner: Arc, } #[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] struct TargetInner { kind: TargetKind, name: String, // Note that `bin_name` is used for the cargo-feature `different_binary_name` bin_name: Option, // Note that the `src_path` here is excluded from the `Hash` implementation // as it's absolute currently and is otherwise a little too brittle for // causing rebuilds. Instead the hash for the path that we send to the // compiler is handled elsewhere. src_path: TargetSourcePath, required_features: Option>, tested: bool, benched: bool, doc: bool, doctest: bool, harness: bool, // whether to use the test harness (--test) for_host: bool, proc_macro: bool, edition: Edition, } #[derive(Clone, PartialEq, Eq, PartialOrd, Ord)] pub enum TargetSourcePath { Path(PathBuf), Metabuild, } impl TargetSourcePath { pub fn path(&self) -> Option<&Path> { match self { TargetSourcePath::Path(path) => Some(path.as_ref()), TargetSourcePath::Metabuild => None, } } pub fn is_path(&self) -> bool { matches!(self, TargetSourcePath::Path(_)) } } impl Hash for TargetSourcePath { fn hash(&self, _: &mut H) { // ... 
} } impl fmt::Debug for TargetSourcePath { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { TargetSourcePath::Path(path) => path.fmt(f), TargetSourcePath::Metabuild => "metabuild".fmt(f), } } } impl From for TargetSourcePath { fn from(path: PathBuf) -> Self { assert!(path.is_absolute(), "`{}` is not absolute", path.display()); TargetSourcePath::Path(path) } } #[derive(Serialize)] struct SerializedTarget<'a> { /// Is this a `--bin bin`, `--lib`, `--example ex`? /// Serialized as a list of strings for historical reasons. kind: &'a TargetKind, /// Corresponds to `--crate-type` compiler attribute. /// See crate_types: Vec, name: &'a str, src_path: Option<&'a PathBuf>, edition: &'a str, #[serde(rename = "required-features", skip_serializing_if = "Option::is_none")] required_features: Option>, /// Whether docs should be built for the target via `cargo doc` /// See doc: bool, doctest: bool, /// Whether tests should be run for the target (`test` field in `Cargo.toml`) test: bool, } impl ser::Serialize for Target { fn serialize(&self, s: S) -> Result { let src_path = match self.src_path() { TargetSourcePath::Path(p) => Some(p), // Unfortunately getting the correct path would require access to // target_dir, which is not available here. TargetSourcePath::Metabuild => None, }; SerializedTarget { kind: self.kind(), crate_types: self.rustc_crate_types(), name: self.name(), src_path, edition: &self.edition().to_string(), required_features: self .required_features() .map(|rf| rf.iter().map(|s| s.as_str()).collect()), doc: self.documented(), doctest: self.doctested() && self.doctestable(), test: self.tested(), } .serialize(s) } } impl fmt::Debug for Target { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.inner.fmt(f) } } compact_debug! 
{ impl fmt::Debug for TargetInner { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let (default, default_name) = { match &self.kind { TargetKind::Lib(kinds) => { ( Target::lib_target( &self.name, kinds.clone(), self.src_path.path().unwrap().to_path_buf(), self.edition, ).inner, format!("lib_target({:?}, {:?}, {:?}, {:?})", self.name, kinds, self.src_path, self.edition), ) } TargetKind::CustomBuild => { match self.src_path { TargetSourcePath::Path(ref path) => { ( Target::custom_build_target( &self.name, path.to_path_buf(), self.edition, ).inner, format!("custom_build_target({:?}, {:?}, {:?})", self.name, path, self.edition), ) } TargetSourcePath::Metabuild => { ( Target::metabuild_target(&self.name).inner, format!("metabuild_target({:?})", self.name), ) } } } _ => ( Target::new(self.src_path.clone(), self.edition).inner, format!("with_path({:?}, {:?})", self.src_path, self.edition), ), } }; [debug_the_fields( kind name bin_name src_path required_features tested benched doc doctest harness for_host proc_macro edition )] } } } impl Manifest { pub fn new( summary: Summary, default_kind: Option, forced_kind: Option, targets: Vec, exclude: Vec, include: Vec, links: Option, metadata: ManifestMetadata, custom_metadata: Option, profiles: Option, publish: Option>, replace: Vec<(PackageIdSpec, Dependency)>, patch: HashMap>, workspace: WorkspaceConfig, unstable_features: Features, edition: Edition, rust_version: Option, im_a_teapot: Option, default_run: Option, original: Rc, metabuild: Option>, resolve_behavior: Option, ) -> Manifest { Manifest { summary, default_kind, forced_kind, targets, warnings: Warnings::new(), exclude, include, links, metadata, custom_metadata, profiles, publish, replace, patch, workspace, unstable_features, edition, rust_version, original, im_a_teapot, default_run, metabuild, resolve_behavior, } } pub fn dependencies(&self) -> &[Dependency] { self.summary.dependencies() } pub fn default_kind(&self) -> Option { self.default_kind } pub fn 
forced_kind(&self) -> Option { self.forced_kind } pub fn exclude(&self) -> &[String] { &self.exclude } pub fn include(&self) -> &[String] { &self.include } pub fn metadata(&self) -> &ManifestMetadata { &self.metadata } pub fn name(&self) -> InternedString { self.package_id().name() } pub fn package_id(&self) -> PackageId { self.summary.package_id() } pub fn summary(&self) -> &Summary { &self.summary } pub fn summary_mut(&mut self) -> &mut Summary { &mut self.summary } pub fn targets(&self) -> &[Target] { &self.targets } // It is used by cargo-c, please do not remove it pub fn targets_mut(&mut self) -> &mut [Target] { &mut self.targets } pub fn version(&self) -> &Version { self.package_id().version() } pub fn warnings_mut(&mut self) -> &mut Warnings { &mut self.warnings } pub fn warnings(&self) -> &Warnings { &self.warnings } pub fn profiles(&self) -> Option<&TomlProfiles> { self.profiles.as_ref() } pub fn publish(&self) -> &Option> { &self.publish } pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] { &self.replace } pub fn original(&self) -> &TomlManifest { &self.original } pub fn patch(&self) -> &HashMap> { &self.patch } pub fn links(&self) -> Option<&str> { self.links.as_deref() } pub fn workspace_config(&self) -> &WorkspaceConfig { &self.workspace } /// Unstable, nightly features that are enabled in this manifest. pub fn unstable_features(&self) -> &Features { &self.unstable_features } /// The style of resolver behavior to use, declared with the `resolver` field. /// /// Returns `None` if it is not specified. 
pub fn resolve_behavior(&self) -> Option { self.resolve_behavior } pub fn map_source(self, to_replace: SourceId, replace_with: SourceId) -> Manifest { Manifest { summary: self.summary.map_source(to_replace, replace_with), ..self } } pub fn feature_gate(&self) -> CargoResult<()> { if self.im_a_teapot.is_some() { self.unstable_features .require(Feature::test_dummy_unstable()) .with_context(|| { "the `im-a-teapot` manifest key is unstable and may \ not work properly in England" })?; } if self.default_kind.is_some() || self.forced_kind.is_some() { self.unstable_features .require(Feature::per_package_target()) .with_context(|| { "the `package.default-target` and `package.forced-target` \ manifest keys are unstable and may not work properly" })?; } Ok(()) } // Just a helper function to test out `-Z` flags on Cargo pub fn print_teapot(&self, config: &Config) { if let Some(teapot) = self.im_a_teapot { if config.cli_unstable().print_im_a_teapot { crate::drop_println!(config, "im-a-teapot = {}", teapot); } } } pub fn edition(&self) -> Edition { self.edition } pub fn rust_version(&self) -> Option<&str> { self.rust_version.as_deref() } pub fn custom_metadata(&self) -> Option<&toml::Value> { self.custom_metadata.as_ref() } pub fn default_run(&self) -> Option<&str> { self.default_run.as_deref() } pub fn metabuild(&self) -> Option<&Vec> { self.metabuild.as_ref() } pub fn metabuild_path(&self, target_dir: Filesystem) -> PathBuf { let hash = short_hash(&self.package_id()); target_dir .into_path_unlocked() .join(".metabuild") .join(format!("metabuild-{}-{}.rs", self.name(), hash)) } } impl VirtualManifest { pub fn new( replace: Vec<(PackageIdSpec, Dependency)>, patch: HashMap>, workspace: WorkspaceConfig, profiles: Option, features: Features, resolve_behavior: Option, ) -> VirtualManifest { VirtualManifest { replace, patch, workspace, profiles, warnings: Warnings::new(), features, resolve_behavior, } } pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] { &self.replace } pub fn 
patch(&self) -> &HashMap> { &self.patch } pub fn workspace_config(&self) -> &WorkspaceConfig { &self.workspace } pub fn profiles(&self) -> Option<&TomlProfiles> { self.profiles.as_ref() } pub fn warnings_mut(&mut self) -> &mut Warnings { &mut self.warnings } pub fn warnings(&self) -> &Warnings { &self.warnings } pub fn unstable_features(&self) -> &Features { &self.features } /// The style of resolver behavior to use, declared with the `resolver` field. /// /// Returns `None` if it is not specified. pub fn resolve_behavior(&self) -> Option { self.resolve_behavior } } impl Target { fn new(src_path: TargetSourcePath, edition: Edition) -> Target { Target { inner: Arc::new(TargetInner { kind: TargetKind::Bin, name: String::new(), bin_name: None, src_path, required_features: None, doc: false, doctest: false, harness: true, for_host: false, proc_macro: false, edition, tested: true, benched: true, }), } } fn with_path(src_path: PathBuf, edition: Edition) -> Target { Target::new(TargetSourcePath::from(src_path), edition) } pub fn lib_target( name: &str, crate_targets: Vec, src_path: PathBuf, edition: Edition, ) -> Target { let mut target = Target::with_path(src_path, edition); target .set_kind(TargetKind::Lib(crate_targets)) .set_name(name) .set_doctest(true) .set_doc(true); target } pub fn bin_target( name: &str, bin_name: Option, src_path: PathBuf, required_features: Option>, edition: Edition, ) -> Target { let mut target = Target::with_path(src_path, edition); target .set_kind(TargetKind::Bin) .set_name(name) .set_binary_name(bin_name) .set_required_features(required_features) .set_doc(true); target } /// Builds a `Target` corresponding to the `build = "build.rs"` entry. 
pub fn custom_build_target(name: &str, src_path: PathBuf, edition: Edition) -> Target { let mut target = Target::with_path(src_path, edition); target .set_kind(TargetKind::CustomBuild) .set_name(name) .set_for_host(true) .set_benched(false) .set_tested(false); target } pub fn metabuild_target(name: &str) -> Target { let mut target = Target::new(TargetSourcePath::Metabuild, Edition::Edition2018); target .set_kind(TargetKind::CustomBuild) .set_name(name) .set_for_host(true) .set_benched(false) .set_tested(false); target } pub fn example_target( name: &str, crate_targets: Vec, src_path: PathBuf, required_features: Option>, edition: Edition, ) -> Target { let kind = if crate_targets.is_empty() || crate_targets.iter().all(|t| *t == CrateType::Bin) { TargetKind::ExampleBin } else { TargetKind::ExampleLib(crate_targets) }; let mut target = Target::with_path(src_path, edition); target .set_kind(kind) .set_name(name) .set_required_features(required_features) .set_tested(false) .set_benched(false); target } pub fn test_target( name: &str, src_path: PathBuf, required_features: Option>, edition: Edition, ) -> Target { let mut target = Target::with_path(src_path, edition); target .set_kind(TargetKind::Test) .set_name(name) .set_required_features(required_features) .set_benched(false); target } pub fn bench_target( name: &str, src_path: PathBuf, required_features: Option>, edition: Edition, ) -> Target { let mut target = Target::with_path(src_path, edition); target .set_kind(TargetKind::Bench) .set_name(name) .set_required_features(required_features) .set_tested(false); target } pub fn name(&self) -> &str { &self.inner.name } pub fn crate_name(&self) -> String { self.name().replace("-", "_") } pub fn src_path(&self) -> &TargetSourcePath { &self.inner.src_path } pub fn set_src_path(&mut self, src_path: TargetSourcePath) { Arc::make_mut(&mut self.inner).src_path = src_path; } pub fn required_features(&self) -> Option<&Vec> { self.inner.required_features.as_ref() } pub fn 
kind(&self) -> &TargetKind { &self.inner.kind } pub fn tested(&self) -> bool { self.inner.tested } pub fn harness(&self) -> bool { self.inner.harness } pub fn documented(&self) -> bool { self.inner.doc } // A plugin, proc-macro, or build-script. pub fn for_host(&self) -> bool { self.inner.for_host } pub fn proc_macro(&self) -> bool { self.inner.proc_macro } pub fn edition(&self) -> Edition { self.inner.edition } pub fn benched(&self) -> bool { self.inner.benched } pub fn doctested(&self) -> bool { self.inner.doctest } pub fn doctestable(&self) -> bool { match self.kind() { TargetKind::Lib(ref kinds) => kinds.iter().any(|k| { *k == CrateType::Rlib || *k == CrateType::Lib || *k == CrateType::ProcMacro }), _ => false, } } pub fn is_lib(&self) -> bool { matches!(self.kind(), TargetKind::Lib(_)) } pub fn is_dylib(&self) -> bool { match self.kind() { TargetKind::Lib(libs) => libs.iter().any(|l| *l == CrateType::Dylib), _ => false, } } pub fn is_cdylib(&self) -> bool { match self.kind() { TargetKind::Lib(libs) => libs.iter().any(|l| *l == CrateType::Cdylib), _ => false, } } pub fn is_staticlib(&self) -> bool { match self.kind() { TargetKind::Lib(libs) => libs.iter().any(|l| *l == CrateType::Staticlib), _ => false, } } /// Returns whether this target produces an artifact which can be linked /// into a Rust crate. /// /// This only returns true for certain kinds of libraries. pub fn is_linkable(&self) -> bool { match self.kind() { TargetKind::Lib(kinds) => kinds.iter().any(|k| k.is_linkable()), _ => false, } } pub fn is_bin(&self) -> bool { *self.kind() == TargetKind::Bin } pub fn is_example(&self) -> bool { matches!( self.kind(), TargetKind::ExampleBin | TargetKind::ExampleLib(..) ) } /// Returns `true` if it is a binary or executable example. /// NOTE: Tests are `false`! pub fn is_executable(&self) -> bool { self.is_bin() || self.is_exe_example() } /// Returns `true` if it is an executable example. 
pub fn is_exe_example(&self) -> bool { // Needed for --all-examples in contexts where only runnable examples make sense matches!(self.kind(), TargetKind::ExampleBin) } pub fn is_test(&self) -> bool { *self.kind() == TargetKind::Test } pub fn is_bench(&self) -> bool { *self.kind() == TargetKind::Bench } pub fn is_custom_build(&self) -> bool { *self.kind() == TargetKind::CustomBuild } /// Returns the arguments suitable for `--crate-type` to pass to rustc. pub fn rustc_crate_types(&self) -> Vec { self.kind().rustc_crate_types() } pub fn set_tested(&mut self, tested: bool) -> &mut Target { Arc::make_mut(&mut self.inner).tested = tested; self } pub fn set_benched(&mut self, benched: bool) -> &mut Target { Arc::make_mut(&mut self.inner).benched = benched; self } pub fn set_doctest(&mut self, doctest: bool) -> &mut Target { Arc::make_mut(&mut self.inner).doctest = doctest; self } pub fn set_for_host(&mut self, for_host: bool) -> &mut Target { Arc::make_mut(&mut self.inner).for_host = for_host; self } pub fn set_proc_macro(&mut self, proc_macro: bool) -> &mut Target { Arc::make_mut(&mut self.inner).proc_macro = proc_macro; self } pub fn set_edition(&mut self, edition: Edition) -> &mut Target { Arc::make_mut(&mut self.inner).edition = edition; self } pub fn set_harness(&mut self, harness: bool) -> &mut Target { Arc::make_mut(&mut self.inner).harness = harness; self } pub fn set_doc(&mut self, doc: bool) -> &mut Target { Arc::make_mut(&mut self.inner).doc = doc; self } pub fn set_kind(&mut self, kind: TargetKind) -> &mut Target { Arc::make_mut(&mut self.inner).kind = kind; self } pub fn set_name(&mut self, name: &str) -> &mut Target { Arc::make_mut(&mut self.inner).name = name.to_string(); self } pub fn set_binary_name(&mut self, bin_name: Option) -> &mut Target { Arc::make_mut(&mut self.inner).bin_name = bin_name; self } pub fn set_required_features(&mut self, required_features: Option>) -> &mut Target { Arc::make_mut(&mut self.inner).required_features = required_features; 
self } pub fn binary_filename(&self) -> Option { self.inner.bin_name.clone() } pub fn description_named(&self) -> String { match self.kind() { TargetKind::Lib(..) => "lib".to_string(), TargetKind::Bin => format!("bin \"{}\"", self.name()), TargetKind::Test => format!("test \"{}\"", self.name()), TargetKind::Bench => format!("bench \"{}\"", self.name()), TargetKind::ExampleLib(..) | TargetKind::ExampleBin => { format!("example \"{}\"", self.name()) } TargetKind::CustomBuild => "build script".to_string(), } } } impl fmt::Display for Target { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self.kind() { TargetKind::Lib(..) => write!(f, "Target(lib)"), TargetKind::Bin => write!(f, "Target(bin: {})", self.name()), TargetKind::Test => write!(f, "Target(test: {})", self.name()), TargetKind::Bench => write!(f, "Target(bench: {})", self.name()), TargetKind::ExampleBin | TargetKind::ExampleLib(..) => { write!(f, "Target(example: {})", self.name()) } TargetKind::CustomBuild => write!(f, "Target(script)"), } } } impl Warnings { fn new() -> Warnings { Warnings(Vec::new()) } pub fn add_warning(&mut self, s: String) { self.0.push(DelayedWarning { message: s, is_critical: false, }) } pub fn add_critical_warning(&mut self, s: String) { self.0.push(DelayedWarning { message: s, is_critical: true, }) } pub fn warnings(&self) -> &[DelayedWarning] { &self.0 } } cargo-0.66.0/src/cargo/core/mod.rs000066400000000000000000000020361432416201200166310ustar00rootroot00000000000000pub use self::dependency::Dependency; pub use self::features::{CliUnstable, Edition, Feature, Features}; pub use self::manifest::{EitherManifest, VirtualManifest}; pub use self::manifest::{Manifest, Target, TargetKind}; pub use self::package::{Package, PackageSet}; pub use self::package_id::PackageId; pub use self::package_id_spec::PackageIdSpec; pub use self::registry::Registry; pub use self::resolver::{Resolve, ResolveVersion}; pub use self::shell::{Shell, Verbosity}; pub use 
self::source::{GitReference, QueryKind, Source, SourceId, SourceMap}; pub use self::summary::{FeatureMap, FeatureValue, Summary}; pub use self::workspace::{ find_workspace_root, resolve_relative_path, MaybePackage, Workspace, WorkspaceConfig, WorkspaceRootConfig, }; pub use crate::util::toml::InheritableFields; pub mod compiler; pub mod dependency; pub mod features; pub mod manifest; pub mod package; pub mod package_id; mod package_id_spec; pub mod profiles; pub mod registry; pub mod resolver; pub mod shell; pub mod source; pub mod summary; mod workspace; cargo-0.66.0/src/cargo/core/package.rs000066400000000000000000001261431432416201200174530ustar00rootroot00000000000000use std::cell::{Cell, Ref, RefCell, RefMut}; use std::cmp::Ordering; use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; use std::fmt; use std::hash; use std::mem; use std::path::{Path, PathBuf}; use std::rc::Rc; use std::time::{Duration, Instant}; use anyhow::Context; use bytesize::ByteSize; use curl::easy::{Easy, HttpVersion}; use curl::multi::{EasyHandle, Multi}; use lazycell::LazyCell; use log::{debug, warn}; use semver::Version; use serde::Serialize; use toml_edit::easy as toml; use crate::core::compiler::{CompileKind, RustcTargetData}; use crate::core::dependency::DepKind; use crate::core::resolver::features::ForceAllTargets; use crate::core::resolver::{HasDevUnits, Resolve}; use crate::core::source::MaybePackage; use crate::core::{Dependency, Manifest, PackageId, SourceId, Target}; use crate::core::{SourceMap, Summary, Workspace}; use crate::ops; use crate::util::config::PackageCacheLock; use crate::util::errors::{CargoResult, HttpNot200}; use crate::util::interning::InternedString; use crate::util::network::Retry; use crate::util::{self, internal, Config, Progress, ProgressStyle}; pub const MANIFEST_PREAMBLE: &str = "\ # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # \"normalize\" Cargo.toml files for maximal 
compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. "; /// Information about a package that is available somewhere in the file system. /// /// A package is a `Cargo.toml` file plus all the files that are part of it. // // TODO: is `manifest_path` a relic? #[derive(Clone)] pub struct Package { inner: Rc, } #[derive(Clone)] struct PackageInner { /// The package's manifest. manifest: Manifest, /// The root of the package. manifest_path: PathBuf, } impl Ord for Package { fn cmp(&self, other: &Package) -> Ordering { self.package_id().cmp(&other.package_id()) } } impl PartialOrd for Package { fn partial_cmp(&self, other: &Package) -> Option { Some(self.cmp(other)) } } /// A Package in a form where `Serialize` can be derived. #[derive(Serialize)] pub struct SerializedPackage { name: InternedString, version: Version, id: PackageId, license: Option, license_file: Option, description: Option, source: SourceId, dependencies: Vec, targets: Vec, features: BTreeMap>, manifest_path: PathBuf, metadata: Option, publish: Option>, authors: Vec, categories: Vec, keywords: Vec, readme: Option, repository: Option, homepage: Option, documentation: Option, edition: String, links: Option, #[serde(skip_serializing_if = "Option::is_none")] metabuild: Option>, default_run: Option, rust_version: Option, } impl Package { /// Creates a package from a manifest and its location. pub fn new(manifest: Manifest, manifest_path: &Path) -> Package { Package { inner: Rc::new(PackageInner { manifest, manifest_path: manifest_path.to_path_buf(), }), } } /// Gets the manifest dependencies. pub fn dependencies(&self) -> &[Dependency] { self.manifest().dependencies() } /// Gets the manifest. 
pub fn manifest(&self) -> &Manifest { &self.inner.manifest } /// Gets the manifest. pub fn manifest_mut(&mut self) -> &mut Manifest { &mut Rc::make_mut(&mut self.inner).manifest } /// Gets the path to the manifest. pub fn manifest_path(&self) -> &Path { &self.inner.manifest_path } /// Gets the name of the package. pub fn name(&self) -> InternedString { self.package_id().name() } /// Gets the `PackageId` object for the package (fully defines a package). pub fn package_id(&self) -> PackageId { self.manifest().package_id() } /// Gets the root folder of the package. pub fn root(&self) -> &Path { self.manifest_path().parent().unwrap() } /// Gets the summary for the package. pub fn summary(&self) -> &Summary { self.manifest().summary() } /// Gets the targets specified in the manifest. pub fn targets(&self) -> &[Target] { self.manifest().targets() } /// Gets the library crate for this package, if it exists. pub fn library(&self) -> Option<&Target> { self.targets().iter().find(|t| t.is_lib()) } /// Gets the current package version. pub fn version(&self) -> &Version { self.package_id().version() } /// Gets the package authors. pub fn authors(&self) -> &Vec { &self.manifest().metadata().authors } /// Returns `None` if the package is set to publish. /// Returns `Some(allowed_registries)` if publishing is limited to specified /// registries or if package is set to not publish. pub fn publish(&self) -> &Option> { self.manifest().publish() } /// Returns `true` if this package is a proc-macro. pub fn proc_macro(&self) -> bool { self.targets().iter().any(|target| target.proc_macro()) } /// Gets the package's minimum Rust version. pub fn rust_version(&self) -> Option<&str> { self.manifest().rust_version() } /// Returns `true` if the package uses a custom build script for any target. 
pub fn has_custom_build(&self) -> bool { self.targets().iter().any(|t| t.is_custom_build()) } pub fn map_source(self, to_replace: SourceId, replace_with: SourceId) -> Package { Package { inner: Rc::new(PackageInner { manifest: self.manifest().clone().map_source(to_replace, replace_with), manifest_path: self.manifest_path().to_owned(), }), } } pub fn to_registry_toml(&self, ws: &Workspace<'_>) -> CargoResult { let manifest = self .manifest() .original() .prepare_for_publish(ws, self.root())?; let toml = toml::to_string_pretty(&manifest)?; Ok(format!("{}\n{}", MANIFEST_PREAMBLE, toml)) } /// Returns if package should include `Cargo.lock`. pub fn include_lockfile(&self) -> bool { self.targets().iter().any(|t| t.is_example() || t.is_bin()) } pub fn serialized(&self) -> SerializedPackage { let summary = self.manifest().summary(); let package_id = summary.package_id(); let manmeta = self.manifest().metadata(); // Filter out metabuild targets. They are an internal implementation // detail that is probably not relevant externally. There's also not a // real path to show in `src_path`, and this avoids changing the format. 
let targets: Vec = self .manifest() .targets() .iter() .filter(|t| t.src_path().is_path()) .cloned() .collect(); // Convert Vec to Vec let features = summary .features() .iter() .map(|(k, v)| { ( *k, v.iter() .map(|fv| InternedString::new(&fv.to_string())) .collect(), ) }) .collect(); SerializedPackage { name: package_id.name(), version: package_id.version().clone(), id: package_id, license: manmeta.license.clone(), license_file: manmeta.license_file.clone(), description: manmeta.description.clone(), source: summary.source_id(), dependencies: summary.dependencies().to_vec(), targets, features, manifest_path: self.manifest_path().to_path_buf(), metadata: self.manifest().custom_metadata().cloned(), authors: manmeta.authors.clone(), categories: manmeta.categories.clone(), keywords: manmeta.keywords.clone(), readme: manmeta.readme.clone(), repository: manmeta.repository.clone(), homepage: manmeta.homepage.clone(), documentation: manmeta.documentation.clone(), edition: self.manifest().edition().to_string(), links: self.manifest().links().map(|s| s.to_owned()), metabuild: self.manifest().metabuild().cloned(), publish: self.publish().as_ref().cloned(), default_run: self.manifest().default_run().map(|s| s.to_owned()), rust_version: self.rust_version().map(|s| s.to_owned()), } } } impl fmt::Display for Package { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.summary().package_id()) } } impl fmt::Debug for Package { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Package") .field("id", &self.summary().package_id()) .field("..", &"..") .finish() } } impl PartialEq for Package { fn eq(&self, other: &Package) -> bool { self.package_id() == other.package_id() } } impl Eq for Package {} impl hash::Hash for Package { fn hash(&self, into: &mut H) { self.package_id().hash(into) } } /// A set of packages, with the intent to download. /// /// This is primarily used to convert a set of `PackageId`s to `Package`s. 
It /// will download as needed, or used the cached download if available. pub struct PackageSet<'cfg> { packages: HashMap>, sources: RefCell>, config: &'cfg Config, multi: Multi, /// Used to prevent reusing the PackageSet to download twice. downloading: Cell, /// Whether or not to use curl HTTP/2 multiplexing. multiplexing: bool, } /// Helper for downloading crates. pub struct Downloads<'a, 'cfg> { set: &'a PackageSet<'cfg>, /// When a download is started, it is added to this map. The key is a /// "token" (see `Download::token`). It is removed once the download is /// finished. pending: HashMap, EasyHandle)>, /// Set of packages currently being downloaded. This should stay in sync /// with `pending`. pending_ids: HashSet, /// The final result of each download. A pair `(token, result)`. This is a /// temporary holding area, needed because curl can report multiple /// downloads at once, but the main loop (`wait`) is written to only /// handle one at a time. results: Vec<(usize, Result<(), curl::Error>)>, /// The next ID to use for creating a token (see `Download::token`). next: usize, /// Progress bar. progress: RefCell>>, /// Number of downloads that have successfully finished. downloads_finished: usize, /// Total bytes for all successfully downloaded packages. downloaded_bytes: u64, /// Size (in bytes) and package name of the largest downloaded package. largest: (u64, String), /// Time when downloading started. start: Instant, /// Indicates *all* downloads were successful. success: bool, /// Timeout management, both of timeout thresholds as well as whether or not /// our connection has timed out (and accompanying message if it has). /// /// Note that timeout management is done manually here instead of in libcurl /// because we want to apply timeouts to an entire batch of operations, not /// any one particular single operation. timeout: ops::HttpTimeout, /// Last time bytes were received. updated_at: Cell, /// This is a slow-speed check. 
It is reset to `now + timeout_duration` /// every time at least `threshold` bytes are received. If the current /// time ever exceeds `next_speed_check`, then give up and report a /// timeout error. next_speed_check: Cell, /// This is the slow-speed threshold byte count. It starts at the /// configured threshold value (default 10), and is decremented by the /// number of bytes received in each chunk. If it is <= zero, the /// threshold has been met and data is being received fast enough not to /// trigger a timeout; reset `next_speed_check` and set this back to the /// configured threshold. next_speed_check_bytes_threshold: Cell, /// Global filesystem lock to ensure only one Cargo is downloading at a /// time. _lock: PackageCacheLock<'cfg>, } struct Download<'cfg> { /// The token for this download, used as the key of the `Downloads::pending` map /// and stored in `EasyHandle` as well. token: usize, /// The package that we're downloading. id: PackageId, /// Actual downloaded data, updated throughout the lifetime of this download. data: RefCell>, /// The URL that we're downloading from, cached here for error messages and /// reenqueuing. url: String, /// A descriptive string to print when we've finished downloading this crate. descriptor: String, /// Statistics updated from the progress callback in libcurl. total: Cell, current: Cell, /// The moment we started this transfer at. start: Instant, timed_out: Cell>, /// Logic used to track retrying this download if it's a spurious failure. retry: Retry<'cfg>, } impl<'cfg> PackageSet<'cfg> { pub fn new( package_ids: &[PackageId], sources: SourceMap<'cfg>, config: &'cfg Config, ) -> CargoResult> { // We've enabled the `http2` feature of `curl` in Cargo, so treat // failures here as fatal as it would indicate a build-time problem. 
let mut multi = Multi::new(); let multiplexing = config.http_config()?.multiplexing.unwrap_or(true); multi .pipelining(false, multiplexing) .with_context(|| "failed to enable multiplexing/pipelining in curl")?; // let's not flood crates.io with connections multi.set_max_host_connections(2)?; Ok(PackageSet { packages: package_ids .iter() .map(|&id| (id, LazyCell::new())) .collect(), sources: RefCell::new(sources), config, multi, downloading: Cell::new(false), multiplexing, }) } pub fn package_ids(&self) -> impl Iterator + '_ { self.packages.keys().cloned() } pub fn packages(&self) -> impl Iterator { self.packages.values().filter_map(|p| p.borrow()) } pub fn enable_download<'a>(&'a self) -> CargoResult> { assert!(!self.downloading.replace(true)); let timeout = ops::HttpTimeout::new(self.config)?; Ok(Downloads { start: Instant::now(), set: self, next: 0, pending: HashMap::new(), pending_ids: HashSet::new(), results: Vec::new(), progress: RefCell::new(Some(Progress::with_style( "Downloading", ProgressStyle::Ratio, self.config, ))), downloads_finished: 0, downloaded_bytes: 0, largest: (0, String::new()), success: false, updated_at: Cell::new(Instant::now()), timeout, next_speed_check: Cell::new(Instant::now()), next_speed_check_bytes_threshold: Cell::new(0), _lock: self.config.acquire_package_cache_lock()?, }) } pub fn get_one(&self, id: PackageId) -> CargoResult<&Package> { if let Some(pkg) = self.packages.get(&id).and_then(|slot| slot.borrow()) { return Ok(pkg); } Ok(self.get_many(Some(id))?.remove(0)) } pub fn get_many(&self, ids: impl IntoIterator) -> CargoResult> { let mut pkgs = Vec::new(); let mut downloads = self.enable_download()?; for id in ids { pkgs.extend(downloads.start(id)?); } while downloads.remaining() > 0 { pkgs.push(downloads.wait()?); } downloads.success = true; Ok(pkgs) } /// Downloads any packages accessible from the give root ids. 
pub fn download_accessible( &self, resolve: &Resolve, root_ids: &[PackageId], has_dev_units: HasDevUnits, requested_kinds: &[CompileKind], target_data: &RustcTargetData<'cfg>, force_all_targets: ForceAllTargets, ) -> CargoResult<()> { fn collect_used_deps( used: &mut BTreeSet, resolve: &Resolve, pkg_id: PackageId, has_dev_units: HasDevUnits, requested_kinds: &[CompileKind], target_data: &RustcTargetData<'_>, force_all_targets: ForceAllTargets, ) -> CargoResult<()> { if !used.insert(pkg_id) { return Ok(()); } let filtered_deps = PackageSet::filter_deps( pkg_id, resolve, has_dev_units, requested_kinds, target_data, force_all_targets, ); for (pkg_id, _dep) in filtered_deps { collect_used_deps( used, resolve, pkg_id, has_dev_units, requested_kinds, target_data, force_all_targets, )?; } Ok(()) } // This is sorted by PackageId to get consistent behavior and error // messages for Cargo's testsuite. Perhaps there is a better ordering // that optimizes download time? let mut to_download = BTreeSet::new(); for id in root_ids { collect_used_deps( &mut to_download, resolve, *id, has_dev_units, requested_kinds, target_data, force_all_targets, )?; } self.get_many(to_download.into_iter())?; Ok(()) } /// Check if there are any dependency packages that violate artifact constraints /// to instantly abort, or that do not have any libs which results in warnings. 
pub(crate) fn warn_no_lib_packages_and_artifact_libs_overlapping_deps( &self, ws: &Workspace<'cfg>, resolve: &Resolve, root_ids: &[PackageId], has_dev_units: HasDevUnits, requested_kinds: &[CompileKind], target_data: &RustcTargetData<'_>, force_all_targets: ForceAllTargets, ) -> CargoResult<()> { let no_lib_pkgs: BTreeMap)>> = root_ids .iter() .map(|&root_id| { let dep_pkgs_to_deps: Vec<_> = PackageSet::filter_deps( root_id, resolve, has_dev_units, requested_kinds, target_data, force_all_targets, ) .collect(); let dep_pkgs_and_deps = dep_pkgs_to_deps .into_iter() .filter(|(_id, deps)| deps.iter().any(|dep| dep.maybe_lib())) .filter_map(|(dep_package_id, deps)| { self.get_one(dep_package_id).ok().and_then(|dep_pkg| { (!dep_pkg.targets().iter().any(|t| t.is_lib())).then(|| (dep_pkg, deps)) }) }) .collect(); (root_id, dep_pkgs_and_deps) }) .collect(); for (pkg_id, dep_pkgs) in no_lib_pkgs { for (_dep_pkg_without_lib_target, deps) in dep_pkgs { for dep in deps.iter().filter(|dep| { dep.artifact() .map(|artifact| artifact.is_lib()) .unwrap_or(true) }) { ws.config().shell().warn(&format!( "{} ignoring invalid dependency `{}` which is missing a lib target", pkg_id, dep.name_in_toml(), ))?; } } } Ok(()) } fn filter_deps<'a>( pkg_id: PackageId, resolve: &'a Resolve, has_dev_units: HasDevUnits, requested_kinds: &'a [CompileKind], target_data: &'a RustcTargetData<'_>, force_all_targets: ForceAllTargets, ) -> impl Iterator)> + 'a { resolve .deps(pkg_id) .filter(move |&(_id, deps)| { deps.iter().any(|dep| { if dep.kind() == DepKind::Development && has_dev_units == HasDevUnits::No { return false; } if force_all_targets == ForceAllTargets::No { let activated = requested_kinds .iter() .chain(Some(&CompileKind::Host)) .any(|kind| target_data.dep_platform_activated(dep, *kind)); if !activated { return false; } } true }) }) .into_iter() } pub fn sources(&self) -> Ref<'_, SourceMap<'cfg>> { self.sources.borrow() } pub fn sources_mut(&self) -> RefMut<'_, SourceMap<'cfg>> { 
self.sources.borrow_mut() } /// Merge the given set into self. pub fn add_set(&mut self, set: PackageSet<'cfg>) { assert!(!self.downloading.get()); assert!(!set.downloading.get()); for (pkg_id, p_cell) in set.packages { self.packages.entry(pkg_id).or_insert(p_cell); } let mut sources = self.sources.borrow_mut(); let other_sources = set.sources.into_inner(); sources.add_source_map(other_sources); } } // When dynamically linked against libcurl, we want to ignore some failures // when using old versions that don't support certain features. macro_rules! try_old_curl { ($e:expr, $msg:expr) => { let result = $e; if cfg!(target_os = "macos") { if let Err(e) = result { warn!("ignoring libcurl {} error: {}", $msg, e); } } else { result.with_context(|| { anyhow::format_err!("failed to enable {}, is curl not built right?", $msg) })?; } }; } impl<'a, 'cfg> Downloads<'a, 'cfg> { /// Starts to download the package for the `id` specified. /// /// Returns `None` if the package is queued up for download and will /// eventually be returned from `wait_for_download`. Returns `Some(pkg)` if /// the package is ready and doesn't need to be downloaded. pub fn start(&mut self, id: PackageId) -> CargoResult> { self.start_inner(id) .with_context(|| format!("failed to download `{}`", id)) } fn start_inner(&mut self, id: PackageId) -> CargoResult> { // First up see if we've already cached this package, in which case // there's nothing to do. let slot = self .set .packages .get(&id) .ok_or_else(|| internal(format!("couldn't find `{}` in package set", id)))?; if let Some(pkg) = slot.borrow() { return Ok(Some(pkg)); } // Ask the original source for this `PackageId` for the corresponding // package. That may immediately come back and tell us that the package // is ready, or it could tell us that it needs to be downloaded. 
let mut sources = self.set.sources.borrow_mut(); let source = sources .get_mut(id.source_id()) .ok_or_else(|| internal(format!("couldn't find source for `{}`", id)))?; let pkg = source .download(id) .with_context(|| "unable to get packages from source")?; let (url, descriptor) = match pkg { MaybePackage::Ready(pkg) => { debug!("{} doesn't need a download", id); assert!(slot.fill(pkg).is_ok()); return Ok(Some(slot.borrow().unwrap())); } MaybePackage::Download { url, descriptor } => (url, descriptor), }; // Ok we're going to download this crate, so let's set up all our // internal state and hand off an `Easy` handle to our libcurl `Multi` // handle. This won't actually start the transfer, but later it'll // happen during `wait_for_download` let token = self.next; self.next += 1; debug!("downloading {} as {}", id, token); assert!(self.pending_ids.insert(id)); let (mut handle, _timeout) = ops::http_handle_and_timeout(self.set.config)?; handle.get(true)?; handle.url(&url)?; handle.follow_location(true)?; // follow redirects // Enable HTTP/2 to be used as it'll allow true multiplexing which makes // downloads much faster. // // Currently Cargo requests the `http2` feature of the `curl` crate // which means it should always be built in. On OSX, however, we ship // cargo still linked against the system libcurl. Building curl with // ALPN support for HTTP/2 requires newer versions of OSX (the // SecureTransport API) than we want to ship Cargo for. By linking Cargo // against the system libcurl then older curl installations won't use // HTTP/2 but newer ones will. All that to basically say we ignore // errors here on OSX, but consider this a fatal error to not activate // HTTP/2 on all other platforms. 
if self.set.multiplexing { try_old_curl!(handle.http_version(HttpVersion::V2), "HTTP2"); } else { handle.http_version(HttpVersion::V11)?; } // This is an option to `libcurl` which indicates that if there's a // bunch of parallel requests to the same host they all wait until the // pipelining status of the host is known. This means that we won't // initiate dozens of connections to crates.io, but rather only one. // Once the main one is opened we realized that pipelining is possible // and multiplexing is possible with static.crates.io. All in all this // reduces the number of connections down to a more manageable state. try_old_curl!(handle.pipewait(true), "pipewait"); handle.write_function(move |buf| { debug!("{} - {} bytes of data", token, buf.len()); tls::with(|downloads| { if let Some(downloads) = downloads { downloads.pending[&token] .0 .data .borrow_mut() .extend_from_slice(buf); } }); Ok(buf.len()) })?; handle.progress(true)?; handle.progress_function(move |dl_total, dl_cur, _, _| { tls::with(|downloads| match downloads { Some(d) => d.progress(token, dl_total as u64, dl_cur as u64), None => false, }) })?; // If the progress bar isn't enabled then it may be awhile before the // first crate finishes downloading so we inform immediately that we're // downloading crates here. if self.downloads_finished == 0 && self.pending.is_empty() && !self.progress.borrow().as_ref().unwrap().is_enabled() { self.set .config .shell() .status("Downloading", "crates ...")?; } let dl = Download { token, data: RefCell::new(Vec::new()), id, url, descriptor, total: Cell::new(0), current: Cell::new(0), start: Instant::now(), timed_out: Cell::new(None), retry: Retry::new(self.set.config)?, }; self.enqueue(dl, handle)?; self.tick(WhyTick::DownloadStarted)?; Ok(None) } /// Returns the number of crates that are still downloading. pub fn remaining(&self) -> usize { self.pending.len() } /// Blocks the current thread waiting for a package to finish downloading. 
/// /// This method will wait for a previously enqueued package to finish /// downloading and return a reference to it after it's done downloading. /// /// # Panics /// /// This function will panic if there are no remaining downloads. pub fn wait(&mut self) -> CargoResult<&'a Package> { let (dl, data) = loop { assert_eq!(self.pending.len(), self.pending_ids.len()); let (token, result) = self.wait_for_curl()?; debug!("{} finished with {:?}", token, result); let (mut dl, handle) = self .pending .remove(&token) .expect("got a token for a non-in-progress transfer"); let data = mem::take(&mut *dl.data.borrow_mut()); let mut handle = self.set.multi.remove(handle)?; self.pending_ids.remove(&dl.id); // Check if this was a spurious error. If it was a spurious error // then we want to re-enqueue our request for another attempt and // then we wait for another request to finish. let ret = { let timed_out = &dl.timed_out; let url = &dl.url; dl.retry .r#try(|| { if let Err(e) = result { // If this error is "aborted by callback" then that's // probably because our progress callback aborted due to // a timeout. We'll find out by looking at the // `timed_out` field, looking for a descriptive message. // If one is found we switch the error code (to ensure // it's flagged as spurious) and then attach our extra // information to the error. if !e.is_aborted_by_callback() { return Err(e.into()); } return Err(match timed_out.replace(None) { Some(msg) => { let code = curl_sys::CURLE_OPERATION_TIMEDOUT; let mut err = curl::Error::new(code); err.set_extra(msg); err } None => e, } .into()); } let code = handle.response_code()?; if code != 200 && code != 0 { let url = handle.effective_url()?.unwrap_or(url); return Err(HttpNot200 { code, url: url.to_string(), } .into()); } Ok(()) }) .with_context(|| format!("failed to download from `{}`", dl.url))? }; match ret { Some(()) => break (dl, data), None => { self.pending_ids.insert(dl.id); self.enqueue(dl, handle)? 
} } }; // If the progress bar isn't enabled then we still want to provide some // semblance of progress of how we're downloading crates, and if the // progress bar is enabled this provides a good log of what's happening. self.progress.borrow_mut().as_mut().unwrap().clear(); self.set .config .shell() .status("Downloaded", &dl.descriptor)?; self.downloads_finished += 1; self.downloaded_bytes += dl.total.get(); if dl.total.get() > self.largest.0 { self.largest = (dl.total.get(), dl.id.name().to_string()); } // We're about to synchronously extract the crate below. While we're // doing that our download progress won't actually be updated, nor do we // have a great view into the progress of the extraction. Let's prepare // the user for this CPU-heavy step if it looks like it'll take some // time to do so. if dl.total.get() < ByteSize::kb(400).0 { self.tick(WhyTick::DownloadFinished)?; } else { self.tick(WhyTick::Extracting(&dl.id.name()))?; } // Inform the original source that the download is finished which // should allow us to actually get the package and fill it in now. let mut sources = self.set.sources.borrow_mut(); let source = sources .get_mut(dl.id.source_id()) .ok_or_else(|| internal(format!("couldn't find source for `{}`", dl.id)))?; let start = Instant::now(); let pkg = source.finish_download(dl.id, data)?; // Assume that no time has passed while we were calling // `finish_download`, update all speed checks and timeout limits of all // active downloads to make sure they don't fire because of a slowly // extracted tarball. 
let finish_dur = start.elapsed(); self.updated_at.set(self.updated_at.get() + finish_dur); self.next_speed_check .set(self.next_speed_check.get() + finish_dur); let slot = &self.set.packages[&dl.id]; assert!(slot.fill(pkg).is_ok()); Ok(slot.borrow().unwrap()) } fn enqueue(&mut self, dl: Download<'cfg>, handle: Easy) -> CargoResult<()> { let mut handle = self.set.multi.add(handle)?; let now = Instant::now(); handle.set_token(dl.token)?; self.updated_at.set(now); self.next_speed_check.set(now + self.timeout.dur); self.next_speed_check_bytes_threshold .set(u64::from(self.timeout.low_speed_limit)); dl.timed_out.set(None); dl.current.set(0); dl.total.set(0); self.pending.insert(dl.token, (dl, handle)); Ok(()) } /// Block, waiting for curl. Returns a token and a `Result` for that token /// (`Ok` means the download successfully finished). fn wait_for_curl(&mut self) -> CargoResult<(usize, Result<(), curl::Error>)> { // This is the main workhorse loop. We use libcurl's portable `wait` // method to actually perform blocking. This isn't necessarily too // efficient in terms of fd management, but we should only be juggling // a few anyway. // // Here we start off by asking the `multi` handle to do some work via // the `perform` method. This will actually do I/O work (non-blocking) // and attempt to make progress. Afterwards we ask about the `messages` // contained in the handle which will inform us if anything has finished // transferring. // // If we've got a finished transfer after all that work we break out // and process the finished transfer at the end. Otherwise we need to // actually block waiting for I/O to happen, which we achieve with the // `wait` method on `multi`. 
loop { let n = tls::set(self, || { self.set .multi .perform() .with_context(|| "failed to perform http requests") })?; debug!("handles remaining: {}", n); let results = &mut self.results; let pending = &self.pending; self.set.multi.messages(|msg| { let token = msg.token().expect("failed to read token"); let handle = &pending[&token].1; if let Some(result) = msg.result_for(handle) { results.push((token, result)); } else { debug!("message without a result (?)"); } }); if let Some(pair) = results.pop() { break Ok(pair); } assert!(!self.pending.is_empty()); let min_timeout = Duration::new(1, 0); let timeout = self.set.multi.get_timeout()?.unwrap_or(min_timeout); let timeout = timeout.min(min_timeout); self.set .multi .wait(&mut [], timeout) .with_context(|| "failed to wait on curl `Multi`")?; } } fn progress(&self, token: usize, total: u64, cur: u64) -> bool { let dl = &self.pending[&token].0; dl.total.set(total); let now = Instant::now(); if cur > dl.current.get() { let delta = cur - dl.current.get(); let threshold = self.next_speed_check_bytes_threshold.get(); dl.current.set(cur); self.updated_at.set(now); if delta >= threshold { self.next_speed_check.set(now + self.timeout.dur); self.next_speed_check_bytes_threshold .set(u64::from(self.timeout.low_speed_limit)); } else { self.next_speed_check_bytes_threshold.set(threshold - delta); } } if self.tick(WhyTick::DownloadUpdate).is_err() { return false; } // If we've spent too long not actually receiving any data we time out. if now > self.updated_at.get() + self.timeout.dur { self.updated_at.set(now); let msg = format!( "failed to download any data for `{}` within {}s", dl.id, self.timeout.dur.as_secs() ); dl.timed_out.set(Some(msg)); return false; } // If we reached the point in time that we need to check our speed // limit, see if we've transferred enough data during this threshold. If // it fails this check then we fail because the download is going too // slowly. 
if now >= self.next_speed_check.get() { self.next_speed_check.set(now + self.timeout.dur); assert!(self.next_speed_check_bytes_threshold.get() > 0); let msg = format!( "download of `{}` failed to transfer more \ than {} bytes in {}s", dl.id, self.timeout.low_speed_limit, self.timeout.dur.as_secs() ); dl.timed_out.set(Some(msg)); return false; } true } fn tick(&self, why: WhyTick<'_>) -> CargoResult<()> { let mut progress = self.progress.borrow_mut(); let progress = progress.as_mut().unwrap(); if let WhyTick::DownloadUpdate = why { if !progress.update_allowed() { return Ok(()); } } let pending = self.pending.len(); let mut msg = if pending == 1 { format!("{} crate", pending) } else { format!("{} crates", pending) }; match why { WhyTick::Extracting(krate) => { msg.push_str(&format!(", extracting {} ...", krate)); } _ => { let mut dur = Duration::new(0, 0); let mut remaining = 0; for (dl, _) in self.pending.values() { dur += dl.start.elapsed(); // If the total/current look weird just throw out the data // point, sounds like curl has more to learn before we have // the true information. if dl.total.get() >= dl.current.get() { remaining += dl.total.get() - dl.current.get(); } } if remaining > 0 && dur > Duration::from_millis(500) { msg.push_str(&format!(", remaining bytes: {}", ByteSize(remaining))); } } } progress.print_now(&msg) } } #[derive(Copy, Clone)] enum WhyTick<'a> { DownloadStarted, DownloadUpdate, DownloadFinished, Extracting(&'a str), } impl<'a, 'cfg> Drop for Downloads<'a, 'cfg> { fn drop(&mut self) { self.set.downloading.set(false); let progress = self.progress.get_mut().take().unwrap(); // Don't print a download summary if we're not using a progress bar, // we've already printed lots of `Downloading...` items. if !progress.is_enabled() { return; } // If we didn't download anything, no need for a summary. if self.downloads_finished == 0 { return; } // If an error happened, let's not clutter up the output. 
if !self.success { return; } // pick the correct plural of crate(s) let crate_string = if self.downloads_finished == 1 { "crate" } else { "crates" }; let mut status = format!( "{} {} ({}) in {}", self.downloads_finished, crate_string, ByteSize(self.downloaded_bytes), util::elapsed(self.start.elapsed()) ); // print the size of largest crate if it was >1mb // however don't print if only a single crate was downloaded // because it is obvious that it will be the largest then if self.largest.0 > ByteSize::mb(1).0 && self.downloads_finished > 1 { status.push_str(&format!( " (largest was `{}` at {})", self.largest.1, ByteSize(self.largest.0), )); } // Clear progress before displaying final summary. drop(progress); drop(self.set.config.shell().status("Downloaded", status)); } } mod tls { use std::cell::Cell; use super::Downloads; thread_local!(static PTR: Cell = Cell::new(0)); pub(crate) fn with(f: impl FnOnce(Option<&Downloads<'_, '_>>) -> R) -> R { let ptr = PTR.with(|p| p.get()); if ptr == 0 { f(None) } else { unsafe { f(Some(&*(ptr as *const Downloads<'_, '_>))) } } } pub(crate) fn set(dl: &Downloads<'_, '_>, f: impl FnOnce() -> R) -> R { struct Reset<'a, T: Copy>(&'a Cell, T); impl<'a, T: Copy> Drop for Reset<'a, T> { fn drop(&mut self) { self.0.set(self.1); } } PTR.with(|p| { let _reset = Reset(p, p.get()); p.set(dl as *const Downloads<'_, '_> as usize); f() }) } } cargo-0.66.0/src/cargo/core/package_id.rs000066400000000000000000000220521432416201200201210ustar00rootroot00000000000000use std::collections::HashSet; use std::fmt::{self, Formatter}; use std::hash; use std::hash::Hash; use std::path::Path; use std::ptr; use std::sync::Mutex; use serde::de; use serde::ser; use crate::core::source::SourceId; use crate::util::interning::InternedString; use crate::util::{CargoResult, ToSemver}; lazy_static::lazy_static! { static ref PACKAGE_ID_CACHE: Mutex> = Mutex::new(HashSet::new()); } /// Identifier for a specific version of a package in a specific source. 
#[derive(Clone, Copy, Eq, PartialOrd, Ord)] pub struct PackageId { inner: &'static PackageIdInner, } #[derive(PartialOrd, Eq, Ord)] struct PackageIdInner { name: InternedString, version: semver::Version, source_id: SourceId, } // Custom equality that uses full equality of SourceId, rather than its custom equality, // and Version, which usually ignores `build` metadata. // // The `build` part of the version is usually ignored (like a "comment"). // However, there are some cases where it is important. The download path from // a registry includes the build metadata, and Cargo uses PackageIds for // creating download paths. Including it here prevents the PackageId interner // from getting poisoned with PackageIds where that build metadata is missing. impl PartialEq for PackageIdInner { fn eq(&self, other: &Self) -> bool { self.name == other.name && self.version.major == other.version.major && self.version.minor == other.version.minor && self.version.patch == other.version.patch && self.version.pre == other.version.pre && self.version.build == other.version.build && self.source_id.full_eq(other.source_id) } } // Custom hash that is coherent with the custom equality above. 
impl Hash for PackageIdInner { fn hash(&self, into: &mut S) { self.name.hash(into); self.version.major.hash(into); self.version.minor.hash(into); self.version.patch.hash(into); self.version.pre.hash(into); self.version.build.hash(into); self.source_id.full_hash(into); } } impl ser::Serialize for PackageId { fn serialize(&self, s: S) -> Result where S: ser::Serializer, { s.collect_str(&format_args!( "{} {} ({})", self.inner.name, self.inner.version, self.inner.source_id.as_url() )) } } impl<'de> de::Deserialize<'de> for PackageId { fn deserialize(d: D) -> Result where D: de::Deserializer<'de>, { let string = String::deserialize(d)?; let mut s = string.splitn(3, ' '); let name = s.next().unwrap(); let name = InternedString::new(name); let version = match s.next() { Some(s) => s, None => return Err(de::Error::custom("invalid serialized PackageId")), }; let version = version.to_semver().map_err(de::Error::custom)?; let url = match s.next() { Some(s) => s, None => return Err(de::Error::custom("invalid serialized PackageId")), }; let url = if url.starts_with('(') && url.ends_with(')') { &url[1..url.len() - 1] } else { return Err(de::Error::custom("invalid serialized PackageId")); }; let source_id = SourceId::from_url(url).map_err(de::Error::custom)?; Ok(PackageId::pure(name, version, source_id)) } } impl PartialEq for PackageId { fn eq(&self, other: &PackageId) -> bool { if ptr::eq(self.inner, other.inner) { return true; } // This is here so that PackageId uses SourceId's and Version's idea // of equality. PackageIdInner uses a more exact notion of equality. self.inner.name == other.inner.name && self.inner.version == other.inner.version && self.inner.source_id == other.inner.source_id } } impl Hash for PackageId { fn hash(&self, state: &mut S) { // This is here (instead of derived) so that PackageId uses SourceId's // and Version's idea of equality. PackageIdInner uses a more exact // notion of hashing. 
self.inner.name.hash(state); self.inner.version.hash(state); self.inner.source_id.hash(state); } } impl PackageId { pub fn new( name: impl Into, version: T, sid: SourceId, ) -> CargoResult { let v = version.to_semver()?; Ok(PackageId::pure(name.into(), v, sid)) } pub fn pure(name: InternedString, version: semver::Version, source_id: SourceId) -> PackageId { let inner = PackageIdInner { name, version, source_id, }; let mut cache = PACKAGE_ID_CACHE.lock().unwrap(); let inner = cache.get(&inner).cloned().unwrap_or_else(|| { let inner = Box::leak(Box::new(inner)); cache.insert(inner); inner }); PackageId { inner } } pub fn name(self) -> InternedString { self.inner.name } pub fn version(self) -> &'static semver::Version { &self.inner.version } pub fn source_id(self) -> SourceId { self.inner.source_id } pub fn with_precise(self, precise: Option) -> PackageId { PackageId::pure( self.inner.name, self.inner.version.clone(), self.inner.source_id.with_precise(precise), ) } pub fn with_source_id(self, source: SourceId) -> PackageId { PackageId::pure(self.inner.name, self.inner.version.clone(), source) } pub fn map_source(self, to_replace: SourceId, replace_with: SourceId) -> Self { if self.source_id() == to_replace { self.with_source_id(replace_with) } else { self } } /// Returns a value that implements a "stable" hashable value. /// /// Stable hashing removes the path prefix of the workspace from path /// packages. This helps with reproducible builds, since this hash is part /// of the symbol metadata, and we don't want the absolute path where the /// build is performed to affect the binary output. 
pub fn stable_hash(self, workspace: &Path) -> PackageIdStableHash<'_> { PackageIdStableHash(self, workspace) } } pub struct PackageIdStableHash<'a>(PackageId, &'a Path); impl<'a> Hash for PackageIdStableHash<'a> { fn hash(&self, state: &mut S) { self.0.inner.name.hash(state); self.0.inner.version.hash(state); self.0.inner.source_id.stable_hash(self.1, state); } } impl fmt::Display for PackageId { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { write!(f, "{} v{}", self.inner.name, self.inner.version)?; if !self.inner.source_id.is_default_registry() { write!(f, " ({})", self.inner.source_id)?; } Ok(()) } } impl fmt::Debug for PackageId { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { f.debug_struct("PackageId") .field("name", &self.inner.name) .field("version", &self.inner.version.to_string()) .field("source", &self.inner.source_id.to_string()) .finish() } } #[cfg(test)] mod tests { use super::PackageId; use crate::core::source::SourceId; use crate::sources::CRATES_IO_INDEX; use crate::util::IntoUrl; #[test] fn invalid_version_handled_nicely() { let loc = CRATES_IO_INDEX.into_url().unwrap(); let repo = SourceId::for_registry(&loc).unwrap(); assert!(PackageId::new("foo", "1.0", repo).is_err()); assert!(PackageId::new("foo", "1", repo).is_err()); assert!(PackageId::new("foo", "bar", repo).is_err()); assert!(PackageId::new("foo", "", repo).is_err()); } #[test] fn debug() { let loc = CRATES_IO_INDEX.into_url().unwrap(); let pkg_id = PackageId::new("foo", "1.0.0", SourceId::for_registry(&loc).unwrap()).unwrap(); assert_eq!( r#"PackageId { name: "foo", version: "1.0.0", source: "registry `crates-io`" }"#, format!("{:?}", pkg_id) ); let expected = r#" PackageId { name: "foo", version: "1.0.0", source: "registry `crates-io`", } "# .trim(); // Can be removed once trailing commas in Debug have reached the stable // channel. 
let expected_without_trailing_comma = r#" PackageId { name: "foo", version: "1.0.0", source: "registry `crates-io`" } "# .trim(); let actual = format!("{:#?}", pkg_id); if actual.ends_with(",\n}") { assert_eq!(actual, expected); } else { assert_eq!(actual, expected_without_trailing_comma); } } #[test] fn display() { let loc = CRATES_IO_INDEX.into_url().unwrap(); let pkg_id = PackageId::new("foo", "1.0.0", SourceId::for_registry(&loc).unwrap()).unwrap(); assert_eq!("foo v1.0.0", pkg_id.to_string()); } } cargo-0.66.0/src/cargo/core/package_id_spec.rs000066400000000000000000000351551432416201200211430ustar00rootroot00000000000000use std::collections::HashMap; use std::fmt; use anyhow::{bail, Context as _}; use semver::Version; use serde::{de, ser}; use url::Url; use crate::core::PackageId; use crate::util::errors::CargoResult; use crate::util::interning::InternedString; use crate::util::lev_distance; use crate::util::{validate_package_name, IntoUrl, ToSemver}; /// Some or all of the data required to identify a package: /// /// 1. the package name (a `String`, required) /// 2. the package version (a `Version`, optional) /// 3. the package source (a `Url`, optional) /// /// If any of the optional fields are omitted, then the package ID may be ambiguous, there may be /// more than one package/version/url combo that will match. However, often just the name is /// sufficient to uniquely define a package ID. #[derive(Clone, PartialEq, Eq, Debug, Hash, Ord, PartialOrd)] pub struct PackageIdSpec { name: InternedString, version: Option, url: Option, } impl PackageIdSpec { /// Parses a spec string and returns a `PackageIdSpec` if the string was valid. 
/// /// # Examples /// Some examples of valid strings /// /// ``` /// use cargo::core::PackageIdSpec; /// /// let specs = vec![ /// "https://crates.io/foo", /// "https://crates.io/foo#1.2.3", /// "https://crates.io/foo#bar:1.2.3", /// "https://crates.io/foo#bar@1.2.3", /// "foo", /// "foo:1.2.3", /// "foo@1.2.3", /// ]; /// for spec in specs { /// assert!(PackageIdSpec::parse(spec).is_ok()); /// } pub fn parse(spec: &str) -> CargoResult { if spec.contains("://") { if let Ok(url) = spec.into_url() { return PackageIdSpec::from_url(url); } } else if spec.contains('/') || spec.contains('\\') { let abs = std::env::current_dir().unwrap_or_default().join(spec); if abs.exists() { let maybe_url = Url::from_file_path(abs) .map_or_else(|_| "a file:// URL".to_string(), |url| url.to_string()); bail!( "package ID specification `{}` looks like a file path, \ maybe try {}", spec, maybe_url ); } } let mut parts = spec.splitn(2, [':', '@']); let name = parts.next().unwrap(); let version = match parts.next() { Some(version) => Some(version.to_semver()?), None => None, }; validate_package_name(name, "pkgid", "")?; Ok(PackageIdSpec { name: InternedString::new(name), version, url: None, }) } /// Roughly equivalent to `PackageIdSpec::parse(spec)?.query(i)` pub fn query_str(spec: &str, i: I) -> CargoResult where I: IntoIterator, { let i: Vec<_> = i.into_iter().collect(); let spec = PackageIdSpec::parse(spec).with_context(|| { let suggestion = lev_distance::closest_msg(spec, i.iter(), |id| id.name().as_str()); format!("invalid package ID specification: `{}`{}", spec, suggestion) })?; spec.query(i) } /// Convert a `PackageId` to a `PackageIdSpec`, which will have both the `Version` and `Url` /// fields filled in. pub fn from_package_id(package_id: PackageId) -> PackageIdSpec { PackageIdSpec { name: package_id.name(), version: Some(package_id.version().clone()), url: Some(package_id.source_id().url().clone()), } } /// Tries to convert a valid `Url` to a `PackageIdSpec`. 
fn from_url(mut url: Url) -> CargoResult { if url.query().is_some() { bail!("cannot have a query string in a pkgid: {}", url) } let frag = url.fragment().map(|s| s.to_owned()); url.set_fragment(None); let (name, version) = { let mut path = url .path_segments() .ok_or_else(|| anyhow::format_err!("pkgid urls must have a path: {}", url))?; let path_name = path.next_back().ok_or_else(|| { anyhow::format_err!( "pkgid urls must have at least one path \ component: {}", url ) })?; match frag { Some(fragment) => { let mut parts = fragment.splitn(2, [':', '@']); let name_or_version = parts.next().unwrap(); match parts.next() { Some(part) => { let version = part.to_semver()?; (InternedString::new(name_or_version), Some(version)) } None => { if name_or_version.chars().next().unwrap().is_alphabetic() { (InternedString::new(name_or_version), None) } else { let version = name_or_version.to_semver()?; (InternedString::new(path_name), Some(version)) } } } } None => (InternedString::new(path_name), None), } }; Ok(PackageIdSpec { name, version, url: Some(url), }) } pub fn name(&self) -> InternedString { self.name } pub fn version(&self) -> Option<&Version> { self.version.as_ref() } pub fn url(&self) -> Option<&Url> { self.url.as_ref() } pub fn set_url(&mut self, url: Url) { self.url = Some(url); } /// Checks whether the given `PackageId` matches the `PackageIdSpec`. pub fn matches(&self, package_id: PackageId) -> bool { if self.name() != package_id.name() { return false; } if let Some(ref v) = self.version { if v != package_id.version() { return false; } } match self.url { Some(ref u) => u == package_id.source_id().url(), None => true, } } /// Checks a list of `PackageId`s to find 1 that matches this `PackageIdSpec`. If 0, 2, or /// more are found, then this returns an error. 
pub fn query(&self, i: I) -> CargoResult where I: IntoIterator, { let all_ids: Vec<_> = i.into_iter().collect(); let mut ids = all_ids.iter().copied().filter(|&id| self.matches(id)); let ret = match ids.next() { Some(id) => id, None => { let mut suggestion = String::new(); let try_spec = |spec: PackageIdSpec, suggestion: &mut String| { let try_matches: Vec<_> = all_ids .iter() .copied() .filter(|&id| spec.matches(id)) .collect(); if !try_matches.is_empty() { suggestion.push_str("\nDid you mean one of these?\n"); minimize(suggestion, &try_matches, self); } }; if self.url.is_some() { try_spec( PackageIdSpec { name: self.name, version: self.version.clone(), url: None, }, &mut suggestion, ); } if suggestion.is_empty() && self.version.is_some() { try_spec( PackageIdSpec { name: self.name, version: None, url: None, }, &mut suggestion, ); } if suggestion.is_empty() { suggestion.push_str(&lev_distance::closest_msg( &self.name, all_ids.iter(), |id| id.name().as_str(), )); } bail!( "package ID specification `{}` did not match any packages{}", self, suggestion ); } }; return match ids.next() { Some(other) => { let mut msg = format!( "There are multiple `{}` packages in \ your project, and the specification \ `{}` is ambiguous.\n\ Please re-run this command \ with `-p ` where `` is one \ of the following:", self.name(), self ); let mut vec = vec![ret, other]; vec.extend(ids); minimize(&mut msg, &vec, self); Err(anyhow::format_err!("{}", msg)) } None => Ok(ret), }; fn minimize(msg: &mut String, ids: &[PackageId], spec: &PackageIdSpec) { let mut version_cnt = HashMap::new(); for id in ids { *version_cnt.entry(id.version()).or_insert(0) += 1; } for id in ids { if version_cnt[id.version()] == 1 { msg.push_str(&format!("\n {}@{}", spec.name(), id.version())); } else { msg.push_str(&format!("\n {}", PackageIdSpec::from_package_id(*id))); } } } } } impl fmt::Display for PackageIdSpec { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let mut printed_name = false; match 
self.url { Some(ref url) => { write!(f, "{}", url)?; if url.path_segments().unwrap().next_back().unwrap() != &*self.name { printed_name = true; write!(f, "#{}", self.name)?; } } None => { printed_name = true; write!(f, "{}", self.name)?; } } if let Some(ref v) = self.version { write!(f, "{}{}", if printed_name { "@" } else { "#" }, v)?; } Ok(()) } } impl ser::Serialize for PackageIdSpec { fn serialize(&self, s: S) -> Result where S: ser::Serializer, { self.to_string().serialize(s) } } impl<'de> de::Deserialize<'de> for PackageIdSpec { fn deserialize(d: D) -> Result where D: de::Deserializer<'de>, { let string = String::deserialize(d)?; PackageIdSpec::parse(&string).map_err(de::Error::custom) } } #[cfg(test)] mod tests { use super::PackageIdSpec; use crate::core::{PackageId, SourceId}; use crate::util::interning::InternedString; use crate::util::ToSemver; use url::Url; #[test] fn good_parsing() { #[track_caller] fn ok(spec: &str, expected: PackageIdSpec, expected_rendered: &str) { let parsed = PackageIdSpec::parse(spec).unwrap(); assert_eq!(parsed, expected); assert_eq!(parsed.to_string(), expected_rendered); } ok( "https://crates.io/foo", PackageIdSpec { name: InternedString::new("foo"), version: None, url: Some(Url::parse("https://crates.io/foo").unwrap()), }, "https://crates.io/foo", ); ok( "https://crates.io/foo#1.2.3", PackageIdSpec { name: InternedString::new("foo"), version: Some("1.2.3".to_semver().unwrap()), url: Some(Url::parse("https://crates.io/foo").unwrap()), }, "https://crates.io/foo#1.2.3", ); ok( "https://crates.io/foo#bar:1.2.3", PackageIdSpec { name: InternedString::new("bar"), version: Some("1.2.3".to_semver().unwrap()), url: Some(Url::parse("https://crates.io/foo").unwrap()), }, "https://crates.io/foo#bar@1.2.3", ); ok( "https://crates.io/foo#bar@1.2.3", PackageIdSpec { name: InternedString::new("bar"), version: Some("1.2.3".to_semver().unwrap()), url: Some(Url::parse("https://crates.io/foo").unwrap()), }, "https://crates.io/foo#bar@1.2.3", ); 
ok( "foo", PackageIdSpec { name: InternedString::new("foo"), version: None, url: None, }, "foo", ); ok( "foo:1.2.3", PackageIdSpec { name: InternedString::new("foo"), version: Some("1.2.3".to_semver().unwrap()), url: None, }, "foo@1.2.3", ); ok( "foo@1.2.3", PackageIdSpec { name: InternedString::new("foo"), version: Some("1.2.3".to_semver().unwrap()), url: None, }, "foo@1.2.3", ); } #[test] fn bad_parsing() { assert!(PackageIdSpec::parse("baz:").is_err()); assert!(PackageIdSpec::parse("baz:*").is_err()); assert!(PackageIdSpec::parse("baz:1.0").is_err()); assert!(PackageIdSpec::parse("baz@").is_err()); assert!(PackageIdSpec::parse("baz@*").is_err()); assert!(PackageIdSpec::parse("baz@1.0").is_err()); assert!(PackageIdSpec::parse("https://baz:1.0").is_err()); assert!(PackageIdSpec::parse("https://#baz:1.0").is_err()); } #[test] fn matching() { let url = Url::parse("https://example.com").unwrap(); let sid = SourceId::for_registry(&url).unwrap(); let foo = PackageId::new("foo", "1.2.3", sid).unwrap(); let bar = PackageId::new("bar", "1.2.3", sid).unwrap(); assert!(PackageIdSpec::parse("foo").unwrap().matches(foo)); assert!(!PackageIdSpec::parse("foo").unwrap().matches(bar)); assert!(PackageIdSpec::parse("foo:1.2.3").unwrap().matches(foo)); assert!(!PackageIdSpec::parse("foo:1.2.2").unwrap().matches(foo)); assert!(PackageIdSpec::parse("foo@1.2.3").unwrap().matches(foo)); assert!(!PackageIdSpec::parse("foo@1.2.2").unwrap().matches(foo)); } } cargo-0.66.0/src/cargo/core/profiles.rs000066400000000000000000001310671432416201200177040ustar00rootroot00000000000000use crate::core::compiler::{CompileKind, CompileTarget, Unit}; use crate::core::dependency::Artifact; use crate::core::resolver::features::FeaturesFor; use crate::core::{PackageId, PackageIdSpec, Resolve, Shell, Target, Workspace}; use crate::util::interning::InternedString; use crate::util::toml::{ProfilePackageSpec, StringOrBool, TomlProfile, TomlProfiles, U32OrBool}; use crate::util::{closest_msg, config, 
CargoResult, Config}; use anyhow::{bail, Context as _}; use std::collections::{BTreeMap, HashMap, HashSet}; use std::hash::Hash; use std::{cmp, env, fmt, hash}; /// Collection of all profiles. #[derive(Clone, Debug)] pub struct Profiles { /// Incremental compilation can be overridden globally via: /// - `CARGO_INCREMENTAL` environment variable. /// - `build.incremental` config value. incremental: Option, /// Map of profile name to directory name for that profile. dir_names: HashMap, /// The profile makers. Key is the profile name. by_name: HashMap, /// The original profiles written by the user in the manifest and config. /// /// This is here to assist with error reporting, as the `ProfileMaker` /// values have the inherits chains all merged together. original_profiles: BTreeMap, /// The profile the user requested to use. requested_profile: InternedString, /// The host target for rustc being used by this `Profiles`. rustc_host: InternedString, } impl Profiles { pub fn new(ws: &Workspace<'_>, requested_profile: InternedString) -> CargoResult { let config = ws.config(); let incremental = match env::var_os("CARGO_INCREMENTAL") { Some(v) => Some(v == "1"), None => config.build_config()?.incremental, }; let mut profiles = merge_config_profiles(ws, requested_profile)?; let rustc_host = ws.config().load_global_rustc(Some(ws))?.host; let mut profile_makers = Profiles { incremental, dir_names: Self::predefined_dir_names(), by_name: HashMap::new(), original_profiles: profiles.clone(), requested_profile, rustc_host, }; Self::add_root_profiles(&mut profile_makers, &profiles); // Merge with predefined profiles. use std::collections::btree_map::Entry; for (predef_name, mut predef_prof) in Self::predefined_profiles().into_iter() { match profiles.entry(InternedString::new(predef_name)) { Entry::Vacant(vac) => { vac.insert(predef_prof); } Entry::Occupied(mut oc) => { // Override predefined with the user-provided Toml. 
let r = oc.get_mut(); predef_prof.merge(r); *r = predef_prof; } } } for (name, profile) in &profiles { profile_makers.add_maker(*name, profile, &profiles)?; } // Verify that the requested profile is defined *somewhere*. // This simplifies the API (no need for CargoResult), and enforces // assumptions about how config profiles are loaded. profile_makers.get_profile_maker(requested_profile)?; Ok(profile_makers) } /// Returns the hard-coded directory names for built-in profiles. fn predefined_dir_names() -> HashMap { [ (InternedString::new("dev"), InternedString::new("debug")), (InternedString::new("test"), InternedString::new("debug")), (InternedString::new("bench"), InternedString::new("release")), ] .into() } /// Initialize `by_name` with the two "root" profiles, `dev`, and /// `release` given the user's definition. fn add_root_profiles( profile_makers: &mut Profiles, profiles: &BTreeMap, ) { profile_makers.by_name.insert( InternedString::new("dev"), ProfileMaker::new(Profile::default_dev(), profiles.get("dev").cloned()), ); profile_makers.by_name.insert( InternedString::new("release"), ProfileMaker::new(Profile::default_release(), profiles.get("release").cloned()), ); } /// Returns the built-in profiles (not including dev/release, which are /// "root" profiles). fn predefined_profiles() -> Vec<(&'static str, TomlProfile)> { vec![ ( "bench", TomlProfile { inherits: Some(InternedString::new("release")), ..TomlProfile::default() }, ), ( "test", TomlProfile { inherits: Some(InternedString::new("dev")), ..TomlProfile::default() }, ), ( "doc", TomlProfile { inherits: Some(InternedString::new("dev")), ..TomlProfile::default() }, ), ] } /// Creates a `ProfileMaker`, and inserts it into `self.by_name`. fn add_maker( &mut self, name: InternedString, profile: &TomlProfile, profiles: &BTreeMap, ) -> CargoResult<()> { match &profile.dir_name { None => {} Some(dir_name) => { self.dir_names.insert(name, dir_name.to_owned()); } } // dev/release are "roots" and don't inherit. 
if name == "dev" || name == "release" { if profile.inherits.is_some() { bail!( "`inherits` must not be specified in root profile `{}`", name ); } // Already inserted from `add_root_profiles`, no need to do anything. return Ok(()); } // Keep track for inherits cycles. let mut set = HashSet::new(); set.insert(name); let maker = self.process_chain(name, profile, &mut set, profiles)?; self.by_name.insert(name, maker); Ok(()) } /// Build a `ProfileMaker` by recursively following the `inherits` setting. /// /// * `name`: The name of the profile being processed. /// * `profile`: The TOML profile being processed. /// * `set`: Set of profiles that have been visited, used to detect cycles. /// * `profiles`: Map of all TOML profiles. /// /// Returns a `ProfileMaker` to be used for the given named profile. fn process_chain( &mut self, name: InternedString, profile: &TomlProfile, set: &mut HashSet, profiles: &BTreeMap, ) -> CargoResult { let mut maker = match profile.inherits { Some(inherits_name) if inherits_name == "dev" || inherits_name == "release" => { // These are the root profiles added in `add_root_profiles`. self.get_profile_maker(inherits_name).unwrap().clone() } Some(inherits_name) => { if !set.insert(inherits_name) { bail!( "profile inheritance loop detected with profile `{}` inheriting `{}`", name, inherits_name ); } match profiles.get(&inherits_name) { None => { bail!( "profile `{}` inherits from `{}`, but that profile is not defined", name, inherits_name ); } Some(parent) => self.process_chain(inherits_name, parent, set, profiles)?, } } None => { bail!( "profile `{}` is missing an `inherits` directive \ (`inherits` is required for all profiles except `dev` or `release`)", name ); } }; match &mut maker.toml { Some(toml) => toml.merge(profile), None => maker.toml = Some(profile.clone()), }; Ok(maker) } /// Retrieves the profile for a target. /// `is_member` is whether or not this package is a member of the /// workspace. 
pub fn get_profile( &self, pkg_id: PackageId, is_member: bool, is_local: bool, unit_for: UnitFor, kind: CompileKind, ) -> Profile { let maker = self.get_profile_maker(self.requested_profile).unwrap(); let mut profile = maker.get_profile(Some(pkg_id), is_member, unit_for.is_for_host()); // Dealing with `panic=abort` and `panic=unwind` requires some special // treatment. Be sure to process all the various options here. match unit_for.panic_setting() { PanicSetting::AlwaysUnwind => profile.panic = PanicStrategy::Unwind, PanicSetting::ReadProfile => {} } // Default macOS debug information to being stored in the "unpacked" // split-debuginfo format. At the time of this writing that's the only // platform which has a stable `-Csplit-debuginfo` option for rustc, // and it's typically much faster than running `dsymutil` on all builds // in incremental cases. if let Some(debug) = profile.debuginfo { if profile.split_debuginfo.is_none() && debug > 0 { let target = match &kind { CompileKind::Host => self.rustc_host.as_str(), CompileKind::Target(target) => target.short_name(), }; if target.contains("-apple-") { profile.split_debuginfo = Some(InternedString::new("unpacked")); } } } // Incremental can be globally overridden. if let Some(v) = self.incremental { profile.incremental = v; } // Only enable incremental compilation for sources the user can // modify (aka path sources). For things that change infrequently, // non-incremental builds yield better performance in the compiler // itself (aka crates.io / git dependencies) // // (see also https://github.com/rust-lang/cargo/issues/3972) if !is_local { profile.incremental = false; } profile.name = self.requested_profile; profile } /// The profile for *running* a `build.rs` script is only used for setting /// a few environment variables. To ensure proper de-duplication of the /// running `Unit`, this uses a stripped-down profile (so that unrelated /// profile flags don't cause `build.rs` to needlessly run multiple /// times). 
pub fn get_profile_run_custom_build(&self, for_unit_profile: &Profile) -> Profile { let mut result = Profile::default(); result.name = for_unit_profile.name; result.root = for_unit_profile.root; result.debuginfo = for_unit_profile.debuginfo; result.opt_level = for_unit_profile.opt_level; result } /// This returns the base profile. This is currently used for the /// `[Finished]` line. It is not entirely accurate, since it doesn't /// select for the package that was actually built. pub fn base_profile(&self) -> Profile { let profile_name = self.requested_profile; let maker = self.get_profile_maker(profile_name).unwrap(); maker.get_profile(None, /*is_member*/ true, /*is_for_host*/ false) } /// Gets the directory name for a profile, like `debug` or `release`. pub fn get_dir_name(&self) -> InternedString { *self .dir_names .get(&self.requested_profile) .unwrap_or(&self.requested_profile) } /// Used to check for overrides for non-existing packages. pub fn validate_packages( &self, profiles: Option<&TomlProfiles>, shell: &mut Shell, resolve: &Resolve, ) -> CargoResult<()> { for (name, profile) in &self.by_name { // If the user did not specify an override, skip this. This is here // to avoid generating errors for inherited profiles which don't // specify package overrides. The `by_name` profile has had the inherits // chain merged, so we need to look at the original source to check // if an override was specified. if self .original_profiles .get(name) .and_then(|orig| orig.package.as_ref()) .is_none() { continue; } let found = validate_packages_unique(resolve, name, &profile.toml)?; // We intentionally do not validate unmatched packages for config // profiles, in case they are defined in a central location. This // iterates over the manifest profiles only. 
if let Some(profiles) = profiles { if let Some(toml_profile) = profiles.get(name) { validate_packages_unmatched(shell, resolve, name, toml_profile, &found)?; } } } Ok(()) } /// Returns the profile maker for the given profile name. fn get_profile_maker(&self, name: InternedString) -> CargoResult<&ProfileMaker> { self.by_name .get(&name) .ok_or_else(|| anyhow::format_err!("profile `{}` is not defined", name)) } } /// An object used for handling the profile hierarchy. /// /// The precedence of profiles are (first one wins): /// - Profiles in `.cargo/config` files (using same order as below). /// - [profile.dev.package.name] -- a named package. /// - [profile.dev.package."*"] -- this cannot apply to workspace members. /// - [profile.dev.build-override] -- this can only apply to `build.rs` scripts /// and their dependencies. /// - [profile.dev] /// - Default (hard-coded) values. #[derive(Debug, Clone)] struct ProfileMaker { /// The starting, hard-coded defaults for the profile. default: Profile, /// The TOML profile defined in `Cargo.toml` or config. /// /// This is None if the user did not specify one, in which case the /// `default` is used. Note that the built-in defaults for test/bench/doc /// always set this since they need to declare the `inherits` value. toml: Option, } impl ProfileMaker { /// Creates a new `ProfileMaker`. /// /// Note that this does not process `inherits`, the caller is responsible for that. fn new(default: Profile, toml: Option) -> ProfileMaker { ProfileMaker { default, toml } } /// Generates a new `Profile`. fn get_profile( &self, pkg_id: Option, is_member: bool, is_for_host: bool, ) -> Profile { let mut profile = self.default.clone(); // First apply profile-specific settings, things like // `[profile.release]` if let Some(toml) = &self.toml { merge_profile(&mut profile, toml); } // Next start overriding those settings. First comes build dependencies // which default to opt-level 0... 
if is_for_host { // For-host units are things like procedural macros, build scripts, and // their dependencies. For these units most projects simply want them // to compile quickly and the runtime doesn't matter too much since // they tend to process very little data. For this reason we default // them to a "compile as quickly as possible" mode which for now means // basically turning down the optimization level and avoid limiting // codegen units. This ensures that we spend little time optimizing as // well as enabling parallelism by not constraining codegen units. profile.opt_level = InternedString::new("0"); profile.codegen_units = None; } // ... and next comes any other sorts of overrides specified in // profiles, such as `[profile.release.build-override]` or // `[profile.release.package.foo]` if let Some(toml) = &self.toml { merge_toml_overrides(pkg_id, is_member, is_for_host, &mut profile, toml); } profile } } /// Merge package and build overrides from the given TOML profile into the given `Profile`. fn merge_toml_overrides( pkg_id: Option, is_member: bool, is_for_host: bool, profile: &mut Profile, toml: &TomlProfile, ) { if is_for_host { if let Some(build_override) = &toml.build_override { merge_profile(profile, build_override); } } if let Some(overrides) = toml.package.as_ref() { if !is_member { if let Some(all) = overrides.get(&ProfilePackageSpec::All) { merge_profile(profile, all); } } if let Some(pkg_id) = pkg_id { let mut matches = overrides .iter() .filter_map(|(key, spec_profile)| match *key { ProfilePackageSpec::All => None, ProfilePackageSpec::Spec(ref s) => { if s.matches(pkg_id) { Some(spec_profile) } else { None } } }); if let Some(spec_profile) = matches.next() { merge_profile(profile, spec_profile); // `validate_packages` should ensure that there are // no additional matches. assert!( matches.next().is_none(), "package `{}` matched multiple package profile overrides", pkg_id ); } } } } /// Merge the given TOML profile into the given `Profile`. 
/// /// Does not merge overrides (see `merge_toml_overrides`). fn merge_profile(profile: &mut Profile, toml: &TomlProfile) { if let Some(ref opt_level) = toml.opt_level { profile.opt_level = InternedString::new(&opt_level.0); } match toml.lto { Some(StringOrBool::Bool(b)) => profile.lto = Lto::Bool(b), Some(StringOrBool::String(ref n)) if is_off(n.as_str()) => profile.lto = Lto::Off, Some(StringOrBool::String(ref n)) => profile.lto = Lto::Named(InternedString::new(n)), None => {} } if toml.codegen_backend.is_some() { profile.codegen_backend = toml.codegen_backend; } if toml.codegen_units.is_some() { profile.codegen_units = toml.codegen_units; } match toml.debug { Some(U32OrBool::U32(debug)) => profile.debuginfo = Some(debug), Some(U32OrBool::Bool(true)) => profile.debuginfo = Some(2), Some(U32OrBool::Bool(false)) => profile.debuginfo = None, None => {} } if let Some(debug_assertions) = toml.debug_assertions { profile.debug_assertions = debug_assertions; } if let Some(split_debuginfo) = &toml.split_debuginfo { profile.split_debuginfo = Some(InternedString::new(split_debuginfo)); } if let Some(rpath) = toml.rpath { profile.rpath = rpath; } if let Some(panic) = &toml.panic { profile.panic = match panic.as_str() { "unwind" => PanicStrategy::Unwind, "abort" => PanicStrategy::Abort, // This should be validated in TomlProfile::validate _ => panic!("Unexpected panic setting `{}`", panic), }; } if let Some(overflow_checks) = toml.overflow_checks { profile.overflow_checks = overflow_checks; } if let Some(incremental) = toml.incremental { profile.incremental = incremental; } if let Some(flags) = &toml.rustflags { profile.rustflags = flags.clone(); } profile.strip = match toml.strip { Some(StringOrBool::Bool(true)) => Strip::Named(InternedString::new("symbols")), None | Some(StringOrBool::Bool(false)) => Strip::None, Some(StringOrBool::String(ref n)) if n.as_str() == "none" => Strip::None, Some(StringOrBool::String(ref n)) => Strip::Named(InternedString::new(n)), }; } /// The 
root profile (dev/release). /// /// This is currently only used for the `PROFILE` env var for build scripts /// for backwards compatibility. We should probably deprecate `PROFILE` and /// encourage using things like `DEBUG` and `OPT_LEVEL` instead. #[derive(Clone, Copy, Eq, PartialOrd, Ord, PartialEq, Debug)] pub enum ProfileRoot { Release, Debug, } /// Profile settings used to determine which compiler flags to use for a /// target. #[derive(Clone, Eq, PartialOrd, Ord, serde::Serialize)] pub struct Profile { pub name: InternedString, pub opt_level: InternedString, #[serde(skip)] // named profiles are unstable pub root: ProfileRoot, pub lto: Lto, // `None` means use rustc default. pub codegen_backend: Option, // `None` means use rustc default. pub codegen_units: Option, pub debuginfo: Option, pub split_debuginfo: Option, pub debug_assertions: bool, pub overflow_checks: bool, pub rpath: bool, pub incremental: bool, pub panic: PanicStrategy, pub strip: Strip, #[serde(skip_serializing_if = "Vec::is_empty")] // remove when `rustflags` is stablized // Note that `rustflags` is used for the cargo-feature `profile_rustflags` pub rustflags: Vec, } impl Default for Profile { fn default() -> Profile { Profile { name: InternedString::new(""), opt_level: InternedString::new("0"), root: ProfileRoot::Debug, lto: Lto::Bool(false), codegen_backend: None, codegen_units: None, debuginfo: None, debug_assertions: false, split_debuginfo: None, overflow_checks: false, rpath: false, incremental: false, panic: PanicStrategy::Unwind, strip: Strip::None, rustflags: vec![], } } } compact_debug! 
{ impl fmt::Debug for Profile { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let (default, default_name) = match self.name.as_str() { "dev" => (Profile::default_dev(), "default_dev()"), "release" => (Profile::default_release(), "default_release()"), _ => (Profile::default(), "default()"), }; [debug_the_fields( name opt_level lto root codegen_backend codegen_units debuginfo split_debuginfo debug_assertions overflow_checks rpath incremental panic strip rustflags )] } } } impl fmt::Display for Profile { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Profile({})", self.name) } } impl hash::Hash for Profile { fn hash(&self, state: &mut H) where H: hash::Hasher, { self.comparable().hash(state); } } impl cmp::PartialEq for Profile { fn eq(&self, other: &Self) -> bool { self.comparable() == other.comparable() } } impl Profile { fn default_dev() -> Profile { Profile { name: InternedString::new("dev"), root: ProfileRoot::Debug, debuginfo: Some(2), debug_assertions: true, overflow_checks: true, incremental: true, ..Profile::default() } } fn default_release() -> Profile { Profile { name: InternedString::new("release"), root: ProfileRoot::Release, opt_level: InternedString::new("3"), ..Profile::default() } } /// Compares all fields except `name`, which doesn't affect compilation. /// This is necessary for `Unit` deduplication for things like "test" and /// "dev" which are essentially the same. fn comparable(&self) -> impl Hash + Eq { ( self.opt_level, self.lto, self.codegen_backend, self.codegen_units, self.debuginfo, self.split_debuginfo, self.debug_assertions, self.overflow_checks, self.rpath, self.incremental, self.panic, self.strip, ) } } /// The link-time-optimization setting. #[derive(Clone, Copy, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)] pub enum Lto { /// Explicitly no LTO, disables thin-LTO. Off, /// True = "Fat" LTO /// False = rustc default (no args), currently "thin LTO" Bool(bool), /// Named LTO settings like "thin". 
Named(InternedString), } impl serde::ser::Serialize for Lto { fn serialize(&self, s: S) -> Result where S: serde::ser::Serializer, { match self { Lto::Off => "off".serialize(s), Lto::Bool(b) => b.to_string().serialize(s), Lto::Named(n) => n.serialize(s), } } } /// The `panic` setting. #[derive(Clone, Copy, PartialEq, Eq, Debug, Hash, PartialOrd, Ord, serde::Serialize)] #[serde(rename_all = "lowercase")] pub enum PanicStrategy { Unwind, Abort, } impl fmt::Display for PanicStrategy { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { PanicStrategy::Unwind => "unwind", PanicStrategy::Abort => "abort", } .fmt(f) } } /// The setting for choosing which symbols to strip #[derive( Clone, Copy, PartialEq, Eq, Debug, Hash, PartialOrd, Ord, serde::Serialize, serde::Deserialize, )] #[serde(rename_all = "lowercase")] pub enum Strip { /// Don't remove any symbols None, /// Named Strip settings Named(InternedString), } impl fmt::Display for Strip { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { Strip::None => "none", Strip::Named(s) => s.as_str(), } .fmt(f) } } /// Flags used in creating `Unit`s to indicate the purpose for the target, and /// to ensure the target's dependencies have the correct settings. /// /// This means these are passed down from the root of the dependency tree to apply /// to most child dependencies. #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)] pub struct UnitFor { /// A target for `build.rs` or any of its dependencies, or a proc-macro or /// any of its dependencies. This enables `build-override` profiles for /// these targets. /// /// An invariant is that if `host_features` is true, `host` must be true. /// /// Note that this is `true` for `RunCustomBuild` units, even though that /// unit should *not* use build-override profiles. This is a bit of a /// special case. 
When computing the `RunCustomBuild` unit, it manually /// uses the `get_profile_run_custom_build` method to get the correct /// profile information for the unit. `host` needs to be true so that all /// of the dependencies of that `RunCustomBuild` unit have this flag be /// sticky (and forced to `true` for all further dependencies) β€” which is /// the whole point of `UnitFor`. host: bool, /// A target for a build dependency or proc-macro (or any of its /// dependencies). This is used for computing features of build /// dependencies and proc-macros independently of other dependency kinds. /// /// The subtle difference between this and `host` is that the build script /// for a non-host package sets this to `false` because it wants the /// features of the non-host package (whereas `host` is true because the /// build script is being built for the host). `host_features` becomes /// `true` for build-dependencies or proc-macros, or any of their /// dependencies. For example, with this dependency tree: /// /// ```text /// foo /// β”œβ”€β”€ foo build.rs /// β”‚ └── shared_dep (BUILD dependency) /// β”‚ └── shared_dep build.rs /// └── shared_dep (Normal dependency) /// └── shared_dep build.rs /// ``` /// /// In this example, `foo build.rs` is HOST=true, HOST_FEATURES=false. /// This is so that `foo build.rs` gets the profile settings for build /// scripts (HOST=true) and features of foo (HOST_FEATURES=false) because /// build scripts need to know which features their package is being built /// with. /// /// But in the case of `shared_dep`, when built as a build dependency, /// both flags are true (it only wants the build-dependency features). /// When `shared_dep` is built as a normal dependency, then `shared_dep /// build.rs` is HOST=true, HOST_FEATURES=false for the same reasons that /// foo's build script is set that way. host_features: bool, /// How Cargo processes the `panic` setting or profiles. 
This is done to /// handle test/benches inheriting from dev/release, as well as forcing /// `for_host` units to always unwind. panic_setting: PanicSetting, /// The compile kind of the root unit for which artifact dependencies are built. /// This is required particularly for the `target = "target"` setting of artifact /// dependencies which mean to inherit the `--target` specified on the command-line. /// However, that is a multi-value argument and root units are already created to /// reflect one unit per --target. Thus we have to build one artifact with the /// correct target for each of these trees. /// Note that this will always be set as we don't initially know if there are /// artifacts that make use of it. root_compile_kind: CompileKind, /// This is only set for artifact dependencies which have their /// `|target` set. /// If so, this information is used as part of the key for resolving their features, /// allowing for target-dependent feature resolution within the entire dependency tree. /// Note that this target corresponds to the target used to build the units in that /// dependency tree, too, but this copy of it is specifically used for feature lookup. artifact_target_for_features: Option, } #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)] enum PanicSetting { /// Used to force a unit to always be compiled with the `panic=unwind` /// strategy, notably for build scripts, proc macros, etc. AlwaysUnwind, /// Indicates that this unit will read its `profile` setting and use /// whatever is configured there. ReadProfile, } impl UnitFor { /// A unit for a normal target/dependency (i.e., not custom build, /// proc macro/plugin, or test/bench). pub fn new_normal(root_compile_kind: CompileKind) -> UnitFor { UnitFor { host: false, host_features: false, panic_setting: PanicSetting::ReadProfile, root_compile_kind, artifact_target_for_features: None, } } /// A unit for a custom build script or proc-macro or its dependencies. 
/// /// The `host_features` parameter is whether or not this is for a build /// dependency or proc-macro (something that requires being built "on the /// host"). Build scripts for non-host units should use `false` because /// they want to use the features of the package they are running for. pub fn new_host(host_features: bool, root_compile_kind: CompileKind) -> UnitFor { UnitFor { host: true, host_features, // Force build scripts to always use `panic=unwind` for now to // maximally share dependencies with procedural macros. panic_setting: PanicSetting::AlwaysUnwind, root_compile_kind, artifact_target_for_features: None, } } /// A unit for a compiler plugin or their dependencies. pub fn new_compiler(root_compile_kind: CompileKind) -> UnitFor { UnitFor { host: false, // The feature resolver doesn't know which dependencies are // plugins, so for now plugins don't split features. Since plugins // are mostly deprecated, just leave this as false. host_features: false, // Force plugins to use `panic=abort` so panics in the compiler do // not abort the process but instead end with a reasonable error // message that involves catching the panic in the compiler. panic_setting: PanicSetting::AlwaysUnwind, root_compile_kind, artifact_target_for_features: None, } } /// A unit for a test/bench target or their dependencies. /// /// Note that `config` is taken here for unstable CLI features to detect /// whether `panic=abort` is supported for tests. Historical versions of /// rustc did not support this, but newer versions do with an unstable /// compiler flag. pub fn new_test(config: &Config, root_compile_kind: CompileKind) -> UnitFor { UnitFor { host: false, host_features: false, // We're testing out an unstable feature (`-Zpanic-abort-tests`) // which inherits the panic setting from the dev/release profile // (basically avoid recompiles) but historical defaults required // that we always unwound. 
panic_setting: if config.cli_unstable().panic_abort_tests { PanicSetting::ReadProfile } else { PanicSetting::AlwaysUnwind }, root_compile_kind, artifact_target_for_features: None, } } /// This is a special case for unit tests of a proc-macro. /// /// Proc-macro unit tests are forced to be run on the host. pub fn new_host_test(config: &Config, root_compile_kind: CompileKind) -> UnitFor { let mut unit_for = UnitFor::new_test(config, root_compile_kind); unit_for.host = true; unit_for.host_features = true; unit_for } /// Returns a new copy updated based on the target dependency. /// /// This is where the magic happens that the host/host_features settings /// transition in a sticky fashion. As the dependency graph is being /// built, once those flags are set, they stay set for the duration of /// that portion of tree. pub fn with_dependency( self, parent: &Unit, dep_target: &Target, root_compile_kind: CompileKind, ) -> UnitFor { // A build script or proc-macro transitions this to being built for the host. let dep_for_host = dep_target.for_host(); // This is where feature decoupling of host versus target happens. // // Once host features are desired, they are always desired. // // A proc-macro should always use host features. // // Dependencies of a build script should use host features (subtle // point: the build script itself does *not* use host features, that's // why the parent is checked here, and not the dependency). let host_features = self.host_features || parent.target.is_custom_build() || dep_target.proc_macro(); // Build scripts and proc macros, and all of their dependencies are // AlwaysUnwind. 
let panic_setting = if dep_for_host { PanicSetting::AlwaysUnwind } else { self.panic_setting }; UnitFor { host: self.host || dep_for_host, host_features, panic_setting, root_compile_kind, artifact_target_for_features: self.artifact_target_for_features, } } pub fn for_custom_build(self) -> UnitFor { UnitFor { host: true, host_features: self.host_features, // Force build scripts to always use `panic=unwind` for now to // maximally share dependencies with procedural macros. panic_setting: PanicSetting::AlwaysUnwind, root_compile_kind: self.root_compile_kind, artifact_target_for_features: self.artifact_target_for_features, } } /// Set the artifact compile target for use in features using the given `artifact`. pub(crate) fn with_artifact_features(mut self, artifact: &Artifact) -> UnitFor { self.artifact_target_for_features = artifact.target().and_then(|t| t.to_compile_target()); self } /// Set the artifact compile target as determined by a resolved compile target. This is used if `target = "target"`. pub(crate) fn with_artifact_features_from_resolved_compile_kind( mut self, kind: Option, ) -> UnitFor { self.artifact_target_for_features = kind.and_then(|kind| match kind { CompileKind::Host => None, CompileKind::Target(triple) => Some(triple), }); self } /// Returns `true` if this unit is for a build script or any of its /// dependencies, or a proc macro or any of its dependencies. pub fn is_for_host(&self) -> bool { self.host } pub fn is_for_host_features(&self) -> bool { self.host_features } /// Returns how `panic` settings should be handled for this profile fn panic_setting(&self) -> PanicSetting { self.panic_setting } /// We might contain a parent artifact compile kind for features already, but will /// gladly accept the one of this dependency as an override as it defines how /// the artifact is built. /// If we are an artifact but don't specify a `target`, we assume the default /// compile kind that is suitable in this situation. 
pub(crate) fn map_to_features_for(&self, dep_artifact: Option<&Artifact>) -> FeaturesFor { FeaturesFor::from_for_host_or_artifact_target( self.is_for_host_features(), match dep_artifact { Some(artifact) => artifact .target() .and_then(|t| t.to_resolved_compile_target(self.root_compile_kind)), None => self.artifact_target_for_features, }, ) } pub(crate) fn root_compile_kind(&self) -> CompileKind { self.root_compile_kind } } /// Takes the manifest profiles, and overlays the config profiles on-top. /// /// Returns a new copy of the profile map with all the mergers complete. fn merge_config_profiles( ws: &Workspace<'_>, requested_profile: InternedString, ) -> CargoResult> { let mut profiles = match ws.profiles() { Some(profiles) => profiles.get_all().clone(), None => BTreeMap::new(), }; // Set of profile names to check if defined in config only. let mut check_to_add = HashSet::new(); check_to_add.insert(requested_profile); // Merge config onto manifest profiles. for (name, profile) in &mut profiles { if let Some(config_profile) = get_config_profile(ws, name)? { profile.merge(&config_profile); } if let Some(inherits) = &profile.inherits { check_to_add.insert(*inherits); } } // Add the built-in profiles. This is important for things like `cargo // test` which implicitly use the "dev" profile for dependencies. for name in &["dev", "release", "test", "bench"] { check_to_add.insert(InternedString::new(name)); } // Add config-only profiles. // Need to iterate repeatedly to get all the inherits values. let mut current = HashSet::new(); while !check_to_add.is_empty() { std::mem::swap(&mut current, &mut check_to_add); for name in current.drain() { if !profiles.contains_key(&name) { if let Some(config_profile) = get_config_profile(ws, &name)? { if let Some(inherits) = &config_profile.inherits { check_to_add.insert(*inherits); } profiles.insert(name, config_profile); } } } } Ok(profiles) } /// Helper for fetching a profile from config. 
fn get_config_profile(ws: &Workspace<'_>, name: &str) -> CargoResult> { let profile: Option> = ws.config().get(&format!("profile.{}", name))?; let profile = match profile { Some(profile) => profile, None => return Ok(None), }; let mut warnings = Vec::new(); profile .val .validate(name, ws.unstable_features(), &mut warnings) .with_context(|| { format!( "config profile `{}` is not valid (defined in `{}`)", name, profile.definition ) })?; for warning in warnings { ws.config().shell().warn(warning)?; } Ok(Some(profile.val)) } /// Validate that a package does not match multiple package override specs. /// /// For example `[profile.dev.package.bar]` and `[profile.dev.package."bar:0.5.0"]` /// would both match `bar:0.5.0` which would be ambiguous. fn validate_packages_unique( resolve: &Resolve, name: &str, toml: &Option, ) -> CargoResult> { let toml = match toml { Some(ref toml) => toml, None => return Ok(HashSet::new()), }; let overrides = match toml.package.as_ref() { Some(overrides) => overrides, None => return Ok(HashSet::new()), }; // Verify that a package doesn't match multiple spec overrides. let mut found = HashSet::new(); for pkg_id in resolve.iter() { let matches: Vec<&PackageIdSpec> = overrides .keys() .filter_map(|key| match *key { ProfilePackageSpec::All => None, ProfilePackageSpec::Spec(ref spec) => { if spec.matches(pkg_id) { Some(spec) } else { None } } }) .collect(); match matches.len() { 0 => {} 1 => { found.insert(matches[0].clone()); } _ => { let specs = matches .iter() .map(|spec| spec.to_string()) .collect::>() .join(", "); bail!( "multiple package overrides in profile `{}` match package `{}`\n\ found package specs: {}", name, pkg_id, specs ); } } } Ok(found) } /// Check for any profile override specs that do not match any known packages. /// /// This helps check for typos and mistakes. 
fn validate_packages_unmatched( shell: &mut Shell, resolve: &Resolve, name: &str, toml: &TomlProfile, found: &HashSet, ) -> CargoResult<()> { let overrides = match toml.package.as_ref() { Some(overrides) => overrides, None => return Ok(()), }; // Verify every override matches at least one package. let missing_specs = overrides.keys().filter_map(|key| { if let ProfilePackageSpec::Spec(ref spec) = *key { if !found.contains(spec) { return Some(spec); } } None }); for spec in missing_specs { // See if there is an exact name match. let name_matches: Vec = resolve .iter() .filter_map(|pkg_id| { if pkg_id.name() == spec.name() { Some(pkg_id.to_string()) } else { None } }) .collect(); if name_matches.is_empty() { let suggestion = closest_msg(&spec.name(), resolve.iter(), |p| p.name().as_str()); shell.warn(format!( "profile package spec `{}` in profile `{}` did not match any packages{}", spec, name, suggestion ))?; } else { shell.warn(format!( "profile package spec `{}` in profile `{}` \ has a version or URL that does not match any of the packages: {}", spec, name, name_matches.join(", ") ))?; } } Ok(()) } /// Returns `true` if a string is a toggle that turns an option off. fn is_off(s: &str) -> bool { matches!(s, "off" | "n" | "no" | "none") } cargo-0.66.0/src/cargo/core/registry.rs000066400000000000000000001162331432416201200177270ustar00rootroot00000000000000use std::collections::{HashMap, HashSet}; use std::task::Poll; use crate::core::PackageSet; use crate::core::{Dependency, PackageId, QueryKind, Source, SourceId, SourceMap, Summary}; use crate::sources::config::SourceConfigMap; use crate::util::errors::CargoResult; use crate::util::interning::InternedString; use crate::util::{CanonicalUrl, Config}; use anyhow::{bail, Context as _}; use log::{debug, trace}; use url::Url; /// Source of information about a group of packages. /// /// See also `core::Source`. pub trait Registry { /// Attempt to find the packages that match a dependency request. 
fn query( &mut self, dep: &Dependency, kind: QueryKind, f: &mut dyn FnMut(Summary), ) -> Poll>; fn query_vec(&mut self, dep: &Dependency, kind: QueryKind) -> Poll>> { let mut ret = Vec::new(); self.query(dep, kind, &mut |s| ret.push(s)).map_ok(|()| ret) } fn describe_source(&self, source: SourceId) -> String; fn is_replaced(&self, source: SourceId) -> bool; /// Block until all outstanding Poll::Pending requests are Poll::Ready. fn block_until_ready(&mut self) -> CargoResult<()>; } /// This structure represents a registry of known packages. It internally /// contains a number of `Box` instances which are used to load a /// `Package` from. /// /// The resolution phase of Cargo uses this to drive knowledge about new /// packages as well as querying for lists of new packages. It is here that /// sources are updated (e.g., network operations) and overrides are /// handled. /// /// The general idea behind this registry is that it is centered around the /// `SourceMap` structure, contained within which is a mapping of a `SourceId` to /// a `Source`. Each `Source` in the map has been updated (using network /// operations if necessary) and is ready to be queried for packages. pub struct PackageRegistry<'cfg> { config: &'cfg Config, sources: SourceMap<'cfg>, // A list of sources which are considered "overrides" which take precedent // when querying for packages. overrides: Vec, // Note that each SourceId does not take into account its `precise` field // when hashing or testing for equality. When adding a new `SourceId`, we // want to avoid duplicates in the `SourceMap` (to prevent re-updating the // same git repo twice for example), but we also want to ensure that the // loaded source is always updated. // // Sources with a `precise` field normally don't need to be updated because // their contents are already on disk, but sources without a `precise` field // almost always need to be updated. 
If we have a cached `Source` for a // precise `SourceId`, then when we add a new `SourceId` that is not precise // we want to ensure that the underlying source is updated. // // This is basically a long-winded way of saying that we want to know // precisely what the keys of `sources` are, so this is a mapping of key to // what exactly the key is. source_ids: HashMap, locked: LockedMap, yanked_whitelist: HashSet, source_config: SourceConfigMap<'cfg>, patches: HashMap>, patches_locked: bool, patches_available: HashMap>, } /// A map of all "locked packages" which is filled in when parsing a lock file /// and is used to guide dependency resolution by altering summaries as they're /// queried from this source. /// /// This map can be thought of as a glorified `Vec` where `MySummary` /// has a `PackageId` for which package it represents as well as a list of /// `PackageId` for the resolved dependencies. The hash map is otherwise /// structured though for easy access throughout this registry. type LockedMap = HashMap< // The first level of key-ing done in this hash map is the source that // dependencies come from, identified by a `SourceId`. // The next level is keyed by the name of the package... (SourceId, InternedString), // ... and the value here is a list of tuples. The first element of each // tuple is a package which has the source/name used to get to this // point. The second element of each tuple is the list of locked // dependencies that the first element has. Vec<(PackageId, Vec)>, >; #[derive(PartialEq, Eq, Clone, Copy)] enum Kind { Override, Locked, Normal, } /// Argument to `PackageRegistry::patch` which is information about a `[patch]` /// directive that we found in a lockfile, if present. pub struct LockedPatchDependency { /// The original `Dependency` directive, except "locked" so it's version /// requirement is `=foo` and its `SourceId` has a "precise" listed. 
pub dependency: Dependency, /// The `PackageId` that was previously found in a lock file which /// `dependency` matches. pub package_id: PackageId, /// Something only used for backwards compatibility with the v2 lock file /// format where `branch=master` is considered the same as `DefaultBranch`. /// For more comments on this see the code in `ops/resolve.rs`. pub alt_package_id: Option, } impl<'cfg> PackageRegistry<'cfg> { pub fn new(config: &'cfg Config) -> CargoResult> { let source_config = SourceConfigMap::new(config)?; Ok(PackageRegistry { config, sources: SourceMap::new(), source_ids: HashMap::new(), overrides: Vec::new(), source_config, locked: HashMap::new(), yanked_whitelist: HashSet::new(), patches: HashMap::new(), patches_locked: false, patches_available: HashMap::new(), }) } pub fn get(self, package_ids: &[PackageId]) -> CargoResult> { trace!("getting packages; sources={}", self.sources.len()); PackageSet::new(package_ids, self.sources, self.config) } fn ensure_loaded(&mut self, namespace: SourceId, kind: Kind) -> CargoResult<()> { match self.source_ids.get(&namespace) { // We've previously loaded this source, and we've already locked it, // so we're not allowed to change it even if `namespace` has a // slightly different precise version listed. Some((_, Kind::Locked)) => { debug!("load/locked {}", namespace); return Ok(()); } // If the previous source was not a precise source, then we can be // sure that it's already been updated if we've already loaded it. Some((previous, _)) if previous.precise().is_none() => { debug!("load/precise {}", namespace); return Ok(()); } // If the previous source has the same precise version as we do, // then we're done, otherwise we need to need to move forward // updating this source. 
Some((previous, _)) => { if previous.precise() == namespace.precise() { debug!("load/match {}", namespace); return Ok(()); } debug!("load/mismatch {}", namespace); } None => { debug!("load/missing {}", namespace); } } self.load(namespace, kind)?; // This isn't strictly necessary since it will be called later. // However it improves error messages for sources that issue errors // in `block_until_ready` because the callers here have context about // which deps are being resolved. self.block_until_ready()?; Ok(()) } pub fn add_sources(&mut self, ids: impl IntoIterator) -> CargoResult<()> { for id in ids { self.ensure_loaded(id, Kind::Locked)?; } Ok(()) } pub fn add_preloaded(&mut self, source: Box) { self.add_source(source, Kind::Locked); } fn add_source(&mut self, source: Box, kind: Kind) { let id = source.source_id(); self.sources.insert(source); self.source_ids.insert(id, (id, kind)); } pub fn add_override(&mut self, source: Box) { self.overrides.push(source.source_id()); self.add_source(source, Kind::Override); } pub fn add_to_yanked_whitelist(&mut self, iter: impl Iterator) { let pkgs = iter.collect::>(); for (_, source) in self.sources.sources_mut() { source.add_to_yanked_whitelist(&pkgs); } self.yanked_whitelist.extend(pkgs); } /// remove all residual state from previous lock files. pub fn clear_lock(&mut self) { trace!("clear_lock"); self.locked = HashMap::new(); } pub fn register_lock(&mut self, id: PackageId, deps: Vec) { trace!("register_lock: {}", id); for dep in deps.iter() { trace!("\t-> {}", dep); } let sub_vec = self .locked .entry((id.source_id(), id.name())) .or_insert_with(Vec::new); sub_vec.push((id, deps)); } /// Insert a `[patch]` section into this registry. /// /// This method will insert a `[patch]` section for the `url` specified, /// with the given list of dependencies. The `url` specified is the URL of /// the source to patch (for example this is `crates-io` in the manifest). 
/// The `deps` is an array of all the entries in the `[patch]` section of /// the manifest. /// /// Here the `deps` will be resolved to a precise version and stored /// internally for future calls to `query` below. `deps` should be a tuple /// where the first element is the patch definition straight from the /// manifest, and the second element is an optional variant where the /// patch has been locked. This locked patch is the patch locked to /// a specific version found in Cargo.lock. This will be `None` if /// `Cargo.lock` doesn't exist, or the patch did not match any existing /// entries in `Cargo.lock`. /// /// Note that the patch list specified here *will not* be available to /// `query` until `lock_patches` is called below, which should be called /// once all patches have been added. /// /// The return value is a `Vec` of patches that should *not* be locked. /// This happens when the patch is locked, but the patch has been updated /// so the locked value is no longer correct. pub fn patch( &mut self, url: &Url, deps: &[(&Dependency, Option)], ) -> CargoResult> { // NOTE: None of this code is aware of required features. If a patch // is missing a required feature, you end up with an "unused patch" // warning, which is very hard to understand. Ideally the warning // would be tailored to indicate *why* it is unused. let canonical = CanonicalUrl::new(url)?; // Return value of patches that shouldn't be locked. let mut unlock_patches = Vec::new(); // First up we need to actually resolve each `deps` specification to // precisely one summary. We're not using the `query` method below as it // internally uses maps we're building up as part of this method // (`patches_available` and `patches`). Instead we're going straight to // the source to load information from it. 
// // Remember that each dependency listed in `[patch]` has to resolve to // precisely one package, so that's why we're just creating a flat list // of summaries which should be the same length as `deps` above. let mut deps_remaining: Vec<_> = deps.iter().collect(); let mut unlocked_summaries = Vec::new(); while !deps_remaining.is_empty() { let mut deps_pending = Vec::new(); for dep_remaining in deps_remaining { let (orig_patch, locked) = dep_remaining; // Use the locked patch if it exists, otherwise use the original. let dep = match locked { Some(lock) => &lock.dependency, None => *orig_patch, }; debug!( "registering a patch for `{}` with `{}`", url, dep.package_name() ); if dep.features().len() != 0 || !dep.uses_default_features() { self.source_config.config().shell().warn(format!( "patch for `{}` uses the features mechanism. \ default-features and features will not take effect because the patch dependency does not support this mechanism", dep.package_name() ))?; } // Go straight to the source for resolving `dep`. Load it as we // normally would and then ask it directly for the list of summaries // corresponding to this `dep`. self.ensure_loaded(dep.source_id(), Kind::Normal) .with_context(|| { format!( "failed to load source for dependency `{}`", dep.package_name() ) })?; let source = self .sources .get_mut(dep.source_id()) .expect("loaded source not present"); let summaries = match source.query_vec(dep, QueryKind::Exact)? 
{ Poll::Ready(deps) => deps, Poll::Pending => { deps_pending.push(dep_remaining); continue; } }; let (summary, should_unlock) = match summary_for_patch(orig_patch, &locked, summaries, source) { Poll::Ready(x) => x, Poll::Pending => { deps_pending.push(dep_remaining); continue; } } .with_context(|| { format!( "patch for `{}` in `{}` failed to resolve", orig_patch.package_name(), url, ) }) .with_context(|| format!("failed to resolve patches for `{}`", url))?; debug!( "patch summary is {:?} should_unlock={:?}", summary, should_unlock ); if let Some(unlock_id) = should_unlock { unlock_patches.push(((*orig_patch).clone(), unlock_id)); } if *summary.package_id().source_id().canonical_url() == canonical { return Err(anyhow::anyhow!( "patch for `{}` in `{}` points to the same source, but \ patches must point to different sources", dep.package_name(), url )) .context(format!("failed to resolve patches for `{}`", url)); } unlocked_summaries.push(summary); } deps_remaining = deps_pending; self.block_until_ready()?; } let mut name_and_version = HashSet::new(); for summary in unlocked_summaries.iter() { let name = summary.package_id().name(); let version = summary.package_id().version(); if !name_and_version.insert((name, version)) { bail!( "cannot have two `[patch]` entries which both resolve \ to `{} v{}`", name, version ); } } // Calculate a list of all patches available for this source which is // then used later during calls to `lock` to rewrite summaries to point // directly at these patched entries. // // Note that this is somewhat subtle where the list of `ids` for a // canonical URL is extend with possibly two ids per summary. This is done // to handle the transition from the v2->v3 lock file format where in // v2 DefeaultBranch was either DefaultBranch or Branch("master") for // git dependencies. In this case if `summary.package_id()` is // Branch("master") then alt_package_id will be DefaultBranch. 
This // signifies that there's a patch available for either of those // dependency directives if we see them in the dependency graph. // // This is a bit complicated and hopefully an edge case we can remove // in the future, but for now it hopefully doesn't cause too much // harm... let mut ids = Vec::new(); for (summary, (_, lock)) in unlocked_summaries.iter().zip(deps) { ids.push(summary.package_id()); if let Some(lock) = lock { ids.extend(lock.alt_package_id); } } self.patches_available.insert(canonical.clone(), ids); // Note that we do not use `lock` here to lock summaries! That step // happens later once `lock_patches` is invoked. In the meantime though // we want to fill in the `patches_available` map (later used in the // `lock` method) and otherwise store the unlocked summaries in // `patches` to get locked in a future call to `lock_patches`. self.patches.insert(canonical, unlocked_summaries); Ok(unlock_patches) } /// Lock all patch summaries added via `patch`, making them available to /// resolution via `query`. /// /// This function will internally `lock` each summary added via `patch` /// above now that the full set of `patch` packages are known. This'll allow /// us to correctly resolve overridden dependencies between patches /// hopefully! pub fn lock_patches(&mut self) { assert!(!self.patches_locked); for summaries in self.patches.values_mut() { for summary in summaries { debug!("locking patch {:?}", summary); *summary = lock(&self.locked, &self.patches_available, summary.clone()); } } self.patches_locked = true; } /// Gets all patches grouped by the source URLS they are going to patch. /// /// These patches are mainly collected from [`patch`](Self::patch). /// They might not be the same as patches actually used during dependency resolving. 
pub fn patches(&self) -> &HashMap> { &self.patches } fn load(&mut self, source_id: SourceId, kind: Kind) -> CargoResult<()> { debug!("loading source {}", source_id); let source = self .source_config .load(source_id, &self.yanked_whitelist) .with_context(|| format!("Unable to update {}", source_id))?; assert_eq!(source.source_id(), source_id); if kind == Kind::Override { self.overrides.push(source_id); } self.add_source(source, kind); // If we have an imprecise version then we don't know what we're going // to look for, so we always attempt to perform an update here. // // If we have a precise version, then we'll update lazily during the // querying phase. Note that precise in this case is only // `Some("locked")` as other `Some` values indicate a `cargo update // --precise` request if source_id.precise() != Some("locked") { self.sources.get_mut(source_id).unwrap().invalidate_cache(); } else { debug!("skipping update due to locked registry"); } Ok(()) } fn query_overrides(&mut self, dep: &Dependency) -> Poll>> { for &s in self.overrides.iter() { let src = self.sources.get_mut(s).unwrap(); let dep = Dependency::new_override(dep.package_name(), s); let mut results = match src.query_vec(&dep, QueryKind::Exact) { Poll::Ready(results) => results?, Poll::Pending => return Poll::Pending, }; if !results.is_empty() { return Poll::Ready(Ok(Some(results.remove(0)))); } } Poll::Ready(Ok(None)) } /// This function is used to transform a summary to another locked summary /// if possible. This is where the concept of a lock file comes into play. /// /// If a summary points at a package ID which was previously locked, then we /// override the summary's ID itself, as well as all dependencies, to be /// rewritten to the locked versions. This will transform the summary's /// source to a precise source (listed in the locked version) as well as /// transforming all of the dependencies from range requirements on /// imprecise sources to exact requirements on precise sources. 
/// /// If a summary does not point at a package ID which was previously locked, /// or if any dependencies were added and don't have a previously listed /// version, we still want to avoid updating as many dependencies as /// possible to keep the graph stable. In this case we map all of the /// summary's dependencies to be rewritten to a locked version wherever /// possible. If we're unable to map a dependency though, we just pass it on /// through. pub fn lock(&self, summary: Summary) -> Summary { assert!(self.patches_locked); lock(&self.locked, &self.patches_available, summary) } fn warn_bad_override( &self, override_summary: &Summary, real_summary: &Summary, ) -> CargoResult<()> { let mut real_deps = real_summary.dependencies().iter().collect::>(); let boilerplate = "\ This is currently allowed but is known to produce buggy behavior with spurious recompiles and changes to the crate graph. Path overrides unfortunately were never intended to support this feature, so for now this message is just a warning. In the future, however, this message will become a hard error. To change the dependency graph via an override it's recommended to use the `[patch]` feature of Cargo instead of the path override feature. This is documented online at the url below for more information. 
https://doc.rust-lang.org/cargo/reference/overriding-dependencies.html "; for dep in override_summary.dependencies() { if let Some(i) = real_deps.iter().position(|d| dep == *d) { real_deps.remove(i); continue; } let msg = format!( "path override for crate `{}` has altered the original list of\n\ dependencies; the dependency on `{}` was either added or\n\ modified to not match the previously resolved version\n\n\ {}", override_summary.package_id().name(), dep.package_name(), boilerplate ); self.source_config.config().shell().warn(&msg)?; return Ok(()); } if let Some(dep) = real_deps.get(0) { let msg = format!( "path override for crate `{}` has altered the original list of\n\ dependencies; the dependency on `{}` was removed\n\n\ {}", override_summary.package_id().name(), dep.package_name(), boilerplate ); self.source_config.config().shell().warn(&msg)?; return Ok(()); } Ok(()) } } impl<'cfg> Registry for PackageRegistry<'cfg> { fn query( &mut self, dep: &Dependency, kind: QueryKind, f: &mut dyn FnMut(Summary), ) -> Poll> { assert!(self.patches_locked); let (override_summary, n, to_warn) = { // Look for an override and get ready to query the real source. let override_summary = match self.query_overrides(dep) { Poll::Ready(override_summary) => override_summary?, Poll::Pending => return Poll::Pending, }; // Next up on our list of candidates is to check the `[patch]` // section of the manifest. Here we look through all patches // relevant to the source that `dep` points to, and then we match // name/version. Note that we don't use `dep.matches(..)` because // the patches, by definition, come from a different source. // This means that `dep.matches(..)` will always return false, when // what we really care about is the name/version match. 
let mut patches = Vec::::new(); if let Some(extra) = self.patches.get(dep.source_id().canonical_url()) { patches.extend( extra .iter() .filter(|s| dep.matches_ignoring_source(s.package_id())) .cloned(), ); } // A crucial feature of the `[patch]` feature is that we *don't* // query the actual registry if we have a "locked" dependency. A // locked dep basically just means a version constraint of `=a.b.c`, // and because patches take priority over the actual source then if // we have a candidate we're done. if patches.len() == 1 && dep.is_locked() { let patch = patches.remove(0); match override_summary { Some(summary) => (summary, 1, Some(patch)), None => { f(patch); return Poll::Ready(Ok(())); } } } else { if !patches.is_empty() { debug!( "found {} patches with an unlocked dep on `{}` at {} \ with `{}`, \ looking at sources", patches.len(), dep.package_name(), dep.source_id(), dep.version_req() ); } // Ensure the requested source_id is loaded self.ensure_loaded(dep.source_id(), Kind::Normal) .with_context(|| { format!( "failed to load source for dependency `{}`", dep.package_name() ) })?; let source = self.sources.get_mut(dep.source_id()); match (override_summary, source) { (Some(_), None) => { return Poll::Ready(Err(anyhow::anyhow!("override found but no real ones"))) } (None, None) => return Poll::Ready(Ok(())), // If we don't have an override then we just ship // everything upstairs after locking the summary (None, Some(source)) => { for patch in patches.iter() { f(patch.clone()); } // Our sources shouldn't ever come back to us with two // summaries that have the same version. We could, // however, have an `[patch]` section which is in use // to override a version in the registry. This means // that if our `summary` in this loop has the same // version as something in `patches` that we've // already selected, then we skip this `summary`. 
let locked = &self.locked; let all_patches = &self.patches_available; let callback = &mut |summary: Summary| { for patch in patches.iter() { let patch = patch.package_id().version(); if summary.package_id().version() == patch { return; } } f(lock(locked, all_patches, summary)) }; return source.query(dep, kind, callback); } // If we have an override summary then we query the source // to sanity check its results. We don't actually use any of // the summaries it gives us though. (Some(override_summary), Some(source)) => { if !patches.is_empty() { return Poll::Ready(Err(anyhow::anyhow!( "found patches and a path override" ))); } let mut n = 0; let mut to_warn = None; { let callback = &mut |summary| { n += 1; to_warn = Some(summary); }; let pend = source.query(dep, kind, callback); if pend.is_pending() { return Poll::Pending; } } (override_summary, n, to_warn) } } } }; if n > 1 { return Poll::Ready(Err(anyhow::anyhow!( "found an override with a non-locked list" ))); } else if let Some(summary) = to_warn { self.warn_bad_override(&override_summary, &summary)?; } f(self.lock(override_summary)); Poll::Ready(Ok(())) } fn describe_source(&self, id: SourceId) -> String { match self.sources.get(id) { Some(src) => src.describe(), None => id.to_string(), } } fn is_replaced(&self, id: SourceId) -> bool { match self.sources.get(id) { Some(src) => src.is_replaced(), None => false, } } fn block_until_ready(&mut self) -> CargoResult<()> { for (source_id, source) in self.sources.sources_mut() { source .block_until_ready() .with_context(|| format!("Unable to update {}", source_id))?; } Ok(()) } } fn lock( locked: &LockedMap, patches: &HashMap>, summary: Summary, ) -> Summary { let pair = locked .get(&(summary.source_id(), summary.name())) .and_then(|vec| vec.iter().find(|&&(id, _)| id == summary.package_id())); trace!("locking summary of {}", summary.package_id()); // Lock the summary's ID if possible let summary = match pair { Some((precise, _)) => summary.override_id(*precise), None 
=> summary, }; summary.map_dependencies(|dep| { trace!( "\t{}/{}/{}", dep.package_name(), dep.version_req(), dep.source_id() ); // If we've got a known set of overrides for this summary, then // one of a few cases can arise: // // 1. We have a lock entry for this dependency from the same // source as it's listed as coming from. In this case we make // sure to lock to precisely the given package ID. // // 2. We have a lock entry for this dependency, but it's from a // different source than what's listed, or the version // requirement has changed. In this case we must discard the // locked version because the dependency needs to be // re-resolved. // // 3. We have a lock entry for this dependency, but it's from a // different source than what's listed. This lock though happens // through `[patch]`, so we want to preserve it. // // 4. We don't have a lock entry for this dependency, in which // case it was likely an optional dependency which wasn't // included previously so we just pass it through anyway. // // Cases 1/2 are handled by `matches_id`, case 3 is handled specially, // and case 4 is handled by falling through to the logic below. if let Some((_, locked_deps)) = pair { let locked = locked_deps.iter().find(|&&id| { // If the dependency matches the package id exactly then we've // found a match, this is the id the dependency was previously // locked to. if dep.matches_id(id) { return true; } // If the name/version doesn't match, then we definitely don't // have a match whatsoever. Otherwise we need to check // `[patch]`... if !dep.matches_ignoring_source(id) { return false; } // ... so here we look up the dependency url in the patches // map, and we see if `id` is contained in the list of patches // for that url. If it is then this lock is still valid, // otherwise the lock is no longer valid. 
match patches.get(dep.source_id().canonical_url()) { Some(list) => list.contains(&id), None => false, } }); if let Some(&locked) = locked { trace!("\tfirst hit on {}", locked); let mut dep = dep; // If we found a locked version where the sources match, then // we can `lock_to` to get an exact lock on this dependency. // Otherwise we got a lock via `[patch]` so we only lock the // version requirement, not the source. if locked.source_id() == dep.source_id() { dep.lock_to(locked); } else { dep.lock_version(locked.version()); } return dep; } } // If this dependency did not have a locked version, then we query // all known locked packages to see if they match this dependency. // If anything does then we lock it to that and move on. let v = locked .get(&(dep.source_id(), dep.package_name())) .and_then(|vec| vec.iter().find(|&&(id, _)| dep.matches_id(id))); if let Some(&(id, _)) = v { trace!("\tsecond hit on {}", id); let mut dep = dep; dep.lock_to(id); return dep; } trace!("\tnope, unlocked"); dep }) } /// This is a helper for selecting the summary, or generating a helpful error message. fn summary_for_patch( orig_patch: &Dependency, locked: &Option, mut summaries: Vec, source: &mut dyn Source, ) -> Poll)>> { if summaries.len() == 1 { return Poll::Ready(Ok((summaries.pop().unwrap(), None))); } if summaries.len() > 1 { // TODO: In the future, it might be nice to add all of these // candidates so that version selection would just pick the // appropriate one. However, as this is currently structured, if we // added these all as patches, the unselected versions would end up in // the "unused patch" listing, and trigger a warning. It might take a // fair bit of restructuring to make that work cleanly, and there // isn't any demand at this time to support that. 
let mut vers: Vec<_> = summaries.iter().map(|summary| summary.version()).collect(); vers.sort(); let versions: Vec<_> = vers.into_iter().map(|v| v.to_string()).collect(); return Poll::Ready(Err(anyhow::anyhow!( "patch for `{}` in `{}` resolved to more than one candidate\n\ Found versions: {}\n\ Update the patch definition to select only one package.\n\ For example, add an `=` version requirement to the patch definition, \ such as `version = \"={}\"`.", orig_patch.package_name(), orig_patch.source_id(), versions.join(", "), versions.last().unwrap() ))); } assert!(summaries.is_empty()); // No summaries found, try to help the user figure out what is wrong. if let Some(locked) = locked { // Since the locked patch did not match anything, try the unlocked one. let orig_matches = match source.query_vec(orig_patch, QueryKind::Exact) { Poll::Pending => return Poll::Pending, Poll::Ready(deps) => deps, } .unwrap_or_else(|e| { log::warn!( "could not determine unlocked summaries for dep {:?}: {:?}", orig_patch, e ); Vec::new() }); let summary = match summary_for_patch(orig_patch, &None, orig_matches, source) { Poll::Pending => return Poll::Pending, Poll::Ready(summary) => summary?, }; // The unlocked version found a match. This returns a value to // indicate that this entry should be unlocked. return Poll::Ready(Ok((summary.0, Some(locked.package_id)))); } // Try checking if there are *any* packages that match this by name. 
let name_only_dep = Dependency::new_override(orig_patch.package_name(), orig_patch.source_id()); let name_summaries = match source.query_vec(&name_only_dep, QueryKind::Exact) { Poll::Pending => return Poll::Pending, Poll::Ready(deps) => deps, } .unwrap_or_else(|e| { log::warn!( "failed to do name-only summary query for {:?}: {:?}", name_only_dep, e ); Vec::new() }); let mut vers = name_summaries .iter() .map(|summary| summary.version()) .collect::>(); let found = match vers.len() { 0 => format!(""), 1 => format!("version `{}`", vers[0]), _ => { vers.sort(); let strs: Vec<_> = vers.into_iter().map(|v| v.to_string()).collect(); format!("versions `{}`", strs.join(", ")) } }; Poll::Ready(Err(if found.is_empty() { anyhow::anyhow!( "The patch location `{}` does not appear to contain any packages \ matching the name `{}`.", orig_patch.source_id(), orig_patch.package_name() ) } else { anyhow::anyhow!( "The patch location `{}` contains a `{}` package with {}, but the patch \ definition requires `{}`.\n\ Check that the version in the patch location is what you expect, \ and update the patch definition to match.", orig_patch.source_id(), orig_patch.package_name(), found, orig_patch.version_req() ) })) } cargo-0.66.0/src/cargo/core/resolver/000077500000000000000000000000001432416201200173445ustar00rootroot00000000000000cargo-0.66.0/src/cargo/core/resolver/conflict_cache.rs000066400000000000000000000216041432416201200226410ustar00rootroot00000000000000use std::collections::{BTreeMap, HashMap, HashSet}; use log::trace; use super::types::ConflictMap; use crate::core::resolver::Context; use crate::core::{Dependency, PackageId}; /// This is a trie for storing a large number of sets designed to /// efficiently see if any of the stored sets are a subset of a search set. enum ConflictStoreTrie { /// One of the stored sets. Leaf(ConflictMap), /// A map from an element to a subtrie where /// all the sets in the subtrie contains that element. 
Node(BTreeMap), } impl ConflictStoreTrie { /// Finds any known set of conflicts, if any, /// where all elements return some from `is_active` and contain `PackageId` specified. /// If more than one are activated, then it will return /// one that will allow for the most jump-back. fn find( &self, is_active: &impl Fn(PackageId) -> Option, must_contain: Option, mut max_age: usize, ) -> Option<(&ConflictMap, usize)> { match self { ConflictStoreTrie::Leaf(c) => { if must_contain.is_none() { Some((c, 0)) } else { // We did not find `must_contain`, so we need to keep looking. None } } ConflictStoreTrie::Node(m) => { let mut out = None; for (&pid, store) in must_contain .map(|f| m.range(..=f)) .unwrap_or_else(|| m.range(..)) { // If the key is active, then we need to check all of the corresponding subtrie. if let Some(age_this) = is_active(pid) { if age_this >= max_age && must_contain != Some(pid) { // not worth looking at, it is to old. continue; } if let Some((o, age_o)) = store.find(is_active, must_contain.filter(|&f| f != pid), max_age) { let age = if must_contain == Some(pid) { // all the results will include `must_contain` // so the age of must_contain is not relevant to find the best result. age_o } else { std::cmp::max(age_this, age_o) }; if max_age > age { // we found one that can jump-back further so replace the out. out = Some((o, age)); // and don't look at anything older max_age = age } } } // Else, if it is not active then there is no way any of the corresponding // subtrie will be conflicting. } out } } } fn insert(&mut self, mut iter: impl Iterator, con: ConflictMap) { if let Some(pid) = iter.next() { if let ConflictStoreTrie::Node(p) = self { p.entry(pid) .or_insert_with(|| ConflictStoreTrie::Node(BTreeMap::new())) .insert(iter, con); } // Else, we already have a subset of this in the `ConflictStore`. } else { // We are at the end of the set we are adding, there are three cases for what to do // next: // 1. 
`self` is an empty dummy Node inserted by `or_insert_with` // in witch case we should replace it with `Leaf(con)`. // 2. `self` is a `Node` because we previously inserted a superset of // the thing we are working on (I don't know if this happens in practice) // but the subset that we are working on will // always match any time the larger set would have // in witch case we can replace it with `Leaf(con)`. // 3. `self` is a `Leaf` that is in the same spot in the structure as // the thing we are working on. So it is equivalent. // We can replace it with `Leaf(con)`. if cfg!(debug_assertions) { if let ConflictStoreTrie::Leaf(c) = self { let a: Vec<_> = con.keys().collect(); let b: Vec<_> = c.keys().collect(); assert_eq!(a, b); } } *self = ConflictStoreTrie::Leaf(con) } } } pub(super) struct ConflictCache { // `con_from_dep` is a cache of the reasons for each time we // backtrack. For example after several backtracks we may have: // // con_from_dep[`foo = "^1.0.2"`] = map!{ // `foo=1.0.1`: map!{`foo=1.0.1`: Semver}, // `foo=1.0.0`: map!{`foo=1.0.0`: Semver}, // }; // // This can be read as "we cannot find a candidate for dep `foo = "^1.0.2"` // if either `foo=1.0.1` OR `foo=1.0.0` are activated". // // Another example after several backtracks we may have: // // con_from_dep[`foo = ">=0.8.2, <=0.9.3"`] = map!{ // `foo=0.8.1`: map!{ // `foo=0.9.4`: map!{`foo=0.8.1`: Semver, `foo=0.9.4`: Semver}, // } // }; // // This can be read as "we cannot find a candidate for dep `foo = ">=0.8.2, // <=0.9.3"` if both `foo=0.8.1` AND `foo=0.9.4` are activated". // // This is used to make sure we don't queue work we know will fail. See the // discussion in https://github.com/rust-lang/cargo/pull/5168 for why this // is so important. The nested HashMaps act as a kind of btree, that lets us // look up which entries are still active without // linearly scanning through the full list. // // Also, as a final note, this map is **not** ever removed from. 
This remains // as a global cache which we never delete from. Any entry in this map is // unconditionally true regardless of our resolution history of how we got // here. con_from_dep: HashMap, // `dep_from_pid` is an inverse-index of `con_from_dep`. // For every `PackageId` this lists the `Dependency`s that mention it in `dep_from_pid`. dep_from_pid: HashMap>, } impl ConflictCache { pub fn new() -> ConflictCache { ConflictCache { con_from_dep: HashMap::new(), dep_from_pid: HashMap::new(), } } pub fn find( &self, dep: &Dependency, is_active: &impl Fn(PackageId) -> Option, must_contain: Option, max_age: usize, ) -> Option<&ConflictMap> { self.con_from_dep .get(dep)? .find(is_active, must_contain, max_age) .map(|(c, _)| c) } /// Finds any known set of conflicts, if any, /// which are activated in `cx` and contain `PackageId` specified. /// If more than one are activated, then it will return /// one that will allow for the most jump-back. pub fn find_conflicting( &self, cx: &Context, dep: &Dependency, must_contain: Option, ) -> Option<&ConflictMap> { let out = self.find(dep, &|id| cx.is_active(id), must_contain, usize::MAX); if cfg!(debug_assertions) { if let Some(c) = &out { assert!(cx.is_conflicting(None, c).is_some()); if let Some(f) = must_contain { assert!(c.contains_key(&f)); } } } out } pub fn conflicting(&self, cx: &Context, dep: &Dependency) -> Option<&ConflictMap> { self.find_conflicting(cx, dep, None) } /// Adds to the cache a conflict of the form: /// `dep` is known to be unresolvable if /// all the `PackageId` entries are activated. pub fn insert(&mut self, dep: &Dependency, con: &ConflictMap) { if con.values().any(|c| c.is_public_dependency()) { // TODO: needs more info for back jumping // for now refuse to cache it. 
return; } self.con_from_dep .entry(dep.clone()) .or_insert_with(|| ConflictStoreTrie::Node(BTreeMap::new())) .insert(con.keys().cloned(), con.clone()); trace!( "{} = \"{}\" adding a skip {:?}", dep.package_name(), dep.version_req(), con ); for c in con.keys() { self.dep_from_pid .entry(*c) .or_insert_with(HashSet::new) .insert(dep.clone()); } } pub fn dependencies_conflicting_with(&self, pid: PackageId) -> Option<&HashSet> { self.dep_from_pid.get(&pid) } } cargo-0.66.0/src/cargo/core/resolver/context.rs000066400000000000000000000442031432416201200214010ustar00rootroot00000000000000use super::dep_cache::RegistryQueryer; use super::errors::ActivateResult; use super::types::{ConflictMap, ConflictReason, FeaturesSet, ResolveOpts}; use super::RequestedFeatures; use crate::core::{Dependency, PackageId, SourceId, Summary}; use crate::util::interning::InternedString; use crate::util::Graph; use anyhow::format_err; use log::debug; use std::collections::HashMap; use std::num::NonZeroU64; pub use super::encode::Metadata; pub use super::encode::{EncodableDependency, EncodablePackageId, EncodableResolve}; pub use super::resolve::Resolve; // A `Context` is basically a bunch of local resolution information which is // kept around for all `BacktrackFrame` instances. As a result, this runs the // risk of being cloned *a lot* so we want to make this as cheap to clone as // possible. #[derive(Clone)] pub struct Context { pub age: ContextAge, pub activations: Activations, /// list the features that are activated for each package pub resolve_features: im_rc::HashMap, /// get the package that will be linking to a native library by its links attribute pub links: im_rc::HashMap, /// for each package the list of names it can see, /// then for each name the exact version that name represents and whether the name is public. pub public_dependency: Option, /// a way to look up for a package in activations what packages required it /// and all of the exact deps that it fulfilled. 
pub parents: Graph>, } /// When backtracking it can be useful to know how far back to go. /// The `ContextAge` of a `Context` is a monotonically increasing counter of the number /// of decisions made to get to this state. /// Several structures store the `ContextAge` when it was added, /// to be used in `find_candidate` for backtracking. pub type ContextAge = usize; /// Find the activated version of a crate based on the name, source, and semver compatibility. /// By storing this in a hash map we ensure that there is only one /// semver compatible version of each crate. /// This all so stores the `ContextAge`. pub type ActivationsKey = (InternedString, SourceId, SemverCompatibility); pub type Activations = im_rc::HashMap; /// A type that represents when cargo treats two Versions as compatible. /// Versions `a` and `b` are compatible if their left-most nonzero digit is the /// same. #[derive(Clone, Copy, Eq, PartialEq, Hash, Debug, PartialOrd, Ord)] pub enum SemverCompatibility { Major(NonZeroU64), Minor(NonZeroU64), Patch(u64), } impl From<&semver::Version> for SemverCompatibility { fn from(ver: &semver::Version) -> Self { if let Some(m) = NonZeroU64::new(ver.major) { return SemverCompatibility::Major(m); } if let Some(m) = NonZeroU64::new(ver.minor) { return SemverCompatibility::Minor(m); } SemverCompatibility::Patch(ver.patch) } } impl PackageId { pub fn as_activations_key(self) -> ActivationsKey { (self.name(), self.source_id(), self.version().into()) } } impl Context { pub fn new(check_public_visible_dependencies: bool) -> Context { Context { age: 0, resolve_features: im_rc::HashMap::new(), links: im_rc::HashMap::new(), public_dependency: if check_public_visible_dependencies { Some(PublicDependency::new()) } else { None }, parents: Graph::new(), activations: im_rc::HashMap::new(), } } /// Activate this summary by inserting it into our list of known activations. 
/// /// The `parent` passed in here is the parent summary/dependency edge which /// cased `summary` to get activated. This may not be present for the root /// crate, for example. /// /// Returns `true` if this summary with the given features is already activated. pub fn flag_activated( &mut self, summary: &Summary, opts: &ResolveOpts, parent: Option<(&Summary, &Dependency)>, ) -> ActivateResult { let id = summary.package_id(); let age: ContextAge = self.age; match self.activations.entry(id.as_activations_key()) { im_rc::hashmap::Entry::Occupied(o) => { debug_assert_eq!( &o.get().0, summary, "cargo does not allow two semver compatible versions" ); } im_rc::hashmap::Entry::Vacant(v) => { if let Some(link) = summary.links() { if self.links.insert(link, id).is_some() { return Err(format_err!( "Attempting to resolve a dependency with more than \ one crate with links={}.\nThis will not build as \ is. Consider rebuilding the .lock file.", &*link ) .into()); } } v.insert((summary.clone(), age)); // If we've got a parent dependency which activated us, *and* // the dependency has a different source id listed than the // `summary` itself, then things get interesting. This basically // means that a `[patch]` was used to augment `dep.source_id()` // with `summary`. // // In this scenario we want to consider the activation key, as // viewed from the perspective of `dep.source_id()`, as being // fulfilled. This means that we need to add a second entry in // the activations map for the source that was patched, in // addition to the source of the actual `summary` itself. // // Without this it would be possible to have both 1.0.0 and // 1.1.0 "from crates.io" in a dependency graph if one of those // versions came from a `[patch]` source. 
if let Some((_, dep)) = parent { if dep.source_id() != id.source_id() { let key = (id.name(), dep.source_id(), id.version().into()); let prev = self.activations.insert(key, (summary.clone(), age)); if let Some((previous_summary, _)) = prev { return Err( (previous_summary.package_id(), ConflictReason::Semver).into() ); } } } return Ok(false); } } debug!("checking if {} is already activated", summary.package_id()); match &opts.features { // This returns `false` for CliFeatures just for simplicity. It // would take a bit of work to compare since they are not in the // same format as DepFeatures (and that may be expensive // performance-wise). Also, it should only occur once for a root // package. The only drawback is that it may re-activate a root // package again, which should only affect performance, but that // should be rare. Cycles should still be detected since those // will have `DepFeatures` edges. RequestedFeatures::CliFeatures(_) => Ok(false), RequestedFeatures::DepFeatures { features, uses_default_features, } => { let has_default_feature = summary.features().contains_key("default"); Ok(match self.resolve_features.get(&id) { Some(prev) => { features.is_subset(prev) && (!uses_default_features || prev.contains("default") || !has_default_feature) } None => features.is_empty() && (!uses_default_features || !has_default_feature), }) } } } /// If the package is active returns the `ContextAge` when it was added pub fn is_active(&self, id: PackageId) -> Option { self.activations .get(&id.as_activations_key()) .and_then(|(s, l)| if s.package_id() == id { Some(*l) } else { None }) } /// If the conflict reason on the package still applies returns the `ContextAge` when it was added pub fn still_applies(&self, id: PackageId, reason: &ConflictReason) -> Option { self.is_active(id).and_then(|mut max| { match reason { ConflictReason::PublicDependency(name) => { if &id == name { return Some(max); } max = std::cmp::max(max, self.is_active(*name)?); max = std::cmp::max( max, 
self.public_dependency .as_ref() .unwrap() .can_see_item(*name, id)?, ); } ConflictReason::PubliclyExports(name) => { if &id == name { return Some(max); } max = std::cmp::max(max, self.is_active(*name)?); max = std::cmp::max( max, self.public_dependency .as_ref() .unwrap() .publicly_exports_item(*name, id)?, ); } _ => {} } Some(max) }) } /// Checks whether all of `parent` and the keys of `conflicting activations` /// are still active. /// If so returns the `ContextAge` when the newest one was added. pub fn is_conflicting( &self, parent: Option, conflicting_activations: &ConflictMap, ) -> Option { let mut max = 0; if let Some(parent) = parent { max = std::cmp::max(max, self.is_active(parent)?); } for (id, reason) in conflicting_activations.iter() { max = std::cmp::max(max, self.still_applies(*id, reason)?); } Some(max) } pub fn resolve_replacements( &self, registry: &RegistryQueryer<'_>, ) -> HashMap { self.activations .values() .filter_map(|(s, _)| registry.used_replacement_for(s.package_id())) .collect() } pub fn graph(&self) -> Graph> { let mut graph: Graph> = Graph::new(); self.activations .values() .for_each(|(r, _)| graph.add(r.package_id())); for i in self.parents.iter() { graph.add(*i); for (o, e) in self.parents.edges(i) { let old_link = graph.link(*o, *i); assert!(old_link.is_empty()); *old_link = e.iter().cloned().collect(); } } graph } } impl Graph> { pub fn parents_of(&self, p: PackageId) -> impl Iterator + '_ { self.edges(&p) .map(|(grand, d)| (*grand, d.iter().any(|x| x.is_public()))) } } #[derive(Clone, Debug, Default)] pub struct PublicDependency { /// For each active package the set of all the names it can see, /// for each name the exact package that name resolves to, /// the `ContextAge` when it was first visible, /// and the `ContextAge` when it was first exported. 
inner: im_rc::HashMap< PackageId, im_rc::HashMap)>, >, } impl PublicDependency { fn new() -> Self { PublicDependency { inner: im_rc::HashMap::new(), } } fn publicly_exports(&self, candidate_pid: PackageId) -> Vec { self.inner .get(&candidate_pid) // if we have seen it before .iter() .flat_map(|x| x.values()) // all the things we have stored .filter(|x| x.2.is_some()) // as publicly exported .map(|x| x.0) .chain(Some(candidate_pid)) // but even if not we know that everything exports itself .collect() } fn publicly_exports_item( &self, candidate_pid: PackageId, target: PackageId, ) -> Option { debug_assert_ne!(candidate_pid, target); let out = self .inner .get(&candidate_pid) .and_then(|names| names.get(&target.name())) .filter(|(p, _, _)| *p == target) .and_then(|(_, _, age)| *age); debug_assert_eq!( out.is_some(), self.publicly_exports(candidate_pid).contains(&target) ); out } pub fn can_see_item(&self, candidate_pid: PackageId, target: PackageId) -> Option { self.inner .get(&candidate_pid) .and_then(|names| names.get(&target.name())) .filter(|(p, _, _)| *p == target) .map(|(_, age, _)| *age) } pub fn add_edge( &mut self, candidate_pid: PackageId, parent_pid: PackageId, is_public: bool, age: ContextAge, parents: &Graph>, ) { // one tricky part is that `candidate_pid` may already be active and // have public dependencies of its own. So we not only need to mark // `candidate_pid` as visible to its parents but also all of its existing // publicly exported dependencies. for c in self.publicly_exports(candidate_pid) { // for each (transitive) parent that can newly see `t` let mut stack = vec![(parent_pid, is_public)]; while let Some((p, public)) = stack.pop() { match self.inner.entry(p).or_default().entry(c.name()) { im_rc::hashmap::Entry::Occupied(mut o) => { // the (transitive) parent can already see something by `c`s name, it had better be `c`. assert_eq!(o.get().0, c); if o.get().2.is_some() { // The previous time the parent saw `c`, it was a public dependency. 
// So all of its parents already know about `c` // and we can save some time by stopping now. continue; } if public { // Mark that `c` has now bean seen publicly let old_age = o.get().1; o.insert((c, old_age, if public { Some(age) } else { None })); } } im_rc::hashmap::Entry::Vacant(v) => { // The (transitive) parent does not have anything by `c`s name, // so we add `c`. v.insert((c, age, if public { Some(age) } else { None })); } } // if `candidate_pid` was a private dependency of `p` then `p` parents can't see `c` thru `p` if public { // if it was public, then we add all of `p`s parents to be checked stack.extend(parents.parents_of(p)); } } } } pub fn can_add_edge( &self, b_id: PackageId, parent: PackageId, is_public: bool, parents: &Graph>, ) -> Result< (), ( ((PackageId, ConflictReason), (PackageId, ConflictReason)), Option<(PackageId, ConflictReason)>, ), > { // one tricky part is that `candidate_pid` may already be active and // have public dependencies of its own. So we not only need to check // `b_id` as visible to its parents but also all of its existing // publicly exported dependencies. for t in self.publicly_exports(b_id) { // for each (transitive) parent that can newly see `t` let mut stack = vec![(parent, is_public)]; while let Some((p, public)) = stack.pop() { // TODO: don't look at the same thing more than once if let Some(o) = self.inner.get(&p).and_then(|x| x.get(&t.name())) { if o.0 != t { // the (transitive) parent can already see a different version by `t`s name. // So, adding `b` will cause `p` to have a public dependency conflict on `t`. return Err(( (o.0, ConflictReason::PublicDependency(p)), // p can see the other version and (parent, ConflictReason::PublicDependency(p)), // p can see us )) .map_err(|e| { if t == b_id { (e, None) } else { (e, Some((t, ConflictReason::PubliclyExports(b_id)))) } }); } if o.2.is_some() { // The previous time the parent saw `t`, it was a public dependency. 
// So all of its parents already know about `t` // and we can save some time by stopping now. continue; } } // if `b` was a private dependency of `p` then `p` parents can't see `t` thru `p` if public { // if it was public, then we add all of `p`s parents to be checked stack.extend(parents.parents_of(p)); } } } Ok(()) } } cargo-0.66.0/src/cargo/core/resolver/dep_cache.rs000066400000000000000000000521031432416201200216060ustar00rootroot00000000000000//! There are 2 sources of facts for the resolver: //! //! - The `Registry` tells us for a `Dependency` what versions are available to fulfil it. //! - The `Summary` tells us for a version (and features) what dependencies need to be fulfilled for it to be activated. //! //! These constitute immutable facts, the soled ground truth that all other inference depends on. //! Theoretically this could all be enumerated ahead of time, but we want to be lazy and only //! look up things we need to. The compromise is to cache the results as they are computed. //! //! This module impl that cache in all the gory details use crate::core::resolver::context::Context; use crate::core::resolver::errors::describe_path_in_context; use crate::core::resolver::types::{ConflictReason, DepInfo, FeaturesSet}; use crate::core::resolver::{ ActivateError, ActivateResult, CliFeatures, RequestedFeatures, ResolveOpts, VersionOrdering, VersionPreferences, }; use crate::core::{ Dependency, FeatureValue, PackageId, PackageIdSpec, QueryKind, Registry, Summary, }; use crate::util::errors::CargoResult; use crate::util::interning::InternedString; use anyhow::Context as _; use log::debug; use std::collections::{BTreeSet, HashMap, HashSet}; use std::rc::Rc; use std::task::Poll; pub struct RegistryQueryer<'a> { pub registry: &'a mut (dyn Registry + 'a), replacements: &'a [(PackageIdSpec, Dependency)], version_prefs: &'a VersionPreferences, /// If set the list of dependency candidates will be sorted by minimal /// versions first. 
That allows `cargo update -Z minimal-versions` which will /// specify minimum dependency versions to be used. minimal_versions: bool, /// a cache of `Candidate`s that fulfil a `Dependency` registry_cache: HashMap>>>, /// a cache of `Dependency`s that are required for a `Summary` summary_cache: HashMap< (Option, Summary, ResolveOpts), (Rc<(HashSet, Rc>)>, bool), >, /// all the cases we ended up using a supplied replacement used_replacements: HashMap, } impl<'a> RegistryQueryer<'a> { pub fn new( registry: &'a mut dyn Registry, replacements: &'a [(PackageIdSpec, Dependency)], version_prefs: &'a VersionPreferences, minimal_versions: bool, ) -> Self { RegistryQueryer { registry, replacements, version_prefs, minimal_versions, registry_cache: HashMap::new(), summary_cache: HashMap::new(), used_replacements: HashMap::new(), } } pub fn reset_pending(&mut self) -> bool { let mut all_ready = true; self.registry_cache.retain(|_, r| { if !r.is_ready() { all_ready = false; } r.is_ready() }); self.summary_cache.retain(|_, (_, r)| { if !*r { all_ready = false; } *r }); all_ready } pub fn used_replacement_for(&self, p: PackageId) -> Option<(PackageId, PackageId)> { self.used_replacements.get(&p).map(|r| (p, r.package_id())) } pub fn replacement_summary(&self, p: PackageId) -> Option<&Summary> { self.used_replacements.get(&p) } /// Queries the `registry` to return a list of candidates for `dep`. /// /// This method is the location where overrides are taken into account. If /// any candidates are returned which match an override then the override is /// applied by performing a second query for what the override should /// return. 
pub fn query(&mut self, dep: &Dependency) -> Poll>>> { if let Some(out) = self.registry_cache.get(dep).cloned() { return out.map(Result::Ok); } let mut ret = Vec::new(); let ready = self.registry.query(dep, QueryKind::Exact, &mut |s| { ret.push(s); })?; if ready.is_pending() { self.registry_cache.insert(dep.clone(), Poll::Pending); return Poll::Pending; } for summary in ret.iter_mut() { let mut potential_matches = self .replacements .iter() .filter(|&&(ref spec, _)| spec.matches(summary.package_id())); let &(ref spec, ref dep) = match potential_matches.next() { None => continue, Some(replacement) => replacement, }; debug!( "found an override for {} {}", dep.package_name(), dep.version_req() ); let mut summaries = match self.registry.query_vec(dep, QueryKind::Exact)? { Poll::Ready(s) => s.into_iter(), Poll::Pending => { self.registry_cache.insert(dep.clone(), Poll::Pending); return Poll::Pending; } }; let s = summaries.next().ok_or_else(|| { anyhow::format_err!( "no matching package for override `{}` found\n\ location searched: {}\n\ version required: {}", spec, dep.source_id(), dep.version_req() ) })?; let summaries = summaries.collect::>(); if !summaries.is_empty() { let bullets = summaries .iter() .map(|s| format!(" * {}", s.package_id())) .collect::>(); return Poll::Ready(Err(anyhow::anyhow!( "the replacement specification `{}` matched \ multiple packages:\n * {}\n{}", spec, s.package_id(), bullets.join("\n") ))); } // The dependency should be hard-coded to have the same name and an // exact version requirement, so both of these assertions should // never fail. 
assert_eq!(s.version(), summary.version()); assert_eq!(s.name(), summary.name()); let replace = if s.source_id() == summary.source_id() { debug!("Preventing\n{:?}\nfrom replacing\n{:?}", summary, s); None } else { Some(s) }; let matched_spec = spec.clone(); // Make sure no duplicates if let Some(&(ref spec, _)) = potential_matches.next() { return Poll::Ready(Err(anyhow::anyhow!( "overlapping replacement specifications found:\n\n \ * {}\n * {}\n\nboth specifications match: {}", matched_spec, spec, summary.package_id() ))); } for dep in summary.dependencies() { debug!("\t{} => {}", dep.package_name(), dep.version_req()); } if let Some(r) = replace { self.used_replacements.insert(summary.package_id(), r); } } // When we attempt versions for a package we'll want to do so in a sorted fashion to pick // the "best candidates" first. VersionPreferences implements this notion. self.version_prefs.sort_summaries( &mut ret, if self.minimal_versions { VersionOrdering::MinimumVersionsFirst } else { VersionOrdering::MaximumVersionsFirst }, ); let out = Poll::Ready(Rc::new(ret)); self.registry_cache.insert(dep.clone(), out.clone()); out.map(Result::Ok) } /// Find out what dependencies will be added by activating `candidate`, /// with features described in `opts`. Then look up in the `registry` /// the candidates that will fulfil each of these dependencies, as it is the /// next obvious question. pub fn build_deps( &mut self, cx: &Context, parent: Option, candidate: &Summary, opts: &ResolveOpts, ) -> ActivateResult, Rc>)>> { // if we have calculated a result before, then we can just return it, // as it is a "pure" query of its arguments. if let Some(out) = self .summary_cache .get(&(parent, candidate.clone(), opts.clone())) { return Ok(out.0.clone()); } // First, figure out our set of dependencies based on the requested set // of features. This also calculates what features we're going to enable // for our own dependencies. 
let (used_features, deps) = resolve_features(parent, candidate, opts)?; // Next, transform all dependencies into a list of possible candidates // which can satisfy that dependency. let mut all_ready = true; let mut deps = deps .into_iter() .filter_map(|(dep, features)| match self.query(&dep) { Poll::Ready(Ok(candidates)) => Some(Ok((dep, candidates, features))), Poll::Pending => { all_ready = false; // we can ignore Pending deps, resolve will be repeatedly called // until there are none to ignore None } Poll::Ready(Err(e)) => Some(Err(e).with_context(|| { format!( "failed to get `{}` as a dependency of {}", dep.package_name(), describe_path_in_context(cx, &candidate.package_id()), ) })), }) .collect::>>()?; // Attempt to resolve dependencies with fewer candidates before trying // dependencies with more candidates. This way if the dependency with // only one candidate can't be resolved we don't have to do a bunch of // work before we figure that out. deps.sort_by_key(|&(_, ref a, _)| a.len()); let out = Rc::new((used_features, Rc::new(deps))); // If we succeed we add the result to the cache so we can use it again next time. // We don't cache the failure cases as they don't impl Clone. self.summary_cache.insert( (parent, candidate.clone(), opts.clone()), (out.clone(), all_ready), ); Ok(out) } } /// Returns the features we ended up using and /// all dependencies and the features we want from each of them. pub fn resolve_features<'b>( parent: Option, s: &'b Summary, opts: &'b ResolveOpts, ) -> ActivateResult<(HashSet, Vec<(Dependency, FeaturesSet)>)> { // First, filter by dev-dependencies. let deps = s.dependencies(); let deps = deps.iter().filter(|d| d.is_transitive() || opts.dev_deps); let reqs = build_requirements(parent, s, opts)?; let mut ret = Vec::new(); let default_dep = BTreeSet::new(); let mut valid_dep_names = HashSet::new(); // Next, collect all actually enabled dependencies and their features. 
for dep in deps { // Skip optional dependencies, but not those enabled through a // feature if dep.is_optional() && !reqs.deps.contains_key(&dep.name_in_toml()) { continue; } valid_dep_names.insert(dep.name_in_toml()); // So we want this dependency. Move the features we want from // `feature_deps` to `ret` and register ourselves as using this // name. let mut base = reqs .deps .get(&dep.name_in_toml()) .unwrap_or(&default_dep) .clone(); base.extend(dep.features().iter()); ret.push((dep.clone(), Rc::new(base))); } // This is a special case for command-line `--features // dep_name/feat_name` where `dep_name` does not exist. All other // validation is done either in `build_requirements` or // `build_feature_map`. if parent.is_none() { for dep_name in reqs.deps.keys() { if !valid_dep_names.contains(dep_name) { let e = RequirementError::MissingDependency(*dep_name); return Err(e.into_activate_error(parent, s)); } } } Ok((reqs.into_features(), ret)) } /// Takes requested features for a single package from the input `ResolveOpts` and /// recurses to find all requested features, dependencies and requested /// dependency features in a `Requirements` object, returning it to the resolver. 
fn build_requirements<'a, 'b: 'a>( parent: Option, s: &'a Summary, opts: &'b ResolveOpts, ) -> ActivateResult> { let mut reqs = Requirements::new(s); let handle_default = |uses_default_features, reqs: &mut Requirements<'_>| { if uses_default_features && s.features().contains_key("default") { if let Err(e) = reqs.require_feature(InternedString::new("default")) { return Err(e.into_activate_error(parent, s)); } } Ok(()) }; match &opts.features { RequestedFeatures::CliFeatures(CliFeatures { features, all_features, uses_default_features, }) => { if *all_features { for key in s.features().keys() { if let Err(e) = reqs.require_feature(*key) { return Err(e.into_activate_error(parent, s)); } } } for fv in features.iter() { if let Err(e) = reqs.require_value(fv) { return Err(e.into_activate_error(parent, s)); } } handle_default(*uses_default_features, &mut reqs)?; } RequestedFeatures::DepFeatures { features, uses_default_features, } => { for feature in features.iter() { if let Err(e) = reqs.require_feature(*feature) { return Err(e.into_activate_error(parent, s)); } } handle_default(*uses_default_features, &mut reqs)?; } } Ok(reqs) } /// Set of feature and dependency requirements for a package. #[derive(Debug)] struct Requirements<'a> { summary: &'a Summary, /// The deps map is a mapping of dependency name to list of features enabled. /// /// The resolver will activate all of these dependencies, with the given /// features enabled. deps: HashMap>, /// The set of features enabled on this package which is later used when /// compiling to instruct the code what features were enabled. features: HashSet, } /// An error for a requirement. /// /// This will later be converted to an `ActivateError` depending on whether or /// not this is a dependency or a root package. enum RequirementError { /// The package does not have the requested feature. MissingFeature(InternedString), /// The package does not have the requested dependency. 
MissingDependency(InternedString), /// A feature has a direct cycle to itself. /// /// Note that cycles through multiple features are allowed (but perhaps /// they shouldn't be?). Cycle(InternedString), } impl Requirements<'_> { fn new(summary: &Summary) -> Requirements<'_> { Requirements { summary, deps: HashMap::new(), features: HashSet::new(), } } fn into_features(self) -> HashSet { self.features } fn require_dep_feature( &mut self, package: InternedString, feat: InternedString, weak: bool, ) -> Result<(), RequirementError> { // If `package` is indeed an optional dependency then we activate the // feature named `package`, but otherwise if `package` is a required // dependency then there's no feature associated with it. if !weak && self .summary .dependencies() .iter() .any(|dep| dep.name_in_toml() == package && dep.is_optional()) { self.require_feature(package)?; } self.deps.entry(package).or_default().insert(feat); Ok(()) } fn require_dependency(&mut self, pkg: InternedString) { self.deps.entry(pkg).or_default(); } fn require_feature(&mut self, feat: InternedString) -> Result<(), RequirementError> { if !self.features.insert(feat) { // Already seen this feature. return Ok(()); } let fvs = match self.summary.features().get(&feat) { Some(fvs) => fvs, None => return Err(RequirementError::MissingFeature(feat)), }; for fv in fvs { if let FeatureValue::Feature(dep_feat) = fv { if *dep_feat == feat { return Err(RequirementError::Cycle(feat)); } } self.require_value(fv)?; } Ok(()) } fn require_value(&mut self, fv: &FeatureValue) -> Result<(), RequirementError> { match fv { FeatureValue::Feature(feat) => self.require_feature(*feat)?, FeatureValue::Dep { dep_name } => self.require_dependency(*dep_name), FeatureValue::DepFeature { dep_name, dep_feature, // Weak features are always activated in the dependency // resolver. They will be narrowed inside the new feature // resolver. 
weak, } => self.require_dep_feature(*dep_name, *dep_feature, *weak)?, }; Ok(()) } } impl RequirementError { fn into_activate_error(self, parent: Option, summary: &Summary) -> ActivateError { match self { RequirementError::MissingFeature(feat) => { let deps: Vec<_> = summary .dependencies() .iter() .filter(|dep| dep.name_in_toml() == feat) .collect(); if deps.is_empty() { return match parent { None => ActivateError::Fatal(anyhow::format_err!( "Package `{}` does not have the feature `{}`", summary.package_id(), feat )), Some(p) => ActivateError::Conflict( p, ConflictReason::MissingFeatures(feat.to_string()), ), }; } if deps.iter().any(|dep| dep.is_optional()) { match parent { None => ActivateError::Fatal(anyhow::format_err!( "Package `{}` does not have feature `{}`. It has an optional dependency \ with that name, but that dependency uses the \"dep:\" \ syntax in the features table, so it does not have an implicit feature with that name.", summary.package_id(), feat )), Some(p) => ActivateError::Conflict( p, ConflictReason::NonImplicitDependencyAsFeature(feat), ), } } else { match parent { None => ActivateError::Fatal(anyhow::format_err!( "Package `{}` does not have feature `{}`. It has a required dependency \ with that name, but only optional dependencies can be used as features.", summary.package_id(), feat )), Some(p) => ActivateError::Conflict( p, ConflictReason::RequiredDependencyAsFeature(feat), ), } } } RequirementError::MissingDependency(dep_name) => { match parent { None => ActivateError::Fatal(anyhow::format_err!( "package `{}` does not have a dependency named `{}`", summary.package_id(), dep_name )), // This code path currently isn't used, since `foo/bar` // and `dep:` syntax is not allowed in a dependency. 
Some(p) => ActivateError::Conflict( p, ConflictReason::MissingFeatures(dep_name.to_string()), ), } } RequirementError::Cycle(feat) => ActivateError::Fatal(anyhow::format_err!( "cyclic feature dependency: feature `{}` depends on itself", feat )), } } } cargo-0.66.0/src/cargo/core/resolver/encode.rs000066400000000000000000000646071432416201200211640ustar00rootroot00000000000000//! Definition of how to encode a `Resolve` into a TOML `Cargo.lock` file //! //! This module contains all machinery necessary to parse a `Resolve` from a //! `Cargo.lock` as well as serialize a `Resolve` to a `Cargo.lock`. //! //! ## Changing `Cargo.lock` //! //! In general Cargo is quite conservative about changing the format of //! `Cargo.lock`. Usage of new features in Cargo can change `Cargo.lock` at any //! time, but otherwise changing the serialization of `Cargo.lock` is a //! difficult operation to do that we typically avoid. //! //! The main problem with changing the format of `Cargo.lock` is that it can //! cause quite a bad experience for end users who use different versions of //! Cargo. If every PR to a project oscillates between the stable channel's //! encoding of Cargo.lock and the nightly channel's encoding then that's a //! pretty bad experience. //! //! We do, however, want to change `Cargo.lock` over time. (and we have!). To do //! this the rules that we currently have are: //! //! * Add support for the new format to Cargo. This involves code changes in //! Cargo itself, likely by adding a new variant of `ResolveVersion` and //! branching on that where necessary. This is accompanied with tests in the //! `lockfile_compat` module. //! //! * Do not update `ResolveVersion::default()`. The new lockfile format will //! not be used yet. //! //! * Preserve the new format if found. This means that if Cargo finds the new //! version it'll keep using it, but otherwise it continues to use whatever //! format it previously found. //! //! * Wait a "long time". 
This is at least until the changes here hit stable //! Rust. Often though we wait a little longer to let the changes percolate //! into one or two older stable releases. //! //! * Change the return value of `ResolveVersion::default()` to the new format. //! This will cause new lock files to use the latest encoding as well as //! causing any operation which updates the lock file to update to the new //! format. //! //! This migration scheme in general means that Cargo we'll get *support* for a //! new format into Cargo ASAP, but it won't be exercised yet (except in Cargo's //! own tests). Eventually when stable/beta/nightly all have support for the new //! format (and maybe a few previous stable versions) we flip the switch. //! Projects on nightly will quickly start seeing changes, but //! stable/beta/nightly will all understand this new format and will preserve //! it. //! //! While this does mean that projects' `Cargo.lock` changes over time, it's //! typically a pretty minimal effort change that's just "check in what's //! there". //! //! ## Historical changes to `Cargo.lock` //! //! Listed from most recent to oldest, these are some of the changes we've made //! to `Cargo.lock`'s serialization format: //! //! * A `version` marker is now at the top of the lock file which is a way for //! super-old Cargos (at least since this was implemented) to give a formal //! error if they see a lock file from a super-future Cargo. Additionally as //! part of this change the encoding of `git` dependencies in lock files //! changed where `branch = "master"` is now encoded with `branch=master` //! instead of with nothing at all. //! //! * The entries in `dependencies` arrays have been shortened and the //! `checksum` field now shows up directly in `[[package]]` instead of always //! at the end of the file. The goal of this change was to ideally reduce //! merge conflicts being generated on `Cargo.lock`. Updating a version of a //! 
package now only updates two lines in the file, the checksum and the //! version number, most of the time. Dependency edges are specified in a //! compact form where possible where just the name is listed. The //! version/source on dependency edges are only listed if necessary to //! disambiguate which version or which source is in use. //! //! * A comment at the top of the file indicates that the file is a generated //! file and contains the special symbol `@generated` to indicate to common //! review tools that it's a generated file. //! //! * A `[root]` entry for the "root crate" has been removed and instead now //! included in `[[package]]` like everything else. //! //! * All packages from registries contain a `checksum` which is a sha256 //! checksum of the tarball the package is associated with. This is all stored //! in the `[metadata]` table of `Cargo.lock` which all versions of Cargo //! since 1.0 have preserved. The goal of this was to start recording //! checksums so mirror sources can be verified. //! //! ## Other oddities about `Cargo.lock` //! //! There's a few other miscellaneous weird things about `Cargo.lock` that you //! may want to be aware of when reading this file: //! //! * All packages have a `source` listed to indicate where they come from. For //! `path` dependencies, however, no `source` is listed. There's no way we //! could emit a filesystem path name and have that be portable across //! systems, so all packages from a `path` are not listed with a `source`. //! Note that this also means that all packages with `path` sources must have //! unique names. //! //! * The `[metadata]` table in `Cargo.lock` is intended to be a generic mapping //! of strings to strings that's simply preserved by Cargo. This was a very //! early effort to be forward compatible against changes to `Cargo.lock`'s //! format. This is nowadays sort of deemed a bad idea though and we don't //! really use it that much except for `checksum`s historically. It's not //! 
really recommended to use this. //! //! * The actual literal on-disk serialiation is found in //! `src/cargo/ops/lockfile.rs` which basically renders a `toml::Value` in a //! special fashion to make sure we have strict control over the on-disk //! format. use super::{Resolve, ResolveVersion}; use crate::core::{Dependency, GitReference, Package, PackageId, SourceId, Workspace}; use crate::util::errors::CargoResult; use crate::util::interning::InternedString; use crate::util::{internal, Graph}; use anyhow::{bail, Context as _}; use log::debug; use serde::de; use serde::ser; use serde::{Deserialize, Serialize}; use std::collections::{BTreeMap, HashMap, HashSet}; use std::fmt; use std::str::FromStr; /// The `Cargo.lock` structure. #[derive(Serialize, Deserialize, Debug)] pub struct EncodableResolve { version: Option, package: Option>, /// `root` is optional to allow backward compatibility. root: Option, metadata: Option, #[serde(default, skip_serializing_if = "Patch::is_empty")] patch: Patch, } #[derive(Serialize, Deserialize, Debug, Default)] struct Patch { unused: Vec, } pub type Metadata = BTreeMap; impl EncodableResolve { /// Convert a `Cargo.lock` to a Resolve. /// /// Note that this `Resolve` is not "complete". For example, the /// dependencies do not know the difference between regular/dev/build /// dependencies, so they are not filled in. It also does not include /// `features`. Care should be taken when using this Resolve. One of the /// primary uses is to be used with `resolve_with_previous` to guide the /// resolver to create a complete Resolve. 
pub fn into_resolve(self, original: &str, ws: &Workspace<'_>) -> CargoResult { let path_deps = build_path_deps(ws)?; let mut checksums = HashMap::new(); let mut version = match self.version { Some(3) => ResolveVersion::V3, Some(n) => bail!( "lock file version `{}` was found, but this version of Cargo \ does not understand this lock file, perhaps Cargo needs \ to be updated?", n, ), // Historically Cargo did not have a version indicator in lock // files, so this could either be the V1 or V2 encoding. We assume // an older format is being parsed until we see so otherwise. None => ResolveVersion::V1, }; let packages = { let mut packages = self.package.unwrap_or_default(); if let Some(root) = self.root { packages.insert(0, root); } packages }; // `PackageId`s in the lock file don't include the `source` part // for workspace members, so we reconstruct proper IDs. let live_pkgs = { let mut live_pkgs = HashMap::new(); let mut all_pkgs = HashSet::new(); for pkg in packages.iter() { let enc_id = EncodablePackageId { name: pkg.name.clone(), version: Some(pkg.version.clone()), source: pkg.source, }; if !all_pkgs.insert(enc_id.clone()) { anyhow::bail!("package `{}` is specified twice in the lockfile", pkg.name); } let id = match pkg.source.as_ref().or_else(|| path_deps.get(&pkg.name)) { // We failed to find a local package in the workspace. // It must have been removed and should be ignored. None => { debug!("path dependency now missing {} v{}", pkg.name, pkg.version); continue; } Some(&source) => PackageId::new(&pkg.name, &pkg.version, source)?, }; // If a package has a checksum listed directly on it then record // that here, and we also bump our version up to 2 since V1 // didn't ever encode this field. 
if let Some(cksum) = &pkg.checksum { version = version.max(ResolveVersion::V2); checksums.insert(id, Some(cksum.clone())); } assert!(live_pkgs.insert(enc_id, (id, pkg)).is_none()) } live_pkgs }; // When decoding a V2 version the edges in `dependencies` aren't // guaranteed to have either version or source information. This `map` // is used to find package ids even if dependencies have missing // information. This map is from name to version to source to actual // package ID. (various levels to drill down step by step) let mut map = HashMap::new(); for (id, _) in live_pkgs.values() { map.entry(id.name().as_str()) .or_insert_with(HashMap::new) .entry(id.version().to_string()) .or_insert_with(HashMap::new) .insert(id.source_id(), *id); } let mut lookup_id = |enc_id: &EncodablePackageId| -> Option { // The name of this package should always be in the larger list of // all packages. let by_version = map.get(enc_id.name.as_str())?; // If the version is provided, look that up. Otherwise if the // version isn't provided this is a V2 manifest and we should only // have one version for this name. If we have more than one version // for the name then it's ambiguous which one we'd use. That // shouldn't ever actually happen but in theory bad git merges could // produce invalid lock files, so silently ignore these cases. let by_source = match &enc_id.version { Some(version) => by_version.get(version)?, None => { version = version.max(ResolveVersion::V2); if by_version.len() == 1 { by_version.values().next().unwrap() } else { return None; } } }; // This is basically the same as above. Note though that `source` is // always missing for path dependencies regardless of serialization // format. That means we have to handle the `None` case a bit more // carefully. match &enc_id.source { Some(source) => by_source.get(source).cloned(), None => { // Look through all possible packages ids for this // name/version. 
If there's only one `path` dependency then // we are hardcoded to use that since `path` dependencies // can't have a source listed. let mut path_packages = by_source.values().filter(|p| p.source_id().is_path()); if let Some(path) = path_packages.next() { if path_packages.next().is_some() { return None; } Some(*path) // ... otherwise if there's only one then we must be // implicitly using that one due to a V2 serialization of // the lock file } else if by_source.len() == 1 { let id = by_source.values().next().unwrap(); version = version.max(ResolveVersion::V2); Some(*id) // ... and failing that we probably had a bad git merge of // `Cargo.lock` or something like that, so just ignore this. } else { None } } } }; let mut g = Graph::new(); for &(ref id, _) in live_pkgs.values() { g.add(*id); } for &(ref id, pkg) in live_pkgs.values() { let deps = match pkg.dependencies { Some(ref deps) => deps, None => continue, }; for edge in deps.iter() { if let Some(to_depend_on) = lookup_id(edge) { g.link(*id, to_depend_on); } } } let replacements = { let mut replacements = HashMap::new(); for &(ref id, pkg) in live_pkgs.values() { if let Some(ref replace) = pkg.replace { assert!(pkg.dependencies.is_none()); if let Some(replace_id) = lookup_id(replace) { replacements.insert(*id, replace_id); } } } replacements }; let mut metadata = self.metadata.unwrap_or_default(); // In the V1 serialization formats all checksums were listed in the lock // file in the `[metadata]` section, so if we're still V1 then look for // that here. 
let prefix = "checksum "; let mut to_remove = Vec::new(); for (k, v) in metadata.iter().filter(|p| p.0.starts_with(prefix)) { to_remove.push(k.to_string()); let k = &k[prefix.len()..]; let enc_id: EncodablePackageId = k .parse() .with_context(|| internal("invalid encoding of checksum in lockfile"))?; let id = match lookup_id(&enc_id) { Some(id) => id, _ => continue, }; let v = if v == "" { None } else { Some(v.to_string()) }; checksums.insert(id, v); } // If `checksum` was listed in `[metadata]` but we were previously // listed as `V2` then assume some sort of bad git merge happened, so // discard all checksums and let's regenerate them later. if !to_remove.is_empty() && version >= ResolveVersion::V2 { checksums.drain(); } for k in to_remove { metadata.remove(&k); } let mut unused_patches = Vec::new(); for pkg in self.patch.unused { let id = match pkg.source.as_ref().or_else(|| path_deps.get(&pkg.name)) { Some(&src) => PackageId::new(&pkg.name, &pkg.version, src)?, None => continue, }; unused_patches.push(id); } // We have a curious issue where in the "v1 format" we buggily had a // trailing blank line at the end of lock files under some specific // conditions. // // Cargo is trying to write new lockfies in the "v2 format" but if you // have no dependencies, for example, then the lockfile encoded won't // really have any indicator that it's in the new format (no // dependencies or checksums listed). This means that if you type `cargo // new` followed by `cargo build` it will generate a "v2 format" lock // file since none previously existed. When reading this on the next // `cargo build`, however, it generates a new lock file because when // reading in that lockfile we think it's the v1 format. // // To help fix this issue we special case here. If our lockfile only has // one trailing newline, not two, *and* it only has one package, then // this is actually the v2 format. 
if original.ends_with('\n') && !original.ends_with("\n\n") && version == ResolveVersion::V1 && g.iter().count() == 1 { version = ResolveVersion::V2; } Ok(Resolve::new( g, replacements, HashMap::new(), checksums, metadata, unused_patches, version, HashMap::new(), )) } } fn build_path_deps(ws: &Workspace<'_>) -> CargoResult> { // If a crate is **not** a path source, then we're probably in a situation // such as `cargo install` with a lock file from a remote dependency. In // that case we don't need to fixup any path dependencies (as they're not // actually path dependencies any more), so we ignore them. let members = ws .members() .filter(|p| p.package_id().source_id().is_path()) .collect::>(); let mut ret = HashMap::new(); let mut visited = HashSet::new(); for member in members.iter() { ret.insert( member.package_id().name().to_string(), member.package_id().source_id(), ); visited.insert(member.package_id().source_id()); } for member in members.iter() { build_pkg(member, ws, &mut ret, &mut visited); } for deps in ws.root_patch()?.values() { for dep in deps { build_dep(dep, ws, &mut ret, &mut visited); } } for &(_, ref dep) in ws.root_replace() { build_dep(dep, ws, &mut ret, &mut visited); } return Ok(ret); fn build_pkg( pkg: &Package, ws: &Workspace<'_>, ret: &mut HashMap, visited: &mut HashSet, ) { for dep in pkg.dependencies() { build_dep(dep, ws, ret, visited); } } fn build_dep( dep: &Dependency, ws: &Workspace<'_>, ret: &mut HashMap, visited: &mut HashSet, ) { let id = dep.source_id(); if visited.contains(&id) || !id.is_path() { return; } let path = match id.url().to_file_path() { Ok(p) => p.join("Cargo.toml"), Err(_) => return, }; let pkg = match ws.load(&path) { Ok(p) => p, Err(_) => return, }; ret.insert(pkg.name().to_string(), pkg.package_id().source_id()); visited.insert(pkg.package_id().source_id()); build_pkg(&pkg, ws, ret, visited); } } impl Patch { fn is_empty(&self) -> bool { self.unused.is_empty() } } #[derive(Serialize, Deserialize, Debug, 
PartialOrd, Ord, PartialEq, Eq)] pub struct EncodableDependency { name: String, version: String, source: Option, checksum: Option, dependencies: Option>, replace: Option, } #[derive(Debug, PartialOrd, Ord, PartialEq, Eq, Hash, Clone)] pub struct EncodablePackageId { name: String, version: Option, source: Option, } impl fmt::Display for EncodablePackageId { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.name)?; if let Some(s) = &self.version { write!(f, " {}", s)?; } if let Some(s) = &self.source { write!(f, " ({})", s.as_url())?; } Ok(()) } } impl FromStr for EncodablePackageId { type Err = anyhow::Error; fn from_str(s: &str) -> CargoResult { let mut s = s.splitn(3, ' '); let name = s.next().unwrap(); let version = s.next(); let source_id = match s.next() { Some(s) => { if s.starts_with('(') && s.ends_with(')') { Some(SourceId::from_url(&s[1..s.len() - 1])?) } else { anyhow::bail!("invalid serialized PackageId") } } None => None, }; Ok(EncodablePackageId { name: name.to_string(), version: version.map(|v| v.to_string()), source: source_id, }) } } impl ser::Serialize for EncodablePackageId { fn serialize(&self, s: S) -> Result where S: ser::Serializer, { s.collect_str(self) } } impl<'de> de::Deserialize<'de> for EncodablePackageId { fn deserialize(d: D) -> Result where D: de::Deserializer<'de>, { String::deserialize(d).and_then(|string| { string .parse::() .map_err(de::Error::custom) }) } } impl ser::Serialize for Resolve { fn serialize(&self, s: S) -> Result where S: ser::Serializer, { let mut ids: Vec<_> = self.iter().collect(); ids.sort(); let state = EncodeState::new(self); let encodable = ids .iter() .map(|&id| encodable_resolve_node(id, self, &state)) .collect::>(); let mut metadata = self.metadata().clone(); if self.version() == ResolveVersion::V1 { for &id in ids.iter().filter(|id| !id.source_id().is_path()) { let checksum = match self.checksums()[&id] { Some(ref s) => &s[..], None => "", }; let id = encodable_package_id(id, 
&state, self.version());
                metadata.insert(format!("checksum {}", id.to_string()), checksum.to_string());
            }
        }

        let metadata = if metadata.is_empty() {
            None
        } else {
            Some(metadata)
        };

        let patch = Patch {
            unused: self
                .unused_patches()
                .iter()
                .map(|id| EncodableDependency {
                    name: id.name().to_string(),
                    version: id.version().to_string(),
                    source: encode_source(id.source_id()),
                    dependencies: None,
                    replace: None,
                    // V2+ lockfiles record checksums inline on the package
                    // entry; V1 keeps them in the `[metadata]` table instead.
                    checksum: if self.version() >= ResolveVersion::V2 {
                        self.checksums().get(id).and_then(|x| x.clone())
                    } else {
                        None
                    },
                })
                .collect(),
        };
        EncodableResolve {
            package: Some(encodable),
            root: None,
            metadata,
            patch,
            // The `version` field is only emitted for V3+ lockfiles; older
            // formats are recognized by its absence.
            version: match self.version() {
                ResolveVersion::V3 => Some(3),
                ResolveVersion::V2 | ResolveVersion::V1 => None,
            },
        }
        .serialize(s)
    }
}

/// Precomputed state shared while encoding every node of a `Resolve`.
pub struct EncodeState<'a> {
    /// For V2+ lockfiles: package name -> version -> count of packages with
    /// that name/version pair. Used to decide when the `version` and `source`
    /// fields of an `EncodablePackageId` are unambiguous and can be omitted.
    /// `None` for V1 lockfiles, which always write both fields.
    counts: Option<HashMap<InternedString, HashMap<&'a semver::Version, usize>>>,
}

impl<'a> EncodeState<'a> {
    /// Builds the encode state for `resolve`, counting name/version
    /// duplicates only when the lockfile format supports omission (V2+).
    pub fn new(resolve: &'a Resolve) -> EncodeState<'a> {
        let counts = if resolve.version() >= ResolveVersion::V2 {
            let mut map = HashMap::new();
            for id in resolve.iter() {
                let slot = map
                    .entry(id.name())
                    .or_insert_with(HashMap::new)
                    .entry(id.version())
                    .or_insert(0);
                *slot += 1;
            }
            Some(map)
        } else {
            None
        };
        EncodeState { counts }
    }
}

/// Converts one resolved package into its serializable lockfile form,
/// including its (sorted) dependency list or its replacement, if any.
fn encodable_resolve_node(
    id: PackageId,
    resolve: &Resolve,
    state: &EncodeState<'_>,
) -> EncodableDependency {
    let (replace, deps) = match resolve.replacement(id) {
        Some(id) => (
            Some(encodable_package_id(id, state, resolve.version())),
            None,
        ),
        None => {
            let mut deps = resolve
                .deps_not_replaced(id)
                .map(|(id, _)| encodable_package_id(id, state, resolve.version()))
                .collect::<Vec<_>>();
            // Sorted for deterministic lockfile output.
            deps.sort();
            (None, Some(deps))
        }
    };

    EncodableDependency {
        name: id.name().to_string(),
        version: id.version().to_string(),
        source: encode_source(id.source_id()),
        dependencies: deps,
        replace,
        checksum: if resolve.version() >= ResolveVersion::V2 {
            resolve.checksums().get(&id).and_then(|s| s.clone())
        } else {
            None
        },
    }
}

/// Converts a `PackageId` into its serializable lockfile form, omitting the
/// `version`/`source` fields when the V2+ duplicate counts prove they are
/// unambiguous.
pub fn encodable_package_id(
    id: PackageId,
    state: &EncodeState<'_>,
    resolve_version: ResolveVersion,
) -> EncodablePackageId {
    let mut version = Some(id.version().to_string());
    let mut id_to_encode = id.source_id();
    // Lockfiles before V3 encoded `branch = "master"` as the default branch
    // for git sources; preserve that for backwards compatibility.
    if resolve_version <= ResolveVersion::V2 {
        if let Some(GitReference::Branch(b)) = id_to_encode.git_reference() {
            if b == "master" {
                id_to_encode =
                    SourceId::for_git(id_to_encode.url(), GitReference::DefaultBranch).unwrap();
            }
        }
    }
    let mut source = encode_source(id_to_encode).map(|s| s.with_precise(None));
    if let Some(counts) = &state.counts {
        let version_counts = &counts[&id.name()];
        if version_counts[&id.version()] == 1 {
            source = None;
            if version_counts.len() == 1 {
                version = None;
            }
        }
    }
    EncodablePackageId {
        name: id.name().to_string(),
        version,
        source,
    }
}

/// Path sources are never written to the lockfile (they are local and
/// machine-specific); every other source id is emitted as-is.
fn encode_source(id: SourceId) -> Option<SourceId> {
    if id.is_path() {
        None
    } else {
        Some(id)
    }
}
cargo-0.66.0/src/cargo/core/resolver/errors.rs000066400000000000000000000411121432416201200212250ustar00rootroot00000000000000use std::fmt;
use std::task::Poll;

use crate::core::{Dependency, PackageId, QueryKind, Registry, Summary};
use crate::util::lev_distance::lev_distance;
use crate::util::{Config, VersionExt};
use anyhow::Error;

use super::context::Context;
use super::types::{ConflictMap, ConflictReason};

/// Error during resolution providing a path of `PackageId`s.
pub struct ResolveError {
    cause: Error,
    package_path: Vec<PackageId>,
}

impl ResolveError {
    pub fn new<E: Into<Error>>(cause: E, package_path: Vec<PackageId>) -> Self {
        Self {
            cause: cause.into(),
            package_path,
        }
    }

    /// Returns a path of packages from the package whose requirements could not be resolved up to
    /// the root.
pub fn package_path(&self) -> &[PackageId] {
        &self.package_path
    }
}

impl std::error::Error for ResolveError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        self.cause.source()
    }
}

impl fmt::Debug for ResolveError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.cause.fmt(f)
    }
}

impl fmt::Display for ResolveError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.cause.fmt(f)
    }
}

pub type ActivateResult<T> = Result<T, ActivateError>;

/// Error raised while activating a candidate during resolution.
#[derive(Debug)]
pub enum ActivateError {
    /// Unrecoverable error; aborts the whole resolution.
    Fatal(anyhow::Error),
    /// Conflict with an already-activated package; the resolver may backtrack.
    Conflict(PackageId, ConflictReason),
}

impl From<::anyhow::Error> for ActivateError {
    fn from(t: ::anyhow::Error) -> Self {
        ActivateError::Fatal(t)
    }
}

impl From<(PackageId, ConflictReason)> for ActivateError {
    fn from(t: (PackageId, ConflictReason)) -> Self {
        ActivateError::Conflict(t.0, t.1)
    }
}

/// Builds the user-facing error for a failed activation of `dep` of `parent`,
/// explaining either why none of `candidates` were acceptable, or (when no
/// candidate exists at all) what versions/names *were* found.
pub(super) fn activation_error(
    cx: &Context,
    registry: &mut dyn Registry,
    parent: &Summary,
    dep: &Dependency,
    conflicting_activations: &ConflictMap,
    candidates: &[Summary],
    config: Option<&Config>,
) -> ResolveError {
    // Wraps an error with the dependency path from `parent` up to the root.
    let to_resolve_err = |err| {
        ResolveError::new(
            err,
            cx.parents
                .path_to_bottom(&parent.package_id())
                .into_iter()
                .map(|(node, _)| node)
                .cloned()
                .collect(),
        )
    };

    if !candidates.is_empty() {
        let mut msg = format!("failed to select a version for `{}`.", dep.package_name());
        msg.push_str("\n    ... required by ");
        msg.push_str(&describe_path_in_context(cx, &parent.package_id()));
        msg.push_str("\nversions that meet the requirements `");
        msg.push_str(&dep.version_req().to_string());
        msg.push_str("` ");

        if let Some(v) = dep.version_req().locked_version() {
            msg.push_str("(locked to ");
            msg.push_str(&v.to_string());
            msg.push_str(") ");
        }

        msg.push_str("are: ");
        msg.push_str(
            &candidates
                .iter()
                .map(|v| v.version())
                .map(|v| v.to_string())
                .collect::<Vec<_>>()
                .join(", "),
        );

        let mut conflicting_activations: Vec<_> = conflicting_activations.iter().collect();
        conflicting_activations.sort_unstable();
        // This is reversed to show the newest versions first. I don't know if there is
        // a strong reason to do this, but that is how the code previously worked
        // (see https://github.com/rust-lang/cargo/pull/5037) and I don't feel like changing it.
        conflicting_activations.reverse();

        // Flag used for grouping all semver errors together.
        let mut has_semver = false;

        for (p, r) in &conflicting_activations {
            match r {
                ConflictReason::Semver => {
                    has_semver = true;
                }
                ConflictReason::Links(link) => {
                    msg.push_str("\n\nthe package `");
                    msg.push_str(&*dep.package_name());
                    msg.push_str("` links to the native library `");
                    msg.push_str(link);
                    msg.push_str("`, but it conflicts with a previous package which links to `");
                    msg.push_str(link);
                    msg.push_str("` as well:\n");
                    msg.push_str(&describe_path_in_context(cx, p));
                    msg.push_str("\nOnly one package in the dependency graph may specify the same links value. This helps ensure that only one copy of a native library is linked in the final binary. ");
                    msg.push_str("Try to adjust your dependencies so that only one package uses the links ='");
                    msg.push_str(&*dep.package_name());
                    msg.push_str("' value. For more information, see https://doc.rust-lang.org/cargo/reference/resolver.html#links.");
                }
                ConflictReason::MissingFeatures(features) => {
                    msg.push_str("\n\nthe package `");
                    msg.push_str(&*p.name());
                    msg.push_str("` depends on `");
                    msg.push_str(&*dep.package_name());
                    msg.push_str("`, with features: `");
                    msg.push_str(features);
                    msg.push_str("` but `");
                    msg.push_str(&*dep.package_name());
                    msg.push_str("` does not have these features.\n");
                    // p == parent so the full path is redundant.
                }
                ConflictReason::RequiredDependencyAsFeature(features) => {
                    msg.push_str("\n\nthe package `");
                    msg.push_str(&*p.name());
                    msg.push_str("` depends on `");
                    msg.push_str(&*dep.package_name());
                    msg.push_str("`, with features: `");
                    msg.push_str(features);
                    msg.push_str("` but `");
                    msg.push_str(&*dep.package_name());
                    msg.push_str("` does not have these features.\n");
                    msg.push_str(
                        " It has a required dependency with that name, \
                         but only optional dependencies can be used as features.\n",
                    );
                    // p == parent so the full path is redundant.
                }
                ConflictReason::NonImplicitDependencyAsFeature(features) => {
                    msg.push_str("\n\nthe package `");
                    msg.push_str(&*p.name());
                    msg.push_str("` depends on `");
                    msg.push_str(&*dep.package_name());
                    msg.push_str("`, with features: `");
                    msg.push_str(features);
                    msg.push_str("` but `");
                    msg.push_str(&*dep.package_name());
                    msg.push_str("` does not have these features.\n");
                    msg.push_str(
                        " It has an optional dependency with that name, \
                         but that dependency uses the \"dep:\" \
                         syntax in the features table, so it does not have an \
                         implicit feature with that name.\n",
                    );
                    // p == parent so the full path is redundant.
                }
                ConflictReason::PublicDependency(pkg_id) => {
                    // TODO: This needs to be implemented.
                    unimplemented!("pub dep {:?}", pkg_id);
                }
                ConflictReason::PubliclyExports(pkg_id) => {
                    // TODO: This needs to be implemented.
                    unimplemented!("pub exp {:?}", pkg_id);
                }
            }
        }

        if has_semver {
            // Group these errors together.
            msg.push_str("\n\nall possible versions conflict with previously selected packages.");
            for (p, r) in &conflicting_activations {
                if let ConflictReason::Semver = r {
                    msg.push_str("\n\n  previously selected ");
                    msg.push_str(&describe_path_in_context(cx, p));
                }
            }
        }

        msg.push_str("\n\nfailed to select a version for `");
        msg.push_str(&*dep.package_name());
        msg.push_str("` which could resolve this conflict");

        return to_resolve_err(anyhow::format_err!("{}", msg));
    }

    // We didn't actually find any candidates, so we need to
    // give an error message that nothing was found.
    //
    // Maybe the user mistyped the ver_req? Like `dep="2"` when `dep="0.2"`
    // was meant. So we re-query the registry with `dep="*"` so we can
    // list a few versions that were actually found.
    let all_req = semver::VersionReq::parse("*").unwrap();
    let mut new_dep = dep.clone();
    new_dep.set_version_req(all_req);

    let mut candidates = loop {
        match registry.query_vec(&new_dep, QueryKind::Exact) {
            Poll::Ready(Ok(candidates)) => break candidates,
            Poll::Ready(Err(e)) => return to_resolve_err(e),
            Poll::Pending => match registry.block_until_ready() {
                Ok(()) => continue,
                Err(e) => return to_resolve_err(e),
            },
        }
    };

    candidates.sort_unstable_by(|a, b| b.version().cmp(a.version()));

    let mut msg = if !candidates.is_empty() {
        let versions = {
            let mut versions = candidates
                .iter()
                .take(3)
                .map(|cand| cand.version().to_string())
                .collect::<Vec<_>>();

            if candidates.len() > 3 {
                versions.push("...".into());
            }

            versions.join(", ")
        };

        let locked_version = dep
            .version_req()
            .locked_version()
            .map(|v| format!(" (locked to {})", v))
            .unwrap_or_default();

        let mut msg = format!(
            "failed to select a version for the requirement `{} = \"{}\"`{}\n\
             candidate versions found which didn't match: {}\n\
             location searched: {}\n",
            dep.package_name(),
            dep.version_req(),
            locked_version,
            versions,
            registry.describe_source(dep.source_id()),
        );
        msg.push_str("required by ");
        msg.push_str(&describe_path_in_context(cx, &parent.package_id()));

        // If we have a path dependency with a locked version, then this may
        // indicate that we updated a sub-package and forgot to run `cargo
        // update`. In this case try to print a helpful error!
        if dep.source_id().is_path() && dep.version_req().is_locked() {
            msg.push_str(
                "\nconsider running `cargo update` to update \
                 a path dependency's locked version",
            );
        }

        if registry.is_replaced(dep.source_id()) {
            msg.push_str("\nperhaps a crate was updated and forgotten to be re-vendored?");
        }

        msg
    } else {
        // Maybe the user mistyped the name? Like `dep-thing` when `Dep_Thing`
        // was meant. So we try asking the registry for a `fuzzy` search for suggestions.
        let mut candidates = loop {
            match registry.query_vec(&new_dep, QueryKind::Fuzzy) {
                Poll::Ready(Ok(candidates)) => break candidates,
                Poll::Ready(Err(e)) => return to_resolve_err(e),
                Poll::Pending => match registry.block_until_ready() {
                    Ok(()) => continue,
                    Err(e) => return to_resolve_err(e),
                },
            }
        };

        candidates.sort_unstable_by_key(|a| a.name());
        candidates.dedup_by(|a, b| a.name() == b.name());
        let mut candidates: Vec<_> = candidates
            .iter()
            .map(|n| (lev_distance(&*new_dep.package_name(), &*n.name()), n))
            .filter(|&(d, _)| d < 4)
            .collect();
        candidates.sort_by_key(|o| o.0);
        let mut msg: String;
        if candidates.is_empty() {
            msg = format!("no matching package named `{}` found\n", dep.package_name());
        } else {
            msg = format!(
                "no matching package found\nsearched package name: `{}`\n",
                dep.package_name()
            );
            // If dependency package name is equal to the name of the candidate here
            // it may be a prerelease package which hasn't been specified correctly
            if dep.package_name() == candidates[0].1.name()
                && candidates[0].1.package_id().version().is_prerelease()
            {
                msg.push_str("prerelease package needs to be specified explicitly\n");
                msg.push_str(&format!(
                    "{name} = {{ version = \"{version}\" }}",
                    name = candidates[0].1.name(),
                    version = candidates[0].1.package_id().version()
                ));
            } else {
                let mut names = candidates
                    .iter()
                    .take(3)
                    .map(|c| c.1.name().as_str())
                    .collect::<Vec<_>>();

                if candidates.len() > 3 {
                    names.push("...");
                }
                // Vertically align first suggestion with missing crate name
                // so a typo jumps out at you.
                msg.push_str("perhaps you meant: ");
                msg.push_str(&names.iter().enumerate().fold(
                    String::default(),
                    |acc, (i, el)| match i {
                        0 => acc + el,
                        i if names.len() - 1 == i && candidates.len() <= 3 => acc + " or " + el,
                        _ => acc + ", " + el,
                    },
                ));
            }
            msg.push('\n');
        }
        msg.push_str(&format!("location searched: {}\n", dep.source_id()));
        msg.push_str("required by ");
        msg.push_str(&describe_path_in_context(cx, &parent.package_id()));

        msg
    };

    if let Some(config) = config {
        if config.offline() {
            msg.push_str(
                "\nAs a reminder, you're using offline mode (--offline) \
                 which can sometimes cause surprising resolution failures, \
                 if this error is too confusing you may wish to retry \
                 without the offline flag.",
            );
        }
    }

    to_resolve_err(anyhow::format_err!("{}", msg))
}

/// Returns String representation of dependency chain for a particular `pkgid`
/// within given context.
pub(super) fn describe_path_in_context(cx: &Context, id: &PackageId) -> String {
    let iter = cx
        .parents
        .path_to_bottom(id)
        .into_iter()
        .map(|(p, d)| (p, d.and_then(|d| d.iter().next())));
    describe_path(iter)
}

/// Returns String representation of dependency chain for a particular `pkgid`.
///
/// Note that all elements of `path` iterator should have `Some` dependency
/// except the first one. It would look like:
///
/// (pkg0, None)
/// -> (pkg1, dep from pkg1 satisfied by pkg0)
/// -> (pkg2, dep from pkg2 satisfied by pkg1)
/// -> ...
pub(crate) fn describe_path<'a>( mut path: impl Iterator)>, ) -> String { use std::fmt::Write; if let Some(p) = path.next() { let mut dep_path_desc = format!("package `{}`", p.0); for (pkg, dep) in path { let dep = dep.unwrap(); let source_kind = if dep.source_id().is_path() { "path " } else if dep.source_id().is_git() { "git " } else { "" }; let requirement = if source_kind.is_empty() { format!("{} = \"{}\"", dep.name_in_toml(), dep.version_req()) } else { dep.name_in_toml().to_string() }; let locked_version = dep .version_req() .locked_version() .map(|v| format!("(locked to {}) ", v)) .unwrap_or_default(); write!( dep_path_desc, "\n ... which satisfies {}dependency `{}` {}of package `{}`", source_kind, requirement, locked_version, pkg ) .unwrap(); } return dep_path_desc; } String::new() } cargo-0.66.0/src/cargo/core/resolver/features.rs000066400000000000000000001077201432416201200215370ustar00rootroot00000000000000//! Feature resolver. //! //! This is a new feature resolver that runs independently of the main //! dependency resolver. It has several options which can enable new feature //! resolution behavior. //! //! One of its key characteristics is that it can avoid unifying features for //! shared dependencies in some situations. See `FeatureOpts` for the //! different behaviors that can be enabled. If no extra options are enabled, //! then it should behave exactly the same as the dependency resolver's //! feature resolution. //! //! The preferred way to engage this new resolver is via //! `resolve_ws_with_opts`. //! //! This does not *replace* feature resolution in the dependency resolver, but //! instead acts as a second pass which can *narrow* the features selected in //! the dependency resolver. The dependency resolver still needs to do its own //! feature resolution in order to avoid selecting optional dependencies that //! are never enabled. The dependency resolver could, in theory, just assume //! 
all optional dependencies on all packages are enabled (and remove all //! knowledge of features), but that could introduce new requirements that //! might change old behavior or cause conflicts. Maybe some day in the future //! we could experiment with that, but it seems unlikely to work or be all //! that helpful. //! //! There are many assumptions made about the dependency resolver. This //! feature resolver assumes validation has already been done on the feature //! maps, and doesn't do any validation itself. It assumes dev-dependencies //! within a dependency have been removed. There are probably other //! assumptions that I am forgetting. use crate::core::compiler::{CompileKind, CompileTarget, RustcTargetData}; use crate::core::dependency::{ArtifactTarget, DepKind, Dependency}; use crate::core::resolver::types::FeaturesSet; use crate::core::resolver::{Resolve, ResolveBehavior}; use crate::core::{FeatureValue, PackageId, PackageIdSpec, PackageSet, Workspace}; use crate::util::interning::InternedString; use crate::util::CargoResult; use anyhow::bail; use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; use std::rc::Rc; /// The key used in various places to store features for a particular dependency. /// The actual discrimination happens with the `FeaturesFor` type. type PackageFeaturesKey = (PackageId, FeaturesFor); /// Map of activated features. /// /// The key is `(PackageId, bool)` where the bool is `true` if these /// are features for a build dependency or proc-macro. type ActivateMap = HashMap>; /// Set of all activated features for all packages in the resolve graph. pub struct ResolvedFeatures { activated_features: ActivateMap, /// Optional dependencies that should be built. /// /// The value is the `name_in_toml` of the dependencies. activated_dependencies: ActivateMap, opts: FeatureOpts, } /// Options for how the feature resolver works. 
#[derive(Default)]
pub struct FeatureOpts {
    /// Build deps and proc-macros will not share features with other dep
    /// kinds, nor will artifact targets.
    /// In other terms, if true, features associated with certain kinds of dependencies
    /// will only be unified together.
    /// If false, there is only one namespace for features, unifying all features across
    /// all dependencies, no matter what kind.
    decouple_host_deps: bool,
    /// Dev dep features will not be activated unless needed.
    decouple_dev_deps: bool,
    /// Targets that are not in use will not activate features.
    ignore_inactive_targets: bool,
    /// If enabled, compare against old resolver (for testing).
    compare: bool,
}

/// Flag to indicate if Cargo is building *any* dev units (tests, examples, etc.).
///
/// This disables decoupling of dev dependencies. It may be possible to relax
/// this in the future, but it will require significant changes to how unit
/// dependencies are computed, and can result in longer build times with
/// `cargo test` because the lib may need to be built 3 times instead of
/// twice.
#[derive(Copy, Clone, PartialEq)]
pub enum HasDevUnits {
    Yes,
    No,
}

/// Flag to indicate that target-specific filtering should be disabled.
#[derive(Copy, Clone, PartialEq)]
pub enum ForceAllTargets {
    Yes,
    No,
}

/// Flag to indicate if features are requested for a build dependency or not.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Ord, PartialOrd, Hash)]
pub enum FeaturesFor {
    /// If `Some(target)` is present, we represent an artifact target.
    /// Otherwise any other normal or dev dependency.
    NormalOrDevOrArtifactTarget(Option<CompileTarget>),
    /// Build dependency or proc-macro.
    HostDep,
}

impl Default for FeaturesFor {
    fn default() -> Self {
        FeaturesFor::NormalOrDevOrArtifactTarget(None)
    }
}

impl std::fmt::Display for FeaturesFor {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            FeaturesFor::HostDep => f.write_str("host"),
            FeaturesFor::NormalOrDevOrArtifactTarget(Some(target)) => {
                f.write_str(&target.rustc_target())
            }
            FeaturesFor::NormalOrDevOrArtifactTarget(None) => Ok(()),
        }
    }
}

impl FeaturesFor {
    pub fn from_for_host(for_host: bool) -> FeaturesFor {
        if for_host {
            FeaturesFor::HostDep
        } else {
            FeaturesFor::NormalOrDevOrArtifactTarget(None)
        }
    }

    pub fn from_for_host_or_artifact_target(
        for_host: bool,
        artifact_target: Option<CompileTarget>,
    ) -> FeaturesFor {
        // An artifact target takes precedence over the host flag.
        match artifact_target {
            Some(target) => FeaturesFor::NormalOrDevOrArtifactTarget(Some(target)),
            None => {
                if for_host {
                    FeaturesFor::HostDep
                } else {
                    FeaturesFor::NormalOrDevOrArtifactTarget(None)
                }
            }
        }
    }

    /// Collapses this key to the single shared namespace when host-dep
    /// decoupling is disabled.
    fn apply_opts(self, opts: &FeatureOpts) -> Self {
        if opts.decouple_host_deps {
            self
        } else {
            FeaturesFor::default()
        }
    }
}

impl FeatureOpts {
    /// Builds feature options from the workspace's resolver behavior and the
    /// unstable `-Zfeatures` flags, then applies the dev-unit/all-target
    /// overrides.
    pub fn new(
        ws: &Workspace<'_>,
        has_dev_units: HasDevUnits,
        force_all_targets: ForceAllTargets,
    ) -> CargoResult<FeatureOpts> {
        let mut opts = FeatureOpts::default();
        let unstable_flags = ws.config().cli_unstable();
        let mut enable = |feat_opts: &Vec<String>| {
            for opt in feat_opts {
                match opt.as_ref() {
                    "build_dep" | "host_dep" => opts.decouple_host_deps = true,
                    "dev_dep" => opts.decouple_dev_deps = true,
                    "itarget" => opts.ignore_inactive_targets = true,
                    "all" => {
                        opts.decouple_host_deps = true;
                        opts.decouple_dev_deps = true;
                        opts.ignore_inactive_targets = true;
                    }
                    "compare" => opts.compare = true,
                    "ws" => unimplemented!(),
                    s => bail!("-Zfeatures flag `{}` is not supported", s),
                }
            }
            Ok(())
        };
        if let Some(feat_opts) = unstable_flags.features.as_ref() {
            enable(feat_opts)?;
        }
        match ws.resolve_behavior() {
            ResolveBehavior::V1 => {}
            ResolveBehavior::V2 => {
                enable(&vec!["all".to_string()]).unwrap();
            }
        }
        if let HasDevUnits::Yes = has_dev_units {
            // Dev deps cannot be decoupled when they are in use.
            opts.decouple_dev_deps = false;
        }
        if let ForceAllTargets::Yes = force_all_targets {
            opts.ignore_inactive_targets = false;
        }
        Ok(opts)
    }

    /// Creates a new FeatureOpts for the given behavior.
    pub fn new_behavior(behavior: ResolveBehavior, has_dev_units: HasDevUnits) -> FeatureOpts {
        match behavior {
            ResolveBehavior::V1 => FeatureOpts::default(),
            ResolveBehavior::V2 => FeatureOpts {
                decouple_host_deps: true,
                decouple_dev_deps: has_dev_units == HasDevUnits::No,
                ignore_inactive_targets: true,
                compare: false,
            },
        }
    }
}

/// Features flags requested for a package.
///
/// This should be cheap and fast to clone, it is used in the resolver for
/// various caches.
///
/// This is split into enum variants because the resolver needs to handle
/// features coming from different places (command-line and dependency
/// declarations), but those different places have different constraints on
/// which syntax is allowed. This helps ensure that every place dealing with
/// features is properly handling those syntax restrictions.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum RequestedFeatures {
    /// Features requested on the command-line with flags.
    CliFeatures(CliFeatures),
    /// Features specified in a dependency declaration.
    DepFeatures {
        /// The `features` dependency field.
        features: FeaturesSet,
        /// The `default-features` dependency field.
        uses_default_features: bool,
    },
}

/// Features specified on the command-line.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct CliFeatures {
    /// Features from the `--features` flag.
    pub features: Rc<BTreeSet<FeatureValue>>,
    /// The `--all-features` flag.
    pub all_features: bool,
    /// Inverse of `--no-default-features` flag.
    pub uses_default_features: bool,
}

impl CliFeatures {
    /// Creates a new CliFeatures from the given command-line flags.
pub fn from_command_line( features: &[String], all_features: bool, uses_default_features: bool, ) -> CargoResult { let features = Rc::new(CliFeatures::split_features(features)); // Some early validation to ensure correct syntax. for feature in features.iter() { match feature { // Maybe call validate_feature_name here once it is an error? FeatureValue::Feature(_) => {} FeatureValue::Dep { .. } => { bail!( "feature `{}` is not allowed to use explicit `dep:` syntax", feature ); } FeatureValue::DepFeature { dep_feature, .. } => { if dep_feature.contains('/') { bail!("multiple slashes in feature `{}` is not allowed", feature); } } } } Ok(CliFeatures { features, all_features, uses_default_features, }) } /// Creates a new CliFeatures with the given `all_features` setting. pub fn new_all(all_features: bool) -> CliFeatures { CliFeatures { features: Rc::new(BTreeSet::new()), all_features, uses_default_features: true, } } fn split_features(features: &[String]) -> BTreeSet { features .iter() .flat_map(|s| s.split_whitespace()) .flat_map(|s| s.split(',')) .filter(|s| !s.is_empty()) .map(InternedString::new) .map(FeatureValue::new) .collect() } } impl ResolvedFeatures { /// Returns the list of features that are enabled for the given package. pub fn activated_features( &self, pkg_id: PackageId, features_for: FeaturesFor, ) -> Vec { self.activated_features_int(pkg_id, features_for) .expect("activated_features for invalid package") } /// Returns if the given dependency should be included. /// /// This handles dependencies disabled via `cfg` expressions and optional /// dependencies which are not enabled. pub fn is_dep_activated( &self, pkg_id: PackageId, features_for: FeaturesFor, dep_name: InternedString, ) -> bool { let key = features_for.apply_opts(&self.opts); self.activated_dependencies .get(&(pkg_id, key)) .map(|deps| deps.contains(&dep_name)) .unwrap_or(false) } /// Variant of `activated_features` that returns `None` if this is /// not a valid pkg_id/is_build combination. 
Used in places which do /// not know which packages are activated (like `cargo clean`). pub fn activated_features_unverified( &self, pkg_id: PackageId, features_for: FeaturesFor, ) -> Option> { self.activated_features_int(pkg_id, features_for).ok() } fn activated_features_int( &self, pkg_id: PackageId, features_for: FeaturesFor, ) -> CargoResult> { let fk = features_for.apply_opts(&self.opts); if let Some(fs) = self.activated_features.get(&(pkg_id, fk)) { Ok(fs.iter().cloned().collect()) } else { bail!("features did not find {:?} {:?}", pkg_id, fk) } } /// Compares the result against the original resolver behavior. /// /// Used by `cargo fix --edition` to display any differences. pub fn compare_legacy(&self, legacy: &ResolvedFeatures) -> DiffMap { self.activated_features .iter() .filter_map(|((pkg_id, for_host), new_features)| { let old_features = legacy .activated_features .get(&(*pkg_id, *for_host)) // The new features may have for_host entries where the old one does not. .or_else(|| { legacy .activated_features .get(&(*pkg_id, FeaturesFor::default())) }) .map(|feats| feats.iter().cloned().collect()) .unwrap_or_else(|| BTreeSet::new()); // The new resolver should never add features. assert_eq!(new_features.difference(&old_features).next(), None); let removed_features: BTreeSet<_> = old_features.difference(new_features).cloned().collect(); if removed_features.is_empty() { None } else { Some(((*pkg_id, *for_host), removed_features)) } }) .collect() } } /// Map of differences. /// /// Key is `(pkg_id, for_host)`. Value is a set of features or dependencies removed. pub type DiffMap = BTreeMap>; pub struct FeatureResolver<'a, 'cfg> { ws: &'a Workspace<'cfg>, target_data: &'a RustcTargetData<'cfg>, /// The platforms to build for, requested by the user. requested_targets: &'a [CompileKind], resolve: &'a Resolve, package_set: &'a PackageSet<'cfg>, /// Options that change how the feature resolver operates. opts: FeatureOpts, /// Map of features activated for each package. 
activated_features: ActivateMap, /// Map of optional dependencies activated for each package. activated_dependencies: ActivateMap, /// Keeps track of which packages have had its dependencies processed. /// Used to avoid cycles, and to speed up processing. processed_deps: HashSet, /// If this is `true`, then a non-default `feature_key` needs to be tracked while /// traversing the graph. /// /// This is only here to avoid calling `is_proc_macro` when all feature /// options are disabled (because `is_proc_macro` can trigger downloads). /// This has to be separate from `FeatureOpts.decouple_host_deps` because /// `for_host` tracking is also needed for `itarget` to work properly. track_for_host: bool, /// `dep_name?/feat_name` features that will be activated if `dep_name` is /// ever activated. /// /// The key is the `(package, for_host, dep_name)` of the package whose /// dependency will trigger the addition of new features. The value is the /// set of features to activate. deferred_weak_dependencies: HashMap<(PackageId, FeaturesFor, InternedString), HashSet>, } impl<'a, 'cfg> FeatureResolver<'a, 'cfg> { /// Runs the resolution algorithm and returns a new `ResolvedFeatures` /// with the result. 
pub fn resolve( ws: &Workspace<'cfg>, target_data: &RustcTargetData<'cfg>, resolve: &Resolve, package_set: &'a PackageSet<'cfg>, cli_features: &CliFeatures, specs: &[PackageIdSpec], requested_targets: &[CompileKind], opts: FeatureOpts, ) -> CargoResult { use crate::util::profile; let _p = profile::start("resolve features"); let track_for_host = opts.decouple_host_deps || opts.ignore_inactive_targets; let mut r = FeatureResolver { ws, target_data, requested_targets, resolve, package_set, opts, activated_features: HashMap::new(), activated_dependencies: HashMap::new(), processed_deps: HashSet::new(), track_for_host, deferred_weak_dependencies: HashMap::new(), }; r.do_resolve(specs, cli_features)?; log::debug!("features={:#?}", r.activated_features); if r.opts.compare { r.compare(); } Ok(ResolvedFeatures { activated_features: r.activated_features, activated_dependencies: r.activated_dependencies, opts: r.opts, }) } /// Performs the process of resolving all features for the resolve graph. fn do_resolve( &mut self, specs: &[PackageIdSpec], cli_features: &CliFeatures, ) -> CargoResult<()> { let member_features = self.ws.members_with_features(specs, cli_features)?; for (member, cli_features) in &member_features { let fvs = self.fvs_from_requested(member.package_id(), cli_features); let fk = if self.track_for_host && self.is_proc_macro(member.package_id()) { // Also activate for normal dependencies. This is needed if the // proc-macro includes other targets (like binaries or tests), // or running in `cargo test`. Note that in a workspace, if // the proc-macro is selected on the command like (like with // `--workspace`), this forces feature unification with normal // dependencies. This is part of the bigger problem where // features depend on which packages are built. 
self.activate_pkg(member.package_id(), FeaturesFor::default(), &fvs)?; FeaturesFor::HostDep } else { FeaturesFor::default() }; self.activate_pkg(member.package_id(), fk, &fvs)?; } Ok(()) } fn activate_pkg( &mut self, pkg_id: PackageId, fk: FeaturesFor, fvs: &[FeatureValue], ) -> CargoResult<()> { log::trace!("activate_pkg {} {}", pkg_id.name(), fk); // Add an empty entry to ensure everything is covered. This is intended for // finding bugs where the resolver missed something it should have visited. // Remove this in the future if `activated_features` uses an empty default. self.activated_features .entry((pkg_id, fk.apply_opts(&self.opts))) .or_insert_with(BTreeSet::new); for fv in fvs { self.activate_fv(pkg_id, fk, fv)?; } if !self.processed_deps.insert((pkg_id, fk)) { // Already processed dependencies. There's no need to process them // again. This is primarily to avoid cycles, but also helps speed // things up. // // This is safe because if another package comes along and adds a // feature on this package, it will immediately add it (in // `activate_fv`), and recurse as necessary right then and there. // For example, consider we've already processed our dependencies, // and another package comes along and enables one of our optional // dependencies, it will do so immediately in the // `FeatureValue::DepFeature` branch, and then immediately // recurse into that optional dependency. This also holds true for // features that enable other features. return Ok(()); } for (dep_pkg_id, deps) in self.deps(pkg_id, fk) { for (dep, dep_fk) in deps { if dep.is_optional() { // Optional dependencies are enabled in `activate_fv` when // a feature enables it. continue; } // Recurse into the dependency. let fvs = self.fvs_from_dependency(dep_pkg_id, dep); self.activate_pkg(dep_pkg_id, dep_fk, &fvs)?; } } Ok(()) } /// Activate a single FeatureValue for a package. 
fn activate_fv( &mut self, pkg_id: PackageId, fk: FeaturesFor, fv: &FeatureValue, ) -> CargoResult<()> { log::trace!("activate_fv {} {} {}", pkg_id.name(), fk, fv); match fv { FeatureValue::Feature(f) => { self.activate_rec(pkg_id, fk, *f)?; } FeatureValue::Dep { dep_name } => { self.activate_dependency(pkg_id, fk, *dep_name)?; } FeatureValue::DepFeature { dep_name, dep_feature, weak, } => { self.activate_dep_feature(pkg_id, fk, *dep_name, *dep_feature, *weak)?; } } Ok(()) } /// Activate the given feature for the given package, and then recursively /// activate any other features that feature enables. fn activate_rec( &mut self, pkg_id: PackageId, fk: FeaturesFor, feature_to_enable: InternedString, ) -> CargoResult<()> { log::trace!( "activate_rec {} {} feat={}", pkg_id.name(), fk, feature_to_enable ); let enabled = self .activated_features .entry((pkg_id, fk.apply_opts(&self.opts))) .or_insert_with(BTreeSet::new); if !enabled.insert(feature_to_enable) { // Already enabled. return Ok(()); } let summary = self.resolve.summary(pkg_id); let feature_map = summary.features(); let fvs = match feature_map.get(&feature_to_enable) { Some(fvs) => fvs, None => { // TODO: this should only happen for optional dependencies. // Other cases should be validated by Summary's `build_feature_map`. // Figure out some way to validate this assumption. log::debug!( "pkg {:?} does not define feature {}", pkg_id, feature_to_enable ); return Ok(()); } }; for fv in fvs { self.activate_fv(pkg_id, fk, fv)?; } Ok(()) } /// Activate a dependency (`dep:dep_name` syntax). fn activate_dependency( &mut self, pkg_id: PackageId, fk: FeaturesFor, dep_name: InternedString, ) -> CargoResult<()> { // Mark this dependency as activated. let save_decoupled = fk.apply_opts(&self.opts); self.activated_dependencies .entry((pkg_id, save_decoupled)) .or_default() .insert(dep_name); // Check for any deferred features. 
let to_enable = self .deferred_weak_dependencies .remove(&(pkg_id, fk, dep_name)); // Activate the optional dep. for (dep_pkg_id, deps) in self.deps(pkg_id, fk) { for (dep, dep_fk) in deps { if dep.name_in_toml() != dep_name { continue; } if let Some(to_enable) = &to_enable { for dep_feature in to_enable { log::trace!( "activate deferred {} {} -> {}/{}", pkg_id.name(), fk, dep_name, dep_feature ); let fv = FeatureValue::new(*dep_feature); self.activate_fv(dep_pkg_id, dep_fk, &fv)?; } } let fvs = self.fvs_from_dependency(dep_pkg_id, dep); self.activate_pkg(dep_pkg_id, dep_fk, &fvs)?; } } Ok(()) } /// Activate a feature within a dependency (`dep_name/feat_name` syntax). fn activate_dep_feature( &mut self, pkg_id: PackageId, fk: FeaturesFor, dep_name: InternedString, dep_feature: InternedString, weak: bool, ) -> CargoResult<()> { for (dep_pkg_id, deps) in self.deps(pkg_id, fk) { for (dep, dep_fk) in deps { if dep.name_in_toml() != dep_name { continue; } if dep.is_optional() { let save_for_host = fk.apply_opts(&self.opts); if weak && !self .activated_dependencies .get(&(pkg_id, save_for_host)) .map(|deps| deps.contains(&dep_name)) .unwrap_or(false) { // This is weak, but not yet activated. Defer in case // something comes along later and enables it. log::trace!( "deferring feature {} {} -> {}/{}", pkg_id.name(), fk, dep_name, dep_feature ); self.deferred_weak_dependencies .entry((pkg_id, fk, dep_name)) .or_default() .insert(dep_feature); continue; } // Activate the dependency on self. let fv = FeatureValue::Dep { dep_name }; self.activate_fv(pkg_id, fk, &fv)?; if !weak { // The old behavior before weak dependencies were // added is to also enables a feature of the same // name. self.activate_rec(pkg_id, fk, dep_name)?; } } // Activate the feature on the dependency. let fv = FeatureValue::new(dep_feature); self.activate_fv(dep_pkg_id, dep_fk, &fv)?; } } Ok(()) } /// Returns Vec of FeatureValues from a Dependency definition. 
fn fvs_from_dependency(&self, dep_id: PackageId, dep: &Dependency) -> Vec { let summary = self.resolve.summary(dep_id); let feature_map = summary.features(); let mut result: Vec = dep .features() .iter() .map(|f| FeatureValue::new(*f)) .collect(); let default = InternedString::new("default"); if dep.uses_default_features() && feature_map.contains_key(&default) { result.push(FeatureValue::Feature(default)); } result } /// Returns Vec of FeatureValues from a set of command-line features. fn fvs_from_requested( &self, pkg_id: PackageId, cli_features: &CliFeatures, ) -> Vec { let summary = self.resolve.summary(pkg_id); let feature_map = summary.features(); let mut result: Vec = cli_features.features.iter().cloned().collect(); let default = InternedString::new("default"); if cli_features.uses_default_features && feature_map.contains_key(&default) { result.push(FeatureValue::Feature(default)); } if cli_features.all_features { result.extend(feature_map.keys().map(|k| FeatureValue::Feature(*k))) } result } /// Returns the dependencies for a package, filtering out inactive targets. fn deps( &self, pkg_id: PackageId, fk: FeaturesFor, ) -> Vec<(PackageId, Vec<(&'a Dependency, FeaturesFor)>)> { // Helper for determining if a platform is activated. let platform_activated = |dep: &Dependency| -> bool { // We always count platforms as activated if the target stems from an artifact // dependency's target specification. This triggers in conjunction with // `[target.'cfg(…)'.dependencies]` manifest sections. match (dep.is_build(), fk) { (true, _) | (_, FeaturesFor::HostDep) => { // We always care about build-dependencies, and they are always // Host. If we are computing dependencies "for a build script", // even normal dependencies are host-only. 
self.target_data .dep_platform_activated(dep, CompileKind::Host) } (_, FeaturesFor::NormalOrDevOrArtifactTarget(None)) => self .requested_targets .iter() .any(|kind| self.target_data.dep_platform_activated(dep, *kind)), (_, FeaturesFor::NormalOrDevOrArtifactTarget(Some(target))) => self .target_data .dep_platform_activated(dep, CompileKind::Target(target)), } }; self.resolve .deps(pkg_id) .map(|(dep_id, deps)| { let deps = deps .iter() .filter(|dep| { if dep.platform().is_some() && self.opts.ignore_inactive_targets && !platform_activated(dep) { return false; } if self.opts.decouple_dev_deps && dep.kind() == DepKind::Development { return false; } true }) .flat_map(|dep| { // Each `dep`endency can be built for multiple targets. For one, it // may be a library target which is built as initially configured // by `fk`. If it appears as build dependency, it must be built // for the host. // // It may also be an artifact dependency, // which could be built either // // - for a specified (aka 'forced') target, specified by // `dep = { …, target = ` }` // - as an artifact for use in build dependencies that should // build for whichever `--target`s are specified // - like a library would be built // // Generally, the logic for choosing a target for dependencies is // unaltered and used to determine how to build non-artifacts, // artifacts without target specification and no library, // or an artifacts library. // // All this may result in a dependency being built multiple times // for various targets which are either specified in the manifest // or on the cargo command-line. let lib_fk = if fk == FeaturesFor::default() { (self.track_for_host && (dep.is_build() || self.is_proc_macro(dep_id))) .then(|| FeaturesFor::HostDep) .unwrap_or_default() } else { fk }; // `artifact_target_keys` are produced to fulfil the needs of artifacts that have a target specification. 
let artifact_target_keys = dep.artifact().map(|artifact| { ( artifact.is_lib(), artifact.target().map(|target| match target { ArtifactTarget::Force(target) => { vec![FeaturesFor::NormalOrDevOrArtifactTarget(Some(target))] } ArtifactTarget::BuildDependencyAssumeTarget => self .requested_targets .iter() .filter_map(|kind| match kind { CompileKind::Host => None, CompileKind::Target(target) => { Some(FeaturesFor::NormalOrDevOrArtifactTarget( Some(*target), )) } }) .collect(), }), ) }); let dep_fks = match artifact_target_keys { // The artifact is also a library and does specify custom // targets. // The library's feature key needs to be used alongside // the keys artifact targets. Some((is_lib, Some(mut dep_fks))) if is_lib => { dep_fks.push(lib_fk); dep_fks } // The artifact is not a library, but does specify // custom targets. // Use only these targets feature keys. Some((_, Some(dep_fks))) => dep_fks, // There is no artifact in the current dependency // or there is no target specified on the artifact. // Use the standard feature key without any alteration. Some((_, None)) | None => vec![lib_fk], }; dep_fks.into_iter().map(move |dep_fk| (dep, dep_fk)) }) .collect::>(); (dep_id, deps) }) .filter(|(_id, deps)| !deps.is_empty()) .collect() } /// Compare the activated features to the resolver. Used for testing. fn compare(&self) { let mut found = false; for ((pkg_id, dep_kind), features) in &self.activated_features { let r_features = self.resolve.features(*pkg_id); if !r_features.iter().eq(features.iter()) { crate::drop_eprintln!( self.ws.config(), "{}/{:?} features mismatch\nresolve: {:?}\nnew: {:?}\n", pkg_id, dep_kind, r_features, features ); found = true; } } if found { panic!("feature mismatch"); } } fn is_proc_macro(&self, package_id: PackageId) -> bool { self.package_set .get_one(package_id) .expect("packages downloaded") .proc_macro() } } cargo-0.66.0/src/cargo/core/resolver/mod.rs000066400000000000000000001401641432416201200204770ustar00rootroot00000000000000//! 
Resolution of the entire dependency graph for a crate. //! //! This module implements the core logic in taking the world of crates and //! constraints and creating a resolved graph with locked versions for all //! crates and their dependencies. This is separate from the registry module //! which is more worried about discovering crates from various sources, this //! module just uses the Registry trait as a source to learn about crates from. //! //! Actually solving a constraint graph is an NP-hard problem. This algorithm //! is basically a nice heuristic to make sure we get roughly the best answer //! most of the time. The constraints that we're working with are: //! //! 1. Each crate can have any number of dependencies. Each dependency can //! declare a version range that it is compatible with. //! 2. Crates can be activated with multiple version (e.g., show up in the //! dependency graph twice) so long as each pairwise instance have //! semver-incompatible versions. //! //! The algorithm employed here is fairly simple, we simply do a DFS, activating //! the "newest crate" (highest version) first and then going to the next //! option. The heuristics we employ are: //! //! * Never try to activate a crate version which is incompatible. This means we //! only try crates which will actually satisfy a dependency and we won't ever //! try to activate a crate that's semver compatible with something else //! activated (as we're only allowed to have one) nor try to activate a crate //! that has the same links attribute as something else //! activated. //! * Always try to activate the highest version crate first. The default //! dependency in Cargo (e.g., when you write `foo = "0.1.2"`) is //! semver-compatible, so selecting the highest version possible will allow us //! to hopefully satisfy as many dependencies at once. //! //! Beyond that, what's implemented below is just a naive backtracking version //! 
which should in theory try all possible combinations of dependencies and //! versions to see if one works. The first resolution that works causes //! everything to bail out immediately and return success, and only if *nothing* //! works do we actually return an error up the stack. //! //! ## Performance //! //! Note that this is a relatively performance-critical portion of Cargo. The //! data that we're processing is proportional to the size of the dependency //! graph, which can often be quite large (e.g., take a look at Servo). To make //! matters worse the DFS algorithm we're implemented is inherently quite //! inefficient. When we add the requirement of backtracking on top it means //! that we're implementing something that probably shouldn't be allocating all //! over the place. use std::collections::{BTreeMap, HashMap, HashSet}; use std::mem; use std::rc::Rc; use std::time::{Duration, Instant}; use log::{debug, trace}; use crate::core::PackageIdSpec; use crate::core::{Dependency, PackageId, Registry, Summary}; use crate::util::config::Config; use crate::util::errors::CargoResult; use crate::util::network::PollExt; use crate::util::profile; use self::context::Context; use self::dep_cache::RegistryQueryer; use self::features::RequestedFeatures; use self::types::{ConflictMap, ConflictReason, DepsFrame}; use self::types::{FeaturesSet, RcVecIter, RemainingDeps, ResolverProgress}; pub use self::encode::Metadata; pub use self::encode::{EncodableDependency, EncodablePackageId, EncodableResolve}; pub use self::errors::{ActivateError, ActivateResult, ResolveError}; pub use self::features::{CliFeatures, ForceAllTargets, HasDevUnits}; pub use self::resolve::{Resolve, ResolveVersion}; pub use self::types::{ResolveBehavior, ResolveOpts}; pub use self::version_prefs::{VersionOrdering, VersionPreferences}; mod conflict_cache; mod context; mod dep_cache; mod encode; pub(crate) mod errors; pub mod features; mod resolve; mod types; mod version_prefs; /// Builds the list of all 
packages required to build the first argument. /// /// * `summaries` - the list of package summaries along with how to resolve /// their features. This is a list of all top-level packages that are intended /// to be part of the lock file (resolve output). These typically are a list /// of all workspace members. /// /// * `replacements` - this is a list of `[replace]` directives found in the /// root of the workspace. The list here is a `PackageIdSpec` of what to /// replace and a `Dependency` to replace that with. In general it's not /// recommended to use `[replace]` any more and use `[patch]` instead, which /// is supported elsewhere. /// /// * `registry` - this is the source from which all package summaries are /// loaded. It's expected that this is extensively configured ahead of time /// and is idempotent with our requests to it (aka returns the same results /// for the same query every time). Typically this is an instance of a /// `PackageRegistry`. /// /// * `version_prefs` - this represents a preference for some versions over others, /// based on the lock file or other reasons such as `[patch]`es. /// /// * `config` - a location to print warnings and such, or `None` if no warnings /// should be printed /// /// * `check_public_visible_dependencies` - a flag for whether to enforce the restrictions /// introduced in the "public & private dependencies" RFC (1977). The current implementation /// makes sure that there is only one version of each name visible to each package. /// /// But there are 2 stable ways to directly depend on different versions of the same name. /// 1. Use the renamed dependencies functionality /// 2. Use 'cfg({})' dependencies functionality /// /// When we have a decision for how to implement is without breaking existing functionality /// this flag can be removed. 
pub fn resolve( summaries: &[(Summary, ResolveOpts)], replacements: &[(PackageIdSpec, Dependency)], registry: &mut dyn Registry, version_prefs: &VersionPreferences, config: Option<&Config>, check_public_visible_dependencies: bool, ) -> CargoResult { let _p = profile::start("resolving"); let minimal_versions = match config { Some(config) => config.cli_unstable().minimal_versions, None => false, }; let mut registry = RegistryQueryer::new(registry, replacements, version_prefs, minimal_versions); let cx = loop { let cx = Context::new(check_public_visible_dependencies); let cx = activate_deps_loop(cx, &mut registry, summaries, config)?; if registry.reset_pending() { break cx; } else { registry.registry.block_until_ready()?; } }; let mut cksums = HashMap::new(); for (summary, _) in cx.activations.values() { let cksum = summary.checksum().map(|s| s.to_string()); cksums.insert(summary.package_id(), cksum); } let graph = cx.graph(); let replacements = cx.resolve_replacements(®istry); let features = cx .resolve_features .iter() .map(|(k, v)| (*k, v.iter().cloned().collect())) .collect(); let summaries = cx .activations .into_iter() .map(|(_key, (summary, _age))| (summary.package_id(), summary)) .collect(); let resolve = Resolve::new( graph, replacements, features, cksums, BTreeMap::new(), Vec::new(), ResolveVersion::default(), summaries, ); check_cycles(&resolve)?; check_duplicate_pkgs_in_lockfile(&resolve)?; trace!("resolved: {:?}", resolve); Ok(resolve) } /// Recursively activates the dependencies for `summaries`, in depth-first order, /// backtracking across possible candidates for each dependency as necessary. /// /// If all dependencies can be activated and resolved to a version in the /// dependency graph, `cx` is returned. 
fn activate_deps_loop( mut cx: Context, registry: &mut RegistryQueryer<'_>, summaries: &[(Summary, ResolveOpts)], config: Option<&Config>, ) -> CargoResult { let mut backtrack_stack = Vec::new(); let mut remaining_deps = RemainingDeps::new(); // `past_conflicting_activations` is a cache of the reasons for each time we // backtrack. let mut past_conflicting_activations = conflict_cache::ConflictCache::new(); // Activate all the initial summaries to kick off some work. for &(ref summary, ref opts) in summaries { debug!("initial activation: {}", summary.package_id()); let res = activate(&mut cx, registry, None, summary.clone(), opts); match res { Ok(Some((frame, _))) => remaining_deps.push(frame), Ok(None) => (), Err(ActivateError::Fatal(e)) => return Err(e), Err(ActivateError::Conflict(_, _)) => panic!("bad error from activate"), } } let mut printed = ResolverProgress::new(); // Main resolution loop, this is the workhorse of the resolution algorithm. // // You'll note that a few stacks are maintained on the side, which might // seem odd when this algorithm looks like it could be implemented // recursively. While correct, this is implemented iteratively to avoid // blowing the stack (the recursion depth is proportional to the size of the // input). // // The general sketch of this loop is to run until there are no dependencies // left to activate, and for each dependency to attempt to activate all of // its own dependencies in turn. The `backtrack_stack` is a side table of // backtracking states where if we hit an error we can return to in order to // attempt to continue resolving. while let Some((just_here_for_the_error_messages, frame)) = remaining_deps.pop_most_constrained() { let (mut parent, (mut dep, candidates, mut features)) = frame; // If we spend a lot of time here (we shouldn't in most cases) then give // a bit of a visual indicator as to what we're doing. 
printed.shell_status(config)?; trace!( "{}[{}]>{} {} candidates", parent.name(), cx.age, dep.package_name(), candidates.len() ); let just_here_for_the_error_messages = just_here_for_the_error_messages && past_conflicting_activations .conflicting(&cx, &dep) .is_some(); let mut remaining_candidates = RemainingCandidates::new(&candidates); // `conflicting_activations` stores all the reasons we were unable to // activate candidates. One of these reasons will have to go away for // backtracking to find a place to restart. It is also the list of // things to explain in the error message if we fail to resolve. // // This is a map of package ID to a reason why that packaged caused a // conflict for us. let mut conflicting_activations = ConflictMap::new(); // When backtracking we don't fully update `conflicting_activations` // especially for the cases that we didn't make a backtrack frame in the // first place. This `backtracked` var stores whether we are continuing // from a restored backtrack frame so that we can skip caching // `conflicting_activations` in `past_conflicting_activations` let mut backtracked = false; loop { let next = remaining_candidates.next( &mut conflicting_activations, &cx, &dep, parent.package_id(), ); let (candidate, has_another) = next.ok_or(()).or_else(|_| { // If we get here then our `remaining_candidates` was just // exhausted, so `dep` failed to activate. // // It's our job here to backtrack, if possible, and find a // different candidate to activate. If we can't find any // candidates whatsoever then it's time to bail entirely. trace!( "{}[{}]>{} -- no candidates", parent.name(), cx.age, dep.package_name() ); // Use our list of `conflicting_activations` to add to our // global list of past conflicting activations, effectively // globally poisoning `dep` if `conflicting_activations` ever // shows up again. We'll use the `past_conflicting_activations` // below to determine if a dependency is poisoned and skip as // much work as possible. 
// // If we're only here for the error messages then there's no // need to try this as this dependency is already known to be // bad. // // As we mentioned above with the `backtracked` variable if this // local is set to `true` then our `conflicting_activations` may // not be right, so we can't push into our global cache. let mut generalize_conflicting_activations = None; if !just_here_for_the_error_messages && !backtracked { past_conflicting_activations.insert(&dep, &conflicting_activations); if let Some(c) = generalize_conflicting( &cx, registry, &mut past_conflicting_activations, &parent, &dep, &conflicting_activations, ) { generalize_conflicting_activations = Some(c); } } match find_candidate( &cx, &mut backtrack_stack, &parent, backtracked, generalize_conflicting_activations .as_ref() .unwrap_or(&conflicting_activations), ) { Some((candidate, has_another, frame)) => { // Reset all of our local variables used with the // contents of `frame` to complete our backtrack. cx = frame.context; remaining_deps = frame.remaining_deps; remaining_candidates = frame.remaining_candidates; parent = frame.parent; dep = frame.dep; features = frame.features; conflicting_activations = frame.conflicting_activations; backtracked = true; Ok((candidate, has_another)) } None => { debug!("no candidates found"); Err(errors::activation_error( &cx, registry.registry, &parent, &dep, &conflicting_activations, &candidates, config, )) } } })?; // If we're only here for the error messages then we know that this // activation will fail one way or another. To that end if we've got // more candidates we want to fast-forward to the last one as // otherwise we'll just backtrack here anyway (helping us to skip // some work). if just_here_for_the_error_messages && !backtracked && has_another { continue; } // We have a `candidate`. Create a `BacktrackFrame` so we can add it // to the `backtrack_stack` later if activation succeeds. 
// // Note that if we don't actually have another candidate then there // will be nothing to backtrack to so we skip construction of the // frame. This is a relatively important optimization as a number of // the `clone` calls below can be quite expensive, so we avoid them // if we can. let backtrack = if has_another { Some(BacktrackFrame { context: Context::clone(&cx), remaining_deps: remaining_deps.clone(), remaining_candidates: remaining_candidates.clone(), parent: Summary::clone(&parent), dep: Dependency::clone(&dep), features: Rc::clone(&features), conflicting_activations: conflicting_activations.clone(), }) } else { None }; let pid = candidate.package_id(); let opts = ResolveOpts { dev_deps: false, features: RequestedFeatures::DepFeatures { features: Rc::clone(&features), uses_default_features: dep.uses_default_features(), }, }; trace!( "{}[{}]>{} trying {}", parent.name(), cx.age, dep.package_name(), candidate.version() ); let res = activate(&mut cx, registry, Some((&parent, &dep)), candidate, &opts); let successfully_activated = match res { // Success! We've now activated our `candidate` in our context // and we're almost ready to move on. We may want to scrap this // frame in the end if it looks like it's not going to end well, // so figure that out here. Ok(Some((mut frame, dur))) => { printed.elapsed(dur); // Our `frame` here is a new package with its own list of // dependencies. Do a sanity check here of all those // dependencies by cross-referencing our global // `past_conflicting_activations`. Recall that map is a // global cache which lists sets of packages where, when // activated, the dependency is unresolvable. // // If any our our frame's dependencies fit in that bucket, // aka known unresolvable, then we extend our own set of // conflicting activations with theirs. 
We can do this // because the set of conflicts we found implies the // dependency can't be activated which implies that we // ourselves can't be activated, so we know that they // conflict with us. let mut has_past_conflicting_dep = just_here_for_the_error_messages; if !has_past_conflicting_dep { if let Some(conflicting) = frame .remaining_siblings .clone() .filter_map(|(ref new_dep, _, _)| { past_conflicting_activations.conflicting(&cx, new_dep) }) .next() { // If one of our deps is known unresolvable // then we will not succeed. // How ever if we are part of the reason that // one of our deps conflicts then // we can make a stronger statement // because we will definitely be activated when // we try our dep. conflicting_activations.extend( conflicting .iter() .filter(|&(p, _)| p != &pid) .map(|(&p, r)| (p, r.clone())), ); has_past_conflicting_dep = true; } } // If any of `remaining_deps` are known unresolvable with // us activated, then we extend our own set of // conflicting activations with theirs and its parent. We can do this // because the set of conflicts we found implies the // dependency can't be activated which implies that we // ourselves are incompatible with that dep, so we know that deps // parent conflict with us. if !has_past_conflicting_dep { if let Some(known_related_bad_deps) = past_conflicting_activations.dependencies_conflicting_with(pid) { if let Some((other_parent, conflict)) = remaining_deps .iter() // for deps related to us .filter(|&(_, ref other_dep)| { known_related_bad_deps.contains(other_dep) }) .filter_map(|(other_parent, other_dep)| { past_conflicting_activations .find_conflicting(&cx, &other_dep, Some(pid)) .map(|con| (other_parent, con)) }) .next() { let rel = conflict.get(&pid).unwrap().clone(); // The conflict we found is // "other dep will not succeed if we are activated." // We want to add // "our dep will not succeed if other dep is in remaining_deps" // but that is not how the cache is set up. 
// So we add the less general but much faster, // "our dep will not succeed if other dep's parent is activated". conflicting_activations.extend( conflict .iter() .filter(|&(p, _)| p != &pid) .map(|(&p, r)| (p, r.clone())), ); conflicting_activations.insert(other_parent, rel); has_past_conflicting_dep = true; } } } // Ok if we're in a "known failure" state for this frame we // may want to skip it altogether though. We don't want to // skip it though in the case that we're displaying error // messages to the user! // // Here we need to figure out if the user will see if we // skipped this candidate (if it's known to fail, aka has a // conflicting dep and we're the last candidate). If we're // here for the error messages, we can't skip it (but we can // prune extra work). If we don't have any candidates in our // backtrack stack then we're the last line of defense, so // we'll want to present an error message for sure. let activate_for_error_message = has_past_conflicting_dep && !has_another && { just_here_for_the_error_messages || { find_candidate( &cx, &mut backtrack_stack.clone(), &parent, backtracked, &conflicting_activations, ) .is_none() } }; // If we're only here for the error messages then we know // one of our candidate deps will fail, meaning we will // fail and that none of the backtrack frames will find a // candidate that will help. Consequently let's clean up the // no longer needed backtrack frames. if activate_for_error_message { backtrack_stack.clear(); } // If we don't know for a fact that we'll fail or if we're // just here for the error message then we push this frame // onto our list of to-be-resolve, which will generate more // work for us later on. // // Otherwise we're guaranteed to fail and were not here for // error messages, so we skip work and don't push anything // onto our stack. 
frame.just_for_error_messages = has_past_conflicting_dep; if !has_past_conflicting_dep || activate_for_error_message { remaining_deps.push(frame); true } else { trace!( "{}[{}]>{} skipping {} ", parent.name(), cx.age, dep.package_name(), pid.version() ); false } } // This candidate's already activated, so there's no extra work // for us to do. Let's keep going. Ok(None) => true, // We failed with a super fatal error (like a network error), so // bail out as quickly as possible as we can't reliably // backtrack from errors like these Err(ActivateError::Fatal(e)) => return Err(e), // We failed due to a bland conflict, bah! Record this in our // frame's list of conflicting activations as to why this // candidate failed, and then move on. Err(ActivateError::Conflict(id, reason)) => { conflicting_activations.insert(id, reason); false } }; // If we've successfully activated then save off the backtrack frame // if one was created, and otherwise break out of the inner // activation loop as we're ready to move to the next dependency if successfully_activated { backtrack_stack.extend(backtrack); break; } // We've failed to activate this dependency, oh dear! Our call to // `activate` above may have altered our `cx` local variable, so // restore it back if we've got a backtrack frame. // // If we don't have a backtrack frame then we're just using the `cx` // for error messages anyway so we can live with a little // imprecision. if let Some(b) = backtrack { cx = b.context; } } // Ok phew, that loop was a big one! If we've broken out then we've // successfully activated a candidate. Our stacks are all in place that // we're ready to move on to the next dependency that needs activation, // so loop back to the top of the function here. } Ok(cx) } /// Attempts to activate the summary `candidate` in the context `cx`. /// /// This function will pull dependency summaries from the registry provided, and /// the dependencies of the package will be determined by the `opts` provided. 
/// If `candidate` was activated, this function returns the dependency frame to /// iterate through next. fn activate( cx: &mut Context, registry: &mut RegistryQueryer<'_>, parent: Option<(&Summary, &Dependency)>, candidate: Summary, opts: &ResolveOpts, ) -> ActivateResult> { let candidate_pid = candidate.package_id(); cx.age += 1; if let Some((parent, dep)) = parent { let parent_pid = parent.package_id(); // add an edge from candidate to parent in the parents graph cx.parents .link(candidate_pid, parent_pid) // and associate dep with that edge .insert(dep.clone()); if let Some(public_dependency) = cx.public_dependency.as_mut() { public_dependency.add_edge( candidate_pid, parent_pid, dep.is_public(), cx.age, &cx.parents, ); } } let activated = cx.flag_activated(&candidate, opts, parent)?; let candidate = match registry.replacement_summary(candidate_pid) { Some(replace) => { // Note the `None` for parent here since `[replace]` is a bit wonky // and doesn't activate the same things that `[patch]` typically // does. TBH it basically cause panics in the test suite if // `parent` is passed through here and `[replace]` is otherwise // on life support so it's not critical to fix bugs anyway per se. if cx.flag_activated(replace, opts, None)? && activated { return Ok(None); } trace!( "activating {} (replacing {})", replace.package_id(), candidate_pid ); replace.clone() } None => { if activated { return Ok(None); } trace!("activating {}", candidate_pid); candidate } }; let now = Instant::now(); let (used_features, deps) = &*registry.build_deps(cx, parent.map(|p| p.0.package_id()), &candidate, opts)?; // Record what list of features is active for this package. 
if !used_features.is_empty() { Rc::make_mut( cx.resolve_features .entry(candidate.package_id()) .or_insert_with(Rc::default), ) .extend(used_features); } let frame = DepsFrame { parent: candidate, just_for_error_messages: false, remaining_siblings: RcVecIter::new(Rc::clone(deps)), }; Ok(Some((frame, now.elapsed()))) } #[derive(Clone)] struct BacktrackFrame { context: Context, remaining_deps: RemainingDeps, remaining_candidates: RemainingCandidates, parent: Summary, dep: Dependency, features: FeaturesSet, conflicting_activations: ConflictMap, } /// A helper "iterator" used to extract candidates within a current `Context` of /// a dependency graph. /// /// This struct doesn't literally implement the `Iterator` trait (requires a few /// more inputs) but in general acts like one. Each `RemainingCandidates` is /// created with a list of candidates to choose from. When attempting to iterate /// over the list of candidates only *valid* candidates are returned. Validity /// is defined within a `Context`. /// /// Candidates passed to `new` may not be returned from `next` as they could be /// filtered out, and as they are filtered the causes will be added to `conflicting_prev_active`. #[derive(Clone)] struct RemainingCandidates { remaining: RcVecIter, // This is an inlined peekable generator has_another: Option, } impl RemainingCandidates { fn new(candidates: &Rc>) -> RemainingCandidates { RemainingCandidates { remaining: RcVecIter::new(Rc::clone(candidates)), has_another: None, } } /// Attempts to find another candidate to check from this list. /// /// This method will attempt to move this iterator forward, returning a /// candidate that's possible to activate. The `cx` argument is the current /// context which determines validity for candidates returned, and the `dep` /// is the dependency listing that we're activating for. /// /// If successful a `(Candidate, bool)` pair will be returned. 
The /// `Candidate` is the candidate to attempt to activate, and the `bool` is /// an indicator of whether there are remaining candidates to try of if /// we've reached the end of iteration. /// /// If we've reached the end of the iterator here then `Err` will be /// returned. The error will contain a map of package ID to conflict reason, /// where each package ID caused a candidate to be filtered out from the /// original list for the reason listed. fn next( &mut self, conflicting_prev_active: &mut ConflictMap, cx: &Context, dep: &Dependency, parent: PackageId, ) -> Option<(Summary, bool)> { for b in self.remaining.by_ref() { let b_id = b.package_id(); // The `links` key in the manifest dictates that there's only one // package in a dependency graph, globally, with that particular // `links` key. If this candidate links to something that's already // linked to by a different package then we've gotta skip this. if let Some(link) = b.links() { if let Some(&a) = cx.links.get(&link) { if a != b_id { conflicting_prev_active .entry(a) .or_insert_with(|| ConflictReason::Links(link)); continue; } } } // Otherwise the condition for being a valid candidate relies on // semver. Cargo dictates that you can't duplicate multiple // semver-compatible versions of a crate. For example we can't // simultaneously activate `foo 1.0.2` and `foo 1.2.0`. We can, // however, activate `1.0.2` and `2.0.0`. // // Here we throw out our candidate if it's *compatible*, yet not // equal, to all previously activated versions. if let Some((a, _)) = cx.activations.get(&b_id.as_activations_key()) { if *a != b { conflicting_prev_active .entry(a.package_id()) .or_insert(ConflictReason::Semver); continue; } } // We may still have to reject do to a public dependency conflict. If one of any of our // ancestors that can see us already knows about a different crate with this name then // we have to reject this candidate. 
Additionally this candidate may already have been // activated and have public dependants of its own, // all of witch also need to be checked the same way. if let Some(public_dependency) = cx.public_dependency.as_ref() { if let Err(((c1, c2), c3)) = public_dependency.can_add_edge(b_id, parent, dep.is_public(), &cx.parents) { conflicting_prev_active.insert(c1.0, c1.1); conflicting_prev_active.insert(c2.0, c2.1); if let Some(c3) = c3 { conflicting_prev_active.insert(c3.0, c3.1); } continue; } } // Well if we made it this far then we've got a valid dependency. We // want this iterator to be inherently "peekable" so we don't // necessarily return the item just yet. Instead we stash it away to // get returned later, and if we replaced something then that was // actually the candidate to try first so we return that. if let Some(r) = mem::replace(&mut self.has_another, Some(b)) { return Some((r, true)); } } // Alright we've entirely exhausted our list of candidates. If we've got // something stashed away return that here (also indicating that there's // nothing else). self.has_another.take().map(|r| (r, false)) } } /// Attempts to find a new conflict that allows a `find_candidate` feather then the input one. /// It will add the new conflict to the cache if one is found. /// /// Panics if the input conflict is not all active in `cx`. fn generalize_conflicting( cx: &Context, registry: &mut RegistryQueryer<'_>, past_conflicting_activations: &mut conflict_cache::ConflictCache, parent: &Summary, dep: &Dependency, conflicting_activations: &ConflictMap, ) -> Option { if conflicting_activations.is_empty() { return None; } // We need to determine the `ContextAge` that this `conflicting_activations` will jump to, and why. 
let (backtrack_critical_age, backtrack_critical_id) = conflicting_activations .keys() .map(|&c| (cx.is_active(c).expect("not currently active!?"), c)) .max() .unwrap(); let backtrack_critical_reason: ConflictReason = conflicting_activations[&backtrack_critical_id].clone(); if backtrack_critical_reason.is_public_dependency() { return None; } if cx .parents .is_path_from_to(&parent.package_id(), &backtrack_critical_id) { // We are a descendant of the trigger of the problem. // The best generalization of this is to let things bubble up // and let `backtrack_critical_id` figure this out. return None; } // What parents does that critical activation have for (critical_parent, critical_parents_deps) in cx.parents.edges(&backtrack_critical_id).filter(|(p, _)| { // it will only help backjump further if it is older then the critical_age cx.is_active(**p).expect("parent not currently active!?") < backtrack_critical_age }) { for critical_parents_dep in critical_parents_deps.iter() { // A dep is equivalent to one of the things it can resolve to. // Thus, if all the things it can resolve to have already ben determined // to be conflicting, then we can just say that we conflict with the parent. if let Some(others) = registry .query(critical_parents_dep) .expect("an already used dep now error!?") .expect("an already used dep now pending!?") .iter() .rev() // the last one to be tried is the least likely to be in the cache, so start with that. .map(|other| { past_conflicting_activations .find( dep, &|id| { if id == other.package_id() { // we are imagining that we used other instead Some(backtrack_critical_age) } else { cx.is_active(id) } }, Some(other.package_id()), // we only care about things that are newer then critical_age backtrack_critical_age, ) .map(|con| (other.package_id(), con)) }) .collect::>>() { let mut con = conflicting_activations.clone(); // It is always valid to combine previously inserted conflicts. 
// A, B are both known bad states each that can never be activated. // A + B is redundant but can't be activated, as if // A + B is active then A is active and we know that is not ok. for (_, other) in &others { con.extend(other.iter().map(|(&id, re)| (id, re.clone()))); } // Now that we have this combined conflict, we can do a substitution: // A dep is equivalent to one of the things it can resolve to. // So we can remove all the things that it resolves to and replace with the parent. for (other_id, _) in &others { con.remove(other_id); } con.insert(*critical_parent, backtrack_critical_reason); if cfg!(debug_assertions) { // the entire point is to find an older conflict, so let's make sure we did let new_age = con .keys() .map(|&c| cx.is_active(c).expect("not currently active!?")) .max() .unwrap(); assert!( new_age < backtrack_critical_age, "new_age {} < backtrack_critical_age {}", new_age, backtrack_critical_age ); } past_conflicting_activations.insert(dep, &con); return Some(con); } } } None } /// Looks through the states in `backtrack_stack` for dependencies with /// remaining candidates. For each one, also checks if rolling back /// could change the outcome of the failed resolution that caused backtracking /// in the first place. Namely, if we've backtracked past the parent of the /// failed dep, or any of the packages flagged as giving us trouble in /// `conflicting_activations`. /// /// Read /// For several more detailed explanations of the logic here. fn find_candidate( cx: &Context, backtrack_stack: &mut Vec, parent: &Summary, backtracked: bool, conflicting_activations: &ConflictMap, ) -> Option<(Summary, bool, BacktrackFrame)> { // When we're calling this method we know that `parent` failed to // activate. That means that some dependency failed to get resolved for // whatever reason. Normally, that means that all of those reasons // (plus maybe some extras) are listed in `conflicting_activations`. 
// // The abnormal situations are things that do not put all of the reasons in `conflicting_activations`: // If we backtracked we do not know how our `conflicting_activations` related to // the cause of that backtrack, so we do not update it. let age = if !backtracked { // we don't have abnormal situations. So we can ask `cx` for how far back we need to go. let a = cx.is_conflicting(Some(parent.package_id()), conflicting_activations); // If the `conflicting_activations` does not apply to `cx`, then something went very wrong // in building it. But we will just fall back to laboriously trying all possibilities witch // will give us the correct answer so only `assert` if there is a developer to debug it. debug_assert!(a.is_some()); a } else { None }; while let Some(mut frame) = backtrack_stack.pop() { let next = frame.remaining_candidates.next( &mut frame.conflicting_activations, &frame.context, &frame.dep, frame.parent.package_id(), ); let (candidate, has_another) = match next { Some(pair) => pair, None => continue, }; // If all members of `conflicting_activations` are still // active in this back up we know that we're guaranteed to not actually // make any progress. As a result if we hit this condition we can // completely skip this backtrack frame and move on to the next. if let Some(age) = age { if frame.context.age >= age { trace!( "{} = \"{}\" skip as not solving {}: {:?}", frame.dep.package_name(), frame.dep.version_req(), parent.package_id(), conflicting_activations ); // above we use `cx` to determine that this is still going to be conflicting. // but lets just double check. debug_assert!( frame .context .is_conflicting(Some(parent.package_id()), conflicting_activations) == Some(age) ); continue; } else { // above we use `cx` to determine that this is not going to be conflicting. // but lets just double check. 
debug_assert!(frame .context .is_conflicting(Some(parent.package_id()), conflicting_activations) .is_none()); } } return Some((candidate, has_another, frame)); } None } fn check_cycles(resolve: &Resolve) -> CargoResult<()> { // Create a simple graph representation alternative of `resolve` which has // only the edges we care about. Note that `BTree*` is used to produce // deterministic error messages here. Also note that the main reason for // this copy of the resolve graph is to avoid edges between a crate and its // dev-dependency since that doesn't count for cycles. let mut graph = BTreeMap::new(); for id in resolve.iter() { let map = graph.entry(id).or_insert_with(BTreeMap::new); for (dep_id, listings) in resolve.deps_not_replaced(id) { let transitive_dep = listings.iter().find(|d| d.is_transitive()); if let Some(transitive_dep) = transitive_dep.cloned() { map.insert(dep_id, transitive_dep.clone()); resolve .replacement(dep_id) .map(|p| map.insert(p, transitive_dep)); } } } // After we have the `graph` that we care about, perform a simple cycle // check by visiting all nodes. We visit each node at most once and we keep // track of the path through the graph as we walk it. If we walk onto the // same node twice that's a cycle. let mut checked = HashSet::new(); let mut path = Vec::new(); let mut visited = HashSet::new(); for pkg in graph.keys() { if !checked.contains(pkg) { visit(&graph, *pkg, &mut visited, &mut path, &mut checked)? } } return Ok(()); fn visit( graph: &BTreeMap>, id: PackageId, visited: &mut HashSet, path: &mut Vec, checked: &mut HashSet, ) -> CargoResult<()> { path.push(id); if !visited.insert(id) { let iter = path.iter().rev().skip(1).scan(id, |child, parent| { let dep = graph.get(parent).and_then(|adjacent| adjacent.get(child)); *child = *parent; Some((parent, dep)) }); let iter = std::iter::once((&id, None)).chain(iter); anyhow::bail!( "cyclic package dependency: package `{}` depends on itself. 
Cycle:\n{}", id, errors::describe_path(iter), ); } if checked.insert(id) { for dep in graph[&id].keys() { visit(graph, *dep, visited, path, checked)?; } } path.pop(); visited.remove(&id); Ok(()) } } /// Checks that packages are unique when written to lock file. /// /// When writing package ID's to lock file, we apply lossy encoding. In /// particular, we don't store paths of path dependencies. That means that /// *different* packages may collide in the lock file, hence this check. fn check_duplicate_pkgs_in_lockfile(resolve: &Resolve) -> CargoResult<()> { let mut unique_pkg_ids = HashMap::new(); let state = encode::EncodeState::new(resolve); for pkg_id in resolve.iter() { let encodable_pkd_id = encode::encodable_package_id(pkg_id, &state, resolve.version()); if let Some(prev_pkg_id) = unique_pkg_ids.insert(encodable_pkd_id, pkg_id) { anyhow::bail!( "package collision in the lockfile: packages {} and {} are different, \ but only one can be written to lockfile unambiguously", prev_pkg_id, pkg_id ) } } Ok(()) } cargo-0.66.0/src/cargo/core/resolver/resolve.rs000066400000000000000000000360471432416201200214030ustar00rootroot00000000000000use super::encode::Metadata; use crate::core::dependency::DepKind; use crate::core::{Dependency, PackageId, PackageIdSpec, Summary, Target}; use crate::util::errors::CargoResult; use crate::util::interning::InternedString; use crate::util::Graph; use std::borrow::Borrow; use std::collections::{HashMap, HashSet}; use std::fmt; /// Represents a fully-resolved package dependency graph. Each node in the graph /// is a package and edges represent dependencies between packages. /// /// Each instance of `Resolve` also understands the full set of features used /// for each package. pub struct Resolve { /// A graph, whose vertices are packages and edges are dependency specifications /// from `Cargo.toml`. We need a `HashSet` here because the same package /// might be present in both `[dependencies]` and `[build-dependencies]`. 
graph: Graph>, /// Replacements from the `[replace]` table. replacements: HashMap, /// Inverted version of `replacements`. reverse_replacements: HashMap, /// Features enabled for a given package. features: HashMap>, /// Checksum for each package. A SHA256 hash of the `.crate` file used to /// validate the correct crate file is used. This is `None` for sources /// that do not use `.crate` files, like path or git dependencies. checksums: HashMap>, /// "Unknown" metadata. This is a collection of extra, unrecognized data /// found in the `[metadata]` section of `Cargo.lock`, preserved for /// forwards compatibility. metadata: Metadata, /// `[patch]` entries that did not match anything, preserved in /// `Cargo.lock` as the `[[patch.unused]]` table array. Tracking unused /// patches helps prevent Cargo from being forced to re-update the /// registry every time it runs, and keeps the resolve in a locked state /// so it doesn't re-resolve the unused entries. unused_patches: Vec, /// A map from packages to a set of their public dependencies public_dependencies: HashMap>, /// Version of the `Cargo.lock` format, see /// `cargo::core::resolver::encode` for more. version: ResolveVersion, summaries: HashMap, } /// A version to indicate how a `Cargo.lock` should be serialized. Currently /// V2 is the default when creating a new lockfile. If a V1 lockfile already /// exists, it will stay as V1. /// /// It's theorized that we can add more here over time to track larger changes /// to the `Cargo.lock` format, but we've yet to see how that strategy pans out. #[derive(PartialEq, Eq, Clone, Copy, Debug, PartialOrd, Ord)] pub enum ResolveVersion { /// Historical baseline for when this abstraction was added. V1, /// A more compact format, more amenable to avoiding source-control merge /// conflicts. The `dependencies` arrays are compressed and checksums are /// listed inline. Introduced in 2019 in version 1.38. New lockfiles use /// V2 by default starting in 1.41. 
V2, /// A format that explicitly lists a `version` at the top of the file as /// well as changing how git dependencies are encoded. Dependencies with /// `branch = "master"` are no longer encoded the same way as those without /// branch specifiers. V3, } impl Resolve { pub fn new( graph: Graph>, replacements: HashMap, features: HashMap>, checksums: HashMap>, metadata: Metadata, unused_patches: Vec, version: ResolveVersion, summaries: HashMap, ) -> Resolve { let reverse_replacements = replacements.iter().map(|(&p, &r)| (r, p)).collect(); let public_dependencies = graph .iter() .map(|p| { let public_deps = graph .edges(p) .filter(|(_, deps)| { deps.iter() .any(|d| d.kind() == DepKind::Normal && d.is_public()) }) .map(|(dep_package, _)| *dep_package) .collect::>(); (*p, public_deps) }) .collect(); Resolve { graph, replacements, features, checksums, metadata, unused_patches, reverse_replacements, public_dependencies, version, summaries, } } /// Resolves one of the paths from the given dependent package up to /// the root. pub fn path_to_top<'a>( &'a self, pkg: &'a PackageId, ) -> Vec<(&'a PackageId, Option<&'a HashSet>)> { self.graph.path_to_top(pkg) } pub fn register_used_patches(&mut self, patches: &[Summary]) { for summary in patches { if !self.graph.contains(&summary.package_id()) { self.unused_patches.push(summary.package_id()) }; } } pub fn merge_from(&mut self, previous: &Resolve) -> CargoResult<()> { // Given a previous instance of resolve, it should be forbidden to ever // have a checksums which *differ*. If the same package ID has differing // checksums, then something has gone wrong such as: // // * Something got seriously corrupted // * A "mirror" isn't actually a mirror as some changes were made // * A replacement source wasn't actually a replacement, some changes // were made // // In all of these cases, we want to report an error to indicate that // something is awry. Normal execution (esp just using crates.io) should // never run into this. 
for (id, cksum) in previous.checksums.iter() { if let Some(mine) = self.checksums.get(id) { if mine == cksum { continue; } // If the previous checksum wasn't calculated, the current // checksum is `Some`. This may indicate that a source was // erroneously replaced or was replaced with something that // desires stronger checksum guarantees than can be afforded // elsewhere. if cksum.is_none() { anyhow::bail!( "\ checksum for `{}` was not previously calculated, but a checksum could now \ be calculated this could be indicative of a few possible situations: * the source `{}` did not previously support checksums, but was replaced with one that does * newer Cargo implementations know how to checksum this source, but this older implementation does not * the lock file is corrupt ", id, id.source_id() ) // If our checksum hasn't been calculated, then it could mean // that future Cargo figured out how to checksum something or // more realistically we were overridden with a source that does // not have checksums. } else if mine.is_none() { anyhow::bail!( "\ checksum for `{}` could not be calculated, but a checksum is listed in \ the existing lock file this could be indicative of a few possible situations: * the source `{}` supports checksums, but was replaced with one that doesn't * the lock file is corrupt unable to verify that `{0}` is the same as when the lockfile was generated ", id, id.source_id() ) // If the checksums aren't equal, and neither is None, then they // must both be Some, in which case the checksum now differs. // That's quite bad! } else { anyhow::bail!( "\ checksum for `{}` changed between lock files this could be indicative of a few possible errors: * the lock file is corrupt * a replacement source in use (e.g., a mirror) returned a different checksum * the source itself may be corrupt in one way or another unable to verify that `{0}` is the same as when the lockfile was generated ", id ); } } } // Be sure to just copy over any unknown metadata. 
self.metadata = previous.metadata.clone(); // Preserve the lockfile encoding where possible to avoid lockfile churn self.version = previous.version; Ok(()) } pub fn contains(&self, k: &Q) -> bool where PackageId: Borrow, Q: Ord + Eq, { self.graph.contains(k) } pub fn sort(&self) -> Vec { self.graph.sort() } pub fn iter(&self) -> impl Iterator + '_ { self.graph.iter().cloned() } pub fn deps(&self, pkg: PackageId) -> impl Iterator)> { self.deps_not_replaced(pkg) .map(move |(id, deps)| (self.replacement(id).unwrap_or(id), deps)) } pub fn deps_not_replaced( &self, pkg: PackageId, ) -> impl Iterator)> { self.graph.edges(&pkg).map(|(id, deps)| (*id, deps)) } pub fn replacement(&self, pkg: PackageId) -> Option { self.replacements.get(&pkg).cloned() } pub fn replacements(&self) -> &HashMap { &self.replacements } pub fn features(&self, pkg: PackageId) -> &[InternedString] { self.features.get(&pkg).map(|v| &**v).unwrap_or(&[]) } /// This is only here for legacy support, it will be removed when /// switching to the new feature resolver. 
pub fn features_clone(&self) -> HashMap> { self.features.clone() } pub fn is_public_dep(&self, pkg: PackageId, dep: PackageId) -> bool { self.public_dependencies .get(&pkg) .map(|public_deps| public_deps.contains(&dep)) .unwrap_or_else(|| panic!("Unknown dependency {:?} for package {:?}", dep, pkg)) } pub fn query(&self, spec: &str) -> CargoResult { PackageIdSpec::query_str(spec, self.iter()) } pub fn specs_to_ids(&self, specs: &[PackageIdSpec]) -> CargoResult> { specs.iter().map(|s| s.query(self.iter())).collect() } pub fn unused_patches(&self) -> &[PackageId] { &self.unused_patches } pub fn checksums(&self) -> &HashMap> { &self.checksums } pub fn metadata(&self) -> &Metadata { &self.metadata } pub fn extern_crate_name_and_dep_name( &self, from: PackageId, to: PackageId, to_target: &Target, ) -> CargoResult<(InternedString, Option)> { let empty_set: HashSet = HashSet::new(); let deps = if from == to { &empty_set } else { self.dependencies_listed(from, to) }; let target_crate_name = || (to_target.crate_name(), None); let mut name_pairs = deps.iter().map(|d| { d.explicit_name_in_toml() .map(|s| (s.as_str().replace("-", "_"), Some(s))) .unwrap_or_else(target_crate_name) }); let (extern_crate_name, dep_name) = name_pairs.next().unwrap_or_else(target_crate_name); for (n, _) in name_pairs { anyhow::ensure!( n == extern_crate_name, "the crate `{}` depends on crate `{}` multiple times with different names", from, to, ); } Ok((extern_crate_name.into(), dep_name)) } fn dependencies_listed(&self, from: PackageId, to: PackageId) -> &HashSet { // We've got a dependency on `from` to `to`, but this dependency edge // may be affected by [replace]. If the `to` package is listed as the // target of a replacement (aka the key of a reverse replacement map) // then we try to find our dependency edge through that. If that fails // then we go down below assuming it's not replaced. 
// // Note that we don't treat `from` as if it's been replaced because // that's where the dependency originates from, and we only replace // targets of dependencies not the originator. if let Some(replace) = self.reverse_replacements.get(&to) { if let Some(deps) = self.graph.edge(&from, replace) { return deps; } } match self.graph.edge(&from, &to) { Some(ret) => ret, None => panic!("no Dependency listed for `{}` => `{}`", from, to), } } /// Returns the version of the encoding that's being used for this lock /// file. pub fn version(&self) -> ResolveVersion { self.version } pub fn set_version(&mut self, version: ResolveVersion) { self.version = version; } pub fn summary(&self, pkg_id: PackageId) -> &Summary { &self.summaries[&pkg_id] } } impl PartialEq for Resolve { fn eq(&self, other: &Resolve) -> bool { macro_rules! compare { ($($fields:ident)* | $($ignored:ident)*) => { let Resolve { $($fields,)* $($ignored: _,)* } = self; $($fields == &other.$fields)&&* } } compare! { // fields to compare graph replacements reverse_replacements features checksums metadata unused_patches public_dependencies summaries | // fields to ignore version } } } impl fmt::Debug for Resolve { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { writeln!(fmt, "graph: {:?}", self.graph)?; writeln!(fmt, "\nfeatures: {{")?; for (pkg, features) in &self.features { writeln!(fmt, " {}: {:?}", pkg, features)?; } write!(fmt, "}}") } } impl Default for ResolveVersion { /// The default way to encode new or updated `Cargo.lock` files. /// /// It's important that if a new version of `ResolveVersion` is added that /// this is not updated until *at least* the support for the version is in /// the stable release of Rust. /// /// This resolve version will be used for all new lock files, for example /// those generated by `cargo update` (update everything) or building after /// a `cargo new` (where no lock file previously existed). 
This is also used /// for *updated* lock files such as when a dependency is added or when a /// version requirement changes. In this situation Cargo's updating the lock /// file anyway so it takes the opportunity to bump the lock file version /// forward. fn default() -> ResolveVersion { ResolveVersion::V3 } } cargo-0.66.0/src/cargo/core/resolver/types.rs000066400000000000000000000317361432416201200210700ustar00rootroot00000000000000use super::features::{CliFeatures, RequestedFeatures}; use crate::core::{Dependency, PackageId, Summary}; use crate::util::errors::CargoResult; use crate::util::interning::InternedString; use crate::util::Config; use std::cmp::Ordering; use std::collections::{BTreeMap, BTreeSet}; use std::ops::Range; use std::rc::Rc; use std::time::{Duration, Instant}; pub struct ResolverProgress { ticks: u16, start: Instant, time_to_print: Duration, printed: bool, deps_time: Duration, #[cfg(debug_assertions)] slow_cpu_multiplier: u64, } impl ResolverProgress { pub fn new() -> ResolverProgress { ResolverProgress { ticks: 0, start: Instant::now(), time_to_print: Duration::from_millis(500), printed: false, deps_time: Duration::new(0, 0), // Some CI setups are much slower then the equipment used by Cargo itself. // Architectures that do not have a modern processor, hardware emulation, etc. // In the test code we have `slow_cpu_multiplier`, but that is not accessible here. #[cfg(debug_assertions)] slow_cpu_multiplier: std::env::var("CARGO_TEST_SLOW_CPU_MULTIPLIER") .ok() .and_then(|m| m.parse().ok()) .unwrap_or(1), } } pub fn shell_status(&mut self, config: Option<&Config>) -> CargoResult<()> { // If we spend a lot of time here (we shouldn't in most cases) then give // a bit of a visual indicator as to what we're doing. Only enable this // when stderr is a tty (a human is likely to be watching) to ensure we // get deterministic output otherwise when observed by tools. // // Also note that we hit this loop a lot, so it's fairly performance // sensitive. 
As a result try to defer a possibly expensive operation // like `Instant::now` by only checking every N iterations of this loop // to amortize the cost of the current time lookup. self.ticks += 1; if let Some(config) = config { if config.shell().is_err_tty() && !self.printed && self.ticks % 1000 == 0 && self.start.elapsed() - self.deps_time > self.time_to_print { self.printed = true; config.shell().status("Resolving", "dependency graph...")?; } } #[cfg(debug_assertions)] { // The largest test in our suite takes less then 5000 ticks // with all the algorithm improvements. // If any of them are removed then it takes more than I am willing to measure. // So lets fail the test fast if we have ben running for two long. assert!( self.ticks < 50_000, "got to 50_000 ticks in {:?}", self.start.elapsed() ); // The largest test in our suite takes less then 30 sec // with all the improvements to how fast a tick can go. // If any of them are removed then it takes more than I am willing to measure. // So lets fail the test fast if we have ben running for two long. if self.ticks % 1000 == 0 { assert!( self.start.elapsed() - self.deps_time < Duration::from_secs(self.slow_cpu_multiplier * 90) ); } } Ok(()) } pub fn elapsed(&mut self, dur: Duration) { self.deps_time += dur; } } /// The preferred way to store the set of activated features for a package. /// This is sorted so that it impls Hash, and owns its contents, /// needed so it can be part of the key for caching in the `DepsCache`. /// It is also cloned often as part of `Context`, hence the `RC`. /// `im-rs::OrdSet` was slower of small sets like this, /// but this can change with improvements to std, im, or llvm. /// Using a consistent type for this allows us to use the highly /// optimized comparison operators like `is_subset` at the interfaces. pub type FeaturesSet = Rc>; /// Resolver behavior, used to opt-in to new behavior that is /// backwards-incompatible via the `resolver` field in the manifest. 
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)] pub enum ResolveBehavior { /// V1 is the original resolver behavior. V1, /// V2 adds the new feature resolver. V2, } impl ResolveBehavior { pub fn from_manifest(resolver: &str) -> CargoResult { match resolver { "1" => Ok(ResolveBehavior::V1), "2" => Ok(ResolveBehavior::V2), s => anyhow::bail!( "`resolver` setting `{}` is not valid, valid options are \"1\" or \"2\"", s ), } } pub fn to_manifest(&self) -> String { match self { ResolveBehavior::V1 => "1", ResolveBehavior::V2 => "2", } .to_owned() } } /// Options for how the resolve should work. #[derive(Clone, Debug, Eq, PartialEq, Hash)] pub struct ResolveOpts { /// Whether or not dev-dependencies should be included. /// /// This may be set to `false` by things like `cargo install` or `-Z avoid-dev-deps`. /// It also gets set to `false` when activating dependencies in the resolver. pub dev_deps: bool, /// Set of features requested on the command-line. pub features: RequestedFeatures, } impl ResolveOpts { /// Creates a ResolveOpts that resolves everything. pub fn everything() -> ResolveOpts { ResolveOpts { dev_deps: true, features: RequestedFeatures::CliFeatures(CliFeatures::new_all(true)), } } pub fn new(dev_deps: bool, features: RequestedFeatures) -> ResolveOpts { ResolveOpts { dev_deps, features } } } #[derive(Clone)] pub struct DepsFrame { pub parent: Summary, pub just_for_error_messages: bool, pub remaining_siblings: RcVecIter, } impl DepsFrame { /// Returns the least number of candidates that any of this frame's siblings /// has. /// /// The `remaining_siblings` array is already sorted with the smallest /// number of candidates at the front, so we just return the number of /// candidates in that entry. 
fn min_candidates(&self) -> usize { self.remaining_siblings .peek() .map(|(_, (_, candidates, _))| candidates.len()) .unwrap_or(0) } pub fn flatten(&self) -> impl Iterator + '_ { self.remaining_siblings .clone() .map(move |(d, _, _)| (self.parent.package_id(), d)) } } impl PartialEq for DepsFrame { fn eq(&self, other: &DepsFrame) -> bool { self.just_for_error_messages == other.just_for_error_messages && self.min_candidates() == other.min_candidates() } } impl Eq for DepsFrame {} impl PartialOrd for DepsFrame { fn partial_cmp(&self, other: &DepsFrame) -> Option { Some(self.cmp(other)) } } impl Ord for DepsFrame { fn cmp(&self, other: &DepsFrame) -> Ordering { self.just_for_error_messages .cmp(&other.just_for_error_messages) .reverse() .then_with(|| self.min_candidates().cmp(&other.min_candidates())) } } /// Note that an `OrdSet` is used for the remaining dependencies that need /// activation. This set is sorted by how many candidates each dependency has. /// /// This helps us get through super constrained portions of the dependency /// graph quickly and hopefully lock down what later larger dependencies can /// use (those with more candidates). #[derive(Clone)] pub struct RemainingDeps { /// a monotonic counter, increased for each new insertion. time: u32, /// the data is augmented by the insertion time. /// This insures that no two items will cmp eq. /// Forcing the OrdSet into a multi set. 
data: im_rc::OrdSet<(DepsFrame, u32)>, } impl RemainingDeps { pub fn new() -> RemainingDeps { RemainingDeps { time: 0, data: im_rc::OrdSet::new(), } } pub fn push(&mut self, x: DepsFrame) { let insertion_time = self.time; self.data.insert((x, insertion_time)); self.time += 1; } pub fn pop_most_constrained(&mut self) -> Option<(bool, (Summary, DepInfo))> { while let Some((mut deps_frame, insertion_time)) = self.data.remove_min() { let just_here_for_the_error_messages = deps_frame.just_for_error_messages; // Figure out what our next dependency to activate is, and if nothing is // listed then we're entirely done with this frame (yay!) and we can // move on to the next frame. if let Some(sibling) = deps_frame.remaining_siblings.next() { let parent = Summary::clone(&deps_frame.parent); self.data.insert((deps_frame, insertion_time)); return Some((just_here_for_the_error_messages, (parent, sibling))); } } None } pub fn iter(&mut self) -> impl Iterator + '_ { self.data.iter().flat_map(|(other, _)| other.flatten()) } } /// Information about the dependencies for a crate, a tuple of: /// /// (dependency info, candidates, features activated) pub type DepInfo = (Dependency, Rc>, FeaturesSet); /// All possible reasons that a package might fail to activate. /// /// We maintain a list of conflicts for error reporting as well as backtracking /// purposes. Each reason here is why candidates may be rejected or why we may /// fail to resolve a dependency. #[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq)] pub enum ConflictReason { /// There was a semver conflict, for example we tried to activate a package /// 1.0.2 but 1.1.0 was already activated (aka a compatible semver version /// is already activated) Semver, /// The `links` key is being violated. For example one crate in the /// dependency graph has `links = "foo"` but this crate also had that, and /// we're only allowed one per dependency graph. 
Links(InternedString), /// A dependency listed features that weren't actually available on the /// candidate. For example we tried to activate feature `foo` but the /// candidate we're activating didn't actually have the feature `foo`. MissingFeatures(String), /// A dependency listed a feature that ended up being a required dependency. /// For example we tried to activate feature `foo` but the /// candidate we're activating didn't actually have the feature `foo` /// it had a dependency `foo` instead. RequiredDependencyAsFeature(InternedString), /// A dependency listed a feature for an optional dependency, but that /// optional dependency is "hidden" using namespaced `dep:` syntax. NonImplicitDependencyAsFeature(InternedString), // TODO: needs more info for `activation_error` // TODO: needs more info for `find_candidate` /// pub dep error PublicDependency(PackageId), PubliclyExports(PackageId), } impl ConflictReason { pub fn is_links(&self) -> bool { matches!(self, ConflictReason::Links(_)) } pub fn is_missing_features(&self) -> bool { matches!(self, ConflictReason::MissingFeatures(_)) } pub fn is_required_dependency_as_features(&self) -> bool { matches!(self, ConflictReason::RequiredDependencyAsFeature(_)) } pub fn is_public_dependency(&self) -> bool { matches!( self, ConflictReason::PublicDependency(_) | ConflictReason::PubliclyExports(_) ) } } /// A list of packages that have gotten in the way of resolving a dependency. /// If resolving a dependency fails then this represents an incompatibility, /// that dependency will never be resolve while all of these packages are active. /// This is useless if the packages can't be simultaneously activated for other reasons. 
pub type ConflictMap = BTreeMap; pub struct RcVecIter { vec: Rc>, rest: Range, } impl RcVecIter { pub fn new(vec: Rc>) -> RcVecIter { RcVecIter { rest: 0..vec.len(), vec, } } fn peek(&self) -> Option<(usize, &T)> { self.rest .clone() .next() .and_then(|i| self.vec.get(i).map(|val| (i, &*val))) } } // Not derived to avoid `T: Clone` impl Clone for RcVecIter { fn clone(&self) -> RcVecIter { RcVecIter { vec: self.vec.clone(), rest: self.rest.clone(), } } } impl Iterator for RcVecIter where T: Clone, { type Item = T; fn next(&mut self) -> Option { self.rest.next().and_then(|i| self.vec.get(i).cloned()) } fn size_hint(&self) -> (usize, Option) { // rest is a std::ops::Range, which is an ExactSizeIterator. self.rest.size_hint() } } impl ExactSizeIterator for RcVecIter {} cargo-0.66.0/src/cargo/core/resolver/version_prefs.rs000066400000000000000000000142221432416201200225770ustar00rootroot00000000000000//! This module implements support for preferring some versions of a package //! over other versions. use std::cmp::Ordering; use std::collections::{HashMap, HashSet}; use crate::core::{Dependency, PackageId, Summary}; use crate::util::interning::InternedString; /// A collection of preferences for particular package versions. /// /// This is built up with [`Self::prefer_package_id`] and [`Self::prefer_dependency`], then used to sort the set of /// summaries for a package during resolution via [`Self::sort_summaries`]. /// /// As written, a version is either "preferred" or "not preferred". Later extensions may /// introduce more granular preferences. #[derive(Default)] pub struct VersionPreferences { try_to_use: HashSet, prefer_patch_deps: HashMap>, } pub enum VersionOrdering { MaximumVersionsFirst, MinimumVersionsFirst, } impl VersionPreferences { /// Indicate that the given package (specified as a [`PackageId`]) should be preferred. 
pub fn prefer_package_id(&mut self, pkg_id: PackageId) { self.try_to_use.insert(pkg_id); } /// Indicate that the given package (specified as a [`Dependency`]) should be preferred. pub fn prefer_dependency(&mut self, dep: Dependency) { self.prefer_patch_deps .entry(dep.package_name()) .or_insert_with(HashSet::new) .insert(dep); } /// Sort the given vector of summaries in-place, with all summaries presumed to be for /// the same package. Preferred versions appear first in the result, sorted by /// `version_ordering`, followed by non-preferred versions sorted the same way. pub fn sort_summaries(&self, summaries: &mut Vec, version_ordering: VersionOrdering) { let should_prefer = |pkg_id: &PackageId| { self.try_to_use.contains(pkg_id) || self .prefer_patch_deps .get(&pkg_id.name()) .map(|deps| deps.iter().any(|d| d.matches_id(*pkg_id))) .unwrap_or(false) }; summaries.sort_unstable_by(|a, b| { let prefer_a = should_prefer(&a.package_id()); let prefer_b = should_prefer(&b.package_id()); let previous_cmp = prefer_a.cmp(&prefer_b).reverse(); match previous_cmp { Ordering::Equal => { let cmp = a.version().cmp(b.version()); match version_ordering { VersionOrdering::MaximumVersionsFirst => cmp.reverse(), VersionOrdering::MinimumVersionsFirst => cmp, } } _ => previous_cmp, } }); } } #[cfg(test)] mod test { use super::*; use crate::core::SourceId; use crate::util::Config; use std::collections::BTreeMap; fn pkgid(name: &str, version: &str) -> PackageId { let src_id = SourceId::from_url("registry+https://github.com/rust-lang/crates.io-index").unwrap(); PackageId::new(name, version, src_id).unwrap() } fn dep(name: &str, version: &str) -> Dependency { let src_id = SourceId::from_url("registry+https://github.com/rust-lang/crates.io-index").unwrap(); Dependency::parse(name, Some(version), src_id).unwrap() } fn summ(name: &str, version: &str) -> Summary { let pkg_id = pkgid(name, version); let config = Config::default().unwrap(); let features = BTreeMap::new(); Summary::new(&config, 
pkg_id, Vec::new(), &features, None::<&String>).unwrap() } fn describe(summaries: &Vec) -> String { let strs: Vec = summaries .iter() .map(|summary| format!("{}/{}", summary.name(), summary.version())) .collect(); strs.join(", ") } #[test] fn test_prefer_package_id() { let mut vp = VersionPreferences::default(); vp.prefer_package_id(pkgid("foo", "1.2.3")); let mut summaries = vec![ summ("foo", "1.2.4"), summ("foo", "1.2.3"), summ("foo", "1.1.0"), summ("foo", "1.0.9"), ]; vp.sort_summaries(&mut summaries, VersionOrdering::MaximumVersionsFirst); assert_eq!( describe(&summaries), "foo/1.2.3, foo/1.2.4, foo/1.1.0, foo/1.0.9".to_string() ); vp.sort_summaries(&mut summaries, VersionOrdering::MinimumVersionsFirst); assert_eq!( describe(&summaries), "foo/1.2.3, foo/1.0.9, foo/1.1.0, foo/1.2.4".to_string() ); } #[test] fn test_prefer_dependency() { let mut vp = VersionPreferences::default(); vp.prefer_dependency(dep("foo", "=1.2.3")); let mut summaries = vec![ summ("foo", "1.2.4"), summ("foo", "1.2.3"), summ("foo", "1.1.0"), summ("foo", "1.0.9"), ]; vp.sort_summaries(&mut summaries, VersionOrdering::MaximumVersionsFirst); assert_eq!( describe(&summaries), "foo/1.2.3, foo/1.2.4, foo/1.1.0, foo/1.0.9".to_string() ); vp.sort_summaries(&mut summaries, VersionOrdering::MinimumVersionsFirst); assert_eq!( describe(&summaries), "foo/1.2.3, foo/1.0.9, foo/1.1.0, foo/1.2.4".to_string() ); } #[test] fn test_prefer_both() { let mut vp = VersionPreferences::default(); vp.prefer_package_id(pkgid("foo", "1.2.3")); vp.prefer_dependency(dep("foo", "=1.1.0")); let mut summaries = vec![ summ("foo", "1.2.4"), summ("foo", "1.2.3"), summ("foo", "1.1.0"), summ("foo", "1.0.9"), ]; vp.sort_summaries(&mut summaries, VersionOrdering::MaximumVersionsFirst); assert_eq!( describe(&summaries), "foo/1.2.3, foo/1.1.0, foo/1.2.4, foo/1.0.9".to_string() ); vp.sort_summaries(&mut summaries, VersionOrdering::MinimumVersionsFirst); assert_eq!( describe(&summaries), "foo/1.1.0, foo/1.2.3, foo/1.0.9, 
foo/1.2.4".to_string() ); } } cargo-0.66.0/src/cargo/core/shell.rs000066400000000000000000000456451432416201200171760ustar00rootroot00000000000000use std::fmt; use std::io::prelude::*; use termcolor::Color::{Cyan, Green, Red, Yellow}; use termcolor::{self, Color, ColorSpec, StandardStream, WriteColor}; use crate::util::errors::CargoResult; pub enum TtyWidth { NoTty, Known(usize), Guess(usize), } impl TtyWidth { /// Returns the width provided with `-Z terminal-width` to rustc to truncate diagnostics with /// long lines. pub fn diagnostic_terminal_width(&self) -> Option { match *self { TtyWidth::NoTty | TtyWidth::Guess(_) => None, TtyWidth::Known(width) => Some(width), } } /// Returns the width used by progress bars for the tty. pub fn progress_max_width(&self) -> Option { match *self { TtyWidth::NoTty => None, TtyWidth::Known(width) | TtyWidth::Guess(width) => Some(width), } } } /// The requested verbosity of output. #[derive(Debug, Clone, Copy, PartialEq)] pub enum Verbosity { Verbose, Normal, Quiet, } /// An abstraction around console output that remembers preferences for output /// verbosity and color. pub struct Shell { /// Wrapper around stdout/stderr. This helps with supporting sending /// output to a memory buffer which is useful for tests. output: ShellOut, /// How verbose messages should be. verbosity: Verbosity, /// Flag that indicates the current line needs to be cleared before /// printing. Used when a progress bar is currently displayed. needs_clear: bool, } impl fmt::Debug for Shell { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self.output { ShellOut::Write(_) => f .debug_struct("Shell") .field("verbosity", &self.verbosity) .finish(), ShellOut::Stream { color_choice, .. 
} => f .debug_struct("Shell") .field("verbosity", &self.verbosity) .field("color_choice", &color_choice) .finish(), } } } /// A `Write`able object, either with or without color support enum ShellOut { /// A plain write object without color support Write(Box), /// Color-enabled stdio, with information on whether color should be used Stream { stdout: StandardStream, stderr: StandardStream, stderr_tty: bool, color_choice: ColorChoice, }, } /// Whether messages should use color output #[derive(Debug, PartialEq, Clone, Copy)] pub enum ColorChoice { /// Force color output Always, /// Force disable color output Never, /// Intelligently guess whether to use color output CargoAuto, } impl Shell { /// Creates a new shell (color choice and verbosity), defaulting to 'auto' color and verbose /// output. pub fn new() -> Shell { let auto_clr = ColorChoice::CargoAuto; Shell { output: ShellOut::Stream { stdout: StandardStream::stdout( auto_clr.to_termcolor_color_choice(atty::Stream::Stdout), ), stderr: StandardStream::stderr( auto_clr.to_termcolor_color_choice(atty::Stream::Stderr), ), color_choice: ColorChoice::CargoAuto, stderr_tty: atty::is(atty::Stream::Stderr), }, verbosity: Verbosity::Verbose, needs_clear: false, } } /// Creates a shell from a plain writable object, with no color, and max verbosity. pub fn from_write(out: Box) -> Shell { Shell { output: ShellOut::Write(out), verbosity: Verbosity::Verbose, needs_clear: false, } } /// Prints a message, where the status will have `color` color, and can be justified. The /// messages follows without color. fn print( &mut self, status: &dyn fmt::Display, message: Option<&dyn fmt::Display>, color: Color, justified: bool, ) -> CargoResult<()> { match self.verbosity { Verbosity::Quiet => Ok(()), _ => { if self.needs_clear { self.err_erase_line(); } self.output .message_stderr(status, message, color, justified) } } } /// Sets whether the next print should clear the current line. 
pub fn set_needs_clear(&mut self, needs_clear: bool) { self.needs_clear = needs_clear; } /// Returns `true` if the `needs_clear` flag is unset. pub fn is_cleared(&self) -> bool { !self.needs_clear } /// Returns the width of the terminal in spaces, if any. pub fn err_width(&self) -> TtyWidth { match self.output { ShellOut::Stream { stderr_tty: true, .. } => imp::stderr_width(), _ => TtyWidth::NoTty, } } /// Returns `true` if stderr is a tty. pub fn is_err_tty(&self) -> bool { match self.output { ShellOut::Stream { stderr_tty, .. } => stderr_tty, _ => false, } } /// Gets a reference to the underlying stdout writer. pub fn out(&mut self) -> &mut dyn Write { if self.needs_clear { self.err_erase_line(); } self.output.stdout() } /// Gets a reference to the underlying stderr writer. pub fn err(&mut self) -> &mut dyn Write { if self.needs_clear { self.err_erase_line(); } self.output.stderr() } /// Erase from cursor to end of line. pub fn err_erase_line(&mut self) { if self.err_supports_color() { imp::err_erase_line(self); self.needs_clear = false; } } /// Shortcut to right-align and color green a status message. pub fn status(&mut self, status: T, message: U) -> CargoResult<()> where T: fmt::Display, U: fmt::Display, { self.print(&status, Some(&message), Green, true) } pub fn status_header(&mut self, status: T) -> CargoResult<()> where T: fmt::Display, { self.print(&status, None, Cyan, true) } /// Shortcut to right-align a status message. pub fn status_with_color( &mut self, status: T, message: U, color: Color, ) -> CargoResult<()> where T: fmt::Display, U: fmt::Display, { self.print(&status, Some(&message), color, true) } /// Runs the callback only if we are in verbose mode. pub fn verbose(&mut self, mut callback: F) -> CargoResult<()> where F: FnMut(&mut Shell) -> CargoResult<()>, { match self.verbosity { Verbosity::Verbose => callback(self), _ => Ok(()), } } /// Runs the callback if we are not in verbose mode. 
pub fn concise(&mut self, mut callback: F) -> CargoResult<()> where F: FnMut(&mut Shell) -> CargoResult<()>, { match self.verbosity { Verbosity::Verbose => Ok(()), _ => callback(self), } } /// Prints a red 'error' message. pub fn error(&mut self, message: T) -> CargoResult<()> { if self.needs_clear { self.err_erase_line(); } self.output .message_stderr(&"error", Some(&message), Red, false) } /// Prints an amber 'warning' message. pub fn warn(&mut self, message: T) -> CargoResult<()> { match self.verbosity { Verbosity::Quiet => Ok(()), _ => self.print(&"warning", Some(&message), Yellow, false), } } /// Prints a cyan 'note' message. pub fn note(&mut self, message: T) -> CargoResult<()> { self.print(&"note", Some(&message), Cyan, false) } /// Updates the verbosity of the shell. pub fn set_verbosity(&mut self, verbosity: Verbosity) { self.verbosity = verbosity; } /// Gets the verbosity of the shell. pub fn verbosity(&self) -> Verbosity { self.verbosity } /// Updates the color choice (always, never, or auto) from a string.. pub fn set_color_choice(&mut self, color: Option<&str>) -> CargoResult<()> { if let ShellOut::Stream { ref mut stdout, ref mut stderr, ref mut color_choice, .. } = self.output { let cfg = match color { Some("always") => ColorChoice::Always, Some("never") => ColorChoice::Never, Some("auto") | None => ColorChoice::CargoAuto, Some(arg) => anyhow::bail!( "argument for --color must be auto, always, or \ never, but found `{}`", arg ), }; *color_choice = cfg; *stdout = StandardStream::stdout(cfg.to_termcolor_color_choice(atty::Stream::Stdout)); *stderr = StandardStream::stderr(cfg.to_termcolor_color_choice(atty::Stream::Stderr)); } Ok(()) } /// Gets the current color choice. /// /// If we are not using a color stream, this will always return `Never`, even if the color /// choice has been set to something else. pub fn color_choice(&self) -> ColorChoice { match self.output { ShellOut::Stream { color_choice, .. 
} => color_choice, ShellOut::Write(_) => ColorChoice::Never, } } /// Whether the shell supports color. pub fn err_supports_color(&self) -> bool { match &self.output { ShellOut::Write(_) => false, ShellOut::Stream { stderr, .. } => stderr.supports_color(), } } pub fn out_supports_color(&self) -> bool { match &self.output { ShellOut::Write(_) => false, ShellOut::Stream { stdout, .. } => stdout.supports_color(), } } /// Write a styled fragment /// /// Caller is responsible for deciding whether [`Shell::verbosity`] is affects output. pub fn write_stdout( &mut self, fragment: impl fmt::Display, color: &ColorSpec, ) -> CargoResult<()> { self.output.write_stdout(fragment, color) } /// Write a styled fragment /// /// Caller is responsible for deciding whether [`Shell::verbosity`] is affects output. pub fn write_stderr( &mut self, fragment: impl fmt::Display, color: &ColorSpec, ) -> CargoResult<()> { self.output.write_stderr(fragment, color) } /// Prints a message to stderr and translates ANSI escape code into console colors. pub fn print_ansi_stderr(&mut self, message: &[u8]) -> CargoResult<()> { if self.needs_clear { self.err_erase_line(); } #[cfg(windows)] { if let ShellOut::Stream { stderr, .. } = &mut self.output { ::fwdansi::write_ansi(stderr, message)?; return Ok(()); } } self.err().write_all(message)?; Ok(()) } /// Prints a message to stdout and translates ANSI escape code into console colors. pub fn print_ansi_stdout(&mut self, message: &[u8]) -> CargoResult<()> { if self.needs_clear { self.err_erase_line(); } #[cfg(windows)] { if let ShellOut::Stream { stdout, .. } = &mut self.output { ::fwdansi::write_ansi(stdout, message)?; return Ok(()); } } self.out().write_all(message)?; Ok(()) } pub fn print_json(&mut self, obj: &T) -> CargoResult<()> { // Path may fail to serialize to JSON ... let encoded = serde_json::to_string(&obj)?; // ... but don't fail due to a closed pipe. 
drop(writeln!(self.out(), "{}", encoded)); Ok(()) } } impl Default for Shell { fn default() -> Self { Self::new() } } impl ShellOut { /// Prints out a message with a status. The status comes first, and is bold plus the given /// color. The status can be justified, in which case the max width that will right align is /// 12 chars. fn message_stderr( &mut self, status: &dyn fmt::Display, message: Option<&dyn fmt::Display>, color: Color, justified: bool, ) -> CargoResult<()> { match *self { ShellOut::Stream { ref mut stderr, .. } => { stderr.reset()?; stderr.set_color(ColorSpec::new().set_bold(true).set_fg(Some(color)))?; if justified { write!(stderr, "{:>12}", status)?; } else { write!(stderr, "{}", status)?; stderr.set_color(ColorSpec::new().set_bold(true))?; write!(stderr, ":")?; } stderr.reset()?; match message { Some(message) => writeln!(stderr, " {}", message)?, None => write!(stderr, " ")?, } } ShellOut::Write(ref mut w) => { if justified { write!(w, "{:>12}", status)?; } else { write!(w, "{}:", status)?; } match message { Some(message) => writeln!(w, " {}", message)?, None => write!(w, " ")?, } } } Ok(()) } /// Write a styled fragment fn write_stdout(&mut self, fragment: impl fmt::Display, color: &ColorSpec) -> CargoResult<()> { match *self { ShellOut::Stream { ref mut stdout, .. } => { stdout.reset()?; stdout.set_color(&color)?; write!(stdout, "{}", fragment)?; stdout.reset()?; } ShellOut::Write(ref mut w) => { write!(w, "{}", fragment)?; } } Ok(()) } /// Write a styled fragment fn write_stderr(&mut self, fragment: impl fmt::Display, color: &ColorSpec) -> CargoResult<()> { match *self { ShellOut::Stream { ref mut stderr, .. } => { stderr.reset()?; stderr.set_color(&color)?; write!(stderr, "{}", fragment)?; stderr.reset()?; } ShellOut::Write(ref mut w) => { write!(w, "{}", fragment)?; } } Ok(()) } /// Gets stdout as a `io::Write`. fn stdout(&mut self) -> &mut dyn Write { match *self { ShellOut::Stream { ref mut stdout, .. 
} => stdout, ShellOut::Write(ref mut w) => w, } } /// Gets stderr as a `io::Write`. fn stderr(&mut self) -> &mut dyn Write { match *self { ShellOut::Stream { ref mut stderr, .. } => stderr, ShellOut::Write(ref mut w) => w, } } } impl ColorChoice { /// Converts our color choice to termcolor's version. fn to_termcolor_color_choice(self, stream: atty::Stream) -> termcolor::ColorChoice { match self { ColorChoice::Always => termcolor::ColorChoice::Always, ColorChoice::Never => termcolor::ColorChoice::Never, ColorChoice::CargoAuto => { if atty::is(stream) { termcolor::ColorChoice::Auto } else { termcolor::ColorChoice::Never } } } } } #[cfg(unix)] mod imp { use super::{Shell, TtyWidth}; use std::mem; pub fn stderr_width() -> TtyWidth { unsafe { let mut winsize: libc::winsize = mem::zeroed(); // The .into() here is needed for FreeBSD which defines TIOCGWINSZ // as c_uint but ioctl wants c_ulong. if libc::ioctl(libc::STDERR_FILENO, libc::TIOCGWINSZ.into(), &mut winsize) < 0 { return TtyWidth::NoTty; } if winsize.ws_col > 0 { TtyWidth::Known(winsize.ws_col as usize) } else { TtyWidth::NoTty } } } pub fn err_erase_line(shell: &mut Shell) { // This is the "EL - Erase in Line" sequence. It clears from the cursor // to the end of line. 
// https://en.wikipedia.org/wiki/ANSI_escape_code#CSI_sequences let _ = shell.output.stderr().write_all(b"\x1B[K"); } } #[cfg(windows)] mod imp { use std::{cmp, mem, ptr}; use winapi::um::fileapi::*; use winapi::um::handleapi::*; use winapi::um::processenv::*; use winapi::um::winbase::*; use winapi::um::wincon::*; use winapi::um::winnt::*; pub(super) use super::{default_err_erase_line as err_erase_line, TtyWidth}; pub fn stderr_width() -> TtyWidth { unsafe { let stdout = GetStdHandle(STD_ERROR_HANDLE); let mut csbi: CONSOLE_SCREEN_BUFFER_INFO = mem::zeroed(); if GetConsoleScreenBufferInfo(stdout, &mut csbi) != 0 { return TtyWidth::Known((csbi.srWindow.Right - csbi.srWindow.Left) as usize); } // On mintty/msys/cygwin based terminals, the above fails with // INVALID_HANDLE_VALUE. Use an alternate method which works // in that case as well. let h = CreateFileA( "CONOUT$\0".as_ptr() as *const CHAR, GENERIC_READ | GENERIC_WRITE, FILE_SHARE_READ | FILE_SHARE_WRITE, ptr::null_mut(), OPEN_EXISTING, 0, ptr::null_mut(), ); if h == INVALID_HANDLE_VALUE { return TtyWidth::NoTty; } let mut csbi: CONSOLE_SCREEN_BUFFER_INFO = mem::zeroed(); let rc = GetConsoleScreenBufferInfo(h, &mut csbi); CloseHandle(h); if rc != 0 { let width = (csbi.srWindow.Right - csbi.srWindow.Left) as usize; // Unfortunately cygwin/mintty does not set the size of the // backing console to match the actual window size. This // always reports a size of 80 or 120 (not sure what // determines that). Use a conservative max of 60 which should // work in most circumstances. ConEmu does some magic to // resize the console correctly, but there's no reasonable way // to detect which kind of terminal we are running in, or if // GetConsoleScreenBufferInfo returns accurate information. 
return TtyWidth::Guess(cmp::min(60, width)); } TtyWidth::NoTty } } } #[cfg(windows)] fn default_err_erase_line(shell: &mut Shell) { match imp::stderr_width() { TtyWidth::Known(max_width) | TtyWidth::Guess(max_width) => { let blank = " ".repeat(max_width); drop(write!(shell.output.stderr(), "{}\r", blank)); } _ => (), } } cargo-0.66.0/src/cargo/core/source/000077500000000000000000000000001432416201200170035ustar00rootroot00000000000000cargo-0.66.0/src/cargo/core/source/mod.rs000066400000000000000000000233261432416201200201360ustar00rootroot00000000000000use std::collections::hash_map::HashMap; use std::fmt; use std::task::Poll; use crate::core::package::PackageSet; use crate::core::{Dependency, Package, PackageId, Summary}; use crate::util::{CargoResult, Config}; mod source_id; pub use self::source_id::{GitReference, SourceId}; /// Something that finds and downloads remote packages based on names and versions. pub trait Source { /// Returns the `SourceId` corresponding to this source. fn source_id(&self) -> SourceId; /// Returns the replaced `SourceId` corresponding to this source. fn replaced_source_id(&self) -> SourceId { self.source_id() } /// Returns whether or not this source will return summaries with /// checksums listed. fn supports_checksums(&self) -> bool; /// Returns whether or not this source will return summaries with /// the `precise` field in the source id listed. fn requires_precise(&self) -> bool; /// Attempts to find the packages that match a dependency request. fn query( &mut self, dep: &Dependency, kind: QueryKind, f: &mut dyn FnMut(Summary), ) -> Poll>; fn query_vec(&mut self, dep: &Dependency, kind: QueryKind) -> Poll>> { let mut ret = Vec::new(); self.query(dep, kind, &mut |s| ret.push(s)).map_ok(|_| ret) } /// Ensure that the source is fully up-to-date for the current session on the next query. fn invalidate_cache(&mut self); /// Fetches the full package for each name and version specified. 
fn download(&mut self, package: PackageId) -> CargoResult; fn download_now(self: Box, package: PackageId, config: &Config) -> CargoResult where Self: std::marker::Sized, { let mut sources = SourceMap::new(); sources.insert(self); let pkg_set = PackageSet::new(&[package], sources, config)?; let pkg = pkg_set.get_one(package)?; Ok(Package::clone(pkg)) } fn finish_download(&mut self, package: PackageId, contents: Vec) -> CargoResult; /// Generates a unique string which represents the fingerprint of the /// current state of the source. /// /// This fingerprint is used to determine the "fresheness" of the source /// later on. It must be guaranteed that the fingerprint of a source is /// constant if and only if the output product will remain constant. /// /// The `pkg` argument is the package which this fingerprint should only be /// interested in for when this source may contain multiple packages. fn fingerprint(&self, pkg: &Package) -> CargoResult; /// If this source supports it, verifies the source of the package /// specified. /// /// Note that the source may also have performed other checksum-based /// verification during the `download` step, but this is intended to be run /// just before a crate is compiled so it may perform more expensive checks /// which may not be cacheable. fn verify(&self, _pkg: PackageId) -> CargoResult<()> { Ok(()) } /// Describes this source in a human readable fashion, used for display in /// resolver error messages currently. fn describe(&self) -> String; /// Returns whether a source is being replaced by another here. fn is_replaced(&self) -> bool { false } /// Add a number of crates that should be whitelisted for showing up during /// queries, even if they are yanked. Currently only applies to registry /// sources. fn add_to_yanked_whitelist(&mut self, pkgs: &[PackageId]); /// Query if a package is yanked. Only registry sources can mark packages /// as yanked. This ignores the yanked whitelist. 
fn is_yanked(&mut self, _pkg: PackageId) -> Poll>; /// Block until all outstanding Poll::Pending requests are `Poll::Ready`. /// /// After calling this function, the source should return `Poll::Ready` for /// any queries that previously returned `Poll::Pending`. /// /// If no queries previously returned `Poll::Pending`, and `invalidate_cache` /// was not called, this function should be a no-op. fn block_until_ready(&mut self) -> CargoResult<()>; } #[derive(Copy, Clone, PartialEq, Eq)] pub enum QueryKind { Exact, /// Each source gets to define what `close` means for it. /// Path/Git sources may return all dependencies that are at that URI, /// whereas an `Index` source may return dependencies that have the same canonicalization. Fuzzy, } pub enum MaybePackage { Ready(Package), Download { url: String, descriptor: String }, } impl<'a, T: Source + ?Sized + 'a> Source for Box { /// Forwards to `Source::source_id`. fn source_id(&self) -> SourceId { (**self).source_id() } /// Forwards to `Source::replaced_source_id`. fn replaced_source_id(&self) -> SourceId { (**self).replaced_source_id() } /// Forwards to `Source::supports_checksums`. fn supports_checksums(&self) -> bool { (**self).supports_checksums() } /// Forwards to `Source::requires_precise`. fn requires_precise(&self) -> bool { (**self).requires_precise() } /// Forwards to `Source::query`. fn query( &mut self, dep: &Dependency, kind: QueryKind, f: &mut dyn FnMut(Summary), ) -> Poll> { (**self).query(dep, kind, f) } fn invalidate_cache(&mut self) { (**self).invalidate_cache() } /// Forwards to `Source::download`. fn download(&mut self, id: PackageId) -> CargoResult { (**self).download(id) } fn finish_download(&mut self, id: PackageId, data: Vec) -> CargoResult { (**self).finish_download(id, data) } /// Forwards to `Source::fingerprint`. fn fingerprint(&self, pkg: &Package) -> CargoResult { (**self).fingerprint(pkg) } /// Forwards to `Source::verify`. 
fn verify(&self, pkg: PackageId) -> CargoResult<()> { (**self).verify(pkg) } fn describe(&self) -> String { (**self).describe() } fn is_replaced(&self) -> bool { (**self).is_replaced() } fn add_to_yanked_whitelist(&mut self, pkgs: &[PackageId]) { (**self).add_to_yanked_whitelist(pkgs); } fn is_yanked(&mut self, pkg: PackageId) -> Poll> { (**self).is_yanked(pkg) } fn block_until_ready(&mut self) -> CargoResult<()> { (**self).block_until_ready() } } impl<'a, T: Source + ?Sized + 'a> Source for &'a mut T { fn source_id(&self) -> SourceId { (**self).source_id() } fn replaced_source_id(&self) -> SourceId { (**self).replaced_source_id() } fn supports_checksums(&self) -> bool { (**self).supports_checksums() } fn requires_precise(&self) -> bool { (**self).requires_precise() } fn query( &mut self, dep: &Dependency, kind: QueryKind, f: &mut dyn FnMut(Summary), ) -> Poll> { (**self).query(dep, kind, f) } fn invalidate_cache(&mut self) { (**self).invalidate_cache() } fn download(&mut self, id: PackageId) -> CargoResult { (**self).download(id) } fn finish_download(&mut self, id: PackageId, data: Vec) -> CargoResult { (**self).finish_download(id, data) } fn fingerprint(&self, pkg: &Package) -> CargoResult { (**self).fingerprint(pkg) } fn verify(&self, pkg: PackageId) -> CargoResult<()> { (**self).verify(pkg) } fn describe(&self) -> String { (**self).describe() } fn is_replaced(&self) -> bool { (**self).is_replaced() } fn add_to_yanked_whitelist(&mut self, pkgs: &[PackageId]) { (**self).add_to_yanked_whitelist(pkgs); } fn is_yanked(&mut self, pkg: PackageId) -> Poll> { (**self).is_yanked(pkg) } fn block_until_ready(&mut self) -> CargoResult<()> { (**self).block_until_ready() } } /// A `HashMap` of `SourceId` -> `Box`. #[derive(Default)] pub struct SourceMap<'src> { map: HashMap>, } // `impl Debug` on source requires specialization, if even desirable at all. 
impl<'src> fmt::Debug for SourceMap<'src> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "SourceMap ")?; f.debug_set().entries(self.map.keys()).finish() } } impl<'src> SourceMap<'src> { /// Creates an empty map. pub fn new() -> SourceMap<'src> { SourceMap { map: HashMap::new(), } } /// Like `HashMap::get`. pub fn get(&self, id: SourceId) -> Option<&(dyn Source + 'src)> { self.map.get(&id).map(|s| s.as_ref()) } /// Like `HashMap::get_mut`. pub fn get_mut(&mut self, id: SourceId) -> Option<&mut (dyn Source + 'src)> { self.map.get_mut(&id).map(|s| s.as_mut()) } /// Like `HashMap::insert`, but derives the `SourceId` key from the `Source`. pub fn insert(&mut self, source: Box) { let id = source.source_id(); self.map.insert(id, source); } /// Like `HashMap::len`. pub fn len(&self) -> usize { self.map.len() } /// Like `HashMap::iter_mut`. pub fn sources_mut<'a>( &'a mut self, ) -> impl Iterator { self.map.iter_mut().map(|(a, b)| (a, &mut **b)) } /// Merge the given map into self. pub fn add_source_map(&mut self, other: SourceMap<'src>) { for (key, value) in other.map { self.map.entry(key).or_insert(value); } } } cargo-0.66.0/src/cargo/core/source/source_id.rs000066400000000000000000000644021432416201200213330ustar00rootroot00000000000000use crate::core::PackageId; use crate::sources::registry::CRATES_IO_HTTP_INDEX; use crate::sources::{DirectorySource, CRATES_IO_DOMAIN, CRATES_IO_INDEX, CRATES_IO_REGISTRY}; use crate::sources::{GitSource, PathSource, RegistrySource}; use crate::util::{CanonicalUrl, CargoResult, Config, IntoUrl}; use log::trace; use serde::de; use serde::ser; use std::cmp::{self, Ordering}; use std::collections::HashSet; use std::fmt::{self, Formatter}; use std::hash::{self, Hash}; use std::path::{Path, PathBuf}; use std::ptr; use std::sync::Mutex; use url::Url; lazy_static::lazy_static! { static ref SOURCE_ID_CACHE: Mutex> = Default::default(); } /// Unique identifier for a source of packages. 
#[derive(Clone, Copy, Eq, Debug)] pub struct SourceId { inner: &'static SourceIdInner, } #[derive(Eq, Clone, Debug)] struct SourceIdInner { /// The source URL. url: Url, /// The canonical version of the above url canonical_url: CanonicalUrl, /// The source kind. kind: SourceKind, /// For example, the exact Git revision of the specified branch for a Git Source. precise: Option, /// Name of the registry source for alternative registries /// WARNING: this is not always set for alt-registries when the name is /// not known. name: Option, } /// The possible kinds of code source. Along with `SourceIdInner`, this fully defines the /// source. #[derive(Debug, Clone, PartialEq, Eq, Hash)] enum SourceKind { /// A git repository. Git(GitReference), /// A local path. Path, /// A remote registry. Registry, /// A local filesystem-based registry. LocalRegistry, /// A directory-based registry. Directory, } /// Information to find a specific commit in a Git repository. #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum GitReference { /// From a tag. Tag(String), /// From a branch. Branch(String), /// From a specific revision. Rev(String), /// The default branch of the repository, the reference named `HEAD`. DefaultBranch, } impl SourceId { /// Creates a `SourceId` object from the kind and URL. /// /// The canonical url will be calculated, but the precise field will not fn new(kind: SourceKind, url: Url, name: Option<&str>) -> CargoResult { let source_id = SourceId::wrap(SourceIdInner { kind, canonical_url: CanonicalUrl::new(&url)?, url, precise: None, name: name.map(|n| n.into()), }); Ok(source_id) } fn wrap(inner: SourceIdInner) -> SourceId { let mut cache = SOURCE_ID_CACHE.lock().unwrap(); let inner = cache.get(&inner).cloned().unwrap_or_else(|| { let inner = Box::leak(Box::new(inner)); cache.insert(inner); inner }); SourceId { inner } } /// Parses a source URL and returns the corresponding ID. 
/// /// ## Example /// /// ``` /// use cargo::core::SourceId; /// SourceId::from_url("git+https://github.com/alexcrichton/\ /// libssh2-static-sys#80e71a3021618eb05\ /// 656c58fb7c5ef5f12bc747f"); /// ``` pub fn from_url(string: &str) -> CargoResult { let mut parts = string.splitn(2, '+'); let kind = parts.next().unwrap(); let url = parts .next() .ok_or_else(|| anyhow::format_err!("invalid source `{}`", string))?; match kind { "git" => { let mut url = url.into_url()?; let mut reference = GitReference::DefaultBranch; for (k, v) in url.query_pairs() { match &k[..] { // Map older 'ref' to branch. "branch" | "ref" => reference = GitReference::Branch(v.into_owned()), "rev" => reference = GitReference::Rev(v.into_owned()), "tag" => reference = GitReference::Tag(v.into_owned()), _ => {} } } let precise = url.fragment().map(|s| s.to_owned()); url.set_fragment(None); url.set_query(None); Ok(SourceId::for_git(&url, reference)?.with_precise(precise)) } "registry" => { let url = url.into_url()?; Ok(SourceId::new(SourceKind::Registry, url, None)? .with_precise(Some("locked".to_string()))) } "sparse" => { let url = string.into_url()?; Ok(SourceId::new(SourceKind::Registry, url, None)? .with_precise(Some("locked".to_string()))) } "path" => { let url = url.into_url()?; SourceId::new(SourceKind::Path, url, None) } kind => Err(anyhow::format_err!("unsupported source protocol: {}", kind)), } } /// A view of the `SourceId` that can be `Display`ed as a URL. pub fn as_url(&self) -> SourceIdAsUrl<'_> { SourceIdAsUrl { inner: &*self.inner, } } /// Creates a `SourceId` from a filesystem path. /// /// `path`: an absolute path. pub fn for_path(path: &Path) -> CargoResult { let url = path.into_url()?; SourceId::new(SourceKind::Path, url, None) } /// Creates a `SourceId` from a Git reference. 
pub fn for_git(url: &Url, reference: GitReference) -> CargoResult { SourceId::new(SourceKind::Git(reference), url.clone(), None) } /// Creates a SourceId from a remote registry URL when the registry name /// cannot be determined, e.g. a user passes `--index` directly from CLI. /// /// Use [`SourceId::for_alt_registry`] if a name can provided, which /// generates better messages for cargo. pub fn for_registry(url: &Url) -> CargoResult { SourceId::new(SourceKind::Registry, url.clone(), None) } /// Creates a `SourceId` from a remote registry URL with given name. pub fn for_alt_registry(url: &Url, name: &str) -> CargoResult { SourceId::new(SourceKind::Registry, url.clone(), Some(name)) } /// Creates a SourceId from a local registry path. pub fn for_local_registry(path: &Path) -> CargoResult { let url = path.into_url()?; SourceId::new(SourceKind::LocalRegistry, url, None) } /// Creates a `SourceId` from a directory path. pub fn for_directory(path: &Path) -> CargoResult { let url = path.into_url()?; SourceId::new(SourceKind::Directory, url, None) } /// Returns the `SourceId` corresponding to the main repository. /// /// This is the main cargo registry by default, but it can be overridden in /// a `.cargo/config.toml`. pub fn crates_io(config: &Config) -> CargoResult { config.crates_io_source_id(|| { config.check_registry_index_not_set()?; let url = CRATES_IO_INDEX.into_url().unwrap(); SourceId::new(SourceKind::Registry, url, Some(CRATES_IO_REGISTRY)) }) } /// Returns the `SourceId` corresponding to the main repository, using the /// sparse HTTP index if allowed. pub fn crates_io_maybe_sparse_http(config: &Config) -> CargoResult { if config.cli_unstable().sparse_registry { config.check_registry_index_not_set()?; let url = CRATES_IO_HTTP_INDEX.into_url().unwrap(); SourceId::new(SourceKind::Registry, url, Some(CRATES_IO_REGISTRY)) } else { Self::crates_io(config) } } /// Gets the `SourceId` associated with given name of the remote registry. 
pub fn alt_registry(config: &Config, key: &str) -> CargoResult { let url = config.get_registry_index(key)?; Ok(SourceId::wrap(SourceIdInner { kind: SourceKind::Registry, canonical_url: CanonicalUrl::new(&url)?, url, precise: None, name: Some(key.to_string()), })) } /// Gets this source URL. pub fn url(&self) -> &Url { &self.inner.url } /// Gets the canonical URL of this source, used for internal comparison /// purposes. pub fn canonical_url(&self) -> &CanonicalUrl { &self.inner.canonical_url } pub fn display_index(self) -> String { if self.is_default_registry() { format!("{} index", CRATES_IO_DOMAIN) } else { format!("`{}` index", self.display_registry_name()) } } pub fn display_registry_name(self) -> String { if self.is_default_registry() { CRATES_IO_REGISTRY.to_string() } else if let Some(name) = &self.inner.name { name.clone() } else if self.precise().is_some() { // We remove `precise` here to retrieve an permissive version of // `SourceIdInner`, which may contain the registry name. self.with_precise(None).display_registry_name() } else { url_display(self.url()) } } /// Returns `true` if this source is from a filesystem path. pub fn is_path(self) -> bool { self.inner.kind == SourceKind::Path } /// Returns the local path if this is a path dependency. pub fn local_path(self) -> Option { if self.inner.kind != SourceKind::Path { return None; } Some(self.inner.url.to_file_path().unwrap()) } /// Returns `true` if this source is from a registry (either local or not). pub fn is_registry(self) -> bool { matches!( self.inner.kind, SourceKind::Registry | SourceKind::LocalRegistry ) } /// Returns `true` if this source is a "remote" registry. /// /// "remote" may also mean a file URL to a git index, so it is not /// necessarily "remote". This just means it is not `local-registry`. pub fn is_remote_registry(self) -> bool { matches!(self.inner.kind, SourceKind::Registry) } /// Returns `true` if this source from a Git repository. 
pub fn is_git(self) -> bool { matches!(self.inner.kind, SourceKind::Git(_)) } /// Creates an implementation of `Source` corresponding to this ID. pub fn load<'a>( self, config: &'a Config, yanked_whitelist: &HashSet, ) -> CargoResult> { trace!("loading SourceId; {}", self); match self.inner.kind { SourceKind::Git(..) => Ok(Box::new(GitSource::new(self, config)?)), SourceKind::Path => { let path = match self.inner.url.to_file_path() { Ok(p) => p, Err(()) => panic!("path sources cannot be remote"), }; Ok(Box::new(PathSource::new(&path, self, config))) } SourceKind::Registry => Ok(Box::new(RegistrySource::remote( self, yanked_whitelist, config, )?)), SourceKind::LocalRegistry => { let path = match self.inner.url.to_file_path() { Ok(p) => p, Err(()) => panic!("path sources cannot be remote"), }; Ok(Box::new(RegistrySource::local( self, &path, yanked_whitelist, config, ))) } SourceKind::Directory => { let path = match self.inner.url.to_file_path() { Ok(p) => p, Err(()) => panic!("path sources cannot be remote"), }; Ok(Box::new(DirectorySource::new(&path, self, config))) } } } /// Gets the value of the precise field. pub fn precise(self) -> Option<&'static str> { self.inner.precise.as_deref() } /// Gets the Git reference if this is a git source, otherwise `None`. pub fn git_reference(self) -> Option<&'static GitReference> { match self.inner.kind { SourceKind::Git(ref s) => Some(s), _ => None, } } /// Creates a new `SourceId` from this source with the given `precise`. pub fn with_precise(self, v: Option) -> SourceId { SourceId::wrap(SourceIdInner { precise: v, ..(*self.inner).clone() }) } /// Returns `true` if the remote registry is the standard . pub fn is_default_registry(self) -> bool { match self.inner.kind { SourceKind::Registry => {} _ => return false, } let url = self.inner.url.as_str(); url == CRATES_IO_INDEX || url == CRATES_IO_HTTP_INDEX } /// Hashes `self`. 
/// /// For paths, remove the workspace prefix so the same source will give the /// same hash in different locations. pub fn stable_hash(self, workspace: &Path, into: &mut S) { if self.is_path() { if let Ok(p) = self .inner .url .to_file_path() .unwrap() .strip_prefix(workspace) { self.inner.kind.hash(into); p.to_str().unwrap().hash(into); return; } } self.hash(into) } pub fn full_eq(self, other: SourceId) -> bool { ptr::eq(self.inner, other.inner) } pub fn full_hash(self, into: &mut S) { ptr::NonNull::from(self.inner).hash(into) } } impl PartialEq for SourceId { fn eq(&self, other: &SourceId) -> bool { self.cmp(other) == Ordering::Equal } } impl PartialOrd for SourceId { fn partial_cmp(&self, other: &SourceId) -> Option { Some(self.cmp(other)) } } // Custom comparison defined as canonical URL equality for git sources and URL // equality for other sources, ignoring the `precise` and `name` fields. impl Ord for SourceId { fn cmp(&self, other: &SourceId) -> Ordering { // If our interior pointers are to the exact same `SourceIdInner` then // we're guaranteed to be equal. if ptr::eq(self.inner, other.inner) { return Ordering::Equal; } // Sort first based on `kind`, deferring to the URL comparison below if // the kinds are equal. match self.inner.kind.cmp(&other.inner.kind) { Ordering::Equal => {} other => return other, } // If the `kind` and the `url` are equal, then for git sources we also // ensure that the canonical urls are equal. 
match (&self.inner.kind, &other.inner.kind) { (SourceKind::Git(_), SourceKind::Git(_)) => { self.inner.canonical_url.cmp(&other.inner.canonical_url) } _ => self.inner.url.cmp(&other.inner.url), } } } impl ser::Serialize for SourceId { fn serialize(&self, s: S) -> Result where S: ser::Serializer, { if self.is_path() { None::.serialize(s) } else { s.collect_str(&self.as_url()) } } } impl<'de> de::Deserialize<'de> for SourceId { fn deserialize(d: D) -> Result where D: de::Deserializer<'de>, { let string = String::deserialize(d)?; SourceId::from_url(&string).map_err(de::Error::custom) } } fn url_display(url: &Url) -> String { if url.scheme() == "file" { if let Ok(path) = url.to_file_path() { if let Some(path_str) = path.to_str() { return path_str.to_string(); } } } url.as_str().to_string() } impl fmt::Display for SourceId { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self.inner.kind { SourceKind::Git(ref reference) => { // Don't replace the URL display for git references, // because those are kind of expected to be URLs. write!(f, "{}", self.inner.url)?; if let Some(pretty) = reference.pretty_ref() { write!(f, "?{}", pretty)?; } if let Some(ref s) = self.inner.precise { let len = cmp::min(s.len(), 8); write!(f, "#{}", &s[..len])?; } Ok(()) } SourceKind::Path => write!(f, "{}", url_display(&self.inner.url)), SourceKind::Registry => write!(f, "registry `{}`", self.display_registry_name()), SourceKind::LocalRegistry => write!(f, "registry `{}`", url_display(&self.inner.url)), SourceKind::Directory => write!(f, "dir {}", url_display(&self.inner.url)), } } } // The hash of SourceId is used in the name of some Cargo folders, so shouldn't // vary. `as_str` gives the serialisation of a url (which has a spec) and so // insulates against possible changes in how the url crate does hashing. 
impl Hash for SourceId { fn hash(&self, into: &mut S) { self.inner.kind.hash(into); match self.inner.kind { SourceKind::Git(_) => self.inner.canonical_url.hash(into), _ => self.inner.url.as_str().hash(into), } } } impl Hash for SourceIdInner { /// The hash of `SourceIdInner` is used to retrieve its interned value. We /// only care about fields that make `SourceIdInner` unique, which are: /// /// - `kind` /// - `precise` /// - `canonical_url` fn hash(&self, into: &mut S) { self.kind.hash(into); self.precise.hash(into); self.canonical_url.hash(into); } } impl PartialEq for SourceIdInner { /// This implementation must be synced with [`SourceIdInner::hash`]. fn eq(&self, other: &Self) -> bool { self.kind == other.kind && self.precise == other.precise && self.canonical_url == other.canonical_url } } // forward to `Ord` impl PartialOrd for SourceKind { fn partial_cmp(&self, other: &SourceKind) -> Option { Some(self.cmp(other)) } } // Note that this is specifically not derived on `SourceKind` although the // implementation here is very similar to what it might look like if it were // otherwise derived. // // The reason for this is somewhat obtuse. First of all the hash value of // `SourceKind` makes its way into `~/.cargo/registry/index/github.com-XXXX` // which means that changes to the hash means that all Rust users need to // redownload the crates.io index and all their crates. If possible we strive to // not change this to make this redownloading behavior happen as little as // possible. How is this connected to `Ord` you might ask? That's a good // question! // // Since the beginning of time `SourceKind` has had `#[derive(Hash)]`. It for // the longest time *also* derived the `Ord` and `PartialOrd` traits. In #8522, // however, the implementation of `Ord` changed. This handwritten implementation // forgot to sync itself with the originally derived implementation, namely // placing git dependencies as sorted after all other dependencies instead of // first as before. 
// // This regression in #8522 (Rust 1.47) went unnoticed. When we switched back // to a derived implementation in #9133 (Rust 1.52 beta) we only then ironically // saw an issue (#9334). In #9334 it was observed that stable Rust at the time // (1.51) was sorting git dependencies last, whereas Rust 1.52 beta would sort // git dependencies first. This is because the `PartialOrd` implementation in // 1.51 used #8522, the buggy implementation, which put git deps last. In 1.52 // it was (unknowingly) restored to the pre-1.47 behavior with git dependencies // first. // // Because the breakage was only witnessed after the original breakage, this // trait implementation is preserving the "broken" behavior. Put a different way: // // * Rust pre-1.47 sorted git deps first. // * Rust 1.47 to Rust 1.51 sorted git deps last, a breaking change (#8522) that // was never noticed. // * Rust 1.52 restored the pre-1.47 behavior (#9133, without knowing it did // so), and breakage was witnessed by actual users due to difference with // 1.51. // * Rust 1.52 (the source as it lives now) was fixed to match the 1.47-1.51 // behavior (#9383), which is now considered intentionally breaking from the // pre-1.47 behavior. // // Note that this was all discovered when Rust 1.53 was in nightly and 1.52 was // in beta. #9133 was in both beta and nightly at the time of discovery. For // 1.52 #9383 reverted #9133, meaning 1.52 is the same as 1.51. On nightly // (1.53) #9397 was created to fix the regression introduced by #9133 relative // to the current stable (1.51). // // That's all a long winded way of saying "it's weird that git deps hash first // and are sorted last, but it's the way it is right now". The author of this // comment chose to handwrite the `Ord` implementation instead of the `Hash` // implementation, but it's only required that at most one of them is // hand-written because the other can be derived. 
Perhaps one day in // the future someone can figure out how to remove this behavior. impl Ord for SourceKind { fn cmp(&self, other: &SourceKind) -> Ordering { match (self, other) { (SourceKind::Path, SourceKind::Path) => Ordering::Equal, (SourceKind::Path, _) => Ordering::Less, (_, SourceKind::Path) => Ordering::Greater, (SourceKind::Registry, SourceKind::Registry) => Ordering::Equal, (SourceKind::Registry, _) => Ordering::Less, (_, SourceKind::Registry) => Ordering::Greater, (SourceKind::LocalRegistry, SourceKind::LocalRegistry) => Ordering::Equal, (SourceKind::LocalRegistry, _) => Ordering::Less, (_, SourceKind::LocalRegistry) => Ordering::Greater, (SourceKind::Directory, SourceKind::Directory) => Ordering::Equal, (SourceKind::Directory, _) => Ordering::Less, (_, SourceKind::Directory) => Ordering::Greater, (SourceKind::Git(a), SourceKind::Git(b)) => a.cmp(b), } } } // This is a test that the hash of the `SourceId` for crates.io is a well-known // value. // // Note that the hash value matches what the crates.io source id has hashed // since long before Rust 1.30. We strive to keep this value the same across // versions of Cargo because changing it means that users will need to // redownload the index and all crates they use when using a new Cargo version. // // This isn't to say that this hash can *never* change, only that when changing // this it should be explicitly done. If this hash changes accidentally and // you're able to restore the hash to its original value, please do so! // Otherwise please just leave a comment in your PR as to why the hash value is // changing and why the old value can't be easily preserved. // // The hash value depends on endianness and bit-width, so we only run this test on // little-endian 64-bit CPUs (such as x86-64 and ARM64) where it matches the // well-known value. 
#[test] #[cfg(all(target_endian = "little", target_pointer_width = "64"))] fn test_cratesio_hash() { let config = Config::default().unwrap(); let crates_io = SourceId::crates_io(&config).unwrap(); assert_eq!(crate::util::hex::short_hash(&crates_io), "1ecc6299db9ec823"); } /// A `Display`able view into a `SourceId` that will write it as a url pub struct SourceIdAsUrl<'a> { inner: &'a SourceIdInner, } impl<'a> fmt::Display for SourceIdAsUrl<'a> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self.inner { SourceIdInner { kind: SourceKind::Path, ref url, .. } => write!(f, "path+{}", url), SourceIdInner { kind: SourceKind::Git(ref reference), ref url, ref precise, .. } => { write!(f, "git+{}", url)?; if let Some(pretty) = reference.pretty_ref() { write!(f, "?{}", pretty)?; } if let Some(precise) = precise.as_ref() { write!(f, "#{}", precise)?; } Ok(()) } SourceIdInner { kind: SourceKind::Registry, ref url, .. } => write!(f, "registry+{}", url), SourceIdInner { kind: SourceKind::LocalRegistry, ref url, .. } => write!(f, "local-registry+{}", url), SourceIdInner { kind: SourceKind::Directory, ref url, .. 
} => write!(f, "directory+{}", url), } } } impl GitReference { /// Returns a `Display`able view of this git reference, or None if using /// the head of the default branch pub fn pretty_ref(&self) -> Option> { match self { GitReference::DefaultBranch => None, _ => Some(PrettyRef { inner: self }), } } } /// A git reference that can be `Display`ed pub struct PrettyRef<'a> { inner: &'a GitReference, } impl<'a> fmt::Display for PrettyRef<'a> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self.inner { GitReference::Branch(ref b) => write!(f, "branch={}", b), GitReference::Tag(ref s) => write!(f, "tag={}", s), GitReference::Rev(ref s) => write!(f, "rev={}", s), GitReference::DefaultBranch => unreachable!(), } } } #[cfg(test)] mod tests { use super::{GitReference, SourceId, SourceKind}; use crate::util::IntoUrl; #[test] fn github_sources_equal() { let loc = "https://github.com/foo/bar".into_url().unwrap(); let default = SourceKind::Git(GitReference::DefaultBranch); let s1 = SourceId::new(default.clone(), loc, None).unwrap(); let loc = "git://github.com/foo/bar".into_url().unwrap(); let s2 = SourceId::new(default, loc.clone(), None).unwrap(); assert_eq!(s1, s2); let foo = SourceKind::Git(GitReference::Branch("foo".to_string())); let s3 = SourceId::new(foo, loc, None).unwrap(); assert_ne!(s1, s3); } } cargo-0.66.0/src/cargo/core/summary.rs000066400000000000000000000350141432416201200175510ustar00rootroot00000000000000use crate::core::{Dependency, PackageId, SourceId}; use crate::util::interning::InternedString; use crate::util::{CargoResult, Config}; use anyhow::bail; use semver::Version; use std::collections::{BTreeMap, HashMap, HashSet}; use std::fmt; use std::hash::{Hash, Hasher}; use std::mem; use std::rc::Rc; /// Subset of a `Manifest`. Contains only the most important information about /// a package. 
///
/// Summaries are cloned, and should not be mutated after creation
#[derive(Debug, Clone)]
pub struct Summary {
    // NOTE(review): several generic parameters in this file (e.g. `Rc<...>`,
    // `Vec<...>`, `Option<...>`) appear to have been stripped by the
    // extraction tooling; types below are shown as extracted.
    inner: Rc,
}

#[derive(Debug, Clone)]
struct Inner {
    package_id: PackageId,
    dependencies: Vec,
    features: Rc,
    checksum: Option,
    links: Option,
}

impl Summary {
    /// Builds a `Summary`, validating dependency and feature invariants.
    pub fn new(
        config: &Config,
        pkg_id: PackageId,
        dependencies: Vec,
        features: &BTreeMap>,
        links: Option>,
    ) -> CargoResult {
        // ****CAUTION**** If you change anything here that may raise a new
        // error, be sure to coordinate that change with either the index
        // schema field or the SummariesCache version.
        for dep in dependencies.iter() {
            let dep_name = dep.name_in_toml();
            // Optional + non-transitive identifies an optional dev-dependency,
            // which is rejected.
            if dep.is_optional() && !dep.is_transitive() {
                bail!(
                    "dev-dependencies are not allowed to be optional: `{}`",
                    dep_name
                )
            }
        }
        let feature_map = build_feature_map(config, pkg_id, features, &dependencies)?;
        Ok(Summary {
            inner: Rc::new(Inner {
                package_id: pkg_id,
                dependencies,
                features: Rc::new(feature_map),
                checksum: None,
                links: links.map(|l| l.into()),
            }),
        })
    }

    pub fn package_id(&self) -> PackageId {
        self.inner.package_id
    }
    pub fn name(&self) -> InternedString {
        self.package_id().name()
    }
    pub fn version(&self) -> &Version {
        self.package_id().version()
    }
    pub fn source_id(&self) -> SourceId {
        self.package_id().source_id()
    }
    pub fn dependencies(&self) -> &[Dependency] {
        &self.inner.dependencies
    }
    pub fn features(&self) -> &FeatureMap {
        &self.inner.features
    }
    pub fn checksum(&self) -> Option<&str> {
        self.inner.checksum.as_deref()
    }
    pub fn links(&self) -> Option {
        self.inner.links
    }

    /// Returns this summary with the package id replaced. `Rc::make_mut`
    /// clones the shared inner data only when it has other owners.
    pub fn override_id(mut self, id: PackageId) -> Summary {
        Rc::make_mut(&mut self.inner).package_id = id;
        self
    }

    pub fn set_checksum(&mut self, cksum: String) {
        Rc::make_mut(&mut self.inner).checksum = Some(cksum);
    }

    /// Applies `f` to every dependency, rebuilding the dependency list.
    pub fn map_dependencies(mut self, f: F) -> Summary
    where
        F: FnMut(Dependency) -> Dependency,
    {
        {
            // `mem::take` moves the Vec out so it can be consumed by value
            // without cloning; the mapped result is written back into place.
            let slot = &mut Rc::make_mut(&mut self.inner).dependencies;
            *slot = mem::take(slot).into_iter().map(f).collect();
        }
        self
    }

    /// Rewrites this summary's own source, and every dependency's source,
    /// from `to_replace` to `replace_with`.
    pub fn map_source(self, to_replace: SourceId, replace_with: SourceId) -> Summary {
        let me = if self.package_id().source_id() == to_replace {
            let new_id = self.package_id().with_source_id(replace_with);
            self.override_id(new_id)
        } else {
            self
        };
        me.map_dependencies(|dep| dep.map_source(to_replace, replace_with))
    }
}

// Equality and hashing delegate to the package id alone; the rest of the
// inner data does not participate.
impl PartialEq for Summary {
    fn eq(&self, other: &Summary) -> bool {
        self.inner.package_id == other.inner.package_id
    }
}

impl Eq for Summary {}

impl Hash for Summary {
    // NOTE(review): the `<H: Hasher>` generic parameter appears to have been
    // stripped here by the extraction tooling.
    fn hash(&self, state: &mut H) {
        self.inner.package_id.hash(state);
    }
}

/// Checks features for errors, bailing out a CargoResult:Err if invalid,
/// and creates FeatureValues for each feature.
fn build_feature_map(
    config: &Config,
    pkg_id: PackageId,
    features: &BTreeMap>,
    dependencies: &[Dependency],
) -> CargoResult {
    use self::FeatureValue::*;
    // Index dependencies by their name-in-toml; a name may map to several
    // entries (the values are Vecs of matching dependencies).
    let mut dep_map = HashMap::new();
    for dep in dependencies.iter() {
        dep_map
            .entry(dep.name_in_toml())
            .or_insert_with(Vec::new)
            .push(dep);
    }

    // Parse every `[features]` entry into structured `FeatureValue`s.
    let mut map: FeatureMap = features
        .iter()
        .map(|(feature, list)| {
            let fvs: Vec<_> = list
                .iter()
                .map(|feat_value| FeatureValue::new(*feat_value))
                .collect();
            (*feature, fvs)
        })
        .collect();

    // Add implicit features for optional dependencies if they weren't
    // explicitly listed anywhere.
    let explicitly_listed: HashSet<_> = map
        .values()
        .flatten()
        .filter_map(|fv| match fv {
            Dep { dep_name } => Some(*dep_name),
            _ => None,
        })
        .collect();
    for dep in dependencies {
        if !dep.is_optional() {
            continue;
        }
        let dep_name_in_toml = dep.name_in_toml();
        if features.contains_key(&dep_name_in_toml) || explicitly_listed.contains(&dep_name_in_toml)
        {
            continue;
        }
        let fv = Dep {
            dep_name: dep_name_in_toml,
        };
        map.insert(dep_name_in_toml, vec![fv]);
    }
    // Validate features are listed properly.
for (feature, fvs) in &map {
        // Feature *names* may not use `dep:` or `/` -- those tokens are only
        // meaningful inside a feature's value list.
        if feature.starts_with("dep:") {
            bail!(
                "feature named `{}` is not allowed to start with `dep:`",
                feature
            );
        }
        if feature.contains('/') {
            bail!(
                "feature named `{}` is not allowed to contain slashes",
                feature
            );
        }
        validate_feature_name(config, pkg_id, feature)?;
        for fv in fvs {
            // Find data for the referenced dependency...
            let dep_data = {
                match fv {
                    Feature(dep_name) | Dep { dep_name, .. } | DepFeature { dep_name, .. } => {
                        dep_map.get(dep_name)
                    }
                }
            };
            // `is_optional_dep`: at least one dependency with this name is
            // optional. `is_any_dep`: the name exists as a dependency at all.
            let is_optional_dep = dep_data
                .iter()
                .flat_map(|d| d.iter())
                .any(|d| d.is_optional());
            let is_any_dep = dep_data.is_some();
            match fv {
                Feature(f) => {
                    if !features.contains_key(f) {
                        if !is_any_dep {
                            bail!(
                                "feature `{}` includes `{}` which is neither a dependency \
                                nor another feature",
                                feature,
                                fv
                            );
                        }
                        if is_optional_dep {
                            if !map.contains_key(f) {
                                bail!(
                                    "feature `{}` includes `{}`, but `{}` is an \
                                    optional dependency without an implicit feature\n\
                                    Use `dep:{}` to enable the dependency.",
                                    feature,
                                    fv,
                                    f,
                                    f
                                );
                            }
                        } else {
                            bail!("feature `{}` includes `{}`, but `{}` is not an optional dependency\n\
                                A non-optional dependency of the same name is defined; \
                                consider adding `optional = true` to its definition.",
                                feature, fv, f);
                        }
                    }
                }
                Dep { dep_name } => {
                    if !is_any_dep {
                        bail!(
                            "feature `{}` includes `{}`, but `{}` is not listed as a dependency",
                            feature,
                            fv,
                            dep_name
                        );
                    }
                    if !is_optional_dep {
                        bail!(
                            "feature `{}` includes `{}`, but `{}` is not an optional dependency\n\
                            A non-optional dependency of the same name is defined; \
                            consider adding `optional = true` to its definition.",
                            feature,
                            fv,
                            dep_name
                        );
                    }
                }
                DepFeature {
                    dep_name,
                    dep_feature,
                    weak,
                    ..
                } => {
                    // Early check for some unlikely syntax.
                    if dep_feature.contains('/') {
                        bail!(
                            "multiple slashes in feature `{}` (included by feature `{}`) are not allowed",
                            fv,
                            feature
                        );
                    }

                    // Validation of the feature name will be performed in the resolver.
                    if !is_any_dep {
                        bail!(
                            "feature `{}` includes `{}`, but `{}` is not a dependency",
                            feature,
                            fv,
                            dep_name
                        );
                    }
                    if *weak && !is_optional_dep {
                        bail!("feature `{}` includes `{}` with a `?`, but `{}` is not an optional dependency\n\
                            A non-optional dependency of the same name is defined; \
                            consider removing the `?` or changing the dependency to be optional",
                            feature, fv, dep_name);
                    }
                }
            }
        }
    }

    // Make sure every optional dep is mentioned at least once.
    let used: HashSet<_> = map
        .values()
        .flatten()
        .filter_map(|fv| match fv {
            Dep { dep_name } | DepFeature { dep_name, .. } => Some(dep_name),
            _ => None,
        })
        .collect();
    if let Some(dep) = dependencies
        .iter()
        .find(|dep| dep.is_optional() && !used.contains(&dep.name_in_toml()))
    {
        bail!(
            "optional dependency `{}` is not included in any feature\n\
            Make sure that `dep:{}` is included in one of features in the [features] table.",
            dep.name_in_toml(),
            dep.name_in_toml(),
        );
    }

    Ok(map)
}

/// FeatureValue represents the types of dependencies a feature can have.
#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
pub enum FeatureValue {
    /// A feature enabling another feature.
    Feature(InternedString),
    /// A feature enabling a dependency with `dep:dep_name` syntax.
    Dep { dep_name: InternedString },
    /// A feature enabling a feature on a dependency with `crate_name/feat_name` syntax.
    DepFeature {
        dep_name: InternedString,
        dep_feature: InternedString,
        /// If `true`, indicates the `?` syntax is used, which means this will
        /// not automatically enable the dependency unless the dependency is
        /// activated through some other means.
        weak: bool,
    },
}

impl FeatureValue {
    /// Parses one entry of a feature's value list into a `FeatureValue`.
    pub fn new(feature: InternedString) -> FeatureValue {
        match feature.find('/') {
            // `dep/feat` or `dep?/feat` form.
            Some(pos) => {
                let (dep, dep_feat) = feature.split_at(pos);
                // Skip the `/` separator itself.
                let dep_feat = &dep_feat[1..];
                let (dep, weak) = if let Some(dep) = dep.strip_suffix('?') {
                    (dep, true)
                } else {
                    (dep, false)
                };
                FeatureValue::DepFeature {
                    dep_name: InternedString::new(dep),
                    dep_feature: InternedString::new(dep_feat),
                    weak,
                }
            }
            None => {
                if let Some(dep_name) = feature.strip_prefix("dep:") {
                    FeatureValue::Dep {
                        dep_name: InternedString::new(dep_name),
                    }
                } else {
                    FeatureValue::Feature(feature)
                }
            }
        }
    }

    /// Returns `true` if this feature explicitly used `dep:` syntax.
    pub fn has_dep_prefix(&self) -> bool {
        matches!(self, FeatureValue::Dep { .. })
    }
}

impl fmt::Display for FeatureValue {
    // Round-trips the value back to the manifest syntax accepted by `new`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        use self::FeatureValue::*;
        match self {
            Feature(feat) => write!(f, "{}", feat),
            Dep { dep_name } => write!(f, "dep:{}", dep_name),
            DepFeature {
                dep_name,
                dep_feature,
                weak,
            } => {
                let weak = if *weak { "?" } else { "" };
                write!(f, "{}{}/{}", dep_name, weak, dep_feature)
            }
        }
    }
}

// NOTE(review): generic parameters stripped by extraction tooling here too.
pub type FeatureMap = BTreeMap>;

/// Warns (does not yet hard-error; see FUTURE text) about feature names that
/// use characters outside the accepted Unicode-XID-based set.
fn validate_feature_name(config: &Config, pkg_id: PackageId, name: &str) -> CargoResult<()> {
    let mut chars = name.chars();
    // NOTE(review): "issue #8813 ," reads like a stripped inline URL
    // (`<https://...>`); confirm against the upstream source.
    const FUTURE: &str = "This was previously accepted but is being phased out; \
        it will become a hard error in a future release.\n\
        For more information, see issue #8813 , \
        and please leave a comment if this will be a problem for your project.";
    // The first character has a stricter rule than the rest.
    if let Some(ch) = chars.next() {
        if !(unicode_xid::UnicodeXID::is_xid_start(ch) || ch == '_' || ch.is_digit(10)) {
            config.shell().warn(&format!(
                "invalid character `{}` in feature `{}` in package {}, \
                the first character must be a Unicode XID start character or digit \
                (most letters or `_` or `0` to `9`)\n\
                {}",
                ch, name, pkg_id, FUTURE
            ))?;
        }
    }
    for ch in chars {
        if !(unicode_xid::UnicodeXID::is_xid_continue(ch) || ch == '-' || ch == '+' || ch == '.') {
            config.shell().warn(&format!(
                "invalid character `{}` in feature `{}` in package {}, \
                characters must be Unicode XID characters, `+`, or `.` \
                (numbers, `+`, `-`, `_`, `.`, or most letters)\n\
                {}",
                ch, name, pkg_id, FUTURE
            ))?;
        }
    }
    Ok(())
}
// ---- tar archive member header below (extraction residue, not Rust code) ----
cargo-0.66.0/src/cargo/core/workspace.rs000066400000000000000000002113011432416201200200450ustar00rootroot00000000000000use std::cell::RefCell;
use std::collections::hash_map::{Entry, HashMap};
use std::collections::{BTreeMap, BTreeSet, HashSet};
use std::path::{Path, PathBuf};
use std::rc::Rc;

use anyhow::{anyhow, bail, Context as _};
use glob::glob;
use itertools::Itertools;
use log::debug;
use toml_edit::easy as toml;
use url::Url;

use crate::core::compiler::Unit;
use crate::core::features::Features;
use crate::core::registry::PackageRegistry;
use crate::core::resolver::features::CliFeatures;
use crate::core::resolver::ResolveBehavior;
use crate::core::{Dependency, FeatureValue, PackageId, PackageIdSpec};
use crate::core::{EitherManifest, Package, SourceId, VirtualManifest};
use crate::ops;
use
crate::sources::{PathSource, CRATES_IO_INDEX, CRATES_IO_REGISTRY};
use crate::util::errors::{CargoResult, ManifestError};
use crate::util::interning::InternedString;
use crate::util::lev_distance;
use crate::util::toml::{read_manifest, InheritableFields, TomlDependency, TomlProfiles};
use crate::util::{config::ConfigRelativePath, Config, Filesystem, IntoUrl};
use cargo_util::paths;
use cargo_util::paths::normalize_path;
use pathdiff::diff_paths;

/// The core abstraction in Cargo for working with a workspace of crates.
///
/// A workspace is often created very early on and then threaded through all
/// other functions. It's typically through this object that the current
/// package is loaded and/or learned about.
// NOTE(review): several field types below are missing generic parameters
// (e.g. `Option,`, `Vec,`, `HashSet,`) -- stripped by extraction tooling.
#[derive(Debug)]
pub struct Workspace<'cfg> {
    config: &'cfg Config,

    // This path is a path to where the current cargo subcommand was invoked
    // from. That is the `--manifest-path` argument to Cargo, and
    // points to the "main crate" that we're going to worry about.
    current_manifest: PathBuf,

    // A list of packages found in this workspace. Always includes at least the
    // package mentioned by `current_manifest`.
    packages: Packages<'cfg>,

    // If this workspace includes more than one crate, this points to the root
    // of the workspace. This is `None` in the case that `[workspace]` is
    // missing, `package.workspace` is missing, and no `Cargo.toml` above
    // `current_manifest` was found on the filesystem with `[workspace]`.
    root_manifest: Option,

    // Shared target directory for all the packages of this workspace.
    // `None` if the default path of `root/target` should be used.
    target_dir: Option,

    // List of members in this workspace with a listing of all their manifest
    // paths. The packages themselves can be looked up through the `packages`
    // set above.
    members: Vec,
    member_ids: HashSet,

    // The subset of `members` that are used by the
    // `build`, `check`, `test`, and `bench` subcommands
    // when no package is selected with `--package` / `-p` and `--workspace`
    // is not used.
    //
    // This is set by the `default-members` config
    // in the `[workspace]` section.
    // When unset, this is the same as `members` for virtual workspaces
    // (`--workspace` is implied)
    // or only the root package for non-virtual workspaces.
    default_members: Vec,

    // `true` if this is a temporary workspace created for the purposes of the
    // `cargo install` or `cargo package` commands.
    is_ephemeral: bool,

    // `true` if this workspace should enforce optional dependencies even when
    // not needed; false if this workspace should only enforce dependencies
    // needed by the current configuration (such as in cargo install). In some
    // cases `false` also results in the non-enforcement of dev-dependencies.
    require_optional_deps: bool,

    // A cache of loaded packages for particular paths which is disjoint from
    // `packages` up above, used in the `load` method down below.
    loaded_packages: RefCell>,

    // If `true`, then the resolver will ignore any existing `Cargo.lock`
    // file. This is set for `cargo install` without `--locked`.
    ignore_lock: bool,

    /// The resolver behavior specified with the `resolver` field.
    resolve_behavior: ResolveBehavior,

    /// Workspace-level custom metadata
    custom_metadata: Option,
}

// Separate structure for tracking loaded packages (to avoid loading anything
// twice), and this is separate to help appease the borrow checker.
#[derive(Debug)]
struct Packages<'cfg> {
    config: &'cfg Config,
    packages: HashMap,
}

#[derive(Debug)]
pub enum MaybePackage {
    Package(Package),
    Virtual(VirtualManifest),
}

/// Configuration of a workspace in a manifest.
#[derive(Debug, Clone)]
pub enum WorkspaceConfig {
    /// Indicates that `[workspace]` was present and the members were
    /// optionally specified as well.
    Root(WorkspaceRootConfig),

    /// Indicates that `[workspace]` was present and the `root` field is the
    /// optional value of `package.workspace`, if present.
    Member { root: Option },
}

impl WorkspaceConfig {
    pub fn inheritable(&self) -> Option<&InheritableFields> {
        match self {
            WorkspaceConfig::Root(root) => Some(&root.inheritable_fields),
            WorkspaceConfig::Member { .. } => None,
        }
    }

    /// Returns the path of the workspace root based on this `[workspace]` configuration.
    ///
    /// Returns `None` if the root is not explicitly known.
    ///
    /// * `self_path` is the path of the manifest this `WorkspaceConfig` is located.
    /// * `look_from` is the path where discovery started (usually the current
    ///   working directory), used for `workspace.exclude` checking.
    fn get_ws_root(&self, self_path: &Path, look_from: &Path) -> Option {
        match self {
            WorkspaceConfig::Root(ances_root_config) => {
                debug!("find_root - found a root checking exclusion");
                if !ances_root_config.is_excluded(look_from) {
                    debug!("find_root - found!");
                    Some(self_path.to_owned())
                } else {
                    None
                }
            }
            WorkspaceConfig::Member {
                root: Some(path_to_root),
            } => {
                debug!("find_root - found pointer");
                Some(read_root_pointer(self_path, path_to_root))
            }
            WorkspaceConfig::Member { .. } => None,
        }
    }
}

/// Intermediate configuration of a workspace root in a manifest.
///
/// Knows the Workspace Root path, as well as `members` and `exclude` lists of path patterns, which
/// together tell if some path is recognized as a member by this root or not.
#[derive(Debug, Clone)]
pub struct WorkspaceRootConfig {
    root_dir: PathBuf,
    members: Option>,
    default_members: Option>,
    exclude: Vec,
    inheritable_fields: InheritableFields,
    custom_metadata: Option,
}

impl<'cfg> Workspace<'cfg> {
    /// Creates a new workspace given the target manifest pointed to by
    /// `manifest_path`.
    ///
    /// This function will construct the entire workspace by determining the
    /// root and all member packages.
/// It will then validate the workspace
    /// before returning it, so `Ok` is only returned for valid workspaces.
    pub fn new(manifest_path: &Path, config: &'cfg Config) -> CargoResult> {
        let mut ws = Workspace::new_default(manifest_path.to_path_buf(), config);
        ws.target_dir = config.target_dir()?;

        if manifest_path.is_relative() {
            bail!(
                "manifest_path:{:?} is not an absolute path. Please provide an absolute path.",
                manifest_path
            )
        } else {
            ws.root_manifest = ws.find_root(manifest_path)?;
        }

        ws.custom_metadata = ws
            .load_workspace_config()?
            .and_then(|cfg| cfg.custom_metadata);
        ws.find_members()?;
        ws.set_resolve_behavior();
        ws.validate()?;
        Ok(ws)
    }

    // Shared constructor core: a single-member, non-ephemeral workspace with
    // default settings; callers fill in the rest.
    fn new_default(current_manifest: PathBuf, config: &'cfg Config) -> Workspace<'cfg> {
        Workspace {
            config,
            current_manifest,
            packages: Packages {
                config,
                packages: HashMap::new(),
            },
            root_manifest: None,
            target_dir: None,
            members: Vec::new(),
            member_ids: HashSet::new(),
            default_members: Vec::new(),
            is_ephemeral: false,
            require_optional_deps: true,
            loaded_packages: RefCell::new(HashMap::new()),
            ignore_lock: false,
            resolve_behavior: ResolveBehavior::V1,
            custom_metadata: None,
        }
    }

    pub fn new_virtual(
        root_path: PathBuf,
        current_manifest: PathBuf,
        manifest: VirtualManifest,
        config: &'cfg Config,
    ) -> CargoResult> {
        let mut ws = Workspace::new_default(current_manifest, config);
        ws.root_manifest = Some(root_path.join("Cargo.toml"));
        ws.target_dir = config.target_dir()?;
        ws.packages
            .packages
            .insert(root_path, MaybePackage::Virtual(manifest));
        ws.find_members()?;
        ws.set_resolve_behavior();
        // TODO: validation does not work because it walks up the directory
        // tree looking for the root which is a fake file that doesn't exist.
        Ok(ws)
    }

    /// Creates a "temporary workspace" from one package which only contains
    /// that package.
    ///
    /// This constructor will not touch the filesystem and only creates an
    /// in-memory workspace. That is, all configuration is ignored, it's just
    /// intended for that one package.
    ///
    /// This is currently only used in niche situations like `cargo install` or
    /// `cargo package`.
    pub fn ephemeral(
        package: Package,
        config: &'cfg Config,
        target_dir: Option,
        require_optional_deps: bool,
    ) -> CargoResult> {
        let mut ws = Workspace::new_default(package.manifest_path().to_path_buf(), config);
        ws.is_ephemeral = true;
        ws.require_optional_deps = require_optional_deps;
        let key = ws.current_manifest.parent().unwrap();
        let id = package.package_id();
        let package = MaybePackage::Package(package);
        ws.packages.packages.insert(key.to_path_buf(), package);
        // An explicitly supplied target dir wins over the config's.
        ws.target_dir = if let Some(dir) = target_dir {
            Some(dir)
        } else {
            ws.config.target_dir()?
        };
        ws.members.push(ws.current_manifest.clone());
        ws.member_ids.insert(id);
        ws.default_members.push(ws.current_manifest.clone());
        ws.set_resolve_behavior();
        Ok(ws)
    }

    fn set_resolve_behavior(&mut self) {
        // - If resolver is specified in the workspace definition, use that.
        // - If the root package specifies the resolver, use that.
        // - If the root package specifies edition 2021, use v2.
        // - Otherwise, use the default v1.
        self.resolve_behavior = match self.root_maybe() {
            MaybePackage::Package(p) => p
                .manifest()
                .resolve_behavior()
                .unwrap_or_else(|| p.manifest().edition().default_resolve_behavior()),
            MaybePackage::Virtual(vm) => vm.resolve_behavior().unwrap_or(ResolveBehavior::V1),
        }
    }

    /// Returns the current package of this workspace.
    ///
    /// Note that this can return an error if the current manifest is
    /// actually a "virtual Cargo.toml", in which case an error is returned
    /// indicating that something else should be passed.
    pub fn current(&self) -> CargoResult<&Package> {
        let pkg = self.current_opt().ok_or_else(|| {
            anyhow::format_err!(
                "manifest path `{}` is a virtual manifest, but this \
                command requires running against an actual package in \
                this workspace",
                self.current_manifest.display()
            )
        })?;
        Ok(pkg)
    }

    pub fn current_mut(&mut self) -> CargoResult<&mut Package> {
        let cm = self.current_manifest.clone();
        let pkg = self.current_opt_mut().ok_or_else(|| {
            anyhow::format_err!(
                "manifest path `{}` is a virtual manifest, but this \
                command requires running against an actual package in \
                this workspace",
                cm.display()
            )
        })?;
        Ok(pkg)
    }

    pub fn current_opt(&self) -> Option<&Package> {
        match *self.packages.get(&self.current_manifest) {
            MaybePackage::Package(ref p) => Some(p),
            MaybePackage::Virtual(..) => None,
        }
    }

    pub fn current_opt_mut(&mut self) -> Option<&mut Package> {
        match *self.packages.get_mut(&self.current_manifest) {
            MaybePackage::Package(ref mut p) => Some(p),
            MaybePackage::Virtual(..) => None,
        }
    }

    pub fn is_virtual(&self) -> bool {
        match *self.packages.get(&self.current_manifest) {
            MaybePackage::Package(..) => false,
            MaybePackage::Virtual(..) => true,
        }
    }

    /// Returns the `Config` this workspace is associated with.
    pub fn config(&self) -> &'cfg Config {
        self.config
    }

    pub fn profiles(&self) -> Option<&TomlProfiles> {
        match self.root_maybe() {
            MaybePackage::Package(p) => p.manifest().profiles(),
            MaybePackage::Virtual(vm) => vm.profiles(),
        }
    }

    /// Returns the root path of this workspace.
    ///
    /// That is, this returns the path of the directory containing the
    /// `Cargo.toml` which is the root of this workspace.
    pub fn root(&self) -> &Path {
        self.root_manifest().parent().unwrap()
    }

    /// Returns the path of the `Cargo.toml` which is the root of this
    /// workspace.
    pub fn root_manifest(&self) -> &Path {
        self.root_manifest
            .as_ref()
            .unwrap_or(&self.current_manifest)
    }

    /// Returns the root Package or VirtualManifest.
pub fn root_maybe(&self) -> &MaybePackage {
        self.packages.get(self.root_manifest())
    }

    pub fn target_dir(&self) -> Filesystem {
        self.target_dir
            .clone()
            .unwrap_or_else(|| Filesystem::new(self.root().join("target")))
    }

    /// Returns the root `[replace]` section of this workspace.
    ///
    /// This may be from a virtual crate or an actual crate.
    pub fn root_replace(&self) -> &[(PackageIdSpec, Dependency)] {
        match self.root_maybe() {
            MaybePackage::Package(p) => p.manifest().replace(),
            MaybePackage::Virtual(vm) => vm.replace(),
        }
    }

    // Reads `[patch]` tables out of the cargo *config* (not the manifest) and
    // converts each entry into resolved `Dependency` values keyed by URL.
    fn config_patch(&self) -> CargoResult>> {
        let config_patch: Option<
            BTreeMap>>,
        > = self.config.get("patch")?;

        let source = SourceId::for_path(self.root())?;

        let mut warnings = Vec::new();
        let mut nested_paths = Vec::new();

        let mut patch = HashMap::new();
        for (url, deps) in config_patch.into_iter().flatten() {
            // `crates-io` is special-cased; anything else must resolve to a
            // registry name or parse as a URL.
            let url = match &url[..] {
                CRATES_IO_REGISTRY => CRATES_IO_INDEX.parse().unwrap(),
                url => self
                    .config
                    .get_registry_index(url)
                    .or_else(|_| url.into_url())
                    .with_context(|| {
                        format!("[patch] entry `{}` should be a URL or registry name", url)
                    })?,
            };
            patch.insert(
                url,
                deps.iter()
                    .map(|(name, dep)| {
                        dep.to_dependency_split(
                            name,
                            source,
                            &mut nested_paths,
                            self.config,
                            &mut warnings,
                            /* platform */ None,
                            // NOTE: Since we use ConfigRelativePath, this root isn't used as
                            // any relative paths are resolved before they'd be joined with root.
                            Path::new("unused-relative-path"),
                            self.unstable_features(),
                            /* kind */ None,
                        )
                    })
                    .collect::>>()?,
            );
        }

        for message in warnings {
            self.config
                .shell()
                .warn(format!("[patch] in cargo config: {}", message))?
        }

        Ok(patch)
    }

    /// Returns the root `[patch]` section of this workspace.
    ///
    /// This may be from a virtual crate or an actual crate.
    pub fn root_patch(&self) -> CargoResult>> {
        let from_manifest = match self.root_maybe() {
            MaybePackage::Package(p) => p.manifest().patch(),
            MaybePackage::Virtual(vm) => vm.patch(),
        };

        let from_config = self.config_patch()?;
        // Fast paths: one side empty means no merging is needed.
        if from_config.is_empty() {
            return Ok(from_manifest.clone());
        }
        if from_manifest.is_empty() {
            return Ok(from_config);
        }

        // We could just chain from_manifest and from_config,
        // but that's not quite right as it won't deal with overlaps.
        let mut combined = from_config;
        for (url, deps_from_manifest) in from_manifest {
            if let Some(deps_from_config) = combined.get_mut(url) {
                // We want from_config to take precedence for each patched name.
                // NOTE: This is inefficient if the number of patches is large!
                let mut from_manifest_pruned = deps_from_manifest.clone();
                for dep_from_config in &mut *deps_from_config {
                    if let Some(i) = from_manifest_pruned.iter().position(|dep_from_manifest| {
                        // XXX: should this also take into account version numbers?
                        dep_from_config.name_in_toml() == dep_from_manifest.name_in_toml()
                    }) {
                        from_manifest_pruned.swap_remove(i);
                    }
                }
                // Whatever is left does not exist in manifest dependencies.
                deps_from_config.extend(from_manifest_pruned);
            } else {
                combined.insert(url.clone(), deps_from_manifest.clone());
            }
        }
        Ok(combined)
    }

    /// Returns an iterator over all packages in this workspace
    pub fn members(&self) -> impl Iterator {
        let packages = &self.packages;
        self.members
            .iter()
            .filter_map(move |path| match packages.get(path) {
                &MaybePackage::Package(ref p) => Some(p),
                _ => None,
            })
    }

    /// Returns a mutable iterator over all packages in this workspace
    pub fn members_mut(&mut self) -> impl Iterator {
        let packages = &mut self.packages.packages;
        // Member paths are manifest paths; the packages map is keyed by the
        // containing directory, hence `parent()`.
        let members: HashSet<_> = self
            .members
            .iter()
            .map(|path| path.parent().unwrap().to_owned())
            .collect();

        packages.iter_mut().filter_map(move |(path, package)| {
            if members.contains(path) {
                if let MaybePackage::Package(ref mut p) = package {
                    return Some(p);
                }
            }
            None
        })
    }

    /// Returns an iterator over default packages in this workspace
    pub fn default_members<'a>(&'a self) -> impl Iterator {
        let packages = &self.packages;
        self.default_members
            .iter()
            .filter_map(move |path| match packages.get(path) {
                &MaybePackage::Package(ref p) => Some(p),
                _ => None,
            })
    }

    /// Returns an iterator over default packages in this workspace
    pub fn default_members_mut(&mut self) -> impl Iterator {
        let packages = &mut self.packages.packages;
        let members: HashSet<_> = self
            .default_members
            .iter()
            .map(|path| path.parent().unwrap().to_owned())
            .collect();

        packages.iter_mut().filter_map(move |(path, package)| {
            if members.contains(path) {
                if let MaybePackage::Package(ref mut p) = package {
                    return Some(p);
                }
            }
            None
        })
    }

    /// Returns true if the package is a member of the workspace.
pub fn is_member(&self, pkg: &Package) -> bool {
        self.member_ids.contains(&pkg.package_id())
    }

    pub fn is_ephemeral(&self) -> bool {
        self.is_ephemeral
    }

    pub fn require_optional_deps(&self) -> bool {
        self.require_optional_deps
    }

    pub fn set_require_optional_deps(
        &mut self,
        require_optional_deps: bool,
    ) -> &mut Workspace<'cfg> {
        self.require_optional_deps = require_optional_deps;
        self
    }

    pub fn ignore_lock(&self) -> bool {
        self.ignore_lock
    }

    pub fn set_ignore_lock(&mut self, ignore_lock: bool) -> &mut Workspace<'cfg> {
        self.ignore_lock = ignore_lock;
        self
    }

    pub fn custom_metadata(&self) -> Option<&toml::Value> {
        self.custom_metadata.as_ref()
    }

    pub fn load_workspace_config(&mut self) -> CargoResult> {
        // If we didn't find a root, it must mean there is no [workspace] section, and thus no
        // metadata.
        if let Some(root_path) = &self.root_manifest {
            let root_package = self.packages.load(root_path)?;
            match root_package.workspace_config() {
                WorkspaceConfig::Root(ref root_config) => {
                    return Ok(Some(root_config.clone()));
                }
                _ => bail!(
                    "root of a workspace inferred but wasn't a root: {}",
                    root_path.display()
                ),
            }
        }

        Ok(None)
    }

    /// Finds the root of a workspace for the crate whose manifest is located
    /// at `manifest_path`.
    ///
    /// This will parse the `Cargo.toml` at `manifest_path` and then interpret
    /// the workspace configuration, optionally walking up the filesystem
    /// looking for other workspace roots.
    ///
    /// Returns an error if `manifest_path` isn't actually a valid manifest or
    /// if some other transient error happens.
    fn find_root(&mut self, manifest_path: &Path) -> CargoResult> {
        let current = self.packages.load(manifest_path)?;
        match current
            .workspace_config()
            .get_ws_root(manifest_path, manifest_path)
        {
            Some(root_path) => {
                debug!("find_root - is root {}", manifest_path.display());
                Ok(Some(root_path))
            }
            // Not a root itself: walk up the filesystem, loading each
            // candidate manifest via the loader closure.
            None => find_workspace_root_with_loader(manifest_path, self.config, |self_path| {
                Ok(self
                    .packages
                    .load(self_path)?
                    .workspace_config()
                    .get_ws_root(self_path, manifest_path))
            }),
        }
    }

    /// After the root of a workspace has been located, probes for all members
    /// of a workspace.
    ///
    /// If the `workspace.members` configuration is present, then this just
    /// verifies that those are all valid packages to point to. Otherwise, this
    /// will transitively follow all `path` dependencies looking for members of
    /// the workspace.
    fn find_members(&mut self) -> CargoResult<()> {
        let workspace_config = match self.load_workspace_config()? {
            Some(workspace_config) => workspace_config,
            None => {
                debug!("find_members - only me as a member");
                self.members.push(self.current_manifest.clone());
                self.default_members.push(self.current_manifest.clone());
                if let Ok(pkg) = self.current() {
                    let id = pkg.package_id();
                    self.member_ids.insert(id);
                }
                return Ok(());
            }
        };

        // self.root_manifest must be Some to have retrieved workspace_config
        let root_manifest_path = self.root_manifest.clone().unwrap();

        let members_paths =
            workspace_config.members_paths(workspace_config.members.as_ref().unwrap_or(&vec![]))?;
        // `default-members` is only honored when invoked from the root.
        let default_members_paths = if root_manifest_path == self.current_manifest {
            if let Some(ref default) = workspace_config.default_members {
                Some(workspace_config.members_paths(default)?)
            } else {
                None
            }
        } else {
            None
        };

        for path in &members_paths {
            self.find_path_deps(&path.join("Cargo.toml"), &root_manifest_path, false)
                .with_context(|| {
                    format!(
                        "failed to load manifest for workspace member `{}`",
                        path.display()
                    )
                })?;
        }

        self.find_path_deps(&root_manifest_path, &root_manifest_path, false)?;

        if let Some(default) = default_members_paths {
            for path in default {
                let normalized_path = paths::normalize_path(&path);
                let manifest_path = normalized_path.join("Cargo.toml");
                if !self.members.contains(&manifest_path) {
                    // default-members are allowed to be excluded, but they
                    // still must be referred to by the original (unfiltered)
                    // members list. Note that we aren't testing against the
                    // manifest path, both because `members_paths` doesn't
                    // include `/Cargo.toml`, and because excluded paths may not
                    // be crates.
                    let exclude = members_paths.contains(&normalized_path)
                        && workspace_config.is_excluded(&normalized_path);
                    if exclude {
                        continue;
                    }
                    bail!(
                        "package `{}` is listed in workspace’s default-members \
                        but is not a member.",
                        path.display()
                    )
                }
                self.default_members.push(manifest_path)
            }
        } else if self.is_virtual() {
            self.default_members = self.members.clone()
        } else {
            self.default_members.push(self.current_manifest.clone())
        }

        Ok(())
    }

    // Recursively registers `manifest_path` (and, via path dependencies, its
    // transitive path deps) as workspace members, honoring exclusions.
    fn find_path_deps(
        &mut self,
        manifest_path: &Path,
        root_manifest: &Path,
        is_path_dep: bool,
    ) -> CargoResult<()> {
        let manifest_path = paths::normalize_path(manifest_path);
        if self.members.contains(&manifest_path) {
            return Ok(());
        }
        if is_path_dep
            && !manifest_path.parent().unwrap().starts_with(self.root())
            && self.find_root(&manifest_path)? != self.root_manifest
        {
            // If `manifest_path` is a path dependency outside of the workspace,
            // don't add it, or any of its dependencies, as a members.
            return Ok(());
        }

        if let WorkspaceConfig::Root(ref root_config) =
            *self.packages.load(root_manifest)?.workspace_config()
        {
            if root_config.is_excluded(&manifest_path) {
                return Ok(());
            }
        }

        debug!("find_members - {}", manifest_path.display());
        self.members.push(manifest_path.clone());

        let candidates = {
            let pkg = match *self.packages.load(&manifest_path)?
{
                MaybePackage::Package(ref p) => p,
                MaybePackage::Virtual(_) => return Ok(()),
            };
            self.member_ids.insert(pkg.package_id());
            // Collect the manifest paths of all path dependencies for the
            // recursive walk below.
            pkg.dependencies()
                .iter()
                .map(|d| (d.source_id(), d.package_name()))
                .filter(|(s, _)| s.is_path())
                .filter_map(|(s, n)| s.url().to_file_path().ok().map(|p| (p, n)))
                .map(|(p, n)| (p.join("Cargo.toml"), n))
                .collect::>()
        };
        for (path, name) in candidates {
            self.find_path_deps(&path, root_manifest, true)
                .with_context(|| format!("failed to load manifest for dependency `{}`", name))
                .map_err(|err| ManifestError::new(err, manifest_path.clone()))?;
        }
        Ok(())
    }

    /// Returns the unstable nightly-only features enabled via `cargo-features` in the manifest.
    pub fn unstable_features(&self) -> &Features {
        match self.root_maybe() {
            MaybePackage::Package(p) => p.manifest().unstable_features(),
            MaybePackage::Virtual(vm) => vm.unstable_features(),
        }
    }

    pub fn resolve_behavior(&self) -> ResolveBehavior {
        self.resolve_behavior
    }

    /// Returns `true` if this workspace uses the new CLI features behavior.
    ///
    /// The old behavior only allowed choosing the features from the package
    /// in the current directory, regardless of which packages were chosen
    /// with the -p flags. The new behavior allows selecting features from the
    /// packages chosen on the command line (with -p or --workspace flags),
    /// ignoring whatever is in the current directory.
    pub fn allows_new_cli_feature_behavior(&self) -> bool {
        self.is_virtual()
            || match self.resolve_behavior() {
                ResolveBehavior::V1 => false,
                ResolveBehavior::V2 => true,
            }
    }

    /// Validates a workspace, ensuring that a number of invariants are upheld:
    ///
    /// 1. A workspace only has one root.
    /// 2. All workspace members agree on this one root as the root.
    /// 3. The current crate is a member of this workspace.
    fn validate(&mut self) -> CargoResult<()> {
        // The rest of the checks require a VirtualManifest or multiple members.
        if self.root_manifest.is_none() {
            return Ok(());
        }
        self.validate_unique_names()?;
        self.validate_workspace_roots()?;
        self.validate_members()?;
        self.error_if_manifest_not_in_members()?;
        self.validate_manifest()
    }

    // Invariant: no two workspace members may share a package name.
    fn validate_unique_names(&self) -> CargoResult<()> {
        let mut names = BTreeMap::new();
        for member in self.members.iter() {
            let package = self.packages.get(member);
            let name = match *package {
                MaybePackage::Package(ref p) => p.name(),
                MaybePackage::Virtual(_) => continue,
            };
            if let Some(prev) = names.insert(name, member) {
                bail!(
                    "two packages named `{}` in this workspace:\n\
                    - {}\n\
                    - {}",
                    name,
                    prev.display(),
                    member.display()
                );
            }
        }
        Ok(())
    }

    // Invariant: exactly one member carries a `[workspace]` root table.
    fn validate_workspace_roots(&self) -> CargoResult<()> {
        // NOTE(review): `Vec =` is missing its generic parameter (stripped
        // by extraction tooling).
        let roots: Vec = self
            .members
            .iter()
            .filter(|&member| {
                let config = self.packages.get(member).workspace_config();
                matches!(config, WorkspaceConfig::Root(_))
            })
            .map(|member| member.parent().unwrap().to_path_buf())
            .collect();
        match roots.len() {
            1 => Ok(()),
            0 => bail!(
                "`package.workspace` configuration points to a crate \
                which is not configured with [workspace]: \n\
                configuration at: {}\n\
                points to: {}",
                self.current_manifest.display(),
                self.root_manifest.as_ref().unwrap().display()
            ),
            _ => {
                bail!(
                    "multiple workspace roots found in the same workspace:\n{}",
                    roots
                        .iter()
                        .map(|r| format!(" {}", r.display()))
                        .collect::>()
                        .join("\n")
                );
            }
        }
    }

    // Invariant: every member resolves back to this workspace's root.
    fn validate_members(&mut self) -> CargoResult<()> {
        for member in self.members.clone() {
            let root = self.find_root(&member)?;
            if root == self.root_manifest {
                continue;
            }

            match root {
                Some(root) => {
                    bail!(
                        "package `{}` is a member of the wrong workspace\n\
                        expected: {}\n\
                        actual: {}",
                        member.display(),
                        self.root_manifest.as_ref().unwrap().display(),
                        root.display()
                    );
                }
                None => {
                    bail!(
                        "workspace member `{}` is not hierarchically below \
                        the workspace root `{}`",
                        member.display(),
                        self.root_manifest.as_ref().unwrap().display()
                    );
                }
            }
        }
        Ok(())
    }

    // Errors with a targeted fix-it message when the current package found a
    // workspace root but is not in that root's member list.
    fn error_if_manifest_not_in_members(&mut self) -> CargoResult<()> {
        if self.members.contains(&self.current_manifest) {
            return Ok(());
        }

        let root = self.root_manifest.as_ref().unwrap();
        let root_dir = root.parent().unwrap();
        let current_dir = self.current_manifest.parent().unwrap();
        let root_pkg = self.packages.get(root);

        // FIXME: Make this more generic by using a relative path resolver between member and root.
        let members_msg = match current_dir.strip_prefix(root_dir) {
            Ok(rel) => format!(
                "this may be fixable by adding `{}` to the \
                `workspace.members` array of the manifest \
                located at: {}",
                rel.display(),
                root.display()
            ),
            Err(_) => format!(
                "this may be fixable by adding a member to \
                the `workspace.members` array of the \
                manifest located at: {}",
                root.display()
            ),
        };
        let extra = match *root_pkg {
            MaybePackage::Virtual(_) => members_msg,
            MaybePackage::Package(ref p) => {
                let has_members_list = match *p.manifest().workspace_config() {
                    WorkspaceConfig::Root(ref root_config) => root_config.has_members_list(),
                    WorkspaceConfig::Member { .. } => unreachable!(),
                };
                if !has_members_list {
                    format!(
                        "this may be fixable by ensuring that this \
                        crate is depended on by the workspace \
                        root: {}",
                        root.display()
                    )
                } else {
                    members_msg
                }
            }
        };
        bail!(
            "current package believes it's in a workspace when it's not:\n\
            current: {}\n\
            workspace: {}\n\n{}\n\
            Alternatively, to keep it out of the workspace, add the package \
            to the `workspace.exclude` array, or add an empty `[workspace]` \
            table to the package's manifest.",
            self.current_manifest.display(),
            root.display(),
            extra
        );
    }

    // NOTE(review): this function is cut off at the end of the visible chunk;
    // only the portion shown here is reviewed.
    fn validate_manifest(&mut self) -> CargoResult<()> {
        if let Some(ref root_manifest) = self.root_manifest {
            for pkg in self
                .members()
                .filter(|p| p.manifest_path() != root_manifest)
            {
                let manifest = pkg.manifest();
                let emit_warning = |what| -> CargoResult<()> {
                    let msg = format!(
                        "{} for the non root package will be ignored, \
                        specify {} at the workspace root:\n\
                        package: {}\n\
                        workspace: {}",
                        what,
                        what,
                        pkg.manifest_path().display(),
                        root_manifest.display(),
                    );
self.config.shell().warn(&msg) }; if manifest.original().has_profiles() { emit_warning("profiles")?; } if !manifest.replace().is_empty() { emit_warning("replace")?; } if !manifest.patch().is_empty() { emit_warning("patch")?; } if let Some(behavior) = manifest.resolve_behavior() { if behavior != self.resolve_behavior { // Only warn if they don't match. emit_warning("resolver")?; } } } } Ok(()) } pub fn load(&self, manifest_path: &Path) -> CargoResult { match self.packages.maybe_get(manifest_path) { Some(&MaybePackage::Package(ref p)) => return Ok(p.clone()), Some(&MaybePackage::Virtual(_)) => bail!("cannot load workspace root"), None => {} } let mut loaded = self.loaded_packages.borrow_mut(); if let Some(p) = loaded.get(manifest_path).cloned() { return Ok(p); } let source_id = SourceId::for_path(manifest_path.parent().unwrap())?; let (package, _nested_paths) = ops::read_package(manifest_path, source_id, self.config)?; loaded.insert(manifest_path.to_path_buf(), package.clone()); Ok(package) } /// Preload the provided registry with already loaded packages. /// /// A workspace may load packages during construction/parsing/early phases /// for various operations, and this preload step avoids doubly-loading and /// parsing crates on the filesystem by inserting them all into the registry /// with their in-memory formats. pub fn preload(&self, registry: &mut PackageRegistry<'cfg>) { // These can get weird as this generally represents a workspace during // `cargo install`. Things like git repositories will actually have a // `PathSource` with multiple entries in it, so the logic below is // mostly just an optimization for normal `cargo build` in workspaces // during development. 
if self.is_ephemeral { return; } for pkg in self.packages.packages.values() { let pkg = match *pkg { MaybePackage::Package(ref p) => p.clone(), MaybePackage::Virtual(_) => continue, }; let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), self.config); src.preload_with(pkg); registry.add_preloaded(Box::new(src)); } } pub fn emit_warnings(&self) -> CargoResult<()> { for (path, maybe_pkg) in &self.packages.packages { let warnings = match maybe_pkg { MaybePackage::Package(pkg) => pkg.manifest().warnings().warnings(), MaybePackage::Virtual(vm) => vm.warnings().warnings(), }; let path = path.join("Cargo.toml"); for warning in warnings { if warning.is_critical { let err = anyhow::format_err!("{}", warning.message); let cx = anyhow::format_err!("failed to parse manifest at `{}`", path.display()); return Err(err.context(cx)); } else { let msg = if self.root_manifest.is_none() { warning.message.to_string() } else { // In a workspace, it can be confusing where a warning // originated, so include the path. format!("{}: {}", path.display(), warning.message) }; self.config.shell().warn(msg)? } } } Ok(()) } pub fn set_target_dir(&mut self, target_dir: Filesystem) { self.target_dir = Some(target_dir); } /// Returns a Vec of `(&Package, RequestedFeatures)` tuples that /// represent the workspace members that were requested on the command-line. /// /// `specs` may be empty, which indicates it should return all workspace /// members. In this case, `requested_features.all_features` must be /// `true`. This is used for generating `Cargo.lock`, which must include /// all members with all features enabled. pub fn members_with_features( &self, specs: &[PackageIdSpec], cli_features: &CliFeatures, ) -> CargoResult> { assert!( !specs.is_empty() || cli_features.all_features, "no specs requires all_features" ); if specs.is_empty() { // When resolving the entire workspace, resolve each member with // all features enabled. 
return Ok(self .members() .map(|m| (m, CliFeatures::new_all(true))) .collect()); } if self.allows_new_cli_feature_behavior() { self.members_with_features_new(specs, cli_features) } else { Ok(self.members_with_features_old(specs, cli_features)) } } /// Returns the requested features for the given member. /// This filters out any named features that the member does not have. fn collect_matching_features( member: &Package, cli_features: &CliFeatures, found_features: &mut BTreeSet, ) -> CliFeatures { if cli_features.features.is_empty() { return cli_features.clone(); } // Only include features this member defines. let summary = member.summary(); // Features defined in the manifest let summary_features = summary.features(); // Dependency name -> dependency let dependencies: BTreeMap = summary .dependencies() .iter() .map(|dep| (dep.name_in_toml(), dep)) .collect(); // Features that enable optional dependencies let optional_dependency_names: BTreeSet<_> = dependencies .iter() .filter(|(_, dep)| dep.is_optional()) .map(|(name, _)| name) .copied() .collect(); let mut features = BTreeSet::new(); // Checks if a member contains the given feature. let summary_or_opt_dependency_feature = |feature: &InternedString| -> bool { summary_features.contains_key(feature) || optional_dependency_names.contains(feature) }; for feature in cli_features.features.iter() { match feature { FeatureValue::Feature(f) => { if summary_or_opt_dependency_feature(f) { // feature exists in this member. features.insert(feature.clone()); found_features.insert(feature.clone()); } } // This should be enforced by CliFeatures. FeatureValue::Dep { .. } => panic!("unexpected dep: syntax {}", feature), FeatureValue::DepFeature { dep_name, dep_feature, weak: _, } => { if dependencies.contains_key(dep_name) { // pkg/feat for a dependency. // Will rely on the dependency resolver to validate `dep_feature`. 
features.insert(feature.clone()); found_features.insert(feature.clone()); } else if *dep_name == member.name() && summary_or_opt_dependency_feature(dep_feature) { // member/feat where "feat" is a feature in member. // // `weak` can be ignored here, because the member // either is or isn't being built. features.insert(FeatureValue::Feature(*dep_feature)); found_features.insert(feature.clone()); } } } } CliFeatures { features: Rc::new(features), all_features: cli_features.all_features, uses_default_features: cli_features.uses_default_features, } } fn report_unknown_features_error( &self, specs: &[PackageIdSpec], cli_features: &CliFeatures, found_features: &BTreeSet, ) -> CargoResult<()> { // Keeps track of which features were contained in summary of `member` to suggest similar features in errors let mut summary_features: Vec = Default::default(); // Keeps track of `member` dependencies (`dep/feature`) and their features names to suggest similar features in error let mut dependencies_features: BTreeMap = Default::default(); // Keeps track of `member` optional dependencies names (which can be enabled with feature) to suggest similar features in error let mut optional_dependency_names: Vec = Default::default(); // Keeps track of which features were contained in summary of `member` to suggest similar features in errors let mut summary_features_per_member: BTreeMap<&Package, BTreeSet> = Default::default(); // Keeps track of `member` optional dependencies (which can be enabled with feature) to suggest similar features in error let mut optional_dependency_names_per_member: BTreeMap<&Package, BTreeSet> = Default::default(); for member in self .members() .filter(|m| specs.iter().any(|spec| spec.matches(m.package_id()))) { // Only include features this member defines. 
let summary = member.summary(); // Features defined in the manifest summary_features.extend(summary.features().keys()); summary_features_per_member .insert(member, summary.features().keys().copied().collect()); // Dependency name -> dependency let dependencies: BTreeMap = summary .dependencies() .iter() .map(|dep| (dep.name_in_toml(), dep)) .collect(); dependencies_features.extend( dependencies .iter() .map(|(name, dep)| (*name, dep.features())), ); // Features that enable optional dependencies let optional_dependency_names_raw: BTreeSet<_> = dependencies .iter() .filter(|(_, dep)| dep.is_optional()) .map(|(name, _)| name) .copied() .collect(); optional_dependency_names.extend(optional_dependency_names_raw.iter()); optional_dependency_names_per_member.insert(member, optional_dependency_names_raw); } let levenshtein_test = |a: InternedString, b: InternedString| lev_distance(a.as_str(), b.as_str()) < 4; let suggestions: Vec<_> = cli_features .features .difference(found_features) .map(|feature| match feature { // Simple feature, check if any of the optional dependency features or member features are close enough FeatureValue::Feature(typo) => { // Finds member features which are similar to the requested feature. let summary_features = summary_features .iter() .filter(move |feature| levenshtein_test(**feature, *typo)); // Finds optional dependencies which name is similar to the feature let optional_dependency_features = optional_dependency_names .iter() .filter(move |feature| levenshtein_test(**feature, *typo)); summary_features .chain(optional_dependency_features) .map(|s| s.to_string()) .collect::>() } FeatureValue::Dep { .. } => panic!("unexpected dep: syntax {}", feature), FeatureValue::DepFeature { dep_name, dep_feature, weak: _, } => { // Finds set of `pkg/feat` that are very similar to current `pkg/feat`. 
let pkg_feat_similar = dependencies_features .iter() .filter(|(name, _)| levenshtein_test(**name, *dep_name)) .map(|(name, features)| { ( name, features .iter() .filter(|feature| levenshtein_test(**feature, *dep_feature)) .collect::>(), ) }) .map(|(name, features)| { features .into_iter() .map(move |feature| format!("{}/{}", name, feature)) }) .flatten(); // Finds set of `member/optional_dep` features which name is similar to current `pkg/feat`. let optional_dependency_features = optional_dependency_names_per_member .iter() .filter(|(package, _)| levenshtein_test(package.name(), *dep_name)) .map(|(package, optional_dependencies)| { optional_dependencies .into_iter() .filter(|optional_dependency| { levenshtein_test(**optional_dependency, *dep_name) }) .map(move |optional_dependency| { format!("{}/{}", package.name(), optional_dependency) }) }) .flatten(); // Finds set of `member/feat` features which name is similar to current `pkg/feat`. let summary_features = summary_features_per_member .iter() .filter(|(package, _)| levenshtein_test(package.name(), *dep_name)) .map(|(package, summary_features)| { summary_features .into_iter() .filter(|summary_feature| { levenshtein_test(**summary_feature, *dep_feature) }) .map(move |summary_feature| { format!("{}/{}", package.name(), summary_feature) }) }) .flatten(); pkg_feat_similar .chain(optional_dependency_features) .chain(summary_features) .collect::>() } }) .map(|v| v.into_iter()) .flatten() .unique() .filter(|element| { let feature = FeatureValue::new(InternedString::new(element)); !cli_features.features.contains(&feature) && !found_features.contains(&feature) }) .sorted() .take(5) .collect(); let unknown: Vec<_> = cli_features .features .difference(found_features) .map(|feature| feature.to_string()) .sorted() .collect(); if suggestions.is_empty() { bail!( "none of the selected packages contains these features: {}", unknown.join(", ") ); } else { bail!( "none of the selected packages contains these features: {}, did you 
mean: {}?", unknown.join(", "), suggestions.join(", ") ); } } /// New command-line feature selection behavior with resolver = "2" or the /// root of a virtual workspace. See `allows_new_cli_feature_behavior`. fn members_with_features_new( &self, specs: &[PackageIdSpec], cli_features: &CliFeatures, ) -> CargoResult> { // Keeps track of which features matched `member` to produce an error // if any of them did not match anywhere. let mut found_features = Default::default(); let members: Vec<(&Package, CliFeatures)> = self .members() .filter(|m| specs.iter().any(|spec| spec.matches(m.package_id()))) .map(|m| { ( m, Workspace::collect_matching_features(m, cli_features, &mut found_features), ) }) .collect(); if members.is_empty() { // `cargo build -p foo`, where `foo` is not a member. // Do not allow any command-line flags (defaults only). if !(cli_features.features.is_empty() && !cli_features.all_features && cli_features.uses_default_features) { bail!("cannot specify features for packages outside of workspace"); } // Add all members from the workspace so we can ensure `-p nonmember` // is in the resolve graph. return Ok(self .members() .map(|m| (m, CliFeatures::new_all(false))) .collect()); } if *cli_features.features != found_features { self.report_unknown_features_error(specs, cli_features, &found_features)?; } Ok(members) } /// This is the "old" behavior for command-line feature selection. /// See `allows_new_cli_feature_behavior`. fn members_with_features_old( &self, specs: &[PackageIdSpec], cli_features: &CliFeatures, ) -> Vec<(&Package, CliFeatures)> { // Split off any features with the syntax `member-name/feature-name` into a map // so that those features can be applied directly to those workspace-members. let mut member_specific_features: HashMap> = HashMap::new(); // Features for the member in the current directory. 
let mut cwd_features = BTreeSet::new(); for feature in cli_features.features.iter() { match feature { FeatureValue::Feature(_) => { cwd_features.insert(feature.clone()); } // This should be enforced by CliFeatures. FeatureValue::Dep { .. } => panic!("unexpected dep: syntax {}", feature), FeatureValue::DepFeature { dep_name, dep_feature, weak: _, } => { // I think weak can be ignored here. // * With `--features member?/feat -p member`, the ? doesn't // really mean anything (either the member is built or it isn't). // * With `--features nonmember?/feat`, cwd_features will // handle processing it correctly. let is_member = self.members().any(|member| { // Check if `dep_name` is member of the workspace, but isn't associated with current package. self.current_opt() != Some(member) && member.name() == *dep_name }); if is_member && specs.iter().any(|spec| spec.name() == *dep_name) { member_specific_features .entry(*dep_name) .or_default() .insert(FeatureValue::Feature(*dep_feature)); } else { cwd_features.insert(feature.clone()); } } } } let ms: Vec<_> = self .members() .filter_map(|member| { let member_id = member.package_id(); match self.current_opt() { // The features passed on the command-line only apply to // the "current" package (determined by the cwd). Some(current) if member_id == current.package_id() => { let feats = CliFeatures { features: Rc::new(cwd_features.clone()), all_features: cli_features.all_features, uses_default_features: cli_features.uses_default_features, }; Some((member, feats)) } _ => { // Ignore members that are not enabled on the command-line. if specs.iter().any(|spec| spec.matches(member_id)) { // -p for a workspace member that is not the "current" // one. // // The odd behavior here is due to backwards // compatibility. `--features` and // `--no-default-features` used to only apply to the // "current" package. As an extension, this allows // member-name/feature-name to set member-specific // features, which should be backwards-compatible. 
let feats = CliFeatures { features: Rc::new( member_specific_features .remove(member.name().as_str()) .unwrap_or_default(), ), uses_default_features: true, all_features: cli_features.all_features, }; Some((member, feats)) } else { // This member was not requested on the command-line, skip. None } } } }) .collect(); // If any member specific features were not removed while iterating over members // some features will be ignored. assert!(member_specific_features.is_empty()); ms } /// Returns true if `unit` should depend on the output of Docscrape units. pub fn unit_needs_doc_scrape(&self, unit: &Unit) -> bool { // We do not add scraped units for Host units, as they're either build scripts // (not documented) or proc macros (have no scrape-able exports). Additionally, // naively passing a proc macro's unit_for to new_unit_dep will currently cause // Cargo to panic, see issue #10545. self.is_member(&unit.pkg) && !unit.target.for_host() } } impl<'cfg> Packages<'cfg> { fn get(&self, manifest_path: &Path) -> &MaybePackage { self.maybe_get(manifest_path).unwrap() } fn get_mut(&mut self, manifest_path: &Path) -> &mut MaybePackage { self.maybe_get_mut(manifest_path).unwrap() } fn maybe_get(&self, manifest_path: &Path) -> Option<&MaybePackage> { self.packages.get(manifest_path.parent().unwrap()) } fn maybe_get_mut(&mut self, manifest_path: &Path) -> Option<&mut MaybePackage> { self.packages.get_mut(manifest_path.parent().unwrap()) } fn load(&mut self, manifest_path: &Path) -> CargoResult<&MaybePackage> { let key = manifest_path.parent().unwrap(); match self.packages.entry(key.to_path_buf()) { Entry::Occupied(e) => Ok(e.into_mut()), Entry::Vacant(v) => { let source_id = SourceId::for_path(key)?; let (manifest, _nested_paths) = read_manifest(manifest_path, source_id, self.config)?; Ok(v.insert(match manifest { EitherManifest::Real(manifest) => { MaybePackage::Package(Package::new(manifest, manifest_path)) } EitherManifest::Virtual(vm) => MaybePackage::Virtual(vm), })) } } } } 
impl MaybePackage { fn workspace_config(&self) -> &WorkspaceConfig { match *self { MaybePackage::Package(ref p) => p.manifest().workspace_config(), MaybePackage::Virtual(ref vm) => vm.workspace_config(), } } } impl WorkspaceRootConfig { /// Creates a new Intermediate Workspace Root configuration. pub fn new( root_dir: &Path, members: &Option>, default_members: &Option>, exclude: &Option>, inheritable: &Option, custom_metadata: &Option, ) -> WorkspaceRootConfig { WorkspaceRootConfig { root_dir: root_dir.to_path_buf(), members: members.clone(), default_members: default_members.clone(), exclude: exclude.clone().unwrap_or_default(), inheritable_fields: inheritable.clone().unwrap_or_default(), custom_metadata: custom_metadata.clone(), } } /// Checks the path against the `excluded` list. /// /// This method does **not** consider the `members` list. fn is_excluded(&self, manifest_path: &Path) -> bool { let excluded = self .exclude .iter() .any(|ex| manifest_path.starts_with(self.root_dir.join(ex))); let explicit_member = match self.members { Some(ref members) => members .iter() .any(|mem| manifest_path.starts_with(self.root_dir.join(mem))), None => false, }; !explicit_member && excluded } fn has_members_list(&self) -> bool { self.members.is_some() } fn members_paths(&self, globs: &[String]) -> CargoResult> { let mut expanded_list = Vec::new(); for glob in globs { let pathbuf = self.root_dir.join(glob); let expanded_paths = Self::expand_member_path(&pathbuf)?; // If glob does not find any valid paths, then put the original // path in the expanded list to maintain backwards compatibility. if expanded_paths.is_empty() { expanded_list.push(pathbuf); } else { // Some OS can create system support files anywhere. // (e.g. macOS creates `.DS_Store` file if you visit a directory using Finder.) // Such files can be reported as a member path unexpectedly. // Check and filter out non-directory paths to prevent pushing such accidental unwanted path // as a member. 
for expanded_path in expanded_paths { if expanded_path.is_dir() { expanded_list.push(expanded_path); } } } } Ok(expanded_list) } fn expand_member_path(path: &Path) -> CargoResult> { let path = match path.to_str() { Some(p) => p, None => return Ok(Vec::new()), }; let res = glob(path).with_context(|| format!("could not parse pattern `{}`", &path))?; let res = res .map(|p| p.with_context(|| format!("unable to match path to pattern `{}`", &path))) .collect::, _>>()?; Ok(res) } pub fn inheritable(&self) -> &InheritableFields { &self.inheritable_fields } } pub fn resolve_relative_path( label: &str, old_root: &Path, new_root: &Path, rel_path: &str, ) -> CargoResult { let joined_path = normalize_path(&old_root.join(rel_path)); match diff_paths(joined_path, new_root) { None => Err(anyhow!( "`{}` was defined in {} but could not be resolved with {}", label, old_root.display(), new_root.display() )), Some(path) => Ok(path .to_str() .ok_or_else(|| { anyhow!( "`{}` resolved to non-UTF value (`{}`)", label, path.display() ) })? .to_owned()), } } /// Finds the path of the root of the workspace. pub fn find_workspace_root(manifest_path: &Path, config: &Config) -> CargoResult> { find_workspace_root_with_loader(manifest_path, config, |self_path| { let key = self_path.parent().unwrap(); let source_id = SourceId::for_path(key)?; let (manifest, _nested_paths) = read_manifest(self_path, source_id, config)?; Ok(manifest .workspace_config() .get_ws_root(self_path, manifest_path)) }) } /// Finds the path of the root of the workspace. /// /// This uses a callback to determine if the given path tells us what the /// workspace root is. fn find_workspace_root_with_loader( manifest_path: &Path, config: &Config, mut loader: impl FnMut(&Path) -> CargoResult>, ) -> CargoResult> { // Check if there are any workspace roots that have already been found that would work { let roots = config.ws_roots.borrow(); // Iterate through the manifests parent directories until we find a workspace // root. 
Note we skip the first item since that is just the path itself for current in manifest_path.ancestors().skip(1) { if let Some(ws_config) = roots.get(current) { if !ws_config.is_excluded(manifest_path) { // Add `Cargo.toml` since ws_root is the root and not the file return Ok(Some(current.join("Cargo.toml"))); } } } } for ances_manifest_path in find_root_iter(manifest_path, config) { debug!("find_root - trying {}", ances_manifest_path.display()); if let Some(ws_root_path) = loader(&ances_manifest_path)? { return Ok(Some(ws_root_path)); } } Ok(None) } fn read_root_pointer(member_manifest: &Path, root_link: &str) -> PathBuf { let path = member_manifest .parent() .unwrap() .join(root_link) .join("Cargo.toml"); debug!("find_root - pointer {}", path.display()); paths::normalize_path(&path) } fn find_root_iter<'a>( manifest_path: &'a Path, config: &'a Config, ) -> impl Iterator + 'a { LookBehind::new(paths::ancestors(manifest_path, None).skip(2)) .take_while(|path| !path.curr.ends_with("target/package")) // Don't walk across `CARGO_HOME` when we're looking for the // workspace root. Sometimes a package will be organized with // `CARGO_HOME` pointing inside of the workspace root or in the // current package, but we don't want to mistakenly try to put // crates.io crates into the workspace by accident. 
.take_while(|path| { if let Some(last) = path.last { config.home() != last } else { true } }) .map(|path| path.curr.join("Cargo.toml")) .filter(|ances_manifest_path| ances_manifest_path.exists()) } struct LookBehindWindow<'a, T: ?Sized> { curr: &'a T, last: Option<&'a T>, } struct LookBehind<'a, T: ?Sized, K: Iterator> { iter: K, last: Option<&'a T>, } impl<'a, T: ?Sized, K: Iterator> LookBehind<'a, T, K> { fn new(items: K) -> Self { Self { iter: items, last: None, } } } impl<'a, T: ?Sized, K: Iterator> Iterator for LookBehind<'a, T, K> { type Item = LookBehindWindow<'a, T>; fn next(&mut self) -> Option { match self.iter.next() { None => None, Some(next) => { let last = self.last; self.last = Some(next); Some(LookBehindWindow { curr: next, last }) } } } } cargo-0.66.0/src/cargo/lib.rs000066400000000000000000000154061432416201200156750ustar00rootroot00000000000000// For various reasons, some idioms are still allow'ed, but we would like to // test and enforce them. #![warn(rust_2018_idioms)] #![cfg_attr(test, deny(warnings))] // Due to some of the default clippy lints being somewhat subjective and not // necessarily an improvement, we prefer to not use them at this time. #![allow(clippy::all)] #![allow(rustdoc::private_intra_doc_links)] //! # Cargo as a library //! //! Cargo, the Rust package manager, is also provided as a library. //! //! There are two places you can find API documentation of cargo-the-library, //! //! - and //! - . //! //! Each of them targets on a slightly different audience. //! //! ## For external tool developers //! //! The documentation on contains public-facing items in cargo-the-library. //! External tool developers may find it useful when trying to reuse existing building blocks from Cargo. //! However, using Cargo as a library has drawbacks, especially cargo-the-library is unstable, //! and there is no clear path to stabilize it soon at the time of writing. //! See [The Cargo Book: External tools] for more on this topic. //! //! 
Cargo API documentation on docs.rs gets updates along with each Rust release. //! Its version always has a 0 major version to state it is unstable. //! The minor version is always +1 of rustc's minor version //! (that is, `cargo 0.66.0` corresponds to `rustc 1.65`). //! //! ## For Cargo contributors //! //! The documentation on contains all items in Cargo. //! Contributors of Cargo may find it useful as a reference of Cargo's implementation details. //! It's built with `--document-private-items` rustdoc flag, //! so you might expect to see some noise and strange items here. //! The Cargo team and contributors strive for jotting down every details //! from their brains in each issue and PR. //! However, something might just disappear in the air with no reason. //! This documentation can be seen as their extended minds, //! sharing designs and hacks behind both public and private interfaces. //! //! If you are just diving into Cargo internals, [Cargo Architecture Overview] //! is the best material to get a broader context of how Cargo works under the hood. //! Things also worth a read are important concepts reside in source code, //! which Cargo developers have been crafting for a while, namely //! //! - [`cargo::core::resolver`](crate::core::resolver), //! - [`cargo::core::compiler::fingerprint`](core/compiler/fingerprint/index.html), //! - [`cargo::util::config`](crate::util::config), //! - [`cargo::ops::fix`](ops/fix/index.html), and //! - [`cargo::sources::registry`](crate::sources::registry). //! //! This API documentation is published on each push of rust-lang/cargo master branch. //! In other words, it always reflects the latest doc comments in source code on master branch. //! //! ## Contribute to Cargo documentations //! //! The Cargo team always continues improving all external and internal documentations. //! If you spot anything could be better, don't hesitate to discuss with the team on //! Zulip [`t-cargo` stream], or [submit an issue] right on GitHub. 
//! There is also an issue label [`A-documenting-cargo-itself`], //! which is generally for documenting user-facing [The Cargo Book], //! but the Cargo team is welcome any form of enhancement for the [Cargo Contributor Guide] //! and this API documentation as well. //! //! [The Cargo Book: External tools]: https://doc.rust-lang.org/stable/cargo/reference/external-tools.html //! [Cargo Architecture Overview]: https://doc.crates.io/contrib/architecture //! [`t-cargo` stream]: https://rust-lang.zulipchat.com/#narrow/stream/246057-t-cargo //! [submit an issue]: https://github.com/rust-lang/cargo/issues/new/choose //! [`A-documenting-cargo-itself`]: https://github.com/rust-lang/cargo/labels/A-documenting-cargo-itself //! [The Cargo Book]: https://doc.rust-lang.org/cargo/ //! [Cargo Contributor Guide]: https://doc.crates.io/contrib/ use crate::core::shell::Verbosity::Verbose; use crate::core::Shell; use anyhow::Error; use log::debug; pub use crate::util::errors::{AlreadyPrintedError, InternalError, VerboseError}; pub use crate::util::{indented_lines, CargoResult, CliError, CliResult, Config}; pub use crate::version::version; pub const CARGO_ENV: &str = "CARGO"; #[macro_use] mod macros; pub mod core; pub mod ops; pub mod sources; pub mod util; mod version; pub fn exit_with_error(err: CliError, shell: &mut Shell) -> ! { debug!("exit_with_error; err={:?}", err); if let Some(ref err) = err.error { if let Some(clap_err) = err.downcast_ref::() { let exit_code = if clap_err.use_stderr() { 1 } else { 0 }; let _ = clap_err.print(); std::process::exit(exit_code) } } let CliError { error, exit_code } = err; if let Some(error) = error { display_error(&error, shell); } std::process::exit(exit_code) } /// Displays an error, and all its causes, to stderr. 
pub fn display_error(err: &Error, shell: &mut Shell) { debug!("display_error; err={:?}", err); _display_error(err, shell, true); if err .chain() .any(|e| e.downcast_ref::().is_some()) { drop(shell.note("this is an unexpected cargo internal error")); drop( shell.note( "we would appreciate a bug report: https://github.com/rust-lang/cargo/issues/", ), ); drop(shell.note(format!("cargo {}", version()))); // Once backtraces are stabilized, this should print out a backtrace // if it is available. } } /// Displays a warning, with an error object providing detailed information /// and context. pub fn display_warning_with_error(warning: &str, err: &Error, shell: &mut Shell) { drop(shell.warn(warning)); drop(writeln!(shell.err())); _display_error(err, shell, false); } fn _display_error(err: &Error, shell: &mut Shell, as_err: bool) -> bool { for (i, err) in err.chain().enumerate() { // If we're not in verbose mode then only print cause chain until one // marked as `VerboseError` appears. // // Generally the top error shouldn't be verbose, but check it anyways. if shell.verbosity() != Verbose && err.is::() { return true; } if err.is::() { break; } if i == 0 { if as_err { drop(shell.error(&err)); } else { drop(writeln!(shell.err(), "{}", err)); } } else { drop(writeln!(shell.err(), "\nCaused by:")); drop(write!(shell.err(), "{}", indented_lines(&err.to_string()))); } } false } cargo-0.66.0/src/cargo/macros.rs000066400000000000000000000032201432416201200164020ustar00rootroot00000000000000use std::fmt; macro_rules! 
compact_debug { ( impl fmt::Debug for $ty:ident { fn fmt(&$this:ident, f: &mut fmt::Formatter) -> fmt::Result { let (default, default_name) = $e:expr; [debug_the_fields($($field:ident)*)] } } ) => ( impl fmt::Debug for $ty { fn fmt(&$this, f: &mut fmt::Formatter<'_>) -> fmt::Result { // Try printing a pretty version where we collapse as many fields as // possible, indicating that they're equivalent to a function call // that's hopefully enough to indicate what each value is without // actually dumping everything so verbosely. let mut s = f.debug_struct(stringify!($ty)); let (default, default_name) = $e; let mut any_default = false; // Exhaustively match so when fields are added we get a compile // failure let $ty { $($field),* } = $this; $( if *$field == default.$field { any_default = true; } else { s.field(stringify!($field), $field); } )* if any_default { s.field("..", &crate::macros::DisplayAsDebug(default_name)); } s.finish() } } ) } pub struct DisplayAsDebug(pub T); impl fmt::Debug for DisplayAsDebug { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(&self.0, f) } } cargo-0.66.0/src/cargo/ops/000077500000000000000000000000001432416201200153545ustar00rootroot00000000000000cargo-0.66.0/src/cargo/ops/cargo_add/000077500000000000000000000000001432416201200172575ustar00rootroot00000000000000cargo-0.66.0/src/cargo/ops/cargo_add/crate_spec.rs000066400000000000000000000030651432416201200217410ustar00rootroot00000000000000//! Crate name parsing. use anyhow::Context as _; use super::Dependency; use super::RegistrySource; use crate::util::validate_package_name; use crate::CargoResult; /// User-specified crate /// /// This can be a /// - Name (e.g. `docopt`) /// - Name and a version req (e.g. 
`docopt@^0.8`) /// - Path #[derive(Debug)] pub struct CrateSpec { /// Crate name name: String, /// Optional version requirement version_req: Option, } impl CrateSpec { /// Convert a string to a `Crate` pub fn resolve(pkg_id: &str) -> CargoResult { let (name, version) = pkg_id .split_once('@') .map(|(n, v)| (n, Some(v))) .unwrap_or((pkg_id, None)); validate_package_name(name, "dependency name", "")?; if let Some(version) = version { semver::VersionReq::parse(version) .with_context(|| format!("invalid version requirement `{version}`"))?; } let id = Self { name: name.to_owned(), version_req: version.map(|s| s.to_owned()), }; Ok(id) } /// Generate a dependency entry for this crate specifier pub fn to_dependency(&self) -> CargoResult { let mut dep = Dependency::new(self.name()); if let Some(version_req) = self.version_req() { dep = dep.set_source(RegistrySource::new(version_req)); } Ok(dep) } pub fn name(&self) -> &str { &self.name } pub fn version_req(&self) -> Option<&str> { self.version_req.as_deref() } } cargo-0.66.0/src/cargo/ops/cargo_add/dependency.rs000066400000000000000000001142561432416201200217540ustar00rootroot00000000000000use std::fmt::{Display, Formatter}; use std::path::{Path, PathBuf}; use indexmap::IndexSet; use toml_edit::KeyMut; use super::manifest::str_or_1_len_table; use crate::core::GitReference; use crate::core::SourceId; use crate::core::Summary; use crate::CargoResult; use crate::Config; /// A dependency handled by Cargo /// /// `None` means the field will be blank in TOML #[derive(Debug, PartialEq, Eq, Clone)] #[non_exhaustive] pub struct Dependency { /// The name of the dependency (as it is set in its `Cargo.toml` and known to crates.io) pub name: String, /// Whether the dependency is opted-in with a feature flag pub optional: Option, /// List of features to add (or None to keep features unchanged). 
pub features: Option>, /// Whether default features are enabled pub default_features: Option, /// List of features inherited from a workspace dependency pub inherited_features: Option>, /// Where the dependency comes from pub source: Option, /// Non-default registry pub registry: Option, /// If the dependency is renamed, this is the new name for the dependency /// as a string. None if it is not renamed. pub rename: Option, } impl Dependency { /// Create a new dependency with a name pub fn new(name: &str) -> Self { Self { name: name.into(), optional: None, features: None, default_features: None, inherited_features: None, source: None, registry: None, rename: None, } } /// Set dependency to a given version pub fn set_source(mut self, source: impl Into) -> Self { self.source = Some(source.into()); self } /// Remove the existing version requirement pub fn clear_version(mut self) -> Self { match &mut self.source { Some(Source::Registry(_)) => { self.source = None; } Some(Source::Path(path)) => { path.version = None; } Some(Source::Git(git)) => { git.version = None; } Some(Source::Workspace(_workspace)) => {} None => {} } self } /// Set whether the dependency is optional #[allow(dead_code)] pub fn set_optional(mut self, opt: bool) -> Self { self.optional = Some(opt); self } /// Set features as an array of string (does some basic parsing) #[allow(dead_code)] pub fn set_features(mut self, features: IndexSet) -> Self { self.features = Some(features); self } /// Set features as an array of string (does some basic parsing) pub fn extend_features(mut self, features: impl IntoIterator) -> Self { self.features .get_or_insert_with(Default::default) .extend(features); self } /// Set the value of default-features for the dependency #[allow(dead_code)] pub fn set_default_features(mut self, default_features: bool) -> Self { self.default_features = Some(default_features); self } /// Set the alias for the dependency pub fn set_rename(mut self, rename: &str) -> Self { self.rename = 
Some(rename.into()); self } /// Set the value of registry for the dependency pub fn set_registry(mut self, registry: impl Into) -> Self { self.registry = Some(registry.into()); self } /// Set features as an array of string (does some basic parsing) pub fn set_inherited_features(mut self, features: IndexSet) -> Self { self.inherited_features = Some(features); self } /// Get the dependency source pub fn source(&self) -> Option<&Source> { self.source.as_ref() } /// Get version of dependency pub fn version(&self) -> Option<&str> { match self.source()? { Source::Registry(src) => Some(src.version.as_str()), Source::Path(src) => src.version.as_deref(), Source::Git(src) => src.version.as_deref(), Source::Workspace(_) => None, } } /// Get registry of the dependency pub fn registry(&self) -> Option<&str> { self.registry.as_deref() } /// Get the alias for the dependency (if any) pub fn rename(&self) -> Option<&str> { self.rename.as_deref() } /// Whether default features are activated pub fn default_features(&self) -> Option { self.default_features } /// Get whether the dep is optional pub fn optional(&self) -> Option { self.optional } /// Get the SourceID for this dependency pub fn source_id(&self, config: &Config) -> CargoResult> { match &self.source.as_ref() { Some(Source::Registry(_)) | None => { if let Some(r) = self.registry() { let source_id = SourceId::alt_registry(config, r)?; Ok(MaybeWorkspace::Other(source_id)) } else { let source_id = SourceId::crates_io(config)?; Ok(MaybeWorkspace::Other(source_id)) } } Some(Source::Path(source)) => Ok(MaybeWorkspace::Other(source.source_id()?)), Some(Source::Git(source)) => Ok(MaybeWorkspace::Other(source.source_id()?)), Some(Source::Workspace(workspace)) => Ok(MaybeWorkspace::Workspace(workspace.clone())), } } /// Query to find this dependency pub fn query( &self, config: &Config, ) -> CargoResult> { let source_id = self.source_id(config)?; match source_id { MaybeWorkspace::Workspace(workspace) => 
Ok(MaybeWorkspace::Workspace(workspace)), MaybeWorkspace::Other(source_id) => Ok(MaybeWorkspace::Other( crate::core::dependency::Dependency::parse( self.name.as_str(), self.version(), source_id, )?, )), } } } pub enum MaybeWorkspace { Workspace(WorkspaceSource), Other(T), } impl Dependency { /// Create a dependency from a TOML table entry pub fn from_toml(crate_root: &Path, key: &str, item: &toml_edit::Item) -> CargoResult { if let Some(version) = item.as_str() { let dep = Self::new(key).set_source(RegistrySource::new(version)); Ok(dep) } else if let Some(table) = item.as_table_like() { let (name, rename) = if let Some(value) = table.get("package") { ( value .as_str() .ok_or_else(|| invalid_type(key, "package", value.type_name(), "string"))? .to_owned(), Some(key.to_owned()), ) } else { (key.to_owned(), None) }; let source: Source = if let Some(git) = table.get("git") { let mut src = GitSource::new( git.as_str() .ok_or_else(|| invalid_type(key, "git", git.type_name(), "string"))?, ); if let Some(value) = table.get("branch") { src = src.set_branch(value.as_str().ok_or_else(|| { invalid_type(key, "branch", value.type_name(), "string") })?); } if let Some(value) = table.get("tag") { src = src.set_tag(value.as_str().ok_or_else(|| { invalid_type(key, "tag", value.type_name(), "string") })?); } if let Some(value) = table.get("rev") { src = src.set_rev(value.as_str().ok_or_else(|| { invalid_type(key, "rev", value.type_name(), "string") })?); } if let Some(value) = table.get("version") { src = src.set_version(value.as_str().ok_or_else(|| { invalid_type(key, "version", value.type_name(), "string") })?); } src.into() } else if let Some(path) = table.get("path") { let path = crate_root .join(path.as_str().ok_or_else(|| { invalid_type(key, "path", path.type_name(), "string") })?); let mut src = PathSource::new(path); if let Some(value) = table.get("version") { src = src.set_version(value.as_str().ok_or_else(|| { invalid_type(key, "version", value.type_name(), "string") })?); } 
src.into() } else if let Some(version) = table.get("version") { let src = RegistrySource::new(version.as_str().ok_or_else(|| { invalid_type(key, "version", version.type_name(), "string") })?); src.into() } else if let Some(workspace) = table.get("workspace") { let workspace_bool = workspace.as_bool().ok_or_else(|| { invalid_type(key, "workspace", workspace.type_name(), "bool") })?; if !workspace_bool { anyhow::bail!("`{key}.workspace = false` is unsupported") } let src = WorkspaceSource::new(); src.into() } else { anyhow::bail!("Unrecognized dependency source for `{key}`"); }; let registry = if let Some(value) = table.get("registry") { Some( value .as_str() .ok_or_else(|| invalid_type(key, "registry", value.type_name(), "string"))? .to_owned(), ) } else { None }; let default_features = table.get("default-features").and_then(|v| v.as_bool()); if table.contains_key("default_features") { anyhow::bail!("Use of `default_features` in `{key}` is unsupported, please switch to `default-features`"); } let features = if let Some(value) = table.get("features") { Some( value .as_array() .ok_or_else(|| invalid_type(key, "features", value.type_name(), "array"))? .iter() .map(|v| { v.as_str().map(|s| s.to_owned()).ok_or_else(|| { invalid_type(key, "features", v.type_name(), "string") }) }) .collect::>>()?, ) } else { None }; let optional = table.get("optional").and_then(|v| v.as_bool()); let dep = Self { name, rename, source: Some(source), registry, default_features, features, optional, inherited_features: None, }; Ok(dep) } else { anyhow::bail!("Unrecognized` dependency entry format for `{key}"); } } /// Get the dependency name as defined in the manifest, /// that is, either the alias (rename field if Some), /// or the official package name (name field). 
pub fn toml_key(&self) -> &str { self.rename().unwrap_or(&self.name) } /// Convert dependency to TOML /// /// Returns a tuple with the dependency's name and either the version as a `String` /// or the path/git repository as an `InlineTable`. /// (If the dependency is set as `optional` or `default-features` is set to `false`, /// an `InlineTable` is returned in any case.) /// /// # Panic /// /// Panics if the path is relative pub fn to_toml(&self, crate_root: &Path) -> toml_edit::Item { assert!( crate_root.is_absolute(), "Absolute path needed, got: {}", crate_root.display() ); let table: toml_edit::Item = match ( self.optional.unwrap_or(false), self.features.as_ref(), self.default_features.unwrap_or(true), self.source.as_ref(), self.registry.as_ref(), self.rename.as_ref(), ) { // Extra short when version flag only ( false, None, true, Some(Source::Registry(RegistrySource { version: v })), None, None, ) => toml_edit::value(v), (false, None, true, Some(Source::Workspace(WorkspaceSource {})), None, None) => { let mut table = toml_edit::InlineTable::default(); table.set_dotted(true); table.insert("workspace", true.into()); toml_edit::value(toml_edit::Value::InlineTable(table)) } // Other cases are represented as an inline table (_, _, _, _, _, _) => { let mut table = toml_edit::InlineTable::default(); match &self.source { Some(Source::Registry(src)) => { table.insert("version", src.version.as_str().into()); } Some(Source::Path(src)) => { let relpath = path_field(crate_root, &src.path); if let Some(r) = src.version.as_deref() { table.insert("version", r.into()); } table.insert("path", relpath.into()); } Some(Source::Git(src)) => { table.insert("git", src.git.as_str().into()); if let Some(branch) = src.branch.as_deref() { table.insert("branch", branch.into()); } if let Some(tag) = src.tag.as_deref() { table.insert("tag", tag.into()); } if let Some(rev) = src.rev.as_deref() { table.insert("rev", rev.into()); } if let Some(r) = src.version.as_deref() { 
table.insert("version", r.into()); } } Some(Source::Workspace(_)) => { table.insert("workspace", true.into()); } None => {} } if table.contains_key("version") { if let Some(r) = self.registry.as_deref() { table.insert("registry", r.into()); } } if self.rename.is_some() { table.insert("package", self.name.as_str().into()); } if let Some(v) = self.default_features { table.insert("default-features", v.into()); } if let Some(features) = self.features.as_ref() { let features: toml_edit::Value = features.iter().cloned().collect(); table.insert("features", features); } if let Some(v) = self.optional { table.insert("optional", v.into()); } toml_edit::value(toml_edit::Value::InlineTable(table)) } }; table } /// Modify existing entry to match this dependency pub fn update_toml<'k>( &self, crate_root: &Path, key: &mut KeyMut<'k>, item: &mut toml_edit::Item, ) { if str_or_1_len_table(item) { // Nothing to preserve *item = self.to_toml(crate_root); key.fmt(); } else if let Some(table) = item.as_table_like_mut() { match &self.source { Some(Source::Registry(src)) => { table.insert("version", toml_edit::value(src.version.as_str())); for key in ["path", "git", "branch", "tag", "rev", "workspace"] { table.remove(key); } } Some(Source::Path(src)) => { let relpath = path_field(crate_root, &src.path); table.insert("path", toml_edit::value(relpath)); if let Some(r) = src.version.as_deref() { table.insert("version", toml_edit::value(r)); } else { table.remove("version"); } for key in ["git", "branch", "tag", "rev", "workspace"] { table.remove(key); } } Some(Source::Git(src)) => { table.insert("git", toml_edit::value(src.git.as_str())); if let Some(branch) = src.branch.as_deref() { table.insert("branch", toml_edit::value(branch)); } else { table.remove("branch"); } if let Some(tag) = src.tag.as_deref() { table.insert("tag", toml_edit::value(tag)); } else { table.remove("tag"); } if let Some(rev) = src.rev.as_deref() { table.insert("rev", toml_edit::value(rev)); } else { 
table.remove("rev"); } if let Some(r) = src.version.as_deref() { table.insert("version", toml_edit::value(r)); } else { table.remove("version"); } for key in ["path", "workspace"] { table.remove(key); } } Some(Source::Workspace(_)) => { table.insert("workspace", toml_edit::value(true)); table.set_dotted(true); key.fmt(); for key in [ "version", "registry", "registry-index", "path", "git", "branch", "tag", "rev", "package", "default-features", ] { table.remove(key); } } None => {} } if table.contains_key("version") { if let Some(r) = self.registry.as_deref() { table.insert("registry", toml_edit::value(r)); } else { table.remove("registry"); } } else { table.remove("registry"); } if self.rename.is_some() { table.insert("package", toml_edit::value(self.name.as_str())); } match self.default_features { Some(v) => { table.insert("default-features", toml_edit::value(v)); } None => { table.remove("default-features"); } } if let Some(new_features) = self.features.as_ref() { let mut features = table .get("features") .and_then(|i| i.as_value()) .and_then(|v| v.as_array()) .and_then(|a| { a.iter() .map(|v| v.as_str()) .collect::>>() }) .unwrap_or_default(); features.extend(new_features.iter().map(|s| s.as_str())); let features = toml_edit::value(features.into_iter().collect::()); table.set_dotted(false); table.insert("features", features); } else { table.remove("features"); } match self.optional { Some(v) => { table.set_dotted(false); table.insert("optional", toml_edit::value(v)); } None => { table.remove("optional"); } } table.fmt(); } else { unreachable!("Invalid dependency type: {}", item.type_name()); } } } fn invalid_type(dep: &str, key: &str, actual: &str, expected: &str) -> anyhow::Error { anyhow::format_err!("Found {actual} for {key} when {expected} was expected for {dep}") } impl std::fmt::Display for Dependency { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { if let Some(source) = self.source() { write!(f, "{}@{}", self.name, source) } else { 
self.toml_key().fmt(f) } } } impl<'s> From<&'s Summary> for Dependency { fn from(other: &'s Summary) -> Self { let source: Source = if let Some(path) = other.source_id().local_path() { PathSource::new(path) .set_version(other.version().to_string()) .into() } else if let Some(git_ref) = other.source_id().git_reference() { let mut src = GitSource::new(other.source_id().url().to_string()) .set_version(other.version().to_string()); match git_ref { GitReference::Branch(branch) => src = src.set_branch(branch), GitReference::Tag(tag) => src = src.set_tag(tag), GitReference::Rev(rev) => src = src.set_rev(rev), GitReference::DefaultBranch => {} } src.into() } else { RegistrySource::new(other.version().to_string()).into() }; Dependency::new(other.name().as_str()).set_source(source) } } impl From for Dependency { fn from(other: Summary) -> Self { (&other).into() } } fn path_field(crate_root: &Path, abs_path: &Path) -> String { let relpath = pathdiff::diff_paths(abs_path, crate_root).expect("both paths are absolute"); let relpath = relpath.to_str().unwrap().replace('\\', "/"); relpath } /// Primary location of a dependency #[derive(Debug, Hash, PartialEq, Eq, Clone)] pub enum Source { /// Dependency from a registry Registry(RegistrySource), /// Dependency from a local path Path(PathSource), /// Dependency from a git repo Git(GitSource), /// Dependency from a workspace Workspace(WorkspaceSource), } impl Source { /// Access the registry source, if present pub fn as_registry(&self) -> Option<&RegistrySource> { match self { Self::Registry(src) => Some(src), _ => None, } } /// Access the path source, if present #[allow(dead_code)] pub fn as_path(&self) -> Option<&PathSource> { match self { Self::Path(src) => Some(src), _ => None, } } /// Access the git source, if present #[allow(dead_code)] pub fn as_git(&self) -> Option<&GitSource> { match self { Self::Git(src) => Some(src), _ => None, } } /// Access the workspace source, if present #[allow(dead_code)] pub fn as_workspace(&self) 
-> Option<&WorkspaceSource> { match self { Self::Workspace(src) => Some(src), _ => None, } } } impl std::fmt::Display for Source { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Self::Registry(src) => src.fmt(f), Self::Path(src) => src.fmt(f), Self::Git(src) => src.fmt(f), Self::Workspace(src) => src.fmt(f), } } } impl<'s> From<&'s Source> for Source { fn from(inner: &'s Source) -> Self { inner.clone() } } impl From for Source { fn from(inner: RegistrySource) -> Self { Self::Registry(inner) } } impl From for Source { fn from(inner: PathSource) -> Self { Self::Path(inner) } } impl From for Source { fn from(inner: GitSource) -> Self { Self::Git(inner) } } impl From for Source { fn from(inner: WorkspaceSource) -> Self { Self::Workspace(inner) } } /// Dependency from a registry #[derive(Debug, Hash, PartialEq, Eq, Clone)] #[non_exhaustive] pub struct RegistrySource { /// Version requirement pub version: String, } impl RegistrySource { /// Specify dependency by version requirement pub fn new(version: impl AsRef) -> Self { // versions might have semver metadata appended which we do not want to // store in the cargo toml files. 
This would cause a warning upon compilation // ("version requirement […] includes semver metadata which will be ignored") let version = version.as_ref().split('+').next().unwrap(); Self { version: version.to_owned(), } } } impl std::fmt::Display for RegistrySource { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { self.version.fmt(f) } } /// Dependency from a local path #[derive(Debug, Hash, PartialEq, Eq, Clone)] #[non_exhaustive] pub struct PathSource { /// Local, absolute path pub path: PathBuf, /// Version requirement for when published pub version: Option, } impl PathSource { /// Specify dependency from a path pub fn new(path: impl Into) -> Self { Self { path: path.into(), version: None, } } /// Set an optional version requirement pub fn set_version(mut self, version: impl AsRef) -> Self { // versions might have semver metadata appended which we do not want to // store in the cargo toml files. This would cause a warning upon compilation // ("version requirement […] includes semver metadata which will be ignored") let version = version.as_ref().split('+').next().unwrap(); self.version = Some(version.to_owned()); self } /// Get the SourceID for this dependency pub fn source_id(&self) -> CargoResult { SourceId::for_path(&self.path) } } impl std::fmt::Display for PathSource { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { self.path.display().fmt(f) } } /// Dependency from a git repo #[derive(Debug, Hash, PartialEq, Eq, Clone)] #[non_exhaustive] pub struct GitSource { /// Repo URL pub git: String, /// Select specific branch pub branch: Option, /// Select specific tag pub tag: Option, /// Select specific rev pub rev: Option, /// Version requirement for when published pub version: Option, } impl GitSource { /// Specify dependency from a git repo pub fn new(git: impl Into) -> Self { Self { git: git.into(), branch: None, tag: None, rev: None, version: None, } } /// Specify an optional branch pub fn set_branch(mut self, branch: 
impl Into) -> Self { self.branch = Some(branch.into()); self.tag = None; self.rev = None; self } /// Specify an optional tag pub fn set_tag(mut self, tag: impl Into) -> Self { self.branch = None; self.tag = Some(tag.into()); self.rev = None; self } /// Specify an optional rev pub fn set_rev(mut self, rev: impl Into) -> Self { self.branch = None; self.tag = None; self.rev = Some(rev.into()); self } /// Get the SourceID for this dependency pub fn source_id(&self) -> CargoResult { let git_url = self.git.parse::()?; let git_ref = self.git_ref(); SourceId::for_git(&git_url, git_ref) } fn git_ref(&self) -> GitReference { match ( self.branch.as_deref(), self.tag.as_deref(), self.rev.as_deref(), ) { (Some(branch), _, _) => GitReference::Branch(branch.to_owned()), (_, Some(tag), _) => GitReference::Tag(tag.to_owned()), (_, _, Some(rev)) => GitReference::Rev(rev.to_owned()), _ => GitReference::DefaultBranch, } } /// Set an optional version requirement pub fn set_version(mut self, version: impl AsRef) -> Self { // versions might have semver metadata appended which we do not want to // store in the cargo toml files. 
This would cause a warning upon compilation // ("version requirement […] includes semver metadata which will be ignored") let version = version.as_ref().split('+').next().unwrap(); self.version = Some(version.to_owned()); self } } impl std::fmt::Display for GitSource { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let git_ref = self.git_ref(); if let Some(pretty_ref) = git_ref.pretty_ref() { write!(f, "{}?{}", self.git, pretty_ref) } else { write!(f, "{}", self.git) } } } /// Dependency from a workspace #[derive(Debug, Hash, PartialEq, Eq, Clone)] #[non_exhaustive] pub struct WorkspaceSource; impl WorkspaceSource { pub fn new() -> Self { Self } } impl Display for WorkspaceSource { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { "workspace".fmt(f) } } #[cfg(test)] mod tests { use std::path::Path; use crate::ops::cargo_add::manifest::LocalManifest; use cargo_util::paths; use super::*; #[test] fn to_toml_simple_dep() { let crate_root = paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); let dep = Dependency::new("dep").set_source(RegistrySource::new("1.0")); let key = dep.toml_key(); let item = dep.to_toml(&crate_root); assert_eq!(key, "dep".to_owned()); verify_roundtrip(&crate_root, key, &item); } #[test] fn to_toml_simple_dep_with_version() { let crate_root = paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); let dep = Dependency::new("dep").set_source(RegistrySource::new("1.0")); let key = dep.toml_key(); let item = dep.to_toml(&crate_root); assert_eq!(key, "dep".to_owned()); assert_eq!(item.as_str(), Some("1.0")); verify_roundtrip(&crate_root, key, &item); } #[test] fn to_toml_optional_dep() { let crate_root = paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); let dep = Dependency::new("dep") .set_source(RegistrySource::new("1.0")) .set_optional(true); let key = dep.toml_key(); let item = dep.to_toml(&crate_root); assert_eq!(key, "dep".to_owned()); 
assert!(item.is_inline_table()); let dep = item.as_inline_table().unwrap(); assert_eq!(dep.get("optional").unwrap().as_bool(), Some(true)); verify_roundtrip(&crate_root, key, &item); } #[test] fn to_toml_dep_without_default_features() { let crate_root = paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); let dep = Dependency::new("dep") .set_source(RegistrySource::new("1.0")) .set_default_features(false); let key = dep.toml_key(); let item = dep.to_toml(&crate_root); assert_eq!(key, "dep".to_owned()); assert!(item.is_inline_table()); let dep = item.as_inline_table().unwrap(); assert_eq!(dep.get("default-features").unwrap().as_bool(), Some(false)); verify_roundtrip(&crate_root, key, &item); } #[test] fn to_toml_dep_with_path_source() { let root = paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); let crate_root = root.join("foo"); let dep = Dependency::new("dep").set_source(PathSource::new(root.join("bar"))); let key = dep.toml_key(); let item = dep.to_toml(&crate_root); assert_eq!(key, "dep".to_owned()); assert!(item.is_inline_table()); let dep = item.as_inline_table().unwrap(); assert_eq!(dep.get("path").unwrap().as_str(), Some("../bar")); verify_roundtrip(&crate_root, key, &item); } #[test] fn to_toml_dep_with_git_source() { let crate_root = paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); let dep = Dependency::new("dep").set_source(GitSource::new("https://foor/bar.git")); let key = dep.toml_key(); let item = dep.to_toml(&crate_root); assert_eq!(key, "dep".to_owned()); assert!(item.is_inline_table()); let dep = item.as_inline_table().unwrap(); assert_eq!( dep.get("git").unwrap().as_str(), Some("https://foor/bar.git") ); verify_roundtrip(&crate_root, key, &item); } #[test] fn to_toml_renamed_dep() { let crate_root = paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); let dep = Dependency::new("dep") .set_source(RegistrySource::new("1.0")) 
.set_rename("d"); let key = dep.toml_key(); let item = dep.to_toml(&crate_root); assert_eq!(key, "d".to_owned()); assert!(item.is_inline_table()); let dep = item.as_inline_table().unwrap(); assert_eq!(dep.get("package").unwrap().as_str(), Some("dep")); verify_roundtrip(&crate_root, key, &item); } #[test] fn to_toml_dep_from_alt_registry() { let crate_root = paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); let dep = Dependency::new("dep") .set_source(RegistrySource::new("1.0")) .set_registry("alternative"); let key = dep.toml_key(); let item = dep.to_toml(&crate_root); assert_eq!(key, "dep".to_owned()); assert!(item.is_inline_table()); let dep = item.as_inline_table().unwrap(); assert_eq!(dep.get("registry").unwrap().as_str(), Some("alternative")); verify_roundtrip(&crate_root, key, &item); } #[test] fn to_toml_complex_dep() { let crate_root = paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); let dep = Dependency::new("dep") .set_source(RegistrySource::new("1.0")) .set_default_features(false) .set_rename("d"); let key = dep.toml_key(); let item = dep.to_toml(&crate_root); assert_eq!(key, "d".to_owned()); assert!(item.is_inline_table()); let dep = item.as_inline_table().unwrap(); assert_eq!(dep.get("package").unwrap().as_str(), Some("dep")); assert_eq!(dep.get("version").unwrap().as_str(), Some("1.0")); assert_eq!(dep.get("default-features").unwrap().as_bool(), Some(false)); verify_roundtrip(&crate_root, key, &item); } #[test] fn paths_with_forward_slashes_are_left_as_is() { let crate_root = paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); let path = crate_root.join("sibling/crate"); let relpath = "sibling/crate"; let dep = Dependency::new("dep").set_source(PathSource::new(path)); let key = dep.toml_key(); let item = dep.to_toml(&crate_root); let table = item.as_inline_table().unwrap(); let got = table.get("path").unwrap().as_str().unwrap(); assert_eq!(got, relpath); 
verify_roundtrip(&crate_root, key, &item); } #[test] fn overwrite_with_workspace_source_fmt_key() { let crate_root = paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("./"))); let toml = "dep = \"1.0\"\n"; let manifest = toml.parse().unwrap(); let mut local = LocalManifest { path: crate_root.clone(), manifest, }; assert_eq!(local.manifest.to_string(), toml); for (key, item) in local.data.clone().iter() { let dep = Dependency::from_toml(&crate_root, key, item).unwrap(); let dep = dep.set_source(WorkspaceSource::new()); local.insert_into_table(&vec![], &dep).unwrap(); assert_eq!(local.data.to_string(), "dep.workspace = true\n"); } } #[test] #[cfg(windows)] fn normalise_windows_style_paths() { let crate_root = paths::normalize_path(&std::env::current_dir().unwrap().join(Path::new("/"))); let original = crate_root.join(r"sibling\crate"); let should_be = "sibling/crate"; let dep = Dependency::new("dep").set_source(PathSource::new(original)); let key = dep.toml_key(); let item = dep.to_toml(&crate_root); let table = item.as_inline_table().unwrap(); let got = table.get("path").unwrap().as_str().unwrap(); assert_eq!(got, should_be); verify_roundtrip(&crate_root, key, &item); } #[track_caller] fn verify_roundtrip(crate_root: &Path, key: &str, item: &toml_edit::Item) { let roundtrip = Dependency::from_toml(crate_root, key, item).unwrap(); let round_key = roundtrip.toml_key(); let round_item = roundtrip.to_toml(crate_root); assert_eq!(key, round_key); assert_eq!(item.to_string(), round_item.to_string()); } } cargo-0.66.0/src/cargo/ops/cargo_add/manifest.rs000066400000000000000000000400061432416201200214330ustar00rootroot00000000000000use std::ops::{Deref, DerefMut}; use std::path::{Path, PathBuf}; use std::str; use anyhow::Context as _; use super::dependency::Dependency; use crate::core::dependency::DepKind; use crate::core::FeatureValue; use crate::util::interning::InternedString; use crate::CargoResult; /// Dependency table to add dep to #[derive(Clone, 
Debug, PartialEq, Eq)] pub struct DepTable { kind: DepKind, target: Option, } impl DepTable { const KINDS: &'static [Self] = &[ Self::new().set_kind(DepKind::Normal), Self::new().set_kind(DepKind::Development), Self::new().set_kind(DepKind::Build), ]; /// Reference to a Dependency Table pub const fn new() -> Self { Self { kind: DepKind::Normal, target: None, } } /// Choose the type of dependency pub const fn set_kind(mut self, kind: DepKind) -> Self { self.kind = kind; self } /// Choose the platform for the dependency pub fn set_target(mut self, target: impl Into) -> Self { self.target = Some(target.into()); self } /// Type of dependency pub fn kind(&self) -> DepKind { self.kind } /// Platform for the dependency pub fn target(&self) -> Option<&str> { self.target.as_deref() } /// Keys to the table pub fn to_table(&self) -> Vec<&str> { if let Some(target) = &self.target { vec!["target", target, self.kind_table()] } else { vec![self.kind_table()] } } fn kind_table(&self) -> &str { match self.kind { DepKind::Normal => "dependencies", DepKind::Development => "dev-dependencies", DepKind::Build => "build-dependencies", } } } impl Default for DepTable { fn default() -> Self { Self::new() } } impl From for DepTable { fn from(other: DepKind) -> Self { Self::new().set_kind(other) } } /// A Cargo manifest #[derive(Debug, Clone)] pub struct Manifest { /// Manifest contents as TOML data pub data: toml_edit::Document, } impl Manifest { /// Get the manifest's package name pub fn package_name(&self) -> CargoResult<&str> { self.data .as_table() .get("package") .and_then(|m| m.get("name")) .and_then(|m| m.as_str()) .ok_or_else(parse_manifest_err) } /// Get the specified table from the manifest. pub fn get_table<'a>(&'a self, table_path: &[String]) -> CargoResult<&'a toml_edit::Item> { /// Descend into a manifest until the required table is found. 
fn descend<'a>( input: &'a toml_edit::Item, path: &[String], ) -> CargoResult<&'a toml_edit::Item> { if let Some(segment) = path.get(0) { let value = input .get(&segment) .ok_or_else(|| non_existent_table_err(segment))?; if value.is_table_like() { descend(value, &path[1..]) } else { Err(non_existent_table_err(segment)) } } else { Ok(input) } } descend(self.data.as_item(), table_path) } /// Get the specified table from the manifest. pub fn get_table_mut<'a>( &'a mut self, table_path: &[String], ) -> CargoResult<&'a mut toml_edit::Item> { /// Descend into a manifest until the required table is found. fn descend<'a>( input: &'a mut toml_edit::Item, path: &[String], ) -> CargoResult<&'a mut toml_edit::Item> { if let Some(segment) = path.get(0) { let mut default_table = toml_edit::Table::new(); default_table.set_implicit(true); let value = input[&segment].or_insert(toml_edit::Item::Table(default_table)); if value.is_table_like() { descend(value, &path[1..]) } else { Err(non_existent_table_err(segment)) } } else { Ok(input) } } descend(self.data.as_item_mut(), table_path) } /// Get all sections in the manifest that exist and might contain dependencies. /// The returned items are always `Table` or `InlineTable`. pub fn get_sections(&self) -> Vec<(DepTable, toml_edit::Item)> { let mut sections = Vec::new(); for table in DepTable::KINDS { let dependency_type = table.kind_table(); // Dependencies can be in the three standard sections... if self .data .get(dependency_type) .map(|t| t.is_table_like()) .unwrap_or(false) { sections.push((table.clone(), self.data[dependency_type].clone())) } // ... and in `target..(build-/dev-)dependencies`. 
let target_sections = self .data .as_table() .get("target") .and_then(toml_edit::Item::as_table_like) .into_iter() .flat_map(toml_edit::TableLike::iter) .filter_map(|(target_name, target_table)| { let dependency_table = target_table.get(dependency_type)?; dependency_table.as_table_like().map(|_| { ( table.clone().set_target(target_name), dependency_table.clone(), ) }) }); sections.extend(target_sections); } sections } pub fn get_legacy_sections(&self) -> Vec { let mut result = Vec::new(); for dependency_type in ["dev_dependencies", "build_dependencies"] { if self.data.contains_key(dependency_type) { result.push(dependency_type.to_owned()); } // ... and in `target..(build-/dev-)dependencies`. result.extend( self.data .as_table() .get("target") .and_then(toml_edit::Item::as_table_like) .into_iter() .flat_map(toml_edit::TableLike::iter) .filter_map(|(target_name, target_table)| { if target_table.as_table_like()?.contains_key(dependency_type) { Some(format!("target.{target_name}.{dependency_type}")) } else { None } }), ); } result } } impl str::FromStr for Manifest { type Err = anyhow::Error; /// Read manifest data from string fn from_str(input: &str) -> ::std::result::Result { let d: toml_edit::Document = input.parse().context("Manifest not valid TOML")?; Ok(Manifest { data: d }) } } impl std::fmt::Display for Manifest { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { self.data.fmt(f) } } /// A Cargo manifest that is available locally. #[derive(Debug)] pub struct LocalManifest { /// Path to the manifest pub path: PathBuf, /// Manifest contents pub manifest: Manifest, } impl Deref for LocalManifest { type Target = Manifest; fn deref(&self) -> &Manifest { &self.manifest } } impl DerefMut for LocalManifest { fn deref_mut(&mut self) -> &mut Manifest { &mut self.manifest } } impl LocalManifest { /// Construct the `LocalManifest` corresponding to the `Path` provided. 
pub fn try_new(path: &Path) -> CargoResult { if !path.is_absolute() { anyhow::bail!("can only edit absolute paths, got {}", path.display()); } let data = cargo_util::paths::read(&path)?; let manifest = data.parse().context("Unable to parse Cargo.toml")?; Ok(LocalManifest { manifest, path: path.to_owned(), }) } /// Write changes back to the file pub fn write(&self) -> CargoResult<()> { if !self.manifest.data.contains_key("package") && !self.manifest.data.contains_key("project") { if self.manifest.data.contains_key("workspace") { anyhow::bail!( "found virtual manifest at {}, but this command requires running against an \ actual package in this workspace.", self.path.display() ); } else { anyhow::bail!( "missing expected `package` or `project` fields in {}", self.path.display() ); } } let s = self.manifest.data.to_string(); let new_contents_bytes = s.as_bytes(); cargo_util::paths::write(&self.path, new_contents_bytes) } /// Lookup a dependency pub fn get_dependency_versions<'s>( &'s self, dep_key: &'s str, ) -> impl Iterator)> + 's { let crate_root = self.path.parent().expect("manifest path is absolute"); self.get_sections() .into_iter() .filter_map(move |(table_path, table)| { let table = table.into_table().ok()?; Some( table .into_iter() .filter_map(|(key, item)| { if key.as_str() == dep_key { Some((table_path.clone(), key, item)) } else { None } }) .collect::>(), ) }) .flatten() .map(move |(table_path, dep_key, dep_item)| { let dep = Dependency::from_toml(crate_root, &dep_key, &dep_item); (table_path, dep) }) } /// Add entry to a Cargo.toml. 
pub fn insert_into_table( &mut self, table_path: &[String], dep: &Dependency, ) -> CargoResult<()> { let crate_root = self .path .parent() .expect("manifest path is absolute") .to_owned(); let dep_key = dep.toml_key(); let table = self.get_table_mut(table_path)?; if let Some((mut dep_key, dep_item)) = table .as_table_like_mut() .unwrap() .get_key_value_mut(dep_key) { dep.update_toml(&crate_root, &mut dep_key, dep_item); } else { let new_dependency = dep.to_toml(&crate_root); table[dep_key] = new_dependency; } if let Some(t) = table.as_inline_table_mut() { t.fmt() } Ok(()) } /// Remove references to `dep_key` if its no longer present pub fn gc_dep(&mut self, dep_key: &str) { let explicit_dep_activation = self.is_explicit_dep_activation(dep_key); let status = self.dep_status(dep_key); if let Some(toml_edit::Item::Table(feature_table)) = self.data.as_table_mut().get_mut("features") { for (_feature, mut feature_values) in feature_table.iter_mut() { if let toml_edit::Item::Value(toml_edit::Value::Array(feature_values)) = &mut feature_values { fix_feature_activations( feature_values, dep_key, status, explicit_dep_activation, ); } } } } fn is_explicit_dep_activation(&self, dep_key: &str) -> bool { if let Some(toml_edit::Item::Table(feature_table)) = self.data.as_table().get("features") { for values in feature_table .iter() .map(|(_, a)| a) .filter_map(|i| i.as_value()) .filter_map(|v| v.as_array()) { for value in values.iter().filter_map(|v| v.as_str()) { let value = FeatureValue::new(InternedString::new(value)); if let FeatureValue::Dep { dep_name } = &value { if dep_name.as_str() == dep_key { return true; } } } } } false } fn dep_status(&self, dep_key: &str) -> DependencyStatus { let mut status = DependencyStatus::None; for (_, tbl) in self.get_sections() { if let toml_edit::Item::Table(tbl) = tbl { if let Some(dep_item) = tbl.get(dep_key) { let optional = dep_item .get("optional") .and_then(|i| i.as_value()) .and_then(|i| i.as_bool()) .unwrap_or(false); if optional { 
return DependencyStatus::Optional; } else { status = DependencyStatus::Required; } } } } status } } impl std::fmt::Display for LocalManifest { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { self.manifest.fmt(f) } } #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] enum DependencyStatus { None, Optional, Required, } fn fix_feature_activations( feature_values: &mut toml_edit::Array, dep_key: &str, status: DependencyStatus, explicit_dep_activation: bool, ) { let remove_list: Vec = feature_values .iter() .enumerate() .filter_map(|(idx, value)| value.as_str().map(|s| (idx, s))) .filter_map(|(idx, value)| { let parsed_value = FeatureValue::new(InternedString::new(value)); match status { DependencyStatus::None => match (parsed_value, explicit_dep_activation) { (FeatureValue::Feature(dep_name), false) | (FeatureValue::Dep { dep_name }, _) | (FeatureValue::DepFeature { dep_name, .. }, _) => dep_name == dep_key, _ => false, }, DependencyStatus::Optional => false, DependencyStatus::Required => match (parsed_value, explicit_dep_activation) { (FeatureValue::Feature(dep_name), false) | (FeatureValue::Dep { dep_name }, _) => dep_name == dep_key, (FeatureValue::Feature(_), true) | (FeatureValue::DepFeature { .. }, _) => { false } }, } .then(|| idx) }) .collect(); // Remove found idx in revers order so we don't invalidate the idx. 
for idx in remove_list.iter().rev() { feature_values.remove(*idx); } if status == DependencyStatus::Required { for value in feature_values.iter_mut() { let parsed_value = if let Some(value) = value.as_str() { FeatureValue::new(InternedString::new(value)) } else { continue; }; if let FeatureValue::DepFeature { dep_name, dep_feature, weak, } = parsed_value { if dep_name == dep_key && weak { *value = format!("{dep_name}/{dep_feature}").into(); } } } } } pub fn str_or_1_len_table(item: &toml_edit::Item) -> bool { item.is_str() || item.as_table_like().map(|t| t.len() == 1).unwrap_or(false) } fn parse_manifest_err() -> anyhow::Error { anyhow::format_err!("unable to parse external Cargo.toml") } fn non_existent_table_err(table: impl std::fmt::Display) -> anyhow::Error { anyhow::format_err!("the table `{table}` could not be found.") } cargo-0.66.0/src/cargo/ops/cargo_add/mod.rs000066400000000000000000000737061432416201200204210ustar00rootroot00000000000000//! Core of cargo-add command mod crate_spec; mod dependency; mod manifest; use std::collections::BTreeMap; use std::collections::BTreeSet; use std::collections::VecDeque; use std::path::Path; use anyhow::Context as _; use cargo_util::paths; use indexmap::IndexSet; use termcolor::Color::Green; use termcolor::Color::Red; use termcolor::ColorSpec; use toml_edit::Item as TomlItem; use crate::core::dependency::DepKind; use crate::core::registry::PackageRegistry; use crate::core::FeatureValue; use crate::core::Package; use crate::core::QueryKind; use crate::core::Registry; use crate::core::Shell; use crate::core::Summary; use crate::core::Workspace; use crate::CargoResult; use crate::Config; use crate_spec::CrateSpec; use dependency::Dependency; use dependency::GitSource; use dependency::PathSource; use dependency::RegistrySource; use dependency::Source; use manifest::LocalManifest; use crate::ops::cargo_add::dependency::{MaybeWorkspace, WorkspaceSource}; pub use manifest::DepTable; /// Information on what dependencies should 
be added #[derive(Clone, Debug)] pub struct AddOptions<'a> { /// Configuration information for cargo operations pub config: &'a Config, /// Package to add dependencies to pub spec: &'a Package, /// Dependencies to add or modify pub dependencies: Vec, /// Which dependency section to add these to pub section: DepTable, /// Act as if dependencies will be added pub dry_run: bool, } /// Add dependencies to a manifest pub fn add(workspace: &Workspace<'_>, options: &AddOptions<'_>) -> CargoResult<()> { let dep_table = options .section .to_table() .into_iter() .map(String::from) .collect::>(); let manifest_path = options.spec.manifest_path().to_path_buf(); let mut manifest = LocalManifest::try_new(&manifest_path)?; let original_raw_manifest = manifest.to_string(); let legacy = manifest.get_legacy_sections(); if !legacy.is_empty() { anyhow::bail!( "Deprecated dependency sections are unsupported: {}", legacy.join(", ") ); } let mut registry = PackageRegistry::new(options.config)?; let deps = { let _lock = options.config.acquire_package_cache_lock()?; registry.lock_patches(); options .dependencies .iter() .map(|raw| { resolve_dependency( &manifest, raw, workspace, &options.section, options.config, &mut registry, ) }) .collect::>>()? }; let was_sorted = manifest .get_table(&dep_table) .map(TomlItem::as_table) .map_or(true, |table_option| { table_option.map_or(true, |table| is_sorted(table.iter().map(|(name, _)| name))) }); for dep in deps { print_msg(&mut options.config.shell(), &dep, &dep_table)?; if let Some(Source::Path(src)) = dep.source() { if src.path == manifest.path.parent().unwrap_or_else(|| Path::new("")) { anyhow::bail!( "cannot add `{}` as a dependency to itself", manifest.package_name()? 
) } } let available_features = dep .available_features .keys() .map(|s| s.as_ref()) .collect::>(); let mut unknown_features: Vec<&str> = Vec::new(); if let Some(req_feats) = dep.features.as_ref() { let req_feats: BTreeSet<_> = req_feats.iter().map(|s| s.as_str()).collect(); unknown_features.extend(req_feats.difference(&available_features).copied()); } if let Some(inherited_features) = dep.inherited_features.as_ref() { let inherited_features: BTreeSet<_> = inherited_features.iter().map(|s| s.as_str()).collect(); unknown_features.extend(inherited_features.difference(&available_features).copied()); } unknown_features.sort(); if !unknown_features.is_empty() { anyhow::bail!("unrecognized features: {unknown_features:?}"); } manifest.insert_into_table(&dep_table, &dep)?; manifest.gc_dep(dep.toml_key()); } if was_sorted { if let Some(table) = manifest .get_table_mut(&dep_table) .ok() .and_then(TomlItem::as_table_like_mut) { table.sort_values(); } } if options.config.locked() { let new_raw_manifest = manifest.to_string(); if original_raw_manifest != new_raw_manifest { anyhow::bail!( "the manifest file {} needs to be updated but --locked was passed to prevent this", manifest.path.display() ); } } if options.dry_run { options.config.shell().warn("aborting add due to dry run")?; } else { manifest.write()?; } Ok(()) } /// Dependency entry operation #[derive(Clone, Debug, PartialEq, Eq)] pub struct DepOp { /// Describes the crate pub crate_spec: Option, /// Dependency key, overriding the package name in crate_spec pub rename: Option, /// Feature flags to activate pub features: Option>, /// Whether the default feature should be activated pub default_features: Option, /// Whether dependency is optional pub optional: Option, /// Registry for looking up dependency version pub registry: Option, /// Git repo for dependency pub path: Option, /// Git repo for dependency pub git: Option, /// Specify an alternative git branch pub branch: Option, /// Specify a specific git rev pub rev: 
Option, /// Specify a specific git tag pub tag: Option, } fn resolve_dependency( manifest: &LocalManifest, arg: &DepOp, ws: &Workspace<'_>, section: &DepTable, config: &Config, registry: &mut PackageRegistry<'_>, ) -> CargoResult { let crate_spec = arg .crate_spec .as_deref() .map(CrateSpec::resolve) .transpose()?; let mut selected_dep = if let Some(url) = &arg.git { let mut src = GitSource::new(url); if let Some(branch) = &arg.branch { src = src.set_branch(branch); } if let Some(tag) = &arg.tag { src = src.set_tag(tag); } if let Some(rev) = &arg.rev { src = src.set_rev(rev); } let selected = if let Some(crate_spec) = &crate_spec { if let Some(v) = crate_spec.version_req() { // crate specifier includes a version (e.g. `docopt@0.8`) anyhow::bail!("cannot specify a git URL (`{url}`) with a version (`{v}`)."); } let dependency = crate_spec.to_dependency()?.set_source(src); let selected = select_package(&dependency, config, registry)?; if dependency.name != selected.name { config.shell().warn(format!( "translating `{}` to `{}`", dependency.name, selected.name, ))?; } selected } else { let mut source = crate::sources::GitSource::new(src.source_id()?, config)?; let packages = source.read_packages()?; let package = infer_package(packages, &src)?; Dependency::from(package.summary()) }; selected } else if let Some(raw_path) = &arg.path { let path = paths::normalize_path(&std::env::current_dir()?.join(raw_path)); let src = PathSource::new(&path); let selected = if let Some(crate_spec) = &crate_spec { if let Some(v) = crate_spec.version_req() { // crate specifier includes a version (e.g. 
`docopt@0.8`) anyhow::bail!("cannot specify a path (`{raw_path}`) with a version (`{v}`)."); } let dependency = crate_spec.to_dependency()?.set_source(src); let selected = select_package(&dependency, config, registry)?; if dependency.name != selected.name { config.shell().warn(format!( "translating `{}` to `{}`", dependency.name, selected.name, ))?; } selected } else { let source = crate::sources::PathSource::new(&path, src.source_id()?, config); let packages = source.read_packages()?; let package = infer_package(packages, &src)?; Dependency::from(package.summary()) }; selected } else if let Some(crate_spec) = &crate_spec { crate_spec.to_dependency()? } else { anyhow::bail!("dependency name is required"); }; selected_dep = populate_dependency(selected_dep, arg); let old_dep = get_existing_dependency(manifest, selected_dep.toml_key(), section)?; let mut dependency = if let Some(mut old_dep) = old_dep.clone() { if old_dep.name != selected_dep.name { // Assuming most existing keys are not relevant when the package changes if selected_dep.optional.is_none() { selected_dep.optional = old_dep.optional; } selected_dep } else { if selected_dep.source().is_some() { // Overwrite with `crate_spec` old_dep.source = selected_dep.source; } populate_dependency(old_dep, arg) } } else { selected_dep }; if dependency.source().is_none() { // Checking for a workspace dependency happens first since a member could be specified // in the workspace dependencies table as a dependency if let Some(_dep) = find_workspace_dep(dependency.toml_key(), ws.root_manifest()).ok() { dependency = dependency.set_source(WorkspaceSource::new()); } else if let Some(package) = ws.members().find(|p| p.name().as_str() == dependency.name) { // Only special-case workspaces when the user doesn't provide any extra // information, otherwise, trust the user. 
let mut src = PathSource::new(package.root()); // dev-dependencies do not need the version populated if section.kind() != DepKind::Development { let op = ""; let v = format!("{op}{version}", version = package.version()); src = src.set_version(v); } dependency = dependency.set_source(src); } else { let latest = get_latest_dependency(&dependency, false, config, registry)?; if dependency.name != latest.name { config.shell().warn(format!( "translating `{}` to `{}`", dependency.name, latest.name, ))?; dependency.name = latest.name; // Normalize the name } dependency = dependency.set_source(latest.source.expect("latest always has a source")); } } if let Some(Source::Workspace(_)) = dependency.source() { check_invalid_ws_keys(dependency.toml_key(), arg)?; } let version_required = dependency.source().and_then(|s| s.as_registry()).is_some(); let version_optional_in_section = section.kind() == DepKind::Development; let preserve_existing_version = old_dep .as_ref() .map(|d| d.version().is_some()) .unwrap_or(false); if !version_required && !preserve_existing_version && version_optional_in_section { // dev-dependencies do not need the version populated dependency = dependency.clear_version(); } let query = dependency.query(config)?; let query = match query { MaybeWorkspace::Workspace(_workspace) => { let dep = find_workspace_dep(dependency.toml_key(), ws.root_manifest())?; if let Some(features) = dep.features.clone() { dependency = dependency.set_inherited_features(features); } let query = dep.query(config)?; match query { MaybeWorkspace::Workspace(_) => { unreachable!("This should have been caught when parsing a workspace root") } MaybeWorkspace::Other(query) => query, } } MaybeWorkspace::Other(query) => query, }; let dependency = populate_available_features(dependency, &query, registry)?; Ok(dependency) } /// When { workspace = true } you cannot define other keys that configure /// the source of the dependency such as `version`, `registry`, `registry-index`, /// `path`, 
`git`, `branch`, `tag`, `rev`, or `package`. You can also not define /// `default-features`. /// /// Only `default-features`, `registry` and `rename` need to be checked /// for currently. This is because `git` and its associated keys, `path`, and /// `version` should all bee checked before this is called. `rename` is checked /// for as it turns into `package` fn check_invalid_ws_keys(toml_key: &str, arg: &DepOp) -> CargoResult<()> { fn err_msg(toml_key: &str, flag: &str, field: &str) -> String { format!( "cannot override workspace dependency with `{flag}`, \ either change `workspace.dependencies.{toml_key}.{field}` \ or define the dependency exclusively in the package's manifest" ) } if arg.default_features.is_some() { anyhow::bail!( "{}", err_msg(toml_key, "--default-features", "default-features") ) } if arg.registry.is_some() { anyhow::bail!("{}", err_msg(toml_key, "--registry", "registry")) } // rename is `package` if arg.rename.is_some() { anyhow::bail!("{}", err_msg(toml_key, "--rename", "package")) } Ok(()) } /// Provide the existing dependency for the target table /// /// If it doesn't exist but exists in another table, let's use that as most likely users /// want to use the same version across all tables unless they are renaming. 
fn get_existing_dependency( manifest: &LocalManifest, dep_key: &str, section: &DepTable, ) -> CargoResult> { #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug)] enum Key { Error, Dev, Build, Normal, Existing, } let mut possible: Vec<_> = manifest .get_dependency_versions(dep_key) .map(|(path, dep)| { let key = if path == *section { (Key::Existing, true) } else if dep.is_err() { (Key::Error, path.target().is_some()) } else { let key = match path.kind() { DepKind::Normal => Key::Normal, DepKind::Build => Key::Build, DepKind::Development => Key::Dev, }; (key, path.target().is_some()) }; (key, dep) }) .collect(); possible.sort_by_key(|(key, _)| *key); let (key, dep) = if let Some(item) = possible.pop() { item } else { return Ok(None); }; let mut dep = dep?; if key.0 != Key::Existing { // When the dep comes from a different section, we only care about the source and not any // of the other fields, like `features` let unrelated = dep; dep = Dependency::new(&unrelated.name); dep.source = unrelated.source.clone(); dep.registry = unrelated.registry.clone(); // dev-dependencies do not need the version populated when path is set though we // should preserve it if the user chose to populate it. 
let version_required = unrelated.source().and_then(|s| s.as_registry()).is_some(); let version_optional_in_section = section.kind() == DepKind::Development; if !version_required && version_optional_in_section { dep = dep.clear_version(); } } Ok(Some(dep)) } fn get_latest_dependency( dependency: &Dependency, _flag_allow_prerelease: bool, config: &Config, registry: &mut PackageRegistry<'_>, ) -> CargoResult { let query = dependency.query(config)?; match query { MaybeWorkspace::Workspace(_) => { unreachable!("registry dependencies required, found a workspace dependency"); } MaybeWorkspace::Other(query) => { let possibilities = loop { match registry.query_vec(&query, QueryKind::Fuzzy) { std::task::Poll::Ready(res) => { break res?; } std::task::Poll::Pending => registry.block_until_ready()?, } }; let latest = possibilities .iter() .max_by_key(|s| { // Fallback to a pre-release if no official release is available by sorting them as // less. let stable = s.version().pre.is_empty(); (stable, s.version()) }) .ok_or_else(|| { anyhow::format_err!( "the crate `{dependency}` could not be found in registry index." 
) })?; let mut dep = Dependency::from(latest); if let Some(reg_name) = dependency.registry.as_deref() { dep = dep.set_registry(reg_name); } Ok(dep) } } } fn select_package( dependency: &Dependency, config: &Config, registry: &mut PackageRegistry<'_>, ) -> CargoResult { let query = dependency.query(config)?; match query { MaybeWorkspace::Workspace(_) => { unreachable!("path or git dependency expected, found workspace dependency"); } MaybeWorkspace::Other(query) => { let possibilities = loop { // Exact to avoid returning all for path/git match registry.query_vec(&query, QueryKind::Exact) { std::task::Poll::Ready(res) => { break res?; } std::task::Poll::Pending => registry.block_until_ready()?, } }; match possibilities.len() { 0 => { let source = dependency .source() .expect("source should be resolved before here"); anyhow::bail!("the crate `{dependency}` could not be found at `{source}`") } 1 => { let mut dep = Dependency::from(&possibilities[0]); if let Some(reg_name) = dependency.registry.as_deref() { dep = dep.set_registry(reg_name); } Ok(dep) } _ => { let source = dependency .source() .expect("source should be resolved before here"); anyhow::bail!( "unexpectedly found multiple copies of crate `{dependency}` at `{source}`" ) } } } } } fn infer_package(mut packages: Vec, src: &dyn std::fmt::Display) -> CargoResult { let package = match packages.len() { 0 => { anyhow::bail!("no packages found at `{src}`"); } 1 => packages.pop().expect("match ensured element is present"), _ => { let mut names: Vec<_> = packages .iter() .map(|p| p.name().as_str().to_owned()) .collect(); names.sort_unstable(); anyhow::bail!("multiple packages found at `{src}`: {}", names.join(", ")); } }; Ok(package) } fn populate_dependency(mut dependency: Dependency, arg: &DepOp) -> Dependency { if let Some(registry) = &arg.registry { if registry.is_empty() { dependency.registry = None; } else { dependency.registry = Some(registry.to_owned()); } } if let Some(value) = arg.optional { if value { 
dependency.optional = Some(true); } else { dependency.optional = None; } } if let Some(value) = arg.default_features { if value { dependency.default_features = None; } else { dependency.default_features = Some(false); } } if let Some(value) = arg.features.as_ref() { dependency = dependency.extend_features(value.iter().cloned()); } if let Some(rename) = &arg.rename { dependency = dependency.set_rename(rename); } dependency } /// Track presentation-layer information with the editable representation of a `[dependencies]` /// entry (Dependency) pub struct DependencyUI { /// Editable representation of a `[depednencies]` entry dep: Dependency, /// The version of the crate that we pulled `available_features` from available_version: Option, /// The widest set of features compatible with `Dependency`s version requirement available_features: BTreeMap>, } impl DependencyUI { fn new(dep: Dependency) -> Self { Self { dep, available_version: None, available_features: Default::default(), } } fn apply_summary(&mut self, summary: &Summary) { self.available_version = Some(summary.version().clone()); self.available_features = summary .features() .iter() .map(|(k, v)| { ( k.as_str().to_owned(), v.iter() .filter_map(|v| match v { FeatureValue::Feature(f) => Some(f.as_str().to_owned()), FeatureValue::Dep { .. } | FeatureValue::DepFeature { .. 
} => None, }) .collect::>(), ) }) .collect(); } } impl<'s> From<&'s Summary> for DependencyUI { fn from(other: &'s Summary) -> Self { let dep = Dependency::from(other); let mut dep = Self::new(dep); dep.apply_summary(other); dep } } impl std::fmt::Display for DependencyUI { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { self.dep.fmt(f) } } impl std::ops::Deref for DependencyUI { type Target = Dependency; fn deref(&self) -> &Self::Target { &self.dep } } /// Lookup available features fn populate_available_features( dependency: Dependency, query: &crate::core::dependency::Dependency, registry: &mut PackageRegistry<'_>, ) -> CargoResult { let mut dependency = DependencyUI::new(dependency); if !dependency.available_features.is_empty() { return Ok(dependency); } let possibilities = loop { match registry.query_vec(&query, QueryKind::Fuzzy) { std::task::Poll::Ready(res) => { break res?; } std::task::Poll::Pending => registry.block_until_ready()?, } }; // Ensure widest feature flag compatibility by picking the earliest version that could show up // in the lock file for a given version requirement. let lowest_common_denominator = possibilities .iter() .min_by_key(|s| { // Fallback to a pre-release if no official release is available by sorting them as // more. 
let is_pre = !s.version().pre.is_empty(); (is_pre, s.version()) }) .ok_or_else(|| { anyhow::format_err!("the crate `{dependency}` could not be found in registry index.") })?; dependency.apply_summary(&lowest_common_denominator); Ok(dependency) } fn print_msg(shell: &mut Shell, dep: &DependencyUI, section: &[String]) -> CargoResult<()> { use std::fmt::Write; if matches!(shell.verbosity(), crate::core::shell::Verbosity::Quiet) { return Ok(()); } let mut message = String::new(); write!(message, "{}", dep.name)?; match dep.source() { Some(Source::Registry(src)) => { if src.version.chars().next().unwrap_or('0').is_ascii_digit() { write!(message, " v{}", src.version)?; } else { write!(message, " {}", src.version)?; } } Some(Source::Path(_)) => { write!(message, " (local)")?; } Some(Source::Git(_)) => { write!(message, " (git)")?; } Some(Source::Workspace(_)) => { write!(message, " (workspace)")?; } None => {} } write!(message, " to")?; if dep.optional().unwrap_or(false) { write!(message, " optional")?; } let section = if section.len() == 1 { section[0].clone() } else { format!("{} for target `{}`", §ion[2], §ion[1]) }; write!(message, " {section}")?; write!(message, ".")?; shell.status("Adding", message)?; let mut activated: IndexSet<_> = dep.features.iter().flatten().map(|s| s.as_str()).collect(); if dep.default_features().unwrap_or(true) { activated.insert("default"); } activated.extend(dep.inherited_features.iter().flatten().map(|s| s.as_str())); let mut walk: VecDeque<_> = activated.iter().cloned().collect(); while let Some(next) = walk.pop_front() { walk.extend( dep.available_features .get(next) .into_iter() .flatten() .map(|s| s.as_str()), ); activated.extend( dep.available_features .get(next) .into_iter() .flatten() .map(|s| s.as_str()), ); } activated.remove("default"); activated.sort(); let mut deactivated = dep .available_features .keys() .filter(|f| !activated.contains(f.as_str()) && *f != "default") .collect::>(); deactivated.sort(); if !activated.is_empty() 
|| !deactivated.is_empty() { let prefix = format!("{:>13}", " "); let suffix = if let Some(version) = &dep.available_version { let mut version = version.clone(); version.build = Default::default(); let version = version.to_string(); // Avoid displaying the version if it will visually look like the version req that we // showed earlier let version_req = dep .version() .and_then(|v| semver::VersionReq::parse(v).ok()) .and_then(|v| precise_version(&v)); if version_req.as_deref() != Some(version.as_str()) { format!(" as of v{version}") } else { "".to_owned() } } else { "".to_owned() }; shell.write_stderr( format_args!("{}Features{}:\n", prefix, suffix), &ColorSpec::new(), )?; for feat in activated { shell.write_stderr(&prefix, &ColorSpec::new())?; shell.write_stderr('+', &ColorSpec::new().set_bold(true).set_fg(Some(Green)))?; shell.write_stderr(format_args!(" {}\n", feat), &ColorSpec::new())?; } for feat in deactivated { shell.write_stderr(&prefix, &ColorSpec::new())?; shell.write_stderr('-', &ColorSpec::new().set_bold(true).set_fg(Some(Red)))?; shell.write_stderr(format_args!(" {}\n", feat), &ColorSpec::new())?; } } Ok(()) } // Based on Iterator::is_sorted from nightly std; remove in favor of that when stabilized. fn is_sorted(mut it: impl Iterator) -> bool { let mut last = match it.next() { Some(e) => e, None => return true, }; for curr in it { if curr < last { return false; } last = curr; } true } fn find_workspace_dep(toml_key: &str, root_manifest: &Path) -> CargoResult { let manifest = LocalManifest::try_new(root_manifest)?; let manifest = manifest .data .as_item() .as_table_like() .context("could not make `manifest.data` into a table")?; let workspace = manifest .get("workspace") .context("could not find `workspace`")? .as_table_like() .context("could not make `manifest.data.workspace` into a table")?; let dependencies = workspace .get("dependencies") .context("could not find `dependencies` table in `workspace`")? 
.as_table_like() .context("could not make `dependencies` into a table")?; let dep_item = dependencies.get(toml_key).context(format!( "could not find {} in `workspace.dependencies`", toml_key ))?; Dependency::from_toml(root_manifest.parent().unwrap(), toml_key, dep_item) } /// Convert a `semver::VersionReq` into a rendered `semver::Version` if all fields are fully /// specified. fn precise_version(version_req: &semver::VersionReq) -> Option { version_req .comparators .iter() .filter(|c| { matches!( c.op, // Only ops we can determine a precise version from semver::Op::Exact | semver::Op::GreaterEq | semver::Op::LessEq | semver::Op::Tilde | semver::Op::Caret | semver::Op::Wildcard ) }) .filter_map(|c| { // Only do it when full precision is specified c.minor.and_then(|minor| { c.patch.map(|patch| semver::Version { major: c.major, minor, patch, pre: c.pre.clone(), build: Default::default(), }) }) }) .max() .map(|v| v.to_string()) } cargo-0.66.0/src/cargo/ops/cargo_clean.rs000066400000000000000000000321241432416201200201610ustar00rootroot00000000000000use crate::core::compiler::{CompileKind, CompileMode, Layout, RustcTargetData}; use crate::core::profiles::Profiles; use crate::core::{PackageIdSpec, TargetKind, Workspace}; use crate::ops; use crate::util::errors::CargoResult; use crate::util::interning::InternedString; use crate::util::lev_distance; use crate::util::{Config, Progress, ProgressStyle}; use anyhow::Context as _; use cargo_util::paths; use std::fs; use std::path::Path; pub struct CleanOptions<'a> { pub config: &'a Config, /// A list of packages to clean. If empty, everything is cleaned. pub spec: Vec, /// The target arch triple to clean, or None for the host arch pub targets: Vec, /// Whether to clean the release directory pub profile_specified: bool, /// Whether to clean the directory of a certain build profile pub requested_profile: InternedString, /// Whether to just clean the doc directory pub doc: bool, } /// Cleans the package's build artifacts. 
pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> { let mut target_dir = ws.target_dir(); let config = ws.config(); // If the doc option is set, we just want to delete the doc directory. if opts.doc { target_dir = target_dir.join("doc"); return clean_entire_folder(&target_dir.into_path_unlocked(), config); } let profiles = Profiles::new(ws, opts.requested_profile)?; if opts.profile_specified { // After parsing profiles we know the dir-name of the profile, if a profile // was passed from the command line. If so, delete only the directory of // that profile. let dir_name = profiles.get_dir_name(); target_dir = target_dir.join(dir_name); } // If we have a spec, then we need to delete some packages, otherwise, just // remove the whole target directory and be done with it! // // Note that we don't bother grabbing a lock here as we're just going to // blow it all away anyway. if opts.spec.is_empty() { return clean_entire_folder(&target_dir.into_path_unlocked(), config); } // Clean specific packages. let requested_kinds = CompileKind::from_requested_targets(config, &opts.targets)?; let target_data = RustcTargetData::new(ws, &requested_kinds)?; let (pkg_set, resolve) = ops::resolve_ws(ws)?; let prof_dir_name = profiles.get_dir_name(); let host_layout = Layout::new(ws, None, &prof_dir_name)?; // Convert requested kinds to a Vec of layouts. let target_layouts: Vec<(CompileKind, Layout)> = requested_kinds .into_iter() .filter_map(|kind| match kind { CompileKind::Target(target) => match Layout::new(ws, Some(target), &prof_dir_name) { Ok(layout) => Some(Ok((kind, layout))), Err(e) => Some(Err(e)), }, CompileKind::Host => None, }) .collect::>()?; // A Vec of layouts. This is a little convoluted because there can only be // one host_layout. 
let layouts = if opts.targets.is_empty() { vec![(CompileKind::Host, &host_layout)] } else { target_layouts .iter() .map(|(kind, layout)| (*kind, layout)) .collect() }; // Create a Vec that also includes the host for things that need to clean both. let layouts_with_host: Vec<(CompileKind, &Layout)> = std::iter::once((CompileKind::Host, &host_layout)) .chain(layouts.iter().map(|(k, l)| (*k, *l))) .collect(); // Cleaning individual rustdoc crates is currently not supported. // For example, the search index would need to be rebuilt to fully // remove it (otherwise you're left with lots of broken links). // Doc tests produce no output. // Get Packages for the specified specs. let mut pkg_ids = Vec::new(); for spec_str in opts.spec.iter() { // Translate the spec to a Package. let spec = PackageIdSpec::parse(spec_str)?; if spec.version().is_some() { config.shell().warn(&format!( "version qualifier in `-p {}` is ignored, \ cleaning all versions of `{}` found", spec_str, spec.name() ))?; } if spec.url().is_some() { config.shell().warn(&format!( "url qualifier in `-p {}` ignored, \ cleaning all versions of `{}` found", spec_str, spec.name() ))?; } let matches: Vec<_> = resolve.iter().filter(|id| spec.matches(*id)).collect(); if matches.is_empty() { let mut suggestion = String::new(); suggestion.push_str(&lev_distance::closest_msg( &spec.name(), resolve.iter(), |id| id.name().as_str(), )); anyhow::bail!( "package ID specification `{}` did not match any packages{}", spec, suggestion ); } pkg_ids.extend(matches); } let packages = pkg_set.get_many(pkg_ids)?; let mut progress = CleaningPackagesBar::new(config, packages.len()); for pkg in packages { let pkg_dir = format!("{}-*", pkg.name()); progress.on_cleaning_package(&pkg.name())?; // Clean fingerprints. 
for (_, layout) in &layouts_with_host { let dir = escape_glob_path(layout.fingerprint())?; rm_rf_glob(&Path::new(&dir).join(&pkg_dir), config, &mut progress)?; } for target in pkg.targets() { if target.is_custom_build() { // Get both the build_script_build and the output directory. for (_, layout) in &layouts_with_host { let dir = escape_glob_path(layout.build())?; rm_rf_glob(&Path::new(&dir).join(&pkg_dir), config, &mut progress)?; } continue; } let crate_name = target.crate_name(); for &mode in &[ CompileMode::Build, CompileMode::Test, CompileMode::Check { test: false }, ] { for (compile_kind, layout) in &layouts { let triple = target_data.short_name(compile_kind); let (file_types, _unsupported) = target_data .info(*compile_kind) .rustc_outputs(mode, target.kind(), triple)?; let (dir, uplift_dir) = match target.kind() { TargetKind::ExampleBin | TargetKind::ExampleLib(..) => { (layout.examples(), Some(layout.examples())) } // Tests/benchmarks are never uplifted. TargetKind::Test | TargetKind::Bench => (layout.deps(), None), _ => (layout.deps(), Some(layout.dest())), }; for file_type in file_types { // Some files include a hash in the filename, some don't. let hashed_name = file_type.output_filename(target, Some("*")); let unhashed_name = file_type.output_filename(target, None); let dir_glob = escape_glob_path(dir)?; let dir_glob = Path::new(&dir_glob); rm_rf_glob(&dir_glob.join(&hashed_name), config, &mut progress)?; rm_rf(&dir.join(&unhashed_name), config, &mut progress)?; // Remove dep-info file generated by rustc. It is not tracked in // file_types. It does not have a prefix. let hashed_dep_info = dir_glob.join(format!("{}-*.d", crate_name)); rm_rf_glob(&hashed_dep_info, config, &mut progress)?; let unhashed_dep_info = dir.join(format!("{}.d", crate_name)); rm_rf(&unhashed_dep_info, config, &mut progress)?; // Remove split-debuginfo files generated by rustc. 
let split_debuginfo_obj = dir_glob.join(format!("{}.*.o", crate_name)); rm_rf_glob(&split_debuginfo_obj, config, &mut progress)?; let split_debuginfo_dwo = dir_glob.join(format!("{}.*.dwo", crate_name)); rm_rf_glob(&split_debuginfo_dwo, config, &mut progress)?; // Remove the uplifted copy. if let Some(uplift_dir) = uplift_dir { let uplifted_path = uplift_dir.join(file_type.uplift_filename(target)); rm_rf(&uplifted_path, config, &mut progress)?; // Dep-info generated by Cargo itself. let dep_info = uplifted_path.with_extension("d"); rm_rf(&dep_info, config, &mut progress)?; } } // TODO: what to do about build_script_build? let dir = escape_glob_path(layout.incremental())?; let incremental = Path::new(&dir).join(format!("{}-*", crate_name)); rm_rf_glob(&incremental, config, &mut progress)?; } } } } Ok(()) } fn escape_glob_path(pattern: &Path) -> CargoResult { let pattern = pattern .to_str() .ok_or_else(|| anyhow::anyhow!("expected utf-8 path"))?; Ok(glob::Pattern::escape(pattern)) } fn rm_rf_glob( pattern: &Path, config: &Config, progress: &mut dyn CleaningProgressBar, ) -> CargoResult<()> { // TODO: Display utf8 warning to user? Or switch to globset? let pattern = pattern .to_str() .ok_or_else(|| anyhow::anyhow!("expected utf-8 path"))?; for path in glob::glob(pattern)? 
{ rm_rf(&path?, config, progress)?; } Ok(()) } fn rm_rf(path: &Path, config: &Config, progress: &mut dyn CleaningProgressBar) -> CargoResult<()> { if fs::symlink_metadata(path).is_err() { return Ok(()); } config .shell() .verbose(|shell| shell.status("Removing", path.display()))?; progress.display_now()?; for entry in walkdir::WalkDir::new(path).contents_first(true) { let entry = entry?; progress.on_clean()?; if entry.file_type().is_dir() { paths::remove_dir(entry.path()).with_context(|| "could not remove build directory")?; } else { paths::remove_file(entry.path()).with_context(|| "failed to remove build artifact")?; } } Ok(()) } fn clean_entire_folder(path: &Path, config: &Config) -> CargoResult<()> { let num_paths = walkdir::WalkDir::new(path).into_iter().count(); let mut progress = CleaningFolderBar::new(config, num_paths); rm_rf(path, config, &mut progress) } trait CleaningProgressBar { fn display_now(&mut self) -> CargoResult<()>; fn on_clean(&mut self) -> CargoResult<()>; } struct CleaningFolderBar<'cfg> { bar: Progress<'cfg>, max: usize, cur: usize, } impl<'cfg> CleaningFolderBar<'cfg> { fn new(cfg: &'cfg Config, max: usize) -> Self { Self { bar: Progress::with_style("Cleaning", ProgressStyle::Percentage, cfg), max, cur: 0, } } fn cur_progress(&self) -> usize { std::cmp::min(self.cur, self.max) } } impl<'cfg> CleaningProgressBar for CleaningFolderBar<'cfg> { fn display_now(&mut self) -> CargoResult<()> { self.bar.tick_now(self.cur_progress(), self.max, "") } fn on_clean(&mut self) -> CargoResult<()> { self.cur += 1; self.bar.tick(self.cur_progress(), self.max, "") } } struct CleaningPackagesBar<'cfg> { bar: Progress<'cfg>, max: usize, cur: usize, num_files_folders_cleaned: usize, package_being_cleaned: String, } impl<'cfg> CleaningPackagesBar<'cfg> { fn new(cfg: &'cfg Config, max: usize) -> Self { Self { bar: Progress::with_style("Cleaning", ProgressStyle::Ratio, cfg), max, cur: 0, num_files_folders_cleaned: 0, package_being_cleaned: String::new(), } } fn 
on_cleaning_package(&mut self, package: &str) -> CargoResult<()> { self.cur += 1; self.package_being_cleaned = String::from(package); self.bar .tick(self.cur_progress(), self.max, &self.format_message()) } fn cur_progress(&self) -> usize { std::cmp::min(self.cur, self.max) } fn format_message(&self) -> String { format!( ": {}, {} files/folders cleaned", self.package_being_cleaned, self.num_files_folders_cleaned ) } } impl<'cfg> CleaningProgressBar for CleaningPackagesBar<'cfg> { fn display_now(&mut self) -> CargoResult<()> { self.bar .tick_now(self.cur_progress(), self.max, &self.format_message()) } fn on_clean(&mut self) -> CargoResult<()> { self.bar .tick(self.cur_progress(), self.max, &self.format_message())?; self.num_files_folders_cleaned += 1; Ok(()) } } cargo-0.66.0/src/cargo/ops/cargo_compile.rs000066400000000000000000002103061432416201200205270ustar00rootroot00000000000000//! The Cargo "compile" operation. //! //! This module contains the entry point for starting the compilation process //! for commands like `build`, `test`, `doc`, `rustc`, etc. //! //! The `compile` function will do all the work to compile a workspace. A //! rough outline is: //! //! - Resolve the dependency graph (see `ops::resolve`). //! - Download any packages needed (see `PackageSet`). //! - Generate a list of top-level "units" of work for the targets the user //! requested on the command-line. Each `Unit` corresponds to a compiler //! invocation. This is done in this module (`generate_targets`). //! - Build the graph of `Unit` dependencies (see //! `core::compiler::context::unit_dependencies`). //! - Create a `Context` which will perform the following steps: //! - Prepare the `target` directory (see `Layout`). //! - Create a job queue (see `JobQueue`). The queue checks the //! fingerprint of each `Unit` to determine if it should run or be //! skipped. //! - Execute the queue. Each leaf in the queue's dependency graph is //! executed, and then removed from the graph when finished. 
This //! repeats until the queue is empty. use std::collections::{BTreeSet, HashMap, HashSet}; use std::fmt::Write; use std::hash::{Hash, Hasher}; use std::sync::Arc; use crate::core::compiler::unit_dependencies::{build_unit_dependencies, IsArtifact}; use crate::core::compiler::unit_graph::{self, UnitDep, UnitGraph}; use crate::core::compiler::{standard_lib, CrateType, TargetInfo}; use crate::core::compiler::{BuildConfig, BuildContext, Compilation, Context}; use crate::core::compiler::{CompileKind, CompileMode, CompileTarget, RustcTargetData, Unit}; use crate::core::compiler::{DefaultExecutor, Executor, UnitInterner}; use crate::core::profiles::{Profiles, UnitFor}; use crate::core::resolver::features::{self, CliFeatures, FeaturesFor}; use crate::core::resolver::{HasDevUnits, Resolve}; use crate::core::{FeatureValue, Package, PackageSet, Shell, Summary, Target}; use crate::core::{PackageId, PackageIdSpec, SourceId, TargetKind, Workspace}; use crate::drop_println; use crate::ops; use crate::ops::resolve::WorkspaceResolve; use crate::util::config::Config; use crate::util::interning::InternedString; use crate::util::restricted_names::is_glob_pattern; use crate::util::{closest_msg, profile, CargoResult, StableHasher}; use anyhow::{bail, Context as _}; /// Contains information about how a package should be compiled. /// /// Note on distinction between `CompileOptions` and `BuildConfig`: /// `BuildConfig` contains values that need to be retained after /// `BuildContext` is created. The other fields are no longer necessary. Think /// of it as `CompileOptions` are high-level settings requested on the /// command-line, and `BuildConfig` are low-level settings for actually /// driving `rustc`. #[derive(Debug)] pub struct CompileOptions { /// Configuration information for a rustc build pub build_config: BuildConfig, /// Feature flags requested by the user. pub cli_features: CliFeatures, /// A set of packages to build. 
pub spec: Packages, /// Filter to apply to the root package to select which targets will be /// built. pub filter: CompileFilter, /// Extra arguments to be passed to rustdoc (single target only) pub target_rustdoc_args: Option>, /// The specified target will be compiled with all the available arguments, /// note that this only accounts for the *final* invocation of rustc pub target_rustc_args: Option>, /// Crate types to be passed to rustc (single target only) pub target_rustc_crate_types: Option>, /// Extra arguments passed to all selected targets for rustdoc. pub local_rustdoc_args: Option>, /// Whether the `--document-private-items` flags was specified and should /// be forwarded to `rustdoc`. pub rustdoc_document_private_items: bool, /// Whether the build process should check the minimum Rust version /// defined in the cargo metadata for a crate. pub honor_rust_version: bool, } impl CompileOptions { pub fn new(config: &Config, mode: CompileMode) -> CargoResult { let jobs = None; let keep_going = false; Ok(CompileOptions { build_config: BuildConfig::new(config, jobs, keep_going, &[], mode)?, cli_features: CliFeatures::new_all(false), spec: ops::Packages::Packages(Vec::new()), filter: CompileFilter::Default { required_features_filterable: false, }, target_rustdoc_args: None, target_rustc_args: None, target_rustc_crate_types: None, local_rustdoc_args: None, rustdoc_document_private_items: false, honor_rust_version: true, }) } } #[derive(PartialEq, Eq, Debug)] pub enum Packages { Default, All, OptOut(Vec), Packages(Vec), } impl Packages { pub fn from_flags(all: bool, exclude: Vec, package: Vec) -> CargoResult { Ok(match (all, exclude.len(), package.len()) { (false, 0, 0) => Packages::Default, (false, 0, _) => Packages::Packages(package), (false, _, _) => anyhow::bail!("--exclude can only be used together with --workspace"), (true, 0, _) => Packages::All, (true, _, _) => Packages::OptOut(exclude), }) } /// Converts selected packages from a workspace to 
`PackageIdSpec`s. pub fn to_package_id_specs(&self, ws: &Workspace<'_>) -> CargoResult> { let specs = match self { Packages::All => ws .members() .map(Package::package_id) .map(PackageIdSpec::from_package_id) .collect(), Packages::OptOut(opt_out) => { let (mut patterns, mut names) = opt_patterns_and_names(opt_out)?; let specs = ws .members() .filter(|pkg| { !names.remove(pkg.name().as_str()) && !match_patterns(pkg, &mut patterns) }) .map(Package::package_id) .map(PackageIdSpec::from_package_id) .collect(); let warn = |e| ws.config().shell().warn(e); emit_package_not_found(ws, names, true).or_else(warn)?; emit_pattern_not_found(ws, patterns, true).or_else(warn)?; specs } Packages::Packages(packages) if packages.is_empty() => { vec![PackageIdSpec::from_package_id(ws.current()?.package_id())] } Packages::Packages(opt_in) => { let (mut patterns, packages) = opt_patterns_and_names(opt_in)?; let mut specs = packages .iter() .map(|p| PackageIdSpec::parse(p)) .collect::>>()?; if !patterns.is_empty() { let matched_pkgs = ws .members() .filter(|pkg| match_patterns(pkg, &mut patterns)) .map(Package::package_id) .map(PackageIdSpec::from_package_id); specs.extend(matched_pkgs); } emit_pattern_not_found(ws, patterns, false)?; specs } Packages::Default => ws .default_members() .map(Package::package_id) .map(PackageIdSpec::from_package_id) .collect(), }; if specs.is_empty() { if ws.is_virtual() { bail!( "manifest path `{}` contains no package: The manifest is virtual, \ and the workspace has no members.", ws.root().display() ) } bail!("no packages to compile") } Ok(specs) } /// Gets a list of selected packages from a workspace. 
pub fn get_packages<'ws>(&self, ws: &'ws Workspace<'_>) -> CargoResult> { let packages: Vec<_> = match self { Packages::Default => ws.default_members().collect(), Packages::All => ws.members().collect(), Packages::OptOut(opt_out) => { let (mut patterns, mut names) = opt_patterns_and_names(opt_out)?; let packages = ws .members() .filter(|pkg| { !names.remove(pkg.name().as_str()) && !match_patterns(pkg, &mut patterns) }) .collect(); emit_package_not_found(ws, names, true)?; emit_pattern_not_found(ws, patterns, true)?; packages } Packages::Packages(opt_in) => { let (mut patterns, mut names) = opt_patterns_and_names(opt_in)?; let packages = ws .members() .filter(|pkg| { names.remove(pkg.name().as_str()) || match_patterns(pkg, &mut patterns) }) .collect(); emit_package_not_found(ws, names, false)?; emit_pattern_not_found(ws, patterns, false)?; packages } }; Ok(packages) } /// Returns whether or not the user needs to pass a `-p` flag to target a /// specific package in the workspace. pub fn needs_spec_flag(&self, ws: &Workspace<'_>) -> bool { match self { Packages::Default => ws.default_members().count() > 1, Packages::All => ws.members().count() > 1, Packages::Packages(_) => true, Packages::OptOut(_) => true, } } } #[derive(Debug, PartialEq, Eq)] pub enum LibRule { /// Include the library, fail if not present True, /// Include the library if present Default, /// Exclude the library False, } #[derive(Debug)] pub enum FilterRule { All, Just(Vec), } #[derive(Debug)] pub enum CompileFilter { Default { /// Flag whether targets can be safely skipped when required-features are not satisfied. 
required_features_filterable: bool, }, Only { all_targets: bool, lib: LibRule, bins: FilterRule, examples: FilterRule, tests: FilterRule, benches: FilterRule, }, } pub fn compile<'a>(ws: &Workspace<'a>, options: &CompileOptions) -> CargoResult> { let exec: Arc = Arc::new(DefaultExecutor); compile_with_exec(ws, options, &exec) } /// Like `compile` but allows specifying a custom `Executor` that will be able to intercept build /// calls and add custom logic. `compile` uses `DefaultExecutor` which just passes calls through. pub fn compile_with_exec<'a>( ws: &Workspace<'a>, options: &CompileOptions, exec: &Arc, ) -> CargoResult> { ws.emit_warnings()?; compile_ws(ws, options, exec) } pub fn compile_ws<'a>( ws: &Workspace<'a>, options: &CompileOptions, exec: &Arc, ) -> CargoResult> { let interner = UnitInterner::new(); let bcx = create_bcx(ws, options, &interner)?; if options.build_config.unit_graph { unit_graph::emit_serialized_unit_graph(&bcx.roots, &bcx.unit_graph, ws.config())?; return Compilation::new(&bcx); } let _p = profile::start("compiling"); let cx = Context::new(&bcx)?; cx.compile(exec) } pub fn print<'a>( ws: &Workspace<'a>, options: &CompileOptions, print_opt_value: &str, ) -> CargoResult<()> { let CompileOptions { ref build_config, ref target_rustc_args, .. 
} = *options; let config = ws.config(); let rustc = config.load_global_rustc(Some(ws))?; for (index, kind) in build_config.requested_kinds.iter().enumerate() { if index != 0 { drop_println!(config); } let target_info = TargetInfo::new(config, &build_config.requested_kinds, &rustc, *kind)?; let mut process = rustc.process(); process.args(&target_info.rustflags); if let Some(args) = target_rustc_args { process.args(args); } if let CompileKind::Target(t) = kind { process.arg("--target").arg(t.short_name()); } process.arg("--print").arg(print_opt_value); process.exec()?; } Ok(()) } pub fn create_bcx<'a, 'cfg>( ws: &'a Workspace<'cfg>, options: &'a CompileOptions, interner: &'a UnitInterner, ) -> CargoResult> { let CompileOptions { ref build_config, ref spec, ref cli_features, ref filter, ref target_rustdoc_args, ref target_rustc_args, ref target_rustc_crate_types, ref local_rustdoc_args, rustdoc_document_private_items, honor_rust_version, } = *options; let config = ws.config(); // Perform some pre-flight validation. match build_config.mode { CompileMode::Test | CompileMode::Build | CompileMode::Check { .. } | CompileMode::Bench | CompileMode::RunCustomBuild => { if std::env::var("RUST_FLAGS").is_ok() { config.shell().warn( "Cargo does not read `RUST_FLAGS` environment variable. Did you mean `RUSTFLAGS`?", )?; } } CompileMode::Doc { .. } | CompileMode::Doctest | CompileMode::Docscrape => { if std::env::var("RUSTDOC_FLAGS").is_ok() { config.shell().warn( "Cargo does not read `RUSTDOC_FLAGS` environment variable. Did you mean `RUSTDOCFLAGS`?" 
)?; } } } config.validate_term_config()?; let target_data = RustcTargetData::new(ws, &build_config.requested_kinds)?; let all_packages = &Packages::All; let rustdoc_scrape_examples = &config.cli_unstable().rustdoc_scrape_examples; let need_reverse_dependencies = rustdoc_scrape_examples.is_some(); let full_specs = if need_reverse_dependencies { all_packages } else { spec }; let resolve_specs = full_specs.to_package_id_specs(ws)?; let has_dev_units = if filter.need_dev_deps(build_config.mode) || need_reverse_dependencies { HasDevUnits::Yes } else { HasDevUnits::No }; let resolve = ops::resolve_ws_with_opts( ws, &target_data, &build_config.requested_kinds, cli_features, &resolve_specs, has_dev_units, crate::core::resolver::features::ForceAllTargets::No, )?; let WorkspaceResolve { mut pkg_set, workspace_resolve, targeted_resolve: resolve, resolved_features, } = resolve; let std_resolve_features = if let Some(crates) = &config.cli_unstable().build_std { let (std_package_set, std_resolve, std_features) = standard_lib::resolve_std(ws, &target_data, &build_config, crates)?; pkg_set.add_set(std_package_set); Some((std_resolve, std_features)) } else { None }; // Find the packages in the resolver that the user wants to build (those // passed in with `-p` or the defaults from the workspace), and convert // Vec to a Vec. let specs = if need_reverse_dependencies { spec.to_package_id_specs(ws)? } else { resolve_specs.clone() }; let to_build_ids = resolve.specs_to_ids(&specs)?; // Now get the `Package` for each `PackageId`. This may trigger a download // if the user specified `-p` for a dependency that is not downloaded. // Dependencies will be downloaded during build_unit_dependencies. let mut to_builds = pkg_set.get_many(to_build_ids)?; // The ordering here affects some error messages coming out of cargo, so // let's be test and CLI friendly by always printing in the same order if // there's an error. 
to_builds.sort_by_key(|p| p.package_id()); for pkg in to_builds.iter() { pkg.manifest().print_teapot(config); if build_config.mode.is_any_test() && !ws.is_member(pkg) && pkg.dependencies().iter().any(|dep| !dep.is_transitive()) { anyhow::bail!( "package `{}` cannot be tested because it requires dev-dependencies \ and is not a member of the workspace", pkg.name() ); } } let (extra_args, extra_args_name) = match (target_rustc_args, target_rustdoc_args) { (&Some(ref args), _) => (Some(args.clone()), "rustc"), (_, &Some(ref args)) => (Some(args.clone()), "rustdoc"), _ => (None, ""), }; if extra_args.is_some() && to_builds.len() != 1 { panic!( "`{}` should not accept multiple `-p` flags", extra_args_name ); } let profiles = Profiles::new(ws, build_config.requested_profile)?; profiles.validate_packages( ws.profiles(), &mut config.shell(), workspace_resolve.as_ref().unwrap_or(&resolve), )?; // If `--target` has not been specified, then the unit graph is built // assuming `--target $HOST` was specified. See // `rebuild_unit_graph_shared` for more on why this is done. let explicit_host_kind = CompileKind::Target(CompileTarget::new(&target_data.rustc.host)?); let explicit_host_kinds: Vec<_> = build_config .requested_kinds .iter() .map(|kind| match kind { CompileKind::Host => explicit_host_kind, CompileKind::Target(t) => CompileKind::Target(*t), }) .collect(); // Passing `build_config.requested_kinds` instead of // `explicit_host_kinds` here so that `generate_targets` can do // its own special handling of `CompileKind::Host`. It will // internally replace the host kind by the `explicit_host_kind` // before setting as a unit. 
let mut units = generate_targets( ws, &to_builds, filter, &build_config.requested_kinds, explicit_host_kind, build_config.mode, &resolve, &workspace_resolve, &resolved_features, &pkg_set, &profiles, interner, )?; if let Some(args) = target_rustc_crate_types { override_rustc_crate_types(&mut units, args, interner)?; } let mut scrape_units = match rustdoc_scrape_examples { Some(arg) => { let filter = match arg.as_str() { "all" => CompileFilter::new_all_targets(), "examples" => CompileFilter::new( LibRule::False, FilterRule::none(), FilterRule::none(), FilterRule::All, FilterRule::none(), ), _ => { bail!( r#"-Z rustdoc-scrape-examples must take "all" or "examples" as an argument"# ) } }; let to_build_ids = resolve.specs_to_ids(&resolve_specs)?; let to_builds = pkg_set.get_many(to_build_ids)?; let mode = CompileMode::Docscrape; generate_targets( ws, &to_builds, &filter, &build_config.requested_kinds, explicit_host_kind, mode, &resolve, &workspace_resolve, &resolved_features, &pkg_set, &profiles, interner, )? .into_iter() // Proc macros should not be scraped for functions, since they only export macros .filter(|unit| !unit.target.proc_macro()) .collect::>() } None => Vec::new(), }; let std_roots = if let Some(crates) = standard_lib::std_crates(config, Some(&units)) { let (std_resolve, std_features) = std_resolve_features.as_ref().unwrap(); standard_lib::generate_std_roots( &crates, std_resolve, std_features, &explicit_host_kinds, &pkg_set, interner, &profiles, )? } else { Default::default() }; let mut unit_graph = build_unit_dependencies( ws, &pkg_set, &resolve, &resolved_features, std_resolve_features.as_ref(), &units, &scrape_units, &std_roots, build_config.mode, &target_data, &profiles, interner, )?; // TODO: In theory, Cargo should also dedupe the roots, but I'm uncertain // what heuristics to use in that case. 
if build_config.mode == (CompileMode::Doc { deps: true }) { remove_duplicate_doc(build_config, &units, &mut unit_graph); } if build_config .requested_kinds .iter() .any(CompileKind::is_host) { // Rebuild the unit graph, replacing the explicit host targets with // CompileKind::Host, merging any dependencies shared with build // dependencies. let new_graph = rebuild_unit_graph_shared( interner, unit_graph, &units, &scrape_units, explicit_host_kind, ); // This would be nicer with destructuring assignment. units = new_graph.0; scrape_units = new_graph.1; unit_graph = new_graph.2; } let mut extra_compiler_args = HashMap::new(); if let Some(args) = extra_args { if units.len() != 1 { anyhow::bail!( "extra arguments to `{}` can only be passed to one \ target, consider filtering\nthe package by passing, \ e.g., `--lib` or `--bin NAME` to specify a single target", extra_args_name ); } extra_compiler_args.insert(units[0].clone(), args); } for unit in &units { if unit.mode.is_doc() || unit.mode.is_doc_test() { let mut extra_args = local_rustdoc_args.clone(); // Add `--document-private-items` rustdoc flag if requested or if // the target is a binary. Binary crates get their private items // documented by default. if rustdoc_document_private_items || unit.target.is_bin() { let mut args = extra_args.take().unwrap_or_default(); args.push("--document-private-items".into()); if unit.target.is_bin() { // This warning only makes sense if it's possible to document private items // sometimes and ignore them at other times. But cargo consistently passes // `--document-private-items`, so the warning isn't useful. 
args.push("-Arustdoc::private-intra-doc-links".into()); } extra_args = Some(args); } if let Some(args) = extra_args { extra_compiler_args .entry(unit.clone()) .or_default() .extend(args); } } } if honor_rust_version { // Remove any pre-release identifiers for easier comparison let current_version = &target_data.rustc.version; let untagged_version = semver::Version::new( current_version.major, current_version.minor, current_version.patch, ); for unit in unit_graph.keys() { let version = match unit.pkg.rust_version() { Some(v) => v, None => continue, }; let req = semver::VersionReq::parse(version).unwrap(); if req.matches(&untagged_version) { continue; } let guidance = if ws.is_ephemeral() { if ws.ignore_lock() { "Try re-running cargo install with `--locked`".to_string() } else { String::new() } } else if !unit.is_local() { format!( "Either upgrade to rustc {} or newer, or use\n\ cargo update -p {}@{} --precise ver\n\ where `ver` is the latest version of `{}` supporting rustc {}", version, unit.pkg.name(), unit.pkg.version(), unit.pkg.name(), current_version, ) } else { String::new() }; anyhow::bail!( "package `{}` cannot be built because it requires rustc {} or newer, \ while the currently active rustc version is {}\n{}", unit.pkg, version, current_version, guidance, ); } } let bcx = BuildContext::new( ws, pkg_set, build_config, profiles, extra_compiler_args, target_data, units, unit_graph, scrape_units, )?; Ok(bcx) } impl FilterRule { pub fn new(targets: Vec, all: bool) -> FilterRule { if all { FilterRule::All } else { FilterRule::Just(targets) } } pub fn none() -> FilterRule { FilterRule::Just(Vec::new()) } fn matches(&self, target: &Target) -> bool { match *self { FilterRule::All => true, FilterRule::Just(ref targets) => targets.iter().any(|x| *x == target.name()), } } fn is_specific(&self) -> bool { match *self { FilterRule::All => true, FilterRule::Just(ref targets) => !targets.is_empty(), } } pub fn try_collect(&self) -> Option> { match *self { FilterRule::All 
=> None, FilterRule::Just(ref targets) => Some(targets.clone()), } } pub(crate) fn contains_glob_patterns(&self) -> bool { match self { FilterRule::All => false, FilterRule::Just(targets) => targets.iter().any(is_glob_pattern), } } } impl CompileFilter { /// Constructs a filter from raw command line arguments. pub fn from_raw_arguments( lib_only: bool, bins: Vec, all_bins: bool, tsts: Vec, all_tsts: bool, exms: Vec, all_exms: bool, bens: Vec, all_bens: bool, all_targets: bool, ) -> CompileFilter { if all_targets { return CompileFilter::new_all_targets(); } let rule_lib = if lib_only { LibRule::True } else { LibRule::False }; let rule_bins = FilterRule::new(bins, all_bins); let rule_tsts = FilterRule::new(tsts, all_tsts); let rule_exms = FilterRule::new(exms, all_exms); let rule_bens = FilterRule::new(bens, all_bens); CompileFilter::new(rule_lib, rule_bins, rule_tsts, rule_exms, rule_bens) } /// Constructs a filter from underlying primitives. pub fn new( rule_lib: LibRule, rule_bins: FilterRule, rule_tsts: FilterRule, rule_exms: FilterRule, rule_bens: FilterRule, ) -> CompileFilter { if rule_lib == LibRule::True || rule_bins.is_specific() || rule_tsts.is_specific() || rule_exms.is_specific() || rule_bens.is_specific() { CompileFilter::Only { all_targets: false, lib: rule_lib, bins: rule_bins, examples: rule_exms, benches: rule_bens, tests: rule_tsts, } } else { CompileFilter::Default { required_features_filterable: true, } } } /// Constructs a filter that includes all targets. pub fn new_all_targets() -> CompileFilter { CompileFilter::Only { all_targets: true, lib: LibRule::Default, bins: FilterRule::All, examples: FilterRule::All, benches: FilterRule::All, tests: FilterRule::All, } } /// Constructs a filter that includes all test targets. 
/// /// Being different from the behavior of [`CompileFilter::Default`], this /// function only recognizes test targets, which means cargo might compile /// all targets with `tested` flag on, whereas [`CompileFilter::Default`] /// may include additional example targets to ensure they can be compiled. /// /// Note that the actual behavior is subject to `filter_default_targets` /// and `generate_targets` though. pub fn all_test_targets() -> Self { Self::Only { all_targets: false, lib: LibRule::Default, bins: FilterRule::none(), examples: FilterRule::none(), tests: FilterRule::All, benches: FilterRule::none(), } } /// Constructs a filter that includes lib target only. pub fn lib_only() -> Self { Self::Only { all_targets: false, lib: LibRule::True, bins: FilterRule::none(), examples: FilterRule::none(), tests: FilterRule::none(), benches: FilterRule::none(), } } /// Constructs a filter that includes the given binary. No more. No less. pub fn single_bin(bin: String) -> Self { Self::Only { all_targets: false, lib: LibRule::False, bins: FilterRule::new(vec![bin], false), examples: FilterRule::none(), tests: FilterRule::none(), benches: FilterRule::none(), } } /// Indicates if Cargo needs to build any dev dependency. pub fn need_dev_deps(&self, mode: CompileMode) -> bool { match mode { CompileMode::Test | CompileMode::Doctest | CompileMode::Bench => true, CompileMode::Check { test: true } => true, CompileMode::Build | CompileMode::Doc { .. } | CompileMode::Docscrape | CompileMode::Check { test: false } => match *self { CompileFilter::Default { .. } => false, CompileFilter::Only { ref examples, ref tests, ref benches, .. } => examples.is_specific() || tests.is_specific() || benches.is_specific(), }, CompileMode::RunCustomBuild => panic!("Invalid mode"), } } /// Selects targets for "cargo run". for logic to select targets for other /// subcommands, see `generate_targets` and `filter_default_targets`. 
pub fn target_run(&self, target: &Target) -> bool { match *self { CompileFilter::Default { .. } => true, CompileFilter::Only { ref lib, ref bins, ref examples, ref tests, ref benches, .. } => { let rule = match *target.kind() { TargetKind::Bin => bins, TargetKind::Test => tests, TargetKind::Bench => benches, TargetKind::ExampleBin | TargetKind::ExampleLib(..) => examples, TargetKind::Lib(..) => { return match *lib { LibRule::True => true, LibRule::Default => true, LibRule::False => false, }; } TargetKind::CustomBuild => return false, }; rule.matches(target) } } } pub fn is_specific(&self) -> bool { match *self { CompileFilter::Default { .. } => false, CompileFilter::Only { .. } => true, } } pub fn is_all_targets(&self) -> bool { matches!( *self, CompileFilter::Only { all_targets: true, .. } ) } pub(crate) fn contains_glob_patterns(&self) -> bool { match self { CompileFilter::Default { .. } => false, CompileFilter::Only { bins, examples, tests, benches, .. } => { bins.contains_glob_patterns() || examples.contains_glob_patterns() || tests.contains_glob_patterns() || benches.contains_glob_patterns() } } } } /// A proposed target. /// /// Proposed targets are later filtered into actual `Unit`s based on whether or /// not the target requires its features to be present. #[derive(Debug)] struct Proposal<'a> { pkg: &'a Package, target: &'a Target, /// Indicates whether or not all required features *must* be present. If /// false, and the features are not available, then it will be silently /// skipped. Generally, targets specified by name (`--bin foo`) are /// required, all others can be silently skipped if features are missing. requires_features: bool, mode: CompileMode, } /// Generates all the base targets for the packages the user has requested to /// compile. Dependencies for these targets are computed later in `unit_dependencies`. 
fn generate_targets( ws: &Workspace<'_>, packages: &[&Package], filter: &CompileFilter, requested_kinds: &[CompileKind], explicit_host_kind: CompileKind, mode: CompileMode, resolve: &Resolve, workspace_resolve: &Option, resolved_features: &features::ResolvedFeatures, package_set: &PackageSet<'_>, profiles: &Profiles, interner: &UnitInterner, ) -> CargoResult> { let config = ws.config(); // Helper for creating a list of `Unit` structures let new_unit = |units: &mut HashSet, pkg: &Package, target: &Target, initial_target_mode: CompileMode| { // Custom build units are added in `build_unit_dependencies`. assert!(!target.is_custom_build()); let target_mode = match initial_target_mode { CompileMode::Test => { if target.is_example() && !filter.is_specific() && !target.tested() { // Examples are included as regular binaries to verify // that they compile. CompileMode::Build } else { CompileMode::Test } } CompileMode::Build => match *target.kind() { TargetKind::Test => CompileMode::Test, TargetKind::Bench => CompileMode::Bench, _ => CompileMode::Build, }, // `CompileMode::Bench` is only used to inform `filter_default_targets` // which command is being used (`cargo bench`). Afterwards, tests // and benches are treated identically. Switching the mode allows // de-duplication of units that are essentially identical. For // example, `cargo build --all-targets --release` creates the units // (lib profile:bench, mode:test) and (lib profile:bench, mode:bench) // and since these are the same, we want them to be de-duplicated in // `unit_dependencies`. CompileMode::Bench => CompileMode::Test, _ => initial_target_mode, }; let is_local = pkg.package_id().source_id().is_path(); // No need to worry about build-dependencies, roots are never build dependencies. 
let features_for = FeaturesFor::from_for_host(target.proc_macro()); let features = resolved_features.activated_features(pkg.package_id(), features_for); // If `--target` has not been specified, then the unit // graph is built almost like if `--target $HOST` was // specified. See `rebuild_unit_graph_shared` for more on // why this is done. However, if the package has its own // `package.target` key, then this gets used instead of // `$HOST` let explicit_kinds = if let Some(k) = pkg.manifest().forced_kind() { vec![k] } else { requested_kinds .iter() .map(|kind| match kind { CompileKind::Host => { pkg.manifest().default_kind().unwrap_or(explicit_host_kind) } CompileKind::Target(t) => CompileKind::Target(*t), }) .collect() }; for kind in explicit_kinds.iter() { let unit_for = if initial_target_mode.is_any_test() { // NOTE: the `UnitFor` here is subtle. If you have a profile // with `panic` set, the `panic` flag is cleared for // tests/benchmarks and their dependencies. If this // was `normal`, then the lib would get compiled three // times (once with panic, once without, and once with // `--test`). // // This would cause a problem for doc tests, which would fail // because `rustdoc` would attempt to link with both libraries // at the same time. Also, it's probably not important (or // even desirable?) for rustdoc to link with a lib with // `panic` set. // // As a consequence, Examples and Binaries get compiled // without `panic` set. This probably isn't a bad deal. // // Forcing the lib to be compiled three times during `cargo // test` is probably also not desirable. UnitFor::new_test(config, *kind) } else if target.for_host() { // Proc macro / plugin should not have `panic` set. 
UnitFor::new_compiler(*kind) } else { UnitFor::new_normal(*kind) }; let profile = profiles.get_profile( pkg.package_id(), ws.is_member(pkg), is_local, unit_for, *kind, ); let unit = interner.intern( pkg, target, profile, kind.for_target(target), target_mode, features.clone(), /*is_std*/ false, /*dep_hash*/ 0, IsArtifact::No, ); units.insert(unit); } }; // Create a list of proposed targets. let mut proposals: Vec> = Vec::new(); match *filter { CompileFilter::Default { required_features_filterable, } => { for pkg in packages { let default = filter_default_targets(pkg.targets(), mode); proposals.extend(default.into_iter().map(|target| Proposal { pkg, target, requires_features: !required_features_filterable, mode, })); if mode == CompileMode::Test { if let Some(t) = pkg .targets() .iter() .find(|t| t.is_lib() && t.doctested() && t.doctestable()) { proposals.push(Proposal { pkg, target: t, requires_features: false, mode: CompileMode::Doctest, }); } } } } CompileFilter::Only { all_targets, ref lib, ref bins, ref examples, ref tests, ref benches, } => { if *lib != LibRule::False { let mut libs = Vec::new(); for proposal in filter_targets(packages, Target::is_lib, false, mode) { let Proposal { target, pkg, .. } = proposal; if mode.is_doc_test() && !target.doctestable() { let types = target.rustc_crate_types(); let types_str: Vec<&str> = types.iter().map(|t| t.as_str()).collect(); ws.config().shell().warn(format!( "doc tests are not supported for crate type(s) `{}` in package `{}`", types_str.join(", "), pkg.name() ))?; } else { libs.push(proposal) } } if !all_targets && libs.is_empty() && *lib == LibRule::True { let names = packages.iter().map(|pkg| pkg.name()).collect::>(); if names.len() == 1 { anyhow::bail!("no library targets found in package `{}`", names[0]); } else { anyhow::bail!("no library targets found in packages: {}", names.join(", ")); } } proposals.extend(libs); } // If `--tests` was specified, add all targets that would be // generated by `cargo test`. 
let test_filter = match tests { FilterRule::All => Target::tested, FilterRule::Just(_) => Target::is_test, }; let test_mode = match mode { CompileMode::Build => CompileMode::Test, CompileMode::Check { .. } => CompileMode::Check { test: true }, _ => mode, }; // If `--benches` was specified, add all targets that would be // generated by `cargo bench`. let bench_filter = match benches { FilterRule::All => Target::benched, FilterRule::Just(_) => Target::is_bench, }; let bench_mode = match mode { CompileMode::Build => CompileMode::Bench, CompileMode::Check { .. } => CompileMode::Check { test: true }, _ => mode, }; proposals.extend(list_rule_targets( packages, bins, "bin", Target::is_bin, mode, )?); proposals.extend(list_rule_targets( packages, examples, "example", Target::is_example, mode, )?); proposals.extend(list_rule_targets( packages, tests, "test", test_filter, test_mode, )?); proposals.extend(list_rule_targets( packages, benches, "bench", bench_filter, bench_mode, )?); } } // Only include targets that are libraries or have all required // features available. // // `features_map` is a map of &Package -> enabled_features // It is computed by the set of enabled features for the package plus // every enabled feature of every enabled dependency. let mut features_map = HashMap::new(); // This needs to be a set to de-duplicate units. Due to the way the // targets are filtered, it is possible to have duplicate proposals for // the same thing. 
let mut units = HashSet::new(); for Proposal { pkg, target, requires_features, mode, } in proposals { let unavailable_features = match target.required_features() { Some(rf) => { validate_required_features( workspace_resolve, target.name(), rf, pkg.summary(), &mut config.shell(), )?; let features = features_map.entry(pkg).or_insert_with(|| { resolve_all_features(resolve, resolved_features, package_set, pkg.package_id()) }); rf.iter().filter(|f| !features.contains(*f)).collect() } None => Vec::new(), }; if target.is_lib() || unavailable_features.is_empty() { new_unit(&mut units, pkg, target, mode); } else if requires_features { let required_features = target.required_features().unwrap(); let quoted_required_features: Vec = required_features .iter() .map(|s| format!("`{}`", s)) .collect(); anyhow::bail!( "target `{}` in package `{}` requires the features: {}\n\ Consider enabling them by passing, e.g., `--features=\"{}\"`", target.name(), pkg.name(), quoted_required_features.join(", "), required_features.join(" ") ); } // else, silently skip target. } let mut units: Vec<_> = units.into_iter().collect(); unmatched_target_filters(&units, filter, &mut ws.config().shell())?; // Keep the roots in a consistent order, which helps with checking test output. units.sort_unstable(); Ok(units) } /// Checks if the unit list is empty and the user has passed any combination of /// --tests, --examples, --benches or --bins, and we didn't match on any targets. 
/// We want to emit a warning to make sure the user knows that this run is a no-op, /// and their code remains unchecked despite cargo not returning any errors fn unmatched_target_filters( units: &[Unit], filter: &CompileFilter, shell: &mut Shell, ) -> CargoResult<()> { if let CompileFilter::Only { all_targets, lib: _, ref bins, ref examples, ref tests, ref benches, } = *filter { if units.is_empty() { let mut filters = String::new(); let mut miss_count = 0; let mut append = |t: &FilterRule, s| { if let FilterRule::All = *t { miss_count += 1; filters.push_str(s); } }; if all_targets { filters.push_str(" `all-targets`"); } else { append(bins, " `bins`,"); append(tests, " `tests`,"); append(examples, " `examples`,"); append(benches, " `benches`,"); filters.pop(); } return shell.warn(format!( "Target {}{} specified, but no targets matched. This is a no-op", if miss_count > 1 { "filters" } else { "filter" }, filters, )); } } Ok(()) } /// Warns if a target's required-features references a feature that doesn't exist. /// /// This is a warning because historically this was not validated, and it /// would cause too much breakage to make it an error. fn validate_required_features( resolve: &Option, target_name: &str, required_features: &[String], summary: &Summary, shell: &mut Shell, ) -> CargoResult<()> { let resolve = match resolve { None => return Ok(()), Some(resolve) => resolve, }; for feature in required_features { let fv = FeatureValue::new(feature.into()); match &fv { FeatureValue::Feature(f) => { if !summary.features().contains_key(f) { shell.warn(format!( "invalid feature `{}` in required-features of target `{}`: \ `{}` is not present in [features] section", fv, target_name, fv ))?; } } FeatureValue::Dep { .. } => { anyhow::bail!( "invalid feature `{}` in required-features of target `{}`: \ `dep:` prefixed feature values are not allowed in required-features", fv, target_name ); } FeatureValue::DepFeature { weak: true, .. 
} => { anyhow::bail!( "invalid feature `{}` in required-features of target `{}`: \ optional dependency with `?` is not allowed in required-features", fv, target_name ); } // Handling of dependent_crate/dependent_crate_feature syntax FeatureValue::DepFeature { dep_name, dep_feature, weak: false, } => { match resolve .deps(summary.package_id()) .find(|(_dep_id, deps)| deps.iter().any(|dep| dep.name_in_toml() == *dep_name)) { Some((dep_id, _deps)) => { let dep_summary = resolve.summary(dep_id); if !dep_summary.features().contains_key(dep_feature) && !dep_summary .dependencies() .iter() .any(|dep| dep.name_in_toml() == *dep_feature && dep.is_optional()) { shell.warn(format!( "invalid feature `{}` in required-features of target `{}`: \ feature `{}` does not exist in package `{}`", fv, target_name, dep_feature, dep_id ))?; } } None => { shell.warn(format!( "invalid feature `{}` in required-features of target `{}`: \ dependency `{}` does not exist", fv, target_name, dep_name ))?; } } } } } Ok(()) } /// Gets all of the features enabled for a package, plus its dependencies' /// features. /// /// Dependencies are added as `dep_name/feat_name` because `required-features` /// wants to support that syntax. pub fn resolve_all_features( resolve_with_overrides: &Resolve, resolved_features: &features::ResolvedFeatures, package_set: &PackageSet<'_>, package_id: PackageId, ) -> HashSet { let mut features: HashSet = resolved_features .activated_features(package_id, FeaturesFor::NormalOrDevOrArtifactTarget(None)) .iter() .map(|s| s.to_string()) .collect(); // Include features enabled for use by dependencies so targets can also use them with the // required-features field when deciding whether to be built or skipped. 
for (dep_id, deps) in resolve_with_overrides.deps(package_id) { let is_proc_macro = package_set .get_one(dep_id) .expect("packages downloaded") .proc_macro(); for dep in deps { let features_for = FeaturesFor::from_for_host(is_proc_macro || dep.is_build()); for feature in resolved_features .activated_features_unverified(dep_id, features_for) .unwrap_or_default() { features.insert(format!("{}/{}", dep.name_in_toml(), feature)); } } } features } /// Given a list of all targets for a package, filters out only the targets /// that are automatically included when the user doesn't specify any targets. fn filter_default_targets(targets: &[Target], mode: CompileMode) -> Vec<&Target> { match mode { CompileMode::Bench => targets.iter().filter(|t| t.benched()).collect(), CompileMode::Test => targets .iter() .filter(|t| t.tested() || t.is_example()) .collect(), CompileMode::Build | CompileMode::Check { .. } => targets .iter() .filter(|t| t.is_bin() || t.is_lib()) .collect(), CompileMode::Doc { .. } => { // `doc` does lib and bins (bin with same name as lib is skipped). targets .iter() .filter(|t| { t.documented() && (!t.is_bin() || !targets.iter().any(|l| l.is_lib() && l.name() == t.name())) }) .collect() } CompileMode::Doctest | CompileMode::Docscrape | CompileMode::RunCustomBuild => { panic!("Invalid mode {:?}", mode) } } } /// Returns a list of proposed targets based on command-line target selection flags. fn list_rule_targets<'a>( packages: &[&'a Package], rule: &FilterRule, target_desc: &'static str, is_expected_kind: fn(&Target) -> bool, mode: CompileMode, ) -> CargoResult>> { let mut proposals = Vec::new(); match rule { FilterRule::All => { proposals.extend(filter_targets(packages, is_expected_kind, false, mode)) } FilterRule::Just(names) => { for name in names { proposals.extend(find_named_targets( packages, name, target_desc, is_expected_kind, mode, )?); } } } Ok(proposals) } /// Finds the targets for a specifically named target. 
fn find_named_targets<'a>( packages: &[&'a Package], target_name: &str, target_desc: &'static str, is_expected_kind: fn(&Target) -> bool, mode: CompileMode, ) -> CargoResult>> { let is_glob = is_glob_pattern(target_name); let proposals = if is_glob { let pattern = build_glob(target_name)?; let filter = |t: &Target| is_expected_kind(t) && pattern.matches(t.name()); filter_targets(packages, filter, true, mode) } else { let filter = |t: &Target| t.name() == target_name && is_expected_kind(t); filter_targets(packages, filter, true, mode) }; if proposals.is_empty() { let targets = packages .iter() .flat_map(|pkg| { pkg.targets() .iter() .filter(|target| is_expected_kind(target)) }) .collect::>(); let suggestion = closest_msg(target_name, targets.iter(), |t| t.name()); if !suggestion.is_empty() { anyhow::bail!( "no {} target {} `{}`{}", target_desc, if is_glob { "matches pattern" } else { "named" }, target_name, suggestion ); } else { let mut msg = String::new(); writeln!( msg, "no {} target {} `{}`.", target_desc, if is_glob { "matches pattern" } else { "named" }, target_name, )?; if !targets.is_empty() { writeln!(msg, "Available {} targets:", target_desc)?; for target in targets { writeln!(msg, " {}", target.name())?; } } anyhow::bail!(msg); } } Ok(proposals) } fn filter_targets<'a>( packages: &[&'a Package], predicate: impl Fn(&Target) -> bool, requires_features: bool, mode: CompileMode, ) -> Vec> { let mut proposals = Vec::new(); for pkg in packages { for target in pkg.targets().iter().filter(|t| predicate(t)) { proposals.push(Proposal { pkg, target, requires_features, mode, }); } } proposals } /// This is used to rebuild the unit graph, sharing host dependencies if possible. /// /// This will translate any unit's `CompileKind::Target(host)` to /// `CompileKind::Host` if the kind is equal to `to_host`. This also handles /// generating the unit `dep_hash`, and merging shared units if possible. 
///
/// This is necessary because if normal dependencies used `CompileKind::Host`,
/// there would be no way to distinguish those units from build-dependency
/// units. This can cause a problem if a shared normal/build dependency needs
/// to link to another dependency whose features differ based on whether or
/// not it is a normal or build dependency. If both units used
/// `CompileKind::Host`, then they would end up being identical, causing a
/// collision in the `UnitGraph`, and Cargo would end up randomly choosing one
/// value or the other.
///
/// The solution is to keep normal and build dependencies separate when
/// building the unit graph, and then run this second pass which will try to
/// combine shared dependencies safely. By adding a hash of the dependencies
/// to the `Unit`, this allows the `CompileKind` to be changed back to `Host`
/// without fear of an unwanted collision.
fn rebuild_unit_graph_shared(
    interner: &UnitInterner,
    unit_graph: UnitGraph,
    roots: &[Unit],
    scrape_units: &[Unit],
    to_host: CompileKind,
) -> (Vec<Unit>, Vec<Unit>, UnitGraph) {
    let mut result = UnitGraph::new();
    // Map of the old unit to the new unit, used to avoid recursing into units
    // that have already been computed to improve performance.
    let mut memo = HashMap::new();
    let new_roots = roots
        .iter()
        .map(|root| {
            traverse_and_share(interner, &mut memo, &mut result, &unit_graph, root, to_host)
        })
        .collect();
    // Scrape units are always reachable from the roots, so the traversal
    // above has already placed their replacements in `memo`.
    let new_scrape_units = scrape_units
        .iter()
        .map(|unit| memo.get(unit).unwrap().clone())
        .collect();
    (new_roots, new_scrape_units, result)
}

/// Recursive function for rebuilding the graph.
///
/// This walks `unit_graph`, starting at the given `unit`. It inserts the new
/// units into `new_graph`, and returns a new updated version of the given
/// unit (`dep_hash` is filled in, and `kind` switched if necessary).
fn traverse_and_share( interner: &UnitInterner, memo: &mut HashMap, new_graph: &mut UnitGraph, unit_graph: &UnitGraph, unit: &Unit, to_host: CompileKind, ) -> Unit { if let Some(new_unit) = memo.get(unit) { // Already computed, no need to recompute. return new_unit.clone(); } let mut dep_hash = StableHasher::new(); let new_deps: Vec<_> = unit_graph[unit] .iter() .map(|dep| { let new_dep_unit = traverse_and_share(interner, memo, new_graph, unit_graph, &dep.unit, to_host); new_dep_unit.hash(&mut dep_hash); UnitDep { unit: new_dep_unit, ..dep.clone() } }) .collect(); let new_dep_hash = dep_hash.finish(); let new_kind = if unit.kind == to_host { CompileKind::Host } else { unit.kind }; let new_unit = interner.intern( &unit.pkg, &unit.target, unit.profile.clone(), new_kind, unit.mode, unit.features.clone(), unit.is_std, new_dep_hash, unit.artifact, ); assert!(memo.insert(unit.clone(), new_unit.clone()).is_none()); new_graph.entry(new_unit.clone()).or_insert(new_deps); new_unit } /// Build `glob::Pattern` with informative context. fn build_glob(pat: &str) -> CargoResult { glob::Pattern::new(pat).with_context(|| format!("cannot build glob pattern from `{}`", pat)) } /// Emits "package not found" error. /// /// > This function should be used only in package selection processes such like /// `Packages::to_package_id_specs` and `Packages::get_packages`. fn emit_package_not_found( ws: &Workspace<'_>, opt_names: BTreeSet<&str>, opt_out: bool, ) -> CargoResult<()> { if !opt_names.is_empty() { anyhow::bail!( "{}package(s) `{}` not found in workspace `{}`", if opt_out { "excluded " } else { "" }, opt_names.into_iter().collect::>().join(", "), ws.root().display(), ) } Ok(()) } /// Emits "glob pattern not found" error. /// /// > This function should be used only in package selection processes such like /// `Packages::to_package_id_specs` and `Packages::get_packages`. 
fn emit_pattern_not_found( ws: &Workspace<'_>, opt_patterns: Vec<(glob::Pattern, bool)>, opt_out: bool, ) -> CargoResult<()> { let not_matched = opt_patterns .iter() .filter(|(_, matched)| !*matched) .map(|(pat, _)| pat.as_str()) .collect::>(); if !not_matched.is_empty() { anyhow::bail!( "{}package pattern(s) `{}` not found in workspace `{}`", if opt_out { "excluded " } else { "" }, not_matched.join(", "), ws.root().display(), ) } Ok(()) } /// Checks whether a package matches any of a list of glob patterns generated /// from `opt_patterns_and_names`. /// /// > This function should be used only in package selection processes such like /// `Packages::to_package_id_specs` and `Packages::get_packages`. fn match_patterns(pkg: &Package, patterns: &mut Vec<(glob::Pattern, bool)>) -> bool { patterns.iter_mut().any(|(m, matched)| { let is_matched = m.matches(pkg.name().as_str()); *matched |= is_matched; is_matched }) } /// Given a list opt-in or opt-out package selection strings, generates two /// collections that represent glob patterns and package names respectively. /// /// > This function should be used only in package selection processes such like /// `Packages::to_package_id_specs` and `Packages::get_packages`. fn opt_patterns_and_names( opt: &[String], ) -> CargoResult<(Vec<(glob::Pattern, bool)>, BTreeSet<&str>)> { let mut opt_patterns = Vec::new(); let mut opt_names = BTreeSet::new(); for x in opt.iter() { if is_glob_pattern(x) { opt_patterns.push((build_glob(x)?, false)); } else { opt_names.insert(String::as_str(x)); } } Ok((opt_patterns, opt_names)) } /// Removes duplicate CompileMode::Doc units that would cause problems with /// filename collisions. /// /// Rustdoc only separates units by crate name in the file directory /// structure. If any two units with the same crate name exist, this would /// cause a filename collision, causing different rustdoc invocations to stomp /// on one another's files. 
/// /// Unfortunately this does not remove all duplicates, as some of them are /// either user error, or difficult to remove. Cases that I can think of: /// /// - Same target name in different packages. See the `collision_doc` test. /// - Different sources. See `collision_doc_sources` test. /// /// Ideally this would not be necessary. fn remove_duplicate_doc( build_config: &BuildConfig, root_units: &[Unit], unit_graph: &mut UnitGraph, ) { // First, create a mapping of crate_name -> Unit so we can see where the // duplicates are. let mut all_docs: HashMap> = HashMap::new(); for unit in unit_graph.keys() { if unit.mode.is_doc() { all_docs .entry(unit.target.crate_name()) .or_default() .push(unit.clone()); } } // Keep track of units to remove so that they can be efficiently removed // from the unit_deps. let mut removed_units: HashSet = HashSet::new(); let mut remove = |units: Vec, reason: &str, cb: &dyn Fn(&Unit) -> bool| -> Vec { let (to_remove, remaining_units): (Vec, Vec) = units .into_iter() .partition(|unit| cb(unit) && !root_units.contains(unit)); for unit in to_remove { log::debug!( "removing duplicate doc due to {} for package {} target `{}`", reason, unit.pkg, unit.target.name() ); unit_graph.remove(&unit); removed_units.insert(unit); } remaining_units }; // Iterate over the duplicates and try to remove them from unit_graph. for (_crate_name, mut units) in all_docs { if units.len() == 1 { continue; } // Prefer target over host if --target was not specified. if build_config .requested_kinds .iter() .all(CompileKind::is_host) { // Note these duplicates may not be real duplicates, since they // might get merged in rebuild_unit_graph_shared. Either way, it // shouldn't hurt to remove them early (although the report in the // log might be confusing). units = remove(units, "host/target merger", &|unit| unit.kind.is_host()); if units.len() == 1 { continue; } } // Prefer newer versions over older. 
let mut source_map: HashMap<(InternedString, SourceId, CompileKind), Vec> = HashMap::new(); for unit in units { let pkg_id = unit.pkg.package_id(); // Note, this does not detect duplicates from different sources. source_map .entry((pkg_id.name(), pkg_id.source_id(), unit.kind)) .or_default() .push(unit); } let mut remaining_units = Vec::new(); for (_key, mut units) in source_map { if units.len() > 1 { units.sort_by(|a, b| a.pkg.version().partial_cmp(b.pkg.version()).unwrap()); // Remove any entries with version < newest. let newest_version = units.last().unwrap().pkg.version().clone(); let keep_units = remove(units, "older version", &|unit| { unit.pkg.version() < &newest_version }); remaining_units.extend(keep_units); } else { remaining_units.extend(units); } } if remaining_units.len() == 1 { continue; } // Are there other heuristics to remove duplicates that would make // sense? Maybe prefer path sources over all others? } // Also remove units from the unit_deps so there aren't any dangling edges. for unit_deps in unit_graph.values_mut() { unit_deps.retain(|unit_dep| !removed_units.contains(&unit_dep.unit)); } // Remove any orphan units that were detached from the graph. let mut visited = HashSet::new(); fn visit(unit: &Unit, graph: &UnitGraph, visited: &mut HashSet) { if !visited.insert(unit.clone()) { return; } for dep in &graph[unit] { visit(&dep.unit, graph, visited); } } for unit in root_units { visit(unit, unit_graph, &mut visited); } unit_graph.retain(|unit, _| visited.contains(unit)); } /// Override crate types for given units. /// /// This is primarily used by `cargo rustc --crate-type`. 
fn override_rustc_crate_types( units: &mut [Unit], args: &[String], interner: &UnitInterner, ) -> CargoResult<()> { if units.len() != 1 { anyhow::bail!( "crate types to rustc can only be passed to one \ target, consider filtering\nthe package by passing, \ e.g., `--lib` or `--example` to specify a single target" ); } let unit = &units[0]; let override_unit = |f: fn(Vec) -> TargetKind| { let crate_types = args.iter().map(|s| s.into()).collect(); let mut target = unit.target.clone(); target.set_kind(f(crate_types)); interner.intern( &unit.pkg, &target, unit.profile.clone(), unit.kind, unit.mode, unit.features.clone(), unit.is_std, unit.dep_hash, unit.artifact, ) }; units[0] = match unit.target.kind() { TargetKind::Lib(_) => override_unit(TargetKind::Lib), TargetKind::ExampleLib(_) => override_unit(TargetKind::ExampleLib), _ => { anyhow::bail!( "crate types can only be specified for libraries and example libraries.\n\ Binaries, tests, and benchmarks are always the `bin` crate type" ); } }; Ok(()) } cargo-0.66.0/src/cargo/ops/cargo_config.rs000066400000000000000000000243731432416201200203530ustar00rootroot00000000000000//! Implementation of `cargo config` subcommand. use crate::util::config::{Config, ConfigKey, ConfigValue as CV, Definition}; use crate::util::errors::CargoResult; use crate::{drop_eprintln, drop_println}; use anyhow::{bail, format_err, Error}; use serde_json::json; use std::borrow::Cow; use std::fmt; use std::str::FromStr; pub enum ConfigFormat { Toml, Json, JsonValue, } impl ConfigFormat { /// For clap. 
pub const POSSIBLE_VALUES: [&'static str; 3] = ["toml", "json", "json-value"]; } impl FromStr for ConfigFormat { type Err = Error; fn from_str(s: &str) -> CargoResult { match s { "toml" => Ok(ConfigFormat::Toml), "json" => Ok(ConfigFormat::Json), "json-value" => Ok(ConfigFormat::JsonValue), f => bail!("unknown config format `{}`", f), } } } impl fmt::Display for ConfigFormat { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { ConfigFormat::Toml => write!(f, "toml"), ConfigFormat::Json => write!(f, "json"), ConfigFormat::JsonValue => write!(f, "json-value"), } } } /// Options for `cargo config get`. pub struct GetOptions<'a> { pub key: Option<&'a str>, pub format: ConfigFormat, pub show_origin: bool, pub merged: bool, } pub fn get(config: &Config, opts: &GetOptions<'_>) -> CargoResult<()> { if opts.show_origin && !matches!(opts.format, ConfigFormat::Toml) { bail!( "the `{}` format does not support --show-origin, try the `toml` format instead", opts.format ); } let key = match opts.key { Some(key) => ConfigKey::from_str(key), None => ConfigKey::new(), }; if opts.merged { let cv = config .get_cv_with_env(&key)? .ok_or_else(|| format_err!("config value `{}` is not set", key))?; match opts.format { ConfigFormat::Toml => print_toml(config, opts, &key, &cv), ConfigFormat::Json => print_json(config, &key, &cv, true), ConfigFormat::JsonValue => print_json(config, &key, &cv, false), } if let Some(env) = maybe_env(config, &key, &cv) { match opts.format { ConfigFormat::Toml => print_toml_env(config, &env), ConfigFormat::Json | ConfigFormat::JsonValue => print_json_env(config, &env), } } } else { match &opts.format { ConfigFormat::Toml => print_toml_unmerged(config, opts, &key)?, format => bail!( "the `{}` format does not support --merged=no, try the `toml` format instead", format ), } } Ok(()) } /// Checks for environment variables that might be used. 
fn maybe_env<'config>( config: &'config Config, key: &ConfigKey, cv: &CV, ) -> Option> { // Only fetching a table is unable to load env values. Leaf entries should // work properly. match cv { CV::Table(_map, _def) => {} _ => return None, } let mut env: Vec<_> = config .env() .iter() .filter(|(env_key, _val)| env_key.starts_with(&format!("{}_", key.as_env_key()))) .collect(); env.sort_by_key(|x| x.0); if env.is_empty() { None } else { Some(env) } } fn print_toml(config: &Config, opts: &GetOptions<'_>, key: &ConfigKey, cv: &CV) { let origin = |def: &Definition| -> String { if !opts.show_origin { return "".to_string(); } format!(" # {}", def) }; match cv { CV::Boolean(val, def) => drop_println!(config, "{} = {}{}", key, val, origin(def)), CV::Integer(val, def) => drop_println!(config, "{} = {}{}", key, val, origin(def)), CV::String(val, def) => drop_println!( config, "{} = {}{}", key, toml_edit::Value::from(val), origin(def) ), CV::List(vals, _def) => { if opts.show_origin { drop_println!(config, "{} = [", key); for (val, def) in vals { drop_println!( config, " {}, # {}", toml_edit::ser::to_item(&val).unwrap(), def ); } drop_println!(config, "]"); } else { let vals: toml_edit::Array = vals.iter().map(|x| &x.0).collect(); drop_println!(config, "{} = {}", key, vals); } } CV::Table(table, _def) => { let mut key_vals: Vec<_> = table.iter().collect(); key_vals.sort_by(|a, b| a.0.cmp(b.0)); for (table_key, val) in key_vals { let mut subkey = key.clone(); // push or push_sensitive shouldn't matter here, since this is // not dealing with environment variables. subkey.push(table_key); print_toml(config, opts, &subkey, val); } } } } fn print_toml_env(config: &Config, env: &[(&String, &String)]) { drop_println!( config, "# The following environment variables may affect the loaded values." 
); for (env_key, env_value) in env { let val = shell_escape::escape(Cow::Borrowed(env_value)); drop_println!(config, "# {}={}", env_key, val); } } fn print_json_env(config: &Config, env: &[(&String, &String)]) { drop_eprintln!( config, "note: The following environment variables may affect the loaded values." ); for (env_key, env_value) in env { let val = shell_escape::escape(Cow::Borrowed(env_value)); drop_eprintln!(config, "{}={}", env_key, val); } } fn print_json(config: &Config, key: &ConfigKey, cv: &CV, include_key: bool) { let json_value = if key.is_root() || !include_key { cv_to_json(cv) } else { let mut parts: Vec<_> = key.parts().collect(); let last_part = parts.pop().unwrap(); let mut root_table = json!({}); // Create a JSON object with nested keys up to the value being displayed. let mut table = &mut root_table; for part in parts { table[part] = json!({}); table = table.get_mut(part).unwrap(); } table[last_part] = cv_to_json(cv); root_table }; drop_println!(config, "{}", serde_json::to_string(&json_value).unwrap()); // Helper for recursively converting a CV to JSON. fn cv_to_json(cv: &CV) -> serde_json::Value { match cv { CV::Boolean(val, _def) => json!(val), CV::Integer(val, _def) => json!(val), CV::String(val, _def) => json!(val), CV::List(vals, _def) => { let jvals: Vec<_> = vals.iter().map(|(val, _def)| json!(val)).collect(); json!(jvals) } CV::Table(map, _def) => { let mut table = json!({}); for (key, val) in map { table[key] = cv_to_json(val); } table } } } } fn print_toml_unmerged(config: &Config, opts: &GetOptions<'_>, key: &ConfigKey) -> CargoResult<()> { let print_table = |cv: &CV| { drop_println!(config, "# {}", cv.definition()); print_toml(config, opts, &ConfigKey::new(), cv); drop_println!(config, ""); }; // This removes entries from the given CV so that all that remains is the // given key. Returns false if no entries were found. 
fn trim_cv(mut cv: &mut CV, key: &ConfigKey) -> CargoResult { for (i, part) in key.parts().enumerate() { match cv { CV::Table(map, _def) => { map.retain(|key, _value| key == part); match map.get_mut(part) { Some(val) => cv = val, None => return Ok(false), } } _ => { let mut key_so_far = ConfigKey::new(); for part in key.parts().take(i) { key_so_far.push(part); } bail!( "expected table for configuration key `{}`, \ but found {} in {}", key_so_far, cv.desc(), cv.definition() ) } } } Ok(match cv { CV::Table(map, _def) => !map.is_empty(), _ => true, }) } let mut cli_args = config.cli_args_as_table()?; if trim_cv(&mut cli_args, key)? { print_table(&cli_args); } // This slurps up some extra env vars that aren't technically part of the // "config" (or are special-cased). I'm personally fine with just keeping // them here, though it might be confusing. The vars I'm aware of: // // * CARGO // * CARGO_HOME // * CARGO_NAME // * CARGO_EMAIL // * CARGO_INCREMENTAL // * CARGO_TARGET_DIR // * CARGO_CACHE_RUSTC_INFO // // All of these except CARGO, CARGO_HOME, and CARGO_CACHE_RUSTC_INFO are // actually part of the config, but they are special-cased in the code. // // TODO: It might be a good idea to teach the Config loader to support // environment variable aliases so that these special cases are less // special, and will just naturally get loaded as part of the config. let mut env: Vec<_> = config .env() .iter() .filter(|(env_key, _val)| env_key.starts_with(key.as_env_key())) .collect(); if !env.is_empty() { env.sort_by_key(|x| x.0); drop_println!(config, "# Environment variables"); for (key, value) in env { // Displaying this in "shell" syntax instead of TOML, since that // somehow makes more sense to me. let val = shell_escape::escape(Cow::Borrowed(value)); drop_println!(config, "# {}={}", key, val); } drop_println!(config, ""); } let unmerged = config.load_values_unmerged()?; for mut cv in unmerged { if trim_cv(&mut cv, key)? 
{ print_table(&cv); } } Ok(()) } cargo-0.66.0/src/cargo/ops/cargo_doc.rs000066400000000000000000000044621432416201200176500ustar00rootroot00000000000000use crate::core::{Shell, Workspace}; use crate::ops; use crate::util::config::PathAndArgs; use crate::util::CargoResult; use std::path::Path; use std::path::PathBuf; use std::process::Command; /// Strongly typed options for the `cargo doc` command. #[derive(Debug)] pub struct DocOptions { /// Whether to attempt to open the browser after compiling the docs pub open_result: bool, /// Options to pass through to the compiler pub compile_opts: ops::CompileOptions, } /// Main method for `cargo doc`. pub fn doc(ws: &Workspace<'_>, options: &DocOptions) -> CargoResult<()> { let compilation = ops::compile(ws, &options.compile_opts)?; if options.open_result { let name = &compilation .root_crate_names .get(0) .ok_or_else(|| anyhow::anyhow!("no crates with documentation"))?; let kind = options.compile_opts.build_config.single_requested_kind()?; let path = compilation.root_output[&kind] .with_file_name("doc") .join(&name) .join("index.html"); if path.exists() { let config_browser = { let cfg: Option = ws.config().get("doc.browser")?; cfg.map(|path_args| (path_args.path.resolve_program(ws.config()), path_args.args)) }; let mut shell = ws.config().shell(); shell.status("Opening", path.display())?; open_docs(&path, &mut shell, config_browser)?; } } Ok(()) } fn open_docs( path: &Path, shell: &mut Shell, config_browser: Option<(PathBuf, Vec)>, ) -> CargoResult<()> { let browser = config_browser.or_else(|| Some((PathBuf::from(std::env::var_os("BROWSER")?), Vec::new()))); match browser { Some((browser, initial_args)) => { if let Err(e) = Command::new(&browser).args(initial_args).arg(path).status() { shell.warn(format!( "Couldn't open docs with {}: {}", browser.to_string_lossy(), e ))?; } } None => { if let Err(e) = opener::open(&path) { let e = e.into(); crate::display_warning_with_error("couldn't open docs", &e, shell); } } }; Ok(()) 
} cargo-0.66.0/src/cargo/ops/cargo_fetch.rs000066400000000000000000000052551432416201200201750ustar00rootroot00000000000000use crate::core::compiler::standard_lib; use crate::core::compiler::{BuildConfig, CompileMode, RustcTargetData}; use crate::core::{PackageSet, Resolve, Workspace}; use crate::ops; use crate::util::CargoResult; use crate::util::Config; use std::collections::HashSet; pub struct FetchOptions<'a> { pub config: &'a Config, /// The target arch triple to fetch dependencies for pub targets: Vec, } /// Executes `cargo fetch`. pub fn fetch<'a>( ws: &Workspace<'a>, options: &FetchOptions<'a>, ) -> CargoResult<(Resolve, PackageSet<'a>)> { ws.emit_warnings()?; let (mut packages, resolve) = ops::resolve_ws(ws)?; let jobs = Some(1); let keep_going = false; let config = ws.config(); let build_config = BuildConfig::new( config, jobs, keep_going, &options.targets, CompileMode::Build, )?; let data = RustcTargetData::new(ws, &build_config.requested_kinds)?; let mut fetched_packages = HashSet::new(); let mut deps_to_fetch = ws.members().map(|p| p.package_id()).collect::>(); let mut to_download = Vec::new(); while let Some(id) = deps_to_fetch.pop() { if !fetched_packages.insert(id) { continue; } to_download.push(id); let deps = resolve .deps(id) .filter(|&(_id, deps)| { deps.iter().any(|d| { // If no target was specified then all dependencies are // fetched. if options.targets.is_empty() { return true; } // Otherwise we only download this dependency if any of the // requested platforms would match this dependency. Note // that this is a bit lossy because not all dependencies are // always compiled for all platforms, but it should be // "close enough" for now. build_config .requested_kinds .iter() .any(|kind| data.dep_platform_activated(d, *kind)) }) }) .map(|(id, _deps)| id); deps_to_fetch.extend(deps); } // If -Zbuild-std was passed, download dependencies for the standard library. // We don't know ahead of time what jobs we'll be running, so tell `std_crates` that. 
if let Some(crates) = standard_lib::std_crates(config, None) { let (std_package_set, _, _) = standard_lib::resolve_std(ws, &data, &build_config, &crates)?; packages.add_set(std_package_set); } packages.get_many(to_download)?; Ok((resolve, packages)) } cargo-0.66.0/src/cargo/ops/cargo_generate_lockfile.rs000066400000000000000000000230331432416201200225400ustar00rootroot00000000000000use crate::core::registry::PackageRegistry; use crate::core::resolver::features::{CliFeatures, HasDevUnits}; use crate::core::{PackageId, PackageIdSpec}; use crate::core::{Resolve, SourceId, Workspace}; use crate::ops; use crate::util::config::Config; use crate::util::CargoResult; use anyhow::Context; use log::debug; use std::collections::{BTreeMap, HashSet}; use termcolor::Color::{self, Cyan, Green, Red}; pub struct UpdateOptions<'a> { pub config: &'a Config, pub to_update: Vec, pub precise: Option<&'a str>, pub aggressive: bool, pub dry_run: bool, pub workspace: bool, } pub fn generate_lockfile(ws: &Workspace<'_>) -> CargoResult<()> { let mut registry = PackageRegistry::new(ws.config())?; let mut resolve = ops::resolve_with_previous( &mut registry, ws, &CliFeatures::new_all(true), HasDevUnits::Yes, None, None, &[], true, )?; ops::write_pkg_lockfile(ws, &mut resolve)?; Ok(()) } pub fn update_lockfile(ws: &Workspace<'_>, opts: &UpdateOptions<'_>) -> CargoResult<()> { // Currently this is only a warning, but after a transition period this will become // a hard error. // See https://github.com/rust-lang/cargo/issues/10919#issuecomment-1214464756. // We should declare the `precise` and `aggressive` arguments // require the `package` argument in the clap. 
if opts.aggressive && opts.to_update.is_empty() { ws.config().shell().warn( "aggressive is only supported with \"--package \", \ this will become a hard error in a future release.", )?; } if opts.precise.is_some() && opts.to_update.is_empty() { ws.config().shell().warn( "precise is only supported with \"--package \", \ this will become a hard error in a future release.", )?; } if opts.aggressive && opts.precise.is_some() { anyhow::bail!("cannot specify both aggressive and precise simultaneously") } if ws.members().count() == 0 { anyhow::bail!("you can't generate a lockfile for an empty workspace.") } // Updates often require a lot of modifications to the registry, so ensure // that we're synchronized against other Cargos. let _lock = ws.config().acquire_package_cache_lock()?; let previous_resolve = match ops::load_pkg_lockfile(ws)? { Some(resolve) => resolve, None => { match opts.precise { None => return generate_lockfile(ws), // Precise option specified, so calculate a previous_resolve required // by precise package update later. Some(_) => { let mut registry = PackageRegistry::new(opts.config)?; ops::resolve_with_previous( &mut registry, ws, &CliFeatures::new_all(true), HasDevUnits::Yes, None, None, &[], true, )? } } } }; let mut registry = PackageRegistry::new(opts.config)?; let mut to_avoid = HashSet::new(); if opts.to_update.is_empty() { if !opts.workspace { to_avoid.extend(previous_resolve.iter()); to_avoid.extend(previous_resolve.unused_patches()); } } else { let mut sources = Vec::new(); for name in opts.to_update.iter() { let dep = previous_resolve.query(name)?; if opts.aggressive { fill_with_deps(&previous_resolve, dep, &mut to_avoid, &mut HashSet::new()); } else { to_avoid.insert(dep); sources.push(match opts.precise { Some(precise) => { // TODO: see comment in `resolve.rs` as well, but this // seems like a pretty hokey reason to single out // the registry as well. 
let precise = if dep.source_id().is_registry() { semver::Version::parse(precise).with_context(|| { format!("invalid version format for precise version `{}`", precise) })?; format!("{}={}->{}", dep.name(), dep.version(), precise) } else { precise.to_string() }; dep.source_id().with_precise(Some(precise)) } None => dep.source_id().with_precise(None), }); } if let Ok(unused_id) = PackageIdSpec::query_str(name, previous_resolve.unused_patches().iter().cloned()) { to_avoid.insert(unused_id); } } registry.add_sources(sources)?; } let mut resolve = ops::resolve_with_previous( &mut registry, ws, &CliFeatures::new_all(true), HasDevUnits::Yes, Some(&previous_resolve), Some(&to_avoid), &[], true, )?; // Summarize what is changing for the user. let print_change = |status: &str, msg: String, color: Color| { opts.config.shell().status_with_color(status, msg, color) }; for (removed, added) in compare_dependency_graphs(&previous_resolve, &resolve) { if removed.len() == 1 && added.len() == 1 { let msg = if removed[0].source_id().is_git() { format!( "{} -> #{}", removed[0], &added[0].source_id().precise().unwrap()[..8] ) } else { format!("{} -> v{}", removed[0], added[0].version()) }; print_change("Updating", msg, Green)?; } else { for package in removed.iter() { print_change("Removing", format!("{}", package), Red)?; } for package in added.iter() { print_change("Adding", format!("{}", package), Cyan)?; } } } if opts.dry_run { opts.config .shell() .warn("not updating lockfile due to dry run")?; } else { ops::write_pkg_lockfile(ws, &mut resolve)?; } return Ok(()); fn fill_with_deps<'a>( resolve: &'a Resolve, dep: PackageId, set: &mut HashSet, visited: &mut HashSet, ) { if !visited.insert(dep) { return; } set.insert(dep); for (dep, _) in resolve.deps_not_replaced(dep) { fill_with_deps(resolve, dep, set, visited); } } fn compare_dependency_graphs( previous_resolve: &Resolve, resolve: &Resolve, ) -> Vec<(Vec, Vec)> { fn key(dep: PackageId) -> (&'static str, SourceId) { 
(dep.name().as_str(), dep.source_id()) } // Removes all package IDs in `b` from `a`. Note that this is somewhat // more complicated because the equality for source IDs does not take // precise versions into account (e.g., git shas), but we want to take // that into account here. fn vec_subtract(a: &[PackageId], b: &[PackageId]) -> Vec { a.iter() .filter(|a| { // If this package ID is not found in `b`, then it's definitely // in the subtracted set. let i = match b.binary_search(a) { Ok(i) => i, Err(..) => return true, }; // If we've found `a` in `b`, then we iterate over all instances // (we know `b` is sorted) and see if they all have different // precise versions. If so, then `a` isn't actually in `b` so // we'll let it through. // // Note that we only check this for non-registry sources, // however, as registries contain enough version information in // the package ID to disambiguate. if a.source_id().is_registry() { return false; } b[i..] .iter() .take_while(|b| a == b) .all(|b| a.source_id().precise() != b.source_id().precise()) }) .cloned() .collect() } // Map `(package name, package source)` to `(removed versions, added versions)`. 
let mut changes = BTreeMap::new(); let empty = (Vec::new(), Vec::new()); for dep in previous_resolve.iter() { changes .entry(key(dep)) .or_insert_with(|| empty.clone()) .0 .push(dep); } for dep in resolve.iter() { changes .entry(key(dep)) .or_insert_with(|| empty.clone()) .1 .push(dep); } for v in changes.values_mut() { let (ref mut old, ref mut new) = *v; old.sort(); new.sort(); let removed = vec_subtract(old, new); let added = vec_subtract(new, old); *old = removed; *new = added; } debug!("{:#?}", changes); changes.into_iter().map(|(_, v)| v).collect() } } cargo-0.66.0/src/cargo/ops/cargo_install.rs000066400000000000000000000757141432416201200205610ustar00rootroot00000000000000use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; use std::path::{Path, PathBuf}; use std::sync::Arc; use std::{env, fs}; use crate::core::compiler::{CompileKind, DefaultExecutor, Executor, Freshness, UnitOutput}; use crate::core::{Dependency, Edition, Package, PackageId, Source, SourceId, Workspace}; use crate::ops::CompileFilter; use crate::ops::{common_for_install_and_uninstall::*, FilterRule}; use crate::sources::{GitSource, PathSource, SourceConfigMap}; use crate::util::errors::CargoResult; use crate::util::{Config, Filesystem, Rustc, ToSemver, VersionReqExt}; use crate::{drop_println, ops}; use anyhow::{bail, format_err, Context as _}; use cargo_util::paths; use semver::VersionReq; use tempfile::Builder as TempFileBuilder; struct Transaction { bins: Vec, } impl Transaction { fn success(mut self) { self.bins.clear(); } } impl Drop for Transaction { fn drop(&mut self) { for bin in self.bins.iter() { let _ = paths::remove_file(bin); } } } struct InstallablePackage<'cfg, 'a> { config: &'cfg Config, opts: &'a ops::CompileOptions, root: Filesystem, source_id: SourceId, vers: Option<&'a str>, force: bool, no_track: bool, pkg: Package, ws: Workspace<'cfg>, rustc: Rustc, target: String, } impl<'cfg, 'a> InstallablePackage<'cfg, 'a> { // Returns pkg to install. 
None if pkg is already installed pub fn new( config: &'cfg Config, root: Filesystem, map: SourceConfigMap<'_>, krate: Option<&str>, source_id: SourceId, from_cwd: bool, vers: Option<&'a str>, opts: &'a ops::CompileOptions, force: bool, no_track: bool, needs_update_if_source_is_index: bool, ) -> CargoResult>> { if let Some(name) = krate { if name == "." { bail!( "To install the binaries for the package in current working \ directory use `cargo install --path .`. \ Use `cargo build` if you want to simply build the package." ) } } let dst = root.join("bin").into_path_unlocked(); let pkg = { let dep = { if let Some(krate) = krate { let vers = if let Some(vers_flag) = vers { Some(parse_semver_flag(vers_flag)?.to_string()) } else if source_id.is_registry() { // Avoid pre-release versions from crate.io // unless explicitly asked for Some(String::from("*")) } else { None }; Some(Dependency::parse(krate, vers.as_deref(), source_id)?) } else { None } }; if source_id.is_git() { let mut source = GitSource::new(source_id, config)?; select_pkg( &mut source, dep, |git: &mut GitSource<'_>| git.read_packages(), config, )? } else if source_id.is_path() { let mut src = path_source(source_id, config)?; if !src.path().is_dir() { bail!( "`{}` is not a directory. \ --path must point to a directory containing a Cargo.toml file.", src.path().display() ) } if !src.path().join("Cargo.toml").exists() { if from_cwd { bail!( "`{}` is not a crate root; specify a crate to \ install from crates.io, or use --path or --git to \ specify an alternate source", src.path().display() ); } else if src.path().join("cargo.toml").exists() { bail!( "`{}` does not contain a Cargo.toml file, but found cargo.toml please try to rename it to Cargo.toml. \ --path must point to a directory containing a Cargo.toml file.", src.path().display() ) } else { bail!( "`{}` does not contain a Cargo.toml file. 
\ --path must point to a directory containing a Cargo.toml file.", src.path().display() ) } } select_pkg( &mut src, dep, |path: &mut PathSource<'_>| path.read_packages(), config, )? } else if let Some(dep) = dep { let mut source = map.load(source_id, &HashSet::new())?; if let Ok(Some(pkg)) = installed_exact_package( dep.clone(), &mut source, config, opts, &root, &dst, force, ) { let msg = format!( "package `{}` is already installed, use --force to override", pkg ); config.shell().status("Ignored", &msg)?; return Ok(None); } select_dep_pkg(&mut source, dep, config, needs_update_if_source_is_index)? } else { bail!( "must specify a crate to install from \ crates.io, or use --path or --git to \ specify alternate source" ) } }; let (ws, rustc, target) = make_ws_rustc_target(config, opts, &source_id, pkg.clone())?; // If we're installing in --locked mode and there's no `Cargo.lock` published // ie. the bin was published before https://github.com/rust-lang/cargo/pull/7026 if config.locked() && !ws.root().join("Cargo.lock").exists() { config.shell().warn(format!( "no Cargo.lock file published in {}", pkg.to_string() ))?; } let pkg = if source_id.is_git() { // Don't use ws.current() in order to keep the package source as a git source so that // install tracking uses the correct source. pkg } else { ws.current()?.clone() }; if from_cwd { if pkg.manifest().edition() == Edition::Edition2015 { config.shell().warn( "Using `cargo install` to install the binaries for the \ package in current working directory is deprecated, \ use `cargo install --path .` instead. \ Use `cargo build` if you want to simply build the package.", )? } else { bail!( "Using `cargo install` to install the binaries for the \ package in current working directory is no longer supported, \ use `cargo install --path .` instead. \ Use `cargo build` if you want to simply build the package." ) } }; // For bare `cargo install` (no `--bin` or `--example`), check if there is // *something* to install. 
Explicit `--bin` or `--example` flags will be // checked at the start of `compile_ws`. if !opts.filter.is_specific() && !pkg.targets().iter().any(|t| t.is_bin()) { bail!( "there is nothing to install in `{}`, because it has no binaries\n\ `cargo install` is only for installing programs, and can't be used with libraries.\n\ To use a library crate, add it as a dependency in a Cargo project instead.", pkg ); } let ip = InstallablePackage { config, opts, root, source_id, vers, force, no_track, pkg, ws, rustc, target, }; // WARNING: no_track does not perform locking, so there is no protection // of concurrent installs. if no_track { // Check for conflicts. ip.no_track_duplicates(&dst)?; } else if is_installed( &ip.pkg, config, opts, &ip.rustc, &ip.target, &ip.root, &dst, force, )? { let msg = format!( "package `{}` is already installed, use --force to override", ip.pkg ); config.shell().status("Ignored", &msg)?; return Ok(None); } Ok(Some(ip)) } fn no_track_duplicates(&self, dst: &Path) -> CargoResult>> { // Helper for --no-track flag to make sure it doesn't overwrite anything. let duplicates: BTreeMap> = exe_names(&self.pkg, &self.opts.filter) .into_iter() .filter(|name| dst.join(name).exists()) .map(|name| (name, None)) .collect(); if !self.force && !duplicates.is_empty() { let mut msg: Vec = duplicates .iter() .map(|(name, _)| { format!( "binary `{}` already exists in destination `{}`", name, dst.join(name).to_string_lossy() ) }) .collect(); msg.push("Add --force to overwrite".to_string()); bail!("{}", msg.join("\n")); } Ok(duplicates) } fn install_one(mut self) -> CargoResult { self.config.shell().status("Installing", &self.pkg)?; let dst = self.root.join("bin").into_path_unlocked(); let mut td_opt = None; let mut needs_cleanup = false; if !self.source_id.is_path() { let target_dir = if let Some(dir) = self.config.target_dir()? 
{ dir } else if let Ok(td) = TempFileBuilder::new().prefix("cargo-install").tempdir() { let p = td.path().to_owned(); td_opt = Some(td); Filesystem::new(p) } else { needs_cleanup = true; Filesystem::new(self.config.cwd().join("target-install")) }; self.ws.set_target_dir(target_dir); } self.check_yanked_install()?; let exec: Arc = Arc::new(DefaultExecutor); let compile = ops::compile_ws(&self.ws, self.opts, &exec).with_context(|| { if let Some(td) = td_opt.take() { // preserve the temporary directory, so the user can inspect it td.into_path(); } format!( "failed to compile `{}`, intermediate artifacts can be \ found at `{}`", self.pkg, self.ws.target_dir().display() ) })?; let mut binaries: Vec<(&str, &Path)> = compile .binaries .iter() .map(|UnitOutput { path, .. }| { let name = path.file_name().unwrap(); if let Some(s) = name.to_str() { Ok((s, path.as_ref())) } else { bail!("Binary `{:?}` name can't be serialized into string", name) } }) .collect::>()?; if binaries.is_empty() { // Cargo already warns the user if they use a target specifier that matches nothing, // but we want to error if the user asked for a _particular_ binary to be installed, // and we didn't end up installing it. // // NOTE: This _should_ be impossible to hit since --bin=does_not_exist will fail on // target selection, and --bin=requires_a without --features=a will fail with "target // .. requires the features ..". But rather than assume that's the case, we define the // behavior for this fallback case as well. if let CompileFilter::Only { bins, examples, .. } = &self.opts.filter { let mut any_specific = false; if let FilterRule::Just(ref v) = bins { if !v.is_empty() { any_specific = true; } } if let FilterRule::Just(ref v) = examples { if !v.is_empty() { any_specific = true; } } if any_specific { bail!("no binaries are available for install using the selected features"); } } // If there _are_ binaries available, but none were selected given the current set of // features, let the user know. 
// // Note that we know at this point that _if_ bins or examples is set to `::Just`, // they're `::Just([])`, which is `FilterRule::none()`. if self.pkg.targets().iter().any(|t| t.is_executable()) { self.config .shell() .warn("none of the package's binaries are available for install using the selected features")?; } return Ok(false); } // This is primarily to make testing easier. binaries.sort_unstable(); let (tracker, duplicates) = if self.no_track { (None, self.no_track_duplicates(&dst)?) } else { let tracker = InstallTracker::load(self.config, &self.root)?; let (_freshness, duplicates) = tracker.check_upgrade( &dst, &self.pkg, self.force, self.opts, &self.target, &self.rustc.verbose_version, )?; (Some(tracker), duplicates) }; paths::create_dir_all(&dst)?; // Copy all binaries to a temporary directory under `dst` first, catching // some failure modes (e.g., out of space) before touching the existing // binaries. This directory will get cleaned up via RAII. let staging_dir = TempFileBuilder::new() .prefix("cargo-install") .tempdir_in(&dst)?; for &(bin, src) in binaries.iter() { let dst = staging_dir.path().join(bin); // Try to move if `target_dir` is transient. if !self.source_id.is_path() && fs::rename(src, &dst).is_ok() { continue; } paths::copy(src, &dst)?; } let (to_replace, to_install): (Vec<&str>, Vec<&str>) = binaries .iter() .map(|&(bin, _)| bin) .partition(|&bin| duplicates.contains_key(bin)); let mut installed = Transaction { bins: Vec::new() }; let mut successful_bins = BTreeSet::new(); // Move the temporary copies into `dst` starting with new binaries. 
for bin in to_install.iter() { let src = staging_dir.path().join(bin); let dst = dst.join(bin); self.config.shell().status("Installing", dst.display())?; fs::rename(&src, &dst).with_context(|| { format!("failed to move `{}` to `{}`", src.display(), dst.display()) })?; installed.bins.push(dst); successful_bins.insert(bin.to_string()); } // Repeat for binaries which replace existing ones but don't pop the error // up until after updating metadata. let replace_result = { let mut try_install = || -> CargoResult<()> { for &bin in to_replace.iter() { let src = staging_dir.path().join(bin); let dst = dst.join(bin); self.config.shell().status("Replacing", dst.display())?; fs::rename(&src, &dst).with_context(|| { format!("failed to move `{}` to `{}`", src.display(), dst.display()) })?; successful_bins.insert(bin.to_string()); } Ok(()) }; try_install() }; if let Some(mut tracker) = tracker { tracker.mark_installed( &self.pkg, &successful_bins, self.vers.map(|s| s.to_string()), self.opts, &self.target, &self.rustc.verbose_version, ); if let Err(e) = remove_orphaned_bins(&self.ws, &mut tracker, &duplicates, &self.pkg, &dst) { // Don't hard error on remove. self.config .shell() .warn(format!("failed to remove orphan: {:?}", e))?; } match tracker.save() { Err(err) => replace_result.with_context(|| err)?, Ok(_) => replace_result?, } } // Reaching here means all actions have succeeded. Clean up. installed.success(); if needs_cleanup { // Don't bother grabbing a lock as we're going to blow it all away // anyway. let target_dir = self.ws.target_dir().into_path_unlocked(); paths::remove_dir_all(&target_dir)?; } // Helper for creating status messages. 
fn executables>(mut names: impl Iterator + Clone) -> String { if names.clone().count() == 1 { format!("(executable `{}`)", names.next().unwrap().as_ref()) } else { format!( "(executables {})", names .map(|b| format!("`{}`", b.as_ref())) .collect::>() .join(", ") ) } } if duplicates.is_empty() { self.config.shell().status( "Installed", format!( "package `{}` {}", self.pkg, executables(successful_bins.iter()) ), )?; Ok(true) } else { if !to_install.is_empty() { self.config.shell().status( "Installed", format!("package `{}` {}", self.pkg, executables(to_install.iter())), )?; } // Invert the duplicate map. let mut pkg_map = BTreeMap::new(); for (bin_name, opt_pkg_id) in &duplicates { let key = opt_pkg_id.map_or_else(|| "unknown".to_string(), |pkg_id| pkg_id.to_string()); pkg_map.entry(key).or_insert_with(Vec::new).push(bin_name); } for (pkg_descr, bin_names) in &pkg_map { self.config.shell().status( "Replaced", format!( "package `{}` with `{}` {}", pkg_descr, self.pkg, executables(bin_names.iter()) ), )?; } Ok(true) } } fn check_yanked_install(&self) -> CargoResult<()> { if self.ws.ignore_lock() || !self.ws.root().join("Cargo.lock").exists() { return Ok(()); } // It would be best if `source` could be passed in here to avoid a // duplicate "Updating", but since `source` is taken by value, then it // wouldn't be available for `compile_ws`. 
let (pkg_set, resolve) = ops::resolve_ws(&self.ws)?; ops::check_yanked( self.ws.config(), &pkg_set, &resolve, "consider running without --locked", ) } } pub fn install( config: &Config, root: Option<&str>, krates: Vec<(&str, Option<&str>)>, source_id: SourceId, from_cwd: bool, opts: &ops::CompileOptions, force: bool, no_track: bool, ) -> CargoResult<()> { let root = resolve_root(root, config)?; let dst = root.join("bin").into_path_unlocked(); let map = SourceConfigMap::new(config)?; let (installed_anything, scheduled_error) = if krates.len() <= 1 { let (krate, vers) = krates .into_iter() .next() .map(|(k, v)| (Some(k), v)) .unwrap_or((None, None)); let installable_pkg = InstallablePackage::new( config, root, map, krate, source_id, from_cwd, vers, opts, force, no_track, true, )?; let mut installed_anything = true; if let Some(installable_pkg) = installable_pkg { installed_anything = installable_pkg.install_one()?; } (installed_anything, false) } else { let mut succeeded = vec![]; let mut failed = vec![]; // "Tracks whether or not the source (such as a registry or git repo) has been updated. // This is used to avoid updating it multiple times when installing multiple crates. let mut did_update = false; let pkgs_to_install: Vec<_> = krates .into_iter() .filter_map(|(krate, vers)| { let root = root.clone(); let map = map.clone(); match InstallablePackage::new( config, root, map, Some(krate), source_id, from_cwd, vers, opts, force, no_track, !did_update, ) { Ok(Some(installable_pkg)) => { did_update = true; Some((krate, installable_pkg)) } Ok(None) => { // Already installed succeeded.push(krate); None } Err(e) => { crate::display_error(&e, &mut config.shell()); failed.push(krate); // We assume an update was performed if we got an error. 
did_update = true; None } } }) .collect(); let install_results: Vec<_> = pkgs_to_install .into_iter() .map(|(krate, installable_pkg)| (krate, installable_pkg.install_one())) .collect(); for (krate, result) in install_results { match result { Ok(installed) => { if installed { succeeded.push(krate); } } Err(e) => { crate::display_error(&e, &mut config.shell()); failed.push(krate); } } } let mut summary = vec![]; if !succeeded.is_empty() { summary.push(format!("Successfully installed {}!", succeeded.join(", "))); } if !failed.is_empty() { summary.push(format!( "Failed to install {} (see error(s) above).", failed.join(", ") )); } if !succeeded.is_empty() || !failed.is_empty() { config.shell().status("Summary", summary.join(" "))?; } (!succeeded.is_empty(), !failed.is_empty()) }; if installed_anything { // Print a warning that if this directory isn't in PATH that they won't be // able to run these commands. let path = env::var_os("PATH").unwrap_or_default(); let dst_in_path = env::split_paths(&path).any(|path| path == dst); if !dst_in_path { config.shell().warn(&format!( "be sure to add `{}` to your PATH to be \ able to run the installed binaries", dst.display() ))?; } } if scheduled_error { bail!("some crates failed to install"); } Ok(()) } fn is_installed( pkg: &Package, config: &Config, opts: &ops::CompileOptions, rustc: &Rustc, target: &str, root: &Filesystem, dst: &Path, force: bool, ) -> CargoResult { let tracker = InstallTracker::load(config, root)?; let (freshness, _duplicates) = tracker.check_upgrade(dst, pkg, force, opts, target, &rustc.verbose_version)?; Ok(freshness == Freshness::Fresh) } /// Checks if vers can only be satisfied by exactly one version of a package in a registry, and it's /// already installed. If this is the case, we can skip interacting with a registry to check if /// newer versions may be installable, as no newer version can exist. 
fn installed_exact_package( dep: Dependency, source: &mut T, config: &Config, opts: &ops::CompileOptions, root: &Filesystem, dst: &Path, force: bool, ) -> CargoResult> where T: Source, { if !dep.version_req().is_exact() { // If the version isn't exact, we may need to update the registry and look for a newer // version - we can't know if the package is installed without doing so. return Ok(None); } // Try getting the package from the registry without updating it, to avoid a potentially // expensive network call in the case that the package is already installed. // If this fails, the caller will possibly do an index update and try again, this is just a // best-effort check to see if we can avoid hitting the network. if let Ok(pkg) = select_dep_pkg(source, dep, config, false) { let (_ws, rustc, target) = make_ws_rustc_target(config, opts, &source.source_id(), pkg.clone())?; if let Ok(true) = is_installed(&pkg, config, opts, &rustc, &target, root, dst, force) { return Ok(Some(pkg)); } } Ok(None) } fn make_ws_rustc_target<'cfg>( config: &'cfg Config, opts: &ops::CompileOptions, source_id: &SourceId, pkg: Package, ) -> CargoResult<(Workspace<'cfg>, Rustc, String)> { let mut ws = if source_id.is_git() || source_id.is_path() { Workspace::new(pkg.manifest_path(), config)? } else { Workspace::ephemeral(pkg, config, None, false)? }; ws.set_ignore_lock(config.lock_update_allowed()); ws.set_require_optional_deps(false); let rustc = config.load_global_rustc(Some(&ws))?; let target = match &opts.build_config.single_requested_kind()? { CompileKind::Host => rustc.host.as_str().to_owned(), CompileKind::Target(target) => target.short_name().to_owned(), }; Ok((ws, rustc, target)) } /// Parses x.y.z as if it were =x.y.z, and gives CLI-specific error messages in the case of invalid /// values. 
fn parse_semver_flag(v: &str) -> CargoResult { // If the version begins with character <, >, =, ^, ~ parse it as a // version range, otherwise parse it as a specific version let first = v .chars() .next() .ok_or_else(|| format_err!("no version provided for the `--version` flag"))?; let is_req = "<>=^~".contains(first) || v.contains('*'); if is_req { match v.parse::() { Ok(v) => Ok(v), Err(_) => bail!( "the `--version` provided, `{}`, is \ not a valid semver version requirement\n\n\ Please have a look at \ https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html \ for the correct format", v ), } } else { match v.to_semver() { Ok(v) => Ok(VersionReq::exact(&v)), Err(e) => { let mut msg = format!( "the `--version` provided, `{}`, is \ not a valid semver version: {}\n", v, e ); // If it is not a valid version but it is a valid version // requirement, add a note to the warning if v.parse::().is_ok() { msg.push_str(&format!( "\nif you want to specify semver range, \ add an explicit qualifier, like ^{}", v )); } bail!(msg); } } } } /// Display a list of installed binaries. pub fn install_list(dst: Option<&str>, config: &Config) -> CargoResult<()> { let root = resolve_root(dst, config)?; let tracker = InstallTracker::load(config, &root)?; for (k, v) in tracker.all_installed_bins() { drop_println!(config, "{}:", k); for bin in v { drop_println!(config, " {}", bin); } } Ok(()) } /// Removes executables that are no longer part of a package that was /// previously installed. fn remove_orphaned_bins( ws: &Workspace<'_>, tracker: &mut InstallTracker, duplicates: &BTreeMap>, pkg: &Package, dst: &Path, ) -> CargoResult<()> { let filter = ops::CompileFilter::new_all_targets(); let all_self_names = exe_names(pkg, &filter); let mut to_remove: HashMap> = HashMap::new(); // For each package that we stomped on. for other_pkg in duplicates.values().flatten() { // Only for packages with the same name. 
if other_pkg.name() == pkg.name() { // Check what the old package had installed. if let Some(installed) = tracker.installed_bins(*other_pkg) { // If the old install has any names that no longer exist, // add them to the list to remove. for installed_name in installed { if !all_self_names.contains(installed_name.as_str()) { to_remove .entry(*other_pkg) .or_default() .insert(installed_name.clone()); } } } } } for (old_pkg, bins) in to_remove { tracker.remove(old_pkg, &bins); for bin in bins { let full_path = dst.join(bin); if full_path.exists() { ws.config().shell().status( "Removing", format!( "executable `{}` from previous version {}", full_path.display(), old_pkg ), )?; paths::remove_file(&full_path) .with_context(|| format!("failed to remove {:?}", full_path))?; } } } Ok(()) } cargo-0.66.0/src/cargo/ops/cargo_new.rs000066400000000000000000000612651432416201200177000ustar00rootroot00000000000000use crate::core::{Edition, Shell, Workspace}; use crate::util::errors::CargoResult; use crate::util::{existing_vcs_repo, FossilRepo, GitRepo, HgRepo, PijulRepo}; use crate::util::{restricted_names, Config}; use anyhow::Context as _; use cargo_util::paths; use serde::de; use serde::Deserialize; use std::collections::BTreeMap; use std::fmt; use std::io::{BufRead, BufReader, ErrorKind}; use std::path::{Path, PathBuf}; use std::str::FromStr; use toml_edit::easy as toml; #[derive(Clone, Copy, Debug, PartialEq)] pub enum VersionControl { Git, Hg, Pijul, Fossil, NoVcs, } impl FromStr for VersionControl { type Err = anyhow::Error; fn from_str(s: &str) -> Result { match s { "git" => Ok(VersionControl::Git), "hg" => Ok(VersionControl::Hg), "pijul" => Ok(VersionControl::Pijul), "fossil" => Ok(VersionControl::Fossil), "none" => Ok(VersionControl::NoVcs), other => anyhow::bail!("unknown vcs specification: `{}`", other), } } } impl<'de> de::Deserialize<'de> for VersionControl { fn deserialize(deserializer: D) -> Result where D: de::Deserializer<'de>, { let s = 
String::deserialize(deserializer)?; FromStr::from_str(&s).map_err(de::Error::custom) } } #[derive(Debug)] pub struct NewOptions { pub version_control: Option, pub kind: NewProjectKind, pub auto_detect_kind: bool, /// Absolute path to the directory for the new package pub path: PathBuf, pub name: Option, pub edition: Option, pub registry: Option, } #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum NewProjectKind { Bin, Lib, } impl NewProjectKind { fn is_bin(self) -> bool { self == NewProjectKind::Bin } } impl fmt::Display for NewProjectKind { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { NewProjectKind::Bin => "binary (application)", NewProjectKind::Lib => "library", } .fmt(f) } } struct SourceFileInformation { relative_path: String, target_name: String, bin: bool, } struct MkOptions<'a> { version_control: Option, path: &'a Path, name: &'a str, source_files: Vec, bin: bool, edition: Option<&'a str>, registry: Option<&'a str>, } impl NewOptions { pub fn new( version_control: Option, bin: bool, lib: bool, path: PathBuf, name: Option, edition: Option, registry: Option, ) -> CargoResult { let auto_detect_kind = !bin && !lib; let kind = match (bin, lib) { (true, true) => anyhow::bail!("can't specify both lib and binary outputs"), (false, true) => NewProjectKind::Lib, (_, false) => NewProjectKind::Bin, }; let opts = NewOptions { version_control, kind, auto_detect_kind, path, name, edition, registry, }; Ok(opts) } } #[derive(Deserialize)] struct CargoNewConfig { #[deprecated = "cargo-new no longer supports adding the authors field"] #[allow(dead_code)] name: Option, #[deprecated = "cargo-new no longer supports adding the authors field"] #[allow(dead_code)] email: Option, #[serde(rename = "vcs")] version_control: Option, } fn get_name<'a>(path: &'a Path, opts: &'a NewOptions) -> CargoResult<&'a str> { if let Some(ref name) = opts.name { return Ok(name); } let file_name = path.file_name().ok_or_else(|| { anyhow::format_err!( "cannot auto-detect 
package name from path {:?} ; use --name to override", path.as_os_str() ) })?; file_name.to_str().ok_or_else(|| { anyhow::format_err!( "cannot create package with a non-unicode name: {:?}", file_name ) }) } fn check_name( name: &str, show_name_help: bool, has_bin: bool, shell: &mut Shell, ) -> CargoResult<()> { // If --name is already used to override, no point in suggesting it // again as a fix. let name_help = if show_name_help { "\nIf you need a package name to not match the directory name, consider using --name flag." } else { "" }; let bin_help = || { let mut help = String::from(name_help); if has_bin { help.push_str(&format!( "\n\ If you need a binary with the name \"{name}\", use a valid package \ name, and set the binary name to be different from the package. \ This can be done by setting the binary filename to `src/bin/{name}.rs` \ or change the name in Cargo.toml with:\n\ \n \ [[bin]]\n \ name = \"{name}\"\n \ path = \"src/main.rs\"\n\ ", name = name )); } help }; restricted_names::validate_package_name(name, "package name", &bin_help())?; if restricted_names::is_keyword(name) { anyhow::bail!( "the name `{}` cannot be used as a package name, it is a Rust keyword{}", name, bin_help() ); } if restricted_names::is_conflicting_artifact_name(name) { if has_bin { anyhow::bail!( "the name `{}` cannot be used as a package name, \ it conflicts with cargo's build directory names{}", name, name_help ); } else { shell.warn(format!( "the name `{}` will not support binary \ executables with that name, \ it conflicts with cargo's build directory names", name ))?; } } if name == "test" { anyhow::bail!( "the name `test` cannot be used as a package name, \ it conflicts with Rust's built-in test library{}", bin_help() ); } if ["core", "std", "alloc", "proc_macro", "proc-macro"].contains(&name) { shell.warn(format!( "the name `{}` is part of Rust's standard library\n\ It is recommended to use a different name to avoid problems.{}", name, bin_help() ))?; } if 
restricted_names::is_windows_reserved(name) { if cfg!(windows) { anyhow::bail!( "cannot use name `{}`, it is a reserved Windows filename{}", name, name_help ); } else { shell.warn(format!( "the name `{}` is a reserved Windows filename\n\ This package will not work on Windows platforms.", name ))?; } } if restricted_names::is_non_ascii_name(name) { shell.warn(format!( "the name `{}` contains non-ASCII characters\n\ Non-ASCII crate names are not supported by Rust.", name ))?; } Ok(()) } fn detect_source_paths_and_types( package_path: &Path, package_name: &str, detected_files: &mut Vec, ) -> CargoResult<()> { let path = package_path; let name = package_name; enum H { Bin, Lib, Detect, } struct Test { proposed_path: String, handling: H, } let tests = vec![ Test { proposed_path: "src/main.rs".to_string(), handling: H::Bin, }, Test { proposed_path: "main.rs".to_string(), handling: H::Bin, }, Test { proposed_path: format!("src/{}.rs", name), handling: H::Detect, }, Test { proposed_path: format!("{}.rs", name), handling: H::Detect, }, Test { proposed_path: "src/lib.rs".to_string(), handling: H::Lib, }, Test { proposed_path: "lib.rs".to_string(), handling: H::Lib, }, ]; for i in tests { let pp = i.proposed_path; // path/pp does not exist or is not a file if !path.join(&pp).is_file() { continue; } let sfi = match i.handling { H::Bin => SourceFileInformation { relative_path: pp, target_name: package_name.to_string(), bin: true, }, H::Lib => SourceFileInformation { relative_path: pp, target_name: package_name.to_string(), bin: false, }, H::Detect => { let content = paths::read(&path.join(pp.clone()))?; let isbin = content.contains("fn main"); SourceFileInformation { relative_path: pp, target_name: package_name.to_string(), bin: isbin, } } }; detected_files.push(sfi); } // Check for duplicate lib attempt let mut previous_lib_relpath: Option<&str> = None; let mut duplicates_checker: BTreeMap<&str, &SourceFileInformation> = BTreeMap::new(); for i in detected_files { if i.bin { if 
let Some(x) = BTreeMap::get::(&duplicates_checker, i.target_name.as_ref()) { anyhow::bail!( "\ multiple possible binary sources found: {} {} cannot automatically generate Cargo.toml as the main target would be ambiguous", &x.relative_path, &i.relative_path ); } duplicates_checker.insert(i.target_name.as_ref(), i); } else { if let Some(plp) = previous_lib_relpath { anyhow::bail!( "cannot have a package with \ multiple libraries, \ found both `{}` and `{}`", plp, i.relative_path ) } previous_lib_relpath = Some(&i.relative_path); } } Ok(()) } fn plan_new_source_file(bin: bool, package_name: String) -> SourceFileInformation { if bin { SourceFileInformation { relative_path: "src/main.rs".to_string(), target_name: package_name, bin: true, } } else { SourceFileInformation { relative_path: "src/lib.rs".to_string(), target_name: package_name, bin: false, } } } fn calculate_new_project_kind( requested_kind: NewProjectKind, auto_detect_kind: bool, found_files: &Vec, ) -> NewProjectKind { let bin_file = found_files.iter().find(|x| x.bin); let kind_from_files = if !found_files.is_empty() && bin_file.is_none() { NewProjectKind::Lib } else { NewProjectKind::Bin }; if auto_detect_kind { return kind_from_files; } requested_kind } pub fn new(opts: &NewOptions, config: &Config) -> CargoResult<()> { let path = &opts.path; if path.exists() { anyhow::bail!( "destination `{}` already exists\n\n\ Use `cargo init` to initialize the directory", path.display() ) } let is_bin = opts.kind.is_bin(); let name = get_name(path, opts)?; check_name(name, opts.name.is_none(), is_bin, &mut config.shell())?; let mkopts = MkOptions { version_control: opts.version_control, path, name, source_files: vec![plan_new_source_file(opts.kind.is_bin(), name.to_string())], bin: is_bin, edition: opts.edition.as_deref(), registry: opts.registry.as_deref(), }; mk(config, &mkopts).with_context(|| { format!( "Failed to create package `{}` at `{}`", name, path.display() ) })?; Ok(()) } pub fn init(opts: &NewOptions, 
config: &Config) -> CargoResult { // This is here just as a random location to exercise the internal error handling. if std::env::var_os("__CARGO_TEST_INTERNAL_ERROR").is_some() { return Err(crate::util::internal("internal error test")); } let path = &opts.path; if path.join("Cargo.toml").exists() { anyhow::bail!("`cargo init` cannot be run on existing Cargo packages") } let name = get_name(path, opts)?; let mut src_paths_types = vec![]; detect_source_paths_and_types(path, name, &mut src_paths_types)?; let kind = calculate_new_project_kind(opts.kind, opts.auto_detect_kind, &src_paths_types); let has_bin = kind.is_bin(); if src_paths_types.is_empty() { src_paths_types.push(plan_new_source_file(has_bin, name.to_string())); } else if src_paths_types.len() == 1 && !src_paths_types.iter().any(|x| x.bin == has_bin) { // we've found the only file and it's not the type user wants. Change the type and warn let file_type = if src_paths_types[0].bin { NewProjectKind::Bin } else { NewProjectKind::Lib }; config.shell().warn(format!( "file `{}` seems to be a {} file", src_paths_types[0].relative_path, file_type ))?; src_paths_types[0].bin = has_bin } else if src_paths_types.len() > 1 && !has_bin { // We have found both lib and bin files and the user would like us to treat both as libs anyhow::bail!( "cannot have a package with \ multiple libraries, \ found both `{}` and `{}`", src_paths_types[0].relative_path, src_paths_types[1].relative_path ) } check_name(name, opts.name.is_none(), has_bin, &mut config.shell())?; let mut version_control = opts.version_control; if version_control == None { let mut num_detected_vsces = 0; if path.join(".git").exists() { version_control = Some(VersionControl::Git); num_detected_vsces += 1; } if path.join(".hg").exists() { version_control = Some(VersionControl::Hg); num_detected_vsces += 1; } if path.join(".pijul").exists() { version_control = Some(VersionControl::Pijul); num_detected_vsces += 1; } if path.join(".fossil").exists() { 
version_control = Some(VersionControl::Fossil); num_detected_vsces += 1; } // if none exists, maybe create git, like in `cargo new` if num_detected_vsces > 1 { anyhow::bail!( "more than one of .hg, .git, .pijul, .fossil configurations \ found and the ignore file can't be filled in as \ a result. specify --vcs to override detection" ); } } let mkopts = MkOptions { version_control, path, name, bin: has_bin, source_files: src_paths_types, edition: opts.edition.as_deref(), registry: opts.registry.as_deref(), }; mk(config, &mkopts).with_context(|| { format!( "Failed to create package `{}` at `{}`", name, path.display() ) })?; Ok(kind) } /// IgnoreList struct IgnoreList { /// git like formatted entries ignore: Vec, /// mercurial formatted entries hg_ignore: Vec, /// Fossil-formatted entries. fossil_ignore: Vec, } impl IgnoreList { /// constructor to build a new ignore file fn new() -> IgnoreList { IgnoreList { ignore: Vec::new(), hg_ignore: Vec::new(), fossil_ignore: Vec::new(), } } /// Add a new entry to the ignore list. Requires three arguments with the /// entry in possibly three different formats. One for "git style" entries, /// one for "mercurial style" entries and one for "fossil style" entries. fn push(&mut self, ignore: &str, hg_ignore: &str, fossil_ignore: &str) { self.ignore.push(ignore.to_string()); self.hg_ignore.push(hg_ignore.to_string()); self.fossil_ignore.push(fossil_ignore.to_string()); } /// Return the correctly formatted content of the ignore file for the given /// version control system as `String`. fn format_new(&self, vcs: VersionControl) -> String { let ignore_items = match vcs { VersionControl::Hg => &self.hg_ignore, VersionControl::Fossil => &self.fossil_ignore, _ => &self.ignore, }; ignore_items.join("\n") + "\n" } /// format_existing is used to format the IgnoreList when the ignore file /// already exists. It reads the contents of the given `BufRead` and /// checks if the contents of the ignore list are already existing in the /// file. 
fn format_existing(&self, existing: T, vcs: VersionControl) -> String { // TODO: is unwrap safe? let existing_items = existing.lines().collect::, _>>().unwrap(); let ignore_items = match vcs { VersionControl::Hg => &self.hg_ignore, VersionControl::Fossil => &self.fossil_ignore, _ => &self.ignore, }; let mut out = String::new(); // Fossil does not support `#` comments. if vcs != VersionControl::Fossil { out.push_str("\n\n# Added by cargo\n"); if ignore_items .iter() .any(|item| existing_items.contains(item)) { out.push_str("#\n# already existing elements were commented out\n"); } out.push('\n'); } for item in ignore_items { if existing_items.contains(item) { if vcs == VersionControl::Fossil { // Just merge for Fossil. continue; } out.push('#'); } out.push_str(item); out.push('\n'); } out } } /// Writes the ignore file to the given directory. If the ignore file for the /// given vcs system already exists, its content is read and duplicate ignore /// file entries are filtered out. fn write_ignore_file(base_path: &Path, list: &IgnoreList, vcs: VersionControl) -> CargoResult<()> { // Fossil only supports project-level settings in a dedicated subdirectory. if vcs == VersionControl::Fossil { paths::create_dir_all(base_path.join(".fossil-settings"))?; } for fp_ignore in match vcs { VersionControl::Git => vec![base_path.join(".gitignore")], VersionControl::Hg => vec![base_path.join(".hgignore")], VersionControl::Pijul => vec![base_path.join(".ignore")], // Fossil has a cleaning functionality configured in a separate file. 
VersionControl::Fossil => vec![ base_path.join(".fossil-settings/ignore-glob"), base_path.join(".fossil-settings/clean-glob"), ], VersionControl::NoVcs => return Ok(()), } { let ignore: String = match paths::open(&fp_ignore) { Err(err) => match err.downcast_ref::() { Some(io_err) if io_err.kind() == ErrorKind::NotFound => list.format_new(vcs), _ => return Err(err), }, Ok(file) => list.format_existing(BufReader::new(file), vcs), }; paths::append(&fp_ignore, ignore.as_bytes())?; } Ok(()) } /// Initializes the correct VCS system based on the provided config. fn init_vcs(path: &Path, vcs: VersionControl, config: &Config) -> CargoResult<()> { match vcs { VersionControl::Git => { if !path.join(".git").exists() { // Temporary fix to work around bug in libgit2 when creating a // directory in the root of a posix filesystem. // See: https://github.com/libgit2/libgit2/issues/5130 paths::create_dir_all(path)?; GitRepo::init(path, config.cwd())?; } } VersionControl::Hg => { if !path.join(".hg").exists() { HgRepo::init(path, config.cwd())?; } } VersionControl::Pijul => { if !path.join(".pijul").exists() { PijulRepo::init(path, config.cwd())?; } } VersionControl::Fossil => { if !path.join(".fossil").exists() { FossilRepo::init(path, config.cwd())?; } } VersionControl::NoVcs => { paths::create_dir_all(path)?; } }; Ok(()) } fn mk(config: &Config, opts: &MkOptions<'_>) -> CargoResult<()> { let path = opts.path; let name = opts.name; let cfg = config.get::("cargo-new")?; // Using the push method with multiple arguments ensures that the entries // for all mutually-incompatible VCS in terms of syntax are in sync. 
let mut ignore = IgnoreList::new(); ignore.push("/target", "^target/", "target"); if !opts.bin { ignore.push("/Cargo.lock", "^Cargo.lock$", "Cargo.lock"); } let vcs = opts.version_control.unwrap_or_else(|| { let in_existing_vcs = existing_vcs_repo(path.parent().unwrap_or(path), config.cwd()); match (cfg.version_control, in_existing_vcs) { (None, false) => VersionControl::Git, (Some(opt), false) => opt, (_, true) => VersionControl::NoVcs, } }); init_vcs(path, vcs, config)?; write_ignore_file(path, &ignore, vcs)?; let mut cargotoml_path_specifier = String::new(); // Calculate what `[lib]` and `[[bin]]`s we need to append to `Cargo.toml`. for i in &opts.source_files { if i.bin { if i.relative_path != "src/main.rs" { cargotoml_path_specifier.push_str(&format!( r#" [[bin]] name = "{}" path = {} "#, i.target_name, toml::Value::String(i.relative_path.clone()) )); } } else if i.relative_path != "src/lib.rs" { cargotoml_path_specifier.push_str(&format!( r#" [lib] name = "{}" path = {} "#, i.target_name, toml::Value::String(i.relative_path.clone()) )); } } // Create `Cargo.toml` file with necessary `[lib]` and `[[bin]]` sections, if needed. paths::write( &path.join("Cargo.toml"), format!( r#"[package] name = "{}" version = "0.1.0" edition = {} {} # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] {}"#, name, match opts.edition { Some(edition) => toml::Value::String(edition.to_string()), None => toml::Value::String(Edition::LATEST_STABLE.to_string()), }, match opts.registry { Some(registry) => format!( "publish = {}\n", toml::Value::Array(vec!(toml::Value::String(registry.to_string()))) ), None => "".to_string(), }, cargotoml_path_specifier ) .as_bytes(), )?; // Create all specified source files (with respective parent directories) if they don't exist. 
for i in &opts.source_files { let path_of_source_file = path.join(i.relative_path.clone()); if let Some(src_dir) = path_of_source_file.parent() { paths::create_dir_all(src_dir)?; } let default_file_content: &[u8] = if i.bin { b"\ fn main() { println!(\"Hello, world!\"); } " } else { b"\ pub fn add(left: usize, right: usize) -> usize { left + right } #[cfg(test)] mod tests { use super::*; #[test] fn it_works() { let result = add(2, 2); assert_eq!(result, 4); } } " }; if !path_of_source_file.is_file() { paths::write(&path_of_source_file, default_file_content)?; // Format the newly created source file if let Err(e) = cargo_util::ProcessBuilder::new("rustfmt") .arg(&path_of_source_file) .exec_with_output() { log::warn!("failed to call rustfmt: {:#}", e); } } } if let Err(e) = Workspace::new(&path.join("Cargo.toml"), config) { crate::display_warning_with_error( "compiling this new package may not work due to invalid \ workspace configuration", &e, &mut config.shell(), ); } Ok(()) } cargo-0.66.0/src/cargo/ops/cargo_output_metadata.rs000066400000000000000000000201711432416201200222760ustar00rootroot00000000000000use crate::core::compiler::{CompileKind, RustcTargetData}; use crate::core::dependency::DepKind; use crate::core::package::SerializedPackage; use crate::core::resolver::{features::CliFeatures, HasDevUnits, Resolve}; use crate::core::{Dependency, Package, PackageId, Workspace}; use crate::ops::{self, Packages}; use crate::util::interning::InternedString; use crate::util::CargoResult; use cargo_platform::Platform; use serde::Serialize; use std::collections::BTreeMap; use std::path::PathBuf; use toml_edit::easy as toml; const VERSION: u32 = 1; pub struct OutputMetadataOptions { pub cli_features: CliFeatures, pub no_deps: bool, pub version: u32, pub filter_platforms: Vec, } /// Loads the manifest, resolves the dependencies of the package to the concrete /// used versions - considering overrides - and writes all dependencies in a JSON /// format to stdout. 
pub fn output_metadata(ws: &Workspace<'_>, opt: &OutputMetadataOptions) -> CargoResult { if opt.version != VERSION { anyhow::bail!( "metadata version {} not supported, only {} is currently supported", opt.version, VERSION ); } let (packages, resolve) = if opt.no_deps { let packages = ws.members().map(|pkg| pkg.serialized()).collect(); (packages, None) } else { let (packages, resolve) = build_resolve_graph(ws, opt)?; (packages, Some(resolve)) }; Ok(ExportInfo { packages, workspace_members: ws.members().map(|pkg| pkg.package_id()).collect(), resolve, target_directory: ws.target_dir().into_path_unlocked(), version: VERSION, workspace_root: ws.root().to_path_buf(), metadata: ws.custom_metadata().cloned(), }) } /// This is the structure that is serialized and displayed to the user. /// /// See cargo-metadata.adoc for detailed documentation of the format. #[derive(Serialize)] pub struct ExportInfo { packages: Vec, workspace_members: Vec, resolve: Option, target_directory: PathBuf, version: u32, workspace_root: PathBuf, metadata: Option, } #[derive(Serialize)] struct MetadataResolve { nodes: Vec, root: Option, } #[derive(Serialize)] struct MetadataResolveNode { id: PackageId, dependencies: Vec, deps: Vec, features: Vec, } #[derive(Serialize)] struct Dep { name: InternedString, pkg: PackageId, dep_kinds: Vec, } #[derive(Serialize, PartialEq, Eq, PartialOrd, Ord)] struct DepKindInfo { kind: DepKind, target: Option, } impl From<&Dependency> for DepKindInfo { fn from(dep: &Dependency) -> DepKindInfo { DepKindInfo { kind: dep.kind(), target: dep.platform().cloned(), } } } /// Builds the resolve graph as it will be displayed to the user. fn build_resolve_graph( ws: &Workspace<'_>, metadata_opts: &OutputMetadataOptions, ) -> CargoResult<(Vec, MetadataResolve)> { // TODO: Without --filter-platform, features are being resolved for `host` only. // How should this work? 
let requested_kinds = CompileKind::from_requested_targets(ws.config(), &metadata_opts.filter_platforms)?; let target_data = RustcTargetData::new(ws, &requested_kinds)?; // Resolve entire workspace. let specs = Packages::All.to_package_id_specs(ws)?; let force_all = if metadata_opts.filter_platforms.is_empty() { crate::core::resolver::features::ForceAllTargets::Yes } else { crate::core::resolver::features::ForceAllTargets::No }; // Note that even with --filter-platform we end up downloading host dependencies as well, // as that is the behavior of download_accessible. let ws_resolve = ops::resolve_ws_with_opts( ws, &target_data, &requested_kinds, &metadata_opts.cli_features, &specs, HasDevUnits::Yes, force_all, )?; let package_map: BTreeMap = ws_resolve .pkg_set .packages() // This is a little lazy, but serde doesn't handle Rc fields very well. .map(|pkg| (pkg.package_id(), Package::clone(pkg))) .collect(); // Start from the workspace roots, and recurse through filling out the // map, filtering targets as necessary. let mut node_map = BTreeMap::new(); for member_pkg in ws.members() { build_resolve_graph_r( &mut node_map, member_pkg.package_id(), &ws_resolve.targeted_resolve, &package_map, &target_data, &requested_kinds, ); } // Get a Vec of Packages. let actual_packages = package_map .into_iter() .filter_map(|(pkg_id, pkg)| node_map.get(&pkg_id).map(|_| pkg)) .map(|pkg| pkg.serialized()) .collect(); let mr = MetadataResolve { nodes: node_map.into_iter().map(|(_pkg_id, node)| node).collect(), root: ws.current_opt().map(|pkg| pkg.package_id()), }; Ok((actual_packages, mr)) } fn build_resolve_graph_r( node_map: &mut BTreeMap, pkg_id: PackageId, resolve: &Resolve, package_map: &BTreeMap, target_data: &RustcTargetData<'_>, requested_kinds: &[CompileKind], ) { if node_map.contains_key(&pkg_id) { return; } // This normalizes the IDs so that they are consistent between the // `packages` array and the `resolve` map. 
This is a bit of a hack to // compensate for the fact that // SourceKind::Git(GitReference::Branch("master")) is the same as // SourceKind::Git(GitReference::DefaultBranch). We want IDs in the JSON // to be opaque, and compare with basic string equality, so this will // always prefer the style of ID in the Package instead of the resolver. // Cargo generally only exposes PackageIds from the Package struct, and // AFAIK this is the only place where the resolver variant is exposed. // // This diverges because the SourceIds created for Packages are built // based on the Dependency declaration, but the SourceIds in the resolver // are deserialized from Cargo.lock. Cargo.lock may have been generated by // an older (or newer!) version of Cargo which uses a different style. let normalize_id = |id| -> PackageId { *package_map.get_key_value(&id).unwrap().0 }; let features = resolve.features(pkg_id).to_vec(); let deps: Vec = resolve .deps(pkg_id) .filter(|(_dep_id, deps)| { if requested_kinds == [CompileKind::Host] { true } else { requested_kinds.iter().any(|kind| { deps.iter() .any(|dep| target_data.dep_platform_activated(dep, *kind)) }) } }) .filter_map(|(dep_id, deps)| { let mut dep_kinds: Vec<_> = deps.iter().map(DepKindInfo::from).collect(); dep_kinds.sort(); package_map .get(&dep_id) .and_then(|pkg| pkg.targets().iter().find(|t| t.is_lib())) .and_then(|lib_target| { resolve .extern_crate_name_and_dep_name(pkg_id, dep_id, lib_target) .map(|(ext_crate_name, _)| ext_crate_name) .ok() }) .map(|name| Dep { name, pkg: normalize_id(dep_id), dep_kinds, }) }) .collect(); let dumb_deps: Vec = deps.iter().map(|dep| normalize_id(dep.pkg)).collect(); let to_visit = dumb_deps.clone(); let node = MetadataResolveNode { id: normalize_id(pkg_id), dependencies: dumb_deps, deps, features, }; node_map.insert(pkg_id, node); for dep_id in to_visit { build_resolve_graph_r( node_map, dep_id, resolve, package_map, target_data, requested_kinds, ); } } 
cargo-0.66.0/src/cargo/ops/cargo_package.rs000066400000000000000000001024351432416201200204750ustar00rootroot00000000000000use std::collections::{BTreeSet, HashMap}; use std::fs::{self, File}; use std::io::prelude::*; use std::io::SeekFrom; use std::path::{Path, PathBuf}; use std::rc::Rc; use std::sync::Arc; use std::task::Poll; use crate::core::compiler::{BuildConfig, CompileMode, DefaultExecutor, Executor}; use crate::core::resolver::CliFeatures; use crate::core::{Feature, Shell, Verbosity, Workspace}; use crate::core::{Package, PackageId, PackageSet, Resolve, SourceId}; use crate::sources::PathSource; use crate::util::errors::CargoResult; use crate::util::toml::TomlManifest; use crate::util::{self, restricted_names, Config, FileLock}; use crate::{drop_println, ops}; use anyhow::Context as _; use cargo_util::paths; use flate2::read::GzDecoder; use flate2::{Compression, GzBuilder}; use log::debug; use serde::Serialize; use tar::{Archive, Builder, EntryType, Header, HeaderMode}; pub struct PackageOpts<'cfg> { pub config: &'cfg Config, pub list: bool, pub check_metadata: bool, pub allow_dirty: bool, pub verify: bool, pub jobs: Option, pub keep_going: bool, pub to_package: ops::Packages, pub targets: Vec, pub cli_features: CliFeatures, } const ORIGINAL_MANIFEST_FILE: &str = "Cargo.toml.orig"; const VCS_INFO_FILE: &str = ".cargo_vcs_info.json"; struct ArchiveFile { /// The relative path in the archive (not including the top-level package /// name directory). rel_path: PathBuf, /// String variant of `rel_path`, for convenience. rel_str: String, /// The contents to add to the archive. contents: FileContents, } enum FileContents { /// Absolute path to the file on disk to add to the archive. OnDisk(PathBuf), /// Generates a file. Generated(GeneratedFile), } enum GeneratedFile { /// Generates `Cargo.toml` by rewriting the original. Manifest, /// Generates `Cargo.lock` in some cases (like if there is a binary). 
Lockfile, /// Adds a `.cargo_vcs_info.json` file if in a (clean) git repo. VcsInfo(VcsInfo), } #[derive(Serialize)] struct VcsInfo { git: GitVcsInfo, /// Path to the package within repo (empty string if root). / not \ path_in_vcs: String, } #[derive(Serialize)] struct GitVcsInfo { sha1: String, } pub fn package_one( ws: &Workspace<'_>, pkg: &Package, opts: &PackageOpts<'_>, ) -> CargoResult> { let config = ws.config(); let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), config); src.update()?; if opts.check_metadata { check_metadata(pkg, config)?; } if !pkg.manifest().exclude().is_empty() && !pkg.manifest().include().is_empty() { config.shell().warn( "both package.include and package.exclude are specified; \ the exclude list will be ignored", )?; } let src_files = src.list_files(pkg)?; // Check (git) repository state, getting the current commit hash if not // dirty. let vcs_info = if !opts.allow_dirty { // This will error if a dirty repo is found. check_repo_state(pkg, &src_files, config)? } else { None }; let ar_files = build_ar_list(ws, pkg, src_files, vcs_info)?; if opts.list { for ar_file in ar_files { drop_println!(config, "{}", ar_file.rel_str); } return Ok(None); } // Check that the package dependencies are safe to deploy. for dep in pkg.dependencies() { super::check_dep_has_version(dep, false)?; } let filename = format!("{}-{}.crate", pkg.name(), pkg.version()); let dir = ws.target_dir().join("package"); let mut dst = { let tmp = format!(".{}", filename); dir.open_rw(&tmp, config, "package scratch space")? }; // Package up and test a temporary tarball and only move it to the final // location if it actually passes all our tests. Any previously existing // tarball can be assumed as corrupt or invalid, so we just blow it away if // it exists. 
config .shell() .status("Packaging", pkg.package_id().to_string())?; dst.file().set_len(0)?; tar(ws, pkg, ar_files, dst.file(), &filename) .with_context(|| "failed to prepare local package for uploading")?; if opts.verify { dst.seek(SeekFrom::Start(0))?; run_verify(ws, pkg, &dst, opts).with_context(|| "failed to verify package tarball")? } dst.seek(SeekFrom::Start(0))?; let src_path = dst.path(); let dst_path = dst.parent().join(&filename); fs::rename(&src_path, &dst_path) .with_context(|| "failed to move temporary tarball into final location")?; return Ok(Some(dst)); } pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult>> { let pkgs = ws.members_with_features( &opts.to_package.to_package_id_specs(ws)?, &opts.cli_features, )?; let mut dsts = Vec::with_capacity(pkgs.len()); if ws.root().join("Cargo.lock").exists() { // Make sure the Cargo.lock is up-to-date and valid. let _ = ops::resolve_ws(ws)?; // If Cargo.lock does not exist, it will be generated by `build_lock` // below, and will be validated during the verification step. } for (pkg, cli_features) in pkgs { let result = package_one( ws, pkg, &PackageOpts { config: opts.config, list: opts.list, check_metadata: opts.check_metadata, allow_dirty: opts.allow_dirty, verify: opts.verify, jobs: opts.jobs, keep_going: opts.keep_going, to_package: ops::Packages::Default, targets: opts.targets.clone(), cli_features: cli_features, }, )?; if !opts.list { dsts.push(result.unwrap()); } } if opts.list { // We're just listing, so there's no file output Ok(None) } else { Ok(Some(dsts)) } } /// Builds list of files to archive. 
fn build_ar_list( ws: &Workspace<'_>, pkg: &Package, src_files: Vec, vcs_info: Option, ) -> CargoResult> { let mut result = Vec::new(); let root = pkg.root(); for src_file in src_files { let rel_path = src_file.strip_prefix(&root)?.to_path_buf(); check_filename(&rel_path, &mut ws.config().shell())?; let rel_str = rel_path .to_str() .ok_or_else(|| { anyhow::format_err!("non-utf8 path in source directory: {}", rel_path.display()) })? .to_string(); match rel_str.as_ref() { "Cargo.toml" => { result.push(ArchiveFile { rel_path: PathBuf::from(ORIGINAL_MANIFEST_FILE), rel_str: ORIGINAL_MANIFEST_FILE.to_string(), contents: FileContents::OnDisk(src_file), }); result.push(ArchiveFile { rel_path, rel_str, contents: FileContents::Generated(GeneratedFile::Manifest), }); } "Cargo.lock" => continue, VCS_INFO_FILE | ORIGINAL_MANIFEST_FILE => anyhow::bail!( "invalid inclusion of reserved file name {} in package source", rel_str ), _ => { result.push(ArchiveFile { rel_path, rel_str, contents: FileContents::OnDisk(src_file), }); } } } if pkg.include_lockfile() { result.push(ArchiveFile { rel_path: PathBuf::from("Cargo.lock"), rel_str: "Cargo.lock".to_string(), contents: FileContents::Generated(GeneratedFile::Lockfile), }); } if let Some(vcs_info) = vcs_info { result.push(ArchiveFile { rel_path: PathBuf::from(VCS_INFO_FILE), rel_str: VCS_INFO_FILE.to_string(), contents: FileContents::Generated(GeneratedFile::VcsInfo(vcs_info)), }); } if let Some(license_file) = &pkg.manifest().metadata().license_file { let license_path = Path::new(license_file); let abs_file_path = paths::normalize_path(&pkg.root().join(license_path)); if abs_file_path.exists() { check_for_file_and_add( "license-file", license_path, abs_file_path, pkg, &mut result, ws, )?; } else { let rel_msg = if license_path.is_absolute() { "".to_string() } else { format!(" (relative to `{}`)", pkg.root().display()) }; ws.config().shell().warn(&format!( "license-file `{}` does not appear to exist{}.\n\ Please update the 
license-file setting in the manifest at `{}`\n\ This may become a hard error in the future.", license_path.display(), rel_msg, pkg.manifest_path().display() ))?; } } if let Some(readme) = &pkg.manifest().metadata().readme { let readme_path = Path::new(readme); let abs_file_path = paths::normalize_path(&pkg.root().join(readme_path)); if abs_file_path.exists() { check_for_file_and_add("readme", readme_path, abs_file_path, pkg, &mut result, ws)?; } } result.sort_unstable_by(|a, b| a.rel_path.cmp(&b.rel_path)); Ok(result) } fn check_for_file_and_add( label: &str, file_path: &Path, abs_file_path: PathBuf, pkg: &Package, result: &mut Vec, ws: &Workspace<'_>, ) -> CargoResult<()> { match abs_file_path.strip_prefix(&pkg.root()) { Ok(rel_file_path) => { if !result.iter().any(|ar| ar.rel_path == rel_file_path) { result.push(ArchiveFile { rel_path: rel_file_path.to_path_buf(), rel_str: rel_file_path .to_str() .expect("everything was utf8") .to_string(), contents: FileContents::OnDisk(abs_file_path), }) } } Err(_) => { // The file exists somewhere outside of the package. let file_name = file_path.file_name().unwrap(); if result .iter() .any(|ar| ar.rel_path.file_name().unwrap() == file_name) { ws.config().shell().warn(&format!( "{} `{}` appears to be a path outside of the package, \ but there is already a file named `{}` in the root of the package. \ The archived crate will contain the copy in the root of the package. \ Update the {} to point to the path relative \ to the root of the package to remove this warning.", label, file_path.display(), file_name.to_str().unwrap(), label, ))?; } else { result.push(ArchiveFile { rel_path: PathBuf::from(file_name), rel_str: file_name.to_str().unwrap().to_string(), contents: FileContents::OnDisk(abs_file_path), }) } } } Ok(()) } /// Construct `Cargo.lock` for the package to be published. 
fn build_lock(ws: &Workspace<'_>, orig_pkg: &Package) -> CargoResult { let config = ws.config(); let orig_resolve = ops::load_pkg_lockfile(ws)?; // Convert Package -> TomlManifest -> Manifest -> Package let toml_manifest = Rc::new( orig_pkg .manifest() .original() .prepare_for_publish(ws, orig_pkg.root())?, ); let package_root = orig_pkg.root(); let source_id = orig_pkg.package_id().source_id(); let (manifest, _nested_paths) = TomlManifest::to_real_manifest(&toml_manifest, source_id, package_root, config)?; let new_pkg = Package::new(manifest, orig_pkg.manifest_path()); // Regenerate Cargo.lock using the old one as a guide. let tmp_ws = Workspace::ephemeral(new_pkg, ws.config(), None, true)?; let (pkg_set, mut new_resolve) = ops::resolve_ws(&tmp_ws)?; if let Some(orig_resolve) = orig_resolve { compare_resolve(config, tmp_ws.current()?, &orig_resolve, &new_resolve)?; } check_yanked( config, &pkg_set, &new_resolve, "consider updating to a version that is not yanked", )?; ops::resolve_to_string(&tmp_ws, &mut new_resolve) } // Checks that the package has some piece of metadata that a human can // use to tell what the package is about. fn check_metadata(pkg: &Package, config: &Config) -> CargoResult<()> { let md = pkg.manifest().metadata(); let mut missing = vec![]; macro_rules! lacking { ($( $($field: ident)||* ),*) => {{ $( if $(md.$field.as_ref().map_or(true, |s| s.is_empty()))&&* { $(missing.push(stringify!($field).replace("_", "-"));)* } )* }} } lacking!( description, license || license_file, documentation || homepage || repository ); if !missing.is_empty() { let mut things = missing[..missing.len() - 1].join(", "); // `things` will be empty if and only if its length is 1 (i.e., the only case // to have no `or`). 
if !things.is_empty() { things.push_str(" or "); } things.push_str(missing.last().unwrap()); config.shell().warn(&format!( "manifest has no {things}.\n\ See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.", things = things ))? } Ok(()) } /// Checks if the package source is in a *git* DVCS repository. If *git*, and /// the source is *dirty* (e.g., has uncommitted changes) then `bail!` with an /// informative message. Otherwise return the sha1 hash of the current *HEAD* /// commit, or `None` if no repo is found. fn check_repo_state( p: &Package, src_files: &[PathBuf], config: &Config, ) -> CargoResult> { if let Ok(repo) = git2::Repository::discover(p.root()) { if let Some(workdir) = repo.workdir() { debug!("found a git repo at {:?}", workdir); let path = p.manifest_path(); let path = path.strip_prefix(workdir).unwrap_or(path); if let Ok(status) = repo.status_file(path) { if (status & git2::Status::IGNORED).is_empty() { debug!( "found (git) Cargo.toml at {:?} in workdir {:?}", path, workdir ); let path_in_vcs = path .parent() .and_then(|p| p.to_str()) .unwrap_or("") .replace("\\", "/"); return Ok(Some(VcsInfo { git: git(p, src_files, &repo)?, path_in_vcs, })); } } config.shell().verbose(|shell| { shell.warn(format!( "No (git) Cargo.toml found at `{}` in workdir `{}`", path.display(), workdir.display() )) })?; } } else { config.shell().verbose(|shell| { shell.warn(format!("No (git) VCS found for `{}`", p.root().display())) })?; } // No VCS with a checked in `Cargo.toml` found, so we don't know if the // directory is dirty or not, thus we have to assume that it's clean. return Ok(None); fn git(p: &Package, src_files: &[PathBuf], repo: &git2::Repository) -> CargoResult { // This is a collection of any dirty or untracked files. 
This covers: // - new/modified/deleted/renamed/type change (index or worktree) // - untracked files (which are "new" worktree files) // - ignored (in case the user has an `include` directive that // conflicts with .gitignore). let mut dirty_files = Vec::new(); collect_statuses(repo, &mut dirty_files)?; // Include each submodule so that the error message can provide // specifically *which* files in a submodule are modified. status_submodules(repo, &mut dirty_files)?; // Find the intersection of dirty in git, and the src_files that would // be packaged. This is a lazy n^2 check, but seems fine with // thousands of files. let dirty_src_files: Vec = src_files .iter() .filter(|src_file| dirty_files.iter().any(|path| src_file.starts_with(path))) .map(|path| { path.strip_prefix(p.root()) .unwrap_or(path) .display() .to_string() }) .collect(); if dirty_src_files.is_empty() { let rev_obj = repo.revparse_single("HEAD")?; Ok(GitVcsInfo { sha1: rev_obj.id().to_string(), }) } else { anyhow::bail!( "{} files in the working directory contain changes that were \ not yet committed into git:\n\n{}\n\n\ to proceed despite this and include the uncommitted changes, pass the `--allow-dirty` flag", dirty_src_files.len(), dirty_src_files.join("\n") ) } } // Helper to collect dirty statuses for a single repo. fn collect_statuses( repo: &git2::Repository, dirty_files: &mut Vec, ) -> CargoResult<()> { let mut status_opts = git2::StatusOptions::new(); // Exclude submodules, as they are being handled manually by recursing // into each one so that details about specific files can be // retrieved. 
status_opts .exclude_submodules(true) .include_ignored(true) .include_untracked(true); let repo_statuses = repo.statuses(Some(&mut status_opts)).with_context(|| { format!( "failed to retrieve git status from repo {}", repo.path().display() ) })?; let workdir = repo.workdir().unwrap(); let this_dirty = repo_statuses.iter().filter_map(|entry| { let path = entry.path().expect("valid utf-8 path"); if path.ends_with("Cargo.lock") && entry.status() == git2::Status::IGNORED { // It is OK to include Cargo.lock even if it is ignored. return None; } // Use an absolute path, so that comparing paths is easier // (particularly with submodules). Some(workdir.join(path)) }); dirty_files.extend(this_dirty); Ok(()) } // Helper to collect dirty statuses while recursing into submodules. fn status_submodules( repo: &git2::Repository, dirty_files: &mut Vec, ) -> CargoResult<()> { for submodule in repo.submodules()? { // Ignore submodules that don't open, they are probably not initialized. // If its files are required, then the verification step should fail. if let Ok(sub_repo) = submodule.open() { status_submodules(&sub_repo, dirty_files)?; collect_statuses(&sub_repo, dirty_files)?; } } Ok(()) } } fn tar( ws: &Workspace<'_>, pkg: &Package, ar_files: Vec, dst: &File, filename: &str, ) -> CargoResult<()> { // Prepare the encoder and its header. let filename = Path::new(filename); let encoder = GzBuilder::new() .filename(paths::path2bytes(filename)?) .write(dst, Compression::best()); // Put all package files into a compressed archive. 
let mut ar = Builder::new(encoder); let config = ws.config(); let base_name = format!("{}-{}", pkg.name(), pkg.version()); let base_path = Path::new(&base_name); for ar_file in ar_files { let ArchiveFile { rel_path, rel_str, contents, } = ar_file; let ar_path = base_path.join(&rel_path); config .shell() .verbose(|shell| shell.status("Archiving", &rel_str))?; let mut header = Header::new_gnu(); match contents { FileContents::OnDisk(disk_path) => { let mut file = File::open(&disk_path).with_context(|| { format!("failed to open for archiving: `{}`", disk_path.display()) })?; let metadata = file.metadata().with_context(|| { format!("could not learn metadata for: `{}`", disk_path.display()) })?; header.set_metadata_in_mode(&metadata, HeaderMode::Deterministic); header.set_cksum(); ar.append_data(&mut header, &ar_path, &mut file) .with_context(|| { format!("could not archive source file `{}`", disk_path.display()) })?; } FileContents::Generated(generated_kind) => { let contents = match generated_kind { GeneratedFile::Manifest => pkg.to_registry_toml(ws)?, GeneratedFile::Lockfile => build_lock(ws, pkg)?, GeneratedFile::VcsInfo(ref s) => serde_json::to_string_pretty(s)?, }; header.set_entry_type(EntryType::file()); header.set_mode(0o644); header.set_size(contents.len() as u64); // use something nonzero to avoid rust-lang/cargo#9512 header.set_mtime(1); header.set_cksum(); ar.append_data(&mut header, &ar_path, contents.as_bytes()) .with_context(|| format!("could not archive source file `{}`", rel_str))?; } } } let encoder = ar.into_inner()?; encoder.finish()?; Ok(()) } /// Generate warnings when packaging Cargo.lock, and the resolve have changed. 
fn compare_resolve( config: &Config, current_pkg: &Package, orig_resolve: &Resolve, new_resolve: &Resolve, ) -> CargoResult<()> { if config.shell().verbosity() != Verbosity::Verbose { return Ok(()); } let new_set: BTreeSet = new_resolve.iter().collect(); let orig_set: BTreeSet = orig_resolve.iter().collect(); let added = new_set.difference(&orig_set); // Removed entries are ignored, this is used to quickly find hints for why // an entry changed. let removed: Vec<&PackageId> = orig_set.difference(&new_set).collect(); for pkg_id in added { if pkg_id.name() == current_pkg.name() && pkg_id.version() == current_pkg.version() { // Skip the package that is being created, since its SourceId // (directory) changes. continue; } // Check for candidates where the source has changed (such as [patch] // or a dependency with multiple sources like path/version). let removed_candidates: Vec<&PackageId> = removed .iter() .filter(|orig_pkg_id| { orig_pkg_id.name() == pkg_id.name() && orig_pkg_id.version() == pkg_id.version() }) .cloned() .collect(); let extra = match removed_candidates.len() { 0 => { // This can happen if the original was out of date. let previous_versions: Vec<&PackageId> = removed .iter() .filter(|orig_pkg_id| orig_pkg_id.name() == pkg_id.name()) .cloned() .collect(); match previous_versions.len() { 0 => String::new(), 1 => format!( ", previous version was `{}`", previous_versions[0].version() ), _ => format!( ", previous versions were: {}", previous_versions .iter() .map(|pkg_id| format!("`{}`", pkg_id.version())) .collect::>() .join(", ") ), } } 1 => { // This can happen for multi-sourced dependencies like // `{path="...", version="..."}` or `[patch]` replacement. // `[replace]` is not captured in Cargo.lock. format!( ", was originally sourced from `{}`", removed_candidates[0].source_id() ) } _ => { // I don't know if there is a way to actually trigger this, // but handle it just in case. 
let comma_list = removed_candidates .iter() .map(|pkg_id| format!("`{}`", pkg_id.source_id())) .collect::>() .join(", "); format!( ", was originally sourced from one of these sources: {}", comma_list ) } }; let msg = format!( "package `{}` added to the packaged Cargo.lock file{}", pkg_id, extra ); config.shell().note(msg)?; } Ok(()) } pub fn check_yanked( config: &Config, pkg_set: &PackageSet<'_>, resolve: &Resolve, hint: &str, ) -> CargoResult<()> { // Checking the yanked status involves taking a look at the registry and // maybe updating files, so be sure to lock it here. let _lock = config.acquire_package_cache_lock()?; let mut sources = pkg_set.sources_mut(); let mut pending: Vec = resolve.iter().collect(); let mut results = Vec::new(); for (_id, source) in sources.sources_mut() { source.invalidate_cache(); } while !pending.is_empty() { pending.retain(|pkg_id| { if let Some(source) = sources.get_mut(pkg_id.source_id()) { match source.is_yanked(*pkg_id) { Poll::Ready(result) => results.push((*pkg_id, result)), Poll::Pending => return true, } } false }); for (_id, source) in sources.sources_mut() { source.block_until_ready()?; } } for (pkg_id, is_yanked) in results { if is_yanked? 
{ config.shell().warn(format!( "package `{}` in Cargo.lock is yanked in registry `{}`, {}", pkg_id, pkg_id.source_id().display_registry_name(), hint ))?; } } Ok(()) } fn run_verify( ws: &Workspace<'_>, pkg: &Package, tar: &FileLock, opts: &PackageOpts<'_>, ) -> CargoResult<()> { let config = ws.config(); config.shell().status("Verifying", pkg)?; let f = GzDecoder::new(tar.file()); let dst = tar .parent() .join(&format!("{}-{}", pkg.name(), pkg.version())); if dst.exists() { paths::remove_dir_all(&dst)?; } let mut archive = Archive::new(f); // We don't need to set the Modified Time, as it's not relevant to verification // and it errors on filesystems that don't support setting a modified timestamp archive.set_preserve_mtime(false); archive.unpack(dst.parent().unwrap())?; // Manufacture an ephemeral workspace to ensure that even if the top-level // package has a workspace we can still build our new crate. let id = SourceId::for_path(&dst)?; let mut src = PathSource::new(&dst, id, ws.config()); let new_pkg = src.root_package()?; let pkg_fingerprint = hash_all(&dst)?; let ws = Workspace::ephemeral(new_pkg, config, None, true)?; let rustc_args = if pkg .manifest() .unstable_features() .require(Feature::public_dependency()) .is_ok() { // FIXME: Turn this on at some point in the future //Some(vec!["-D exported_private_dependencies".to_string()]) Some(vec![]) } else { None }; let exec: Arc = Arc::new(DefaultExecutor); ops::compile_with_exec( &ws, &ops::CompileOptions { build_config: BuildConfig::new( config, opts.jobs, opts.keep_going, &opts.targets, CompileMode::Build, )?, cli_features: opts.cli_features.clone(), spec: ops::Packages::Packages(Vec::new()), filter: ops::CompileFilter::Default { required_features_filterable: true, }, target_rustdoc_args: None, target_rustc_args: rustc_args, target_rustc_crate_types: None, local_rustdoc_args: None, rustdoc_document_private_items: false, honor_rust_version: true, }, &exec, )?; // Check that `build.rs` didn't modify any files 
in the `src` directory. let ws_fingerprint = hash_all(&dst)?; if pkg_fingerprint != ws_fingerprint { let changes = report_hash_difference(&pkg_fingerprint, &ws_fingerprint); anyhow::bail!( "Source directory was modified by build.rs during cargo publish. \ Build scripts should not modify anything outside of OUT_DIR.\n\ {}\n\n\ To proceed despite this, pass the `--no-verify` flag.", changes ) } Ok(()) } fn hash_all(path: &Path) -> CargoResult> { fn wrap(path: &Path) -> CargoResult> { let mut result = HashMap::new(); let walker = walkdir::WalkDir::new(path).into_iter(); for entry in walker.filter_entry(|e| !(e.depth() == 1 && e.file_name() == "target")) { let entry = entry?; let file_type = entry.file_type(); if file_type.is_file() { let file = File::open(entry.path())?; let hash = util::hex::hash_u64_file(&file)?; result.insert(entry.path().to_path_buf(), hash); } else if file_type.is_symlink() { let hash = util::hex::hash_u64(&fs::read_link(entry.path())?); result.insert(entry.path().to_path_buf(), hash); } else if file_type.is_dir() { let hash = util::hex::hash_u64(&()); result.insert(entry.path().to_path_buf(), hash); } } Ok(result) } let result = wrap(path).with_context(|| format!("failed to verify output at {:?}", path))?; Ok(result) } fn report_hash_difference(orig: &HashMap, after: &HashMap) -> String { let mut changed = Vec::new(); let mut removed = Vec::new(); for (key, value) in orig { match after.get(key) { Some(after_value) => { if value != after_value { changed.push(key.to_string_lossy()); } } None => removed.push(key.to_string_lossy()), } } let mut added: Vec<_> = after .keys() .filter(|key| !orig.contains_key(*key)) .map(|key| key.to_string_lossy()) .collect(); let mut result = Vec::new(); if !changed.is_empty() { changed.sort_unstable(); result.push(format!("Changed: {}", changed.join("\n\t"))); } if !added.is_empty() { added.sort_unstable(); result.push(format!("Added: {}", added.join("\n\t"))); } if !removed.is_empty() { removed.sort_unstable(); 
result.push(format!("Removed: {}", removed.join("\n\t"))); } assert!(!result.is_empty(), "unexpected empty change detection"); result.join("\n") } // It can often be the case that files of a particular name on one platform // can't actually be created on another platform. For example files with colons // in the name are allowed on Unix but not on Windows. // // To help out in situations like this, issue about weird filenames when // packaging as a "heads up" that something may not work on other platforms. fn check_filename(file: &Path, shell: &mut Shell) -> CargoResult<()> { let name = match file.file_name() { Some(name) => name, None => return Ok(()), }; let name = match name.to_str() { Some(name) => name, None => anyhow::bail!( "path does not have a unicode filename which may not unpack \ on all platforms: {}", file.display() ), }; let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*']; if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) { anyhow::bail!( "cannot package a filename with a special character `{}`: {}", c, file.display() ) } if restricted_names::is_windows_reserved_path(file) { shell.warn(format!( "file {} is a reserved Windows filename, \ it will not work on Windows platforms", file.display() ))?; } Ok(()) } cargo-0.66.0/src/cargo/ops/cargo_pkgid.rs000066400000000000000000000010431432416201200201710ustar00rootroot00000000000000use crate::core::{PackageIdSpec, Workspace}; use crate::ops; use crate::util::CargoResult; pub fn pkgid(ws: &Workspace<'_>, spec: Option<&str>) -> CargoResult { let resolve = match ops::load_pkg_lockfile(ws)? 
{ Some(resolve) => resolve, None => anyhow::bail!("a Cargo.lock must exist for this command"), }; let pkgid = match spec { Some(spec) => PackageIdSpec::query_str(spec, resolve.iter())?, None => ws.current()?.package_id(), }; Ok(PackageIdSpec::from_package_id(pkgid)) } cargo-0.66.0/src/cargo/ops/cargo_read_manifest.rs000066400000000000000000000165361432416201200217110ustar00rootroot00000000000000use std::collections::{HashMap, HashSet}; use std::fs; use std::io; use std::path::{Path, PathBuf}; use crate::core::{EitherManifest, Package, PackageId, SourceId}; use crate::util::errors::CargoResult; use crate::util::important_paths::find_project_manifest_exact; use crate::util::toml::read_manifest; use crate::util::Config; use cargo_util::paths; use log::{info, trace}; pub fn read_package( path: &Path, source_id: SourceId, config: &Config, ) -> CargoResult<(Package, Vec)> { trace!( "read_package; path={}; source-id={}", path.display(), source_id ); let (manifest, nested) = read_manifest(path, source_id, config)?; let manifest = match manifest { EitherManifest::Real(manifest) => manifest, EitherManifest::Virtual(..) 
=> anyhow::bail!( "found a virtual manifest at `{}` instead of a package \ manifest", path.display() ), }; Ok((Package::new(manifest, path), nested)) } pub fn read_packages( path: &Path, source_id: SourceId, config: &Config, ) -> CargoResult> { let mut all_packages = HashMap::new(); let mut visited = HashSet::::new(); let mut errors = Vec::::new(); trace!( "looking for root package: {}, source_id={}", path.display(), source_id ); walk(path, &mut |dir| { trace!("looking for child package: {}", dir.display()); // Don't recurse into hidden/dot directories unless we're at the toplevel if dir != path { let name = dir.file_name().and_then(|s| s.to_str()); if name.map(|s| s.starts_with('.')) == Some(true) { return Ok(false); } // Don't automatically discover packages across git submodules if dir.join(".git").exists() { return Ok(false); } } // Don't ever look at target directories if dir.file_name().and_then(|s| s.to_str()) == Some("target") && has_manifest(dir.parent().unwrap()) { return Ok(false); } if has_manifest(dir) { read_nested_packages( dir, &mut all_packages, source_id, config, &mut visited, &mut errors, )?; } Ok(true) })?; if all_packages.is_empty() { match errors.pop() { Some(err) => Err(err), None => { if find_project_manifest_exact(path, "cargo.toml").is_ok() { Err(anyhow::format_err!( "Could not find Cargo.toml in `{}`, but found cargo.toml please try to rename it to Cargo.toml", path.display() )) } else { Err(anyhow::format_err!( "Could not find Cargo.toml in `{}`", path.display() )) } } } } else { Ok(all_packages.into_iter().map(|(_, v)| v).collect()) } } fn walk(path: &Path, callback: &mut dyn FnMut(&Path) -> CargoResult) -> CargoResult<()> { if !callback(path)? { trace!("not processing {}", path.display()); return Ok(()); } // Ignore any permission denied errors because temporary directories // can often have some weird permissions on them. 
let dirs = match fs::read_dir(path) { Ok(dirs) => dirs, Err(ref e) if e.kind() == io::ErrorKind::PermissionDenied => return Ok(()), Err(e) => { let cx = format!("failed to read directory `{}`", path.display()); let e = anyhow::Error::from(e); return Err(e.context(cx)); } }; for dir in dirs { let dir = dir?; if dir.file_type()?.is_dir() { walk(&dir.path(), callback)?; } } Ok(()) } fn has_manifest(path: &Path) -> bool { find_project_manifest_exact(path, "Cargo.toml").is_ok() } fn read_nested_packages( path: &Path, all_packages: &mut HashMap, source_id: SourceId, config: &Config, visited: &mut HashSet, errors: &mut Vec, ) -> CargoResult<()> { if !visited.insert(path.to_path_buf()) { return Ok(()); } let manifest_path = find_project_manifest_exact(path, "Cargo.toml")?; let (manifest, nested) = match read_manifest(&manifest_path, source_id, config) { Err(err) => { // Ignore malformed manifests found on git repositories // // git source try to find and read all manifests from the repository // but since it's not possible to exclude folders from this search // it's safer to ignore malformed manifests to avoid // // TODO: Add a way to exclude folders? info!( "skipping malformed package found at `{}`", path.to_string_lossy() ); errors.push(err.into()); return Ok(()); } Ok(tuple) => tuple, }; let manifest = match manifest { EitherManifest::Real(manifest) => manifest, EitherManifest::Virtual(..) => return Ok(()), }; let pkg = Package::new(manifest, &manifest_path); let pkg_id = pkg.package_id(); use std::collections::hash_map::Entry; match all_packages.entry(pkg_id) { Entry::Vacant(v) => { v.insert(pkg); } Entry::Occupied(_) => { // We can assume a package with publish = false isn't intended to be seen // by users so we can hide the warning about those since the user is unlikely // to care about those cases. 
if pkg.publish().is_none() { let _ = config.shell().warn(format!( "skipping duplicate package `{}` found at `{}`", pkg.name(), path.display() )); } } } // Registry sources are not allowed to have `path=` dependencies because // they're all translated to actual registry dependencies. // // We normalize the path here ensure that we don't infinitely walk around // looking for crates. By normalizing we ensure that we visit this crate at // most once. // // TODO: filesystem/symlink implications? if !source_id.is_registry() { for p in nested.iter() { let path = paths::normalize_path(&path.join(p)); let result = read_nested_packages(&path, all_packages, source_id, config, visited, errors); // Ignore broken manifests found on git repositories. // // A well formed manifest might still fail to load due to reasons // like referring to a "path" that requires an extra build step. // // See https://github.com/rust-lang/cargo/issues/6822. if let Err(err) = result { if source_id.is_git() { info!( "skipping nested package found at `{}`: {:?}", path.display(), &err, ); errors.push(err); } else { return Err(err); } } } } Ok(()) } cargo-0.66.0/src/cargo/ops/cargo_run.rs000066400000000000000000000062201432416201200177010ustar00rootroot00000000000000use std::ffi::OsString; use std::iter; use std::path::Path; use crate::core::compiler::UnitOutput; use crate::core::{TargetKind, Workspace}; use crate::ops; use crate::util::CargoResult; pub fn run( ws: &Workspace<'_>, options: &ops::CompileOptions, args: &[OsString], ) -> CargoResult<()> { let config = ws.config(); if options.filter.contains_glob_patterns() { anyhow::bail!("`cargo run` does not support glob patterns on target selection") } // We compute the `bins` here *just for diagnosis*. The actual set of // packages to be run is determined by the `ops::compile` call below. 
let packages = options.spec.get_packages(ws)?; let bins: Vec<_> = packages .into_iter() .flat_map(|pkg| { iter::repeat(pkg).zip(pkg.manifest().targets().iter().filter(|target| { !target.is_lib() && !target.is_custom_build() && if !options.filter.is_specific() { target.is_bin() } else { options.filter.target_run(target) } })) }) .collect(); if bins.is_empty() { if !options.filter.is_specific() { anyhow::bail!("a bin target must be available for `cargo run`") } else { // This will be verified in `cargo_compile`. } } if bins.len() == 1 { let target = bins[0].1; if let TargetKind::ExampleLib(..) = target.kind() { anyhow::bail!( "example target `{}` is a library and cannot be executed", target.name() ) } } if bins.len() > 1 { if !options.filter.is_specific() { let mut names: Vec<&str> = bins .into_iter() .map(|(_pkg, target)| target.name()) .collect(); names.sort(); anyhow::bail!( "`cargo run` could not determine which binary to run. \ Use the `--bin` option to specify a binary, \ or the `default-run` manifest key.\n\ available binaries: {}", names.join(", ") ) } else { anyhow::bail!( "`cargo run` can run at most one executable, but \ multiple were specified" ) } } // `cargo run` is only compatible with one `--target` flag at most options.build_config.single_requested_kind()?; let compile = ops::compile(ws, options)?; assert_eq!(compile.binaries.len(), 1); let UnitOutput { unit, path, script_meta, } = &compile.binaries[0]; let exe = match path.strip_prefix(config.cwd()) { Ok(path) if path.file_name() == Some(path.as_os_str()) => Path::new(".").join(path), Ok(path) => path.to_path_buf(), Err(_) => path.to_path_buf(), }; let pkg = bins[0].0; let mut process = compile.target_process(exe, unit.kind, pkg, *script_meta)?; process.args(args).cwd(config.cwd()); config.shell().status("Running", process.to_string())?; process.exec_replace() } cargo-0.66.0/src/cargo/ops/cargo_test.rs000066400000000000000000000333331432416201200200610ustar00rootroot00000000000000use 
crate::core::compiler::{Compilation, CompileKind, Doctest, Metadata, Unit, UnitOutput}; use crate::core::shell::Verbosity; use crate::core::{TargetKind, Workspace}; use crate::ops; use crate::util::errors::CargoResult; use crate::util::{add_path_args, CliError, CliResult, Config}; use anyhow::format_err; use cargo_util::{ProcessBuilder, ProcessError}; use std::ffi::OsString; use std::fmt::Write; use std::path::{Path, PathBuf}; pub struct TestOptions { pub compile_opts: ops::CompileOptions, pub no_run: bool, pub no_fail_fast: bool, } /// The kind of test. /// /// This is needed because `Unit` does not track whether or not something is a /// benchmark. #[derive(Copy, Clone)] enum TestKind { Test, Bench, Doctest, } /// A unit that failed to run. struct UnitTestError { unit: Unit, kind: TestKind, } impl UnitTestError { /// Returns the CLI args needed to target this unit. fn cli_args(&self, ws: &Workspace<'_>, opts: &ops::CompileOptions) -> String { let mut args = if opts.spec.needs_spec_flag(ws) { format!("-p {} ", self.unit.pkg.name()) } else { String::new() }; let mut add = |which| write!(args, "--{which} {}", self.unit.target.name()).unwrap(); match self.kind { TestKind::Test | TestKind::Bench => match self.unit.target.kind() { TargetKind::Lib(_) => args.push_str("--lib"), TargetKind::Bin => add("bin"), TargetKind::Test => add("test"), TargetKind::Bench => add("bench"), TargetKind::ExampleLib(_) | TargetKind::ExampleBin => add("example"), TargetKind::CustomBuild => panic!("unexpected CustomBuild kind"), }, TestKind::Doctest => args.push_str("--doc"), } args } } /// Compiles and runs tests. /// /// On error, the returned [`CliError`] will have the appropriate process exit /// code that Cargo should use. 
pub fn run_tests(ws: &Workspace<'_>, options: &TestOptions, test_args: &[&str]) -> CliResult { let compilation = compile_tests(ws, options)?; if options.no_run { if !options.compile_opts.build_config.emit_json() { display_no_run_information(ws, test_args, &compilation, "unittests")?; } return Ok(()); } let mut errors = run_unit_tests(ws, options, test_args, &compilation, TestKind::Test)?; let doctest_errors = run_doc_tests(ws, options, test_args, &compilation)?; errors.extend(doctest_errors); no_fail_fast_err(ws, &options.compile_opts, &errors) } /// Compiles and runs benchmarks. /// /// On error, the returned [`CliError`] will have the appropriate process exit /// code that Cargo should use. pub fn run_benches(ws: &Workspace<'_>, options: &TestOptions, args: &[&str]) -> CliResult { let compilation = compile_tests(ws, options)?; if options.no_run { if !options.compile_opts.build_config.emit_json() { display_no_run_information(ws, args, &compilation, "benches")?; } return Ok(()); } let mut args = args.to_vec(); args.push("--bench"); let errors = run_unit_tests(ws, options, &args, &compilation, TestKind::Bench)?; no_fail_fast_err(ws, &options.compile_opts, &errors) } fn compile_tests<'a>(ws: &Workspace<'a>, options: &TestOptions) -> CargoResult> { let mut compilation = ops::compile(ws, &options.compile_opts)?; compilation.tests.sort(); Ok(compilation) } /// Runs the unit and integration tests of a package. /// /// Returns a `Vec` of tests that failed when `--no-fail-fast` is used. /// If `--no-fail-fast` is *not* used, then this returns an `Err`. 
fn run_unit_tests( ws: &Workspace<'_>, options: &TestOptions, test_args: &[&str], compilation: &Compilation<'_>, test_kind: TestKind, ) -> Result, CliError> { let config = ws.config(); let cwd = config.cwd(); let mut errors = Vec::new(); for UnitOutput { unit, path, script_meta, } in compilation.tests.iter() { let (exe_display, cmd) = cmd_builds( config, cwd, unit, path, script_meta, test_args, compilation, "unittests", )?; config .shell() .concise(|shell| shell.status("Running", &exe_display))?; config .shell() .verbose(|shell| shell.status("Running", &cmd))?; if let Err(e) = cmd.exec() { let code = fail_fast_code(&e); let unit_err = UnitTestError { unit: unit.clone(), kind: test_kind, }; report_test_error(ws, &options.compile_opts, &unit_err, e); errors.push(unit_err); if !options.no_fail_fast { return Err(CliError::code(code)); } } } Ok(errors) } /// Runs doc tests. /// /// Returns a `Vec` of tests that failed when `--no-fail-fast` is used. /// If `--no-fail-fast` is *not* used, then this returns an `Err`. fn run_doc_tests( ws: &Workspace<'_>, options: &TestOptions, test_args: &[&str], compilation: &Compilation<'_>, ) -> Result, CliError> { let config = ws.config(); let mut errors = Vec::new(); let doctest_xcompile = config.cli_unstable().doctest_xcompile; let doctest_in_workspace = config.cli_unstable().doctest_in_workspace; for doctest_info in &compilation.to_doc_test { let Doctest { args, unstable_opts, unit, linker, script_meta, env, } = doctest_info; if !doctest_xcompile { match unit.kind { CompileKind::Host => {} CompileKind::Target(target) => { if target.short_name() != compilation.host { // Skip doctests, -Zdoctest-xcompile not enabled. 
config.shell().verbose(|shell| { shell.note(format!( "skipping doctests for {} ({}), \ cross-compilation doctests are not yet supported\n\ See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#doctest-xcompile \ for more information.", unit.pkg, unit.target.description_named() )) })?; continue; } } } } config.shell().status("Doc-tests", unit.target.name())?; let mut p = compilation.rustdoc_process(unit, *script_meta)?; for (var, value) in env { p.env(var, value); } p.arg("--crate-name").arg(&unit.target.crate_name()); p.arg("--test"); if doctest_in_workspace { add_path_args(ws, unit, &mut p); // FIXME(swatinem): remove the `unstable-options` once rustdoc stabilizes the `test-run-directory` option p.arg("-Z").arg("unstable-options"); p.arg("--test-run-directory") .arg(unit.pkg.root().to_path_buf()); } else { p.arg(unit.target.src_path().path().unwrap()); } if let CompileKind::Target(target) = unit.kind { // use `rustc_target()` to properly handle JSON target paths p.arg("--target").arg(target.rustc_target()); } if doctest_xcompile { p.arg("-Zunstable-options"); p.arg("--enable-per-target-ignores"); if let Some((runtool, runtool_args)) = compilation.target_runner(unit.kind) { p.arg("--runtool").arg(runtool); for arg in runtool_args { p.arg("--runtool-arg").arg(arg); } } if let Some(linker) = linker { let mut joined = OsString::from("linker="); joined.push(linker); p.arg("-C").arg(joined); } } for &rust_dep in &[ &compilation.deps_output[&unit.kind], &compilation.deps_output[&CompileKind::Host], ] { let mut arg = OsString::from("dependency="); arg.push(rust_dep); p.arg("-L").arg(arg); } for native_dep in compilation.native_dirs.iter() { p.arg("-L").arg(native_dep); } for arg in test_args { p.arg("--test-args").arg(arg); } if config.shell().verbosity() == Verbosity::Quiet { p.arg("--test-args").arg("--quiet"); } p.args(args); if *unstable_opts { p.arg("-Zunstable-options"); } config .shell() .verbose(|shell| shell.status("Running", p.to_string()))?; if 
let Err(e) = p.exec() { let code = fail_fast_code(&e); let unit_err = UnitTestError { unit: unit.clone(), kind: TestKind::Doctest, }; report_test_error(ws, &options.compile_opts, &unit_err, e); errors.push(unit_err); if !options.no_fail_fast { return Err(CliError::code(code)); } } } Ok(errors) } /// Displays human-readable descriptions of the test executables. /// /// This is used when `cargo test --no-run` is used. fn display_no_run_information( ws: &Workspace<'_>, test_args: &[&str], compilation: &Compilation<'_>, exec_type: &str, ) -> CargoResult<()> { let config = ws.config(); let cwd = config.cwd(); for UnitOutput { unit, path, script_meta, } in compilation.tests.iter() { let (exe_display, cmd) = cmd_builds( config, cwd, unit, path, script_meta, test_args, compilation, exec_type, )?; config .shell() .concise(|shell| shell.status("Executable", &exe_display))?; config .shell() .verbose(|shell| shell.status("Executable", &cmd))?; } return Ok(()); } /// Creates a [`ProcessBuilder`] for executing a single test. /// /// Returns a tuple `(exe_display, process)` where `exe_display` is a string /// to display that describes the executable path in a human-readable form. /// `process` is the `ProcessBuilder` to use for executing the test. 
fn cmd_builds( config: &Config, cwd: &Path, unit: &Unit, path: &PathBuf, script_meta: &Option, test_args: &[&str], compilation: &Compilation<'_>, exec_type: &str, ) -> CargoResult<(String, ProcessBuilder)> { let test_path = unit.target.src_path().path().unwrap(); let short_test_path = test_path .strip_prefix(unit.pkg.root()) .unwrap_or(test_path) .display(); let exe_display = match unit.target.kind() { TargetKind::Test | TargetKind::Bench => format!( "{} ({})", short_test_path, path.strip_prefix(cwd).unwrap_or(path).display() ), _ => format!( "{} {} ({})", exec_type, short_test_path, path.strip_prefix(cwd).unwrap_or(path).display() ), }; let mut cmd = compilation.target_process(path, unit.kind, &unit.pkg, *script_meta)?; cmd.args(test_args); if unit.target.harness() && config.shell().verbosity() == Verbosity::Quiet { cmd.arg("--quiet"); } Ok((exe_display, cmd)) } /// Returns the error code to use when *not* using `--no-fail-fast`. /// /// Cargo will return the error code from the test process itself. If some /// other error happened (like a failure to launch the process), then it will /// return a standard 101 error code. /// /// When using `--no-fail-fast`, Cargo always uses the 101 exit code (since /// there may not be just one process to report). fn fail_fast_code(error: &anyhow::Error) -> i32 { if let Some(proc_err) = error.downcast_ref::() { if let Some(code) = proc_err.code { return code; } } 101 } /// Returns the `CliError` when using `--no-fail-fast` and there is at least /// one error. fn no_fail_fast_err( ws: &Workspace<'_>, opts: &ops::CompileOptions, errors: &[UnitTestError], ) -> CliResult { // TODO: This could be improved by combining the flags on a single line when feasible. 
let args: Vec<_> = errors .iter() .map(|unit_err| format!(" `{}`", unit_err.cli_args(ws, opts))) .collect(); let message = match errors.len() { 0 => return Ok(()), 1 => format!("1 target failed:\n{}", args.join("\n")), n => format!("{n} targets failed:\n{}", args.join("\n")), }; Err(anyhow::Error::msg(message).into()) } /// Displays an error on the console about a test failure. fn report_test_error( ws: &Workspace<'_>, opts: &ops::CompileOptions, unit_err: &UnitTestError, test_error: anyhow::Error, ) { let which = match unit_err.kind { TestKind::Test => "test failed", TestKind::Bench => "bench failed", TestKind::Doctest => "doctest failed", }; let mut err = format_err!("{}, to rerun pass `{}`", which, unit_err.cli_args(ws, opts)); // Don't show "process didn't exit successfully" for simple errors. // libtest exits with 101 for normal errors. let is_simple = test_error .downcast_ref::() .and_then(|proc_err| proc_err.code) .map_or(false, |code| code == 101); if !is_simple { err = test_error.context(err); } crate::display_error(&err, &mut ws.config().shell()); } cargo-0.66.0/src/cargo/ops/cargo_uninstall.rs000066400000000000000000000104661432416201200211150ustar00rootroot00000000000000use crate::core::PackageId; use crate::core::{PackageIdSpec, SourceId}; use crate::ops::common_for_install_and_uninstall::*; use crate::sources::PathSource; use crate::util::errors::CargoResult; use crate::util::Config; use crate::util::Filesystem; use anyhow::bail; use cargo_util::paths; use std::collections::BTreeSet; use std::env; pub fn uninstall( root: Option<&str>, specs: Vec<&str>, bins: &[String], config: &Config, ) -> CargoResult<()> { if specs.len() > 1 && !bins.is_empty() { bail!("A binary can only be associated with a single installed package, specifying multiple specs with --bin is redundant."); } let root = resolve_root(root, config)?; let scheduled_error = if specs.len() == 1 { uninstall_one(&root, specs[0], bins, config)?; false } else if specs.is_empty() { 
uninstall_cwd(&root, bins, config)?; false } else { let mut succeeded = vec![]; let mut failed = vec![]; for spec in specs { let root = root.clone(); match uninstall_one(&root, spec, bins, config) { Ok(()) => succeeded.push(spec), Err(e) => { crate::display_error(&e, &mut config.shell()); failed.push(spec) } } } let mut summary = vec![]; if !succeeded.is_empty() { summary.push(format!( "Successfully uninstalled {}!", succeeded.join(", ") )); } if !failed.is_empty() { summary.push(format!( "Failed to uninstall {} (see error(s) above).", failed.join(", ") )); } if !succeeded.is_empty() || !failed.is_empty() { config.shell().status("Summary", summary.join(" "))?; } !failed.is_empty() }; if scheduled_error { bail!("some packages failed to uninstall"); } Ok(()) } pub fn uninstall_one( root: &Filesystem, spec: &str, bins: &[String], config: &Config, ) -> CargoResult<()> { let tracker = InstallTracker::load(config, root)?; let all_pkgs = tracker.all_installed_bins().map(|(pkg_id, _set)| *pkg_id); let pkgid = PackageIdSpec::query_str(spec, all_pkgs)?; uninstall_pkgid(root, tracker, pkgid, bins, config) } fn uninstall_cwd(root: &Filesystem, bins: &[String], config: &Config) -> CargoResult<()> { let tracker = InstallTracker::load(config, root)?; let source_id = SourceId::for_path(config.cwd())?; let mut src = path_source(source_id, config)?; let pkg = select_pkg( &mut src, None, |path: &mut PathSource<'_>| path.read_packages(), config, )?; let pkgid = pkg.package_id(); uninstall_pkgid(root, tracker, pkgid, bins, config) } fn uninstall_pkgid( root: &Filesystem, mut tracker: InstallTracker, pkgid: PackageId, bins: &[String], config: &Config, ) -> CargoResult<()> { let mut to_remove = Vec::new(); let installed = match tracker.installed_bins(pkgid) { Some(bins) => bins.clone(), None => bail!("package `{}` is not installed", pkgid), }; let dst = root.join("bin").into_path_unlocked(); for bin in &installed { let bin = dst.join(bin); if !bin.exists() { bail!( "corrupt metadata, 
`{}` does not exist when it should", bin.display() ) } } let bins = bins .iter() .map(|s| { if s.ends_with(env::consts::EXE_SUFFIX) { s.to_string() } else { format!("{}{}", s, env::consts::EXE_SUFFIX) } }) .collect::>(); for bin in bins.iter() { if !installed.contains(bin) { bail!("binary `{}` not installed as part of `{}`", bin, pkgid) } } if bins.is_empty() { to_remove.extend(installed.iter().map(|b| dst.join(b))); tracker.remove(pkgid, &installed); } else { for bin in bins.iter() { to_remove.push(dst.join(bin)); } tracker.remove(pkgid, &bins); } tracker.save()?; for bin in to_remove { config.shell().status("Removing", bin.display())?; paths::remove_file(bin)?; } Ok(()) } cargo-0.66.0/src/cargo/ops/common_for_install_and_uninstall.rs000066400000000000000000000617371432416201200245370ustar00rootroot00000000000000use std::collections::{btree_map, BTreeMap, BTreeSet}; use std::env; use std::io::prelude::*; use std::io::SeekFrom; use std::path::{Path, PathBuf}; use std::rc::Rc; use std::task::Poll; use anyhow::{bail, format_err, Context as _}; use serde::{Deserialize, Serialize}; use toml_edit::easy as toml; use crate::core::compiler::Freshness; use crate::core::{Dependency, FeatureValue, Package, PackageId, QueryKind, Source, SourceId}; use crate::ops::{self, CompileFilter, CompileOptions}; use crate::sources::PathSource; use crate::util::errors::CargoResult; use crate::util::Config; use crate::util::{FileLock, Filesystem}; /// On-disk tracking for which package installed which binary. /// /// v1 is an older style, v2 is a new style that tracks more information, and /// is both backwards and forwards compatible. Cargo keeps both files in sync, /// updating both v1 and v2 at the same time. Additionally, if it detects /// changes in v1 that are not in v2 (such as when an older version of Cargo /// is used), it will automatically propagate those changes to v2. 
/// /// This maintains a filesystem lock, preventing other instances of Cargo from /// modifying at the same time. Drop the value to unlock. /// /// It is intended that v1 should be retained for a while during a longish /// transition period, and then v1 can be removed. pub struct InstallTracker { v1: CrateListingV1, v2: CrateListingV2, v1_lock: FileLock, v2_lock: FileLock, } /// Tracking information for the set of installed packages. #[derive(Default, Deserialize, Serialize)] struct CrateListingV2 { /// Map of every installed package. installs: BTreeMap, /// Forwards compatibility. Unknown keys from future versions of Cargo /// will be stored here and retained when the file is saved. #[serde(flatten)] other: BTreeMap, } /// Tracking information for the installation of a single package. /// /// This tracks the settings that were used when the package was installed. /// Future attempts to install the same package will check these settings to /// determine if it needs to be rebuilt/reinstalled. If nothing has changed, /// then Cargo will inform the user that it is "up to date". /// /// This is only used for the v2 format. #[derive(Debug, Deserialize, Serialize)] struct InstallInfo { /// Version requested via `--version`. /// None if `--version` not specified. Currently not used, possibly may be /// used in the future. version_req: Option, /// Set of binary names installed. bins: BTreeSet, /// Set of features explicitly enabled. features: BTreeSet, all_features: bool, no_default_features: bool, /// Either "debug" or "release". profile: String, /// The installation target. /// Either the host or the value specified in `--target`. /// None if unknown (when loading from v1). target: Option, /// Output of `rustc -V`. /// None if unknown (when loading from v1). /// Currently not used, possibly may be used in the future. rustc: Option, /// Forwards compatibility. #[serde(flatten)] other: BTreeMap, } /// Tracking information for the set of installed packages. 
#[derive(Default, Deserialize, Serialize)] pub struct CrateListingV1 { /// Map of installed package id to the set of binary names for that package. v1: BTreeMap>, } impl InstallTracker { /// Create an InstallTracker from information on disk. pub fn load(config: &Config, root: &Filesystem) -> CargoResult { let v1_lock = root.open_rw(Path::new(".crates.toml"), config, "crate metadata")?; let v2_lock = root.open_rw(Path::new(".crates2.json"), config, "crate metadata")?; let v1 = (|| -> CargoResult<_> { let mut contents = String::new(); v1_lock.file().read_to_string(&mut contents)?; if contents.is_empty() { Ok(CrateListingV1::default()) } else { Ok(toml::from_str(&contents).with_context(|| "invalid TOML found for metadata")?) } })() .with_context(|| { format!( "failed to parse crate metadata at `{}`", v1_lock.path().to_string_lossy() ) })?; let v2 = (|| -> CargoResult<_> { let mut contents = String::new(); v2_lock.file().read_to_string(&mut contents)?; let mut v2 = if contents.is_empty() { CrateListingV2::default() } else { serde_json::from_str(&contents) .with_context(|| "invalid JSON found for metadata")? }; v2.sync_v1(&v1); Ok(v2) })() .with_context(|| { format!( "failed to parse crate metadata at `{}`", v2_lock.path().to_string_lossy() ) })?; Ok(InstallTracker { v1, v2, v1_lock, v2_lock, }) } /// Checks if the given package should be built, and checks if executables /// already exist in the destination directory. /// /// Returns a tuple `(freshness, map)`. `freshness` indicates if the /// package should be built (`Dirty`) or if it is already up-to-date /// (`Fresh`) and should be skipped. The map maps binary names to the /// PackageId that installed it (which is None if not known). /// /// If there are no duplicates, then it will be considered `Dirty` (i.e., /// it is OK to build/install). /// /// `force=true` will always be considered `Dirty` (i.e., it will always /// be rebuilt/reinstalled). 
/// /// Returns an error if there is a duplicate and `--force` is not used. pub fn check_upgrade( &self, dst: &Path, pkg: &Package, force: bool, opts: &CompileOptions, target: &str, _rustc: &str, ) -> CargoResult<(Freshness, BTreeMap>)> { let exes = exe_names(pkg, &opts.filter); // Check if any tracked exe's are already installed. let duplicates = self.find_duplicates(dst, &exes); if force || duplicates.is_empty() { return Ok((Freshness::Dirty, duplicates)); } // Check if all duplicates come from packages of the same name. If // there are duplicates from other packages, then --force will be // required. // // There may be multiple matching duplicates if different versions of // the same package installed different binaries. // // This does not check the source_id in order to allow the user to // switch between different sources. For example, installing from git, // and then switching to the official crates.io release or vice-versa. // If the source_id were included, then the user would get possibly // confusing errors like "package `foo 1.0.0` is already installed" // and the change of source may not be obvious why it fails. let matching_duplicates: Vec = duplicates .values() .filter_map(|v| match v { Some(dupe_pkg_id) if dupe_pkg_id.name() == pkg.name() => Some(*dupe_pkg_id), _ => None, }) .collect(); // If both sets are the same length, that means all duplicates come // from packages with the same name. if matching_duplicates.len() == duplicates.len() { // Determine if it is dirty or fresh. let source_id = pkg.package_id().source_id(); if source_id.is_path() { // `cargo install --path ...` is always rebuilt. return Ok((Freshness::Dirty, duplicates)); } let is_up_to_date = |dupe_pkg_id| { let info = self .v2 .installs .get(dupe_pkg_id) .expect("dupes must be in sync"); let precise_equal = if source_id.is_git() { // Git sources must have the exact same hash to be // considered "fresh". 
dupe_pkg_id.source_id().precise() == source_id.precise() } else { true }; dupe_pkg_id.version() == pkg.version() && dupe_pkg_id.source_id() == source_id && precise_equal && info.is_up_to_date(opts, target, &exes) }; if matching_duplicates.iter().all(is_up_to_date) { Ok((Freshness::Fresh, duplicates)) } else { Ok((Freshness::Dirty, duplicates)) } } else { // Format the error message. let mut msg = String::new(); for (bin, p) in duplicates.iter() { msg.push_str(&format!("binary `{}` already exists in destination", bin)); if let Some(p) = p.as_ref() { msg.push_str(&format!(" as part of `{}`\n", p)); } else { msg.push('\n'); } } msg.push_str("Add --force to overwrite"); bail!("{}", msg); } } /// Check if any executables are already installed. /// /// Returns a map of duplicates, the key is the executable name and the /// value is the PackageId that is already installed. The PackageId is /// None if it is an untracked executable. fn find_duplicates( &self, dst: &Path, exes: &BTreeSet, ) -> BTreeMap> { exes.iter() .filter_map(|name| { if !dst.join(&name).exists() { None } else { let p = self.v2.package_for_bin(name); Some((name.clone(), p)) } }) .collect() } /// Mark that a package was installed. pub fn mark_installed( &mut self, package: &Package, bins: &BTreeSet, version_req: Option, opts: &CompileOptions, target: &str, rustc: &str, ) { self.v2 .mark_installed(package, bins, version_req, opts, target, rustc); self.v1.mark_installed(package, bins); } /// Save tracking information to disk. pub fn save(&self) -> CargoResult<()> { self.v1.save(&self.v1_lock).with_context(|| { format!( "failed to write crate metadata at `{}`", self.v1_lock.path().to_string_lossy() ) })?; self.v2.save(&self.v2_lock).with_context(|| { format!( "failed to write crate metadata at `{}`", self.v2_lock.path().to_string_lossy() ) })?; Ok(()) } /// Iterator of all installed binaries. /// Items are `(pkg_id, bins)` where `bins` is the set of binaries that /// package installed. 
pub fn all_installed_bins(&self) -> impl Iterator)> { self.v1.v1.iter() } /// Set of binaries installed by a particular package. /// Returns None if the package is not installed. pub fn installed_bins(&self, pkg_id: PackageId) -> Option<&BTreeSet> { self.v1.v1.get(&pkg_id) } /// Remove a package from the tracker. pub fn remove(&mut self, pkg_id: PackageId, bins: &BTreeSet) { self.v1.remove(pkg_id, bins); self.v2.remove(pkg_id, bins); } } impl CrateListingV1 { fn mark_installed(&mut self, pkg: &Package, bins: &BTreeSet) { // Remove bins from any other packages. for other_bins in self.v1.values_mut() { for bin in bins { other_bins.remove(bin); } } // Remove entries where `bins` is empty. let to_remove = self .v1 .iter() .filter_map(|(&p, set)| if set.is_empty() { Some(p) } else { None }) .collect::>(); for p in to_remove.iter() { self.v1.remove(p); } // Add these bins. self.v1 .entry(pkg.package_id()) .or_insert_with(BTreeSet::new) .append(&mut bins.clone()); } fn remove(&mut self, pkg_id: PackageId, bins: &BTreeSet) { let mut installed = match self.v1.entry(pkg_id) { btree_map::Entry::Occupied(e) => e, btree_map::Entry::Vacant(..) => panic!("v1 unexpected missing `{}`", pkg_id), }; for bin in bins { installed.get_mut().remove(bin); } if installed.get().is_empty() { installed.remove(); } } fn save(&self, lock: &FileLock) -> CargoResult<()> { let mut file = lock.file(); file.seek(SeekFrom::Start(0))?; file.set_len(0)?; let data = toml::to_string_pretty(self)?; file.write_all(data.as_bytes())?; Ok(()) } } impl CrateListingV2 { /// Incorporate any changes from v1 into self. /// This handles the initial upgrade to v2, *and* handles the case /// where v2 is in use, and a v1 update is made, then v2 is used again. /// i.e., `cargo +new install foo ; cargo +old install bar ; cargo +new install bar` /// For now, v1 is the source of truth, so its values are trusted over v2. fn sync_v1(&mut self, v1: &CrateListingV1) { // Make the `bins` entries the same. 
for (pkg_id, bins) in &v1.v1 { self.installs .entry(*pkg_id) .and_modify(|info| info.bins = bins.clone()) .or_insert_with(|| InstallInfo::from_v1(bins)); } // Remove any packages that aren't present in v1. let to_remove: Vec<_> = self .installs .keys() .filter(|pkg_id| !v1.v1.contains_key(pkg_id)) .cloned() .collect(); for pkg_id in to_remove { self.installs.remove(&pkg_id); } } fn package_for_bin(&self, bin_name: &str) -> Option { self.installs .iter() .find(|(_, info)| info.bins.contains(bin_name)) .map(|(pkg_id, _)| *pkg_id) } fn mark_installed( &mut self, pkg: &Package, bins: &BTreeSet, version_req: Option, opts: &CompileOptions, target: &str, rustc: &str, ) { // Remove bins from any other packages. for info in &mut self.installs.values_mut() { for bin in bins { info.bins.remove(bin); } } // Remove entries where `bins` is empty. let to_remove = self .installs .iter() .filter_map(|(&p, info)| if info.bins.is_empty() { Some(p) } else { None }) .collect::>(); for p in to_remove.iter() { self.installs.remove(p); } // Add these bins. 
if let Some(info) = self.installs.get_mut(&pkg.package_id()) { info.bins.append(&mut bins.clone()); info.version_req = version_req; info.features = feature_set(&opts.cli_features.features); info.all_features = opts.cli_features.all_features; info.no_default_features = !opts.cli_features.uses_default_features; info.profile = opts.build_config.requested_profile.to_string(); info.target = Some(target.to_string()); info.rustc = Some(rustc.to_string()); } else { self.installs.insert( pkg.package_id(), InstallInfo { version_req, bins: bins.clone(), features: feature_set(&opts.cli_features.features), all_features: opts.cli_features.all_features, no_default_features: !opts.cli_features.uses_default_features, profile: opts.build_config.requested_profile.to_string(), target: Some(target.to_string()), rustc: Some(rustc.to_string()), other: BTreeMap::new(), }, ); } } fn remove(&mut self, pkg_id: PackageId, bins: &BTreeSet) { let mut info_entry = match self.installs.entry(pkg_id) { btree_map::Entry::Occupied(e) => e, btree_map::Entry::Vacant(..) => panic!("v2 unexpected missing `{}`", pkg_id), }; for bin in bins { info_entry.get_mut().bins.remove(bin); } if info_entry.get().bins.is_empty() { info_entry.remove(); } } fn save(&self, lock: &FileLock) -> CargoResult<()> { let mut file = lock.file(); file.seek(SeekFrom::Start(0))?; file.set_len(0)?; let data = serde_json::to_string(self)?; file.write_all(data.as_bytes())?; Ok(()) } } impl InstallInfo { fn from_v1(set: &BTreeSet) -> InstallInfo { InstallInfo { version_req: None, bins: set.clone(), features: BTreeSet::new(), all_features: false, no_default_features: false, profile: "release".to_string(), target: None, rustc: None, other: BTreeMap::new(), } } /// Determine if this installation is "up to date", or if it needs to be reinstalled. /// /// This does not do Package/Source/Version checking. 
fn is_up_to_date(&self, opts: &CompileOptions, target: &str, exes: &BTreeSet) -> bool { self.features == feature_set(&opts.cli_features.features) && self.all_features == opts.cli_features.all_features && self.no_default_features != opts.cli_features.uses_default_features && self.profile.as_str() == opts.build_config.requested_profile.as_str() && (self.target.is_none() || self.target.as_deref() == Some(target)) && &self.bins == exes } } /// Determines the root directory where installation is done. pub fn resolve_root(flag: Option<&str>, config: &Config) -> CargoResult { let config_root = config.get_path("install.root")?; Ok(flag .map(PathBuf::from) .or_else(|| env::var_os("CARGO_INSTALL_ROOT").map(PathBuf::from)) .or_else(move || config_root.map(|v| v.val)) .map(Filesystem::new) .unwrap_or_else(|| config.home().clone())) } /// Determines the `PathSource` from a `SourceId`. pub fn path_source(source_id: SourceId, config: &Config) -> CargoResult> { let path = source_id .url() .to_file_path() .map_err(|()| format_err!("path sources must have a valid path"))?; Ok(PathSource::new(&path, source_id, config)) } /// Gets a Package based on command-line requirements. pub fn select_dep_pkg( source: &mut T, dep: Dependency, config: &Config, needs_update: bool, ) -> CargoResult where T: Source, { // This operation may involve updating some sources or making a few queries // which may involve frobbing caches, as a result make sure we synchronize // with other global Cargos let _lock = config.acquire_package_cache_lock()?; if needs_update { source.invalidate_cache(); } let deps = loop { match source.query_vec(&dep, QueryKind::Exact)? 
{ Poll::Ready(deps) => break deps, Poll::Pending => source.block_until_ready()?, } }; match deps.iter().map(|p| p.package_id()).max() { Some(pkgid) => { let pkg = Box::new(source).download_now(pkgid, config)?; Ok(pkg) } None => { let is_yanked: bool = if dep.version_req().is_exact() { let version: String = dep.version_req().to_string(); if let Ok(pkg_id) = PackageId::new(dep.package_name(), &version[1..], source.source_id()) { source.invalidate_cache(); loop { match source.is_yanked(pkg_id) { Poll::Ready(Ok(is_yanked)) => break is_yanked, Poll::Ready(Err(_)) => break false, Poll::Pending => source.block_until_ready()?, } } } else { false } } else { false }; if is_yanked { bail!( "cannot install package `{}`, it has been yanked from {}", dep.package_name(), source.source_id() ) } else { bail!( "could not find `{}` in {} with version `{}`", dep.package_name(), source.source_id(), dep.version_req(), ) } } } } pub fn select_pkg( source: &mut T, dep: Option, mut list_all: F, config: &Config, ) -> CargoResult where T: Source, F: FnMut(&mut T) -> CargoResult>, { // This operation may involve updating some sources or making a few queries // which may involve frobbing caches, as a result make sure we synchronize // with other global Cargos let _lock = config.acquire_package_cache_lock()?; source.invalidate_cache(); return if let Some(dep) = dep { select_dep_pkg(source, dep, config, false) } else { let candidates = list_all(source)?; let binaries = candidates .iter() .filter(|cand| cand.targets().iter().filter(|t| t.is_bin()).count() > 0); let examples = candidates .iter() .filter(|cand| cand.targets().iter().filter(|t| t.is_example()).count() > 0); let pkg = match one(binaries, |v| multi_err("binaries", v))? { Some(p) => p, None => match one(examples, |v| multi_err("examples", v))? 
{ Some(p) => p, None => bail!( "no packages found with binaries or \ examples" ), }, }; Ok(pkg.clone()) }; fn multi_err(kind: &str, mut pkgs: Vec<&Package>) -> String { pkgs.sort_unstable_by_key(|a| a.name()); format!( "multiple packages with {} found: {}. When installing a git repository, \ cargo will always search the entire repo for any Cargo.toml. \ Please specify which to install.", kind, pkgs.iter() .map(|p| p.name().as_str()) .collect::>() .join(", ") ) } } /// Get one element from the iterator. /// Returns None if none left. /// Returns error if there is more than one item in the iterator. fn one(mut i: I, f: F) -> CargoResult> where I: Iterator, F: FnOnce(Vec) -> String, { match (i.next(), i.next()) { (Some(i1), Some(i2)) => { let mut v = vec![i1, i2]; v.extend(i); Err(format_err!("{}", f(v))) } (Some(i), None) => Ok(Some(i)), (None, _) => Ok(None), } } /// Helper to convert features to a BTreeSet. fn feature_set(features: &Rc>) -> BTreeSet { features.iter().map(|s| s.to_string()).collect() } /// Helper to get the executable names from a filter. pub fn exe_names(pkg: &Package, filter: &ops::CompileFilter) -> BTreeSet { let to_exe = |name| format!("{}{}", name, env::consts::EXE_SUFFIX); match filter { CompileFilter::Default { .. } => pkg .targets() .iter() .filter(|t| t.is_bin()) .map(|t| to_exe(t.name())) .collect(), CompileFilter::Only { all_targets: true, .. } => pkg .targets() .iter() .filter(|target| target.is_executable()) .map(|target| to_exe(target.name())) .collect(), CompileFilter::Only { ref bins, ref examples, .. 
} => { let all_bins: Vec = bins.try_collect().unwrap_or_else(|| { pkg.targets() .iter() .filter(|t| t.is_bin()) .map(|t| t.name().to_string()) .collect() }); let all_examples: Vec = examples.try_collect().unwrap_or_else(|| { pkg.targets() .iter() .filter(|t| t.is_exe_example()) .map(|t| t.name().to_string()) .collect() }); all_bins .iter() .chain(all_examples.iter()) .map(|name| to_exe(name)) .collect() } } } cargo-0.66.0/src/cargo/ops/fix.rs000066400000000000000000001113551432416201200165160ustar00rootroot00000000000000//! High-level overview of how `fix` works: //! //! The main goal is to run `cargo check` to get rustc to emit JSON //! diagnostics with suggested fixes that can be applied to the files on the //! filesystem, and validate that those changes didn't break anything. //! //! Cargo begins by launching a `LockServer` thread in the background to //! listen for network connections to coordinate locking when multiple targets //! are built simultaneously. It ensures each package has only one fix running //! at once. //! //! The `RustfixDiagnosticServer` is launched in a background thread (in //! `JobQueue`) to listen for network connections to coordinate displaying //! messages to the user on the console (so that multiple processes don't try //! to print at the same time). //! //! Cargo begins a normal `cargo check` operation with itself set as a proxy //! for rustc by setting `primary_unit_rustc` in the build config. When //! cargo launches rustc to check a crate, it is actually launching itself. //! The `FIX_ENV` environment variable is set so that cargo knows it is in //! fix-proxy-mode. //! //! Each proxied cargo-as-rustc detects it is in fix-proxy-mode (via `FIX_ENV` //! environment variable in `main`) and does the following: //! //! - Acquire a lock from the `LockServer` from the master cargo process. //! - Launches the real rustc (`rustfix_and_fix`), looking at the JSON output //! for suggested fixes. //! 
- Uses the `rustfix` crate to apply the suggestions to the files on the //! file system. //! - If rustfix fails to apply any suggestions (for example, they are //! overlapping), but at least some suggestions succeeded, it will try the //! previous two steps up to 4 times as long as some suggestions succeed. //! - Assuming there's at least one suggestion applied, and the suggestions //! applied cleanly, rustc is run again to verify the suggestions didn't //! break anything. The change will be backed out if it fails (unless //! `--broken-code` is used). //! - If there are any warnings or errors, rustc will be run one last time to //! show them to the user. use std::collections::{BTreeSet, HashMap, HashSet}; use std::ffi::OsString; use std::path::{Path, PathBuf}; use std::process::{self, ExitStatus}; use std::{env, fs, str}; use anyhow::{bail, Context as _}; use cargo_util::{exit_status_to_string, is_simple_exit_code, paths, ProcessBuilder}; use log::{debug, trace, warn}; use rustfix::diagnostics::Diagnostic; use rustfix::{self, CodeFix}; use semver::Version; use crate::core::compiler::RustcTargetData; use crate::core::resolver::features::{DiffMap, FeatureOpts, FeatureResolver, FeaturesFor}; use crate::core::resolver::{HasDevUnits, Resolve, ResolveBehavior}; use crate::core::{Edition, MaybePackage, PackageId, Workspace}; use crate::ops::resolve::WorkspaceResolve; use crate::ops::{self, CompileOptions}; use crate::util::diagnostic_server::{Message, RustfixDiagnosticServer}; use crate::util::errors::CargoResult; use crate::util::Config; use crate::util::{existing_vcs_repo, LockServer, LockServerClient}; use crate::{drop_eprint, drop_eprintln}; const FIX_ENV: &str = "__CARGO_FIX_PLZ"; const BROKEN_CODE_ENV: &str = "__CARGO_FIX_BROKEN_CODE"; const EDITION_ENV: &str = "__CARGO_FIX_EDITION"; const IDIOMS_ENV: &str = "__CARGO_FIX_IDIOMS"; pub struct FixOptions { pub edition: bool, pub idioms: bool, pub compile_opts: CompileOptions, pub allow_dirty: bool, pub allow_no_vcs: 
bool, pub allow_staged: bool, pub broken_code: bool, } pub fn fix(ws: &Workspace<'_>, opts: &mut FixOptions) -> CargoResult<()> { check_version_control(ws.config(), opts)?; if opts.edition { check_resolver_change(ws, opts)?; } // Spin up our lock server, which our subprocesses will use to synchronize fixes. let lock_server = LockServer::new()?; let mut wrapper = ProcessBuilder::new(env::current_exe()?); wrapper.env(FIX_ENV, lock_server.addr().to_string()); let _started = lock_server.start()?; opts.compile_opts.build_config.force_rebuild = true; if opts.broken_code { wrapper.env(BROKEN_CODE_ENV, "1"); } if opts.edition { wrapper.env(EDITION_ENV, "1"); } if opts.idioms { wrapper.env(IDIOMS_ENV, "1"); } *opts .compile_opts .build_config .rustfix_diagnostic_server .borrow_mut() = Some(RustfixDiagnosticServer::new()?); if let Some(server) = opts .compile_opts .build_config .rustfix_diagnostic_server .borrow() .as_ref() { server.configure(&mut wrapper); } let rustc = ws.config().load_global_rustc(Some(ws))?; wrapper.arg(&rustc.path); // This is calling rustc in cargo fix-proxy-mode, so it also need to retry. // The argfile handling are located at `FixArgs::from_args`. 
wrapper.retry_with_argfile(true); // primary crates are compiled using a cargo subprocess to do extra work of applying fixes and // repeating build until there are no more changes to be applied opts.compile_opts.build_config.primary_unit_rustc = Some(wrapper); ops::compile(ws, &opts.compile_opts)?; Ok(()) } fn check_version_control(config: &Config, opts: &FixOptions) -> CargoResult<()> { if opts.allow_no_vcs { return Ok(()); } if !existing_vcs_repo(config.cwd(), config.cwd()) { bail!( "no VCS found for this package and `cargo fix` can potentially \ perform destructive changes; if you'd like to suppress this \ error pass `--allow-no-vcs`" ) } if opts.allow_dirty && opts.allow_staged { return Ok(()); } let mut dirty_files = Vec::new(); let mut staged_files = Vec::new(); if let Ok(repo) = git2::Repository::discover(config.cwd()) { let mut repo_opts = git2::StatusOptions::new(); repo_opts.include_ignored(false); for status in repo.statuses(Some(&mut repo_opts))?.iter() { if let Some(path) = status.path() { match status.status() { git2::Status::CURRENT => (), git2::Status::INDEX_NEW | git2::Status::INDEX_MODIFIED | git2::Status::INDEX_DELETED | git2::Status::INDEX_RENAMED | git2::Status::INDEX_TYPECHANGE => { if !opts.allow_staged { staged_files.push(path.to_string()) } } _ => { if !opts.allow_dirty { dirty_files.push(path.to_string()) } } }; } } } if dirty_files.is_empty() && staged_files.is_empty() { return Ok(()); } let mut files_list = String::new(); for file in dirty_files { files_list.push_str(" * "); files_list.push_str(&file); files_list.push_str(" (dirty)\n"); } for file in staged_files { files_list.push_str(" * "); files_list.push_str(&file); files_list.push_str(" (staged)\n"); } bail!( "the working directory of this package has uncommitted changes, and \ `cargo fix` can potentially perform destructive changes; if you'd \ like to suppress this error pass `--allow-dirty`, `--allow-staged`, \ or commit the changes to these files:\n\ \n\ {}\n\ ", files_list ); } 
fn check_resolver_change(ws: &Workspace<'_>, opts: &FixOptions) -> CargoResult<()> { let root = ws.root_maybe(); match root { MaybePackage::Package(root_pkg) => { if root_pkg.manifest().resolve_behavior().is_some() { // If explicitly specified by the user, no need to check. return Ok(()); } // Only trigger if updating the root package from 2018. let pkgs = opts.compile_opts.spec.get_packages(ws)?; if !pkgs.iter().any(|&pkg| pkg == root_pkg) { // The root is not being migrated. return Ok(()); } if root_pkg.manifest().edition() != Edition::Edition2018 { // V1 to V2 only happens on 2018 to 2021. return Ok(()); } } MaybePackage::Virtual(_vm) => { // Virtual workspaces don't have a global edition to set (yet). return Ok(()); } } // 2018 without `resolver` set must be V1 assert_eq!(ws.resolve_behavior(), ResolveBehavior::V1); let specs = opts.compile_opts.spec.to_package_id_specs(ws)?; let target_data = RustcTargetData::new(ws, &opts.compile_opts.build_config.requested_kinds)?; let resolve_differences = |has_dev_units| -> CargoResult<(WorkspaceResolve<'_>, DiffMap)> { let ws_resolve = ops::resolve_ws_with_opts( ws, &target_data, &opts.compile_opts.build_config.requested_kinds, &opts.compile_opts.cli_features, &specs, has_dev_units, crate::core::resolver::features::ForceAllTargets::No, )?; let feature_opts = FeatureOpts::new_behavior(ResolveBehavior::V2, has_dev_units); let v2_features = FeatureResolver::resolve( ws, &target_data, &ws_resolve.targeted_resolve, &ws_resolve.pkg_set, &opts.compile_opts.cli_features, &specs, &opts.compile_opts.build_config.requested_kinds, feature_opts, )?; let diffs = v2_features.compare_legacy(&ws_resolve.resolved_features); Ok((ws_resolve, diffs)) }; let (_, without_dev_diffs) = resolve_differences(HasDevUnits::No)?; let (ws_resolve, mut with_dev_diffs) = resolve_differences(HasDevUnits::Yes)?; if without_dev_diffs.is_empty() && with_dev_diffs.is_empty() { // Nothing is different, nothing to report. 
return Ok(()); } // Only display unique changes with dev-dependencies. with_dev_diffs.retain(|k, vals| without_dev_diffs.get(k) != Some(vals)); let config = ws.config(); config.shell().note( "Switching to Edition 2021 will enable the use of the version 2 feature resolver in Cargo.", )?; drop_eprintln!( config, "This may cause some dependencies to be built with fewer features enabled than previously." ); drop_eprintln!( config, "More information about the resolver changes may be found \ at https://doc.rust-lang.org/nightly/edition-guide/rust-2021/default-cargo-resolver.html" ); drop_eprintln!( config, "When building the following dependencies, \ the given features will no longer be used:\n" ); let show_diffs = |differences: DiffMap| { for ((pkg_id, features_for), removed) in differences { drop_eprint!(config, " {}", pkg_id); if let FeaturesFor::HostDep = features_for { drop_eprint!(config, " (as host dependency)"); } drop_eprint!(config, " removed features: "); let joined: Vec<_> = removed.iter().map(|s| s.as_str()).collect(); drop_eprintln!(config, "{}", joined.join(", ")); } drop_eprint!(config, "\n"); }; if !without_dev_diffs.is_empty() { show_diffs(without_dev_diffs); } if !with_dev_diffs.is_empty() { drop_eprintln!( config, "The following differences only apply when building with dev-dependencies:\n" ); show_diffs(with_dev_diffs); } report_maybe_diesel(config, &ws_resolve.targeted_resolve)?; Ok(()) } fn report_maybe_diesel(config: &Config, resolve: &Resolve) -> CargoResult<()> { fn is_broken_diesel(pid: PackageId) -> bool { pid.name() == "diesel" && pid.version() < &Version::new(1, 4, 8) } fn is_broken_diesel_migration(pid: PackageId) -> bool { pid.name() == "diesel_migrations" && pid.version().major <= 1 } if resolve.iter().any(is_broken_diesel) && resolve.iter().any(is_broken_diesel_migration) { config.shell().note( "\ This project appears to use both diesel and diesel_migrations. 
These packages have a known issue where the build may fail due to the version 2 resolver preventing feature unification between those two packages. Please update to at least diesel 1.4.8 to prevent this issue from happening. ", )?; } Ok(()) } /// Provide the lock address when running in proxy mode /// /// Returns `None` if `fix` is not being run (not in proxy mode). Returns /// `Some(...)` if in `fix` proxy mode pub fn fix_get_proxy_lock_addr() -> Option { env::var(FIX_ENV).ok() } /// Entry point for `cargo` running as a proxy for `rustc`. /// /// This is called every time `cargo` is run to check if it is in proxy mode. /// /// If there are warnings or errors, this does not return, /// and the process exits with the corresponding `rustc` exit code. /// /// See [`fix_get_proxy_lock_addr`] pub fn fix_exec_rustc(config: &Config, lock_addr: &str) -> CargoResult<()> { let args = FixArgs::get()?; trace!("cargo-fix as rustc got file {:?}", args.file); let workspace_rustc = std::env::var("RUSTC_WORKSPACE_WRAPPER") .map(PathBuf::from) .ok(); let mut rustc = ProcessBuilder::new(&args.rustc).wrapped(workspace_rustc.as_ref()); rustc.retry_with_argfile(true); rustc.env_remove(FIX_ENV); args.apply(&mut rustc); trace!("start rustfixing {:?}", args.file); let json_error_rustc = { let mut cmd = rustc.clone(); cmd.arg("--error-format=json"); cmd }; let fixes = rustfix_crate(&lock_addr, &json_error_rustc, &args.file, &args, config)?; // Ok now we have our final goal of testing out the changes that we applied. // If these changes went awry and actually started to cause the crate to // *stop* compiling then we want to back them out and continue to print // warnings to the user. // // If we didn't actually make any changes then we can immediately execute the // new rustc, and otherwise we capture the output to hide it in the scenario // that we have to back it all out. 
if !fixes.files.is_empty() { debug!("calling rustc for final verification: {json_error_rustc}"); let output = json_error_rustc.output()?; if output.status.success() { for (path, file) in fixes.files.iter() { Message::Fixed { file: path.clone(), fixes: file.fixes_applied, } .post()?; } } // If we succeeded then we'll want to commit to the changes we made, if // any. If stderr is empty then there's no need for the final exec at // the end, we just bail out here. if output.status.success() && output.stderr.is_empty() { return Ok(()); } // Otherwise, if our rustc just failed, then that means that we broke the // user's code with our changes. Back out everything and fall through // below to recompile again. if !output.status.success() { if env::var_os(BROKEN_CODE_ENV).is_none() { for (path, file) in fixes.files.iter() { debug!("reverting {:?} due to errors", path); paths::write(path, &file.original_code)?; } } let krate = { let mut iter = json_error_rustc.get_args(); let mut krate = None; while let Some(arg) = iter.next() { if arg == "--crate-name" { krate = iter.next().and_then(|s| s.to_owned().into_string().ok()); } } krate }; log_failed_fix(krate, &output.stderr, output.status)?; } } // This final fall-through handles multiple cases; // - If the fix failed, show the original warnings and suggestions. // - If `--broken-code`, show the error messages. // - If the fix succeeded, show any remaining warnings. for arg in args.format_args { // Add any json/error format arguments that Cargo wants. This allows // things like colored output to work correctly. rustc.arg(arg); } debug!("calling rustc to display remaining diagnostics: {rustc}"); exit_with(rustc.status()?); } #[derive(Default)] struct FixedCrate { files: HashMap, } struct FixedFile { errors_applying_fixes: Vec, fixes_applied: u32, original_code: String, } /// Attempts to apply fixes to a single crate. 
/// /// This runs `rustc` (possibly multiple times) to gather suggestions from the /// compiler and applies them to the files on disk. fn rustfix_crate( lock_addr: &str, rustc: &ProcessBuilder, filename: &Path, args: &FixArgs, config: &Config, ) -> CargoResult { if !args.can_run_rustfix(config)? { // This fix should not be run. Skipping... return Ok(FixedCrate::default()); } // First up, we want to make sure that each crate is only checked by one // process at a time. If two invocations concurrently check a crate then // it's likely to corrupt it. // // Historically this used per-source-file locking, then per-package // locking. It now uses a single, global lock as some users do things like // #[path] or include!() of shared files between packages. Serializing // makes it slower, but is the only safe way to prevent concurrent // modification. let _lock = LockServerClient::lock(&lock_addr.parse()?, "global")?; // Next up, this is a bit suspicious, but we *iteratively* execute rustc and // collect suggestions to feed to rustfix. Once we hit our limit of times to // execute rustc or we appear to be reaching a fixed point we stop running // rustc. // // This is currently done to handle code like: // // ::foo::<::Bar>(); // // where there are two fixes to happen here: `crate::foo::()`. // The spans for these two suggestions are overlapping and its difficult in // the compiler to **not** have overlapping spans here. As a result, a naive // implementation would feed the two compiler suggestions for the above fix // into `rustfix`, but one would be rejected because it overlaps with the // other. // // In this case though, both suggestions are valid and can be automatically // applied! To handle this case we execute rustc multiple times, collecting // fixes each time we do so. Along the way we discard any suggestions that // failed to apply, assuming that they can be fixed the next time we run // rustc. 
// // Naturally, we want a few protections in place here though to avoid looping // forever or otherwise losing data. To that end we have a few termination // conditions: // // * Do this whole process a fixed number of times. In theory we probably // need an infinite number of times to apply fixes, but we're not gonna // sit around waiting for that. // * If it looks like a fix genuinely can't be applied we need to bail out. // Detect this when a fix fails to get applied *and* no suggestions // successfully applied to the same file. In that case looks like we // definitely can't make progress, so bail out. let mut fixes = FixedCrate::default(); let mut last_fix_counts = HashMap::new(); let iterations = env::var("CARGO_FIX_MAX_RETRIES") .ok() .and_then(|n| n.parse().ok()) .unwrap_or(4); for _ in 0..iterations { last_fix_counts.clear(); for (path, file) in fixes.files.iter_mut() { last_fix_counts.insert(path.clone(), file.fixes_applied); // We'll generate new errors below. file.errors_applying_fixes.clear(); } rustfix_and_fix(&mut fixes, rustc, filename, config)?; let mut progress_yet_to_be_made = false; for (path, file) in fixes.files.iter_mut() { if file.errors_applying_fixes.is_empty() { continue; } // If anything was successfully fixed *and* there's at least one // error, then assume the error was spurious and we'll try again on // the next iteration. if file.fixes_applied != *last_fix_counts.get(path).unwrap_or(&0) { progress_yet_to_be_made = true; } } if !progress_yet_to_be_made { break; } } // Any errors still remaining at this point need to be reported as probably // bugs in Cargo and/or rustfix. for (path, file) in fixes.files.iter_mut() { for error in file.errors_applying_fixes.drain(..) { Message::ReplaceFailed { file: path.clone(), message: error, } .post()?; } } Ok(fixes) } /// Executes `rustc` to apply one round of suggestions to the crate in question. 
/// /// This will fill in the `fixes` map with original code, suggestions applied, /// and any errors encountered while fixing files. fn rustfix_and_fix( fixes: &mut FixedCrate, rustc: &ProcessBuilder, filename: &Path, config: &Config, ) -> CargoResult<()> { // If not empty, filter by these lints. // TODO: implement a way to specify this. let only = HashSet::new(); debug!("calling rustc to collect suggestions and validate previous fixes: {rustc}"); let output = rustc.output()?; // If rustc didn't succeed for whatever reasons then we're very likely to be // looking at otherwise broken code. Let's not make things accidentally // worse by applying fixes where a bug could cause *more* broken code. // Instead, punt upwards which will reexec rustc over the original code, // displaying pretty versions of the diagnostics we just read out. if !output.status.success() && env::var_os(BROKEN_CODE_ENV).is_none() { debug!( "rustfixing `{:?}` failed, rustc exited with {:?}", filename, output.status.code() ); return Ok(()); } let fix_mode = env::var_os("__CARGO_FIX_YOLO") .map(|_| rustfix::Filter::Everything) .unwrap_or(rustfix::Filter::MachineApplicableOnly); // Sift through the output of the compiler to look for JSON messages. // indicating fixes that we can apply. let stderr = str::from_utf8(&output.stderr).context("failed to parse rustc stderr as UTF-8")?; let suggestions = stderr .lines() .filter(|x| !x.is_empty()) .inspect(|y| trace!("line: {}", y)) // Parse each line of stderr, ignoring errors, as they may not all be JSON. .filter_map(|line| serde_json::from_str::(line).ok()) // From each diagnostic, try to extract suggestions from rustc. .filter_map(|diag| rustfix::collect_suggestions(&diag, &only, fix_mode)); // Collect suggestions by file so we can apply them one at a time later. let mut file_map = HashMap::new(); let mut num_suggestion = 0; // It's safe since we won't read any content under home dir. 
let home_path = config.home().as_path_unlocked(); for suggestion in suggestions { trace!("suggestion"); // Make sure we've got a file associated with this suggestion and all // snippets point to the same file. Right now it's not clear what // we would do with multiple files. let file_names = suggestion .solutions .iter() .flat_map(|s| s.replacements.iter()) .map(|r| &r.snippet.file_name); let file_name = if let Some(file_name) = file_names.clone().next() { file_name.clone() } else { trace!("rejecting as it has no solutions {:?}", suggestion); continue; }; // Do not write into registry cache. See rust-lang/cargo#9857. if Path::new(&file_name).starts_with(home_path) { continue; } if !file_names.clone().all(|f| f == &file_name) { trace!("rejecting as it changes multiple files: {:?}", suggestion); continue; } trace!("adding suggestion for {:?}: {:?}", file_name, suggestion); file_map .entry(file_name) .or_insert_with(Vec::new) .push(suggestion); num_suggestion += 1; } debug!( "collected {} suggestions for `{}`", num_suggestion, filename.display(), ); for (file, suggestions) in file_map { // Attempt to read the source code for this file. If this fails then // that'd be pretty surprising, so log a message and otherwise keep // going. let code = match paths::read(file.as_ref()) { Ok(s) => s, Err(e) => { warn!("failed to read `{}`: {}", file, e); continue; } }; let num_suggestions = suggestions.len(); debug!("applying {} fixes to {}", num_suggestions, file); // If this file doesn't already exist then we just read the original // code, so save it. If the file already exists then the original code // doesn't need to be updated as we've just read an interim state with // some fixes but perhaps not all. 
let fixed_file = fixes .files .entry(file.clone()) .or_insert_with(|| FixedFile { errors_applying_fixes: Vec::new(), fixes_applied: 0, original_code: code.clone(), }); let mut fixed = CodeFix::new(&code); // As mentioned above in `rustfix_crate`, we don't immediately warn // about suggestions that fail to apply here, and instead we save them // off for later processing. for suggestion in suggestions.iter().rev() { match fixed.apply(suggestion) { Ok(()) => fixed_file.fixes_applied += 1, Err(e) => fixed_file.errors_applying_fixes.push(e.to_string()), } } let new_code = fixed.finish()?; paths::write(&file, new_code)?; } Ok(()) } fn exit_with(status: ExitStatus) -> ! { #[cfg(unix)] { use std::io::Write; use std::os::unix::prelude::*; if let Some(signal) = status.signal() { drop(writeln!( std::io::stderr().lock(), "child failed with signal `{}`", signal )); process::exit(2); } } process::exit(status.code().unwrap_or(3)); } fn log_failed_fix(krate: Option, stderr: &[u8], status: ExitStatus) -> CargoResult<()> { let stderr = str::from_utf8(stderr).context("failed to parse rustc stderr as utf-8")?; let diagnostics = stderr .lines() .filter(|x| !x.is_empty()) .filter_map(|line| serde_json::from_str::(line).ok()); let mut files = BTreeSet::new(); let mut errors = Vec::new(); for diagnostic in diagnostics { errors.push(diagnostic.rendered.unwrap_or(diagnostic.message)); for span in diagnostic.spans.into_iter() { files.insert(span.file_name); } } // Include any abnormal messages (like an ICE or whatever). errors.extend( stderr .lines() .filter(|x| !x.starts_with('{')) .map(|x| x.to_string()), ); let files = files.into_iter().collect(); let abnormal_exit = if status.code().map_or(false, is_simple_exit_code) { None } else { Some(exit_status_to_string(status)) }; Message::FixFailed { files, krate, errors, abnormal_exit, } .post()?; Ok(()) } /// Various command-line options and settings used when `cargo` is running as /// a proxy for `rustc` during the fix operation. 
struct FixArgs { /// This is the `.rs` file that is being fixed. file: PathBuf, /// If `--edition` is used to migrate to the next edition, this is the /// edition we are migrating towards. prepare_for_edition: Option, /// `true` if `--edition-idioms` is enabled. idioms: bool, /// The current edition. /// /// `None` if on 2015. enabled_edition: Option, /// Other command-line arguments not reflected by other fields in /// `FixArgs`. other: Vec, /// Path to the `rustc` executable. rustc: PathBuf, /// Console output flags (`--error-format`, `--json`, etc.). /// /// The normal fix procedure always uses `--json`, so it overrides what /// Cargo normally passes when applying fixes. When displaying warnings or /// errors, it will use these flags. format_args: Vec, } impl FixArgs { fn get() -> CargoResult { Self::from_args(env::args_os()) } // This is a separate function so that we can use it in tests. fn from_args(argv: impl IntoIterator) -> CargoResult { let mut argv = argv.into_iter(); let mut rustc = argv .nth(1) .map(PathBuf::from) .ok_or_else(|| anyhow::anyhow!("expected rustc or `@path` as first argument"))?; let mut file = None; let mut enabled_edition = None; let mut other = Vec::new(); let mut format_args = Vec::new(); let mut handle_arg = |arg: OsString| -> CargoResult<()> { let path = PathBuf::from(arg); if path.extension().and_then(|s| s.to_str()) == Some("rs") && path.exists() { file = Some(path); return Ok(()); } if let Some(s) = path.to_str() { if let Some(edition) = s.strip_prefix("--edition=") { enabled_edition = Some(edition.parse()?); return Ok(()); } if s.starts_with("--error-format=") || s.starts_with("--json=") { // Cargo may add error-format in some cases, but `cargo // fix` wants to add its own. 
format_args.push(s.to_string()); return Ok(()); } } other.push(path.into()); Ok(()) }; if let Some(argfile_path) = rustc.to_str().unwrap_or_default().strip_prefix("@") { // Because cargo in fix-proxy-mode might hit the command line size limit, // cargo fix need handle `@path` argfile for this special case. if argv.next().is_some() { bail!("argfile `@path` cannot be combined with other arguments"); } let contents = fs::read_to_string(argfile_path) .with_context(|| format!("failed to read argfile at `{argfile_path}`"))?; let mut iter = contents.lines().map(OsString::from); rustc = iter .next() .map(PathBuf::from) .ok_or_else(|| anyhow::anyhow!("expected rustc as first argument"))?; for arg in iter { handle_arg(arg)?; } } else { for arg in argv { handle_arg(arg)?; } } let file = file.ok_or_else(|| anyhow::anyhow!("could not find .rs file in rustc args"))?; let idioms = env::var(IDIOMS_ENV).is_ok(); let prepare_for_edition = env::var(EDITION_ENV).ok().map(|_| { enabled_edition .unwrap_or(Edition::Edition2015) .saturating_next() }); Ok(FixArgs { file, prepare_for_edition, idioms, enabled_edition, other, rustc, format_args, }) } fn apply(&self, cmd: &mut ProcessBuilder) { cmd.arg(&self.file); cmd.args(&self.other); if self.prepare_for_edition.is_some() { // When migrating an edition, we don't want to fix other lints as // they can sometimes add suggestions that fail to apply, causing // the entire migration to fail. But those lints aren't needed to // migrate. cmd.arg("--cap-lints=allow"); } else { // This allows `cargo fix` to work even if the crate has #[deny(warnings)]. 
cmd.arg("--cap-lints=warn"); } if let Some(edition) = self.enabled_edition { cmd.arg("--edition").arg(edition.to_string()); if self.idioms && edition.supports_idiom_lint() { cmd.arg(format!("-Wrust-{}-idioms", edition)); } } if let Some(edition) = self.prepare_for_edition { if edition.supports_compat_lint() { cmd.arg("--force-warn") .arg(format!("rust-{}-compatibility", edition)); } } } /// Validates the edition, and sends a message indicating what is being /// done. Returns a flag indicating whether this fix should be run. fn can_run_rustfix(&self, config: &Config) -> CargoResult { let to_edition = match self.prepare_for_edition { Some(s) => s, None => { return Message::Fixing { file: self.file.display().to_string(), } .post() .and(Ok(true)); } }; // Unfortunately determining which cargo targets are being built // isn't easy, and each target can be a different edition. The // cargo-as-rustc fix wrapper doesn't know anything about the // workspace, so it can't check for the `cargo-features` unstable // opt-in. As a compromise, this just restricts to the nightly // toolchain. // // Unfortunately this results in a pretty poor error message when // multiple jobs run in parallel (the error appears multiple // times). Hopefully this doesn't happen often in practice. if !to_edition.is_stable() && !config.nightly_features_allowed { let message = format!( "`{file}` is on the latest edition, but trying to \ migrate to edition {to_edition}.\n\ Edition {to_edition} is unstable and not allowed in \ this release, consider trying the nightly release channel.", file = self.file.display(), to_edition = to_edition ); return Message::EditionAlreadyEnabled { message, edition: to_edition.previous().unwrap(), } .post() .and(Ok(false)); // Do not run rustfix for this the edition. 
} let from_edition = self.enabled_edition.unwrap_or(Edition::Edition2015); if from_edition == to_edition { let message = format!( "`{}` is already on the latest edition ({}), \ unable to migrate further", self.file.display(), to_edition ); Message::EditionAlreadyEnabled { message, edition: to_edition, } .post() } else { Message::Migrating { file: self.file.display().to_string(), from_edition, to_edition, } .post() } .and(Ok(true)) } } #[cfg(test)] mod tests { use super::FixArgs; use std::ffi::OsString; use std::io::Write as _; use std::path::PathBuf; #[test] fn get_fix_args_from_argfile() { let mut temp = tempfile::Builder::new().tempfile().unwrap(); let main_rs = tempfile::Builder::new().suffix(".rs").tempfile().unwrap(); let content = format!("/path/to/rustc\n{}\nfoobar\n", main_rs.path().display()); temp.write_all(content.as_bytes()).unwrap(); let argfile = format!("@{}", temp.path().display()); let args = ["cargo", &argfile]; let fix_args = FixArgs::from_args(args.map(|x| x.into())).unwrap(); assert_eq!(fix_args.rustc, PathBuf::from("/path/to/rustc")); assert_eq!(fix_args.file, main_rs.path()); assert_eq!(fix_args.other, vec![OsString::from("foobar")]); } #[test] fn get_fix_args_from_argfile_with_extra_arg() { let mut temp = tempfile::Builder::new().tempfile().unwrap(); let main_rs = tempfile::Builder::new().suffix(".rs").tempfile().unwrap(); let content = format!("/path/to/rustc\n{}\nfoobar\n", main_rs.path().display()); temp.write_all(content.as_bytes()).unwrap(); let argfile = format!("@{}", temp.path().display()); let args = ["cargo", &argfile, "boo!"]; match FixArgs::from_args(args.map(|x| x.into())) { Err(e) => assert_eq!( e.to_string(), "argfile `@path` cannot be combined with other arguments" ), Ok(_) => panic!("should fail"), } } } cargo-0.66.0/src/cargo/ops/lockfile.rs000066400000000000000000000177701432416201200175260ustar00rootroot00000000000000use std::io::prelude::*; use crate::core::{resolver, Resolve, ResolveVersion, Workspace}; use 
crate::util::errors::CargoResult; use crate::util::toml as cargo_toml; use crate::util::Filesystem; use anyhow::Context as _; use toml_edit::easy as toml; pub fn load_pkg_lockfile(ws: &Workspace<'_>) -> CargoResult> { if !ws.root().join("Cargo.lock").exists() { return Ok(None); } let root = Filesystem::new(ws.root().to_path_buf()); let mut f = root.open_ro("Cargo.lock", ws.config(), "Cargo.lock file")?; let mut s = String::new(); f.read_to_string(&mut s) .with_context(|| format!("failed to read file: {}", f.path().display()))?; let resolve = (|| -> CargoResult> { let resolve: toml::Value = cargo_toml::parse(&s, f.path(), ws.config())?; let v: resolver::EncodableResolve = resolve.try_into()?; Ok(Some(v.into_resolve(&s, ws)?)) })() .with_context(|| format!("failed to parse lock file at: {}", f.path().display()))?; Ok(resolve) } /// Generate a toml String of Cargo.lock from a Resolve. pub fn resolve_to_string(ws: &Workspace<'_>, resolve: &mut Resolve) -> CargoResult { let (_orig, out, _ws_root) = resolve_to_string_orig(ws, resolve); Ok(out) } pub fn write_pkg_lockfile(ws: &Workspace<'_>, resolve: &mut Resolve) -> CargoResult<()> { let (orig, mut out, ws_root) = resolve_to_string_orig(ws, resolve); // If the lock file contents haven't changed so don't rewrite it. This is // helpful on read-only filesystems. if let Some(orig) = &orig { if are_equal_lockfiles(orig, &out, ws) { return Ok(()); } } if !ws.config().lock_update_allowed() { let flag = if ws.config().locked() { "--locked" } else { "--frozen" }; anyhow::bail!( "the lock file {} needs to be updated but {} was passed to prevent this\n\ If you want to try to generate the lock file without accessing the network, \ remove the {} flag and use --offline instead.", ws.root().to_path_buf().join("Cargo.lock").display(), flag, flag ); } // While we're updating the lock file anyway go ahead and update its // encoding to whatever the latest default is. 
That way we can slowly roll // out lock file updates as they're otherwise already updated, and changes // which don't touch dependencies won't seemingly spuriously update the lock // file. if resolve.version() < ResolveVersion::default() { resolve.set_version(ResolveVersion::default()); out = serialize_resolve(resolve, orig.as_deref()); } // Ok, if that didn't work just write it out ws_root .open_rw("Cargo.lock", ws.config(), "Cargo.lock file") .and_then(|mut f| { f.file().set_len(0)?; f.write_all(out.as_bytes())?; Ok(()) }) .with_context(|| format!("failed to write {}", ws.root().join("Cargo.lock").display()))?; Ok(()) } fn resolve_to_string_orig( ws: &Workspace<'_>, resolve: &mut Resolve, ) -> (Option, String, Filesystem) { // Load the original lock file if it exists. let ws_root = Filesystem::new(ws.root().to_path_buf()); let orig = ws_root.open_ro("Cargo.lock", ws.config(), "Cargo.lock file"); let orig = orig.and_then(|mut f| { let mut s = String::new(); f.read_to_string(&mut s)?; Ok(s) }); let out = serialize_resolve(resolve, orig.as_deref().ok()); (orig.ok(), out, ws_root) } fn serialize_resolve(resolve: &Resolve, orig: Option<&str>) -> String { let toml = toml_edit::ser::to_item(resolve).unwrap(); let mut out = String::new(); // At the start of the file we notify the reader that the file is generated. // Specifically Phabricator ignores files containing "@generated", so we use that. 
let marker_line = "# This file is automatically @generated by Cargo."; let extra_line = "# It is not intended for manual editing."; out.push_str(marker_line); out.push('\n'); out.push_str(extra_line); out.push('\n'); // and preserve any other top comments if let Some(orig) = orig { let mut comments = orig.lines().take_while(|line| line.starts_with('#')); if let Some(first) = comments.next() { if first != marker_line { out.push_str(first); out.push('\n'); } if let Some(second) = comments.next() { if second != extra_line { out.push_str(second); out.push('\n'); } for line in comments { out.push_str(line); out.push('\n'); } } } } if let Some(version) = toml.get("version") { out.push_str(&format!("version = {}\n\n", version)); } let deps = toml["package"].as_array().unwrap(); for dep in deps { let dep = dep.as_inline_table().unwrap(); out.push_str("[[package]]\n"); emit_package(dep, &mut out); } if let Some(patch) = toml.get("patch") { let list = patch["unused"].as_array().unwrap(); for entry in list { out.push_str("[[patch.unused]]\n"); emit_package(entry.as_inline_table().unwrap(), &mut out); out.push('\n'); } } if let Some(meta) = toml.get("metadata") { // 1. We need to ensure we print the entire tree, not just the direct members of `metadata` // (which `toml_edit::Table::to_string` only shows) // 2. We need to ensure all children tables have `metadata.` prefix let meta_table = meta .clone() .into_table() .expect("validation ensures this is a table"); let mut meta_doc = toml_edit::Document::new(); meta_doc["metadata"] = toml_edit::Item::Table(meta_table); out.push_str(&meta_doc.to_string()); } // Historical versions of Cargo in the old format accidentally left trailing // blank newlines at the end of files, so we just leave that as-is. For all // encodings going forward, though, we want to be sure that our encoded lock // file doesn't contain any trailing newlines so trim out the extra if // necessary. 
if resolve.version() >= ResolveVersion::V2 { while out.ends_with("\n\n") { out.pop(); } } out } fn are_equal_lockfiles(orig: &str, current: &str, ws: &Workspace<'_>) -> bool { // If we want to try and avoid updating the lock file, parse both and // compare them; since this is somewhat expensive, don't do it in the // common case where we can update lock files. if !ws.config().lock_update_allowed() { let res: CargoResult = (|| { let old: resolver::EncodableResolve = toml::from_str(orig)?; let new: resolver::EncodableResolve = toml::from_str(current)?; Ok(old.into_resolve(orig, ws)? == new.into_resolve(current, ws)?) })(); if let Ok(true) = res { return true; } } orig.lines().eq(current.lines()) } fn emit_package(dep: &toml_edit::InlineTable, out: &mut String) { out.push_str(&format!("name = {}\n", &dep["name"])); out.push_str(&format!("version = {}\n", &dep["version"])); if dep.contains_key("source") { out.push_str(&format!("source = {}\n", &dep["source"])); } if dep.contains_key("checksum") { out.push_str(&format!("checksum = {}\n", &dep["checksum"])); } if let Some(s) = dep.get("dependencies") { let slice = s.as_array().unwrap(); if !slice.is_empty() { out.push_str("dependencies = [\n"); for child in slice.iter() { out.push_str(&format!(" {},\n", child)); } out.push_str("]\n"); } out.push('\n'); } else if dep.contains_key("replace") { out.push_str(&format!("replace = {}\n\n", &dep["replace"])); } } cargo-0.66.0/src/cargo/ops/mod.rs000066400000000000000000000067061432416201200165120ustar00rootroot00000000000000use crate::sources::CRATES_IO_DOMAIN; pub use self::cargo_clean::{clean, CleanOptions}; pub use self::cargo_compile::{ compile, compile_with_exec, compile_ws, create_bcx, print, resolve_all_features, CompileOptions, }; pub use self::cargo_compile::{CompileFilter, FilterRule, LibRule, Packages}; pub use self::cargo_doc::{doc, DocOptions}; pub use self::cargo_fetch::{fetch, FetchOptions}; pub use self::cargo_generate_lockfile::generate_lockfile; pub use 
self::cargo_generate_lockfile::update_lockfile; pub use self::cargo_generate_lockfile::UpdateOptions; pub use self::cargo_install::{install, install_list}; pub use self::cargo_new::{init, new, NewOptions, VersionControl}; pub use self::cargo_output_metadata::{output_metadata, ExportInfo, OutputMetadataOptions}; pub use self::cargo_package::{check_yanked, package, package_one, PackageOpts}; pub use self::cargo_pkgid::pkgid; pub use self::cargo_read_manifest::{read_package, read_packages}; pub use self::cargo_run::run; pub use self::cargo_test::{run_benches, run_tests, TestOptions}; pub use self::cargo_uninstall::uninstall; pub use self::fix::{fix, fix_exec_rustc, fix_get_proxy_lock_addr, FixOptions}; pub use self::lockfile::{load_pkg_lockfile, resolve_to_string, write_pkg_lockfile}; pub use self::registry::HttpTimeout; pub use self::registry::{configure_http_handle, http_handle, http_handle_and_timeout}; pub use self::registry::{modify_owners, yank, OwnersOptions, PublishOpts}; pub use self::registry::{needs_custom_http_transport, registry_login, registry_logout, search}; pub use self::registry::{publish, registry_configuration, RegistryConfig}; pub use self::resolve::{ add_overrides, get_resolved_packages, resolve_with_previous, resolve_ws, resolve_ws_with_opts, WorkspaceResolve, }; pub use self::vendor::{vendor, VendorOptions}; pub mod cargo_add; mod cargo_clean; mod cargo_compile; pub mod cargo_config; mod cargo_doc; mod cargo_fetch; mod cargo_generate_lockfile; mod cargo_install; mod cargo_new; mod cargo_output_metadata; mod cargo_package; mod cargo_pkgid; mod cargo_read_manifest; mod cargo_run; mod cargo_test; mod cargo_uninstall; mod common_for_install_and_uninstall; mod fix; mod lockfile; mod registry; mod resolve; pub mod tree; mod vendor; /// Returns true if the dependency is either git or path, false otherwise /// Error if a git/path dep is transitive, but has no version (registry source). 
/// This check is performed on dependencies before publishing or packaging fn check_dep_has_version(dep: &crate::core::Dependency, publish: bool) -> crate::CargoResult { let which = if dep.source_id().is_path() { "path" } else if dep.source_id().is_git() { "git" } else { return Ok(false); }; if !dep.specified_req() && dep.is_transitive() { let dep_version_source = dep.registry_id().map_or_else( || CRATES_IO_DOMAIN.to_string(), |registry_id| registry_id.display_registry_name(), ); anyhow::bail!( "all dependencies must have a version specified when {}.\n\ dependency `{}` does not specify a version\n\ Note: The {} dependency will use the version from {},\n\ the `{}` specification will be removed from the dependency declaration.", if publish { "publishing" } else { "packaging" }, dep.package_name(), if publish { "published" } else { "packaged" }, dep_version_source, which, ) } Ok(true) } cargo-0.66.0/src/cargo/ops/registry.rs000066400000000000000000001071551432416201200176030ustar00rootroot00000000000000use std::collections::{BTreeMap, HashSet}; use std::fs::File; use std::io::{self, BufRead}; use std::iter::repeat; use std::path::PathBuf; use std::str; use std::task::Poll; use std::time::Duration; use std::{cmp, env}; use anyhow::{bail, format_err, Context as _}; use cargo_util::paths; use crates_io::{self, NewCrate, NewCrateDependency, Registry}; use curl::easy::{Easy, InfoType, SslOpt, SslVersion}; use log::{log, Level}; use percent_encoding::{percent_encode, NON_ALPHANUMERIC}; use termcolor::Color::Green; use termcolor::ColorSpec; use crate::core::dependency::DepKind; use crate::core::manifest::ManifestMetadata; use crate::core::resolver::CliFeatures; use crate::core::source::Source; use crate::core::{Package, SourceId, Workspace}; use crate::ops; use crate::ops::Packages; use crate::sources::{RegistrySource, SourceConfigMap, CRATES_IO_DOMAIN, CRATES_IO_REGISTRY}; use crate::util::config::{self, Config, SslVersionConfig, SslVersionConfigRange}; use 
crate::util::errors::CargoResult; use crate::util::important_paths::find_root_manifest_for_wd; use crate::util::IntoUrl; use crate::{drop_print, drop_println, version}; mod auth; /// Registry settings loaded from config files. /// /// This is loaded based on the `--registry` flag and the config settings. #[derive(Debug)] pub enum RegistryConfig { None, /// The authentication token. Token(String), /// Process used for fetching a token. Process((PathBuf, Vec)), } impl RegistryConfig { /// Returns `true` if the credential is [`None`]. /// /// [`None`]: Self::None pub fn is_none(&self) -> bool { matches!(self, Self::None) } /// Returns `true` if the credential is [`Token`]. /// /// [`Token`]: Self::Token pub fn is_token(&self) -> bool { matches!(self, Self::Token(..)) } pub fn as_token(&self) -> Option<&str> { if let Self::Token(v) = self { Some(&*v) } else { None } } pub fn as_process(&self) -> Option<&(PathBuf, Vec)> { if let Self::Process(v) = self { Some(v) } else { None } } } pub struct PublishOpts<'cfg> { pub config: &'cfg Config, pub token: Option, pub index: Option, pub verify: bool, pub allow_dirty: bool, pub jobs: Option, pub keep_going: bool, pub to_publish: ops::Packages, pub targets: Vec, pub dry_run: bool, pub registry: Option, pub cli_features: CliFeatures, } pub fn publish(ws: &Workspace<'_>, opts: &PublishOpts<'_>) -> CargoResult<()> { let specs = opts.to_publish.to_package_id_specs(ws)?; if specs.len() > 1 { bail!("the `-p` argument must be specified to select a single package to publish") } if Packages::Default == opts.to_publish && ws.is_virtual() { bail!("the `-p` argument must be specified in the root of a virtual workspace") } let member_ids = ws.members().map(|p| p.package_id()); // Check that the spec matches exactly one member. 
specs[0].query(member_ids)?; let mut pkgs = ws.members_with_features(&specs, &opts.cli_features)?; // In `members_with_features_old`, it will add "current" package (determined by the cwd) // So we need filter pkgs = pkgs .into_iter() .filter(|(m, _)| specs.iter().any(|spec| spec.matches(m.package_id()))) .collect(); // Double check. It is safe theoretically, unless logic has updated. assert_eq!(pkgs.len(), 1); let (pkg, cli_features) = pkgs.pop().unwrap(); let mut publish_registry = opts.registry.clone(); if let Some(ref allowed_registries) = *pkg.publish() { if publish_registry.is_none() && allowed_registries.len() == 1 { // If there is only one allowed registry, push to that one directly, // even though there is no registry specified in the command. let default_registry = &allowed_registries[0]; if default_registry != CRATES_IO_REGISTRY { // Don't change the registry for crates.io and don't warn the user. // crates.io will be defaulted even without this. opts.config.shell().note(&format!( "Found `{}` as only allowed registry. Publishing to it automatically.", default_registry ))?; publish_registry = Some(default_registry.clone()); } } let reg_name = publish_registry .clone() .unwrap_or_else(|| CRATES_IO_REGISTRY.to_string()); if !allowed_registries.contains(®_name) { bail!( "`{}` cannot be published.\n\ The registry `{}` is not listed in the `publish` value in Cargo.toml.", pkg.name(), reg_name ); } } let (mut registry, _reg_cfg, reg_id) = registry( opts.config, opts.token.clone(), opts.index.as_deref(), publish_registry.as_deref(), true, !opts.dry_run, )?; verify_dependencies(pkg, ®istry, reg_id)?; // Prepare a tarball, with a non-suppressible warning if metadata // is missing since this is being put online. 
let tarball = ops::package_one( ws, pkg, &ops::PackageOpts { config: opts.config, verify: opts.verify, list: false, check_metadata: true, allow_dirty: opts.allow_dirty, to_package: ops::Packages::Default, targets: opts.targets.clone(), jobs: opts.jobs, keep_going: opts.keep_going, cli_features: cli_features, }, )? .unwrap(); opts.config .shell() .status("Uploading", pkg.package_id().to_string())?; transmit( opts.config, pkg, tarball.file(), &mut registry, reg_id, opts.dry_run, )?; Ok(()) } fn verify_dependencies( pkg: &Package, registry: &Registry, registry_src: SourceId, ) -> CargoResult<()> { for dep in pkg.dependencies().iter() { if super::check_dep_has_version(dep, true)? { continue; } // TomlManifest::prepare_for_publish will rewrite the dependency // to be just the `version` field. if dep.source_id() != registry_src { if !dep.source_id().is_registry() { // Consider making SourceId::kind a public type that we can // exhaustively match on. Using match can help ensure that // every kind is properly handled. panic!("unexpected source kind for dependency {:?}", dep); } // Block requests to send to crates.io with alt-registry deps. // This extra hostname check is mostly to assist with testing, // but also prevents someone using `--index` to specify // something that points to crates.io. if registry_src.is_default_registry() || registry.host_is_crates_io() { bail!("crates cannot be published to crates.io with dependencies sourced from other\n\ registries. `{}` needs to be published to crates.io before publishing this crate.\n\ (crate `{}` is pulled from {})", dep.package_name(), dep.package_name(), dep.source_id()); } } } Ok(()) } fn transmit( config: &Config, pkg: &Package, tarball: &File, registry: &mut Registry, registry_id: SourceId, dry_run: bool, ) -> CargoResult<()> { let deps = pkg .dependencies() .iter() .filter(|dep| { // Skip dev-dependency without version. 
dep.is_transitive() || dep.specified_req() }) .map(|dep| { // If the dependency is from a different registry, then include the // registry in the dependency. let dep_registry_id = match dep.registry_id() { Some(id) => id, None => SourceId::crates_io(config)?, }; // In the index and Web API, None means "from the same registry" // whereas in Cargo.toml, it means "from crates.io". let dep_registry = if dep_registry_id != registry_id { Some(dep_registry_id.url().to_string()) } else { None }; Ok(NewCrateDependency { optional: dep.is_optional(), default_features: dep.uses_default_features(), name: dep.package_name().to_string(), features: dep.features().iter().map(|s| s.to_string()).collect(), version_req: dep.version_req().to_string(), target: dep.platform().map(|s| s.to_string()), kind: match dep.kind() { DepKind::Normal => "normal", DepKind::Build => "build", DepKind::Development => "dev", } .to_string(), registry: dep_registry, explicit_name_in_toml: dep.explicit_name_in_toml().map(|s| s.to_string()), }) }) .collect::>>()?; let manifest = pkg.manifest(); let ManifestMetadata { ref authors, ref description, ref homepage, ref documentation, ref keywords, ref readme, ref repository, ref license, ref license_file, ref categories, ref badges, ref links, } = *manifest.metadata(); let readme_content = readme .as_ref() .map(|readme| { paths::read(&pkg.root().join(readme)) .with_context(|| format!("failed to read `readme` file for package `{}`", pkg)) }) .transpose()?; if let Some(ref file) = *license_file { if !pkg.root().join(file).exists() { bail!("the license file `{}` does not exist", file) } } // Do not upload if performing a dry run if dry_run { config.shell().warn("aborting upload due to dry run")?; return Ok(()); } let string_features = match manifest.original().features() { Some(features) => features .iter() .map(|(feat, values)| { ( feat.to_string(), values.iter().map(|fv| fv.to_string()).collect(), ) }) .collect::>>(), None => BTreeMap::new(), }; let warnings = 
registry .publish( &NewCrate { name: pkg.name().to_string(), vers: pkg.version().to_string(), deps, features: string_features, authors: authors.clone(), description: description.clone(), homepage: homepage.clone(), documentation: documentation.clone(), keywords: keywords.clone(), categories: categories.clone(), readme: readme_content, readme_file: readme.clone(), repository: repository.clone(), license: license.clone(), license_file: license_file.clone(), badges: badges.clone(), links: links.clone(), }, tarball, ) .with_context(|| format!("failed to publish to registry at {}", registry.host()))?; if !warnings.invalid_categories.is_empty() { let msg = format!( "the following are not valid category slugs and were \ ignored: {}. Please see https://crates.io/category_slugs \ for the list of all category slugs. \ ", warnings.invalid_categories.join(", ") ); config.shell().warn(&msg)?; } if !warnings.invalid_badges.is_empty() { let msg = format!( "the following are not valid badges and were ignored: {}. \ Either the badge type specified is unknown or a required \ attribute is missing. Please see \ https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata \ for valid badge types and their required attributes.", warnings.invalid_badges.join(", ") ); config.shell().warn(&msg)?; } if !warnings.other.is_empty() { for msg in warnings.other { config.shell().warn(&msg)?; } } Ok(()) } /// Returns the index and token from the config file for the given registry. /// /// `registry` is typically the registry specified on the command-line. If /// `None`, `index` is set to `None` to indicate it should use crates.io. 
pub fn registry_configuration( config: &Config, registry: Option<&str>, ) -> CargoResult { let err_both = |token_key: &str, proc_key: &str| { Err(format_err!( "both `{token_key}` and `{proc_key}` \ were specified in the config\n\ Only one of these values may be set, remove one or the other to proceed.", )) }; // `registry.default` is handled in command-line parsing. let (token, process) = match registry { Some(registry) => { let token_key = format!("registries.{registry}.token"); let token = config.get_string(&token_key)?.map(|p| p.val); let process = if config.cli_unstable().credential_process { let mut proc_key = format!("registries.{registry}.credential-process"); let mut process = config.get::>(&proc_key)?; if process.is_none() && token.is_none() { // This explicitly ignores the global credential-process if // the token is set, as that is "more specific". proc_key = String::from("registry.credential-process"); process = config.get::>(&proc_key)?; } else if process.is_some() && token.is_some() { return err_both(&token_key, &proc_key); } process } else { None }; (token, process) } None => { // Use crates.io default. config.check_registry_index_not_set()?; let token = config.get_string("registry.token")?.map(|p| p.val); let process = if config.cli_unstable().credential_process { let process = config.get::>("registry.credential-process")?; if token.is_some() && process.is_some() { return err_both("registry.token", "registry.credential-process"); } process } else { None }; (token, process) } }; let credential_process = process.map(|process| (process.path.resolve_program(config), process.args)); Ok(match (token, credential_process) { (None, None) => RegistryConfig::None, (None, Some(process)) => RegistryConfig::Process(process), (Some(x), None) => RegistryConfig::Token(x), (Some(_), Some(_)) => unreachable!("Only one of these values may be set."), }) } /// Returns the `Registry` and `Source` based on command-line and config settings. 
/// /// * `token`: The token from the command-line. If not set, uses the token /// from the config. /// * `index`: The index URL from the command-line. This is ignored if /// `registry` is set. /// * `registry`: The registry name from the command-line. If neither /// `registry`, or `index` are set, then uses `crates-io`, honoring /// `[source]` replacement if defined. /// * `force_update`: If `true`, forces the index to be updated. /// * `validate_token`: If `true`, the token must be set. fn registry( config: &Config, token: Option, index: Option<&str>, registry: Option<&str>, force_update: bool, validate_token: bool, ) -> CargoResult<(Registry, RegistryConfig, SourceId)> { if index.is_some() && registry.is_some() { // Otherwise we would silently ignore one or the other. bail!("both `--index` and `--registry` should not be set at the same time"); } // Parse all configuration options let reg_cfg = registry_configuration(config, registry)?; let opt_index = registry .map(|r| config.get_registry_index(r)) .transpose()? .map(|u| u.to_string()); let sid = get_source_id(config, opt_index.as_deref().or(index), registry)?; if !sid.is_remote_registry() { bail!( "{} does not support API commands.\n\ Check for a source-replacement in .cargo/config.", sid ); } let api_host = { let _lock = config.acquire_package_cache_lock()?; let mut src = RegistrySource::remote(sid, &HashSet::new(), config)?; // Only update the index if the config is not available or `force` is set. if force_update { src.invalidate_cache() } let cfg = loop { match src.config()? { Poll::Pending => src .block_until_ready() .with_context(|| format!("failed to update {}", sid))?, Poll::Ready(cfg) => break cfg, } }; cfg.and_then(|cfg| cfg.api) .ok_or_else(|| format_err!("{} does not support API commands", sid))? 
}; let token = if validate_token { if index.is_some() { if token.is_none() { bail!("command-line argument --index requires --token to be specified"); } token } else { // Check `is_default_registry` so that the crates.io index can // change config.json's "api" value, and this won't affect most // people. It will affect those using source replacement, but // hopefully that's a relatively small set of users. if token.is_none() && reg_cfg.is_token() && registry.is_none() && !sid.is_default_registry() && !crates_io::is_url_crates_io(&api_host) { config.shell().warn( "using `registry.token` config value with source \ replacement is deprecated\n\ This may become a hard error in the future; \ see .\n\ Use the --token command-line flag to remove this warning.", )?; reg_cfg.as_token().map(|t| t.to_owned()) } else { let token = auth::auth_token(config, token.as_deref(), ®_cfg, registry, &api_host)?; Some(token) } } } else { None }; let handle = http_handle(config)?; // Workaround for the sparse+https://index.crates.io replacement index. Use the non-replaced // source_id so that the original (github) url is used when publishing a crate. let sid = if sid.is_default_registry() { SourceId::crates_io(config)? } else { sid }; Ok((Registry::new_handle(api_host, token, handle), reg_cfg, sid)) } /// Creates a new HTTP handle with appropriate global configuration for cargo. pub fn http_handle(config: &Config) -> CargoResult { let (mut handle, timeout) = http_handle_and_timeout(config)?; timeout.configure(&mut handle)?; Ok(handle) } pub fn http_handle_and_timeout(config: &Config) -> CargoResult<(Easy, HttpTimeout)> { if config.frozen() { bail!( "attempting to make an HTTP request, but --frozen was \ specified" ) } if config.offline() { bail!( "attempting to make an HTTP request, but --offline was \ specified" ) } // The timeout option for libcurl by default times out the entire transfer, // but we probably don't want this. 
Instead we only set timeouts for the // connect phase as well as a "low speed" timeout so if we don't receive // many bytes in a large-ish period of time then we time out. let mut handle = Easy::new(); let timeout = configure_http_handle(config, &mut handle)?; Ok((handle, timeout)) } pub fn needs_custom_http_transport(config: &Config) -> CargoResult { Ok(http_proxy_exists(config)? || *config.http_config()? != Default::default() || env::var_os("HTTP_TIMEOUT").is_some()) } /// Configure a libcurl http handle with the defaults options for Cargo pub fn configure_http_handle(config: &Config, handle: &mut Easy) -> CargoResult { let http = config.http_config()?; if let Some(proxy) = http_proxy(config)? { handle.proxy(&proxy)?; } if let Some(cainfo) = &http.cainfo { let cainfo = cainfo.resolve_path(config); handle.cainfo(&cainfo)?; } if let Some(check) = http.check_revoke { handle.ssl_options(SslOpt::new().no_revoke(!check))?; } if let Some(user_agent) = &http.user_agent { handle.useragent(user_agent)?; } else { handle.useragent(&format!("cargo {}", version()))?; } fn to_ssl_version(s: &str) -> CargoResult { let version = match s { "default" => SslVersion::Default, "tlsv1" => SslVersion::Tlsv1, "tlsv1.0" => SslVersion::Tlsv10, "tlsv1.1" => SslVersion::Tlsv11, "tlsv1.2" => SslVersion::Tlsv12, "tlsv1.3" => SslVersion::Tlsv13, _ => bail!( "Invalid ssl version `{}`,\ choose from 'default', 'tlsv1', 'tlsv1.0', 'tlsv1.1', 'tlsv1.2', 'tlsv1.3'.", s ), }; Ok(version) } if let Some(ssl_version) = &http.ssl_version { match ssl_version { SslVersionConfig::Single(s) => { let version = to_ssl_version(s.as_str())?; handle.ssl_version(version)?; } SslVersionConfig::Range(SslVersionConfigRange { min, max }) => { let min_version = min .as_ref() .map_or(Ok(SslVersion::Default), |s| to_ssl_version(s))?; let max_version = max .as_ref() .map_or(Ok(SslVersion::Default), |s| to_ssl_version(s))?; handle.ssl_min_max_version(min_version, max_version)?; } } } if let Some(true) = http.debug { 
handle.verbose(true)?; log::debug!("{:#?}", curl::Version::get()); handle.debug_function(|kind, data| { let (prefix, level) = match kind { InfoType::Text => ("*", Level::Debug), InfoType::HeaderIn => ("<", Level::Debug), InfoType::HeaderOut => (">", Level::Debug), InfoType::DataIn => ("{", Level::Trace), InfoType::DataOut => ("}", Level::Trace), InfoType::SslDataIn | InfoType::SslDataOut => return, _ => return, }; match str::from_utf8(data) { Ok(s) => { for mut line in s.lines() { if line.starts_with("Authorization:") { line = "Authorization: [REDACTED]"; } else if line[..line.len().min(10)].eq_ignore_ascii_case("set-cookie") { line = "set-cookie: [REDACTED]"; } log!(level, "http-debug: {} {}", prefix, line); } } Err(_) => { log!( level, "http-debug: {} ({} bytes of data)", prefix, data.len() ); } } })?; } HttpTimeout::new(config) } #[must_use] pub struct HttpTimeout { pub dur: Duration, pub low_speed_limit: u32, } impl HttpTimeout { pub fn new(config: &Config) -> CargoResult { let config = config.http_config()?; let low_speed_limit = config.low_speed_limit.unwrap_or(10); let seconds = config .timeout .or_else(|| env::var("HTTP_TIMEOUT").ok().and_then(|s| s.parse().ok())) .unwrap_or(30); Ok(HttpTimeout { dur: Duration::new(seconds, 0), low_speed_limit, }) } pub fn configure(&self, handle: &mut Easy) -> CargoResult<()> { // The timeout option for libcurl by default times out the entire // transfer, but we probably don't want this. Instead we only set // timeouts for the connect phase as well as a "low speed" timeout so // if we don't receive many bytes in a large-ish period of time then we // time out. handle.connect_timeout(self.dur)?; handle.low_speed_time(self.dur)?; handle.low_speed_limit(self.low_speed_limit)?; Ok(()) } } /// Finds an explicit HTTP proxy if one is available. /// /// Favor cargo's `http.proxy`, then git's `http.proxy`. Proxies specified /// via environment variables are picked up by libcurl. 
fn http_proxy(config: &Config) -> CargoResult> { let http = config.http_config()?; if let Some(s) = &http.proxy { return Ok(Some(s.clone())); } if let Ok(cfg) = git2::Config::open_default() { if let Ok(s) = cfg.get_string("http.proxy") { return Ok(Some(s)); } } Ok(None) } /// Determine if an http proxy exists. /// /// Checks the following for existence, in order: /// /// * cargo's `http.proxy` /// * git's `http.proxy` /// * `http_proxy` env var /// * `HTTP_PROXY` env var /// * `https_proxy` env var /// * `HTTPS_PROXY` env var fn http_proxy_exists(config: &Config) -> CargoResult { if http_proxy(config)?.is_some() { Ok(true) } else { Ok(["http_proxy", "HTTP_PROXY", "https_proxy", "HTTPS_PROXY"] .iter() .any(|v| env::var(v).is_ok())) } } pub fn registry_login( config: &Config, token: Option, reg: Option, ) -> CargoResult<()> { let (registry, reg_cfg, _) = registry(config, token.clone(), None, reg.as_deref(), false, false)?; let token = match token { Some(token) => token, None => { drop_println!( config, "please paste the API Token found on {}/me below", registry.host() ); let mut line = String::new(); let input = io::stdin(); input .lock() .read_line(&mut line) .with_context(|| "failed to read stdin")?; // Automatically remove `cargo login` from an inputted token to // allow direct pastes from `registry.host()`/me. 
line.replace("cargo login", "").trim().to_string() } }; if let RegistryConfig::Token(old_token) = ®_cfg { if old_token == &token { config.shell().status("Login", "already logged in")?; return Ok(()); } } auth::login( config, token, reg_cfg.as_process(), reg.as_deref(), registry.host(), )?; config.shell().status( "Login", format!( "token for `{}` saved", reg.as_ref().map_or(CRATES_IO_DOMAIN, String::as_str) ), )?; Ok(()) } pub fn registry_logout(config: &Config, reg: Option) -> CargoResult<()> { let (registry, reg_cfg, _) = registry(config, None, None, reg.as_deref(), false, false)?; let reg_name = reg.as_deref().unwrap_or(CRATES_IO_DOMAIN); if reg_cfg.is_none() { config.shell().status( "Logout", format!("not currently logged in to `{}`", reg_name), )?; return Ok(()); } auth::logout( config, reg_cfg.as_process(), reg.as_deref(), registry.host(), )?; config.shell().status( "Logout", format!( "token for `{}` has been removed from local storage", reg_name ), )?; Ok(()) } pub struct OwnersOptions { pub krate: Option, pub token: Option, pub index: Option, pub to_add: Option>, pub to_remove: Option>, pub list: bool, pub registry: Option, } pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> { let name = match opts.krate { Some(ref name) => name.clone(), None => { let manifest_path = find_root_manifest_for_wd(config.cwd())?; let ws = Workspace::new(&manifest_path, config)?; ws.current()?.package_id().name().to_string() } }; let (mut registry, _, _) = registry( config, opts.token.clone(), opts.index.as_deref(), opts.registry.as_deref(), true, true, )?; if let Some(ref v) = opts.to_add { let v = v.iter().map(|s| &s[..]).collect::>(); let msg = registry.add_owners(&name, &v).with_context(|| { format!( "failed to invite owners to crate `{}` on registry at {}", name, registry.host() ) })?; config.shell().status("Owner", msg)?; } if let Some(ref v) = opts.to_remove { let v = v.iter().map(|s| &s[..]).collect::>(); config .shell() .status("Owner", 
format!("removing {:?} from crate {}", v, name))?; registry.remove_owners(&name, &v).with_context(|| { format!( "failed to remove owners from crate `{}` on registry at {}", name, registry.host() ) })?; } if opts.list { let owners = registry.list_owners(&name).with_context(|| { format!( "failed to list owners of crate `{}` on registry at {}", name, registry.host() ) })?; for owner in owners.iter() { drop_print!(config, "{}", owner.login); match (owner.name.as_ref(), owner.email.as_ref()) { (Some(name), Some(email)) => drop_println!(config, " ({} <{}>)", name, email), (Some(s), None) | (None, Some(s)) => drop_println!(config, " ({})", s), (None, None) => drop_println!(config), } } } Ok(()) } pub fn yank( config: &Config, krate: Option, version: Option, token: Option, index: Option, undo: bool, reg: Option, ) -> CargoResult<()> { let name = match krate { Some(name) => name, None => { let manifest_path = find_root_manifest_for_wd(config.cwd())?; let ws = Workspace::new(&manifest_path, config)?; ws.current()?.package_id().name().to_string() } }; let version = match version { Some(v) => v, None => bail!("a version must be specified to yank"), }; let (mut registry, _, _) = registry(config, token, index.as_deref(), reg.as_deref(), true, true)?; let package_spec = format!("{}@{}", name, version); if undo { config.shell().status("Unyank", package_spec)?; registry.unyank(&name, &version).with_context(|| { format!( "failed to undo a yank from the registry at {}", registry.host() ) })?; } else { config.shell().status("Yank", package_spec)?; registry .yank(&name, &version) .with_context(|| format!("failed to yank from the registry at {}", registry.host()))?; } Ok(()) } /// Gets the SourceId for an index or registry setting. /// /// The `index` and `reg` values are from the command-line or config settings. /// If both are None, returns the source for crates.io. 
fn get_source_id(config: &Config, index: Option<&str>, reg: Option<&str>) -> CargoResult { match (reg, index) { (Some(r), _) => SourceId::alt_registry(config, r), (_, Some(i)) => SourceId::for_registry(&i.into_url()?), _ => { let map = SourceConfigMap::new(config)?; let src = map.load(SourceId::crates_io(config)?, &HashSet::new())?; Ok(src.replaced_source_id()) } } } pub fn search( query: &str, config: &Config, index: Option, limit: u32, reg: Option, ) -> CargoResult<()> { fn truncate_with_ellipsis(s: &str, max_width: usize) -> String { // We should truncate at grapheme-boundary and compute character-widths, // yet the dependencies on unicode-segmentation and unicode-width are // not worth it. let mut chars = s.chars(); let mut prefix = (&mut chars).take(max_width - 1).collect::(); if chars.next().is_some() { prefix.push('…'); } prefix } let (mut registry, _, source_id) = registry(config, None, index.as_deref(), reg.as_deref(), false, false)?; let (crates, total_crates) = registry.search(query, limit).with_context(|| { format!( "failed to retrieve search results from the registry at {}", registry.host() ) })?; let names = crates .iter() .map(|krate| format!("{} = \"{}\"", krate.name, krate.max_version)) .collect::>(); let description_margin = names.iter().map(|s| s.len() + 4).max().unwrap_or_default(); let description_length = cmp::max(80, 128 - description_margin); let descriptions = crates.iter().map(|krate| { krate .description .as_ref() .map(|desc| truncate_with_ellipsis(&desc.replace("\n", " "), description_length)) }); for (name, description) in names.into_iter().zip(descriptions) { let line = match description { Some(desc) => { let space = repeat(' ') .take(description_margin - name.len()) .collect::(); name + &space + "# " + &desc } None => name, }; let mut fragments = line.split(query).peekable(); while let Some(fragment) = fragments.next() { let _ = config.shell().write_stdout(fragment, &ColorSpec::new()); if fragments.peek().is_some() { let _ = config 
.shell() .write_stdout(query, &ColorSpec::new().set_bold(true).set_fg(Some(Green))); } } let _ = config.shell().write_stdout("\n", &ColorSpec::new()); } let search_max_limit = 100; if total_crates > limit && limit < search_max_limit { let _ = config.shell().write_stdout( format_args!( "... and {} crates more (use --limit N to see more)\n", total_crates - limit ), &ColorSpec::new(), ); } else if total_crates > limit && limit >= search_max_limit { let extra = if source_id.is_default_registry() { format!( " (go to https://crates.io/search?q={} to see more)", percent_encode(query.as_bytes(), NON_ALPHANUMERIC) ) } else { String::new() }; let _ = config.shell().write_stdout( format_args!("... and {} crates more{}\n", total_crates - limit, extra), &ColorSpec::new(), ); } Ok(()) } cargo-0.66.0/src/cargo/ops/registry/000077500000000000000000000000001432416201200172245ustar00rootroot00000000000000cargo-0.66.0/src/cargo/ops/registry/auth.rs000066400000000000000000000161231432416201200205360ustar00rootroot00000000000000//! Registry authentication support. use crate::sources::CRATES_IO_REGISTRY; use crate::util::{config, CargoResult, Config}; use anyhow::{bail, format_err, Context as _}; use cargo_util::ProcessError; use std::io::{Read, Write}; use std::path::PathBuf; use std::process::{Command, Stdio}; use super::RegistryConfig; enum Action { Get, Store(String), Erase, } /// Returns the token to use for the given registry. 
pub(super) fn auth_token(
    config: &Config,
    cli_token: Option<&str>,
    credential: &RegistryConfig,
    registry_name: Option<&str>,
    api_url: &str,
) -> CargoResult<String> {
    // Precedence: command-line token, then configured token, then the
    // configured credential process.
    let token = match (cli_token, credential) {
        (None, RegistryConfig::None) => {
            bail!("no upload token found, please run `cargo login` or pass `--token`");
        }
        (Some(cli_token), _) => cli_token.to_string(),
        (None, RegistryConfig::Token(config_token)) => config_token.to_string(),
        (None, RegistryConfig::Process(process)) => {
            let registry_name = registry_name.unwrap_or(CRATES_IO_REGISTRY);
            // `Action::Get` always yields `Some` on success.
            run_command(config, process, registry_name, api_url, Action::Get)?.unwrap()
        }
    };
    Ok(token)
}

/// Saves the given token.
pub(super) fn login(
    config: &Config,
    token: String,
    credential_process: Option<&(PathBuf, Vec<String>)>,
    registry_name: Option<&str>,
    api_url: &str,
) -> CargoResult<()> {
    if let Some(process) = credential_process {
        let registry_name = registry_name.unwrap_or(CRATES_IO_REGISTRY);
        run_command(
            config,
            process,
            registry_name,
            api_url,
            Action::Store(token),
        )?;
    } else {
        config::save_credentials(config, Some(token), registry_name)?;
    }
    Ok(())
}

/// Removes the token for the given registry.
pub(super) fn logout( config: &Config, credential_process: Option<&(PathBuf, Vec)>, registry_name: Option<&str>, api_url: &str, ) -> CargoResult<()> { if let Some(process) = credential_process { let registry_name = registry_name.unwrap_or(CRATES_IO_REGISTRY); run_command(config, process, registry_name, api_url, Action::Erase)?; } else { config::save_credentials(config, None, registry_name)?; } Ok(()) } fn run_command( config: &Config, process: &(PathBuf, Vec), name: &str, api_url: &str, action: Action, ) -> CargoResult> { let cred_proc; let (exe, args) = if process.0.to_str().unwrap_or("").starts_with("cargo:") { cred_proc = sysroot_credential(config, process)?; &cred_proc } else { process }; if !args.iter().any(|arg| arg.contains("{action}")) { let msg = |which| { format!( "credential process `{}` cannot be used to {}, \ the credential-process configuration value must pass the \ `{{action}}` argument in the config to support this command", exe.display(), which ) }; match action { Action::Get => {} Action::Store(_) => bail!(msg("log in")), Action::Erase => bail!(msg("log out")), } } let action_str = match action { Action::Get => "get", Action::Store(_) => "store", Action::Erase => "erase", }; let args: Vec<_> = args .iter() .map(|arg| { arg.replace("{action}", action_str) .replace("{name}", name) .replace("{api_url}", api_url) }) .collect(); let mut cmd = Command::new(&exe); cmd.args(args) .env("CARGO", config.cargo_exe()?) 
.env("CARGO_REGISTRY_NAME", name) .env("CARGO_REGISTRY_API_URL", api_url); match action { Action::Get => { cmd.stdout(Stdio::piped()); } Action::Store(_) => { cmd.stdin(Stdio::piped()); } Action::Erase => {} } let mut child = cmd.spawn().with_context(|| { let verb = match action { Action::Get => "fetch", Action::Store(_) => "store", Action::Erase => "erase", }; format!( "failed to execute `{}` to {} authentication token for registry `{}`", exe.display(), verb, name ) })?; let mut token = None; match &action { Action::Get => { let mut buffer = String::new(); log::debug!("reading into buffer"); child .stdout .as_mut() .unwrap() .read_to_string(&mut buffer) .with_context(|| { format!( "failed to read token from registry credential process `{}`", exe.display() ) })?; if let Some(end) = buffer.find('\n') { if buffer.len() > end + 1 { bail!( "credential process `{}` returned more than one line of output; \ expected a single token", exe.display() ); } buffer.truncate(end); } token = Some(buffer); } Action::Store(token) => { writeln!(child.stdin.as_ref().unwrap(), "{}", token).with_context(|| { format!( "failed to send token to registry credential process `{}`", exe.display() ) })?; } Action::Erase => {} } let status = child.wait().with_context(|| { format!( "registry credential process `{}` exit failure", exe.display() ) })?; if !status.success() { let msg = match action { Action::Get => "failed to authenticate to registry", Action::Store(_) => "failed to store token to registry", Action::Erase => "failed to erase token from registry", }; return Err(ProcessError::new( &format!( "registry credential process `{}` {} `{}`", exe.display(), msg, name ), Some(status), None, ) .into()); } Ok(token) } /// Gets the path to the libexec processes in the sysroot. 
fn sysroot_credential(
    config: &Config,
    process: &(PathBuf, Vec<String>),
) -> CargoResult<(PathBuf, Vec<String>)> {
    // `cargo:foo` maps to `<sysroot>/libexec/cargo-credential-foo`.
    let cred_name = process.0.to_str().unwrap().strip_prefix("cargo:").unwrap();
    let cargo = config.cargo_exe()?;
    let root = cargo
        .parent()
        .and_then(|p| p.parent())
        .ok_or_else(|| format_err!("expected cargo path {}", cargo.display()))?;
    let exe = root.join("libexec").join(format!(
        "cargo-credential-{}{}",
        cred_name,
        std::env::consts::EXE_SUFFIX
    ));
    let mut args = process.1.clone();
    // Built-in helpers always receive the action as their final argument.
    if !args.iter().any(|arg| arg == "{action}") {
        args.push("{action}".to_string());
    }
    Ok((exe, args))
}
cargo-0.66.0/src/cargo/ops/resolve.rs000066400000000000000000001012241432416201200174010ustar00rootroot00000000000000//! High-level APIs for executing the resolver.
//!
//! This module provides functions for running the resolver given a workspace.
//! There are roughly 3 main functions:
//!
//! - `resolve_ws`: A simple, high-level function with no options.
//! - `resolve_ws_with_opts`: A medium-level function with options like
//!   user-provided features. This is the most appropriate function to use in
//!   most cases.
//! - `resolve_with_previous`: A low-level function for running the resolver,
//!   providing the most power and flexibility.
use crate::core::compiler::{CompileKind, RustcTargetData};
use crate::core::registry::{LockedPatchDependency, PackageRegistry};
use crate::core::resolver::features::{
    CliFeatures, FeatureOpts, FeatureResolver, ForceAllTargets, RequestedFeatures,
    ResolvedFeatures,
};
use crate::core::resolver::{
    self, HasDevUnits, Resolve, ResolveOpts, ResolveVersion, VersionPreferences,
};
use crate::core::summary::Summary;
use crate::core::Feature;
use crate::core::{GitReference, PackageId, PackageIdSpec, PackageSet, SourceId, Workspace};
use crate::ops;
use crate::sources::PathSource;
use crate::util::errors::CargoResult;
use crate::util::{profile, CanonicalUrl};
use anyhow::Context as _;
use log::{debug, trace};
use std::collections::{HashMap, HashSet};

/// Result for `resolve_ws_with_opts`.
pub struct WorkspaceResolve<'cfg> {
    /// Packages to be downloaded.
    pub pkg_set: PackageSet<'cfg>,
    /// The resolve for the entire workspace.
    ///
    /// This may be `None` for things like `cargo install` and `-Zavoid-dev-deps`.
    /// This does not include `paths` overrides.
    pub workspace_resolve: Option<Resolve>,
    /// The narrowed resolve, with the specific features enabled, and only the
    /// given package specs requested.
    pub targeted_resolve: Resolve,
    /// The features activated per package.
    pub resolved_features: ResolvedFeatures,
}

// Appended to the "unused patch" diagnostic emitted by the resolver.
// NOTE(review): the exact line-break positions inside this literal were lost
// when the archive text was collapsed; confirm against upstream.
const UNUSED_PATCH_WARNING: &str = "\
Check that the patched package version and available features are compatible
with the dependency requirements. If the patch has a different version from
what is locked in the Cargo.lock file, run `cargo update` to use the new
version. This may also occur with an optional dependency that is not enabled.";

/// Resolves all dependencies for the workspace using the previous
/// lock file as a guide if present.
///
/// This function will also write the result of resolution as a new lock file
/// (unless it is an ephemeral workspace such as `cargo install` or `cargo
/// package`).
/// /// This is a simple interface used by commands like `clean`, `fetch`, and /// `package`, which don't specify any options or features. pub fn resolve_ws<'a>(ws: &Workspace<'a>) -> CargoResult<(PackageSet<'a>, Resolve)> { let mut registry = PackageRegistry::new(ws.config())?; let resolve = resolve_with_registry(ws, &mut registry)?; let packages = get_resolved_packages(&resolve, registry)?; Ok((packages, resolve)) } /// Resolves dependencies for some packages of the workspace, /// taking into account `paths` overrides and activated features. /// /// This function will also write the result of resolution as a new lock file /// (unless `Workspace::require_optional_deps` is false, such as `cargo /// install` or `-Z avoid-dev-deps`), or it is an ephemeral workspace (`cargo /// install` or `cargo package`). /// /// `specs` may be empty, which indicates it should resolve all workspace /// members. In this case, `opts.all_features` must be `true`. pub fn resolve_ws_with_opts<'cfg>( ws: &Workspace<'cfg>, target_data: &RustcTargetData<'cfg>, requested_targets: &[CompileKind], cli_features: &CliFeatures, specs: &[PackageIdSpec], has_dev_units: HasDevUnits, force_all_targets: ForceAllTargets, ) -> CargoResult> { let mut registry = PackageRegistry::new(ws.config())?; let mut add_patches = true; let resolve = if ws.ignore_lock() { None } else if ws.require_optional_deps() { // First, resolve the root_package's *listed* dependencies, as well as // downloading and updating all remotes and such. let resolve = resolve_with_registry(ws, &mut registry)?; // No need to add patches again, `resolve_with_registry` has done it. add_patches = false; // Second, resolve with precisely what we're doing. Filter out // transitive dependencies if necessary, specify features, handle // overrides, etc. 
let _p = profile::start("resolving with overrides..."); add_overrides(&mut registry, ws)?; for &(ref replace_spec, ref dep) in ws.root_replace() { if !resolve .iter() .any(|r| replace_spec.matches(r) && !dep.matches_id(r)) { ws.config() .shell() .warn(format!("package replacement is not used: {}", replace_spec))? } if dep.features().len() != 0 || !dep.uses_default_features() { ws.config() .shell() .warn(format!( "replacement for `{}` uses the features mechanism. \ default-features and features will not take effect because the replacement dependency does not support this mechanism", dep.package_name() ))? } } Some(resolve) } else { ops::load_pkg_lockfile(ws)? }; let resolved_with_overrides = resolve_with_previous( &mut registry, ws, cli_features, has_dev_units, resolve.as_ref(), None, specs, add_patches, )?; let pkg_set = get_resolved_packages(&resolved_with_overrides, registry)?; let member_ids = ws .members_with_features(specs, cli_features)? .into_iter() .map(|(p, _fts)| p.package_id()) .collect::>(); pkg_set.download_accessible( &resolved_with_overrides, &member_ids, has_dev_units, requested_targets, target_data, force_all_targets, )?; let feature_opts = FeatureOpts::new(ws, has_dev_units, force_all_targets)?; let resolved_features = FeatureResolver::resolve( ws, target_data, &resolved_with_overrides, &pkg_set, cli_features, specs, requested_targets, feature_opts, )?; pkg_set.warn_no_lib_packages_and_artifact_libs_overlapping_deps( ws, &resolved_with_overrides, &member_ids, has_dev_units, requested_targets, target_data, force_all_targets, )?; Ok(WorkspaceResolve { pkg_set, workspace_resolve: resolve, targeted_resolve: resolved_with_overrides, resolved_features, }) } fn resolve_with_registry<'cfg>( ws: &Workspace<'cfg>, registry: &mut PackageRegistry<'cfg>, ) -> CargoResult { let prev = ops::load_pkg_lockfile(ws)?; let mut resolve = resolve_with_previous( registry, ws, &CliFeatures::new_all(true), HasDevUnits::Yes, prev.as_ref(), None, &[], true, )?; if 
!ws.is_ephemeral() && ws.require_optional_deps() { ops::write_pkg_lockfile(ws, &mut resolve)?; } Ok(resolve) } /// Resolves all dependencies for a package using an optional previous instance /// of resolve to guide the resolution process. /// /// This also takes an optional hash set, `to_avoid`, which is a list of package /// IDs that should be avoided when consulting the previous instance of resolve /// (often used in pairings with updates). /// /// The previous resolve normally comes from a lock file. This function does not /// read or write lock files from the filesystem. /// /// `specs` may be empty, which indicates it should resolve all workspace /// members. In this case, `opts.all_features` must be `true`. /// /// If `register_patches` is true, then entries from the `[patch]` table in /// the manifest will be added to the given `PackageRegistry`. pub fn resolve_with_previous<'cfg>( registry: &mut PackageRegistry<'cfg>, ws: &Workspace<'cfg>, cli_features: &CliFeatures, has_dev_units: HasDevUnits, previous: Option<&Resolve>, to_avoid: Option<&HashSet>, specs: &[PackageIdSpec], register_patches: bool, ) -> CargoResult { // We only want one Cargo at a time resolving a crate graph since this can // involve a lot of frobbing of the global caches. let _lock = ws.config().acquire_package_cache_lock()?; // Here we place an artificial limitation that all non-registry sources // cannot be locked at more than one revision. This means that if a Git // repository provides more than one package, they must all be updated in // step when any of them are updated. // // TODO: this seems like a hokey reason to single out the registry as being // different. 
let to_avoid_sources: HashSet = to_avoid .map(|set| { set.iter() .map(|p| p.source_id()) .filter(|s| !s.is_registry()) .collect() }) .unwrap_or_default(); let pre_patch_keep = |p: &PackageId| { !to_avoid_sources.contains(&p.source_id()) && match to_avoid { Some(set) => !set.contains(p), None => true, } }; // While registering patches, we will record preferences for particular versions // of various packages. let mut version_prefs = VersionPreferences::default(); // This is a set of PackageIds of `[patch]` entries, and some related locked PackageIds, for // which locking should be avoided (but which will be preferred when searching dependencies, // via prefer_patch_deps below) let mut avoid_patch_ids = HashSet::new(); if register_patches { for (url, patches) in ws.root_patch()?.iter() { for patch in patches { version_prefs.prefer_dependency(patch.clone()); } let previous = match previous { Some(r) => r, None => { let patches: Vec<_> = patches.iter().map(|p| (p, None)).collect(); let unlock_ids = registry.patch(url, &patches)?; // Since nothing is locked, this shouldn't possibly return anything. assert!(unlock_ids.is_empty()); continue; } }; // This is a list of pairs where the first element of the pair is // the raw `Dependency` which matches what's listed in `Cargo.toml`. // The second element is, if present, the "locked" version of // the `Dependency` as well as the `PackageId` that it previously // resolved to. This second element is calculated by looking at the // previous resolve graph, which is primarily what's done here to // build the `registrations` list. let mut registrations = Vec::new(); for dep in patches { let candidates = || { previous .iter() .chain(previous.unused_patches().iter().cloned()) .filter(&pre_patch_keep) }; let lock = match candidates().find(|id| dep.matches_id(*id)) { // If we found an exactly matching candidate in our list of // candidates, then that's the one to use. 
Some(package_id) => { let mut locked_dep = dep.clone(); locked_dep.lock_to(package_id); Some(LockedPatchDependency { dependency: locked_dep, package_id, alt_package_id: None, }) } None => { // If the candidate does not have a matching source id // then we may still have a lock candidate. If we're // loading a v2-encoded resolve graph and `dep` is a // git dep with `branch = 'master'`, then this should // also match candidates without `branch = 'master'` // (which is now treated separately in Cargo). // // In this scenario we try to convert candidates located // in the resolve graph to explicitly having the // `master` branch (if they otherwise point to // `DefaultBranch`). If this works and our `dep` // matches that then this is something we'll lock to. match candidates().find(|&id| { match master_branch_git_source(id, previous) { Some(id) => dep.matches_id(id), None => false, } }) { Some(id_using_default) => { let id_using_master = id_using_default.with_source_id( dep.source_id().with_precise( id_using_default .source_id() .precise() .map(|s| s.to_string()), ), ); let mut locked_dep = dep.clone(); locked_dep.lock_to(id_using_master); Some(LockedPatchDependency { dependency: locked_dep, package_id: id_using_master, // Note that this is where the magic // happens, where the resolve graph // probably has locks pointing to // DefaultBranch sources, and by including // this here those will get transparently // rewritten to Branch("master") which we // have a lock entry for. alt_package_id: Some(id_using_default), }) } // No locked candidate was found None => None, } } }; registrations.push((dep, lock)); } let canonical = CanonicalUrl::new(url)?; for (orig_patch, unlock_id) in registry.patch(url, ®istrations)? { // Avoid the locked patch ID. avoid_patch_ids.insert(unlock_id); // Also avoid the thing it is patching. 
avoid_patch_ids.extend(previous.iter().filter(|id| { orig_patch.matches_ignoring_source(*id) && *id.source_id().canonical_url() == canonical })); } } } debug!("avoid_patch_ids={:?}", avoid_patch_ids); let keep = |p: &PackageId| pre_patch_keep(p) && !avoid_patch_ids.contains(p); let dev_deps = ws.require_optional_deps() || has_dev_units == HasDevUnits::Yes; if let Some(r) = previous { trace!("previous: {:?}", r); // In the case where a previous instance of resolve is available, we // want to lock as many packages as possible to the previous version // without disturbing the graph structure. register_previous_locks(ws, registry, r, &keep, dev_deps); // Prefer to use anything in the previous lock file, aka we want to have conservative updates. for id in r.iter().filter(keep) { debug!("attempting to prefer {}", id); version_prefs.prefer_package_id(id); } } if register_patches { registry.lock_patches(); } // Some packages are already loaded when setting up a workspace. This // makes it so anything that was already loaded will not be loaded again. // Without this there were cases where members would be parsed multiple times ws.preload(registry); // In case any members were not already loaded or the Workspace is_ephemeral. for member in ws.members() { registry.add_sources(Some(member.package_id().source_id()))?; } let summaries: Vec<(Summary, ResolveOpts)> = ws .members_with_features(specs, cli_features)? 
.into_iter() .map(|(member, features)| { let summary = registry.lock(member.summary().clone()); ( summary, ResolveOpts { dev_deps, features: RequestedFeatures::CliFeatures(features), }, ) }) .collect(); let root_replace = ws.root_replace(); let replace = match previous { Some(r) => root_replace .iter() .map(|&(ref spec, ref dep)| { for (&key, &val) in r.replacements().iter() { if spec.matches(key) && dep.matches_id(val) && keep(&val) { let mut dep = dep.clone(); dep.lock_to(val); return (spec.clone(), dep); } } (spec.clone(), dep.clone()) }) .collect::>(), None => root_replace.to_vec(), }; ws.preload(registry); let mut resolved = resolver::resolve( &summaries, &replace, registry, &version_prefs, Some(ws.config()), ws.unstable_features() .require(Feature::public_dependency()) .is_ok(), )?; let patches: Vec<_> = registry .patches() .values() .flat_map(|v| v.iter().cloned()) .collect(); resolved.register_used_patches(&patches[..]); if register_patches && !resolved.unused_patches().is_empty() { emit_warnings_of_unused_patches(ws, &resolved, registry)?; } if let Some(previous) = previous { resolved.merge_from(previous)?; } Ok(resolved) } /// Read the `paths` configuration variable to discover all path overrides that /// have been configured. pub fn add_overrides<'a>( registry: &mut PackageRegistry<'a>, ws: &Workspace<'a>, ) -> CargoResult<()> { let config = ws.config(); let paths = match config.get_list("paths")? { Some(list) => list, None => return Ok(()), }; let paths = paths.val.iter().map(|(s, def)| { // The path listed next to the string is the config file in which the // key was located, so we want to pop off the `.cargo/config` component // to get the directory containing the `.cargo` folder. 
(def.root(config).join(s), def) }); for (path, definition) in paths { let id = SourceId::for_path(&path)?; let mut source = PathSource::new_recursive(&path, id, ws.config()); source.update().with_context(|| { format!( "failed to update path override `{}` \ (defined in `{}`)", path.display(), definition ) })?; registry.add_override(Box::new(source)); } Ok(()) } pub fn get_resolved_packages<'cfg>( resolve: &Resolve, registry: PackageRegistry<'cfg>, ) -> CargoResult> { let ids: Vec = resolve.iter().collect(); registry.get(&ids) } /// In this function we're responsible for informing the `registry` of all /// locked dependencies from the previous lock file we had, `resolve`. /// /// This gets particularly tricky for a couple of reasons. The first is that we /// want all updates to be conservative, so we actually want to take the /// `resolve` into account (and avoid unnecessary registry updates and such). /// the second, however, is that we want to be resilient to updates of /// manifests. For example if a dependency is added or a version is changed we /// want to make sure that we properly re-resolve (conservatively) instead of /// providing an opaque error. /// /// The logic here is somewhat subtle, but there should be more comments below to /// clarify things. /// /// Note that this function, at the time of this writing, is basically the /// entire fix for issue #4127. fn register_previous_locks( ws: &Workspace<'_>, registry: &mut PackageRegistry<'_>, resolve: &Resolve, keep: &dyn Fn(&PackageId) -> bool, dev_deps: bool, ) { let path_pkg = |id: SourceId| { if !id.is_path() { return None; } if let Ok(path) = id.url().to_file_path() { if let Ok(pkg) = ws.load(&path.join("Cargo.toml")) { return Some(pkg); } } None }; // Ok so we've been passed in a `keep` function which basically says "if I // return `true` then this package wasn't listed for an update on the command // line". 
That is, if we run `cargo update -p foo` then `keep(bar)` will return // `true`, whereas `keep(foo)` will return `false` (roughly speaking). // // This isn't actually quite what we want, however. Instead we want to // further refine this `keep` function with *all transitive dependencies* of // the packages we're not keeping. For example, consider a case like this: // // * There's a crate `log`. // * There's a crate `serde` which depends on `log`. // // Let's say we then run `cargo update -p serde`. This may *also* want to // update the `log` dependency as our newer version of `serde` may have a // new minimum version required for `log`. Now this isn't always guaranteed // to work. What'll happen here is we *won't* lock the `log` dependency nor // the `log` crate itself, but we will inform the registry "please prefer // this version of `log`". That way if our newer version of serde works with // the older version of `log`, we conservatively won't update `log`. If, // however, nothing else in the dependency graph depends on `log` and the // newer version of `serde` requires a new version of `log` it'll get pulled // in (as we didn't accidentally lock it to an old version). // // Additionally, here we process all path dependencies listed in the previous // resolve. They can not only have their dependencies change but also // the versions of the package change as well. If this ends up happening // then we want to make sure we don't lock a package ID node that doesn't // actually exist. Note that we don't do transitive visits of all the // package's dependencies here as that'll be covered below to poison those // if they changed. 
let mut avoid_locking = HashSet::new(); registry.add_to_yanked_whitelist(resolve.iter().filter(keep)); for node in resolve.iter() { if !keep(&node) { add_deps(resolve, node, &mut avoid_locking); } else if let Some(pkg) = path_pkg(node.source_id()) { if pkg.package_id() != node { avoid_locking.insert(node); } } } // Ok, but the above loop isn't the entire story! Updates to the dependency // graph can come from two locations, the `cargo update` command or // manifests themselves. For example a manifest on the filesystem may // have been updated to have an updated version requirement on `serde`. In // this case both `keep(serde)` and `keep(log)` return `true` (the `keep` // that's an argument to this function). We, however, don't want to keep // either of those! Otherwise we'll get obscure resolve errors about locked // versions. // // To solve this problem we iterate over all packages with path sources // (aka ones with manifests that are changing) and take a look at all of // their dependencies. If any dependency does not match something in the // previous lock file, then we're guaranteed that the main resolver will // update the source of this dependency no matter what. Knowing this we // poison all packages from the same source, forcing them all to get // updated. // // This may seem like a heavy hammer, and it is! It means that if you change // anything from crates.io then all of crates.io becomes unlocked. Note, // however, that we still want conservative updates. This currently happens // because the first candidate the resolver picks is the previously locked // version, and only if that fails to activate to we move on and try // a different version. (giving the guise of conservative updates) // // For example let's say we had `serde = "0.1"` written in our lock file. // When we later edit this to `serde = "0.1.3"` we don't want to lock serde // at its old version, 0.1.1. 
Instead we want to allow it to update to // `0.1.3` and update its own dependencies (like above). To do this *all // crates from crates.io* are not locked (aka added to `avoid_locking`). // For dependencies like `log` their previous version in the lock file will // come up first before newer version, if newer version are available. let mut path_deps = ws.members().cloned().collect::>(); let mut visited = HashSet::new(); while let Some(member) = path_deps.pop() { if !visited.insert(member.package_id()) { continue; } let is_ws_member = ws.is_member(&member); for dep in member.dependencies() { // If this dependency didn't match anything special then we may want // to poison the source as it may have been added. If this path // dependencies is **not** a workspace member, however, and it's an // optional/non-transitive dependency then it won't be necessarily // be in our lock file. If this shows up then we avoid poisoning // this source as otherwise we'd repeatedly update the registry. // // TODO: this breaks adding an optional dependency in a // non-workspace member and then simultaneously editing the // dependency on that crate to enable the feature. For now, // this bug is better than the always-updating registry though. if !is_ws_member && (dep.is_optional() || !dep.is_transitive()) { continue; } // If dev-dependencies aren't being resolved, skip them. if !dep.is_transitive() && !dev_deps { continue; } // If this is a path dependency, then try to push it onto our // worklist. if let Some(pkg) = path_pkg(dep.source_id()) { path_deps.push(pkg); continue; } // If we match *anything* in the dependency graph then we consider // ourselves all ok, and assume that we'll resolve to that. if resolve.iter().any(|id| dep.matches_ignoring_source(id)) { continue; } // Ok if nothing matches, then we poison the source of these // dependencies and the previous lock file. 
debug!( "poisoning {} because {} looks like it changed {}", dep.source_id(), member.package_id(), dep.package_name() ); for id in resolve .iter() .filter(|id| id.source_id() == dep.source_id()) { add_deps(resolve, id, &mut avoid_locking); } } } // Alright now that we've got our new, fresh, shiny, and refined `keep` // function let's put it to action. Take a look at the previous lock file, // filter everything by this callback, and then shove everything else into // the registry as a locked dependency. let keep = |id: &PackageId| keep(id) && !avoid_locking.contains(id); registry.clear_lock(); for node in resolve.iter().filter(keep) { let deps = resolve .deps_not_replaced(node) .map(|p| p.0) .filter(keep) .collect::>(); // In the v2 lockfile format and prior the `branch=master` dependency // directive was serialized the same way as the no-branch-listed // directive. Nowadays in Cargo, however, these two directives are // considered distinct and are no longer represented the same way. To // maintain compatibility with older lock files we register locked nodes // for *both* the master branch and the default branch. // // Note that this is only applicable for loading older resolves now at // this point. All new lock files are encoded as v3-or-later, so this is // just compat for loading an old lock file successfully. if let Some(node) = master_branch_git_source(node, resolve) { registry.register_lock(node, deps.clone()); } registry.register_lock(node, deps); } /// Recursively add `node` and all its transitive dependencies to `set`. 
fn add_deps(resolve: &Resolve, node: PackageId, set: &mut HashSet) { if !set.insert(node) { return; } debug!("ignoring any lock pointing directly at {}", node); for (dep, _) in resolve.deps_not_replaced(node) { add_deps(resolve, dep, set); } } } fn master_branch_git_source(id: PackageId, resolve: &Resolve) -> Option { if resolve.version() <= ResolveVersion::V2 { let source = id.source_id(); if let Some(GitReference::DefaultBranch) = source.git_reference() { let new_source = SourceId::for_git(source.url(), GitReference::Branch("master".to_string())) .unwrap() .with_precise(source.precise().map(|s| s.to_string())); return Some(id.with_source_id(new_source)); } } None } /// Emits warnings of unused patches case by case. /// /// This function does its best to provide more targeted and helpful /// (such as showing close candidates that failed to match). However, that's /// not terribly easy to do, so just show a general help message if we cannot. fn emit_warnings_of_unused_patches( ws: &Workspace<'_>, resolve: &Resolve, registry: &PackageRegistry<'_>, ) -> CargoResult<()> { const MESSAGE: &str = "was not used in the crate graph."; // Patch package with the source URLs being patch let mut patch_pkgid_to_urls = HashMap::new(); for (url, summaries) in registry.patches().iter() { for summary in summaries.iter() { patch_pkgid_to_urls .entry(summary.package_id()) .or_insert_with(HashSet::new) .insert(url); } } // pkg name -> all source IDs of under the same pkg name let mut source_ids_grouped_by_pkg_name = HashMap::new(); for pkgid in resolve.iter() { source_ids_grouped_by_pkg_name .entry(pkgid.name()) .or_insert_with(HashSet::new) .insert(pkgid.source_id()); } let mut unemitted_unused_patches = Vec::new(); for unused in resolve.unused_patches().iter() { // Show alternative source URLs if the source URLs being patch // cannot not be found in the crate graph. 
match ( source_ids_grouped_by_pkg_name.get(&unused.name()), patch_pkgid_to_urls.get(unused), ) { (Some(ids), Some(patched_urls)) if ids .iter() .all(|id| !patched_urls.contains(id.canonical_url())) => { use std::fmt::Write; let mut msg = String::new(); writeln!(msg, "Patch `{}` {}", unused, MESSAGE)?; write!( msg, "Perhaps you misspelled the source URL being patched.\n\ Possible URLs for `[patch.]`:", )?; for id in ids.iter() { write!(msg, "\n {}", id.display_registry_name())?; } ws.config().shell().warn(msg)?; } _ => unemitted_unused_patches.push(unused), } } // Show general help message. if !unemitted_unused_patches.is_empty() { let warnings: Vec<_> = unemitted_unused_patches .iter() .map(|pkgid| format!("Patch `{}` {}", pkgid, MESSAGE)) .collect(); ws.config() .shell() .warn(format!("{}\n{}", warnings.join("\n"), UNUSED_PATCH_WARNING))?; } return Ok(()); } cargo-0.66.0/src/cargo/ops/tree/000077500000000000000000000000001432416201200163135ustar00rootroot00000000000000cargo-0.66.0/src/cargo/ops/tree/format/000077500000000000000000000000001432416201200176035ustar00rootroot00000000000000cargo-0.66.0/src/cargo/ops/tree/format/mod.rs000066400000000000000000000110221432416201200207240ustar00rootroot00000000000000use std::fmt; use anyhow::{bail, Error}; use self::parse::{Parser, RawChunk}; use super::{Graph, Node}; mod parse; enum Chunk { Raw(String), Package, License, Repository, Features, LibName, } pub struct Pattern(Vec); impl Pattern { pub fn new(format: &str) -> Result { let mut chunks = vec![]; for raw in Parser::new(format) { let chunk = match raw { RawChunk::Text(text) => Chunk::Raw(text.to_owned()), RawChunk::Argument("p") => Chunk::Package, RawChunk::Argument("l") => Chunk::License, RawChunk::Argument("r") => Chunk::Repository, RawChunk::Argument("f") => Chunk::Features, RawChunk::Argument("lib") => Chunk::LibName, RawChunk::Argument(a) => { bail!("unsupported pattern `{}`", a); } RawChunk::Error(err) => bail!("{}", err), }; chunks.push(chunk); } 
Ok(Pattern(chunks)) } pub fn display<'a>(&'a self, graph: &'a Graph<'a>, node_index: usize) -> Display<'a> { Display { pattern: self, graph, node_index, } } } pub struct Display<'a> { pattern: &'a Pattern, graph: &'a Graph<'a>, node_index: usize, } impl<'a> fmt::Display for Display<'a> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { let node = self.graph.node(self.node_index); match node { Node::Package { package_id, features, .. } => { let package = self.graph.package_for_id(*package_id); for chunk in &self.pattern.0 { match chunk { Chunk::Raw(s) => fmt.write_str(s)?, Chunk::Package => { let proc_macro_suffix = if package.proc_macro() { " (proc-macro)" } else { "" }; write!( fmt, "{} v{}{}", package.name(), package.version(), proc_macro_suffix )?; let source_id = package.package_id().source_id(); if !source_id.is_default_registry() { write!(fmt, " ({})", source_id)?; } } Chunk::License => { if let Some(license) = &package.manifest().metadata().license { write!(fmt, "{}", license)?; } } Chunk::Repository => { if let Some(repository) = &package.manifest().metadata().repository { write!(fmt, "{}", repository)?; } } Chunk::Features => { write!(fmt, "{}", features.join(","))?; } Chunk::LibName => { if let Some(target) = package .manifest() .targets() .iter() .find(|target| target.is_lib()) { write!(fmt, "{}", target.crate_name())?; } } } } } Node::Feature { name, node_index } => { let for_node = self.graph.node(*node_index); match for_node { Node::Package { package_id, .. } => { write!(fmt, "{} feature \"{}\"", package_id.name(), name)?; if self.graph.is_cli_feature(self.node_index) { write!(fmt, " (command-line)")?; } } // The node_index in Node::Feature must point to a package // node, see `add_feature`. _ => panic!("unexpected feature node {:?}", for_node), } } } Ok(()) } } cargo-0.66.0/src/cargo/ops/tree/format/parse.rs000066400000000000000000000071161432416201200212700ustar00rootroot00000000000000//! 
Parser for the `--format` string for `cargo tree`. use std::iter; use std::str; pub enum RawChunk<'a> { /// Raw text to include in the output. Text(&'a str), /// A substitution to place in the output. For example, the argument "p" /// emits the package name. Argument(&'a str), /// Indicates an error in the format string. The given string is a /// human-readable message explaining the error. Error(&'static str), } /// `cargo tree` format parser. /// /// The format string indicates how each package should be displayed. It /// includes simple markers surrounded in curly braces that will be /// substituted with their corresponding values. For example, the text /// "{p} license:{l}" will substitute the `{p}` with the package name/version /// (and optionally source), and the `{l}` will be the license from /// `Cargo.toml`. /// /// Substitutions are alphabetic characters between curly braces, like `{p}` /// or `{foo}`. The actual interpretation of these are done in the `Pattern` /// struct. /// /// Bare curly braces can be included in the output with double braces like /// `{{` will include a single `{`, similar to Rust's format strings. 
pub struct Parser<'a> { s: &'a str, it: iter::Peekable>, } impl<'a> Parser<'a> { pub fn new(s: &'a str) -> Parser<'a> { Parser { s, it: s.char_indices().peekable(), } } fn consume(&mut self, ch: char) -> bool { match self.it.peek() { Some(&(_, c)) if c == ch => { self.it.next(); true } _ => false, } } fn argument(&mut self) -> RawChunk<'a> { RawChunk::Argument(self.name()) } fn name(&mut self) -> &'a str { let start = match self.it.peek() { Some(&(pos, ch)) if ch.is_alphabetic() => { self.it.next(); pos } _ => return "", }; loop { match self.it.peek() { Some(&(_, ch)) if ch.is_alphanumeric() => { self.it.next(); } Some(&(end, _)) => return &self.s[start..end], None => return &self.s[start..], } } } fn text(&mut self, start: usize) -> RawChunk<'a> { while let Some(&(pos, ch)) = self.it.peek() { match ch { '{' | '}' => return RawChunk::Text(&self.s[start..pos]), _ => { self.it.next(); } } } RawChunk::Text(&self.s[start..]) } } impl<'a> Iterator for Parser<'a> { type Item = RawChunk<'a>; fn next(&mut self) -> Option> { match self.it.peek() { Some(&(_, '{')) => { self.it.next(); if self.consume('{') { Some(RawChunk::Text("{")) } else { let chunk = self.argument(); if self.consume('}') { Some(chunk) } else { for _ in &mut self.it {} Some(RawChunk::Error("expected '}'")) } } } Some(&(_, '}')) => { self.it.next(); if self.consume('}') { Some(RawChunk::Text("}")) } else { Some(RawChunk::Error("unexpected '}'")) } } Some(&(i, _)) => Some(self.text(i)), None => None, } } } cargo-0.66.0/src/cargo/ops/tree/graph.rs000066400000000000000000000607471432416201200200000ustar00rootroot00000000000000//! Code for building the graph used by `cargo tree`. 
use super::TreeOptions; use crate::core::compiler::{CompileKind, RustcTargetData}; use crate::core::dependency::DepKind; use crate::core::resolver::features::{CliFeatures, FeaturesFor, ResolvedFeatures}; use crate::core::resolver::Resolve; use crate::core::{FeatureMap, FeatureValue, Package, PackageId, PackageIdSpec, Workspace}; use crate::util::interning::InternedString; use crate::util::CargoResult; use std::collections::{HashMap, HashSet}; #[derive(Debug, Clone, Eq, PartialEq, Hash, Ord, PartialOrd)] pub enum Node { Package { package_id: PackageId, /// Features that are enabled on this package. features: Vec, kind: CompileKind, }, Feature { /// Index of the package node this feature is for. node_index: usize, /// Name of the feature. name: InternedString, }, } /// The kind of edge, for separating dependencies into different sections. #[derive(Debug, Copy, Hash, Eq, Clone, PartialEq)] pub enum EdgeKind { Dep(DepKind), Feature, } /// Set of outgoing edges for a single node. /// /// Edges are separated by the edge kind (`DepKind` or `Feature`). This is /// primarily done so that the output can easily display separate sections /// like `[build-dependencies]`. /// /// The value is a `Vec` because each edge kind can have multiple outgoing /// edges. For example, package "foo" can have multiple normal dependencies. #[derive(Clone)] struct Edges(HashMap>); impl Edges { fn new() -> Edges { Edges(HashMap::new()) } /// Adds an edge pointing to the given node. fn add_edge(&mut self, kind: EdgeKind, index: usize) { let indexes = self.0.entry(kind).or_default(); if !indexes.contains(&index) { indexes.push(index) } } } /// A graph of dependencies. pub struct Graph<'a> { nodes: Vec, /// The indexes of `edges` correspond to the `nodes`. That is, `edges[0]` /// is the set of outgoing edges for `nodes[0]`. They should always be in /// sync. edges: Vec, /// Index maps a node to an index, for fast lookup. index: HashMap, /// Map for looking up packages. 
package_map: HashMap, /// Set of indexes of feature nodes that were added via the command-line. /// /// For example `--features foo` will mark the "foo" node here. cli_features: HashSet, /// Map of dependency names, used for building internal feature map for /// dep_name/feat_name syntax. /// /// Key is the index of a package node, value is a map of dep_name to a /// set of `(pkg_node_index, is_optional)`. dep_name_map: HashMap>>, } impl<'a> Graph<'a> { fn new(package_map: HashMap) -> Graph<'a> { Graph { nodes: Vec::new(), edges: Vec::new(), index: HashMap::new(), package_map, cli_features: HashSet::new(), dep_name_map: HashMap::new(), } } /// Adds a new node to the graph, returning its new index. fn add_node(&mut self, node: Node) -> usize { let from_index = self.nodes.len(); self.nodes.push(node); self.edges.push(Edges::new()); self.index .insert(self.nodes[from_index].clone(), from_index); from_index } /// Returns a list of nodes the given node index points to for the given kind. pub fn connected_nodes(&self, from: usize, kind: &EdgeKind) -> Vec { match self.edges[from].0.get(kind) { Some(indexes) => { // Created a sorted list for consistent output. let mut indexes = indexes.clone(); indexes.sort_unstable_by(|a, b| self.nodes[*a].cmp(&self.nodes[*b])); indexes } None => Vec::new(), } } /// Returns `true` if the given node has any outgoing edges. pub fn has_outgoing_edges(&self, index: usize) -> bool { !self.edges[index].0.is_empty() } /// Gets a node by index. pub fn node(&self, index: usize) -> &Node { &self.nodes[index] } /// Given a slice of PackageIds, returns the indexes of all nodes that match. pub fn indexes_from_ids(&self, package_ids: &[PackageId]) -> Vec { let mut result: Vec<(&Node, usize)> = self .nodes .iter() .enumerate() .filter(|(_i, node)| match node { Node::Package { package_id, .. 
} => package_ids.contains(package_id), _ => false, }) .map(|(i, node)| (node, i)) .collect(); // Sort for consistent output (the same command should always return // the same output). "unstable" since nodes should always be unique. result.sort_unstable(); result.into_iter().map(|(_node, i)| i).collect() } pub fn package_for_id(&self, id: PackageId) -> &Package { self.package_map[&id] } fn package_id_for_index(&self, index: usize) -> PackageId { match self.nodes[index] { Node::Package { package_id, .. } => package_id, Node::Feature { .. } => panic!("unexpected feature node"), } } /// Returns `true` if the given feature node index is a feature enabled /// via the command-line. pub fn is_cli_feature(&self, index: usize) -> bool { self.cli_features.contains(&index) } /// Returns a new graph by removing all nodes not reachable from the /// given nodes. pub fn from_reachable(&self, roots: &[usize]) -> Graph<'a> { // Graph built with features does not (yet) support --duplicates. assert!(self.dep_name_map.is_empty()); let mut new_graph = Graph::new(self.package_map.clone()); // Maps old index to new index. None if not yet visited. let mut remap: Vec> = vec![None; self.nodes.len()]; fn visit( graph: &Graph<'_>, new_graph: &mut Graph<'_>, remap: &mut Vec>, index: usize, ) -> usize { if let Some(new_index) = remap[index] { // Already visited. return new_index; } let node = graph.node(index).clone(); let new_from = new_graph.add_node(node); remap[index] = Some(new_from); // Visit dependencies. for (edge_kind, edge_indexes) in &graph.edges[index].0 { for edge_index in edge_indexes { let new_to_index = visit(graph, new_graph, remap, *edge_index); new_graph.edges[new_from].add_edge(*edge_kind, new_to_index); } } new_from } // Walk the roots, generating a new graph as it goes along. for root in roots { visit(self, &mut new_graph, &mut remap, *root); } new_graph } /// Inverts the direction of all edges. 
pub fn invert(&mut self) { let mut new_edges = vec![Edges::new(); self.edges.len()]; for (from_idx, node_edges) in self.edges.iter().enumerate() { for (kind, edges) in &node_edges.0 { for edge_idx in edges { new_edges[*edge_idx].add_edge(*kind, from_idx); } } } self.edges = new_edges; } /// Returns a list of nodes that are considered "duplicates" (same package /// name, with different versions/features/source/etc.). pub fn find_duplicates(&self) -> Vec { // Graph built with features does not (yet) support --duplicates. assert!(self.dep_name_map.is_empty()); // Collect a map of package name to Vec<(&Node, usize)>. let mut packages = HashMap::new(); for (i, node) in self.nodes.iter().enumerate() { if let Node::Package { package_id, .. } = node { packages .entry(package_id.name()) .or_insert_with(Vec::new) .push((node, i)); } } let mut dupes: Vec<(&Node, usize)> = packages .into_iter() .filter(|(_name, indexes)| { indexes .into_iter() .map(|(node, _)| { match node { Node::Package { package_id, features, .. } => { // Do not treat duplicates on the host or target as duplicates. Node::Package { package_id: package_id.clone(), features: features.clone(), kind: CompileKind::Host, } } _ => unreachable!(), } }) .collect::>() .len() > 1 }) .flat_map(|(_name, indexes)| indexes) .collect(); // For consistent output. dupes.sort_unstable(); dupes.into_iter().map(|(_node, i)| i).collect() } } /// Builds the graph. 
pub fn build<'a>( ws: &Workspace<'_>, resolve: &Resolve, resolved_features: &ResolvedFeatures, specs: &[PackageIdSpec], cli_features: &CliFeatures, target_data: &RustcTargetData<'_>, requested_kinds: &[CompileKind], package_map: HashMap, opts: &TreeOptions, ) -> CargoResult> { let mut graph = Graph::new(package_map); let mut members_with_features = ws.members_with_features(specs, cli_features)?; members_with_features.sort_unstable_by_key(|e| e.0.package_id()); for (member, cli_features) in members_with_features { let member_id = member.package_id(); let features_for = FeaturesFor::from_for_host(member.proc_macro()); for kind in requested_kinds { let member_index = add_pkg( &mut graph, resolve, resolved_features, member_id, features_for, target_data, *kind, opts, ); if opts.graph_features { let fmap = resolve.summary(member_id).features(); add_cli_features(&mut graph, member_index, &cli_features, fmap); } } } if opts.graph_features { add_internal_features(&mut graph, resolve); } Ok(graph) } /// Adds a single package node (if it does not already exist). /// /// This will also recursively add all of its dependencies. /// /// Returns the index to the package node. fn add_pkg( graph: &mut Graph<'_>, resolve: &Resolve, resolved_features: &ResolvedFeatures, package_id: PackageId, features_for: FeaturesFor, target_data: &RustcTargetData<'_>, requested_kind: CompileKind, opts: &TreeOptions, ) -> usize { let node_features = resolved_features.activated_features(package_id, features_for); let node_kind = match features_for { FeaturesFor::HostDep => CompileKind::Host, FeaturesFor::NormalOrDevOrArtifactTarget(Some(target)) => CompileKind::Target(target), FeaturesFor::NormalOrDevOrArtifactTarget(None) => requested_kind, }; let node = Node::Package { package_id, features: node_features, kind: node_kind, }; if let Some(idx) = graph.index.get(&node) { return *idx; } let from_index = graph.add_node(node); // Compute the dep name map which is later used for foo/bar feature lookups. 
let mut dep_name_map: HashMap> = HashMap::new(); let mut deps: Vec<_> = resolve.deps(package_id).collect(); deps.sort_unstable_by_key(|(dep_id, _)| *dep_id); let show_all_targets = opts.target == super::Target::All; for (dep_id, deps) in deps { let mut deps: Vec<_> = deps .iter() // This filter is *similar* to the one found in `unit_dependencies::compute_deps`. // Try to keep them in sync! .filter(|dep| { let kind = match (node_kind, dep.kind()) { (CompileKind::Host, _) => CompileKind::Host, (_, DepKind::Build) => CompileKind::Host, (_, DepKind::Normal) => node_kind, (_, DepKind::Development) => node_kind, }; // Filter out inactivated targets. if !show_all_targets && !target_data.dep_platform_activated(dep, kind) { return false; } // Filter out dev-dependencies if requested. if !opts.edge_kinds.contains(&EdgeKind::Dep(dep.kind())) { return false; } if dep.is_optional() { // If the new feature resolver does not enable this // optional dep, then don't use it. if !resolved_features.is_dep_activated( package_id, features_for, dep.name_in_toml(), ) { return false; } } true }) .collect(); // This dependency is eliminated from the dependency tree under // the current target and feature set. if deps.is_empty() { continue; } deps.sort_unstable_by_key(|dep| dep.name_in_toml()); let dep_pkg = graph.package_map[&dep_id]; for dep in deps { let dep_features_for = if dep.is_build() || dep_pkg.proc_macro() { FeaturesFor::HostDep } else { features_for }; let dep_index = add_pkg( graph, resolve, resolved_features, dep_id, dep_features_for, target_data, requested_kind, opts, ); if opts.graph_features { // Add the dependency node with feature nodes in-between. 
dep_name_map .entry(dep.name_in_toml()) .or_default() .insert((dep_index, dep.is_optional())); if dep.uses_default_features() { add_feature( graph, InternedString::new("default"), Some(from_index), dep_index, EdgeKind::Dep(dep.kind()), ); } for feature in dep.features().iter() { add_feature( graph, *feature, Some(from_index), dep_index, EdgeKind::Dep(dep.kind()), ); } if !dep.uses_default_features() && dep.features().is_empty() { // No features, use a direct connection. graph.edges[from_index].add_edge(EdgeKind::Dep(dep.kind()), dep_index); } } else { graph.edges[from_index].add_edge(EdgeKind::Dep(dep.kind()), dep_index); } } } if opts.graph_features { assert!(graph .dep_name_map .insert(from_index, dep_name_map) .is_none()); } from_index } /// Adds a feature node between two nodes. /// /// That is, it adds the following: /// /// ```text /// from -Edge-> featname -Edge::Feature-> to /// ``` /// /// Returns a tuple `(missing, index)`. /// `missing` is true if this feature edge was already added. /// `index` is the index of the index in the graph of the `Feature` node. fn add_feature( graph: &mut Graph<'_>, name: InternedString, from: Option, to: usize, kind: EdgeKind, ) -> (bool, usize) { // `to` *must* point to a package node. assert!(matches! {graph.nodes[to], Node::Package{..}}); let node = Node::Feature { node_index: to, name, }; let (missing, node_index) = match graph.index.get(&node) { Some(idx) => (false, *idx), None => (true, graph.add_node(node)), }; if let Some(from) = from { graph.edges[from].add_edge(kind, node_index); } graph.edges[node_index].add_edge(EdgeKind::Feature, to); (missing, node_index) } /// Adds nodes for features requested on the command-line for the given member. /// /// Feature nodes are added as "roots" (i.e., they have no "from" index), /// because they come from the outside world. They usually only appear with /// `--invert`. 
fn add_cli_features( graph: &mut Graph<'_>, package_index: usize, cli_features: &CliFeatures, feature_map: &FeatureMap, ) { // NOTE: Recursive enabling of features will be handled by // add_internal_features. // Create a set of feature names requested on the command-line. let mut to_add: HashSet = HashSet::new(); if cli_features.all_features { to_add.extend(feature_map.keys().map(|feat| FeatureValue::Feature(*feat))); } if cli_features.uses_default_features { to_add.insert(FeatureValue::Feature(InternedString::new("default"))); } to_add.extend(cli_features.features.iter().cloned()); // Add each feature as a node, and mark as "from command-line" in graph.cli_features. for fv in to_add { match fv { FeatureValue::Feature(feature) => { let index = add_feature(graph, feature, None, package_index, EdgeKind::Feature).1; graph.cli_features.insert(index); } // This is enforced by CliFeatures. FeatureValue::Dep { .. } => panic!("unexpected cli dep feature {}", fv), FeatureValue::DepFeature { dep_name, dep_feature, weak, } => { let dep_connections = match graph.dep_name_map[&package_index].get(&dep_name) { // Clone to deal with immutable borrow of `graph`. :( Some(dep_connections) => dep_connections.clone(), None => { // --features bar?/feat where `bar` is not activated should be ignored. // If this wasn't weak, then this is a bug. if weak { continue; } panic!( "missing dep graph connection for CLI feature `{}` for member {:?}\n\ Please file a bug report at https://github.com/rust-lang/cargo/issues", fv, graph.nodes.get(package_index) ); } }; for (dep_index, is_optional) in dep_connections { if is_optional { // Activate the optional dep on self. 
let index = add_feature(graph, dep_name, None, package_index, EdgeKind::Feature).1; graph.cli_features.insert(index); } let index = add_feature(graph, dep_feature, None, dep_index, EdgeKind::Feature).1; graph.cli_features.insert(index); } } } } } /// Recursively adds connections between features in the `[features]` table /// for every package. fn add_internal_features(graph: &mut Graph<'_>, resolve: &Resolve) { // Collect features already activated by dependencies or command-line. let feature_nodes: Vec<(PackageId, usize, usize, InternedString)> = graph .nodes .iter() .enumerate() .filter_map(|(i, node)| match node { Node::Package { .. } => None, Node::Feature { node_index, name } => { let package_id = graph.package_id_for_index(*node_index); Some((package_id, *node_index, i, *name)) } }) .collect(); for (package_id, package_index, feature_index, feature_name) in feature_nodes { add_feature_rec( graph, resolve, feature_name, package_id, feature_index, package_index, ); } } /// Recursively add feature nodes for all features enabled by the given feature. /// /// `from` is the index of the node that enables this feature. /// `package_index` is the index of the package node for the feature. fn add_feature_rec( graph: &mut Graph<'_>, resolve: &Resolve, feature_name: InternedString, package_id: PackageId, from: usize, package_index: usize, ) { let feature_map = resolve.summary(package_id).features(); let fvs = match feature_map.get(&feature_name) { Some(fvs) => fvs, None => return, }; for fv in fvs { match fv { FeatureValue::Feature(dep_name) => { let (missing, feat_index) = add_feature( graph, *dep_name, Some(from), package_index, EdgeKind::Feature, ); // Don't recursive if the edge already exists to deal with cycles. if missing { add_feature_rec( graph, resolve, *dep_name, package_id, feat_index, package_index, ); } } // Dependencies are already shown in the graph as dep edges. 
I'm // uncertain whether or not this might be confusing in some cases // (like feature `"somefeat" = ["dep:somedep"]`), so maybe in the // future consider explicitly showing this? FeatureValue::Dep { .. } => {} FeatureValue::DepFeature { dep_name, dep_feature, // Note: `weak` is mostly handled when the graph is built in // `is_dep_activated` which is responsible for skipping // unactivated weak dependencies. Here it is only used to // determine if the feature of the dependency name is // activated on self. weak, } => { let dep_indexes = match graph.dep_name_map[&package_index].get(dep_name) { Some(indexes) => indexes.clone(), None => { log::debug!( "enabling feature {} on {}, found {}/{}, \ dep appears to not be enabled", feature_name, package_id, dep_name, dep_feature ); continue; } }; for (dep_index, is_optional) in dep_indexes { let dep_pkg_id = graph.package_id_for_index(dep_index); if is_optional && !weak { // Activate the optional dep on self. add_feature( graph, *dep_name, Some(from), package_index, EdgeKind::Feature, ); } let (missing, feat_index) = add_feature( graph, *dep_feature, Some(from), dep_index, EdgeKind::Feature, ); if missing { add_feature_rec( graph, resolve, *dep_feature, dep_pkg_id, feat_index, dep_index, ); } } } } } } cargo-0.66.0/src/cargo/ops/tree/mod.rs000066400000000000000000000301001432416201200174320ustar00rootroot00000000000000//! Implementation of `cargo tree`. 
use self::format::Pattern; use crate::core::compiler::{CompileKind, RustcTargetData}; use crate::core::dependency::DepKind; use crate::core::resolver::{features::CliFeatures, ForceAllTargets, HasDevUnits}; use crate::core::{Package, PackageId, PackageIdSpec, Workspace}; use crate::ops::{self, Packages}; use crate::util::{CargoResult, Config}; use crate::{drop_print, drop_println}; use anyhow::Context; use graph::Graph; use std::collections::{HashMap, HashSet}; use std::str::FromStr; mod format; mod graph; pub use {graph::EdgeKind, graph::Node}; pub struct TreeOptions { pub cli_features: CliFeatures, /// The packages to display the tree for. pub packages: Packages, /// The platform to filter for. pub target: Target, /// The dependency kinds to display. pub edge_kinds: HashSet, pub invert: Vec, /// The packages to prune from the display of the dependency tree. pub pkgs_to_prune: Vec, /// The style of prefix for each line. pub prefix: Prefix, /// If `true`, duplicates will be repeated. /// If `false`, duplicates will be marked with `*`, and their dependencies /// won't be shown. pub no_dedupe: bool, /// If `true`, run in a special mode where it will scan for packages that /// appear with different versions, and report if any where found. Implies /// `invert`. pub duplicates: bool, /// The style of characters to use. pub charset: Charset, /// A format string indicating how each package should be displayed. pub format: String, /// Includes features in the tree as separate nodes. pub graph_features: bool, /// Maximum display depth of the dependency tree. pub max_display_depth: u32, /// Excludes proc-macro dependencies. 
pub no_proc_macro: bool, } #[derive(PartialEq)] pub enum Target { Host, Specific(Vec), All, } impl Target { pub fn from_cli(targets: Vec) -> Target { match targets.len() { 0 => Target::Host, 1 if targets[0] == "all" => Target::All, _ => Target::Specific(targets), } } } pub enum Charset { Utf8, Ascii, } impl FromStr for Charset { type Err = &'static str; fn from_str(s: &str) -> Result { match s { "utf8" => Ok(Charset::Utf8), "ascii" => Ok(Charset::Ascii), _ => Err("invalid charset"), } } } #[derive(Clone, Copy)] pub enum Prefix { None, Indent, Depth, } impl FromStr for Prefix { type Err = &'static str; fn from_str(s: &str) -> Result { match s { "none" => Ok(Prefix::None), "indent" => Ok(Prefix::Indent), "depth" => Ok(Prefix::Depth), _ => Err("invalid prefix"), } } } struct Symbols { down: &'static str, tee: &'static str, ell: &'static str, right: &'static str, } static UTF8_SYMBOLS: Symbols = Symbols { down: "β”‚", tee: "β”œ", ell: "β””", right: "─", }; static ASCII_SYMBOLS: Symbols = Symbols { down: "|", tee: "|", ell: "`", right: "-", }; /// Entry point for the `cargo tree` command. pub fn build_and_print(ws: &Workspace<'_>, opts: &TreeOptions) -> CargoResult<()> { let requested_targets = match &opts.target { Target::All | Target::Host => Vec::new(), Target::Specific(t) => t.clone(), }; // TODO: Target::All is broken with -Zfeatures=itarget. To handle that properly, // `FeatureResolver` will need to be taught what "all" means. 
let requested_kinds = CompileKind::from_requested_targets(ws.config(), &requested_targets)?; let target_data = RustcTargetData::new(ws, &requested_kinds)?; let specs = opts.packages.to_package_id_specs(ws)?; let has_dev = if opts .edge_kinds .contains(&EdgeKind::Dep(DepKind::Development)) { HasDevUnits::Yes } else { HasDevUnits::No }; let force_all = if opts.target == Target::All { ForceAllTargets::Yes } else { ForceAllTargets::No }; let ws_resolve = ops::resolve_ws_with_opts( ws, &target_data, &requested_kinds, &opts.cli_features, &specs, has_dev, force_all, )?; let package_map: HashMap = ws_resolve .pkg_set .packages() .map(|pkg| (pkg.package_id(), pkg)) .collect(); let mut graph = graph::build( ws, &ws_resolve.targeted_resolve, &ws_resolve.resolved_features, &specs, &opts.cli_features, &target_data, &requested_kinds, package_map, opts, )?; let root_specs = if opts.invert.is_empty() { specs } else { opts.invert .iter() .map(|p| PackageIdSpec::parse(p)) .collect::>>()? }; let root_ids = ws_resolve.targeted_resolve.specs_to_ids(&root_specs)?; let root_indexes = graph.indexes_from_ids(&root_ids); let root_indexes = if opts.duplicates { // `-d -p foo` will only show duplicates within foo's subtree graph = graph.from_reachable(root_indexes.as_slice()); graph.find_duplicates() } else { root_indexes }; if !opts.invert.is_empty() || opts.duplicates { graph.invert(); } // Packages to prune. let pkgs_to_prune = opts .pkgs_to_prune .iter() .map(|p| PackageIdSpec::parse(p)) .map(|r| { // Provide an error message if pkgid is not within the resolved // dependencies graph. r.and_then(|spec| spec.query(ws_resolve.targeted_resolve.iter()).and(Ok(spec))) }) .collect::>>()?; print(ws.config(), opts, root_indexes, &pkgs_to_prune, &graph)?; Ok(()) } /// Prints a tree for each given root. 
fn print( config: &Config, opts: &TreeOptions, roots: Vec, pkgs_to_prune: &[PackageIdSpec], graph: &Graph<'_>, ) -> CargoResult<()> { let format = Pattern::new(&opts.format) .with_context(|| format!("tree format `{}` not valid", opts.format))?; let symbols = match opts.charset { Charset::Utf8 => &UTF8_SYMBOLS, Charset::Ascii => &ASCII_SYMBOLS, }; // The visited deps is used to display a (*) whenever a dep has // already been printed (ignored with --no-dedupe). let mut visited_deps = HashSet::new(); for (i, root_index) in roots.into_iter().enumerate() { if i != 0 { drop_println!(config); } // A stack of bools used to determine where | symbols should appear // when printing a line. let mut levels_continue = vec![]; // The print stack is used to detect dependency cycles when // --no-dedupe is used. It contains a Node for each level. let mut print_stack = vec![]; print_node( config, graph, root_index, &format, symbols, pkgs_to_prune, opts.prefix, opts.no_dedupe, opts.max_display_depth, opts.no_proc_macro, &mut visited_deps, &mut levels_continue, &mut print_stack, ); } Ok(()) } /// Prints a package and all of its dependencies. 
fn print_node<'a>( config: &Config, graph: &'a Graph<'_>, node_index: usize, format: &Pattern, symbols: &Symbols, pkgs_to_prune: &[PackageIdSpec], prefix: Prefix, no_dedupe: bool, max_display_depth: u32, no_proc_macro: bool, visited_deps: &mut HashSet, levels_continue: &mut Vec, print_stack: &mut Vec, ) { let new = no_dedupe || visited_deps.insert(node_index); match prefix { Prefix::Depth => drop_print!(config, "{}", levels_continue.len()), Prefix::Indent => { if let Some((last_continues, rest)) = levels_continue.split_last() { for continues in rest { let c = if *continues { symbols.down } else { " " }; drop_print!(config, "{} ", c); } let c = if *last_continues { symbols.tee } else { symbols.ell }; drop_print!(config, "{0}{1}{1} ", c, symbols.right); } } Prefix::None => {} } let in_cycle = print_stack.contains(&node_index); // If this node does not have any outgoing edges, don't include the (*) // since there isn't really anything "deduplicated", and it generally just // adds noise. let has_deps = graph.has_outgoing_edges(node_index); let star = if (new && !in_cycle) || !has_deps { "" } else { " (*)" }; drop_println!(config, "{}{}", format.display(graph, node_index), star); if !new || in_cycle { return; } print_stack.push(node_index); for kind in &[ EdgeKind::Dep(DepKind::Normal), EdgeKind::Dep(DepKind::Build), EdgeKind::Dep(DepKind::Development), EdgeKind::Feature, ] { print_dependencies( config, graph, node_index, format, symbols, pkgs_to_prune, prefix, no_dedupe, max_display_depth, no_proc_macro, visited_deps, levels_continue, print_stack, kind, ); } print_stack.pop(); } /// Prints all the dependencies of a package for the given dependency kind. 
fn print_dependencies<'a>( config: &Config, graph: &'a Graph<'_>, node_index: usize, format: &Pattern, symbols: &Symbols, pkgs_to_prune: &[PackageIdSpec], prefix: Prefix, no_dedupe: bool, max_display_depth: u32, no_proc_macro: bool, visited_deps: &mut HashSet, levels_continue: &mut Vec, print_stack: &mut Vec, kind: &EdgeKind, ) { let deps = graph.connected_nodes(node_index, kind); if deps.is_empty() { return; } let name = match kind { EdgeKind::Dep(DepKind::Normal) => None, EdgeKind::Dep(DepKind::Build) => Some("[build-dependencies]"), EdgeKind::Dep(DepKind::Development) => Some("[dev-dependencies]"), EdgeKind::Feature => None, }; if let Prefix::Indent = prefix { if let Some(name) = name { for continues in &**levels_continue { let c = if *continues { symbols.down } else { " " }; drop_print!(config, "{} ", c); } drop_println!(config, "{}", name); } } // Current level exceeds maximum display depth. Skip. if levels_continue.len() + 1 > max_display_depth as usize { return; } let mut it = deps .iter() .filter(|dep| { // Filter out proc-macro dependencies. if no_proc_macro { match graph.node(**dep) { &Node::Package { package_id, .. } => { !graph.package_for_id(package_id).proc_macro() } _ => true, } } else { true } }) .filter(|dep| { // Filter out packages to prune. match graph.node(**dep) { Node::Package { package_id, .. 
} => { !pkgs_to_prune.iter().any(|spec| spec.matches(*package_id)) } _ => true, } }) .peekable(); while let Some(dependency) = it.next() { levels_continue.push(it.peek().is_some()); print_node( config, graph, *dependency, format, symbols, pkgs_to_prune, prefix, no_dedupe, max_display_depth, no_proc_macro, visited_deps, levels_continue, print_stack, ); levels_continue.pop(); } } cargo-0.66.0/src/cargo/ops/vendor.rs000066400000000000000000000323401432416201200172210ustar00rootroot00000000000000use crate::core::shell::Verbosity; use crate::core::{GitReference, Workspace}; use crate::ops; use crate::sources::path::PathSource; use crate::sources::CRATES_IO_REGISTRY; use crate::util::{CargoResult, Config}; use anyhow::{bail, Context as _}; use cargo_util::{paths, Sha256}; use serde::Serialize; use std::collections::HashSet; use std::collections::{BTreeMap, BTreeSet, HashMap}; use std::fs::{self, File, OpenOptions}; use std::io::{Read, Write}; use std::path::{Path, PathBuf}; use toml_edit::easy as toml; pub struct VendorOptions<'a> { pub no_delete: bool, pub versioned_dirs: bool, pub destination: &'a Path, pub extra: Vec, } pub fn vendor(ws: &Workspace<'_>, opts: &VendorOptions<'_>) -> CargoResult<()> { let config = ws.config(); let mut extra_workspaces = Vec::new(); for extra in opts.extra.iter() { let extra = config.cwd().join(extra); let ws = Workspace::new(&extra, config)?; extra_workspaces.push(ws); } let workspaces = extra_workspaces.iter().chain(Some(ws)).collect::>(); let vendor_config = sync(config, &workspaces, opts).with_context(|| "failed to sync")?; if config.shell().verbosity() != Verbosity::Quiet { if vendor_config.source.is_empty() { crate::drop_eprintln!(config, "There is no dependency to vendor in this project."); } else { crate::drop_eprint!( config, "To use vendored sources, add this to your .cargo/config.toml for this project:\n\n" ); crate::drop_print!( config, "{}", &toml::to_string_pretty(&vendor_config).unwrap() ); } } Ok(()) } 
#[derive(Serialize)] struct VendorConfig { source: BTreeMap, } #[derive(Serialize)] #[serde(rename_all = "lowercase", untagged)] enum VendorSource { Directory { directory: String, }, Registry { registry: Option, #[serde(rename = "replace-with")] replace_with: String, }, Git { git: String, branch: Option, tag: Option, rev: Option, #[serde(rename = "replace-with")] replace_with: String, }, } fn sync( config: &Config, workspaces: &[&Workspace<'_>], opts: &VendorOptions<'_>, ) -> CargoResult { let canonical_destination = opts.destination.canonicalize(); let canonical_destination = canonical_destination.as_deref().unwrap_or(opts.destination); let dest_dir_already_exists = canonical_destination.exists(); paths::create_dir_all(&canonical_destination)?; let mut to_remove = HashSet::new(); if !opts.no_delete { for entry in canonical_destination.read_dir()? { let entry = entry?; if !entry .file_name() .to_str() .map_or(false, |s| s.starts_with('.')) { to_remove.insert(entry.path()); } } } // First up attempt to work around rust-lang/cargo#5956. Apparently build // artifacts sprout up in Cargo's global cache for whatever reason, although // it's unsure what tool is causing these issues at this time. For now we // apply a heavy-hammer approach which is to delete Cargo's unpacked version // of each crate to start off with. After we do this we'll re-resolve and // redownload again, which should trigger Cargo to re-extract all the // crates. // // Note that errors are largely ignored here as this is a best-effort // attempt. If anything fails here we basically just move on to the next // crate to work with. for ws in workspaces { let (packages, resolve) = ops::resolve_ws(ws).with_context(|| "failed to load pkg lockfile")?; packages .get_many(resolve.iter()) .with_context(|| "failed to download packages")?; for pkg in resolve.iter() { // Don't delete actual source code! 
if pkg.source_id().is_path() { if let Ok(path) = pkg.source_id().url().to_file_path() { if let Ok(path) = path.canonicalize() { to_remove.remove(&path); } } continue; } if pkg.source_id().is_git() { continue; } if let Ok(pkg) = packages.get_one(pkg) { drop(fs::remove_dir_all(pkg.manifest_path().parent().unwrap())); } } } let mut checksums = HashMap::new(); let mut ids = BTreeMap::new(); // Next up let's actually download all crates and start storing internal // tables about them. for ws in workspaces { let (packages, resolve) = ops::resolve_ws(ws).with_context(|| "failed to load pkg lockfile")?; packages .get_many(resolve.iter()) .with_context(|| "failed to download packages")?; for pkg in resolve.iter() { // No need to vendor path crates since they're already in the // repository if pkg.source_id().is_path() { continue; } ids.insert( pkg, packages .get_one(pkg) .with_context(|| "failed to fetch package")? .clone(), ); checksums.insert(pkg, resolve.checksums().get(&pkg).cloned()); } } let mut versions = HashMap::new(); for id in ids.keys() { let map = versions.entry(id.name()).or_insert_with(BTreeMap::default); if let Some(prev) = map.get(&id.version()) { bail!( "found duplicate version of package `{} v{}` \ vendored from two sources:\n\ \n\ \tsource 1: {}\n\ \tsource 2: {}", id.name(), id.version(), prev, id.source_id() ); } map.insert(id.version(), id.source_id()); } let mut sources = BTreeSet::new(); let mut tmp_buf = [0; 64 * 1024]; for (id, pkg) in ids.iter() { // Next up, copy it to the vendor directory let src = pkg .manifest_path() .parent() .expect("manifest_path should point to a file"); let max_version = *versions[&id.name()].iter().rev().next().unwrap().0; let dir_has_version_suffix = opts.versioned_dirs || id.version() != max_version; let dst_name = if dir_has_version_suffix { // Eg vendor/futures-0.1.13 format!("{}-{}", id.name(), id.version()) } else { // Eg vendor/futures id.name().to_string() }; sources.insert(id.source_id()); let dst = 
canonical_destination.join(&dst_name); to_remove.remove(&dst); let cksum = dst.join(".cargo-checksum.json"); if dir_has_version_suffix && cksum.exists() { // Always re-copy directory without version suffix in case the version changed continue; } config.shell().status( "Vendoring", &format!("{} ({}) to {}", id, src.to_string_lossy(), dst.display()), )?; let _ = fs::remove_dir_all(&dst); let pathsource = PathSource::new(src, id.source_id(), config); let paths = pathsource.list_files(pkg)?; let mut map = BTreeMap::new(); cp_sources(src, &paths, &dst, &mut map, &mut tmp_buf) .with_context(|| format!("failed to copy over vendored sources for: {}", id))?; // Finally, emit the metadata about this package let json = serde_json::json!({ "package": checksums.get(id), "files": map, }); paths::write(&cksum, json.to_string())?; } for path in to_remove { if path.is_dir() { paths::remove_dir_all(&path)?; } else { paths::remove_file(&path)?; } } // add our vendored source let mut config = BTreeMap::new(); let merged_source_name = "vendored-sources"; // replace original sources with vendor for source_id in sources { let name = if source_id.is_default_registry() { CRATES_IO_REGISTRY.to_string() } else { source_id.url().to_string() }; let source = if source_id.is_default_registry() { VendorSource::Registry { registry: None, replace_with: merged_source_name.to_string(), } } else if source_id.is_remote_registry() { let registry = source_id.url().to_string(); VendorSource::Registry { registry: Some(registry), replace_with: merged_source_name.to_string(), } } else if source_id.is_git() { let mut branch = None; let mut tag = None; let mut rev = None; if let Some(reference) = source_id.git_reference() { match *reference { GitReference::Branch(ref b) => branch = Some(b.clone()), GitReference::Tag(ref t) => tag = Some(t.clone()), GitReference::Rev(ref r) => rev = Some(r.clone()), GitReference::DefaultBranch => {} } } VendorSource::Git { git: source_id.url().to_string(), branch, tag, rev, 
replace_with: merged_source_name.to_string(), } } else { panic!("Invalid source ID: {}", source_id) }; config.insert(name, source); } if !config.is_empty() { config.insert( merged_source_name.to_string(), VendorSource::Directory { // Windows-flavour paths are valid here on Windows but Unix. // This backslash normalization is for making output paths more // cross-platform compatible. directory: opts.destination.to_string_lossy().replace("\\", "/"), }, ); } else if !dest_dir_already_exists { // Nothing to vendor. Remove the destination dir we've just created. paths::remove_dir(canonical_destination)?; } Ok(VendorConfig { source: config }) } fn cp_sources( src: &Path, paths: &[PathBuf], dst: &Path, cksums: &mut BTreeMap, tmp_buf: &mut [u8], ) -> CargoResult<()> { for p in paths { let relative = p.strip_prefix(&src).unwrap(); match relative.to_str() { // Skip git config files as they're not relevant to builds most of // the time and if we respect them (e.g. in git) then it'll // probably mess with the checksums when a vendor dir is checked // into someone else's source control Some(".gitattributes") | Some(".gitignore") | Some(".git") => continue, // Temporary Cargo files Some(".cargo-ok") => continue, // Skip patch-style orig/rej files. Published crates on crates.io // have `Cargo.toml.orig` which we don't want to use here and // otherwise these are rarely used as part of the build process. Some(filename) => { if filename.ends_with(".orig") || filename.ends_with(".rej") { continue; } } _ => {} }; // Join pathname components individually to make sure that the joined // path uses the correct directory separators everywhere, since // `relative` may use Unix-style and `dst` may require Windows-style // backslashes. 
let dst = relative .iter() .fold(dst.to_owned(), |acc, component| acc.join(&component)); paths::create_dir_all(dst.parent().unwrap())?; let cksum = copy_and_checksum(p, &dst, tmp_buf)?; cksums.insert(relative.to_str().unwrap().replace("\\", "/"), cksum); } Ok(()) } fn copy_and_checksum(src_path: &Path, dst_path: &Path, buf: &mut [u8]) -> CargoResult { let mut src = File::open(src_path).with_context(|| format!("failed to open {:?}", src_path))?; let mut dst_opts = OpenOptions::new(); dst_opts.write(true).create(true).truncate(true); #[cfg(unix)] { use std::os::unix::fs::{MetadataExt, OpenOptionsExt}; let src_metadata = src .metadata() .with_context(|| format!("failed to stat {:?}", src_path))?; dst_opts.mode(src_metadata.mode()); } let mut dst = dst_opts .open(dst_path) .with_context(|| format!("failed to create {:?}", dst_path))?; // Not going to bother setting mode on pre-existing files, since there // shouldn't be any under normal conditions. let mut cksum = Sha256::new(); loop { let n = src .read(buf) .with_context(|| format!("failed to read from {:?}", src_path))?; if n == 0 { break Ok(cksum.finish_hex()); } let data = &buf[..n]; cksum.update(data); dst.write_all(data) .with_context(|| format!("failed to write to {:?}", dst_path))?; } } cargo-0.66.0/src/cargo/sources/000077500000000000000000000000001432416201200162365ustar00rootroot00000000000000cargo-0.66.0/src/cargo/sources/config.rs000066400000000000000000000240731432416201200200570ustar00rootroot00000000000000//! Implementation of configuration for various sources //! //! This module will parse the various `source.*` TOML configuration keys into a //! structure usable by Cargo itself. Currently this is primarily used to map //! sources to one another via the `replace-with` key in `.cargo/config`. 
use crate::core::{GitReference, PackageId, Source, SourceId}; use crate::sources::{ReplacedSource, CRATES_IO_REGISTRY}; use crate::util::config::{self, ConfigRelativePath, OptValue}; use crate::util::errors::CargoResult; use crate::util::{Config, IntoUrl}; use anyhow::{bail, Context as _}; use log::debug; use std::collections::{HashMap, HashSet}; use url::Url; #[derive(Clone)] pub struct SourceConfigMap<'cfg> { /// Mapping of source name to the toml configuration. cfgs: HashMap, /// Mapping of `SourceId` to the source name. id2name: HashMap, config: &'cfg Config, } /// Definition of a source in a config file. #[derive(Debug, serde::Deserialize)] #[serde(rename_all = "kebab-case")] struct SourceConfigDef { /// Indicates this source should be replaced with another of the given name. replace_with: OptValue, /// A directory source. directory: Option, /// A registry source. Value is a URL. registry: OptValue, /// A local registry source. local_registry: Option, /// A git source. Value is a URL. git: OptValue, /// The git branch. branch: OptValue, /// The git tag. tag: OptValue, /// The git revision. rev: OptValue, } /// Configuration for a particular source, found in TOML looking like: /// /// ```toml /// [source.crates-io] /// registry = 'https://github.com/rust-lang/crates.io-index' /// replace-with = 'foo' # optional /// ``` #[derive(Clone)] struct SourceConfig { /// `SourceId` this source corresponds to, inferred from the various /// defined keys in the configuration. id: SourceId, /// Whether or not this source is replaced with another. /// /// This field is a tuple of `(name, location)` where `location` is where /// this configuration key was defined (such as the `.cargo/config` path /// or the environment variable name). 
replace_with: Option<(String, String)>, } impl<'cfg> SourceConfigMap<'cfg> { pub fn new(config: &'cfg Config) -> CargoResult> { let mut base = SourceConfigMap::empty(config)?; let sources: Option> = config.get("source")?; if let Some(sources) = sources { for (key, value) in sources.into_iter() { base.add_config(key, value)?; } } Ok(base) } pub fn empty(config: &'cfg Config) -> CargoResult> { let mut base = SourceConfigMap { cfgs: HashMap::new(), id2name: HashMap::new(), config, }; base.add( CRATES_IO_REGISTRY, SourceConfig { id: SourceId::crates_io(config)?, replace_with: None, }, )?; if config.cli_unstable().sparse_registry { base.add( CRATES_IO_REGISTRY, SourceConfig { id: SourceId::crates_io_maybe_sparse_http(config)?, replace_with: None, }, )?; } Ok(base) } pub fn config(&self) -> &'cfg Config { self.config } /// Get the `Source` for a given `SourceId`. pub fn load( &self, id: SourceId, yanked_whitelist: &HashSet, ) -> CargoResult> { debug!("loading: {}", id); let mut name = match self.id2name.get(&id) { Some(name) => name, None => return id.load(self.config, yanked_whitelist), }; let mut cfg_loc = ""; let orig_name = name; let new_id; loop { let cfg = match self.cfgs.get(name) { Some(cfg) => cfg, None => bail!( "could not find a configured source with the \ name `{}` when attempting to lookup `{}` \ (configuration in `{}`)", name, orig_name, cfg_loc ), }; match &cfg.replace_with { Some((s, c)) => { name = s; cfg_loc = c; } None if id == cfg.id => return id.load(self.config, yanked_whitelist), None => { new_id = cfg.id.with_precise(id.precise().map(|s| s.to_string())); break; } } debug!("following pointer to {}", name); if name == orig_name { bail!( "detected a cycle of `replace-with` sources, the source \ `{}` is eventually replaced with itself \ (configuration in `{}`)", name, cfg_loc ) } } let new_src = new_id.load( self.config, &yanked_whitelist .iter() .map(|p| p.map_source(id, new_id)) .collect(), )?; let old_src = id.load(self.config, yanked_whitelist)?; 
if !new_src.supports_checksums() && old_src.supports_checksums() { bail!( "\ cannot replace `{orig}` with `{name}`, the source `{orig}` supports \ checksums, but `{name}` does not a lock file compatible with `{orig}` cannot be generated in this situation ", orig = orig_name, name = name ); } if old_src.requires_precise() && id.precise().is_none() { bail!( "\ the source {orig} requires a lock file to be present first before it can be used against vendored source code remove the source replacement configuration, generate a lock file, and then restore the source replacement configuration to continue the build ", orig = orig_name ); } Ok(Box::new(ReplacedSource::new(id, new_id, new_src))) } fn add(&mut self, name: &str, cfg: SourceConfig) -> CargoResult<()> { if let Some(old_name) = self.id2name.insert(cfg.id, name.to_string()) { // The user is allowed to redefine the built-in crates-io // definition from `empty()`. if name != CRATES_IO_REGISTRY { bail!( "source `{}` defines source {}, but that source is already defined by `{}`\n\ note: Sources are not allowed to be defined multiple times.", name, cfg.id, old_name ); } } self.cfgs.insert(name.to_string(), cfg); Ok(()) } fn add_config(&mut self, name: String, def: SourceConfigDef) -> CargoResult<()> { let mut srcs = Vec::new(); if let Some(registry) = def.registry { let url = url(®istry, &format!("source.{}.registry", name))?; srcs.push(SourceId::for_alt_registry(&url, &name)?); } if let Some(local_registry) = def.local_registry { let path = local_registry.resolve_path(self.config); srcs.push(SourceId::for_local_registry(&path)?); } if let Some(directory) = def.directory { let path = directory.resolve_path(self.config); srcs.push(SourceId::for_directory(&path)?); } if let Some(git) = def.git { let url = url(&git, &format!("source.{}.git", name))?; let reference = match def.branch { Some(b) => GitReference::Branch(b.val), None => match def.tag { Some(b) => GitReference::Tag(b.val), None => match def.rev { Some(b) => 
GitReference::Rev(b.val), None => GitReference::DefaultBranch, }, }, }; srcs.push(SourceId::for_git(&url, reference)?); } else { let check_not_set = |key, v: OptValue| { if let Some(val) = v { bail!( "source definition `source.{}` specifies `{}`, \ but that requires a `git` key to be specified (in {})", name, key, val.definition ); } Ok(()) }; check_not_set("branch", def.branch)?; check_not_set("tag", def.tag)?; check_not_set("rev", def.rev)?; } if name == CRATES_IO_REGISTRY && srcs.is_empty() { srcs.push(SourceId::crates_io_maybe_sparse_http(self.config)?); } match srcs.len() { 0 => bail!( "no source location specified for `source.{}`, need \ `registry`, `local-registry`, `directory`, or `git` defined", name ), 1 => {} _ => bail!( "more than one source location specified for `source.{}`", name ), } let src = srcs[0]; let replace_with = def .replace_with .map(|val| (val.val, val.definition.to_string())); self.add( &name, SourceConfig { id: src, replace_with, }, )?; return Ok(()); fn url(val: &config::Value, key: &str) -> CargoResult { let url = val.val.into_url().with_context(|| { format!( "configuration key `{}` specified an invalid \ URL (in {})", key, val.definition ) })?; Ok(url) } } } cargo-0.66.0/src/cargo/sources/directory.rs000066400000000000000000000161071432416201200206150ustar00rootroot00000000000000use std::collections::HashMap; use std::fmt::{self, Debug, Formatter}; use std::path::{Path, PathBuf}; use std::task::Poll; use crate::core::source::MaybePackage; use crate::core::{Dependency, Package, PackageId, QueryKind, Source, SourceId, Summary}; use crate::sources::PathSource; use crate::util::errors::CargoResult; use crate::util::Config; use anyhow::Context as _; use cargo_util::{paths, Sha256}; use serde::Deserialize; pub struct DirectorySource<'cfg> { source_id: SourceId, root: PathBuf, packages: HashMap, config: &'cfg Config, updated: bool, } #[derive(Deserialize)] struct Checksum { package: Option, files: HashMap, } impl<'cfg> DirectorySource<'cfg> 
{ pub fn new(path: &Path, id: SourceId, config: &'cfg Config) -> DirectorySource<'cfg> { DirectorySource { source_id: id, root: path.to_path_buf(), config, packages: HashMap::new(), updated: false, } } } impl<'cfg> Debug for DirectorySource<'cfg> { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { write!(f, "DirectorySource {{ root: {:?} }}", self.root) } } impl<'cfg> Source for DirectorySource<'cfg> { fn query( &mut self, dep: &Dependency, kind: QueryKind, f: &mut dyn FnMut(Summary), ) -> Poll> { if !self.updated { return Poll::Pending; } let packages = self.packages.values().map(|p| &p.0); let matches = packages.filter(|pkg| match kind { QueryKind::Exact => dep.matches(pkg.summary()), QueryKind::Fuzzy => true, }); for summary in matches.map(|pkg| pkg.summary().clone()) { f(summary); } Poll::Ready(Ok(())) } fn supports_checksums(&self) -> bool { true } fn requires_precise(&self) -> bool { true } fn source_id(&self) -> SourceId { self.source_id } fn block_until_ready(&mut self) -> CargoResult<()> { if self.updated { return Ok(()); } self.packages.clear(); let entries = self.root.read_dir().with_context(|| { format!( "failed to read root of directory source: {}", self.root.display() ) })?; for entry in entries { let entry = entry?; let path = entry.path(); // Ignore hidden/dot directories as they typically don't contain // crates and otherwise may conflict with a VCS // (rust-lang/cargo#3414). if let Some(s) = path.file_name().and_then(|s| s.to_str()) { if s.starts_with('.') { continue; } } // Vendor directories are often checked into a VCS, but throughout // the lifetime of a vendor dir crates are often added and deleted. // Some VCS implementations don't always fully delete the directory // when a dir is removed from a different checkout. Sometimes a // mostly-empty dir is left behind. // // Additionally vendor directories are sometimes accompanied with // readme files and other auxiliary information not too interesting // to Cargo. 
// // To help handle all this we only try processing folders with a // `Cargo.toml` in them. This has the upside of being pretty // flexible with the contents of vendor directories but has the // downside of accidentally misconfigured vendor directories // silently returning less crates. if !path.join("Cargo.toml").exists() { continue; } let mut src = PathSource::new(&path, self.source_id, self.config); src.update()?; let mut pkg = src.root_package()?; let cksum_file = path.join(".cargo-checksum.json"); let cksum = paths::read(&path.join(cksum_file)).with_context(|| { format!( "failed to load checksum `.cargo-checksum.json` \ of {} v{}", pkg.package_id().name(), pkg.package_id().version() ) })?; let cksum: Checksum = serde_json::from_str(&cksum).with_context(|| { format!( "failed to decode `.cargo-checksum.json` of \ {} v{}", pkg.package_id().name(), pkg.package_id().version() ) })?; if let Some(package) = &cksum.package { pkg.manifest_mut() .summary_mut() .set_checksum(package.clone()); } self.packages.insert(pkg.package_id(), (pkg, cksum)); } self.updated = true; Ok(()) } fn download(&mut self, id: PackageId) -> CargoResult { self.packages .get(&id) .map(|p| &p.0) .cloned() .map(MaybePackage::Ready) .ok_or_else(|| anyhow::format_err!("failed to find package with id: {}", id)) } fn finish_download(&mut self, _id: PackageId, _data: Vec) -> CargoResult { panic!("no downloads to do") } fn fingerprint(&self, pkg: &Package) -> CargoResult { Ok(pkg.package_id().version().to_string()) } fn verify(&self, id: PackageId) -> CargoResult<()> { let (pkg, cksum) = match self.packages.get(&id) { Some(&(ref pkg, ref cksum)) => (pkg, cksum), None => anyhow::bail!("failed to find entry for `{}` in directory source", id), }; for (file, cksum) in cksum.files.iter() { let file = pkg.root().join(file); let actual = Sha256::new() .update_path(&file) .with_context(|| format!("failed to calculate checksum of: {}", file.display()))? 
.finish_hex(); if &*actual != cksum { anyhow::bail!( "the listed checksum of `{}` has changed:\n\ expected: {}\n\ actual: {}\n\ \n\ directory sources are not intended to be edited, if \ modifications are required then it is recommended \ that `[patch]` is used with a forked copy of the \ source\ ", file.display(), cksum, actual ); } } Ok(()) } fn describe(&self) -> String { format!("directory source `{}`", self.root.display()) } fn add_to_yanked_whitelist(&mut self, _pkgs: &[PackageId]) {} fn is_yanked(&mut self, _pkg: PackageId) -> Poll> { Poll::Ready(Ok(false)) } fn invalidate_cache(&mut self) { // Path source has no local cache. } } cargo-0.66.0/src/cargo/sources/git/000077500000000000000000000000001432416201200170215ustar00rootroot00000000000000cargo-0.66.0/src/cargo/sources/git/mod.rs000066400000000000000000000001731432416201200201470ustar00rootroot00000000000000pub use self::source::GitSource; pub use self::utils::{fetch, GitCheckout, GitDatabase, GitRemote}; mod source; mod utils; cargo-0.66.0/src/cargo/sources/git/source.rs000066400000000000000000000225711432416201200206760ustar00rootroot00000000000000use crate::core::source::{MaybePackage, QueryKind, Source, SourceId}; use crate::core::GitReference; use crate::core::{Dependency, Package, PackageId, Summary}; use crate::sources::git::utils::GitRemote; use crate::sources::PathSource; use crate::util::errors::CargoResult; use crate::util::hex::short_hash; use crate::util::Config; use anyhow::Context; use cargo_util::paths::exclude_from_backups_and_indexing; use log::trace; use std::fmt::{self, Debug, Formatter}; use std::task::Poll; use url::Url; pub struct GitSource<'cfg> { remote: GitRemote, manifest_reference: GitReference, locked_rev: Option, source_id: SourceId, path_source: Option>, ident: String, config: &'cfg Config, } impl<'cfg> GitSource<'cfg> { pub fn new(source_id: SourceId, config: &'cfg Config) -> CargoResult> { assert!(source_id.is_git(), "id is not git, id={}", source_id); let remote = 
GitRemote::new(source_id.url()); let ident = ident(&source_id); let source = GitSource { remote, manifest_reference: source_id.git_reference().unwrap().clone(), locked_rev: match source_id.precise() { Some(s) => Some(git2::Oid::from_str(s).with_context(|| { format!("precise value for git is not a git revision: {}", s) })?), None => None, }, source_id, path_source: None, ident, config, }; Ok(source) } pub fn url(&self) -> &Url { self.remote.url() } pub fn read_packages(&mut self) -> CargoResult> { if self.path_source.is_none() { self.invalidate_cache(); self.block_until_ready()?; } self.path_source.as_mut().unwrap().read_packages() } } fn ident(id: &SourceId) -> String { let ident = id .canonical_url() .raw_canonicalized_url() .path_segments() .and_then(|s| s.rev().next()) .unwrap_or(""); let ident = if ident.is_empty() { "_empty" } else { ident }; format!("{}-{}", ident, short_hash(id.canonical_url())) } impl<'cfg> Debug for GitSource<'cfg> { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { write!(f, "git repo at {}", self.remote.url())?; match self.manifest_reference.pretty_ref() { Some(s) => write!(f, " ({})", s), None => Ok(()), } } } impl<'cfg> Source for GitSource<'cfg> { fn query( &mut self, dep: &Dependency, kind: QueryKind, f: &mut dyn FnMut(Summary), ) -> Poll> { if let Some(src) = self.path_source.as_mut() { src.query(dep, kind, f) } else { Poll::Pending } } fn supports_checksums(&self) -> bool { false } fn requires_precise(&self) -> bool { true } fn source_id(&self) -> SourceId { self.source_id } fn block_until_ready(&mut self) -> CargoResult<()> { if self.path_source.is_some() { return Ok(()); } let git_fs = self.config.git_path(); // Ignore errors creating it, in case this is a read-only filesystem: // perhaps the later operations can succeed anyhow. let _ = git_fs.create_dir(); let git_path = self.config.assert_package_cache_locked(&git_fs); // Before getting a checkout, make sure that `/git` is // marked as excluded from indexing and backups. 
Older versions of Cargo // didn't do this, so we do it here regardless of whether `` // exists. // // This does not use `create_dir_all_excluded_from_backups_atomic` for // the same reason: we want to exclude it even if the directory already // exists. exclude_from_backups_and_indexing(&git_path); let db_path = git_path.join("db").join(&self.ident); let db = self.remote.db_at(&db_path).ok(); let (db, actual_rev) = match (self.locked_rev, db) { // If we have a locked revision, and we have a preexisting database // which has that revision, then no update needs to happen. (Some(rev), Some(db)) if db.contains(rev) => (db, rev), // If we're in offline mode, we're not locked, and we have a // database, then try to resolve our reference with the preexisting // repository. (None, Some(db)) if self.config.offline() => { let rev = db.resolve(&self.manifest_reference).with_context(|| { "failed to lookup reference in preexisting repository, and \ can't check for updates in offline mode (--offline)" })?; (db, rev) } // ... otherwise we use this state to update the git database. Note // that we still check for being offline here, for example in the // situation that we have a locked revision but the database // doesn't have it. (locked_rev, db) => { if self.config.offline() { anyhow::bail!( "can't checkout from '{}': you are in the offline mode (--offline)", self.remote.url() ); } self.config.shell().status( "Updating", format!("git repository `{}`", self.remote.url()), )?; trace!("updating git source `{:?}`", self.remote); self.remote.checkout( &db_path, db, &self.manifest_reference, locked_rev, self.config, )? } }; // Don’t use the full hash, in order to contribute less to reaching the // path length limit on Windows. See // . let short_id = db.to_short_id(actual_rev)?; // Check out `actual_rev` from the database to a scoped location on the // filesystem. This will use hard links and such to ideally make the // checkout operation here pretty fast. 
let checkout_path = git_path .join("checkouts") .join(&self.ident) .join(short_id.as_str()); db.copy_to(actual_rev, &checkout_path, self.config)?; let source_id = self.source_id.with_precise(Some(actual_rev.to_string())); let path_source = PathSource::new_recursive(&checkout_path, source_id, self.config); self.path_source = Some(path_source); self.locked_rev = Some(actual_rev); self.path_source.as_mut().unwrap().update() } fn download(&mut self, id: PackageId) -> CargoResult { trace!( "getting packages for package ID `{}` from `{:?}`", id, self.remote ); self.path_source .as_mut() .expect("BUG: `update()` must be called before `get()`") .download(id) } fn finish_download(&mut self, _id: PackageId, _data: Vec) -> CargoResult { panic!("no download should have started") } fn fingerprint(&self, _pkg: &Package) -> CargoResult { Ok(self.locked_rev.as_ref().unwrap().to_string()) } fn describe(&self) -> String { format!("Git repository {}", self.source_id) } fn add_to_yanked_whitelist(&mut self, _pkgs: &[PackageId]) {} fn is_yanked(&mut self, _pkg: PackageId) -> Poll> { Poll::Ready(Ok(false)) } fn invalidate_cache(&mut self) {} } #[cfg(test)] mod test { use super::ident; use crate::core::{GitReference, SourceId}; use crate::util::IntoUrl; #[test] pub fn test_url_to_path_ident_with_path() { let ident = ident(&src("https://github.com/carlhuda/cargo")); assert!(ident.starts_with("cargo-")); } #[test] pub fn test_url_to_path_ident_without_path() { let ident = ident(&src("https://github.com")); assert!(ident.starts_with("_empty-")); } #[test] fn test_canonicalize_idents_by_stripping_trailing_url_slash() { let ident1 = ident(&src("https://github.com/PistonDevelopers/piston/")); let ident2 = ident(&src("https://github.com/PistonDevelopers/piston")); assert_eq!(ident1, ident2); } #[test] fn test_canonicalize_idents_by_lowercasing_github_urls() { let ident1 = ident(&src("https://github.com/PistonDevelopers/piston")); let ident2 = 
ident(&src("https://github.com/pistondevelopers/piston")); assert_eq!(ident1, ident2); } #[test] fn test_canonicalize_idents_by_stripping_dot_git() { let ident1 = ident(&src("https://github.com/PistonDevelopers/piston")); let ident2 = ident(&src("https://github.com/PistonDevelopers/piston.git")); assert_eq!(ident1, ident2); } #[test] fn test_canonicalize_idents_different_protocols() { let ident1 = ident(&src("https://github.com/PistonDevelopers/piston")); let ident2 = ident(&src("git://github.com/PistonDevelopers/piston")); assert_eq!(ident1, ident2); } fn src(s: &str) -> SourceId { SourceId::for_git(&s.into_url().unwrap(), GitReference::DefaultBranch).unwrap() } } cargo-0.66.0/src/cargo/sources/git/utils.rs000066400000000000000000001300541432416201200205320ustar00rootroot00000000000000//! Utilities for handling git repositories, mainly around //! authentication/cloning. use crate::core::GitReference; use crate::util::errors::CargoResult; use crate::util::{network, Config, IntoUrl, MetricsCounter, Progress}; use anyhow::{anyhow, Context as _}; use cargo_util::{paths, ProcessBuilder}; use curl::easy::List; use git2::{self, ErrorClass, ObjectType, Oid}; use log::{debug, info}; use serde::ser; use serde::Serialize; use std::env; use std::fmt; use std::path::{Path, PathBuf}; use std::process::Command; use std::str; use std::time::{Duration, Instant}; use url::Url; fn serialize_str(t: &T, s: S) -> Result where T: fmt::Display, S: ser::Serializer, { s.collect_str(t) } pub struct GitShortID(git2::Buf); impl GitShortID { pub fn as_str(&self) -> &str { self.0.as_str().unwrap() } } /// `GitRemote` represents a remote repository. It gets cloned into a local /// `GitDatabase`. #[derive(PartialEq, Clone, Debug, Serialize)] pub struct GitRemote { #[serde(serialize_with = "serialize_str")] url: Url, } /// `GitDatabase` is a local clone of a remote repository's database. Multiple /// `GitCheckouts` can be cloned from this `GitDatabase`. 
#[derive(Serialize)] pub struct GitDatabase { remote: GitRemote, path: PathBuf, #[serde(skip_serializing)] repo: git2::Repository, } /// `GitCheckout` is a local checkout of a particular revision. Calling /// `clone_into` with a reference will resolve the reference into a revision, /// and return an `anyhow::Error` if no revision for that reference was found. #[derive(Serialize)] pub struct GitCheckout<'a> { database: &'a GitDatabase, location: PathBuf, #[serde(serialize_with = "serialize_str")] revision: git2::Oid, #[serde(skip_serializing)] repo: git2::Repository, } // Implementations impl GitRemote { pub fn new(url: &Url) -> GitRemote { GitRemote { url: url.clone() } } pub fn url(&self) -> &Url { &self.url } pub fn rev_for(&self, path: &Path, reference: &GitReference) -> CargoResult { reference.resolve(&self.db_at(path)?.repo) } pub fn checkout( &self, into: &Path, db: Option, reference: &GitReference, locked_rev: Option, cargo_config: &Config, ) -> CargoResult<(GitDatabase, git2::Oid)> { // If we have a previous instance of `GitDatabase` then fetch into that // if we can. If that can successfully load our revision then we've // populated the database with the latest version of `reference`, so // return that database and the rev we resolve to. if let Some(mut db) = db { fetch(&mut db.repo, self.url.as_str(), reference, cargo_config) .context(format!("failed to fetch into: {}", into.display()))?; match locked_rev { Some(rev) => { if db.contains(rev) { return Ok((db, rev)); } } None => { if let Ok(rev) = reference.resolve(&db.repo) { return Ok((db, rev)); } } } } // Otherwise start from scratch to handle corrupt git repositories. // After our fetch (which is interpreted as a clone now) we do the same // resolution to figure out what we cloned. 
if into.exists() { paths::remove_dir_all(into)?; } paths::create_dir_all(into)?; let mut repo = init(into, true)?; fetch(&mut repo, self.url.as_str(), reference, cargo_config) .context(format!("failed to clone into: {}", into.display()))?; let rev = match locked_rev { Some(rev) => rev, None => reference.resolve(&repo)?, }; Ok(( GitDatabase { remote: self.clone(), path: into.to_path_buf(), repo, }, rev, )) } pub fn db_at(&self, db_path: &Path) -> CargoResult { let repo = git2::Repository::open(db_path)?; Ok(GitDatabase { remote: self.clone(), path: db_path.to_path_buf(), repo, }) } } impl GitDatabase { pub fn copy_to( &self, rev: git2::Oid, dest: &Path, cargo_config: &Config, ) -> CargoResult> { // If the existing checkout exists, and it is fresh, use it. // A non-fresh checkout can happen if the checkout operation was // interrupted. In that case, the checkout gets deleted and a new // clone is created. let checkout = match git2::Repository::open(dest) .ok() .map(|repo| GitCheckout::new(dest, self, rev, repo)) .filter(|co| co.is_fresh()) { Some(co) => co, None => GitCheckout::clone_into(dest, self, rev, cargo_config)?, }; checkout.update_submodules(cargo_config)?; Ok(checkout) } pub fn to_short_id(&self, revision: git2::Oid) -> CargoResult { let obj = self.repo.find_object(revision, None)?; Ok(GitShortID(obj.short_id()?)) } pub fn contains(&self, oid: git2::Oid) -> bool { self.repo.revparse_single(&oid.to_string()).is_ok() } pub fn resolve(&self, r: &GitReference) -> CargoResult { r.resolve(&self.repo) } } impl GitReference { pub fn resolve(&self, repo: &git2::Repository) -> CargoResult { let id = match self { // Note that we resolve the named tag here in sync with where it's // fetched into via `fetch` below. 
GitReference::Tag(s) => (|| -> CargoResult { let refname = format!("refs/remotes/origin/tags/{}", s); let id = repo.refname_to_id(&refname)?; let obj = repo.find_object(id, None)?; let obj = obj.peel(ObjectType::Commit)?; Ok(obj.id()) })() .with_context(|| format!("failed to find tag `{}`", s))?, // Resolve the remote name since that's all we're configuring in // `fetch` below. GitReference::Branch(s) => { let name = format!("origin/{}", s); let b = repo .find_branch(&name, git2::BranchType::Remote) .with_context(|| format!("failed to find branch `{}`", s))?; b.get() .target() .ok_or_else(|| anyhow::format_err!("branch `{}` did not have a target", s))? } // We'll be using the HEAD commit GitReference::DefaultBranch => { let head_id = repo.refname_to_id("refs/remotes/origin/HEAD")?; let head = repo.find_object(head_id, None)?; head.peel(ObjectType::Commit)?.id() } GitReference::Rev(s) => { let obj = repo.revparse_single(s)?; match obj.as_tag() { Some(tag) => tag.target_id(), None => obj.id(), } } }; Ok(id) } } impl<'a> GitCheckout<'a> { fn new( path: &Path, database: &'a GitDatabase, revision: git2::Oid, repo: git2::Repository, ) -> GitCheckout<'a> { GitCheckout { location: path.to_path_buf(), database, revision, repo, } } fn clone_into( into: &Path, database: &'a GitDatabase, revision: git2::Oid, config: &Config, ) -> CargoResult> { let dirname = into.parent().unwrap(); paths::create_dir_all(&dirname)?; if into.exists() { paths::remove_dir_all(into)?; } // we're doing a local filesystem-to-filesystem clone so there should // be no need to respect global configuration options, so pass in // an empty instance of `git2::Config` below. let git_config = git2::Config::new()?; // Clone the repository, but make sure we use the "local" option in // libgit2 which will attempt to use hardlinks to set up the database. // This should speed up the clone operation quite a bit if it works. 
// // Note that we still use the same fetch options because while we don't // need authentication information we may want progress bars and such. let url = database.path.into_url()?; let mut repo = None; with_fetch_options(&git_config, url.as_str(), config, &mut |fopts| { let mut checkout = git2::build::CheckoutBuilder::new(); checkout.dry_run(); // we'll do this below during a `reset` let r = git2::build::RepoBuilder::new() // use hard links and/or copy the database, we're doing a // filesystem clone so this'll speed things up quite a bit. .clone_local(git2::build::CloneLocal::Local) .with_checkout(checkout) .fetch_options(fopts) .clone(url.as_str(), into)?; repo = Some(r); Ok(()) })?; let repo = repo.unwrap(); let checkout = GitCheckout::new(into, database, revision, repo); checkout.reset(config)?; Ok(checkout) } fn is_fresh(&self) -> bool { match self.repo.revparse_single("HEAD") { Ok(ref head) if head.id() == self.revision => { // See comments in reset() for why we check this self.location.join(".cargo-ok").exists() } _ => false, } } fn reset(&self, config: &Config) -> CargoResult<()> { // If we're interrupted while performing this reset (e.g., we die because // of a signal) Cargo needs to be sure to try to check out this repo // again on the next go-round. // // To enable this we have a dummy file in our checkout, .cargo-ok, which // if present means that the repo has been successfully reset and is // ready to go. Hence if we start to do a reset, we make sure this file // *doesn't* exist, and then once we're done we create the file. let ok_file = self.location.join(".cargo-ok"); let _ = paths::remove_file(&ok_file); info!("reset {} to {}", self.repo.path().display(), self.revision); // Ensure libgit2 won't mess with newlines when we vendor. 
if let Ok(mut git_config) = self.repo.config() { git_config.set_bool("core.autocrlf", false)?; } let object = self.repo.find_object(self.revision, None)?; reset(&self.repo, &object, config)?; paths::create(ok_file)?; Ok(()) } fn update_submodules(&self, cargo_config: &Config) -> CargoResult<()> { return update_submodules(&self.repo, cargo_config); fn update_submodules(repo: &git2::Repository, cargo_config: &Config) -> CargoResult<()> { debug!("update submodules for: {:?}", repo.workdir().unwrap()); for mut child in repo.submodules()? { update_submodule(repo, &mut child, cargo_config).with_context(|| { format!( "failed to update submodule `{}`", child.name().unwrap_or("") ) })?; } Ok(()) } fn update_submodule( parent: &git2::Repository, child: &mut git2::Submodule<'_>, cargo_config: &Config, ) -> CargoResult<()> { child.init(false)?; let url = child.url().ok_or_else(|| { anyhow::format_err!("non-utf8 url for submodule {:?}?", child.path()) })?; // Skip the submodule if the config says not to update it. if child.update_strategy() == git2::SubmoduleUpdate::None { cargo_config.shell().status( "Skipping", format!( "git submodule `{}` due to update strategy in .gitmodules", url ), )?; return Ok(()); } // A submodule which is listed in .gitmodules but not actually // checked out will not have a head id, so we should ignore it. let head = match child.head_id() { Some(head) => head, None => return Ok(()), }; // If the submodule hasn't been checked out yet, we need to // clone it. If it has been checked out and the head is the same // as the submodule's head, then we can skip an update and keep // recursing. let head_and_repo = child.open().and_then(|repo| { let target = repo.head()?.target(); Ok((target, repo)) }); let mut repo = match head_and_repo { Ok((head, repo)) => { if child.head_id() == head { return update_submodules(&repo, cargo_config); } repo } Err(..) 
=> { let path = parent.workdir().unwrap().join(child.path()); let _ = paths::remove_dir_all(&path); init(&path, false)? } }; // Fetch data from origin and reset to the head commit let reference = GitReference::Rev(head.to_string()); cargo_config .shell() .status("Updating", format!("git submodule `{}`", url))?; fetch(&mut repo, url, &reference, cargo_config).with_context(|| { format!( "failed to fetch submodule `{}` from {}", child.name().unwrap_or(""), url ) })?; let obj = repo.find_object(head, None)?; reset(&repo, &obj, cargo_config)?; update_submodules(&repo, cargo_config) } } } /// Prepare the authentication callbacks for cloning a git repository. /// /// The main purpose of this function is to construct the "authentication /// callback" which is used to clone a repository. This callback will attempt to /// find the right authentication on the system (without user input) and will /// guide libgit2 in doing so. /// /// The callback is provided `allowed` types of credentials, and we try to do as /// much as possible based on that: /// /// * Prioritize SSH keys from the local ssh agent as they're likely the most /// reliable. The username here is prioritized from the credential /// callback, then from whatever is configured in git itself, and finally /// we fall back to the generic user of `git`. /// /// * If a username/password is allowed, then we fallback to git2-rs's /// implementation of the credential helper. This is what is configured /// with `credential.helper` in git, and is the interface for the macOS /// keychain, for example. /// /// * After the above two have failed, we just kinda grapple attempting to /// return *something*. /// /// If any form of authentication fails, libgit2 will repeatedly ask us for /// credentials until we give it a reason to not do so. To ensure we don't /// just sit here looping forever we keep track of authentications we've /// attempted and we don't try the same ones again. 
fn with_authentication(url: &str, cfg: &git2::Config, mut f: F) -> CargoResult where F: FnMut(&mut git2::Credentials<'_>) -> CargoResult, { let mut cred_helper = git2::CredentialHelper::new(url); cred_helper.config(cfg); let mut ssh_username_requested = false; let mut cred_helper_bad = None; let mut ssh_agent_attempts = Vec::new(); let mut any_attempts = false; let mut tried_sshkey = false; let mut url_attempt = None; let orig_url = url; let mut res = f(&mut |url, username, allowed| { any_attempts = true; if url != orig_url { url_attempt = Some(url.to_string()); } // libgit2's "USERNAME" authentication actually means that it's just // asking us for a username to keep going. This is currently only really // used for SSH authentication and isn't really an authentication type. // The logic currently looks like: // // let user = ...; // if (user.is_null()) // user = callback(USERNAME, null, ...); // // callback(SSH_KEY, user, ...) // // So if we're being called here then we know that (a) we're using ssh // authentication and (b) no username was specified in the URL that // we're trying to clone. We need to guess an appropriate username here, // but that may involve a few attempts. Unfortunately we can't switch // usernames during one authentication session with libgit2, so to // handle this we bail out of this authentication session after setting // the flag `ssh_username_requested`, and then we handle this below. if allowed.contains(git2::CredentialType::USERNAME) { debug_assert!(username.is_none()); ssh_username_requested = true; return Err(git2::Error::from_str("gonna try usernames later")); } // An "SSH_KEY" authentication indicates that we need some sort of SSH // authentication. This can currently either come from the ssh-agent // process or from a raw in-memory SSH key. Cargo only supports using // ssh-agent currently. 
// // If we get called with this then the only way that should be possible // is if a username is specified in the URL itself (e.g., `username` is // Some), hence the unwrap() here. We try custom usernames down below. if allowed.contains(git2::CredentialType::SSH_KEY) && !tried_sshkey { // If ssh-agent authentication fails, libgit2 will keep // calling this callback asking for other authentication // methods to try. Make sure we only try ssh-agent once, // to avoid looping forever. tried_sshkey = true; let username = username.unwrap(); debug_assert!(!ssh_username_requested); ssh_agent_attempts.push(username.to_string()); return git2::Cred::ssh_key_from_agent(username); } // Sometimes libgit2 will ask for a username/password in plaintext. This // is where Cargo would have an interactive prompt if we supported it, // but we currently don't! Right now the only way we support fetching a // plaintext password is through the `credential.helper` support, so // fetch that here. // // If ssh-agent authentication fails, libgit2 will keep calling this // callback asking for other authentication methods to try. Check // cred_helper_bad to make sure we only try the git credential helper // once, to avoid looping forever. if allowed.contains(git2::CredentialType::USER_PASS_PLAINTEXT) && cred_helper_bad.is_none() { let r = git2::Cred::credential_helper(cfg, url, username); cred_helper_bad = Some(r.is_err()); return r; } // I'm... not sure what the DEFAULT kind of authentication is, but seems // easy to support? if allowed.contains(git2::CredentialType::DEFAULT) { return git2::Cred::default(); } // Whelp, we tried our best Err(git2::Error::from_str("no authentication available")) }); // Ok, so if it looks like we're going to be doing ssh authentication, we // want to try a few different usernames as one wasn't specified in the URL // for us to use. In order, we'll try: // // * A credential helper's username for this URL, if available. // * This account's username. 
// * "git" // // We have to restart the authentication session each time (due to // constraints in libssh2 I guess? maybe this is inherent to ssh?), so we // call our callback, `f`, in a loop here. if ssh_username_requested { debug_assert!(res.is_err()); let mut attempts = vec![String::from("git")]; if let Ok(s) = env::var("USER").or_else(|_| env::var("USERNAME")) { attempts.push(s); } if let Some(ref s) = cred_helper.username { attempts.push(s.clone()); } while let Some(s) = attempts.pop() { // We should get `USERNAME` first, where we just return our attempt, // and then after that we should get `SSH_KEY`. If the first attempt // fails we'll get called again, but we don't have another option so // we bail out. let mut attempts = 0; res = f(&mut |_url, username, allowed| { if allowed.contains(git2::CredentialType::USERNAME) { return git2::Cred::username(&s); } if allowed.contains(git2::CredentialType::SSH_KEY) { debug_assert_eq!(Some(&s[..]), username); attempts += 1; if attempts == 1 { ssh_agent_attempts.push(s.to_string()); return git2::Cred::ssh_key_from_agent(&s); } } Err(git2::Error::from_str("no authentication available")) }); // If we made two attempts then that means: // // 1. A username was requested, we returned `s`. // 2. An ssh key was requested, we returned to look up `s` in the // ssh agent. // 3. For whatever reason that lookup failed, so we were asked again // for another mode of authentication. // // Essentially, if `attempts == 2` then in theory the only error was // that this username failed to authenticate (e.g., no other network // errors happened). Otherwise something else is funny so we bail // out. if attempts != 2 { break; } } } let mut err = match res { Ok(e) => return Ok(e), Err(e) => e, }; // In the case of an authentication failure (where we tried something) then // we try to give a more helpful error message about precisely what we // tried. 
if any_attempts { let mut msg = "failed to authenticate when downloading \ repository" .to_string(); if let Some(attempt) = &url_attempt { if url != attempt { msg.push_str(": "); msg.push_str(attempt); } } msg.push('\n'); if !ssh_agent_attempts.is_empty() { let names = ssh_agent_attempts .iter() .map(|s| format!("`{}`", s)) .collect::>() .join(", "); msg.push_str(&format!( "\n* attempted ssh-agent authentication, but \ no usernames succeeded: {}", names )); } if let Some(failed_cred_helper) = cred_helper_bad { if failed_cred_helper { msg.push_str( "\n* attempted to find username/password via \ git's `credential.helper` support, but failed", ); } else { msg.push_str( "\n* attempted to find username/password via \ `credential.helper`, but maybe the found \ credentials were incorrect", ); } } msg.push_str("\n\n"); msg.push_str("if the git CLI succeeds then `net.git-fetch-with-cli` may help here\n"); msg.push_str("https://doc.rust-lang.org/cargo/reference/config.html#netgit-fetch-with-cli"); err = err.context(msg); // Otherwise if we didn't even get to the authentication phase them we may // have failed to set up a connection, in these cases hint on the // `net.git-fetch-with-cli` configuration option. 
} else if let Some(e) = err.downcast_ref::() { match e.class() { ErrorClass::Net | ErrorClass::Ssl | ErrorClass::Submodule | ErrorClass::FetchHead | ErrorClass::Ssh | ErrorClass::Callback | ErrorClass::Http => { let mut msg = "network failure seems to have happened\n".to_string(); msg.push_str( "if a proxy or similar is necessary `net.git-fetch-with-cli` may help here\n", ); msg.push_str( "https://doc.rust-lang.org/cargo/reference/config.html#netgit-fetch-with-cli", ); err = err.context(msg); } _ => {} } } Err(err) } fn reset(repo: &git2::Repository, obj: &git2::Object<'_>, config: &Config) -> CargoResult<()> { let mut pb = Progress::new("Checkout", config); let mut opts = git2::build::CheckoutBuilder::new(); opts.progress(|_, cur, max| { drop(pb.tick(cur, max, "")); }); debug!("doing reset"); repo.reset(obj, git2::ResetType::Hard, Some(&mut opts))?; debug!("reset done"); Ok(()) } pub fn with_fetch_options( git_config: &git2::Config, url: &str, config: &Config, cb: &mut dyn FnMut(git2::FetchOptions<'_>) -> CargoResult<()>, ) -> CargoResult<()> { let mut progress = Progress::new("Fetch", config); network::with_retry(config, || { with_authentication(url, git_config, |f| { let mut last_update = Instant::now(); let mut rcb = git2::RemoteCallbacks::new(); // We choose `N=10` here to make a `300ms * 10slots ~= 3000ms` // sliding window for tracking the data transfer rate (in bytes/s). let mut counter = MetricsCounter::<10>::new(0, last_update); rcb.credentials(f); rcb.transfer_progress(|stats| { let indexed_deltas = stats.indexed_deltas(); let msg = if indexed_deltas > 0 { // Resolving deltas. format!( ", ({}/{}) resolving deltas", indexed_deltas, stats.total_deltas() ) } else { // Receiving objects. // // # Caveat // // Progress bar relies on git2 calling `transfer_progress` // to update its transfer rate, but we cannot guarantee a // periodic call of that callback. 
Thus if we don't receive // any data for, say, 10 seconds, the rate will get stuck // and never go down to 0B/s. // In the future, we need to find away to update the rate // even when the callback is not called. let now = Instant::now(); // Scrape a `received_bytes` to the counter every 300ms. if now - last_update > Duration::from_millis(300) { counter.add(stats.received_bytes(), now); last_update = now; } fn format_bytes(bytes: f32) -> (&'static str, f32) { static UNITS: [&str; 5] = ["", "Ki", "Mi", "Gi", "Ti"]; let i = (bytes.log2() / 10.0).min(4.0) as usize; (UNITS[i], bytes / 1024_f32.powi(i as i32)) } let (unit, rate) = format_bytes(counter.rate()); format!(", {:.2}{}B/s", rate, unit) }; progress .tick(stats.indexed_objects(), stats.total_objects(), &msg) .is_ok() }); // Create a local anonymous remote in the repository to fetch the // url let mut opts = git2::FetchOptions::new(); opts.remote_callbacks(rcb); cb(opts) })?; Ok(()) }) } pub fn fetch( repo: &mut git2::Repository, url: &str, reference: &GitReference, config: &Config, ) -> CargoResult<()> { if config.frozen() { anyhow::bail!( "attempting to update a git repository, but --frozen \ was specified" ) } if !config.network_allowed() { anyhow::bail!("can't update a git repository in the offline mode") } // If we're fetching from GitHub, attempt GitHub's special fast path for // testing if we've already got an up-to-date copy of the repository let oid_to_fetch = match github_fast_path(repo, url, reference, config) { Ok(FastPathRev::UpToDate) => return Ok(()), Ok(FastPathRev::NeedsFetch(rev)) => Some(rev), Ok(FastPathRev::Indeterminate) => None, Err(e) => { debug!("failed to check github {:?}", e); None } }; // We reuse repositories quite a lot, so before we go through and update the // repo check to see if it's a little too old and could benefit from a gc. // In theory this shouldn't be too too expensive compared to the network // request we're about to issue. 
maybe_gc_repo(repo)?; // Translate the reference desired here into an actual list of refspecs // which need to get fetched. Additionally record if we're fetching tags. let mut refspecs = Vec::new(); let mut tags = false; // The `+` symbol on the refspec means to allow a forced (fast-forward) // update which is needed if there is ever a force push that requires a // fast-forward. match reference { // For branches and tags we can fetch simply one reference and copy it // locally, no need to fetch other branches/tags. GitReference::Branch(b) => { refspecs.push(format!("+refs/heads/{0}:refs/remotes/origin/{0}", b)); } GitReference::Tag(t) => { refspecs.push(format!("+refs/tags/{0}:refs/remotes/origin/tags/{0}", t)); } GitReference::DefaultBranch => { refspecs.push(String::from("+HEAD:refs/remotes/origin/HEAD")); } GitReference::Rev(rev) => { if rev.starts_with("refs/") { refspecs.push(format!("+{0}:{0}", rev)); } else if let Some(oid_to_fetch) = oid_to_fetch { refspecs.push(format!("+{0}:refs/commit/{0}", oid_to_fetch)); } else { // We don't know what the rev will point to. To handle this // situation we fetch all branches and tags, and then we pray // it's somewhere in there. refspecs.push(String::from("+refs/heads/*:refs/remotes/origin/*")); refspecs.push(String::from("+HEAD:refs/remotes/origin/HEAD")); tags = true; } } } // Unfortunately `libgit2` is notably lacking in the realm of authentication // when compared to the `git` command line. As a result, allow an escape // hatch for users that would prefer to use `git`-the-CLI for fetching // repositories instead of `libgit2`-the-library. This should make more // flavors of authentication possible while also still giving us all the // speed and portability of using `libgit2`. 
if let Some(true) = config.net_config()?.git_fetch_with_cli { return fetch_with_cli(repo, url, &refspecs, tags, config); } debug!("doing a fetch for {}", url); let git_config = git2::Config::open_default()?; with_fetch_options(&git_config, url, config, &mut |mut opts| { if tags { opts.download_tags(git2::AutotagOption::All); } // The `fetch` operation here may fail spuriously due to a corrupt // repository. It could also fail, however, for a whole slew of other // reasons (aka network related reasons). We want Cargo to automatically // recover from corrupt repositories, but we don't want Cargo to stomp // over other legitimate errors. // // Consequently we save off the error of the `fetch` operation and if it // looks like a "corrupt repo" error then we blow away the repo and try // again. If it looks like any other kind of error, or if we've already // blown away the repository, then we want to return the error as-is. let mut repo_reinitialized = false; loop { debug!("initiating fetch of {:?} from {}", refspecs, url); let res = repo .remote_anonymous(url)? 
.fetch(&refspecs, Some(&mut opts), None); let err = match res { Ok(()) => break, Err(e) => e, }; debug!("fetch failed: {}", err); if !repo_reinitialized && matches!(err.class(), ErrorClass::Reference | ErrorClass::Odb) { repo_reinitialized = true; debug!( "looks like this is a corrupt repository, reinitializing \ and trying again" ); if reinitialize(repo).is_ok() { continue; } } return Err(err.into()); } Ok(()) }) } fn fetch_with_cli( repo: &mut git2::Repository, url: &str, refspecs: &[String], tags: bool, config: &Config, ) -> CargoResult<()> { let mut cmd = ProcessBuilder::new("git"); cmd.arg("fetch"); if tags { cmd.arg("--tags"); } cmd.arg("--force") // handle force pushes .arg("--update-head-ok") // see discussion in #2078 .arg(url) .args(refspecs) // If cargo is run by git (for example, the `exec` command in `git // rebase`), the GIT_DIR is set by git and will point to the wrong // location (this takes precedence over the cwd). Make sure this is // unset so git will look at cwd for the repo. .env_remove("GIT_DIR") // The reset of these may not be necessary, but I'm including them // just to be extra paranoid and avoid any issues. .env_remove("GIT_WORK_TREE") .env_remove("GIT_INDEX_FILE") .env_remove("GIT_OBJECT_DIRECTORY") .env_remove("GIT_ALTERNATE_OBJECT_DIRECTORIES") .cwd(repo.path()); config .shell() .verbose(|s| s.status("Running", &cmd.to_string()))?; cmd.exec_with_output()?; Ok(()) } /// Cargo has a bunch of long-lived git repositories in its global cache and /// some, like the index, are updated very frequently. Right now each update /// creates a new "pack file" inside the git database, and over time this can /// cause bad performance and bad current behavior in libgit2. /// /// One pathological use case today is where libgit2 opens hundreds of file /// descriptors, getting us dangerously close to blowing out the OS limits of /// how many fds we can have open. This is detailed in #4403. /// /// To try to combat this problem we attempt a `git gc` here. 
Note, though, that /// we may not even have `git` installed on the system! As a result we /// opportunistically try a `git gc` when the pack directory looks too big, and /// failing that we just blow away the repository and start over. fn maybe_gc_repo(repo: &mut git2::Repository) -> CargoResult<()> { // Here we arbitrarily declare that if you have more than 100 files in your // `pack` folder that we need to do a gc. let entries = match repo.path().join("objects/pack").read_dir() { Ok(e) => e.count(), Err(_) => { debug!("skipping gc as pack dir appears gone"); return Ok(()); } }; let max = env::var("__CARGO_PACKFILE_LIMIT") .ok() .and_then(|s| s.parse::().ok()) .unwrap_or(100); if entries < max { debug!("skipping gc as there's only {} pack files", entries); return Ok(()); } // First up, try a literal `git gc` by shelling out to git. This is pretty // likely to fail though as we may not have `git` installed. Note that // libgit2 doesn't currently implement the gc operation, so there's no // equivalent there. match Command::new("git") .arg("gc") .current_dir(repo.path()) .output() { Ok(out) => { debug!( "git-gc status: {}\n\nstdout ---\n{}\nstderr ---\n{}", out.status, String::from_utf8_lossy(&out.stdout), String::from_utf8_lossy(&out.stderr) ); if out.status.success() { let new = git2::Repository::open(repo.path())?; *repo = new; return Ok(()); } } Err(e) => debug!("git-gc failed to spawn: {}", e), } // Alright all else failed, let's start over. reinitialize(repo) } fn reinitialize(repo: &mut git2::Repository) -> CargoResult<()> { // Here we want to drop the current repository object pointed to by `repo`, // so we initialize temporary repository in a sub-folder, blow away the // existing git folder, and then recreate the git repo. Finally we blow away // the `tmp` folder we allocated. 
let path = repo.path().to_path_buf(); debug!("reinitializing git repo at {:?}", path); let tmp = path.join("tmp"); let bare = !repo.path().ends_with(".git"); *repo = init(&tmp, false)?; for entry in path.read_dir()? { let entry = entry?; if entry.file_name().to_str() == Some("tmp") { continue; } let path = entry.path(); drop(paths::remove_file(&path).or_else(|_| paths::remove_dir_all(&path))); } *repo = init(&path, bare)?; paths::remove_dir_all(&tmp)?; Ok(()) } fn init(path: &Path, bare: bool) -> CargoResult { let mut opts = git2::RepositoryInitOptions::new(); // Skip anything related to templates, they just call all sorts of issues as // we really don't want to use them yet they insist on being used. See #6240 // for an example issue that comes up. opts.external_template(false); opts.bare(bare); Ok(git2::Repository::init_opts(&path, &opts)?) } enum FastPathRev { /// The local rev (determined by `reference.resolve(repo)`) is already up to /// date with what this rev resolves to on GitHub's server. UpToDate, /// The following SHA must be fetched in order for the local rev to become /// up to date. NeedsFetch(Oid), /// Don't know whether local rev is up to date. We'll fetch _all_ branches /// and tags from the server and see what happens. Indeterminate, } /// Updating the index is done pretty regularly so we want it to be as fast as /// possible. For registries hosted on GitHub (like the crates.io index) there's /// a fast path available to use [1] to tell us that there's no updates to be /// made. /// /// This function will attempt to hit that fast path and verify that the `oid` /// is actually the current branch of the repository. /// /// [1]: https://developer.github.com/v3/repos/commits/#get-the-sha-1-of-a-commit-reference /// /// Note that this function should never cause an actual failure because it's /// just a fast path. As a result all errors are ignored in this function and we /// just return a `bool`. 
Any real errors will be reported through the normal /// update path above. fn github_fast_path( repo: &mut git2::Repository, url: &str, reference: &GitReference, config: &Config, ) -> CargoResult { let url = Url::parse(url)?; if !is_github(&url) { return Ok(FastPathRev::Indeterminate); } let local_object = reference.resolve(repo).ok(); let github_branch_name = match reference { GitReference::Branch(branch) => branch, GitReference::Tag(tag) => tag, GitReference::DefaultBranch => "HEAD", GitReference::Rev(rev) => { if rev.starts_with("refs/") { rev } else if looks_like_commit_hash(rev) { // `revparse_single` (used by `resolve`) is the only way to turn // short hash -> long hash, but it also parses other things, // like branch and tag names, which might coincidentally be // valid hex. // // We only return early if `rev` is a prefix of the object found // by `revparse_single`. Don't bother talking to GitHub in that // case, since commit hashes are permanent. If a commit with the // requested hash is already present in the local clone, its // contents must be the same as what is on the server for that // hash. // // If `rev` is not found locally by `revparse_single`, we'll // need GitHub to resolve it and get a hash. If `rev` is found // but is not a short hash of the found object, it's probably a // branch and we also need to get a hash from GitHub, in case // the branch has moved. 
if let Some(local_object) = local_object { if is_short_hash_of(rev, local_object) { return Ok(FastPathRev::UpToDate); } } rev } else { debug!("can't use github fast path with `rev = \"{}\"`", rev); return Ok(FastPathRev::Indeterminate); } } }; // This expects GitHub urls in the form `github.com/user/repo` and nothing // else let mut pieces = url .path_segments() .ok_or_else(|| anyhow!("no path segments on url"))?; let username = pieces .next() .ok_or_else(|| anyhow!("couldn't find username"))?; let repository = pieces .next() .ok_or_else(|| anyhow!("couldn't find repository name"))?; if pieces.next().is_some() { anyhow::bail!("too many segments on URL"); } // Trim off the `.git` from the repository, if present, since that's // optional for GitHub and won't work when we try to use the API as well. let repository = repository.strip_suffix(".git").unwrap_or(repository); let url = format!( "https://api.github.com/repos/{}/{}/commits/{}", username, repository, github_branch_name, ); let mut handle = config.http()?.borrow_mut(); debug!("attempting GitHub fast path for {}", url); handle.get(true)?; handle.url(&url)?; handle.useragent("cargo")?; handle.http_headers({ let mut headers = List::new(); headers.append("Accept: application/vnd.github.3.sha")?; if let Some(local_object) = local_object { headers.append(&format!("If-None-Match: \"{}\"", local_object))?; } headers })?; let mut response_body = Vec::new(); let mut transfer = handle.transfer(); transfer.write_function(|data| { response_body.extend_from_slice(data); Ok(data.len()) })?; transfer.perform()?; drop(transfer); // end borrow of handle so that response_code can be called let response_code = handle.response_code()?; if response_code == 304 { Ok(FastPathRev::UpToDate) } else if response_code == 200 { let oid_to_fetch = str::from_utf8(&response_body)?.parse::()?; Ok(FastPathRev::NeedsFetch(oid_to_fetch)) } else { // Usually response_code == 404 if the repository does not exist, and // response_code == 422 if 
exists but GitHub is unable to resolve the // requested rev. Ok(FastPathRev::Indeterminate) } } fn is_github(url: &Url) -> bool { url.host_str() == Some("github.com") } fn looks_like_commit_hash(rev: &str) -> bool { rev.len() >= 7 && rev.chars().all(|ch| ch.is_ascii_hexdigit()) } fn is_short_hash_of(rev: &str, oid: Oid) -> bool { let long_hash = oid.to_string(); match long_hash.get(..rev.len()) { Some(truncated_long_hash) => truncated_long_hash.eq_ignore_ascii_case(rev), None => false, } } cargo-0.66.0/src/cargo/sources/mod.rs000066400000000000000000000005731432416201200173700ustar00rootroot00000000000000pub use self::config::SourceConfigMap; pub use self::directory::DirectorySource; pub use self::git::GitSource; pub use self::path::PathSource; pub use self::registry::{RegistrySource, CRATES_IO_DOMAIN, CRATES_IO_INDEX, CRATES_IO_REGISTRY}; pub use self::replaced::ReplacedSource; pub mod config; pub mod directory; pub mod git; pub mod path; pub mod registry; pub mod replaced; cargo-0.66.0/src/cargo/sources/path.rs000066400000000000000000000501621432416201200175440ustar00rootroot00000000000000use std::collections::HashSet; use std::fmt::{self, Debug, Formatter}; use std::path::{Path, PathBuf}; use std::task::Poll; use crate::core::source::MaybePackage; use crate::core::{Dependency, Package, PackageId, QueryKind, Source, SourceId, Summary}; use crate::ops; use crate::util::{internal, CargoResult, Config}; use anyhow::Context as _; use cargo_util::paths; use filetime::FileTime; use ignore::gitignore::GitignoreBuilder; use log::{trace, warn}; use walkdir::WalkDir; pub struct PathSource<'cfg> { source_id: SourceId, path: PathBuf, updated: bool, packages: Vec, config: &'cfg Config, recursive: bool, } impl<'cfg> PathSource<'cfg> { /// Invoked with an absolute path to a directory that contains a `Cargo.toml`. /// /// This source will only return the package at precisely the `path` /// specified, and it will be an error if there's not a package at `path`. 
pub fn new(path: &Path, source_id: SourceId, config: &'cfg Config) -> PathSource<'cfg> { PathSource { source_id, path: path.to_path_buf(), updated: false, packages: Vec::new(), config, recursive: false, } } /// Creates a new source which is walked recursively to discover packages. /// /// This is similar to the `new` method except that instead of requiring a /// valid package to be present at `root` the folder is walked entirely to /// crawl for packages. /// /// Note that this should be used with care and likely shouldn't be chosen /// by default! pub fn new_recursive(root: &Path, id: SourceId, config: &'cfg Config) -> PathSource<'cfg> { PathSource { recursive: true, ..PathSource::new(root, id, config) } } pub fn preload_with(&mut self, pkg: Package) { assert!(!self.updated); assert!(!self.recursive); assert!(self.packages.is_empty()); self.updated = true; self.packages.push(pkg); } pub fn root_package(&mut self) -> CargoResult { trace!("root_package; source={:?}", self); self.update()?; match self.packages.iter().find(|p| p.root() == &*self.path) { Some(pkg) => Ok(pkg.clone()), None => Err(internal(format!( "no package found in source {:?}", self.path ))), } } pub fn read_packages(&self) -> CargoResult> { if self.updated { Ok(self.packages.clone()) } else if self.recursive { ops::read_packages(&self.path, self.source_id, self.config) } else { let path = self.path.join("Cargo.toml"); let (pkg, _) = ops::read_package(&path, self.source_id, self.config)?; Ok(vec![pkg]) } } /// List all files relevant to building this package inside this source. /// /// This function will use the appropriate methods to determine the /// set of files underneath this source's directory which are relevant for /// building `pkg`. /// /// The basic assumption of this method is that all files in the directory /// are relevant for building this package, but it also contains logic to /// use other methods like .gitignore to filter the list of files. 
pub fn list_files(&self, pkg: &Package) -> CargoResult<Vec<PathBuf>> {
    self._list_files(pkg).with_context(|| {
        format!(
            "failed to determine list of files in {}",
            pkg.root().display()
        )
    })
}

/// Inner worker for `list_files`, so the caller can attach context once.
fn _list_files(&self, pkg: &Package) -> CargoResult<Vec<PathBuf>> {
    let root = pkg.root();
    let no_include_option = pkg.manifest().include().is_empty();
    let git_repo = if no_include_option {
        self.discover_git_repo(root)?
    } else {
        None
    };

    let mut exclude_builder = GitignoreBuilder::new(root);
    if no_include_option && git_repo.is_none() {
        // no include option and not git repo discovered (see rust-lang/cargo#7183).
        exclude_builder.add_line(None, ".*")?;
    }
    for rule in pkg.manifest().exclude() {
        exclude_builder.add_line(None, rule)?;
    }
    let ignore_exclude = exclude_builder.build()?;

    let mut include_builder = GitignoreBuilder::new(root);
    for rule in pkg.manifest().include() {
        include_builder.add_line(None, rule)?;
    }
    let ignore_include = include_builder.build()?;

    let ignore_should_package = |relative_path: &Path, is_dir: bool| {
        // "Include" and "exclude" options are mutually exclusive.
        if no_include_option {
            !ignore_exclude
                .matched_path_or_any_parents(relative_path, is_dir)
                .is_ignore()
        } else {
            if is_dir {
                // Generally, include directives don't list every
                // directory (nor should they!). Just skip all directory
                // checks, and only check files.
                return true;
            }
            ignore_include
                .matched_path_or_any_parents(relative_path, /* is_dir */ false)
                .is_ignore()
        }
    };

    let filter = |path: &Path, is_dir: bool| {
        let relative_path = match path.strip_prefix(root) {
            Ok(p) => p,
            Err(_) => return false,
        };

        let rel = relative_path.as_os_str();
        if rel == "Cargo.lock" {
            return pkg.include_lockfile();
        } else if rel == "Cargo.toml" {
            return true;
        }

        ignore_should_package(relative_path, is_dir)
    };

    // Attempt Git-prepopulate only if no `include` (see rust-lang/cargo#4135).
    if no_include_option {
        if let Some(repo) = git_repo {
            return self.list_files_git(pkg, &repo, &filter);
        }
    }
    self.list_files_walk(pkg, &filter)
}

/// Returns `Some(git2::Repository)` if found sibling `Cargo.toml` and `.git`
/// directory; otherwise, caller should fall back on full file list.
/// (`CargoResult<Option<git2::Repository>>` restored — the extraction
/// stripped the generic arguments.)
fn discover_git_repo(&self, root: &Path) -> CargoResult<Option<git2::Repository>> {
    let repo = match git2::Repository::discover(root) {
        Ok(repo) => repo,
        Err(e) => {
            log::debug!(
                "could not discover git repo at or above {}: {}",
                root.display(),
                e
            );
            return Ok(None);
        }
    };
    let index = repo
        .index()
        .with_context(|| format!("failed to open git index at {}", repo.path().display()))?;
    let repo_root = repo.workdir().ok_or_else(|| {
        anyhow::format_err!(
            "did not expect repo at {} to be bare",
            repo.path().display()
        )
    })?;
    let repo_relative_path = match paths::strip_prefix_canonical(root, repo_root) {
        Ok(p) => p,
        Err(e) => {
            log::warn!(
                "cannot determine if path `{:?}` is in git repo `{:?}`: {:?}",
                root,
                repo_root,
                e
            );
            return Ok(None);
        }
    };
    let manifest_path = repo_relative_path.join("Cargo.toml");
    if index.get_path(&manifest_path, 0).is_some() {
        return Ok(Some(repo));
    }
    // Package Cargo.toml is not in git, don't use git to guide our selection.
    Ok(None)
}

fn list_files_git(
    &self,
    pkg: &Package,
    repo: &git2::Repository,
    filter: &dyn Fn(&Path, bool) -> bool,
) -> CargoResult<Vec<PathBuf>> {
    warn!("list_files_git {}", pkg.package_id());
    let index = repo.index()?;
    let root = repo
        .workdir()
        .ok_or_else(|| anyhow::format_err!("can't list files on a bare repository"))?;
    let pkg_path = pkg.root();

    // `Vec::<PathBuf>::new()` restored — the extraction stripped the turbofish.
    let mut ret = Vec::<PathBuf>::new();

    // We use information from the Git repository to guide us in traversing
    // its tree. The primary purpose of this is to take advantage of the
    // `.gitignore` and auto-ignore files that don't matter.
    //
    // Here we're also careful to look at both tracked and untracked files as
    // the untracked files are often part of a build and may become relevant
    // as part of a future commit.
let index_files = index.iter().map(|entry| { use libgit2_sys::{GIT_FILEMODE_COMMIT, GIT_FILEMODE_LINK}; // ``is_dir`` is an optimization to avoid calling // ``fs::metadata`` on every file. let is_dir = if entry.mode == GIT_FILEMODE_LINK as u32 { // Let the code below figure out if this symbolic link points // to a directory or not. None } else { Some(entry.mode == GIT_FILEMODE_COMMIT as u32) }; (join(root, &entry.path), is_dir) }); let mut opts = git2::StatusOptions::new(); opts.include_untracked(true); if let Ok(suffix) = pkg_path.strip_prefix(root) { opts.pathspec(suffix); } let statuses = repo.statuses(Some(&mut opts))?; let mut skip_paths = HashSet::new(); let untracked: Vec<_> = statuses .iter() .filter_map(|entry| { match entry.status() { // Don't include Cargo.lock if it is untracked. Packaging will // generate a new one as needed. git2::Status::WT_NEW if entry.path() != Some("Cargo.lock") => { Some(Ok((join(root, entry.path_bytes()), None))) } git2::Status::WT_DELETED => { let path = match join(root, entry.path_bytes()) { Ok(p) => p, Err(e) => return Some(Err(e)), }; skip_paths.insert(path); None } _ => None, } }) .collect::>()?; let mut subpackages_found = Vec::new(); for (file_path, is_dir) in index_files.chain(untracked) { let file_path = file_path?; if skip_paths.contains(&file_path) { continue; } // Filter out files blatantly outside this package. This is helped a // bit above via the `pathspec` function call, but we need to filter // the entries in the index as well. if !file_path.starts_with(pkg_path) { continue; } match file_path.file_name().and_then(|s| s.to_str()) { // The `target` directory is never included. Some("target") => continue, // Keep track of all sub-packages found and also strip out all // matches we've found so far. Note, though, that if we find // our own `Cargo.toml`, we keep going. 
Some("Cargo.toml") => { let path = file_path.parent().unwrap(); if path != pkg_path { warn!("subpackage found: {}", path.display()); ret.retain(|p| !p.starts_with(path)); subpackages_found.push(path.to_path_buf()); continue; } } _ => {} } // If this file is part of any other sub-package we've found so far, // skip it. if subpackages_found.iter().any(|p| file_path.starts_with(p)) { continue; } // `is_dir` is None for symlinks. The `unwrap` checks if the // symlink points to a directory. let is_dir = is_dir.unwrap_or_else(|| file_path.is_dir()); if is_dir { warn!(" found submodule {}", file_path.display()); let rel = file_path.strip_prefix(root)?; let rel = rel.to_str().ok_or_else(|| { anyhow::format_err!("invalid utf-8 filename: {}", rel.display()) })?; // Git submodules are currently only named through `/` path // separators, explicitly not `\` which windows uses. Who knew? let rel = rel.replace(r"\", "/"); match repo.find_submodule(&rel).and_then(|s| s.open()) { Ok(repo) => { let files = self.list_files_git(pkg, &repo, filter)?; ret.extend(files.into_iter()); } Err(..) => { self.walk(&file_path, &mut ret, false, filter)?; } } } else if filter(&file_path, is_dir) { assert!(!is_dir); // We found a file! 
warn!(" found {}", file_path.display()); ret.push(file_path); } } return Ok(ret); #[cfg(unix)] fn join(path: &Path, data: &[u8]) -> CargoResult { use std::ffi::OsStr; use std::os::unix::prelude::*; Ok(path.join(::from_bytes(data))) } #[cfg(windows)] fn join(path: &Path, data: &[u8]) -> CargoResult { use std::str; match str::from_utf8(data) { Ok(s) => Ok(path.join(s)), Err(e) => Err(anyhow::format_err!( "cannot process path in git with a non utf8 filename: {}\n{:?}", e, data )), } } } fn list_files_walk( &self, pkg: &Package, filter: &dyn Fn(&Path, bool) -> bool, ) -> CargoResult> { let mut ret = Vec::new(); self.walk(pkg.root(), &mut ret, true, filter)?; Ok(ret) } fn walk( &self, path: &Path, ret: &mut Vec, is_root: bool, filter: &dyn Fn(&Path, bool) -> bool, ) -> CargoResult<()> { let walkdir = WalkDir::new(path) .follow_links(true) .into_iter() .filter_entry(|entry| { let path = entry.path(); let at_root = is_root && entry.depth() == 0; let is_dir = entry.file_type().is_dir(); if !at_root && !filter(path, is_dir) { return false; } if !is_dir { return true; } // Don't recurse into any sub-packages that we have. if !at_root && path.join("Cargo.toml").exists() { return false; } // Skip root Cargo artifacts. if is_root && entry.depth() == 1 && path.file_name().and_then(|s| s.to_str()) == Some("target") { return false; } true }); for entry in walkdir { match entry { Ok(entry) => { if !entry.file_type().is_dir() { ret.push(entry.into_path()); } } Err(err) if err.loop_ancestor().is_some() => { self.config.shell().warn(err)?; } Err(err) => match err.path() { // If an error occurs with a path, filter it again. // If it is excluded, Just ignore it in this case. // See issue rust-lang/cargo#10917 Some(path) if !filter(path, path.is_dir()) => {} // Otherwise, simply recover from it. // Don't worry about error skipping here, the callers would // still hit the IO error if they do access it thereafter. 
Some(path) => ret.push(path.to_path_buf()), None => return Err(err.into()), }, } } Ok(()) } pub fn last_modified_file(&self, pkg: &Package) -> CargoResult<(FileTime, PathBuf)> { if !self.updated { return Err(internal(format!( "BUG: source `{:?}` was not updated", self.path ))); } let mut max = FileTime::zero(); let mut max_path = PathBuf::new(); for file in self.list_files(pkg).with_context(|| { format!( "failed to determine the most recently modified file in {}", pkg.root().display() ) })? { // An `fs::stat` error here is either because path is a // broken symlink, a permissions error, or a race // condition where this path was `rm`-ed -- either way, // we can ignore the error and treat the path's `mtime` // as `0`. let mtime = paths::mtime(&file).unwrap_or_else(|_| FileTime::zero()); if mtime > max { max = mtime; max_path = file; } } trace!("last modified file {}: {}", self.path.display(), max); Ok((max, max_path)) } pub fn path(&self) -> &Path { &self.path } pub fn update(&mut self) -> CargoResult<()> { if !self.updated { let packages = self.read_packages()?; self.packages.extend(packages.into_iter()); self.updated = true; } Ok(()) } } impl<'cfg> Debug for PathSource<'cfg> { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { write!(f, "the paths source") } } impl<'cfg> Source for PathSource<'cfg> { fn query( &mut self, dep: &Dependency, kind: QueryKind, f: &mut dyn FnMut(Summary), ) -> Poll> { self.update()?; for s in self.packages.iter().map(|p| p.summary()) { let matched = match kind { QueryKind::Exact => dep.matches(s), QueryKind::Fuzzy => true, }; if matched { f(s.clone()) } } Poll::Ready(Ok(())) } fn supports_checksums(&self) -> bool { false } fn requires_precise(&self) -> bool { false } fn source_id(&self) -> SourceId { self.source_id } fn download(&mut self, id: PackageId) -> CargoResult { trace!("getting packages; id={}", id); self.update()?; let pkg = self.packages.iter().find(|pkg| pkg.package_id() == id); pkg.cloned() .map(MaybePackage::Ready) 
.ok_or_else(|| internal(format!("failed to find {} in path source", id))) } fn finish_download(&mut self, _id: PackageId, _data: Vec) -> CargoResult { panic!("no download should have started") } fn fingerprint(&self, pkg: &Package) -> CargoResult { let (max, max_path) = self.last_modified_file(pkg)?; // Note that we try to strip the prefix of this package to get a // relative path to ensure that the fingerprint remains consistent // across entire project directory renames. let max_path = max_path.strip_prefix(&self.path).unwrap_or(&max_path); Ok(format!("{} ({})", max, max_path.display())) } fn describe(&self) -> String { match self.source_id.url().to_file_path() { Ok(path) => path.display().to_string(), Err(_) => self.source_id.to_string(), } } fn add_to_yanked_whitelist(&mut self, _pkgs: &[PackageId]) {} fn is_yanked(&mut self, _pkg: PackageId) -> Poll> { Poll::Ready(Ok(false)) } fn block_until_ready(&mut self) -> CargoResult<()> { self.update() } fn invalidate_cache(&mut self) { // Path source has no local cache. 
} } cargo-0.66.0/src/cargo/sources/registry/000077500000000000000000000000001432416201200201065ustar00rootroot00000000000000cargo-0.66.0/src/cargo/sources/registry/download.rs000066400000000000000000000070361432416201200222710ustar00rootroot00000000000000use anyhow::Context; use cargo_util::Sha256; use crate::core::PackageId; use crate::sources::registry::make_dep_prefix; use crate::sources::registry::MaybeLock; use crate::sources::registry::{ RegistryConfig, CHECKSUM_TEMPLATE, CRATE_TEMPLATE, LOWER_PREFIX_TEMPLATE, PREFIX_TEMPLATE, VERSION_TEMPLATE, }; use crate::util::errors::CargoResult; use crate::util::{Config, Filesystem}; use std::fmt::Write as FmtWrite; use std::fs::{self, File, OpenOptions}; use std::io::prelude::*; use std::io::SeekFrom; use std::str; pub(super) fn filename(pkg: PackageId) -> String { format!("{}-{}.crate", pkg.name(), pkg.version()) } pub(super) fn download( cache_path: &Filesystem, config: &Config, pkg: PackageId, checksum: &str, registry_config: RegistryConfig, ) -> CargoResult { let filename = filename(pkg); let path = cache_path.join(&filename); let path = config.assert_package_cache_locked(&path); // Attempt to open a read-only copy first to avoid an exclusive write // lock and also work with read-only filesystems. Note that we check the // length of the file like below to handle interrupted downloads. // // If this fails then we fall through to the exclusive path where we may // have to redownload the file. if let Ok(dst) = File::open(path) { let meta = dst.metadata()?; if meta.len() > 0 { return Ok(MaybeLock::Ready(dst)); } } let mut url = registry_config.dl; if !url.contains(CRATE_TEMPLATE) && !url.contains(VERSION_TEMPLATE) && !url.contains(PREFIX_TEMPLATE) && !url.contains(LOWER_PREFIX_TEMPLATE) && !url.contains(CHECKSUM_TEMPLATE) { // Original format before customizing the download URL was supported. 
write!( url, "/{}/{}/download", pkg.name(), pkg.version().to_string() ) .unwrap(); } else { let prefix = make_dep_prefix(&*pkg.name()); url = url .replace(CRATE_TEMPLATE, &*pkg.name()) .replace(VERSION_TEMPLATE, &pkg.version().to_string()) .replace(PREFIX_TEMPLATE, &prefix) .replace(LOWER_PREFIX_TEMPLATE, &prefix.to_lowercase()) .replace(CHECKSUM_TEMPLATE, checksum); } Ok(MaybeLock::Download { url, descriptor: pkg.to_string(), }) } pub(super) fn finish_download( cache_path: &Filesystem, config: &Config, pkg: PackageId, checksum: &str, data: &[u8], ) -> CargoResult { // Verify what we just downloaded let actual = Sha256::new().update(data).finish_hex(); if actual != checksum { anyhow::bail!("failed to verify the checksum of `{}`", pkg) } let filename = filename(pkg); cache_path.create_dir()?; let path = cache_path.join(&filename); let path = config.assert_package_cache_locked(&path); let mut dst = OpenOptions::new() .create(true) .read(true) .write(true) .open(&path) .with_context(|| format!("failed to open `{}`", path.display()))?; let meta = dst.metadata()?; if meta.len() > 0 { return Ok(dst); } dst.write_all(data)?; dst.seek(SeekFrom::Start(0))?; Ok(dst) } pub(super) fn is_crate_downloaded( cache_path: &Filesystem, config: &Config, pkg: PackageId, ) -> bool { let path = cache_path.join(filename(pkg)); let path = config.assert_package_cache_locked(&path); if let Ok(meta) = fs::metadata(path) { return meta.len() > 0; } false } cargo-0.66.0/src/cargo/sources/registry/http_remote.rs000066400000000000000000000603461432416201200230170ustar00rootroot00000000000000//! Access to a HTTP-based crate registry. //! //! See [`HttpRegistry`] for details. 
use crate::core::{PackageId, SourceId}; use crate::ops; use crate::sources::registry::download; use crate::sources::registry::MaybeLock; use crate::sources::registry::{LoadResponse, RegistryConfig, RegistryData}; use crate::util::errors::CargoResult; use crate::util::{Config, Filesystem, IntoUrl, Progress, ProgressStyle}; use anyhow::Context; use cargo_util::paths; use curl::easy::{HttpVersion, List}; use curl::multi::{EasyHandle, Multi}; use log::{debug, trace}; use std::cell::RefCell; use std::collections::{HashMap, HashSet}; use std::fs::{self, File}; use std::path::{Path, PathBuf}; use std::str; use std::task::Poll; use std::time::Duration; use url::Url; const ETAG: &'static str = "ETag"; const LAST_MODIFIED: &'static str = "Last-Modified"; const UNKNOWN: &'static str = "Unknown"; /// A registry served by the HTTP-based registry API. /// /// This type is primarily accessed through the [`RegistryData`] trait. /// /// `HttpRegistry` implements the HTTP-based registry API outlined in [RFC 2789]. Read the RFC for /// the complete protocol, but _roughly_ the implementation loads each index file (e.g., /// config.json or re/ge/regex) from an HTTP service rather than from a locally cloned git /// repository. The remote service can more or less be a static file server that simply serves the /// contents of the origin git repository. /// /// Implemented naively, this leads to a significant amount of network traffic, as a lookup of any /// index file would need to check with the remote backend if the index file has changed. This /// cost is somewhat mitigated by the use of HTTP conditional fetches (`If-Modified-Since` and /// `If-None-Match` for `ETag`s) which can be efficiently handled by HTTP/2. 
/// /// [RFC 2789]: https://github.com/rust-lang/rfcs/pull/2789 pub struct HttpRegistry<'cfg> { index_path: Filesystem, cache_path: Filesystem, source_id: SourceId, config: &'cfg Config, /// Store the server URL without the protocol prefix (sparse+) url: Url, /// HTTP multi-handle for asynchronous/parallel requests. multi: Multi, /// Has the client requested a cache update? /// /// Only if they have do we double-check the freshness of each locally-stored index file. requested_update: bool, /// State for currently pending index downloads. downloads: Downloads<'cfg>, /// Does the config say that we can use HTTP multiplexing? multiplexing: bool, /// What paths have we already fetched since the last index update? /// /// We do not need to double-check any of these index files since we have already done so. fresh: HashSet, /// Have we started to download any index files? fetch_started: bool, /// Cached registry configuration. registry_config: Option, } /// Helper for downloading crates. pub struct Downloads<'cfg> { /// When a download is started, it is added to this map. The key is a /// "token" (see `Download::token`). It is removed once the download is /// finished. pending: HashMap, /// Set of paths currently being downloaded, mapped to their tokens. /// This should stay in sync with `pending`. pending_ids: HashMap, /// The final result of each download. A pair `(token, result)`. This is a /// temporary holding area, needed because curl can report multiple /// downloads at once, but the main loop (`wait`) is written to only /// handle one at a time. results: HashMap>, /// The next ID to use for creating a token (see `Download::token`). next: usize, /// Progress bar. progress: RefCell>>, /// Number of downloads that have successfully finished. downloads_finished: usize, /// Number of times the caller has requested blocking. This is used for /// an estimate of progress. 
blocking_calls: usize, } struct Download { /// The token for this download, used as the key of the `Downloads::pending` map /// and stored in `EasyHandle` as well. token: usize, /// The path of the package that we're downloading. path: PathBuf, /// Actual downloaded data, updated throughout the lifetime of this download. data: RefCell>, /// ETag or Last-Modified header received from the server (if any). index_version: RefCell>, } struct CompletedDownload { response_code: u32, data: Vec, index_version: String, } impl<'cfg> HttpRegistry<'cfg> { pub fn new( source_id: SourceId, config: &'cfg Config, name: &str, ) -> CargoResult> { if !config.cli_unstable().sparse_registry { anyhow::bail!("usage of sparse registries requires `-Z sparse-registry`"); } let url = source_id.url().as_str(); // Ensure the url ends with a slash so we can concatenate paths. if !url.ends_with('/') { anyhow::bail!("registry url must end in a slash `/`: {url}") } let url = url .trim_start_matches("sparse+") .into_url() .expect("a url with the protocol stripped should still be valid"); Ok(HttpRegistry { index_path: config.registry_index_path().join(name), cache_path: config.registry_cache_path().join(name), source_id, config, url, multi: Multi::new(), multiplexing: false, downloads: Downloads { next: 0, pending: HashMap::new(), pending_ids: HashMap::new(), results: HashMap::new(), progress: RefCell::new(Some(Progress::with_style( "Fetch", ProgressStyle::Indeterminate, config, ))), downloads_finished: 0, blocking_calls: 0, }, fresh: HashSet::new(), requested_update: false, fetch_started: false, registry_config: None, }) } fn handle_http_header(buf: &[u8]) -> Option<(&str, &str)> { if buf.is_empty() { return None; } let buf = std::str::from_utf8(buf).ok()?.trim_end(); // Don't let server sneak extra lines anywhere. 
if buf.contains('\n') { return None; } let (tag, value) = buf.split_once(':')?; let value = value.trim(); Some((tag, value)) } fn start_fetch(&mut self) -> CargoResult<()> { if self.fetch_started { // We only need to run the setup code once. return Ok(()); } self.fetch_started = true; // We've enabled the `http2` feature of `curl` in Cargo, so treat // failures here as fatal as it would indicate a build-time problem. self.multiplexing = self.config.http_config()?.multiplexing.unwrap_or(true); self.multi .pipelining(false, self.multiplexing) .with_context(|| "failed to enable multiplexing/pipelining in curl")?; // let's not flood the server with connections self.multi.set_max_host_connections(2)?; self.config .shell() .status("Updating", self.source_id.display_index())?; Ok(()) } fn handle_completed_downloads(&mut self) -> CargoResult<()> { assert_eq!( self.downloads.pending.len(), self.downloads.pending_ids.len() ); // Collect the results from the Multi handle. let pending = &mut self.downloads.pending; self.multi.messages(|msg| { let token = msg.token().expect("failed to read token"); let (_, handle) = &pending[&token]; let result = match msg.result_for(handle) { Some(result) => result, None => return, // transfer is not yet complete. }; let (download, mut handle) = pending.remove(&token).unwrap(); self.downloads.pending_ids.remove(&download.path).unwrap(); let result = match result { Ok(()) => { self.downloads.downloads_finished += 1; match handle.response_code() { Ok(code) => Ok(CompletedDownload { response_code: code, data: download.data.take(), index_version: download .index_version .take() .unwrap_or_else(|| UNKNOWN.to_string()), }), Err(e) => Err(e), } } Err(e) => Err(e), }; self.downloads.results.insert(download.path, result); }); self.downloads.tick()?; Ok(()) } fn full_url(&self, path: &Path) -> String { // self.url always ends with a slash. 
format!("{}{}", self.url, path.display()) } fn is_fresh(&self, path: &Path) -> bool { if !self.requested_update { trace!( "using local {} as user did not request update", path.display() ); true } else if self.config.cli_unstable().no_index_update { trace!("using local {} in no_index_update mode", path.display()); true } else if self.config.offline() { trace!("using local {} in offline mode", path.display()); true } else if self.fresh.contains(path) { trace!("using local {} as it was already fetched", path.display()); true } else { debug!("checking freshness of {}", path.display()); false } } } impl<'cfg> RegistryData for HttpRegistry<'cfg> { fn prepare(&self) -> CargoResult<()> { Ok(()) } fn index_path(&self) -> &Filesystem { &self.index_path } fn assert_index_locked<'a>(&self, path: &'a Filesystem) -> &'a Path { self.config.assert_package_cache_locked(path) } fn is_updated(&self) -> bool { self.requested_update } fn load( &mut self, _root: &Path, path: &Path, index_version: Option<&str>, ) -> Poll> { trace!("load: {}", path.display()); if let Some(_token) = self.downloads.pending_ids.get(path) { debug!("dependency is still pending: {}", path.display()); return Poll::Pending; } if let Some(index_version) = index_version { trace!( "local cache of {} is available at version `{}`", path.display(), index_version ); if self.is_fresh(path) { return Poll::Ready(Ok(LoadResponse::CacheValid)); } } else if self.fresh.contains(path) { debug!( "cache did not contain previously downloaded file {}", path.display() ); } if let Some(result) = self.downloads.results.remove(path) { let result = result.with_context(|| format!("download of {} failed", path.display()))?; debug!( "index file downloaded with status code {}", result.response_code ); trace!("index file version: {}", result.index_version); if !self.fresh.insert(path.to_path_buf()) { debug!("downloaded the index file `{}` twice", path.display()) } match result.response_code { 200 => {} 304 => { // Not Modified: the data in 
the cache is still the latest. if index_version.is_none() { return Poll::Ready(Err(anyhow::anyhow!( "server said not modified (HTTP 304) when no local cache exists" ))); } return Poll::Ready(Ok(LoadResponse::CacheValid)); } 404 | 410 | 451 => { // The crate was not found or deleted from the registry. return Poll::Ready(Ok(LoadResponse::NotFound)); } code => { return Err(anyhow::anyhow!( "server returned unexpected HTTP status code {} for {}\nbody: {}", code, self.full_url(path), str::from_utf8(&result.data).unwrap_or(""), )) .into(); } } return Poll::Ready(Ok(LoadResponse::Data { raw_data: result.data, index_version: Some(result.index_version), })); } if self.config.offline() { return Poll::Ready(Err(anyhow::anyhow!( "can't download index file from '{}': you are in offline mode (--offline)", self.url ))); } // Looks like we're going to have to do a network request. self.start_fetch()?; // Load the registry config. if self.registry_config.is_none() && path != Path::new("config.json") { match self.config()? { Poll::Ready(_) => {} Poll::Pending => return Poll::Pending, } } let mut handle = ops::http_handle(self.config)?; let full_url = self.full_url(path); debug!("fetch {}", full_url); handle.get(true)?; handle.url(&full_url)?; handle.follow_location(true)?; // Enable HTTP/2 if possible. if self.multiplexing { handle.http_version(HttpVersion::V2)?; } else { handle.http_version(HttpVersion::V11)?; } // This is an option to `libcurl` which indicates that if there's a // bunch of parallel requests to the same host they all wait until the // pipelining status of the host is known. This means that we won't // initiate dozens of connections to crates.io, but rather only one. // Once the main one is opened we realized that pipelining is possible // and multiplexing is possible with static.crates.io. All in all this // reduces the number of connections done to a more manageable state. 
handle.pipewait(true)?; // Make sure we don't send data back if it's the same as we have in the index. let mut headers = List::new(); if let Some(index_version) = index_version { if let Some((key, value)) = index_version.split_once(':') { match key { ETAG => headers.append(&format!("If-None-Match: {}", value.trim()))?, LAST_MODIFIED => { headers.append(&format!("If-Modified-Since: {}", value.trim()))? } _ => debug!("unexpected index version: {}", index_version), } } } handle.http_headers(headers)?; // We're going to have a bunch of downloads all happening "at the same time". // So, we need some way to track what headers/data/responses are for which request. // We do that through this token. Each request (and associated response) gets one. let token = self.downloads.next; self.downloads.next += 1; debug!("downloading {} as {}", path.display(), token); assert_eq!( self.downloads.pending_ids.insert(path.to_path_buf(), token), None, "path queued for download more than once" ); // Each write should go to self.downloads.pending[&token].data. // Since the write function must be 'static, we access downloads through a thread-local. // That thread-local is set up in `block_until_ready` when it calls self.multi.perform, // which is what ultimately calls this method. handle.write_function(move |buf| { trace!("{} - {} bytes of data", token, buf.len()); tls::with(|downloads| { if let Some(downloads) = downloads { downloads.pending[&token] .0 .data .borrow_mut() .extend_from_slice(buf); } }); Ok(buf.len()) })?; // And ditto for the header function. 
handle.header_function(move |buf| { if let Some((tag, value)) = Self::handle_http_header(buf) { let is_etag = tag.eq_ignore_ascii_case(ETAG); let is_lm = tag.eq_ignore_ascii_case(LAST_MODIFIED); if is_etag || is_lm { tls::with(|downloads| { if let Some(downloads) = downloads { let mut index_version = downloads.pending[&token].0.index_version.borrow_mut(); if is_etag { *index_version = Some(format!("{}: {}", ETAG, value)); } else if index_version.is_none() && is_lm { *index_version = Some(format!("{}: {}", LAST_MODIFIED, value)); }; } }) } } true })?; let dl = Download { token, data: RefCell::new(Vec::new()), path: path.to_path_buf(), index_version: RefCell::new(None), }; // Finally add the request we've lined up to the pool of requests that cURL manages. let mut handle = self.multi.add(handle)?; handle.set_token(token)?; self.downloads.pending.insert(dl.token, (dl, handle)); Poll::Pending } fn config(&mut self) -> Poll>> { if self.registry_config.is_some() { return Poll::Ready(Ok(self.registry_config.clone())); } debug!("loading config"); let index_path = self.config.assert_package_cache_locked(&self.index_path); let config_json_path = index_path.join("config.json"); if self.is_fresh(Path::new("config.json")) { match fs::read(&config_json_path) { Ok(raw_data) => match serde_json::from_slice(&raw_data) { Ok(json) => { self.registry_config = Some(json); return Poll::Ready(Ok(self.registry_config.clone())); } Err(e) => log::debug!("failed to decode cached config.json: {}", e), }, Err(e) => log::debug!("failed to read config.json cache: {}", e), } } match self.load(Path::new(""), Path::new("config.json"), None)? 
{ Poll::Ready(LoadResponse::Data { raw_data, index_version: _, }) => { trace!("config loaded"); self.registry_config = Some(serde_json::from_slice(&raw_data)?); if paths::create_dir_all(&config_json_path.parent().unwrap()).is_ok() { if let Err(e) = fs::write(&config_json_path, &raw_data) { log::debug!("failed to write config.json cache: {}", e); } } Poll::Ready(Ok(self.registry_config.clone())) } Poll::Ready(LoadResponse::NotFound) => { Poll::Ready(Err(anyhow::anyhow!("config.json not found in registry"))) } Poll::Ready(LoadResponse::CacheValid) => { panic!("config.json is not stored in the index cache") } Poll::Pending => Poll::Pending, } } fn invalidate_cache(&mut self) { // Actually updating the index is more or less a no-op for this implementation. // All it does is ensure that a subsequent load will double-check files with the // server rather than rely on a locally cached copy of the index files. debug!("invalidated index cache"); self.requested_update = true; } fn download(&mut self, pkg: PackageId, checksum: &str) -> CargoResult { let registry_config = loop { match self.config()? 
{ Poll::Pending => self.block_until_ready()?, Poll::Ready(cfg) => break cfg.unwrap(), } }; download::download( &self.cache_path, &self.config, pkg, checksum, registry_config, ) } fn finish_download( &mut self, pkg: PackageId, checksum: &str, data: &[u8], ) -> CargoResult { download::finish_download(&self.cache_path, &self.config, pkg, checksum, data) } fn is_crate_downloaded(&self, pkg: PackageId) -> bool { download::is_crate_downloaded(&self.cache_path, &self.config, pkg) } fn block_until_ready(&mut self) -> CargoResult<()> { trace!( "block_until_ready: {} transfers pending", self.downloads.pending.len() ); self.downloads.blocking_calls += 1; loop { self.handle_completed_downloads()?; let remaining_in_multi = tls::set(&self.downloads, || { self.multi .perform() .with_context(|| "failed to perform http requests") })?; trace!("{} transfers remaining", remaining_in_multi); if remaining_in_multi == 0 { return Ok(()); } // We have no more replies to provide the caller with, // so we need to wait until cURL has something new for us. let timeout = self .multi .get_timeout()? .unwrap_or_else(|| Duration::new(5, 0)); self.multi .wait(&mut [], timeout) .with_context(|| "failed to wait on curl `Multi`")?; } } } impl<'cfg> Downloads<'cfg> { fn tick(&self) -> CargoResult<()> { let mut progress = self.progress.borrow_mut(); let progress = progress.as_mut().unwrap(); // Since the sparse protocol discovers dependencies as it goes, // it's not possible to get an accurate progress indication. // // As an approximation, we assume that the depth of the dependency graph // is fixed, and base the progress on how many times the caller has asked // for blocking. If there are actually additional dependencies, the progress // bar will get stuck. If there are fewer dependencies, it will disappear // early. It will never go backwards. // // The status text also contains the number of completed & pending requests, which // gives an better indication of forward progress. 
let approximate_tree_depth = 10; progress.tick( self.blocking_calls.min(approximate_tree_depth), approximate_tree_depth + 1, &format!( " {} complete; {} pending", self.downloads_finished, self.pending.len() ), ) } } mod tls { use super::Downloads; use std::cell::Cell; thread_local!(static PTR: Cell = Cell::new(0)); pub(crate) fn with(f: impl FnOnce(Option<&Downloads<'_>>) -> R) -> R { let ptr = PTR.with(|p| p.get()); if ptr == 0 { f(None) } else { // Safety: * `ptr` is only set by `set` below which ensures the type is correct. let ptr = unsafe { &*(ptr as *const Downloads<'_>) }; f(Some(ptr)) } } pub(crate) fn set(dl: &Downloads<'_>, f: impl FnOnce() -> R) -> R { struct Reset<'a, T: Copy>(&'a Cell, T); impl<'a, T: Copy> Drop for Reset<'a, T> { fn drop(&mut self) { self.0.set(self.1); } } PTR.with(|p| { let _reset = Reset(p, p.get()); p.set(dl as *const Downloads<'_> as usize); f() }) } } cargo-0.66.0/src/cargo/sources/registry/index.rs000066400000000000000000001122761432416201200215740ustar00rootroot00000000000000//! Management of the index of a registry source //! //! This module contains management of the index and various operations, such as //! actually parsing the index, looking for crates, etc. This is intended to be //! abstract over remote indices (downloaded via git) and local registry indices //! (which are all just present on the filesystem). //! //! ## Index Performance //! //! One important aspect of the index is that we want to optimize the "happy //! path" as much as possible. Whenever you type `cargo build` Cargo will //! *always* reparse the registry and learn about dependency information. This //! is done because Cargo needs to learn about the upstream crates.io crates //! that you're using and ensure that the preexisting `Cargo.lock` still matches //! the current state of the world. //! //! Consequently, Cargo "null builds" (the index that Cargo adds to each build //! itself) need to be fast when accessing the index. The primary performance //! 
optimization here is to avoid parsing JSON blobs from the registry if we //! don't need them. Most secondary optimizations are centered around removing //! allocations and such, but avoiding parsing JSON is the #1 optimization. //! //! When we get queries from the resolver we're given a `Dependency`. This //! dependency in turn has a version requirement, and with lock files that //! already exist these version requirements are exact version requirements //! `=a.b.c`. This means that we in theory only need to parse one line of JSON //! per query in the registry, the one that matches version `a.b.c`. //! //! The crates.io index, however, is not amenable to this form of query. Instead //! the crates.io index simply is a file where each line is a JSON blob. To //! learn about the versions in each JSON blob we would need to parse the JSON, //! defeating the purpose of trying to parse as little as possible. //! //! > Note that as a small aside even *loading* the JSON from the registry is //! > actually pretty slow. For crates.io and remote registries we don't //! > actually check out the git index on disk because that takes quite some //! > time and is quite large. Instead we use `libgit2` to read the JSON from //! > the raw git objects. This in turn can be slow (aka show up high in //! > profiles) because libgit2 has to do deflate decompression and such. //! //! To solve all these issues a strategy is employed here where Cargo basically //! creates an index into the index. The first time a package is queried about //! (first time being for an entire computer) Cargo will load the contents //! (slowly via libgit2) from the registry. It will then (slowly) parse every //! single line to learn about its versions. Afterwards, however, Cargo will //! emit a new file (a cache) which is amenable for speedily parsing in future //! invocations. //! //! This cache file is currently organized by basically having the semver //! version extracted from each JSON blob. 
That way Cargo can quickly and easily //! parse all versions contained and which JSON blob they're associated with. //! The JSON blob then doesn't actually need to get parsed unless the version is //! parsed. //! //! Altogether the initial measurements of this shows a massive improvement for //! Cargo null build performance. It's expected that the improvements earned //! here will continue to grow over time in the sense that the previous //! implementation (parse all lines each time) actually continues to slow down //! over time as new versions of a crate are published. In any case when first //! implemented a null build of Cargo itself would parse 3700 JSON blobs from //! the registry and load 150 blobs from git. Afterwards it parses 150 JSON //! blobs and loads 0 files git. Removing 200ms or more from Cargo's startup //! time is certainly nothing to sneeze at! //! //! Note that this is just a high-level overview, there's of course lots of //! details like invalidating caches and whatnot which are handled below, but //! hopefully those are more obvious inline in the code itself. use crate::core::dependency::Dependency; use crate::core::{PackageId, SourceId, Summary}; use crate::sources::registry::{LoadResponse, RegistryData, RegistryPackage, INDEX_V_MAX}; use crate::util::interning::InternedString; use crate::util::{internal, CargoResult, Config, Filesystem, OptVersionReq, ToSemver}; use anyhow::bail; use cargo_util::{paths, registry::make_dep_path}; use log::{debug, info}; use semver::Version; use std::collections::{HashMap, HashSet}; use std::fs; use std::io::ErrorKind; use std::path::Path; use std::str; use std::task::Poll; /// Crates.io treats hyphen and underscores as interchangeable, but the index and old Cargo do not. /// Therefore, the index must store uncanonicalized version of the name so old Cargo's can find it. /// This loop tries all possible combinations of switching hyphen and underscores to find the /// uncanonicalized one. 
As all stored inputs have the correct spelling, we start with the spelling /// as-provided. struct UncanonicalizedIter<'s> { input: &'s str, num_hyphen_underscore: u32, hyphen_combination_num: u16, } impl<'s> UncanonicalizedIter<'s> { fn new(input: &'s str) -> Self { let num_hyphen_underscore = input.chars().filter(|&c| c == '_' || c == '-').count() as u32; UncanonicalizedIter { input, num_hyphen_underscore, hyphen_combination_num: 0, } } } impl<'s> Iterator for UncanonicalizedIter<'s> { type Item = String; fn next(&mut self) -> Option { if self.hyphen_combination_num > 0 && self.hyphen_combination_num.trailing_zeros() >= self.num_hyphen_underscore { return None; } let ret = Some( self.input .chars() .scan(0u16, |s, c| { // the check against 15 here's to prevent // shift overflow on inputs with more than 15 hyphens if (c == '_' || c == '-') && *s <= 15 { let switch = (self.hyphen_combination_num & (1u16 << *s)) > 0; let out = if (c == '_') ^ switch { '_' } else { '-' }; *s += 1; Some(out) } else { Some(c) } }) .collect(), ); self.hyphen_combination_num += 1; ret } } #[test] fn no_hyphen() { assert_eq!( UncanonicalizedIter::new("test").collect::>(), vec!["test".to_string()] ) } #[test] fn two_hyphen() { assert_eq!( UncanonicalizedIter::new("te-_st").collect::>(), vec![ "te-_st".to_string(), "te__st".to_string(), "te--st".to_string(), "te_-st".to_string() ] ) } #[test] fn overflow_hyphen() { assert_eq!( UncanonicalizedIter::new("te-_-_-_-_-_-_-_-_-st") .take(100) .count(), 100 ) } /// Manager for handling the on-disk index. /// /// Note that local and remote registries store the index differently. Local /// is a simple on-disk tree of files of the raw index. Remote registries are /// stored as a raw git repository. The different means of access are handled /// via the [`RegistryData`] trait abstraction. /// /// This transparently handles caching of the index in a more efficient format. 
pub struct RegistryIndex<'cfg> { source_id: SourceId, /// Root directory of the index for the registry. path: Filesystem, /// Cache of summary data. /// /// This is keyed off the package name. The [`Summaries`] value handles /// loading the summary data. It keeps an optimized on-disk representation /// of the JSON files, which is created in an as-needed fashion. If it /// hasn't been cached already, it uses [`RegistryData::load`] to access /// to JSON files from the index, and the creates the optimized on-disk /// summary cache. summaries_cache: HashMap, /// [`Config`] reference for convenience. config: &'cfg Config, } /// An internal cache of summaries for a particular package. /// /// A list of summaries are loaded from disk via one of two methods: /// /// 1. Primarily Cargo will parse the corresponding file for a crate in the /// upstream crates.io registry. That's just a JSON blob per line which we /// can parse, extract the version, and then store here. /// /// 2. Alternatively, if Cargo has previously run, we'll have a cached index of /// dependencies for the upstream index. This is a file that Cargo maintains /// lazily on the local filesystem and is much faster to parse since it /// doesn't involve parsing all of the JSON. /// /// The outward-facing interface of this doesn't matter too much where it's /// loaded from, but it's important when reading the implementation to note that /// we try to parse as little as possible! #[derive(Default)] struct Summaries { /// A raw vector of uninterpreted bytes. This is what `Unparsed` start/end /// fields are indexes into. If a `Summaries` is loaded from the crates.io /// index then this field will be empty since nothing is `Unparsed`. raw_data: Vec, /// All known versions of a crate, keyed from their `Version` to the /// possibly parsed or unparsed version of the full summary. versions: HashMap, } /// A lazily parsed `IndexSummary`. 
enum MaybeIndexSummary { /// A summary which has not been parsed, The `start` and `end` are pointers /// into `Summaries::raw_data` which this is an entry of. Unparsed { start: usize, end: usize }, /// An actually parsed summary. Parsed(IndexSummary), } /// A parsed representation of a summary from the index. /// /// In addition to a full `Summary` we have information on whether it is `yanked`. pub struct IndexSummary { pub summary: Summary, pub yanked: bool, /// Schema version, see [`RegistryPackage`]. v: u32, } /// A representation of the cache on disk that Cargo maintains of summaries. /// Cargo will initially parse all summaries in the registry and will then /// serialize that into this form and place it in a new location on disk, /// ensuring that access in the future is much speedier. #[derive(Default)] struct SummariesCache<'a> { versions: Vec<(Version, &'a [u8])>, index_version: &'a str, } impl<'cfg> RegistryIndex<'cfg> { pub fn new( source_id: SourceId, path: &Filesystem, config: &'cfg Config, ) -> RegistryIndex<'cfg> { RegistryIndex { source_id, path: path.clone(), summaries_cache: HashMap::new(), config, } } /// Returns the hash listed for a specified `PackageId`. pub fn hash(&mut self, pkg: PackageId, load: &mut dyn RegistryData) -> Poll> { let req = OptVersionReq::exact(pkg.version()); let summary = self.summaries(pkg.name(), &req, load)?; let summary = match summary { Poll::Ready(mut summary) => summary.next(), Poll::Pending => return Poll::Pending, }; Poll::Ready(Ok(summary .ok_or_else(|| internal(format!("no hash listed for {}", pkg)))? .summary .checksum() .ok_or_else(|| internal(format!("no hash listed for {}", pkg)))?)) } /// Load a list of summaries for `name` package in this registry which /// match `req` /// /// This function will semantically parse the on-disk index, match all /// versions, and then return an iterator over all summaries which matched. 
/// Internally there's quite a few layer of caching to amortize this cost /// though since this method is called quite a lot on null builds in Cargo. pub fn summaries<'a, 'b>( &'a mut self, name: InternedString, req: &'b OptVersionReq, load: &mut dyn RegistryData, ) -> Poll + 'b>> where 'a: 'b, { let source_id = self.source_id; let config = self.config; // First up actually parse what summaries we have available. If Cargo // has run previously this will parse a Cargo-specific cache file rather // than the registry itself. In effect this is intended to be a quite // cheap operation. let summaries = match self.load_summaries(name, load)? { Poll::Ready(summaries) => summaries, Poll::Pending => return Poll::Pending, }; // Iterate over our summaries, extract all relevant ones which match our // version requirement, and then parse all corresponding rows in the // registry. As a reminder this `summaries` method is called for each // entry in a lock file on every build, so we want to absolutely // minimize the amount of work being done here and parse as little as // necessary. let raw_data = &summaries.raw_data; Poll::Ready(Ok(summaries .versions .iter_mut() .filter_map(move |(k, v)| if req.matches(k) { Some(v) } else { None }) .filter_map( move |maybe| match maybe.parse(config, raw_data, source_id) { Ok(summary) => Some(summary), Err(e) => { info!("failed to parse `{}` registry package: {}", name, e); None } }, ) .filter(move |is| { if is.v > INDEX_V_MAX { debug!( "unsupported schema version {} ({} {})", is.v, is.summary.name(), is.summary.version() ); false } else { true } }))) } fn load_summaries( &mut self, name: InternedString, load: &mut dyn RegistryData, ) -> Poll> { // If we've previously loaded what versions are present for `name`, just // return that since our cache should still be valid. 
if self.summaries_cache.contains_key(&name) { return Poll::Ready(Ok(self.summaries_cache.get_mut(&name).unwrap())); } // Prepare the `RegistryData` which will lazily initialize internal data // structures. load.prepare()?; let root = load.assert_index_locked(&self.path); let cache_root = root.join(".cache"); // See module comment in `registry/mod.rs` for why this is structured // the way it is. let fs_name = name .chars() .flat_map(|c| c.to_lowercase()) .collect::(); let raw_path = make_dep_path(&fs_name, false); let mut any_pending = false; // Attempt to handle misspellings by searching for a chain of related // names to the original `raw_path` name. Only return summaries // associated with the first hit, however. The resolver will later // reject any candidates that have the wrong name, and with this it'll // along the way produce helpful "did you mean?" suggestions. for (i, path) in UncanonicalizedIter::new(&raw_path).take(1024).enumerate() { let summaries = Summaries::parse( root, &cache_root, path.as_ref(), self.source_id, load, self.config, )?; if summaries.is_pending() { if i == 0 { // If we have not herd back about the name as requested // then don't ask about other spellings yet. // This prevents us spamming all the variations in the // case where we have the correct spelling. return Poll::Pending; } any_pending = true; } if let Poll::Ready(Some(summaries)) = summaries { self.summaries_cache.insert(name, summaries); return Poll::Ready(Ok(self.summaries_cache.get_mut(&name).unwrap())); } } if any_pending { return Poll::Pending; } // If nothing was found then this crate doesn't exists, so just use an // empty `Summaries` list. self.summaries_cache.insert(name, Summaries::default()); Poll::Ready(Ok(self.summaries_cache.get_mut(&name).unwrap())) } /// Clears the in-memory summaries cache. 
pub fn clear_summaries_cache(&mut self) { self.summaries_cache.clear(); } pub fn query_inner( &mut self, dep: &Dependency, load: &mut dyn RegistryData, yanked_whitelist: &HashSet, f: &mut dyn FnMut(Summary), ) -> Poll> { if self.config.offline() { match self.query_inner_with_online(dep, load, yanked_whitelist, f, false)? { Poll::Ready(0) => {} Poll::Ready(_) => return Poll::Ready(Ok(())), Poll::Pending => return Poll::Pending, } // If offline, and there are no matches, try again with online. // This is necessary for dependencies that are not used (such as // target-cfg or optional), but are not downloaded. Normally the // build should succeed if they are not downloaded and not used, // but they still need to resolve. If they are actually needed // then cargo will fail to download and an error message // indicating that the required dependency is unavailable while // offline will be displayed. } self.query_inner_with_online(dep, load, yanked_whitelist, f, true) .map_ok(|_| ()) } fn query_inner_with_online( &mut self, dep: &Dependency, load: &mut dyn RegistryData, yanked_whitelist: &HashSet, f: &mut dyn FnMut(Summary), online: bool, ) -> Poll> { let source_id = self.source_id; let summaries = match self.summaries(dep.package_name(), dep.version_req(), load)? { Poll::Ready(summaries) => summaries, Poll::Pending => return Poll::Pending, }; let summaries = summaries // First filter summaries for `--offline`. If we're online then // everything is a candidate, otherwise if we're offline we're only // going to consider candidates which are actually present on disk. // // Note: This particular logic can cause problems with // optional dependencies when offline. If at least 1 version // of an optional dependency is downloaded, but that version // does not satisfy the requirements, then resolution will // fail. Unfortunately, whether or not something is optional // is not known here. 
.filter(|s| (online || load.is_crate_downloaded(s.summary.package_id()))) // Next filter out all yanked packages. Some yanked packages may // leak throguh if they're in a whitelist (aka if they were // previously in `Cargo.lock` .filter(|s| !s.yanked || yanked_whitelist.contains(&s.summary.package_id())) .map(|s| s.summary.clone()); // Handle `cargo update --precise` here. If specified, our own source // will have a precise version listed of the form // `=o->` where `` is the name of a crate on // this source, `` is the version installed and ` is the // version requested (argument to `--precise`). let name = dep.package_name().as_str(); let precise = match source_id.precise() { Some(p) if p.starts_with(name) && p[name.len()..].starts_with('=') => { let mut vers = p[name.len() + 1..].splitn(2, "->"); let current_vers = vers.next().unwrap().to_semver().unwrap(); let requested_vers = vers.next().unwrap().to_semver().unwrap(); Some((current_vers, requested_vers)) } _ => None, }; let summaries = summaries.filter(|s| match &precise { Some((current, requested)) => { if dep.version_req().matches(current) { // Unfortunately crates.io allows versions to differ only // by build metadata. This shouldn't be allowed, but since // it is, this will honor it if requested. However, if not // specified, then ignore it. let s_vers = s.version(); match (s_vers.build.is_empty(), requested.build.is_empty()) { (true, true) => s_vers == requested, (true, false) => false, (false, true) => { // Strip out the metadata. 
s_vers.major == requested.major && s_vers.minor == requested.minor && s_vers.patch == requested.patch && s_vers.pre == requested.pre } (false, false) => s_vers == requested, } } else { true } } None => true, }); let mut count = 0; for summary in summaries { f(summary); count += 1; } Poll::Ready(Ok(count)) } pub fn is_yanked( &mut self, pkg: PackageId, load: &mut dyn RegistryData, ) -> Poll> { let req = OptVersionReq::exact(pkg.version()); let found = self .summaries(pkg.name(), &req, load) .map_ok(|mut p| p.any(|summary| summary.yanked)); found } } impl Summaries { /// Parse out a `Summaries` instances from on-disk state. /// /// This will attempt to prefer parsing a previous cache file that already /// exists from a previous invocation of Cargo (aka you're typing `cargo /// build` again after typing it previously). If parsing fails or the cache /// isn't found, then we take a slower path which loads the full descriptor /// for `relative` from the underlying index (aka typically libgit2 with /// crates.io) and then parse everything in there. /// /// * `root` - this is the root argument passed to `load` /// * `cache_root` - this is the root on the filesystem itself of where to /// store cache files. /// * `relative` - this is the file we're loading from cache or the index /// data /// * `source_id` - the registry's SourceId used when parsing JSON blobs to /// create summaries. /// * `load` - the actual index implementation which may be very slow to /// call. We avoid this if we can. pub fn parse( root: &Path, cache_root: &Path, relative: &Path, source_id: SourceId, load: &mut dyn RegistryData, config: &Config, ) -> Poll>> { // First up, attempt to load the cache. This could fail for all manner // of reasons, but consider all of them non-fatal and just log their // occurrence in case anyone is debugging anything. 
let cache_path = cache_root.join(relative); let mut cached_summaries = None; let mut index_version = None; match fs::read(&cache_path) { Ok(contents) => match Summaries::parse_cache(contents) { Ok((s, v)) => { cached_summaries = Some(s); index_version = Some(v); } Err(e) => { log::debug!("failed to parse {:?} cache: {}", relative, e); } }, Err(e) => log::debug!("cache missing for {:?} error: {}", relative, e), } let response = match load.load(root, relative, index_version.as_deref())? { Poll::Pending => return Poll::Pending, Poll::Ready(response) => response, }; match response { LoadResponse::CacheValid => { log::debug!("fast path for registry cache of {:?}", relative); return Poll::Ready(Ok(cached_summaries)); } LoadResponse::NotFound => { debug_assert!(cached_summaries.is_none()); if let Err(e) = fs::remove_file(cache_path) { if e.kind() != ErrorKind::NotFound { log::debug!("failed to remove from cache: {}", e); } } return Poll::Ready(Ok(None)); } LoadResponse::Data { raw_data, index_version, } => { // This is the fallback path where we actually talk to the registry backend to load // information. Here we parse every single line in the index (as we need // to find the versions) log::debug!("slow path for {:?}", relative); let mut cache = SummariesCache::default(); let mut ret = Summaries::default(); ret.raw_data = raw_data; for line in split(&ret.raw_data, b'\n') { // Attempt forwards-compatibility on the index by ignoring // everything that we ourselves don't understand, that should // allow future cargo implementations to break the // interpretation of each line here and older cargo will simply // ignore the new lines. let summary = match IndexSummary::parse(config, line, source_id) { Ok(summary) => summary, Err(e) => { // This should only happen when there is an index // entry from a future version of cargo that this // version doesn't understand. 
Hopefully, those future // versions of cargo correctly set INDEX_V_MAX and // CURRENT_CACHE_VERSION, otherwise this will skip // entries in the cache preventing those newer // versions from reading them (that is, until the // cache is rebuilt). log::info!("failed to parse {:?} registry package: {}", relative, e); continue; } }; let version = summary.summary.package_id().version().clone(); cache.versions.push((version.clone(), line)); ret.versions.insert(version, summary.into()); } if let Some(index_version) = index_version { log::trace!("caching index_version {}", index_version); let cache_bytes = cache.serialize(index_version.as_str()); // Once we have our `cache_bytes` which represents the `Summaries` we're // about to return, write that back out to disk so future Cargo // invocations can use it. // // This is opportunistic so we ignore failure here but are sure to log // something in case of error. if paths::create_dir_all(cache_path.parent().unwrap()).is_ok() { let path = Filesystem::new(cache_path.clone()); config.assert_package_cache_locked(&path); if let Err(e) = fs::write(cache_path, &cache_bytes) { log::info!("failed to write cache: {}", e); } } // If we've got debug assertions enabled read back in the cached values // and assert they match the expected result. #[cfg(debug_assertions)] { let readback = SummariesCache::parse(&cache_bytes) .expect("failed to parse cache we just wrote"); assert_eq!( readback.index_version, index_version, "index_version mismatch" ); assert_eq!(readback.versions, cache.versions, "versions mismatch"); } } Poll::Ready(Ok(Some(ret))) } } } /// Parses an open `File` which represents information previously cached by /// Cargo. 
pub fn parse_cache(contents: Vec) -> CargoResult<(Summaries, InternedString)> { let cache = SummariesCache::parse(&contents)?; let index_version = InternedString::new(cache.index_version); let mut ret = Summaries::default(); for (version, summary) in cache.versions { let (start, end) = subslice_bounds(&contents, summary); ret.versions .insert(version, MaybeIndexSummary::Unparsed { start, end }); } ret.raw_data = contents; return Ok((ret, index_version)); // Returns the start/end offsets of `inner` with `outer`. Asserts that // `inner` is a subslice of `outer`. fn subslice_bounds(outer: &[u8], inner: &[u8]) -> (usize, usize) { let outer_start = outer.as_ptr() as usize; let outer_end = outer_start + outer.len(); let inner_start = inner.as_ptr() as usize; let inner_end = inner_start + inner.len(); assert!(inner_start >= outer_start); assert!(inner_end <= outer_end); (inner_start - outer_start, inner_end - outer_start) } } } // Implementation of serializing/deserializing the cache of summaries on disk. // Currently the format looks like: // // +--------------------+----------------------+-------------+---+ // | cache version byte | index format version | git sha rev | 0 | // +--------------------+----------------------+-------------+---+ // // followed by... // // +----------------+---+------------+---+ // | semver version | 0 | JSON blob | 0 | ... // +----------------+---+------------+---+ // // The idea is that this is a very easy file for Cargo to parse in future // invocations. The read from disk should be quite fast and then afterwards all // we need to know is what versions correspond to which JSON blob. // // The leading version byte is intended to ensure that there's some level of // future compatibility against changes to this cache format so if different // versions of Cargo share the same cache they don't get too confused. The git // sha lets us know when the file needs to be regenerated (it needs regeneration // whenever the index itself updates). 
// // Cache versions: // * `1`: The original version. // * `2`: Added the "index format version" field so that if the index format // changes, different versions of cargo won't get confused reading each // other's caches. // * `3`: Bumped the version to work around an issue where multiple versions of // a package were published that differ only by semver metadata. For // example, openssl-src 110.0.0 and 110.0.0+1.1.0f. Previously, the cache // would be incorrectly populated with two entries, both 110.0.0. After // this, the metadata will be correctly included. This isn't really a format // change, just a version bump to clear the incorrect cache entries. Note: // the index shouldn't allow these, but unfortunately crates.io doesn't // check it. const CURRENT_CACHE_VERSION: u8 = 3; impl<'a> SummariesCache<'a> { fn parse(data: &'a [u8]) -> CargoResult> { // NB: keep this method in sync with `serialize` below let (first_byte, rest) = data .split_first() .ok_or_else(|| anyhow::format_err!("malformed cache"))?; if *first_byte != CURRENT_CACHE_VERSION { bail!("looks like a different Cargo's cache, bailing out"); } let index_v_bytes = rest .get(..4) .ok_or_else(|| anyhow::anyhow!("cache expected 4 bytes for index version"))?; let index_v = u32::from_le_bytes(index_v_bytes.try_into().unwrap()); if index_v != INDEX_V_MAX { bail!( "index format version {} doesn't match the version I know ({})", index_v, INDEX_V_MAX ); } let rest = &rest[4..]; let mut iter = split(rest, 0); let last_index_update = if let Some(update) = iter.next() { str::from_utf8(update)? 
} else { bail!("malformed file"); }; let mut ret = SummariesCache::default(); ret.index_version = last_index_update; while let Some(version) = iter.next() { let version = str::from_utf8(version)?; let version = Version::parse(version)?; let summary = iter.next().unwrap(); ret.versions.push((version, summary)); } Ok(ret) } fn serialize(&self, index_version: &str) -> Vec { // NB: keep this method in sync with `parse` above let size = self .versions .iter() .map(|(_version, data)| (10 + data.len())) .sum(); let mut contents = Vec::with_capacity(size); contents.push(CURRENT_CACHE_VERSION); contents.extend(&u32::to_le_bytes(INDEX_V_MAX)); contents.extend_from_slice(index_version.as_bytes()); contents.push(0); for (version, data) in self.versions.iter() { contents.extend_from_slice(version.to_string().as_bytes()); contents.push(0); contents.extend_from_slice(data); contents.push(0); } contents } } impl MaybeIndexSummary { /// Parses this "maybe a summary" into a `Parsed` for sure variant. /// /// Does nothing if this is already `Parsed`, and otherwise the `raw_data` /// passed in is sliced with the bounds in `Unparsed` and then actually /// parsed. fn parse( &mut self, config: &Config, raw_data: &[u8], source_id: SourceId, ) -> CargoResult<&IndexSummary> { let (start, end) = match self { MaybeIndexSummary::Unparsed { start, end } => (*start, *end), MaybeIndexSummary::Parsed(summary) => return Ok(summary), }; let summary = IndexSummary::parse(config, &raw_data[start..end], source_id)?; *self = MaybeIndexSummary::Parsed(summary); match self { MaybeIndexSummary::Unparsed { .. } => unreachable!(), MaybeIndexSummary::Parsed(summary) => Ok(summary), } } } impl From for MaybeIndexSummary { fn from(summary: IndexSummary) -> MaybeIndexSummary { MaybeIndexSummary::Parsed(summary) } } impl IndexSummary { /// Parses a line from the registry's index file into an `IndexSummary` for /// a package. /// /// The `line` provided is expected to be valid JSON. 
fn parse(config: &Config, line: &[u8], source_id: SourceId) -> CargoResult { // ****CAUTION**** Please be extremely careful with returning errors // from this function. Entries that error are not included in the // index cache, and can cause cargo to get confused when switching // between different versions that understand the index differently. // Make sure to consider the INDEX_V_MAX and CURRENT_CACHE_VERSION // values carefully when making changes here. let RegistryPackage { name, vers, cksum, deps, mut features, features2, yanked, links, v, } = serde_json::from_slice(line)?; let v = v.unwrap_or(1); log::trace!("json parsed registry {}/{}", name, vers); let pkgid = PackageId::new(name, &vers, source_id)?; let deps = deps .into_iter() .map(|dep| dep.into_dep(source_id)) .collect::>>()?; if let Some(features2) = features2 { for (name, values) in features2 { features.entry(name).or_default().extend(values); } } let mut summary = Summary::new(config, pkgid, deps, &features, links)?; summary.set_checksum(cksum); Ok(IndexSummary { summary, yanked: yanked.unwrap_or(false), v, }) } } fn split(haystack: &[u8], needle: u8) -> impl Iterator { struct Split<'a> { haystack: &'a [u8], needle: u8, } impl<'a> Iterator for Split<'a> { type Item = &'a [u8]; fn next(&mut self) -> Option<&'a [u8]> { if self.haystack.is_empty() { return None; } let (ret, remaining) = match memchr::memchr(self.needle, self.haystack) { Some(pos) => (&self.haystack[..pos], &self.haystack[pos + 1..]), None => (self.haystack, &[][..]), }; self.haystack = remaining; Some(ret) } } Split { haystack, needle } } cargo-0.66.0/src/cargo/sources/registry/local.rs000066400000000000000000000116001432416201200215440ustar00rootroot00000000000000use crate::core::PackageId; use crate::sources::registry::{LoadResponse, MaybeLock, RegistryConfig, RegistryData}; use crate::util::errors::CargoResult; use crate::util::{Config, Filesystem}; use cargo_util::{paths, Sha256}; use std::fs::File; use std::io::SeekFrom; use 
std::io::{self, prelude::*}; use std::path::Path; use std::task::Poll; /// A local registry is a registry that lives on the filesystem as a set of /// `.crate` files with an `index` directory in the same format as a remote /// registry. pub struct LocalRegistry<'cfg> { index_path: Filesystem, root: Filesystem, src_path: Filesystem, config: &'cfg Config, updated: bool, } impl<'cfg> LocalRegistry<'cfg> { pub fn new(root: &Path, config: &'cfg Config, name: &str) -> LocalRegistry<'cfg> { LocalRegistry { src_path: config.registry_source_path().join(name), index_path: Filesystem::new(root.join("index")), root: Filesystem::new(root.to_path_buf()), config, updated: false, } } } impl<'cfg> RegistryData for LocalRegistry<'cfg> { fn prepare(&self) -> CargoResult<()> { Ok(()) } fn index_path(&self) -> &Filesystem { &self.index_path } fn assert_index_locked<'a>(&self, path: &'a Filesystem) -> &'a Path { // Note that the `*_unlocked` variant is used here since we're not // modifying the index and it's required to be externally synchronized. path.as_path_unlocked() } fn load( &mut self, root: &Path, path: &Path, _index_version: Option<&str>, ) -> Poll> { if self.updated { let raw_data = match paths::read_bytes(&root.join(path)) { Err(e) if e.downcast_ref::() .map_or(false, |ioe| ioe.kind() == io::ErrorKind::NotFound) => { return Poll::Ready(Ok(LoadResponse::NotFound)); } r => r, }?; Poll::Ready(Ok(LoadResponse::Data { raw_data, index_version: None, })) } else { Poll::Pending } } fn config(&mut self) -> Poll>> { // Local registries don't have configuration for remote APIs or anything // like that Poll::Ready(Ok(None)) } fn block_until_ready(&mut self) -> CargoResult<()> { if self.updated { return Ok(()); } // Nothing to update, we just use what's on disk. Verify it actually // exists though. We don't use any locks as we're just checking whether // these directories exist. 
let root = self.root.clone().into_path_unlocked(); if !root.is_dir() { anyhow::bail!("local registry path is not a directory: {}", root.display()); } let index_path = self.index_path.clone().into_path_unlocked(); if !index_path.is_dir() { anyhow::bail!( "local registry index path is not a directory: {}", index_path.display() ); } self.updated = true; Ok(()) } fn invalidate_cache(&mut self) { // Local registry has no cache - just reads from disk. } fn is_updated(&self) -> bool { self.updated } fn download(&mut self, pkg: PackageId, checksum: &str) -> CargoResult { let crate_file = format!("{}-{}.crate", pkg.name(), pkg.version()); // Note that the usage of `into_path_unlocked` here is because the local // crate files here never change in that we're not the one writing them, // so it's not our responsibility to synchronize access to them. let path = self.root.join(&crate_file).into_path_unlocked(); let mut crate_file = paths::open(&path)?; // If we've already got an unpacked version of this crate, then skip the // checksum below as it is in theory already verified. let dst = format!("{}-{}", pkg.name(), pkg.version()); if self.src_path.join(dst).into_path_unlocked().exists() { return Ok(MaybeLock::Ready(crate_file)); } self.config.shell().status("Unpacking", pkg)?; // We don't actually need to download anything per-se, we just need to // verify the checksum matches the .crate file itself. let actual = Sha256::new().update_file(&crate_file)?.finish_hex(); if actual != checksum { anyhow::bail!("failed to verify the checksum of `{}`", pkg) } crate_file.seek(SeekFrom::Start(0))?; Ok(MaybeLock::Ready(crate_file)) } fn finish_download( &mut self, _pkg: PackageId, _checksum: &str, _data: &[u8], ) -> CargoResult { panic!("this source doesn't download") } } cargo-0.66.0/src/cargo/sources/registry/mod.rs000066400000000000000000001005001432416201200212270ustar00rootroot00000000000000//! A `Source` for registry-based packages. //! //! # What's a Registry? //! //! 
Registries are central locations where packages can be uploaded to, //! discovered, and searched for. The purpose of a registry is to have a //! location that serves as permanent storage for versions of a crate over time. //! //! Compared to git sources, a registry provides many packages as well as many //! versions simultaneously. Git sources can also have commits deleted through //! rebasings where registries cannot have their versions deleted. //! //! # The Index of a Registry //! //! One of the major difficulties with a registry is that hosting so many //! packages may quickly run into performance problems when dealing with //! dependency graphs. It's infeasible for cargo to download the entire contents //! of the registry just to resolve one package's dependencies, for example. As //! a result, cargo needs some efficient method of querying what packages are //! available on a registry, what versions are available, and what the //! dependencies for each version is. //! //! One method of doing so would be having the registry expose an HTTP endpoint //! which can be queried with a list of packages and a response of their //! dependencies and versions is returned. This is somewhat inefficient however //! as we may have to hit the endpoint many times and we may have already //! queried for much of the data locally already (for other packages, for //! example). This also involves inventing a transport format between the //! registry and Cargo itself, so this route was not taken. //! //! Instead, Cargo communicates with registries through a git repository //! referred to as the Index. The Index of a registry is essentially an easily //! query-able version of the registry's database for a list of versions of a //! package as well as a list of dependencies for each version. //! //! Using git to host this index provides a number of benefits: //! //! * The entire index can be stored efficiently locally on disk. This means //! 
that all queries of a registry can happen locally and don't need to touch //! the network. //! //! * Updates of the index are quite efficient. Using git buys incremental //! updates, compressed transmission, etc for free. The index must be updated //! each time we need fresh information from a registry, but this is one //! update of a git repository that probably hasn't changed a whole lot so //! it shouldn't be too expensive. //! //! Additionally, each modification to the index is just appending a line at //! the end of a file (the exact format is described later). This means that //! the commits for an index are quite small and easily applied/compressible. //! //! ## The format of the Index //! //! The index is a store for the list of versions for all packages known, so its //! format on disk is optimized slightly to ensure that `ls registry` doesn't //! produce a list of all packages ever known. The index also wants to ensure //! that there's not a million files which may actually end up hitting //! filesystem limits at some point. To this end, a few decisions were made //! about the format of the registry: //! //! 1. Each crate will have one file corresponding to it. Each version for a //! crate will just be a line in this file. //! 2. There will be two tiers of directories for crate names, under which //! crates corresponding to those tiers will be located. //! //! As an example, this is an example hierarchy of an index: //! //! ```notrust //! . //! β”œβ”€β”€ 3 //! β”‚Β Β  └── u //! β”‚Β Β  └── url //! β”œβ”€β”€ bz //! β”‚Β Β  └── ip //! β”‚Β Β  └── bzip2 //! β”œβ”€β”€ config.json //! β”œβ”€β”€ en //! β”‚Β Β  └── co //! β”‚Β Β  └── encoding //! └── li //! Β Β  β”œβ”€β”€ bg //! Β Β  β”‚Β Β  └── libgit2 //! Β Β  └── nk //! Β Β  └── link-config //! ``` //! //! The root of the index contains a `config.json` file with a few entries //! corresponding to the registry (see [`RegistryConfig`] below). //! //! 
Otherwise, there are three numbered directories (1, 2, 3) for crates with //! names 1, 2, and 3 characters in length. The 1/2 directories simply have the //! crate files underneath them, while the 3 directory is sharded by the first //! letter of the crate name. //! //! Otherwise the top-level directory contains many two-letter directory names, //! each of which has many sub-folders with two letters. At the end of all these //! are the actual crate files themselves. //! //! The purpose of this layout is to hopefully cut down on `ls` sizes as well as //! efficient lookup based on the crate name itself. //! //! ## Crate files //! //! Each file in the index is the history of one crate over time. Each line in //! the file corresponds to one version of a crate, stored in JSON format (see //! the `RegistryPackage` structure below). //! //! As new versions are published, new lines are appended to this file. The only //! modifications to this file that should happen over time are yanks of a //! particular version. //! //! # Downloading Packages //! //! The purpose of the Index was to provide an efficient method to resolve the //! dependency graph for a package. So far we only required one network //! interaction to update the registry's repository (yay!). After resolution has //! been performed, however we need to download the contents of packages so we //! can read the full manifest and build the source code. //! //! To accomplish this, this source's `download` method will make an HTTP //! request per-package requested to download tarballs into a local cache. These //! tarballs will then be unpacked into a destination folder. //! //! Note that because versions uploaded to the registry are frozen forever that //! the HTTP download and unpacking can all be skipped if the version has //! already been downloaded and unpacked. This caching allows us to only //! download a package when absolutely necessary. //! //! # Filesystem Hierarchy //! //! 
Overall, the `$HOME/.cargo` looks like this when talking about the registry: //! //! ```notrust //! # A folder under which all registry metadata is hosted (similar to //! # $HOME/.cargo/git) //! $HOME/.cargo/registry/ //! //! # For each registry that cargo knows about (keyed by hostname + hash) //! # there is a folder which is the checked out version of the index for //! # the registry in this location. Note that this is done so cargo can //! # support multiple registries simultaneously //! index/ //! registry1-/ //! registry2-/ //! ... //! //! # This folder is a cache for all downloaded tarballs from a registry. //! # Once downloaded and verified, a tarball never changes. //! cache/ //! registry1-/-.crate //! ... //! //! # Location in which all tarballs are unpacked. Each tarball is known to //! # be frozen after downloading, so transitively this folder is also //! # frozen once its unpacked (it's never unpacked again) //! src/ //! registry1-/-/... //! ... //! ``` use std::borrow::Cow; use std::collections::BTreeMap; use std::collections::HashSet; use std::fs::{File, OpenOptions}; use std::io::Write; use std::path::{Path, PathBuf}; use std::task::Poll; use anyhow::Context as _; use cargo_util::paths::exclude_from_backups_and_indexing; use flate2::read::GzDecoder; use log::debug; use semver::Version; use serde::Deserialize; use tar::Archive; use crate::core::dependency::{DepKind, Dependency}; use crate::core::source::MaybePackage; use crate::core::{Package, PackageId, QueryKind, Source, SourceId, Summary}; use crate::sources::PathSource; use crate::util::hex; use crate::util::interning::InternedString; use crate::util::into_url::IntoUrl; use crate::util::network::PollExt; use crate::util::{restricted_names, CargoResult, Config, Filesystem, OptVersionReq}; const PACKAGE_SOURCE_LOCK: &str = ".cargo-ok"; pub const CRATES_IO_INDEX: &str = "https://github.com/rust-lang/crates.io-index"; pub const CRATES_IO_HTTP_INDEX: &str = "sparse+https://index.crates.io/"; pub const 
CRATES_IO_REGISTRY: &str = "crates-io"; pub const CRATES_IO_DOMAIN: &str = "crates.io"; const CRATE_TEMPLATE: &str = "{crate}"; const VERSION_TEMPLATE: &str = "{version}"; const PREFIX_TEMPLATE: &str = "{prefix}"; const LOWER_PREFIX_TEMPLATE: &str = "{lowerprefix}"; const CHECKSUM_TEMPLATE: &str = "{sha256-checksum}"; /// A "source" for a local (see `local::LocalRegistry`) or remote (see /// `remote::RemoteRegistry`) registry. /// /// This contains common functionality that is shared between the two registry /// kinds, with the registry-specific logic implemented as part of the /// [`RegistryData`] trait referenced via the `ops` field. pub struct RegistrySource<'cfg> { source_id: SourceId, /// The path where crate files are extracted (`$CARGO_HOME/registry/src/$REG-HASH`). src_path: Filesystem, /// Local reference to [`Config`] for convenience. config: &'cfg Config, /// Abstraction for interfacing to the different registry kinds. ops: Box, /// Interface for managing the on-disk index. index: index::RegistryIndex<'cfg>, /// A set of packages that should be allowed to be used, even if they are /// yanked. /// /// This is populated from the entries in `Cargo.lock` to ensure that /// `cargo update -p somepkg` won't unlock yanked entries in `Cargo.lock`. /// Otherwise, the resolver would think that those entries no longer /// exist, and it would trigger updates to unrelated packages. yanked_whitelist: HashSet, } /// The `config.json` file stored in the index. #[derive(Deserialize, Debug, Clone)] #[serde(rename_all = "kebab-case")] pub struct RegistryConfig { /// Download endpoint for all crates. /// /// The string is a template which will generate the download URL for the /// tarball of a specific version of a crate. The substrings `{crate}` and /// `{version}` will be replaced with the crate's name and version /// respectively. 
The substring `{prefix}` will be replaced with the /// crate's prefix directory name, and the substring `{lowerprefix}` will /// be replaced with the crate's prefix directory name converted to /// lowercase. The substring `{sha256-checksum}` will be replaced with the /// crate's sha256 checksum. /// /// For backwards compatibility, if the string does not contain any /// markers (`{crate}`, `{version}`, `{prefix}`, or ``{lowerprefix}`), it /// will be extended with `/{crate}/{version}/download` to /// support registries like crates.io which were created before the /// templating setup was created. pub dl: String, /// API endpoint for the registry. This is what's actually hit to perform /// operations like yanks, owner modifications, publish new crates, etc. /// If this is None, the registry does not support API commands. pub api: Option, } /// The maximum version of the `v` field in the index this version of cargo /// understands. pub(crate) const INDEX_V_MAX: u32 = 2; /// A single line in the index representing a single version of a package. #[derive(Deserialize)] pub struct RegistryPackage<'a> { name: InternedString, vers: Version, #[serde(borrow)] deps: Vec>, features: BTreeMap>, /// This field contains features with new, extended syntax. Specifically, /// namespaced features (`dep:`) and weak dependencies (`pkg?/feat`). /// /// This is separated from `features` because versions older than 1.19 /// will fail to load due to not being able to parse the new syntax, even /// with a `Cargo.lock` file. features2: Option>>, cksum: String, /// If `true`, Cargo will skip this version when resolving. /// /// This was added in 2014. Everything in the crates.io index has this set /// now, so this probably doesn't need to be an option anymore. yanked: Option, /// Native library name this package links to. /// /// Added early 2018 (see ), /// can be `None` if published before then. links: Option, /// The schema version for this entry. 
/// /// If this is None, it defaults to version 1. Entries with unknown /// versions are ignored. /// /// Version `2` format adds the `features2` field. /// /// This provides a method to safely introduce changes to index entries /// and allow older versions of cargo to ignore newer entries it doesn't /// understand. This is honored as of 1.51, so unfortunately older /// versions will ignore it, and potentially misinterpret version 2 and /// newer entries. /// /// The intent is that versions older than 1.51 will work with a /// pre-existing `Cargo.lock`, but they may not correctly process `cargo /// update` or build a lock from scratch. In that case, cargo may /// incorrectly select a new package that uses a new index format. A /// workaround is to downgrade any packages that are incompatible with the /// `--precise` flag of `cargo update`. v: Option, } #[test] fn escaped_char_in_json() { let _: RegistryPackage<'_> = serde_json::from_str( r#"{"name":"a","vers":"0.0.1","deps":[],"cksum":"bae3","features":{}}"#, ) .unwrap(); let _: RegistryPackage<'_> = serde_json::from_str( r#"{"name":"a","vers":"0.0.1","deps":[],"cksum":"bae3","features":{"test":["k","q"]},"links":"a-sys"}"# ).unwrap(); // Now we add escaped cher all the places they can go // these are not valid, but it should error later than json parsing let _: RegistryPackage<'_> = serde_json::from_str( r#"{ "name":"This name has a escaped cher in it \n\t\" ", "vers":"0.0.1", "deps":[{ "name": " \n\t\" ", "req": " \n\t\" ", "features": [" \n\t\" "], "optional": true, "default_features": true, "target": " \n\t\" ", "kind": " \n\t\" ", "registry": " \n\t\" " }], "cksum":"bae3", "features":{"test \n\t\" ":["k \n\t\" ","q \n\t\" "]}, "links":" \n\t\" "}"#, ) .unwrap(); } /// A dependency as encoded in the index JSON. 
#[derive(Deserialize)] struct RegistryDependency<'a> { name: InternedString, #[serde(borrow)] req: Cow<'a, str>, features: Vec, optional: bool, default_features: bool, target: Option>, kind: Option>, registry: Option>, package: Option, public: Option, } impl<'a> RegistryDependency<'a> { /// Converts an encoded dependency in the registry to a cargo dependency pub fn into_dep(self, default: SourceId) -> CargoResult { let RegistryDependency { name, req, mut features, optional, default_features, target, kind, registry, package, public, } = self; let id = if let Some(registry) = ®istry { SourceId::for_registry(®istry.into_url()?)? } else { default }; let mut dep = Dependency::parse(package.unwrap_or(name), Some(&req), id)?; if package.is_some() { dep.set_explicit_name_in_toml(name); } let kind = match kind.as_deref().unwrap_or("") { "dev" => DepKind::Development, "build" => DepKind::Build, _ => DepKind::Normal, }; let platform = match target { Some(target) => Some(target.parse()?), None => None, }; // All dependencies are private by default let public = public.unwrap_or(false); // Unfortunately older versions of cargo and/or the registry ended up // publishing lots of entries where the features array contained the // empty feature, "", inside. This confuses the resolution process much // later on and these features aren't actually valid, so filter them all // out here. features.retain(|s| !s.is_empty()); // In index, "registry" is null if it is from the same index. // In Cargo.toml, "registry" is None if it is from the default if !id.is_default_registry() { dep.set_registry_id(id); } dep.set_optional(optional) .set_default_features(default_features) .set_features(features) .set_platform(platform) .set_kind(kind) .set_public(public); Ok(dep) } } pub enum LoadResponse { /// The cache is valid. The cached data should be used. CacheValid, /// The cache is out of date. Returned data should be used. 
Data { raw_data: Vec, index_version: Option, }, /// The requested crate was found. NotFound, } /// An abstract interface to handle both a local (see `local::LocalRegistry`) /// and remote (see `remote::RemoteRegistry`) registry. /// /// This allows [`RegistrySource`] to abstractly handle both registry kinds. pub trait RegistryData { /// Performs initialization for the registry. /// /// This should be safe to call multiple times, the implementation is /// expected to not do any work if it is already prepared. fn prepare(&self) -> CargoResult<()>; /// Returns the path to the index. /// /// Note that different registries store the index in different formats /// (remote=git, local=files). fn index_path(&self) -> &Filesystem; /// Loads the JSON for a specific named package from the index. /// /// * `root` is the root path to the index. /// * `path` is the relative path to the package to load (like `ca/rg/cargo`). /// * `index_version` is the version of the requested crate data currently in cache. fn load( &mut self, root: &Path, path: &Path, index_version: Option<&str>, ) -> Poll>; /// Loads the `config.json` file and returns it. /// /// Local registries don't have a config, and return `None`. fn config(&mut self) -> Poll>>; /// Invalidates locally cached data. fn invalidate_cache(&mut self); /// Is the local cached data up-to-date? fn is_updated(&self) -> bool; /// Prepare to start downloading a `.crate` file. /// /// Despite the name, this doesn't actually download anything. If the /// `.crate` is already downloaded, then it returns [`MaybeLock::Ready`]. /// If it hasn't been downloaded, then it returns [`MaybeLock::Download`] /// which contains the URL to download. The [`crate::core::package::Downloads`] /// system handles the actual download process. After downloading, it /// calls [`Self::finish_download`] to save the downloaded file. 
/// /// `checksum` is currently only used by local registries to verify the /// file contents (because local registries never actually download /// anything). Remote registries will validate the checksum in /// `finish_download`. For already downloaded `.crate` files, it does not /// validate the checksum, assuming the filesystem does not suffer from /// corruption or manipulation. fn download(&mut self, pkg: PackageId, checksum: &str) -> CargoResult; /// Finish a download by saving a `.crate` file to disk. /// /// After [`crate::core::package::Downloads`] has finished a download, /// it will call this to save the `.crate` file. This is only relevant /// for remote registries. This should validate the checksum and save /// the given data to the on-disk cache. /// /// Returns a [`File`] handle to the `.crate` file, positioned at the start. fn finish_download(&mut self, pkg: PackageId, checksum: &str, data: &[u8]) -> CargoResult; /// Returns whether or not the `.crate` file is already downloaded. fn is_crate_downloaded(&self, _pkg: PackageId) -> bool { true } /// Validates that the global package cache lock is held. /// /// Given the [`Filesystem`], this will make sure that the package cache /// lock is held. If not, it will panic. See /// [`Config::acquire_package_cache_lock`] for acquiring the global lock. /// /// Returns the [`Path`] to the [`Filesystem`]. fn assert_index_locked<'a>(&self, path: &'a Filesystem) -> &'a Path; /// Block until all outstanding Poll::Pending requests are Poll::Ready. fn block_until_ready(&mut self) -> CargoResult<()>; } /// The status of [`RegistryData::download`] which indicates if a `.crate` /// file has already been downloaded, or if not then the URL to download. pub enum MaybeLock { /// The `.crate` file is already downloaded. [`File`] is a handle to the /// opened `.crate` file on the filesystem. Ready(File), /// The `.crate` file is not downloaded, here's the URL to download it from. 
/// /// `descriptor` is just a text string to display to the user of what is /// being downloaded. Download { url: String, descriptor: String }, } mod download; mod http_remote; mod index; mod local; mod remote; fn short_name(id: SourceId) -> String { let hash = hex::short_hash(&id); let ident = id.url().host_str().unwrap_or("").to_string(); format!("{}-{}", ident, hash) } impl<'cfg> RegistrySource<'cfg> { pub fn remote( source_id: SourceId, yanked_whitelist: &HashSet, config: &'cfg Config, ) -> CargoResult> { let name = short_name(source_id); let ops = if source_id.url().scheme().starts_with("sparse+") { Box::new(http_remote::HttpRegistry::new(source_id, config, &name)?) as Box<_> } else { Box::new(remote::RemoteRegistry::new(source_id, config, &name)) as Box<_> }; Ok(RegistrySource::new( source_id, config, &name, ops, yanked_whitelist, )) } pub fn local( source_id: SourceId, path: &Path, yanked_whitelist: &HashSet, config: &'cfg Config, ) -> RegistrySource<'cfg> { let name = short_name(source_id); let ops = local::LocalRegistry::new(path, config, &name); RegistrySource::new(source_id, config, &name, Box::new(ops), yanked_whitelist) } fn new( source_id: SourceId, config: &'cfg Config, name: &str, ops: Box, yanked_whitelist: &HashSet, ) -> RegistrySource<'cfg> { RegistrySource { src_path: config.registry_source_path().join(name), config, source_id, index: index::RegistryIndex::new(source_id, ops.index_path(), config), yanked_whitelist: yanked_whitelist.clone(), ops, } } /// Decode the configuration stored within the registry. /// /// This requires that the index has been at least checked out. pub fn config(&mut self) -> Poll>> { self.ops.config() } /// Unpacks a downloaded package into a location where it's ready to be /// compiled. /// /// No action is taken if the source looks like it's already unpacked. fn unpack_package(&self, pkg: PackageId, tarball: &File) -> CargoResult { // The `.cargo-ok` file is used to track if the source is already // unpacked. 
let package_dir = format!("{}-{}", pkg.name(), pkg.version()); let dst = self.src_path.join(&package_dir); dst.create_dir()?; let path = dst.join(PACKAGE_SOURCE_LOCK); let path = self.config.assert_package_cache_locked(&path); let unpack_dir = path.parent().unwrap(); if let Ok(meta) = path.metadata() { if meta.len() > 0 { return Ok(unpack_dir.to_path_buf()); } } let gz = GzDecoder::new(tarball); let mut tar = Archive::new(gz); let prefix = unpack_dir.file_name().unwrap(); let parent = unpack_dir.parent().unwrap(); for entry in tar.entries()? { let mut entry = entry.with_context(|| "failed to iterate over archive")?; let entry_path = entry .path() .with_context(|| "failed to read entry path")? .into_owned(); // We're going to unpack this tarball into the global source // directory, but we want to make sure that it doesn't accidentally // (or maliciously) overwrite source code from other crates. Cargo // itself should never generate a tarball that hits this error, and // crates.io should also block uploads with these sorts of tarballs, // but be extra sure by adding a check here as well. if !entry_path.starts_with(prefix) { anyhow::bail!( "invalid tarball downloaded, contains \ a file at {:?} which isn't under {:?}", entry_path, prefix ) } // Unpacking failed let mut result = entry.unpack_in(parent).map_err(anyhow::Error::from); if cfg!(windows) && restricted_names::is_windows_reserved_path(&entry_path) { result = result.with_context(|| { format!( "`{}` appears to contain a reserved Windows path, \ it cannot be extracted on Windows", entry_path.display() ) }); } result .with_context(|| format!("failed to unpack entry at `{}`", entry_path.display()))?; } // The lock file is created after unpacking so we overwrite a lock file // which may have been extracted from the package. 
let mut ok = OpenOptions::new() .create(true) .read(true) .write(true) .open(&path) .with_context(|| format!("failed to open `{}`", path.display()))?; // Write to the lock file to indicate that unpacking was successful. write!(ok, "ok")?; Ok(unpack_dir.to_path_buf()) } fn get_pkg(&mut self, package: PackageId, path: &File) -> CargoResult { let path = self .unpack_package(package, path) .with_context(|| format!("failed to unpack package `{}`", package))?; let mut src = PathSource::new(&path, self.source_id, self.config); src.update()?; let mut pkg = match src.download(package)? { MaybePackage::Ready(pkg) => pkg, MaybePackage::Download { .. } => unreachable!(), }; // After we've loaded the package configure its summary's `checksum` // field with the checksum we know for this `PackageId`. let req = OptVersionReq::exact(package.version()); let summary_with_cksum = self .index .summaries(package.name(), &req, &mut *self.ops)? .expect("a downloaded dep now pending!?") .map(|s| s.summary.clone()) .next() .expect("summary not found"); if let Some(cksum) = summary_with_cksum.checksum() { pkg.manifest_mut() .summary_mut() .set_checksum(cksum.to_string()); } Ok(pkg) } } impl<'cfg> Source for RegistrySource<'cfg> { fn query( &mut self, dep: &Dependency, kind: QueryKind, f: &mut dyn FnMut(Summary), ) -> Poll> { // If this is a precise dependency, then it came from a lock file and in // theory the registry is known to contain this version. If, however, we // come back with no summaries, then our registry may need to be // updated, so we fall back to performing a lazy update. 
if kind == QueryKind::Exact && dep.source_id().precise().is_some() && !self.ops.is_updated() { debug!("attempting query without update"); let mut called = false; let pend = self.index .query_inner(dep, &mut *self.ops, &self.yanked_whitelist, &mut |s| { if dep.matches(&s) { called = true; f(s); } })?; if pend.is_pending() { return Poll::Pending; } if called { return Poll::Ready(Ok(())); } else { debug!("falling back to an update"); self.invalidate_cache(); return Poll::Pending; } } self.index .query_inner(dep, &mut *self.ops, &self.yanked_whitelist, &mut |s| { let matched = match kind { QueryKind::Exact => dep.matches(&s), QueryKind::Fuzzy => true, }; if matched { f(s); } }) } fn supports_checksums(&self) -> bool { true } fn requires_precise(&self) -> bool { false } fn source_id(&self) -> SourceId { self.source_id } fn invalidate_cache(&mut self) { self.index.clear_summaries_cache(); self.ops.invalidate_cache(); } fn download(&mut self, package: PackageId) -> CargoResult { let hash = loop { match self.index.hash(package, &mut *self.ops)? { Poll::Pending => self.block_until_ready()?, Poll::Ready(hash) => break hash, } }; match self.ops.download(package, hash)? { MaybeLock::Ready(file) => self.get_pkg(package, &file).map(MaybePackage::Ready), MaybeLock::Download { url, descriptor } => { Ok(MaybePackage::Download { url, descriptor }) } } } fn finish_download(&mut self, package: PackageId, data: Vec) -> CargoResult { let hash = loop { match self.index.hash(package, &mut *self.ops)? 
{ Poll::Pending => self.block_until_ready()?, Poll::Ready(hash) => break hash, } }; let file = self.ops.finish_download(package, hash, &data)?; self.get_pkg(package, &file) } fn fingerprint(&self, pkg: &Package) -> CargoResult { Ok(pkg.package_id().version().to_string()) } fn describe(&self) -> String { self.source_id.display_index() } fn add_to_yanked_whitelist(&mut self, pkgs: &[PackageId]) { self.yanked_whitelist.extend(pkgs); } fn is_yanked(&mut self, pkg: PackageId) -> Poll> { self.index.is_yanked(pkg, &mut *self.ops) } fn block_until_ready(&mut self) -> CargoResult<()> { // Before starting to work on the registry, make sure that // `/registry` is marked as excluded from indexing and // backups. Older versions of Cargo didn't do this, so we do it here // regardless of whether `` exists. // // This does not use `create_dir_all_excluded_from_backups_atomic` for // the same reason: we want to exclude it even if the directory already // exists. // // IO errors in creating and marking it are ignored, e.g. in case we're on a // read-only filesystem. 
let registry_base = self.config.registry_base_path(); let _ = registry_base.create_dir(); exclude_from_backups_and_indexing(®istry_base.into_path_unlocked()); self.ops.block_until_ready() } } fn make_dep_prefix(name: &str) -> String { match name.len() { 1 => String::from("1"), 2 => String::from("2"), 3 => format!("3/{}", &name[..1]), _ => format!("{}/{}", &name[0..2], &name[2..4]), } } #[cfg(test)] mod tests { use super::make_dep_prefix; #[test] fn dep_prefix() { assert_eq!(make_dep_prefix("a"), "1"); assert_eq!(make_dep_prefix("ab"), "2"); assert_eq!(make_dep_prefix("abc"), "3/a"); assert_eq!(make_dep_prefix("Abc"), "3/A"); assert_eq!(make_dep_prefix("AbCd"), "Ab/Cd"); assert_eq!(make_dep_prefix("aBcDe"), "aB/cD"); } } cargo-0.66.0/src/cargo/sources/registry/remote.rs000066400000000000000000000321261432416201200217530ustar00rootroot00000000000000use crate::core::{GitReference, PackageId, SourceId}; use crate::sources::git; use crate::sources::registry::download; use crate::sources::registry::MaybeLock; use crate::sources::registry::{LoadResponse, RegistryConfig, RegistryData}; use crate::util::errors::CargoResult; use crate::util::interning::InternedString; use crate::util::{Config, Filesystem}; use anyhow::Context as _; use cargo_util::paths; use lazycell::LazyCell; use log::{debug, trace}; use std::cell::{Cell, Ref, RefCell}; use std::fs::File; use std::mem; use std::path::Path; use std::str; use std::task::Poll; /// A remote registry is a registry that lives at a remote URL (such as /// crates.io). The git index is cloned locally, and `.crate` files are /// downloaded as needed and cached locally. pub struct RemoteRegistry<'cfg> { index_path: Filesystem, /// Path to the cache of `.crate` files (`$CARGO_HOME/registry/path/$REG-HASH`). cache_path: Filesystem, source_id: SourceId, index_git_ref: GitReference, config: &'cfg Config, tree: RefCell>>, repo: LazyCell, head: Cell>, current_sha: Cell>, needs_update: bool, // Does this registry need to be updated? 
updated: bool, // Has this registry been updated this session? } impl<'cfg> RemoteRegistry<'cfg> { pub fn new(source_id: SourceId, config: &'cfg Config, name: &str) -> RemoteRegistry<'cfg> { RemoteRegistry { index_path: config.registry_index_path().join(name), cache_path: config.registry_cache_path().join(name), source_id, config, // TODO: we should probably make this configurable index_git_ref: GitReference::DefaultBranch, tree: RefCell::new(None), repo: LazyCell::new(), head: Cell::new(None), current_sha: Cell::new(None), needs_update: false, updated: false, } } fn repo(&self) -> CargoResult<&git2::Repository> { self.repo.try_borrow_with(|| { let path = self.config.assert_package_cache_locked(&self.index_path); // Fast path without a lock if let Ok(repo) = git2::Repository::open(&path) { trace!("opened a repo without a lock"); return Ok(repo); } // Ok, now we need to lock and try the whole thing over again. trace!("acquiring registry index lock"); match git2::Repository::open(&path) { Ok(repo) => Ok(repo), Err(_) => { drop(paths::remove_dir_all(&path)); paths::create_dir_all(&path)?; // Note that we'd actually prefer to use a bare repository // here as we're not actually going to check anything out. // All versions of Cargo, though, share the same CARGO_HOME, // so for compatibility with older Cargo which *does* do // checkouts we make sure to initialize a new full // repository (not a bare one). // // We should change this to `init_bare` whenever we feel // like enough time has passed or if we change the directory // that the folder is located in, such as by changing the // hash at the end of the directory. // // Note that in the meantime we also skip `init.templatedir` // as it can be misconfigured sometimes or otherwise add // things that we don't want. 
let mut opts = git2::RepositoryInitOptions::new(); opts.external_template(false); Ok(git2::Repository::init_opts(&path, &opts).with_context(|| { format!("failed to initialize index git repository (in {:?})", path) })?) } } }) } fn head(&self) -> CargoResult { if self.head.get().is_none() { let repo = self.repo()?; let oid = self.index_git_ref.resolve(repo)?; self.head.set(Some(oid)); } Ok(self.head.get().unwrap()) } fn tree(&self) -> CargoResult>> { { let tree = self.tree.borrow(); if tree.is_some() { return Ok(Ref::map(tree, |s| s.as_ref().unwrap())); } } let repo = self.repo()?; let commit = repo.find_commit(self.head()?)?; let tree = commit.tree()?; // Unfortunately in libgit2 the tree objects look like they've got a // reference to the repository object which means that a tree cannot // outlive the repository that it came from. Here we want to cache this // tree, though, so to accomplish this we transmute it to a static // lifetime. // // Note that we don't actually hand out the static lifetime, instead we // only return a scoped one from this function. Additionally the repo // we loaded from (above) lives as long as this object // (`RemoteRegistry`) so we then just need to ensure that the tree is // destroyed first in the destructor, hence the destructor on // `RemoteRegistry` below. 
let tree = unsafe { mem::transmute::, git2::Tree<'static>>(tree) }; *self.tree.borrow_mut() = Some(tree); Ok(Ref::map(self.tree.borrow(), |s| s.as_ref().unwrap())) } fn current_version(&self) -> Option { if let Some(sha) = self.current_sha.get() { return Some(sha); } let sha = InternedString::new(&self.head().ok()?.to_string()); self.current_sha.set(Some(sha)); Some(sha) } } const LAST_UPDATED_FILE: &str = ".last-updated"; impl<'cfg> RegistryData for RemoteRegistry<'cfg> { fn prepare(&self) -> CargoResult<()> { self.repo()?; // create intermediate dirs and initialize the repo Ok(()) } fn index_path(&self) -> &Filesystem { &self.index_path } fn assert_index_locked<'a>(&self, path: &'a Filesystem) -> &'a Path { self.config.assert_package_cache_locked(path) } // `index_version` Is a string representing the version of the file used to construct the cached copy. // Older versions of Cargo used the single value of the hash of the HEAD commit as a `index_version`. // This is technically correct but a little too conservative. If a new commit is fetched all cached // files need to be regenerated even if a particular file was not changed. // However if an old cargo has written such a file we still know how to read it, as long as we check for that hash value. // // Cargo now uses a hash of the file's contents as provided by git. fn load( &mut self, _root: &Path, path: &Path, index_version: Option<&str>, ) -> Poll> { if self.needs_update { return Poll::Pending; } // Check if the cache is valid. let git_commit_hash = self.current_version(); if index_version.is_some() && index_version == git_commit_hash.as_deref() { // This file was written by an old version of cargo, but it is still up-to-date. return Poll::Ready(Ok(LoadResponse::CacheValid)); } // Note that the index calls this method and the filesystem is locked // in the index, so we don't need to worry about an `update_index` // happening in a different process. 
fn load_helper( registry: &RemoteRegistry<'_>, path: &Path, index_version: Option<&str>, ) -> CargoResult { let repo = registry.repo()?; let tree = registry.tree()?; let entry = tree.get_path(path); let entry = entry?; let git_file_hash = Some(entry.id().to_string()); // Check if the cache is valid. if index_version.is_some() && index_version == git_file_hash.as_deref() { return Ok(LoadResponse::CacheValid); } let object = entry.to_object(repo)?; let blob = match object.as_blob() { Some(blob) => blob, None => anyhow::bail!("path `{}` is not a blob in the git repo", path.display()), }; Ok(LoadResponse::Data { raw_data: blob.content().to_vec(), index_version: git_file_hash, }) } match load_helper(&self, path, index_version) { Ok(result) => Poll::Ready(Ok(result)), Err(_) if !self.updated => { // If git returns an error and we haven't updated the repo, return // pending to allow an update to try again. self.needs_update = true; Poll::Pending } Err(e) if e.downcast_ref::() .map(|e| e.code() == git2::ErrorCode::NotFound) .unwrap_or_default() => { // The repo has been updated and the file does not exist. Poll::Ready(Ok(LoadResponse::NotFound)) } Err(e) => Poll::Ready(Err(e)), } } fn config(&mut self) -> Poll>> { debug!("loading config"); self.prepare()?; self.config.assert_package_cache_locked(&self.index_path); match self.load(Path::new(""), Path::new("config.json"), None)? { Poll::Ready(LoadResponse::Data { raw_data, .. }) => { trace!("config loaded"); Poll::Ready(Ok(Some(serde_json::from_slice(&raw_data)?))) } Poll::Ready(_) => Poll::Ready(Ok(None)), Poll::Pending => Poll::Pending, } } fn block_until_ready(&mut self) -> CargoResult<()> { if !self.needs_update { return Ok(()); } self.updated = true; self.needs_update = false; if self.config.offline() { return Ok(()); } if self.config.cli_unstable().no_index_update { return Ok(()); } // Make sure the index is only updated once per session since it is an // expensive operation. 
This generally only happens when the resolver // is run multiple times, such as during `cargo publish`. if self.config.updated_sources().contains(&self.source_id) { return Ok(()); } debug!("updating the index"); // Ensure that we'll actually be able to acquire an HTTP handle later on // once we start trying to download crates. This will weed out any // problems with `.cargo/config` configuration related to HTTP. // // This way if there's a problem the error gets printed before we even // hit the index, which may not actually read this configuration. self.config.http()?; self.prepare()?; self.head.set(None); *self.tree.borrow_mut() = None; self.current_sha.set(None); let path = self.config.assert_package_cache_locked(&self.index_path); self.config .shell() .status("Updating", self.source_id.display_index())?; // Fetch the latest version of our `index_git_ref` into the index // checkout. let url = self.source_id.url(); let repo = self.repo.borrow_mut().unwrap(); git::fetch(repo, url.as_str(), &self.index_git_ref, self.config) .with_context(|| format!("failed to fetch `{}`", url))?; self.config.updated_sources().insert(self.source_id); // Create a dummy file to record the mtime for when we updated the // index. paths::create(&path.join(LAST_UPDATED_FILE))?; Ok(()) } fn invalidate_cache(&mut self) { if !self.updated { self.needs_update = true; } } fn is_updated(&self) -> bool { self.updated } fn download(&mut self, pkg: PackageId, checksum: &str) -> CargoResult { let registry_config = loop { match self.config()? 
{ Poll::Pending => self.block_until_ready()?, Poll::Ready(cfg) => break cfg.unwrap(), } }; download::download( &self.cache_path, &self.config, pkg, checksum, registry_config, ) } fn finish_download( &mut self, pkg: PackageId, checksum: &str, data: &[u8], ) -> CargoResult { download::finish_download(&self.cache_path, &self.config, pkg, checksum, data) } fn is_crate_downloaded(&self, pkg: PackageId) -> bool { download::is_crate_downloaded(&self.cache_path, &self.config, pkg) } } impl<'cfg> Drop for RemoteRegistry<'cfg> { fn drop(&mut self) { // Just be sure to drop this before our other fields self.tree.borrow_mut().take(); } } cargo-0.66.0/src/cargo/sources/replaced.rs000066400000000000000000000072341432416201200203710ustar00rootroot00000000000000use crate::core::source::MaybePackage; use crate::core::{Dependency, Package, PackageId, QueryKind, Source, SourceId, Summary}; use crate::util::errors::CargoResult; use std::task::Poll; use anyhow::Context as _; pub struct ReplacedSource<'cfg> { to_replace: SourceId, replace_with: SourceId, inner: Box, } impl<'cfg> ReplacedSource<'cfg> { pub fn new( to_replace: SourceId, replace_with: SourceId, src: Box, ) -> ReplacedSource<'cfg> { ReplacedSource { to_replace, replace_with, inner: src, } } } impl<'cfg> Source for ReplacedSource<'cfg> { fn source_id(&self) -> SourceId { self.to_replace } fn replaced_source_id(&self) -> SourceId { self.replace_with } fn supports_checksums(&self) -> bool { self.inner.supports_checksums() } fn requires_precise(&self) -> bool { self.inner.requires_precise() } fn query( &mut self, dep: &Dependency, kind: QueryKind, f: &mut dyn FnMut(Summary), ) -> Poll> { let (replace_with, to_replace) = (self.replace_with, self.to_replace); let dep = dep.clone().map_source(to_replace, replace_with); self.inner .query(&dep, kind, &mut |summary| { f(summary.map_source(replace_with, to_replace)) }) .map_err(|e| { e.context(format!( "failed to query replaced source {}", self.to_replace )) }) } fn 
invalidate_cache(&mut self) { self.inner.invalidate_cache() } fn download(&mut self, id: PackageId) -> CargoResult { let id = id.with_source_id(self.replace_with); let pkg = self .inner .download(id) .with_context(|| format!("failed to download replaced source {}", self.to_replace))?; Ok(match pkg { MaybePackage::Ready(pkg) => { MaybePackage::Ready(pkg.map_source(self.replace_with, self.to_replace)) } other @ MaybePackage::Download { .. } => other, }) } fn finish_download(&mut self, id: PackageId, data: Vec) -> CargoResult { let id = id.with_source_id(self.replace_with); let pkg = self .inner .finish_download(id, data) .with_context(|| format!("failed to download replaced source {}", self.to_replace))?; Ok(pkg.map_source(self.replace_with, self.to_replace)) } fn fingerprint(&self, id: &Package) -> CargoResult { self.inner.fingerprint(id) } fn verify(&self, id: PackageId) -> CargoResult<()> { let id = id.with_source_id(self.replace_with); self.inner.verify(id) } fn describe(&self) -> String { format!( "{} (which is replacing {})", self.inner.describe(), self.to_replace ) } fn is_replaced(&self) -> bool { true } fn add_to_yanked_whitelist(&mut self, pkgs: &[PackageId]) { let pkgs = pkgs .iter() .map(|id| id.with_source_id(self.replace_with)) .collect::>(); self.inner.add_to_yanked_whitelist(&pkgs); } fn is_yanked(&mut self, pkg: PackageId) -> Poll> { self.inner.is_yanked(pkg) } fn block_until_ready(&mut self) -> CargoResult<()> { self.inner .block_until_ready() .with_context(|| format!("failed to update replaced source {}", self.to_replace)) } } cargo-0.66.0/src/cargo/util/000077500000000000000000000000001432416201200155305ustar00rootroot00000000000000cargo-0.66.0/src/cargo/util/canonical_url.rs000066400000000000000000000063221432416201200207120ustar00rootroot00000000000000use crate::util::{errors::CargoResult, IntoUrl}; use std::hash::{self, Hash}; use url::Url; /// A newtype wrapper around `Url` which represents a "canonical" version of an /// original URL. 
/// /// A "canonical" url is only intended for internal comparison purposes in /// Cargo. It's to help paper over mistakes such as depending on /// `github.com/foo/bar` vs `github.com/foo/bar.git`. This is **only** for /// internal purposes within Cargo and provides no means to actually read the /// underlying string value of the `Url` it contains. This is intentional, /// because all fetching should still happen within the context of the original /// URL. #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)] pub struct CanonicalUrl(Url); impl CanonicalUrl { pub fn new(url: &Url) -> CargoResult { let mut url = url.clone(); // cannot-be-a-base-urls (e.g., `github.com:rust-lang/rustfmt.git`) // are not supported. if url.cannot_be_a_base() { anyhow::bail!( "invalid url `{}`: cannot-be-a-base-URLs are not supported", url ) } // Strip a trailing slash. if url.path().ends_with('/') { url.path_segments_mut().unwrap().pop_if_empty(); } // For GitHub URLs specifically, just lower-case everything. GitHub // treats both the same, but they hash differently, and we're gonna be // hashing them. This wants a more general solution, and also we're // almost certainly not using the same case conversion rules that GitHub // does. (See issue #84) if url.host_str() == Some("github.com") { url = format!("https{}", &url[url::Position::AfterScheme..]) .parse() .unwrap(); let path = url.path().to_lowercase(); url.set_path(&path); } // Repos can generally be accessed with or without `.git` extension. let needs_chopping = url.path().ends_with(".git"); if needs_chopping { let last = { let last = url.path_segments().unwrap().next_back().unwrap(); last[..last.len() - 4].to_owned() }; url.path_segments_mut().unwrap().pop().push(&last); } // Ignore the protocol specifier (if any). if url.scheme().starts_with("sparse+") { // NOTE: it is illegal to use set_scheme to change sparse+http(s) to http(s). 
url = url .to_string() .strip_prefix("sparse+") .expect("we just found that prefix") .into_url() .expect("a valid url without a protocol specifier should still be valid"); } Ok(CanonicalUrl(url)) } /// Returns the raw canonicalized URL, although beware that this should /// never be used/displayed/etc, it should only be used for internal data /// structures and hashes and such. pub fn raw_canonicalized_url(&self) -> &Url { &self.0 } } // See comment in `source_id.rs` for why we explicitly use `as_str()` here. impl Hash for CanonicalUrl { fn hash(&self, into: &mut S) { self.0.as_str().hash(into); } } cargo-0.66.0/src/cargo/util/command_prelude.rs000066400000000000000000000666231432416201200212510ustar00rootroot00000000000000use crate::core::compiler::{BuildConfig, MessageFormat, TimingOutput}; use crate::core::resolver::CliFeatures; use crate::core::{Edition, Workspace}; use crate::ops::{CompileFilter, CompileOptions, NewOptions, Packages, VersionControl}; use crate::sources::CRATES_IO_REGISTRY; use crate::util::important_paths::find_root_manifest_for_wd; use crate::util::interning::InternedString; use crate::util::restricted_names::is_glob_pattern; use crate::util::toml::{StringOrVec, TomlProfile}; use crate::util::validate_package_name; use crate::util::{ print_available_benches, print_available_binaries, print_available_examples, print_available_packages, print_available_tests, }; use crate::CargoResult; use anyhow::bail; use cargo_util::paths; use std::ffi::{OsStr, OsString}; use std::path::PathBuf; pub use crate::core::compiler::CompileMode; pub use crate::{CliError, CliResult, Config}; pub use clap::{value_parser, AppSettings, Arg, ArgAction, ArgMatches}; pub type App = clap::Command<'static>; pub trait AppExt: Sized { fn _arg(self, arg: Arg<'static>) -> Self; /// Do not use this method, it is only for backwards compatibility. /// Use `arg_package_spec_no_all` instead. 
fn arg_package_spec( self, package: &'static str, all: &'static str, exclude: &'static str, ) -> Self { self.arg_package_spec_no_all(package, all, exclude) ._arg(flag("all", "Alias for --workspace (deprecated)")) } /// Variant of arg_package_spec that does not include the `--all` flag /// (but does include `--workspace`). Used to avoid confusion with /// historical uses of `--all`. fn arg_package_spec_no_all( self, package: &'static str, all: &'static str, exclude: &'static str, ) -> Self { self.arg_package_spec_simple(package) ._arg(flag("workspace", all)) ._arg(multi_opt("exclude", "SPEC", exclude)) } fn arg_package_spec_simple(self, package: &'static str) -> Self { self._arg(optional_multi_opt("package", "SPEC", package).short('p')) } fn arg_package(self, package: &'static str) -> Self { self._arg( optional_opt("package", package) .short('p') .value_name("SPEC"), ) } fn arg_jobs(self) -> Self { self._arg( opt("jobs", "Number of parallel jobs, defaults to # of CPUs") .short('j') .value_name("N") .allow_hyphen_values(true), ) ._arg(flag( "keep-going", "Do not abort the build as soon as there is an error (unstable)", )) } fn arg_targets_all( self, lib: &'static str, bin: &'static str, bins: &'static str, example: &'static str, examples: &'static str, test: &'static str, tests: &'static str, bench: &'static str, benches: &'static str, all: &'static str, ) -> Self { self.arg_targets_lib_bin_example(lib, bin, bins, example, examples) ._arg(optional_multi_opt("test", "NAME", test)) ._arg(flag("tests", tests)) ._arg(optional_multi_opt("bench", "NAME", bench)) ._arg(flag("benches", benches)) ._arg(flag("all-targets", all)) } fn arg_targets_lib_bin_example( self, lib: &'static str, bin: &'static str, bins: &'static str, example: &'static str, examples: &'static str, ) -> Self { self._arg(flag("lib", lib)) ._arg(optional_multi_opt("bin", "NAME", bin)) ._arg(flag("bins", bins)) ._arg(optional_multi_opt("example", "NAME", example)) ._arg(flag("examples", examples)) } fn 
arg_targets_bins_examples( self, bin: &'static str, bins: &'static str, example: &'static str, examples: &'static str, ) -> Self { self._arg(optional_multi_opt("bin", "NAME", bin)) ._arg(flag("bins", bins)) ._arg(optional_multi_opt("example", "NAME", example)) ._arg(flag("examples", examples)) } fn arg_targets_bin_example(self, bin: &'static str, example: &'static str) -> Self { self._arg(optional_multi_opt("bin", "NAME", bin)) ._arg(optional_multi_opt("example", "NAME", example)) } fn arg_features(self) -> Self { self._arg( multi_opt( "features", "FEATURES", "Space or comma separated list of features to activate", ) .short('F'), ) ._arg(flag("all-features", "Activate all available features")) ._arg(flag( "no-default-features", "Do not activate the `default` feature", )) } fn arg_release(self, release: &'static str) -> Self { self._arg(flag("release", release).short('r')) } fn arg_profile(self, profile: &'static str) -> Self { self._arg(opt("profile", profile).value_name("PROFILE-NAME")) } fn arg_doc(self, doc: &'static str) -> Self { self._arg(flag("doc", doc)) } fn arg_target_triple(self, target: &'static str) -> Self { self._arg(multi_opt("target", "TRIPLE", target)) } fn arg_target_dir(self) -> Self { self._arg( opt("target-dir", "Directory for all generated artifacts").value_name("DIRECTORY"), ) } fn arg_manifest_path(self) -> Self { self._arg(opt("manifest-path", "Path to Cargo.toml").value_name("PATH")) } fn arg_message_format(self) -> Self { self._arg(multi_opt("message-format", "FMT", "Error format")) } fn arg_build_plan(self) -> Self { self._arg(flag( "build-plan", "Output the build plan in JSON (unstable)", )) } fn arg_unit_graph(self) -> Self { self._arg(flag("unit-graph", "Output build graph in JSON (unstable)")) } fn arg_new_opts(self) -> Self { self._arg( opt( "vcs", "Initialize a new repository for the given version \ control system (git, hg, pijul, or fossil) or do not \ initialize any version control at all (none), overriding \ a global 
configuration.", ) .value_name("VCS") .value_parser(["git", "hg", "pijul", "fossil", "none"]), ) ._arg(flag("bin", "Use a binary (application) template [default]")) ._arg(flag("lib", "Use a library template")) ._arg( opt("edition", "Edition to set for the crate generated") .value_parser(Edition::CLI_VALUES) .value_name("YEAR"), ) ._arg( opt( "name", "Set the resulting package name, defaults to the directory name", ) .value_name("NAME"), ) } fn arg_index(self) -> Self { self._arg(opt("index", "Registry index URL to upload the package to").value_name("INDEX")) } fn arg_dry_run(self, dry_run: &'static str) -> Self { self._arg(flag("dry-run", dry_run)) } fn arg_ignore_rust_version(self) -> Self { self._arg(flag( "ignore-rust-version", "Ignore `rust-version` specification in packages", )) } fn arg_future_incompat_report(self) -> Self { self._arg(flag( "future-incompat-report", "Outputs a future incompatibility report at the end of the build", )) } fn arg_quiet(self) -> Self { self._arg(flag("quiet", "Do not print cargo log messages").short('q')) } fn arg_timings(self) -> Self { self._arg( optional_opt( "timings", "Timing output formats (unstable) (comma separated): html, json", ) .value_name("FMTS") .require_equals(true), ) } } impl AppExt for App { fn _arg(self, arg: Arg<'static>) -> Self { self.arg(arg) } } pub fn flag(name: &'static str, help: &'static str) -> Arg<'static> { Arg::new(name) .long(name) .help(help) .action(ArgAction::SetTrue) } pub fn opt(name: &'static str, help: &'static str) -> Arg<'static> { Arg::new(name).long(name).help(help) } pub fn optional_opt(name: &'static str, help: &'static str) -> Arg<'static> { opt(name, help).min_values(0) } pub fn optional_multi_opt( name: &'static str, value_name: &'static str, help: &'static str, ) -> Arg<'static> { opt(name, help) .value_name(value_name) .action(ArgAction::Append) .multiple_values(true) .min_values(0) .number_of_values(1) } pub fn multi_opt(name: &'static str, value_name: &'static str, help: 
&'static str) -> Arg<'static> { opt(name, help) .value_name(value_name) .action(ArgAction::Append) } pub fn subcommand(name: &'static str) -> App { App::new(name) .dont_collapse_args_in_usage(true) .setting(AppSettings::DeriveDisplayOrder) } /// Determines whether or not to gate `--profile` as unstable when resolving it. pub enum ProfileChecking { /// `cargo rustc` historically has allowed "test", "bench", and "check". This /// variant explicitly allows those. LegacyRustc, /// `cargo check` and `cargo fix` historically has allowed "test". This variant /// explicitly allows that on stable. LegacyTestOnly, /// All other commands, which allow any valid custom named profile. Custom, } pub trait ArgMatchesExt { fn value_of_u32(&self, name: &str) -> CargoResult> { let arg = match self._value_of(name) { None => None, Some(arg) => Some(arg.parse::().map_err(|_| { clap::Error::raw( clap::ErrorKind::ValueValidation, format!("Invalid value: could not parse `{}` as a number", arg), ) })?), }; Ok(arg) } fn value_of_i32(&self, name: &str) -> CargoResult> { let arg = match self._value_of(name) { None => None, Some(arg) => Some(arg.parse::().map_err(|_| { clap::Error::raw( clap::ErrorKind::ValueValidation, format!("Invalid value: could not parse `{}` as a number", arg), ) })?), }; Ok(arg) } /// Returns value of the `name` command-line argument as an absolute path fn value_of_path(&self, name: &str, config: &Config) -> Option { self._value_of(name).map(|path| config.cwd().join(path)) } fn root_manifest(&self, config: &Config) -> CargoResult { if let Some(path) = self.value_of_path("manifest-path", config) { // In general, we try to avoid normalizing paths in Cargo, // but in this particular case we need it to fix #3586. 
let path = paths::normalize_path(&path); if !path.ends_with("Cargo.toml") { anyhow::bail!("the manifest-path must be a path to a Cargo.toml file") } if !path.exists() { anyhow::bail!( "manifest path `{}` does not exist", self._value_of("manifest-path").unwrap() ) } return Ok(path); } find_root_manifest_for_wd(config.cwd()) } fn workspace<'a>(&self, config: &'a Config) -> CargoResult> { let root = self.root_manifest(config)?; let mut ws = Workspace::new(&root, config)?; if config.cli_unstable().avoid_dev_deps { ws.set_require_optional_deps(false); } Ok(ws) } fn jobs(&self) -> CargoResult> { self.value_of_i32("jobs") } fn verbose(&self) -> u32 { self._count("verbose") } fn dry_run(&self) -> bool { self.flag("dry-run") } fn keep_going(&self) -> bool { self.flag("keep-going") } fn targets(&self) -> Vec { self._values_of("target") } fn get_profile_name( &self, config: &Config, default: &str, profile_checking: ProfileChecking, ) -> CargoResult { let specified_profile = self._value_of("profile"); // Check for allowed legacy names. // This is an early exit, since it allows combination with `--release`. match (specified_profile, profile_checking) { // `cargo rustc` has legacy handling of these names (Some(name @ ("dev" | "test" | "bench" | "check")), ProfileChecking::LegacyRustc) // `cargo fix` and `cargo check` has legacy handling of this profile name | (Some(name @ "test"), ProfileChecking::LegacyTestOnly) => { if self.flag("release") { config.shell().warn( "the `--release` flag should not be specified with the `--profile` flag\n\ The `--release` flag will be ignored.\n\ This was historically accepted, but will become an error \ in a future release." 
)?; } return Ok(InternedString::new(name)); } _ => {} } let conflict = |flag: &str, equiv: &str, specified: &str| -> anyhow::Error { anyhow::format_err!( "conflicting usage of --profile={} and --{flag}\n\ The `--{flag}` flag is the same as `--profile={equiv}`.\n\ Remove one flag or the other to continue.", specified, flag = flag, equiv = equiv ) }; let name = match (self.flag("release"), self.flag("debug"), specified_profile) { (false, false, None) => default, (true, _, None | Some("release")) => "release", (true, _, Some(name)) => return Err(conflict("release", "release", name)), (_, true, None | Some("dev")) => "dev", (_, true, Some(name)) => return Err(conflict("debug", "dev", name)), // `doc` is separate from all the other reservations because // [profile.doc] was historically allowed, but is deprecated and // has no effect. To avoid potentially breaking projects, it is a // warning in Cargo.toml, but since `--profile` is new, we can // reject it completely here. (_, _, Some("doc")) => { bail!("profile `doc` is reserved and not allowed to be explicitly specified") } (_, _, Some(name)) => { TomlProfile::validate_name(name)?; name } }; Ok(InternedString::new(name)) } fn packages_from_flags(&self) -> CargoResult { Packages::from_flags( // TODO Integrate into 'workspace' self.flag("workspace") || self.flag("all"), self._values_of("exclude"), self._values_of("package"), ) } fn compile_options( &self, config: &Config, mode: CompileMode, workspace: Option<&Workspace<'_>>, profile_checking: ProfileChecking, ) -> CargoResult { let spec = self.packages_from_flags()?; let mut message_format = None; let default_json = MessageFormat::Json { short: false, ansi: false, render_diagnostics: false, }; for fmt in self._values_of("message-format") { for fmt in fmt.split(',') { let fmt = fmt.to_ascii_lowercase(); match fmt.as_str() { "json" => { if message_format.is_some() { bail!("cannot specify two kinds of `message-format` arguments"); } message_format = Some(default_json); } 
"human" => { if message_format.is_some() { bail!("cannot specify two kinds of `message-format` arguments"); } message_format = Some(MessageFormat::Human); } "short" => { if message_format.is_some() { bail!("cannot specify two kinds of `message-format` arguments"); } message_format = Some(MessageFormat::Short); } "json-render-diagnostics" => { if message_format.is_none() { message_format = Some(default_json); } match &mut message_format { Some(MessageFormat::Json { render_diagnostics, .. }) => *render_diagnostics = true, _ => bail!("cannot specify two kinds of `message-format` arguments"), } } "json-diagnostic-short" => { if message_format.is_none() { message_format = Some(default_json); } match &mut message_format { Some(MessageFormat::Json { short, .. }) => *short = true, _ => bail!("cannot specify two kinds of `message-format` arguments"), } } "json-diagnostic-rendered-ansi" => { if message_format.is_none() { message_format = Some(default_json); } match &mut message_format { Some(MessageFormat::Json { ansi, .. 
}) => *ansi = true, _ => bail!("cannot specify two kinds of `message-format` arguments"), } } s => bail!("invalid message format specifier: `{}`", s), } } } let mut build_config = BuildConfig::new( config, self.jobs()?, self.keep_going(), &self.targets(), mode, )?; build_config.message_format = message_format.unwrap_or(MessageFormat::Human); build_config.requested_profile = self.get_profile_name(config, "dev", profile_checking)?; build_config.build_plan = self.flag("build-plan"); build_config.unit_graph = self.flag("unit-graph"); build_config.future_incompat_report = self.flag("future-incompat-report"); if self._contains("timings") { for timing_output in self._values_of("timings") { for timing_output in timing_output.split(',') { let timing_output = timing_output.to_ascii_lowercase(); let timing_output = match timing_output.as_str() { "html" => { config .cli_unstable() .fail_if_stable_opt("--timings=html", 7405)?; TimingOutput::Html } "json" => { config .cli_unstable() .fail_if_stable_opt("--timings=json", 7405)?; TimingOutput::Json } s => bail!("invalid timings output specifier: `{}`", s), }; build_config.timing_outputs.push(timing_output); } } if build_config.timing_outputs.is_empty() { build_config.timing_outputs.push(TimingOutput::Html); } } if build_config.keep_going { config .cli_unstable() .fail_if_stable_opt("--keep-going", 10496)?; } if build_config.build_plan { config .cli_unstable() .fail_if_stable_opt("--build-plan", 5579)?; }; if build_config.unit_graph { config .cli_unstable() .fail_if_stable_opt("--unit-graph", 8002)?; } let opts = CompileOptions { build_config, cli_features: self.cli_features()?, spec, filter: CompileFilter::from_raw_arguments( self.flag("lib"), self._values_of("bin"), self.flag("bins"), self._values_of("test"), self.flag("tests"), self._values_of("example"), self.flag("examples"), self._values_of("bench"), self.flag("benches"), self.flag("all-targets"), ), target_rustdoc_args: None, target_rustc_args: None, 
target_rustc_crate_types: None, local_rustdoc_args: None, rustdoc_document_private_items: false, honor_rust_version: !self.flag("ignore-rust-version"), }; if let Some(ws) = workspace { self.check_optional_opts(ws, &opts)?; } else if self.is_present_with_zero_values("package") { // As for cargo 0.50.0, this won't occur but if someone sneaks in // we can still provide this informative message for them. anyhow::bail!( "\"--package \" requires a SPEC format value, \ which can be any package ID specifier in the dependency graph.\n\ Run `cargo help pkgid` for more information about SPEC format." ) } Ok(opts) } fn cli_features(&self) -> CargoResult { CliFeatures::from_command_line( &self._values_of("features"), self.flag("all-features"), !self.flag("no-default-features"), ) } fn compile_options_for_single_package( &self, config: &Config, mode: CompileMode, workspace: Option<&Workspace<'_>>, profile_checking: ProfileChecking, ) -> CargoResult { let mut compile_opts = self.compile_options(config, mode, workspace, profile_checking)?; let spec = self._values_of("package"); if spec.iter().any(is_glob_pattern) { anyhow::bail!("Glob patterns on package selection are not supported.") } compile_opts.spec = Packages::Packages(spec); Ok(compile_opts) } fn new_options(&self, config: &Config) -> CargoResult { let vcs = self._value_of("vcs").map(|vcs| match vcs { "git" => VersionControl::Git, "hg" => VersionControl::Hg, "pijul" => VersionControl::Pijul, "fossil" => VersionControl::Fossil, "none" => VersionControl::NoVcs, vcs => panic!("Impossible vcs: {:?}", vcs), }); NewOptions::new( vcs, self.flag("bin"), self.flag("lib"), self.value_of_path("path", config).unwrap(), self._value_of("name").map(|s| s.to_string()), self._value_of("edition").map(|s| s.to_string()), self.registry(config)?, ) } fn registry(&self, config: &Config) -> CargoResult> { match self._value_of("registry") { Some(registry) => { validate_package_name(registry, "registry name", "")?; if registry == CRATES_IO_REGISTRY 
{ // If "crates.io" is specified, then we just need to return `None`, // as that will cause cargo to use crates.io. This is required // for the case where a default alternative registry is used // but the user wants to switch back to crates.io for a single // command. Ok(None) } else { Ok(Some(registry.to_string())) } } None => config.default_registry(), } } fn index(&self) -> CargoResult> { let index = self._value_of("index").map(|s| s.to_string()); Ok(index) } fn check_optional_opts( &self, workspace: &Workspace<'_>, compile_opts: &CompileOptions, ) -> CargoResult<()> { if self.is_present_with_zero_values("package") { print_available_packages(workspace)? } if self.is_present_with_zero_values("example") { print_available_examples(workspace, compile_opts)?; } if self.is_present_with_zero_values("bin") { print_available_binaries(workspace, compile_opts)?; } if self.is_present_with_zero_values("bench") { print_available_benches(workspace, compile_opts)?; } if self.is_present_with_zero_values("test") { print_available_tests(workspace, compile_opts)?; } Ok(()) } fn is_present_with_zero_values(&self, name: &str) -> bool { self._contains(name) && self._value_of(name).is_none() } fn flag(&self, name: &str) -> bool; fn _value_of(&self, name: &str) -> Option<&str>; fn _values_of(&self, name: &str) -> Vec; fn _value_of_os(&self, name: &str) -> Option<&OsStr>; fn _values_of_os(&self, name: &str) -> Vec; fn _count(&self, name: &str) -> u32; fn _contains(&self, name: &str) -> bool; } impl<'a> ArgMatchesExt for ArgMatches { fn flag(&self, name: &str) -> bool { ignore_unknown(self.try_get_one::(name)) .copied() .unwrap_or(false) } fn _value_of(&self, name: &str) -> Option<&str> { ignore_unknown(self.try_get_one::(name)).map(String::as_str) } fn _value_of_os(&self, name: &str) -> Option<&OsStr> { ignore_unknown(self.try_get_one::(name)).map(OsString::as_os_str) } fn _values_of(&self, name: &str) -> Vec { ignore_unknown(self.try_get_many::(name)) .unwrap_or_default() .cloned() 
.collect() } fn _values_of_os(&self, name: &str) -> Vec { ignore_unknown(self.try_get_many::(name)) .unwrap_or_default() .cloned() .collect() } fn _count(&self, name: &str) -> u32 { *ignore_unknown(self.try_get_one::(name)).expect("defaulted by clap") as u32 } fn _contains(&self, name: &str) -> bool { ignore_unknown(self.try_contains_id(name)) } } pub fn values(args: &ArgMatches, name: &str) -> Vec { args._values_of(name) } pub fn values_os(args: &ArgMatches, name: &str) -> Vec { args._values_of_os(name) } #[track_caller] fn ignore_unknown(r: Result) -> T { match r { Ok(t) => t, Err(clap::parser::MatchesError::UnknownArgument { .. }) => Default::default(), Err(e) => { panic!("Mismatch between definition and access: {}", e); } } } #[derive(PartialEq, Eq, PartialOrd, Ord)] pub enum CommandInfo { BuiltIn { about: Option }, External { path: PathBuf }, Alias { target: StringOrVec }, } cargo-0.66.0/src/cargo/util/config/000077500000000000000000000000001432416201200167755ustar00rootroot00000000000000cargo-0.66.0/src/cargo/util/config/de.rs000066400000000000000000000436761432416201200177530ustar00rootroot00000000000000//! Support for deserializing configuration via `serde` use crate::util::config::value; use crate::util::config::{Config, ConfigError, ConfigKey}; use crate::util::config::{ConfigValue as CV, Definition, Value}; use serde::{de, de::IntoDeserializer}; use std::collections::HashSet; use std::vec; /// Serde deserializer used to convert config values to a target type using /// `Config::get`. #[derive(Clone)] pub(super) struct Deserializer<'config> { pub(super) config: &'config Config, /// The current key being deserialized. pub(super) key: ConfigKey, /// Whether or not this key part is allowed to be an inner table. For /// example, `profile.dev.build-override` needs to check if /// CARGO_PROFILE_DEV_BUILD_OVERRIDE_ prefixes exist. But /// CARGO_BUILD_TARGET should not check for prefixes because it would /// collide with CARGO_BUILD_TARGET_DIR. 
See `ConfigMapAccess` for /// details. pub(super) env_prefix_ok: bool, } macro_rules! deserialize_method { ($method:ident, $visit:ident, $getter:ident) => { fn $method(self, visitor: V) -> Result where V: de::Visitor<'de>, { let v = self .config .$getter(&self.key)? .ok_or_else(|| ConfigError::missing(&self.key))?; let Value { val, definition } = v; let res: Result = visitor.$visit(val); res.map_err(|e| e.with_key_context(&self.key, definition)) } }; } impl<'de, 'config> de::Deserializer<'de> for Deserializer<'config> { type Error = ConfigError; fn deserialize_any(self, visitor: V) -> Result where V: de::Visitor<'de>, { let cv = self.config.get_cv_with_env(&self.key)?; if let Some(cv) = cv { let res: (Result, Definition) = match cv { CV::Integer(i, def) => (visitor.visit_i64(i), def), CV::String(s, def) => (visitor.visit_string(s), def), CV::List(_, def) => (visitor.visit_seq(ConfigSeqAccess::new(self.clone())?), def), CV::Table(_, def) => ( visitor.visit_map(ConfigMapAccess::new_map(self.clone())?), def, ), CV::Boolean(b, def) => (visitor.visit_bool(b), def), }; let (res, def) = res; return res.map_err(|e| e.with_key_context(&self.key, def)); } Err(ConfigError::missing(&self.key)) } deserialize_method!(deserialize_bool, visit_bool, get_bool); deserialize_method!(deserialize_i8, visit_i64, get_integer); deserialize_method!(deserialize_i16, visit_i64, get_integer); deserialize_method!(deserialize_i32, visit_i64, get_integer); deserialize_method!(deserialize_i64, visit_i64, get_integer); deserialize_method!(deserialize_u8, visit_i64, get_integer); deserialize_method!(deserialize_u16, visit_i64, get_integer); deserialize_method!(deserialize_u32, visit_i64, get_integer); deserialize_method!(deserialize_u64, visit_i64, get_integer); deserialize_method!(deserialize_string, visit_string, get_string_priv); fn deserialize_option(self, visitor: V) -> Result where V: de::Visitor<'de>, { if self.config.has_key(&self.key, self.env_prefix_ok) { visitor.visit_some(self) } else { 
// Treat missing values as `None`. visitor.visit_none() } } fn deserialize_struct( self, name: &'static str, fields: &'static [&'static str], visitor: V, ) -> Result where V: de::Visitor<'de>, { // Match on the magical struct name/field names that are passed in to // detect when we're deserializing `Value`. // // See more comments in `value.rs` for the protocol used here. if name == value::NAME && fields == value::FIELDS { return visitor.visit_map(ValueDeserializer::new(self)?); } visitor.visit_map(ConfigMapAccess::new_struct(self, fields)?) } fn deserialize_map(self, visitor: V) -> Result where V: de::Visitor<'de>, { visitor.visit_map(ConfigMapAccess::new_map(self)?) } fn deserialize_seq(self, visitor: V) -> Result where V: de::Visitor<'de>, { visitor.visit_seq(ConfigSeqAccess::new(self)?) } fn deserialize_tuple(self, _len: usize, visitor: V) -> Result where V: de::Visitor<'de>, { visitor.visit_seq(ConfigSeqAccess::new(self)?) } fn deserialize_tuple_struct( self, _name: &'static str, _len: usize, visitor: V, ) -> Result where V: de::Visitor<'de>, { visitor.visit_seq(ConfigSeqAccess::new(self)?) } fn deserialize_newtype_struct( self, name: &'static str, visitor: V, ) -> Result where V: de::Visitor<'de>, { let merge = if name == "StringList" { true } else if name == "UnmergedStringList" { false } else { return visitor.visit_newtype_struct(self); }; let vals = self.config.get_list_or_string(&self.key, merge)?; let vals: Vec = vals.into_iter().map(|vd| vd.0).collect(); visitor.visit_newtype_struct(vals.into_deserializer()) } fn deserialize_enum( self, _name: &'static str, _variants: &'static [&'static str], visitor: V, ) -> Result where V: de::Visitor<'de>, { let value = self .config .get_string_priv(&self.key)? .ok_or_else(|| ConfigError::missing(&self.key))?; let Value { val, definition } = value; visitor .visit_enum(val.into_deserializer()) .map_err(|e: ConfigError| e.with_key_context(&self.key, definition)) } // These aren't really supported, yet. 
serde::forward_to_deserialize_any! { f32 f64 char str bytes byte_buf unit unit_struct identifier ignored_any } } struct ConfigMapAccess<'config> { de: Deserializer<'config>, /// The fields that this map should deserialize. fields: Vec, /// Current field being deserialized. field_index: usize, } #[derive(Debug, PartialEq, Eq, Hash)] enum KeyKind { Normal(String), CaseSensitive(String), } impl<'config> ConfigMapAccess<'config> { fn new_map(de: Deserializer<'config>) -> Result, ConfigError> { let mut fields = Vec::new(); if let Some(mut v) = de.config.get_table(&de.key)? { // `v: Value>` for (key, _value) in v.val.drain() { fields.push(KeyKind::CaseSensitive(key)); } } if de.config.cli_unstable().advanced_env { // `CARGO_PROFILE_DEV_PACKAGE_` let env_prefix = format!("{}_", de.key.as_env_key()); for env_key in de.config.env.keys() { if env_key.starts_with(&env_prefix) { // `CARGO_PROFILE_DEV_PACKAGE_bar_OPT_LEVEL = 3` let rest = &env_key[env_prefix.len()..]; // `rest = bar_OPT_LEVEL` let part = rest.splitn(2, '_').next().unwrap(); // `part = "bar"` fields.push(KeyKind::CaseSensitive(part.to_string())); } } } Ok(ConfigMapAccess { de, fields, field_index: 0, }) } fn new_struct( de: Deserializer<'config>, given_fields: &'static [&'static str], ) -> Result, ConfigError> { let table = de.config.get_table(&de.key)?; // Assume that if we're deserializing a struct it exhaustively lists all // possible fields on this key that we're *supposed* to use, so take // this opportunity to warn about any keys that aren't recognized as // fields and warn about them. 
if let Some(v) = table.as_ref() { let unused_keys = v .val .iter() .filter(|(k, _v)| !given_fields.iter().any(|gk| gk == k)); for (unused_key, unused_value) in unused_keys { de.config.shell().warn(format!( "unused config key `{}.{}` in `{}`", de.key, unused_key, unused_value.definition() ))?; } } let mut fields = HashSet::new(); // If the caller is interested in a field which we can provide from // the environment, get it from there. for field in given_fields { let mut field_key = de.key.clone(); field_key.push(field); for env_key in de.config.env.keys() { if env_key.starts_with(field_key.as_env_key()) { fields.insert(KeyKind::Normal(field.to_string())); } } } // Add everything from the config table we're interested in that we // haven't already provided via an environment variable if let Some(v) = table { for key in v.val.keys() { fields.insert(KeyKind::Normal(key.clone())); } } Ok(ConfigMapAccess { de, fields: fields.into_iter().collect(), field_index: 0, }) } } impl<'de, 'config> de::MapAccess<'de> for ConfigMapAccess<'config> { type Error = ConfigError; fn next_key_seed(&mut self, seed: K) -> Result, Self::Error> where K: de::DeserializeSeed<'de>, { if self.field_index >= self.fields.len() { return Ok(None); } let field = match &self.fields[self.field_index] { KeyKind::Normal(s) | KeyKind::CaseSensitive(s) => s.as_str(), }; seed.deserialize(field.into_deserializer()).map(Some) } fn next_value_seed(&mut self, seed: V) -> Result where V: de::DeserializeSeed<'de>, { let field = &self.fields[self.field_index]; self.field_index += 1; // Set this as the current key in the deserializer. let field = match field { KeyKind::Normal(field) => { self.de.key.push(field); field } KeyKind::CaseSensitive(field) => { self.de.key.push_sensitive(field); field } }; // Env vars that are a prefix of another with a dash/underscore cannot // be supported by our serde implementation, so check for them here. 
// Example: // CARGO_BUILD_TARGET // CARGO_BUILD_TARGET_DIR // or // CARGO_PROFILE_DEV_DEBUG // CARGO_PROFILE_DEV_DEBUG_ASSERTIONS // The `deserialize_option` method does not know the type of the field. // If the type is an Option (like // `profile.dev.build-override`), then it needs to check for env vars // starting with CARGO_FOO_BAR_. This is a problem for keys like // CARGO_BUILD_TARGET because checking for a prefix would incorrectly // match CARGO_BUILD_TARGET_DIR. `deserialize_option` would have no // choice but to call `visit_some()` which would then fail if // CARGO_BUILD_TARGET isn't set. So we check for these prefixes and // disallow them here. let env_prefix = format!("{}_", field).replace('-', "_"); let env_prefix_ok = !self.fields.iter().any(|field| { let field = match field { KeyKind::Normal(s) | KeyKind::CaseSensitive(s) => s.as_str(), }; field.replace('-', "_").starts_with(&env_prefix) }); let result = seed.deserialize(Deserializer { config: self.de.config, key: self.de.key.clone(), env_prefix_ok, }); self.de.key.pop(); result } } struct ConfigSeqAccess { list_iter: vec::IntoIter<(String, Definition)>, } impl ConfigSeqAccess { fn new(de: Deserializer<'_>) -> Result { let mut res = Vec::new(); if let Some(v) = de.config._get_list(&de.key)? { res.extend(v.val); } de.config.get_env_list(&de.key, &mut res)?; Ok(ConfigSeqAccess { list_iter: res.into_iter(), }) } } impl<'de> de::SeqAccess<'de> for ConfigSeqAccess { type Error = ConfigError; fn next_element_seed(&mut self, seed: T) -> Result, Self::Error> where T: de::DeserializeSeed<'de>, { match self.list_iter.next() { // TODO: add `def` to error? Some((value, _def)) => seed.deserialize(value.into_deserializer()).map(Some), None => Ok(None), } } } /// This is a deserializer that deserializes into a `Value` for /// configuration. /// /// This is a special deserializer because it deserializes one of its struct /// fields into the location that this configuration value was defined in. 
/// /// See more comments in `value.rs` for the protocol used here. struct ValueDeserializer<'config> { hits: u32, definition: Definition, de: Deserializer<'config>, } impl<'config> ValueDeserializer<'config> { fn new(de: Deserializer<'config>) -> Result, ConfigError> { // Figure out where this key is defined. let definition = { let env = de.key.as_env_key(); let env_def = Definition::Environment(env.to_string()); match (de.config.env.contains_key(env), de.config.get_cv(&de.key)?) { (true, Some(cv)) => { // Both, pick highest priority. if env_def.is_higher_priority(cv.definition()) { env_def } else { cv.definition().clone() } } (false, Some(cv)) => cv.definition().clone(), // Assume it is an environment, even if the key is not set. // This can happen for intermediate tables, like // CARGO_FOO_BAR_* where `CARGO_FOO_BAR` is not set. (_, None) => env_def, } }; Ok(ValueDeserializer { hits: 0, definition, de, }) } } impl<'de, 'config> de::MapAccess<'de> for ValueDeserializer<'config> { type Error = ConfigError; fn next_key_seed(&mut self, seed: K) -> Result, Self::Error> where K: de::DeserializeSeed<'de>, { self.hits += 1; match self.hits { 1 => seed .deserialize(value::VALUE_FIELD.into_deserializer()) .map(Some), 2 => seed .deserialize(value::DEFINITION_FIELD.into_deserializer()) .map(Some), _ => Ok(None), } } fn next_value_seed(&mut self, seed: V) -> Result where V: de::DeserializeSeed<'de>, { // If this is the first time around we deserialize the `value` field // which is the actual deserializer if self.hits == 1 { return seed .deserialize(self.de.clone()) .map_err(|e| e.with_key_context(&self.de.key, self.definition.clone())); } // ... otherwise we're deserializing the `definition` field, so we need // to figure out where the field we just deserialized was defined at. 
match &self.definition { Definition::Path(path) => { seed.deserialize(Tuple2Deserializer(0i32, path.to_string_lossy())) } Definition::Environment(env) => { seed.deserialize(Tuple2Deserializer(1i32, env.as_str())) } Definition::Cli => seed.deserialize(Tuple2Deserializer(2i32, "")), } } } /// A deserializer which takes two values and deserializes into a tuple of those /// two values. This is similar to types like `StrDeserializer` in upstream /// serde itself. struct Tuple2Deserializer(T, U); impl<'de, T, U> de::Deserializer<'de> for Tuple2Deserializer where T: IntoDeserializer<'de, ConfigError>, U: IntoDeserializer<'de, ConfigError>, { type Error = ConfigError; fn deserialize_any(self, visitor: V) -> Result where V: de::Visitor<'de>, { struct SeqVisitor { first: Option, second: Option, } impl<'de, T, U> de::SeqAccess<'de> for SeqVisitor where T: IntoDeserializer<'de, ConfigError>, U: IntoDeserializer<'de, ConfigError>, { type Error = ConfigError; fn next_element_seed(&mut self, seed: K) -> Result, Self::Error> where K: de::DeserializeSeed<'de>, { if let Some(first) = self.first.take() { return seed.deserialize(first.into_deserializer()).map(Some); } if let Some(second) = self.second.take() { return seed.deserialize(second.into_deserializer()).map(Some); } Ok(None) } } visitor.visit_seq(SeqVisitor { first: Some(self.0), second: Some(self.1), }) } serde::forward_to_deserialize_any! { bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string seq bytes byte_buf map struct option unit newtype_struct ignored_any unit_struct tuple_struct tuple enum identifier } } cargo-0.66.0/src/cargo/util/config/key.rs000066400000000000000000000100431432416201200201310ustar00rootroot00000000000000use std::borrow::Cow; use std::fmt; /// Key for a configuration variable. /// /// This type represents a configuration variable that we're looking up in /// Cargo's configuration. 
This structure simultaneously keeps track of a /// corresponding environment variable name as well as a TOML config name. The /// intention here is that this is built up and torn down over time efficiently, /// avoiding clones and such as possible. #[derive(Debug, Clone)] pub struct ConfigKey { // The current environment variable this configuration key maps to. This is // updated with `push` methods and looks like `CARGO_FOO_BAR` for pushing // `foo` and then `bar`. env: String, // This is used to keep track of how many sub-keys have been pushed on // this `ConfigKey`. Each element of this vector is a new sub-key pushed // onto this `ConfigKey`. Each element is a pair where the first item is // the key part as a string, and the second item is an index into `env`. // The `env` index is used on `pop` to truncate `env` to rewind back to // the previous `ConfigKey` state before a `push`. parts: Vec<(String, usize)>, } impl ConfigKey { /// Creates a new blank configuration key which is ready to get built up by /// using `push` and `push_sensitive`. pub fn new() -> ConfigKey { ConfigKey { env: "CARGO".to_string(), parts: Vec::new(), } } /// Creates a `ConfigKey` from the `key` specified. /// /// The `key` specified is expected to be a period-separated toml /// configuration key. pub fn from_str(key: &str) -> ConfigKey { let mut cfg = ConfigKey::new(); for part in key.split('.') { cfg.push(part); } cfg } /// Pushes a new sub-key on this `ConfigKey`. This sub-key should be /// equivalent to accessing a sub-table in TOML. /// /// Note that this considers `name` to be case-insensitive, meaning that the /// corrseponding toml key is appended with this `name` as-is and the /// corresponding env key is appended with `name` after transforming it to /// uppercase characters. 
pub fn push(&mut self, name: &str) { let env = name.replace("-", "_").to_uppercase(); self._push(&env, name); } /// Performs the same function as `push` except that the corresponding /// environment variable does not get the uppercase letters of `name` but /// instead `name` is pushed raw onto the corresponding environment /// variable. pub fn push_sensitive(&mut self, name: &str) { self._push(name, name); } fn _push(&mut self, env: &str, config: &str) { self.parts.push((config.to_string(), self.env.len())); self.env.push('_'); self.env.push_str(env); } /// Rewinds this `ConfigKey` back to the state it was at before the last /// `push` method being called. pub fn pop(&mut self) { let (_part, env) = self.parts.pop().unwrap(); self.env.truncate(env); } /// Returns the corresponding environment variable key for this /// configuration value. pub fn as_env_key(&self) -> &str { &self.env } /// Returns an iterator of the key parts as strings. pub(crate) fn parts(&self) -> impl Iterator { self.parts.iter().map(|p| p.0.as_ref()) } /// Returns whether or not this is a key for the root table. pub fn is_root(&self) -> bool { self.parts.is_empty() } } impl fmt::Display for ConfigKey { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let parts: Vec<_> = self.parts().map(|part| escape_key_part(part)).collect(); parts.join(".").fmt(f) } } fn escape_key_part<'a>(part: &'a str) -> Cow<'a, str> { let ok = part.chars().all(|c| { matches!(c, 'a'..='z' | 'A'..='Z' | '0'..='9' | '-' | '_') }); if ok { Cow::Borrowed(part) } else { // This is a bit messy, but toml doesn't expose a function to do this. Cow::Owned(toml_edit::Value::from(part).to_string()) } } cargo-0.66.0/src/cargo/util/config/mod.rs000066400000000000000000002563721432416201200201410ustar00rootroot00000000000000//! Cargo's config system. //! //! The `Config` object contains general information about the environment, //! and provides access to Cargo's configuration files. //! //! ## Config value API //! //! 
The primary API for fetching user-defined config values is the //! `Config::get` method. It uses `serde` to translate config values to a //! target type. //! //! There are a variety of helper types for deserializing some common formats: //! //! - `value::Value`: This type provides access to the location where the //! config value was defined. //! - `ConfigRelativePath`: For a path that is relative to where it is //! defined. //! - `PathAndArgs`: Similar to `ConfigRelativePath`, but also supports a list //! of arguments, useful for programs to execute. //! - `StringList`: Get a value that is either a list or a whitespace split //! string. //! //! ## Map key recommendations //! //! Handling tables that have arbitrary keys can be tricky, particularly if it //! should support environment variables. In general, if possible, the caller //! should pass the full key path into the `get()` method so that the config //! deserializer can properly handle environment variables (which need to be //! uppercased, and dashes converted to underscores). //! //! A good example is the `[target]` table. The code will request //! `target.$TRIPLE` and the config system can then appropriately fetch //! environment variables like `CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_LINKER`. //! Conversely, it is not possible do the same thing for the `cfg()` target //! tables (because Cargo must fetch all of them), so those do not support //! environment variables. //! //! Try to avoid keys that are a prefix of another with a dash/underscore. For //! example `build.target` and `build.target-dir`. This is OK if these are not //! structs/maps, but if it is a struct or map, then it will not be able to //! read the environment variable due to ambiguity. (See `ConfigMapAccess` for //! more details.) //! //! ## Internal API //! //! Internally config values are stored with the `ConfigValue` type after they //! have been loaded from disk. This is similar to the `toml::Value` type, but //! 
includes the definition location. The `get()` method uses serde to //! translate from `ConfigValue` and environment variables to the caller's //! desired type. use std::borrow::Cow; use std::cell::{RefCell, RefMut}; use std::collections::hash_map::Entry::{Occupied, Vacant}; use std::collections::{HashMap, HashSet}; use std::env; use std::ffi::OsStr; use std::fmt; use std::fs::{self, File}; use std::io::prelude::*; use std::io::{self, SeekFrom}; use std::mem; use std::path::{Path, PathBuf}; use std::str::FromStr; use std::sync::Once; use std::time::Instant; use self::ConfigValue as CV; use crate::core::compiler::rustdoc::RustdocExternMap; use crate::core::shell::Verbosity; use crate::core::{features, CliUnstable, Shell, SourceId, Workspace, WorkspaceRootConfig}; use crate::ops; use crate::util::errors::CargoResult; use crate::util::toml as cargo_toml; use crate::util::validate_package_name; use crate::util::{FileLock, Filesystem, IntoUrl, IntoUrlWithBase, Rustc}; use anyhow::{anyhow, bail, format_err, Context as _}; use cargo_util::paths; use curl::easy::Easy; use lazycell::LazyCell; use serde::Deserialize; use toml_edit::{easy as toml, Item}; use url::Url; mod de; use de::Deserializer; mod value; pub use value::{Definition, OptValue, Value}; mod key; pub use key::ConfigKey; mod path; pub use path::{ConfigRelativePath, PathAndArgs}; mod target; pub use target::{TargetCfgConfig, TargetConfig}; // Helper macro for creating typed access methods. macro_rules! get_value_typed { ($name:ident, $ty:ty, $variant:ident, $expected:expr) => { /// Low-level private method for getting a config value as an OptValue. 
fn $name(&self, key: &ConfigKey) -> Result, ConfigError> { let cv = self.get_cv(key)?; let env = self.get_env::<$ty>(key)?; match (cv, env) { (Some(CV::$variant(val, definition)), Some(env)) => { if definition.is_higher_priority(&env.definition) { Ok(Some(Value { val, definition })) } else { Ok(Some(env)) } } (Some(CV::$variant(val, definition)), None) => Ok(Some(Value { val, definition })), (Some(cv), _) => Err(ConfigError::expected(key, $expected, &cv)), (None, Some(env)) => Ok(Some(env)), (None, None) => Ok(None), } } }; } /// Configuration information for cargo. This is not specific to a build, it is information /// relating to cargo itself. #[derive(Debug)] pub struct Config { /// The location of the user's Cargo home directory. OS-dependent. home_path: Filesystem, /// Information about how to write messages to the shell shell: RefCell, /// A collection of configuration options values: LazyCell>, /// CLI config values, passed in via `configure`. cli_config: Option>, /// The current working directory of cargo cwd: PathBuf, /// Directory where config file searching should stop (inclusive). search_stop_path: Option, /// The location of the cargo executable (path to current process) cargo_exe: LazyCell, /// The location of the rustdoc executable rustdoc: LazyCell, /// Whether we are printing extra verbose messages extra_verbose: bool, /// `frozen` is the same as `locked`, but additionally will not access the /// network to determine if the lock file is out-of-date. frozen: bool, /// `locked` is set if we should not update lock files. If the lock file /// is missing, or needs to be updated, an error is produced. locked: bool, /// `offline` is set if we should never access the network, but otherwise /// continue operating if possible. 
offline: bool, /// A global static IPC control mechanism (used for managing parallel builds) jobserver: Option, /// Cli flags of the form "-Z something" merged with config file values unstable_flags: CliUnstable, /// Cli flags of the form "-Z something" unstable_flags_cli: Option>, /// A handle on curl easy mode for http calls easy: LazyCell>, /// Cache of the `SourceId` for crates.io crates_io_source_id: LazyCell, /// If false, don't cache `rustc --version --verbose` invocations cache_rustc_info: bool, /// Creation time of this config, used to output the total build time creation_time: Instant, /// Target Directory via resolved Cli parameter target_dir: Option, /// Environment variables, separated to assist testing. env: HashMap, /// Environment variables, converted to uppercase to check for case mismatch upper_case_env: HashMap, /// Tracks which sources have been updated to avoid multiple updates. updated_sources: LazyCell>>, /// Lock, if held, of the global package cache along with the number of /// acquisitions so far. 
package_cache_lock: RefCell, usize)>>, /// Cached configuration parsed by Cargo http_config: LazyCell, future_incompat_config: LazyCell, net_config: LazyCell, build_config: LazyCell, target_cfgs: LazyCell>, doc_extern_map: LazyCell, progress_config: ProgressConfig, env_config: LazyCell, /// This should be false if: /// - this is an artifact of the rustc distribution process for "stable" or for "beta" /// - this is an `#[test]` that does not opt in with `enable_nightly_features` /// - this is an integration test that uses `ProcessBuilder` /// that does not opt in with `masquerade_as_nightly_cargo` /// This should be true if: /// - this is an artifact of the rustc distribution process for "nightly" /// - this is being used in the rustc distribution process internally /// - this is a cargo executable that was built from source /// - this is an `#[test]` that called `enable_nightly_features` /// - this is an integration test that uses `ProcessBuilder` /// that called `masquerade_as_nightly_cargo` /// It's public to allow tests use nightly features. /// NOTE: this should be set before `configure()`. If calling this from an integration test, /// consider using `ConfigBuilder::enable_nightly_features` instead. pub nightly_features_allowed: bool, /// WorkspaceRootConfigs that have been found pub ws_roots: RefCell>, } impl Config { /// Creates a new config instance. /// /// This is typically used for tests or other special cases. `default` is /// preferred otherwise. /// /// This does only minimal initialization. In particular, it does not load /// any config files from disk. Those will be loaded lazily as-needed. pub fn new(shell: Shell, cwd: PathBuf, homedir: PathBuf) -> Config { static mut GLOBAL_JOBSERVER: *mut jobserver::Client = 0 as *mut _; static INIT: Once = Once::new(); // This should be called early on in the process, so in theory the // unsafety is ok here. 
(taken ownership of random fds) INIT.call_once(|| unsafe { if let Some(client) = jobserver::Client::from_env() { GLOBAL_JOBSERVER = Box::into_raw(Box::new(client)); } }); let env: HashMap<_, _> = env::vars_os() .filter_map(|(k, v)| { // Ignore any key/values that are not valid Unicode. match (k.into_string(), v.into_string()) { (Ok(k), Ok(v)) => Some((k, v)), _ => None, } }) .collect(); let upper_case_env = env .clone() .into_iter() .map(|(k, _)| (k.to_uppercase().replace("-", "_"), k)) .collect(); let cache_rustc_info = match env.get("CARGO_CACHE_RUSTC_INFO") { Some(cache) => cache != "0", _ => true, }; Config { home_path: Filesystem::new(homedir), shell: RefCell::new(shell), cwd, search_stop_path: None, values: LazyCell::new(), cli_config: None, cargo_exe: LazyCell::new(), rustdoc: LazyCell::new(), extra_verbose: false, frozen: false, locked: false, offline: false, jobserver: unsafe { if GLOBAL_JOBSERVER.is_null() { None } else { Some((*GLOBAL_JOBSERVER).clone()) } }, unstable_flags: CliUnstable::default(), unstable_flags_cli: None, easy: LazyCell::new(), crates_io_source_id: LazyCell::new(), cache_rustc_info, creation_time: Instant::now(), target_dir: None, env, upper_case_env, updated_sources: LazyCell::new(), package_cache_lock: RefCell::new(None), http_config: LazyCell::new(), future_incompat_config: LazyCell::new(), net_config: LazyCell::new(), build_config: LazyCell::new(), target_cfgs: LazyCell::new(), doc_extern_map: LazyCell::new(), progress_config: ProgressConfig::default(), env_config: LazyCell::new(), nightly_features_allowed: matches!(&*features::channel(), "nightly" | "dev"), ws_roots: RefCell::new(HashMap::new()), } } /// Creates a new Config instance, with all default settings. /// /// This does only minimal initialization. In particular, it does not load /// any config files from disk. Those will be loaded lazily as-needed. 
pub fn default() -> CargoResult { let shell = Shell::new(); let cwd = env::current_dir() .with_context(|| "couldn't get the current directory of the process")?; let homedir = homedir(&cwd).ok_or_else(|| { anyhow!( "Cargo couldn't find your home directory. \ This probably means that $HOME was not set." ) })?; Ok(Config::new(shell, cwd, homedir)) } /// Gets the user's Cargo home directory (OS-dependent). pub fn home(&self) -> &Filesystem { &self.home_path } /// Gets the Cargo Git directory (`/git`). pub fn git_path(&self) -> Filesystem { self.home_path.join("git") } /// Gets the Cargo base directory for all registry information (`/registry`). pub fn registry_base_path(&self) -> Filesystem { self.home_path.join("registry") } /// Gets the Cargo registry index directory (`/registry/index`). pub fn registry_index_path(&self) -> Filesystem { self.registry_base_path().join("index") } /// Gets the Cargo registry cache directory (`/registry/path`). pub fn registry_cache_path(&self) -> Filesystem { self.registry_base_path().join("cache") } /// Gets the Cargo registry source directory (`/registry/src`). pub fn registry_source_path(&self) -> Filesystem { self.registry_base_path().join("src") } /// Gets the default Cargo registry. pub fn default_registry(&self) -> CargoResult> { Ok(self .get_string("registry.default")? .map(|registry| registry.val)) } /// Gets a reference to the shell, e.g., for writing error messages. pub fn shell(&self) -> RefMut<'_, Shell> { self.shell.borrow_mut() } /// Gets the path to the `rustdoc` executable. pub fn rustdoc(&self) -> CargoResult<&Path> { self.rustdoc .try_borrow_with(|| Ok(self.get_tool("rustdoc", &self.build_config()?.rustdoc))) .map(AsRef::as_ref) } /// Gets the path to the `rustc` executable. 
pub fn load_global_rustc(&self, ws: Option<&Workspace<'_>>) -> CargoResult { let cache_location = ws.map(|ws| { ws.target_dir() .join(".rustc_info.json") .into_path_unlocked() }); let wrapper = self.maybe_get_tool("rustc_wrapper", &self.build_config()?.rustc_wrapper); let rustc_workspace_wrapper = self.maybe_get_tool( "rustc_workspace_wrapper", &self.build_config()?.rustc_workspace_wrapper, ); Rustc::new( self.get_tool("rustc", &self.build_config()?.rustc), wrapper, rustc_workspace_wrapper, &self .home() .join("bin") .join("rustc") .into_path_unlocked() .with_extension(env::consts::EXE_EXTENSION), if self.cache_rustc_info { cache_location } else { None }, ) } /// Gets the path to the `cargo` executable. pub fn cargo_exe(&self) -> CargoResult<&Path> { self.cargo_exe .try_borrow_with(|| { fn from_current_exe() -> CargoResult { // Try fetching the path to `cargo` using `env::current_exe()`. // The method varies per operating system and might fail; in particular, // it depends on `/proc` being mounted on Linux, and some environments // (like containers or chroots) may not have that available. let exe = env::current_exe()?.canonicalize()?; Ok(exe) } fn from_argv() -> CargoResult { // Grab `argv[0]` and attempt to resolve it to an absolute path. // If `argv[0]` has one component, it must have come from a `PATH` lookup, // so probe `PATH` in that case. // Otherwise, it has multiple components and is either: // - a relative path (e.g., `./cargo`, `target/debug/cargo`), or // - an absolute path (e.g., `/usr/local/bin/cargo`). // In either case, `Path::canonicalize` will return the full absolute path // to the target if it exists. 
let argv0 = env::args_os() .map(PathBuf::from) .next() .ok_or_else(|| anyhow!("no argv[0]"))?; paths::resolve_executable(&argv0) } let exe = from_current_exe() .or_else(|_| from_argv()) .with_context(|| "couldn't get the path to cargo executable")?; Ok(exe) }) .map(AsRef::as_ref) } /// Which package sources have been updated, used to ensure it is only done once. pub fn updated_sources(&self) -> RefMut<'_, HashSet> { self.updated_sources .borrow_with(|| RefCell::new(HashSet::new())) .borrow_mut() } /// Gets all config values from disk. /// /// This will lazy-load the values as necessary. Callers are responsible /// for checking environment variables. Callers outside of the `config` /// module should avoid using this. pub fn values(&self) -> CargoResult<&HashMap> { self.values.try_borrow_with(|| self.load_values()) } /// Gets a mutable copy of the on-disk config values. /// /// This requires the config values to already have been loaded. This /// currently only exists for `cargo vendor` to remove the `source` /// entries. This doesn't respect environment variables. You should avoid /// using this if possible. pub fn values_mut(&mut self) -> CargoResult<&mut HashMap> { match self.values.borrow_mut() { Some(map) => Ok(map), None => bail!("config values not loaded yet"), } } // Note: this is used by RLS, not Cargo. pub fn set_values(&self, values: HashMap) -> CargoResult<()> { if self.values.borrow().is_some() { bail!("config values already found") } match self.values.fill(values) { Ok(()) => Ok(()), Err(_) => bail!("could not fill values"), } } /// Sets the path where ancestor config file searching will stop. The /// given path is included, but its ancestors are not. pub fn set_search_stop_path>(&mut self, path: P) { let path = path.into(); debug_assert!(self.cwd.starts_with(&path)); self.search_stop_path = Some(path); } /// Reloads on-disk configuration values, starting at the given path and /// walking up its ancestors. 
pub fn reload_rooted_at>(&mut self, path: P) -> CargoResult<()> { let values = self.load_values_from(path.as_ref())?; self.values.replace(values); self.merge_cli_args()?; self.load_unstable_flags_from_config()?; Ok(()) } /// The current working directory. pub fn cwd(&self) -> &Path { &self.cwd } /// The `target` output directory to use. /// /// Returns `None` if the user has not chosen an explicit directory. /// /// Callers should prefer `Workspace::target_dir` instead. pub fn target_dir(&self) -> CargoResult> { if let Some(dir) = &self.target_dir { Ok(Some(dir.clone())) } else if let Some(dir) = self.env.get("CARGO_TARGET_DIR") { // Check if the CARGO_TARGET_DIR environment variable is set to an empty string. if dir.is_empty() { bail!( "the target directory is set to an empty string in the \ `CARGO_TARGET_DIR` environment variable" ) } Ok(Some(Filesystem::new(self.cwd.join(dir)))) } else if let Some(val) = &self.build_config()?.target_dir { let path = val.resolve_path(self); // Check if the target directory is set to an empty string in the config.toml file. if val.raw_value().is_empty() { bail!( "the target directory is set to an empty string in {}", val.value().definition ) } Ok(Some(Filesystem::new(path))) } else { Ok(None) } } /// Get a configuration value by key. /// /// This does NOT look at environment variables. See `get_cv_with_env` for /// a variant that supports environment variables. fn get_cv(&self, key: &ConfigKey) -> CargoResult> { log::trace!("get cv {:?}", key); let vals = self.values()?; if key.is_root() { // Returning the entire root table (for example `cargo config get` // with no key). The definition here shouldn't matter. 
return Ok(Some(CV::Table( vals.clone(), Definition::Path(PathBuf::new()), ))); } let mut parts = key.parts().enumerate(); let mut val = match vals.get(parts.next().unwrap().1) { Some(val) => val, None => return Ok(None), }; for (i, part) in parts { match val { CV::Table(map, _) => { val = match map.get(part) { Some(val) => val, None => return Ok(None), } } CV::Integer(_, def) | CV::String(_, def) | CV::List(_, def) | CV::Boolean(_, def) => { let mut key_so_far = ConfigKey::new(); for part in key.parts().take(i) { key_so_far.push(part); } bail!( "expected table for configuration key `{}`, \ but found {} in {}", key_so_far, val.desc(), def ) } } } Ok(Some(val.clone())) } /// This is a helper for getting a CV from a file or env var. pub(crate) fn get_cv_with_env(&self, key: &ConfigKey) -> CargoResult> { // Determine if value comes from env, cli, or file, and merge env if // possible. let cv = self.get_cv(key)?; if key.is_root() { // Root table can't have env value. return Ok(cv); } let env = self.env.get(key.as_env_key()); let env_def = Definition::Environment(key.as_env_key().to_string()); let use_env = match (&cv, env) { // Lists are always merged. (Some(CV::List(..)), Some(_)) => true, (Some(cv), Some(_)) => env_def.is_higher_priority(cv.definition()), (None, Some(_)) => true, _ => false, }; if !use_env { return Ok(cv); } // Future note: If you ever need to deserialize a non-self describing // map type, this should implement a starts_with check (similar to how // ConfigMapAccess does). let env = env.unwrap(); if env == "true" { Ok(Some(CV::Boolean(true, env_def))) } else if env == "false" { Ok(Some(CV::Boolean(false, env_def))) } else if let Ok(i) = env.parse::() { Ok(Some(CV::Integer(i, env_def))) } else if self.cli_unstable().advanced_env && env.starts_with('[') && env.ends_with(']') { match cv { Some(CV::List(mut cv_list, cv_def)) => { // Merge with config file. 
self.get_env_list(key, &mut cv_list)?; Ok(Some(CV::List(cv_list, cv_def))) } Some(cv) => { // This can't assume StringList or UnmergedStringList. // Return an error, which is the behavior of merging // multiple config.toml files with the same scenario. bail!( "unable to merge array env for config `{}`\n\ file: {:?}\n\ env: {}", key, cv, env ); } None => { let mut cv_list = Vec::new(); self.get_env_list(key, &mut cv_list)?; Ok(Some(CV::List(cv_list, env_def))) } } } else { // Try to merge if possible. match cv { Some(CV::List(mut cv_list, cv_def)) => { // Merge with config file. self.get_env_list(key, &mut cv_list)?; Ok(Some(CV::List(cv_list, cv_def))) } _ => { // Note: CV::Table merging is not implemented, as env // vars do not support table values. In the future, we // could check for `{}`, and interpret it as TOML if // that seems useful. Ok(Some(CV::String(env.to_string(), env_def))) } } } } /// Helper primarily for testing. pub fn set_env(&mut self, env: HashMap) { self.env = env; } /// Returns all environment variables. pub(crate) fn env(&self) -> &HashMap { &self.env } fn get_env(&self, key: &ConfigKey) -> Result, ConfigError> where T: FromStr, ::Err: fmt::Display, { match self.env.get(key.as_env_key()) { Some(value) => { let definition = Definition::Environment(key.as_env_key().to_string()); Ok(Some(Value { val: value .parse() .map_err(|e| ConfigError::new(format!("{}", e), definition.clone()))?, definition, })) } None => { self.check_environment_key_case_mismatch(key); Ok(None) } } } fn has_key(&self, key: &ConfigKey, env_prefix_ok: bool) -> bool { if self.env.contains_key(key.as_env_key()) { return true; } // See ConfigMapAccess for a description of this. 
if env_prefix_ok { let env_prefix = format!("{}_", key.as_env_key()); if self.env.keys().any(|k| k.starts_with(&env_prefix)) { return true; } } if let Ok(o_cv) = self.get_cv(key) { if o_cv.is_some() { return true; } } self.check_environment_key_case_mismatch(key); false } fn check_environment_key_case_mismatch(&self, key: &ConfigKey) { if let Some(env_key) = self.upper_case_env.get(key.as_env_key()) { let _ = self.shell().warn(format!( "Environment variables are expected to use uppercase letters and underscores, \ the variable `{}` will be ignored and have no effect", env_key )); } } /// Get a string config value. /// /// See `get` for more details. pub fn get_string(&self, key: &str) -> CargoResult> { self.get::>>(key) } /// Get a config value that is expected to be a path. /// /// This returns a relative path if the value does not contain any /// directory separators. See `ConfigRelativePath::resolve_program` for /// more details. pub fn get_path(&self, key: &str) -> CargoResult> { self.get::>>(key).map(|v| { v.map(|v| Value { val: v.val.resolve_program(self), definition: v.definition, }) }) } fn string_to_path(&self, value: &str, definition: &Definition) -> PathBuf { let is_path = value.contains('/') || (cfg!(windows) && value.contains('\\')); if is_path { definition.root(self).join(value) } else { // A pathless name. PathBuf::from(value) } } /// Get a list of strings. /// /// DO NOT USE outside of the config module. `pub` will be removed in the /// future. /// /// NOTE: this does **not** support environment variables. Use `get` instead /// if you want that. pub fn get_list(&self, key: &str) -> CargoResult>> { let key = ConfigKey::from_str(key); self._get_list(&key) } fn _get_list(&self, key: &ConfigKey) -> CargoResult>> { match self.get_cv(key)? { Some(CV::List(val, definition)) => Ok(Some(Value { val, definition })), Some(val) => self.expected("list", key, &val), None => Ok(None), } } /// Helper for StringList type to get something that is a string or list. 
fn get_list_or_string( &self, key: &ConfigKey, merge: bool, ) -> CargoResult> { let mut res = Vec::new(); if !merge { self.get_env_list(key, &mut res)?; if !res.is_empty() { return Ok(res); } } match self.get_cv(key)? { Some(CV::List(val, _def)) => res.extend(val), Some(CV::String(val, def)) => { let split_vs = val.split_whitespace().map(|s| (s.to_string(), def.clone())); res.extend(split_vs); } Some(val) => { return self.expected("string or array of strings", key, &val); } None => {} } self.get_env_list(key, &mut res)?; Ok(res) } /// Internal method for getting an environment variable as a list. fn get_env_list( &self, key: &ConfigKey, output: &mut Vec<(String, Definition)>, ) -> CargoResult<()> { let env_val = match self.env.get(key.as_env_key()) { Some(v) => v, None => { self.check_environment_key_case_mismatch(key); return Ok(()); } }; let def = Definition::Environment(key.as_env_key().to_string()); if self.cli_unstable().advanced_env && env_val.starts_with('[') && env_val.ends_with(']') { // Parse an environment string as a TOML array. let toml_s = format!("value={}", env_val); let toml_v: toml::Value = toml::de::from_str(&toml_s).map_err(|e| { ConfigError::new(format!("could not parse TOML list: {}", e), def.clone()) })?; let values = toml_v .as_table() .unwrap() .get("value") .unwrap() .as_array() .expect("env var was not array"); for value in values { // TODO: support other types. let s = value.as_str().ok_or_else(|| { ConfigError::new( format!("expected string, found {}", value.type_str()), def.clone(), ) })?; output.push((s.to_string(), def.clone())); } } else { output.extend( env_val .split_whitespace() .map(|s| (s.to_string(), def.clone())), ); } Ok(()) } /// Low-level method for getting a config value as an `OptValue>`. /// /// NOTE: This does not read from env. The caller is responsible for that. fn get_table(&self, key: &ConfigKey) -> CargoResult>> { match self.get_cv(key)? 
{ Some(CV::Table(val, definition)) => Ok(Some(Value { val, definition })), Some(val) => self.expected("table", key, &val), None => Ok(None), } } get_value_typed! {get_integer, i64, Integer, "an integer"} get_value_typed! {get_bool, bool, Boolean, "true/false"} get_value_typed! {get_string_priv, String, String, "a string"} /// Generate an error when the given value is the wrong type. fn expected(&self, ty: &str, key: &ConfigKey, val: &CV) -> CargoResult { val.expected(ty, &key.to_string()) .map_err(|e| anyhow!("invalid configuration for key `{}`\n{}", key, e)) } /// Update the Config instance based on settings typically passed in on /// the command-line. /// /// This may also load the config from disk if it hasn't already been /// loaded. pub fn configure( &mut self, verbose: u32, quiet: bool, color: Option<&str>, frozen: bool, locked: bool, offline: bool, target_dir: &Option, unstable_flags: &[String], cli_config: &[String], ) -> CargoResult<()> { for warning in self .unstable_flags .parse(unstable_flags, self.nightly_features_allowed)? { self.shell().warn(warning)?; } if !unstable_flags.is_empty() { // store a copy of the cli flags separately for `load_unstable_flags_from_config` // (we might also need it again for `reload_rooted_at`) self.unstable_flags_cli = Some(unstable_flags.to_vec()); } if !cli_config.is_empty() { self.cli_config = Some(cli_config.iter().map(|s| s.to_string()).collect()); self.merge_cli_args()?; } if self.unstable_flags.config_include { // If the config was already loaded (like when fetching the // `[alias]` table), it was loaded with includes disabled because // the `unstable_flags` hadn't been set up, yet. Any values // fetched before this step will not process includes, but that // should be fine (`[alias]` is one of the only things loaded // before configure). This can be removed when stabilized. 
self.reload_rooted_at(self.cwd.clone())?; } let extra_verbose = verbose >= 2; let verbose = verbose != 0; // Ignore errors in the configuration files. We don't want basic // commands like `cargo version` to error out due to config file // problems. let term = self.get::("term").unwrap_or_default(); let color = color.or_else(|| term.color.as_deref()); // The command line takes precedence over configuration. let verbosity = match (verbose, quiet) { (true, true) => bail!("cannot set both --verbose and --quiet"), (true, false) => Verbosity::Verbose, (false, true) => Verbosity::Quiet, (false, false) => match (term.verbose, term.quiet) { (Some(true), Some(true)) => { bail!("cannot set both `term.verbose` and `term.quiet`") } (Some(true), _) => Verbosity::Verbose, (_, Some(true)) => Verbosity::Quiet, _ => Verbosity::Normal, }, }; let cli_target_dir = target_dir.as_ref().map(|dir| Filesystem::new(dir.clone())); self.shell().set_verbosity(verbosity); self.shell().set_color_choice(color)?; self.progress_config = term.progress.unwrap_or_default(); self.extra_verbose = extra_verbose; self.frozen = frozen; self.locked = locked; self.offline = offline || self .net_config() .ok() .and_then(|n| n.offline) .unwrap_or(false); self.target_dir = cli_target_dir; self.load_unstable_flags_from_config()?; Ok(()) } fn load_unstable_flags_from_config(&mut self) -> CargoResult<()> { // If nightly features are enabled, allow setting Z-flags from config // using the `unstable` table. Ignore that block otherwise. if self.nightly_features_allowed { self.unstable_flags = self .get::>("unstable")? .unwrap_or_default(); if let Some(unstable_flags_cli) = &self.unstable_flags_cli { // NB. It's not ideal to parse these twice, but doing it again here // allows the CLI to override config files for both enabling // and disabling, and doing it up top allows CLI Zflags to // control config parsing behavior. 
self.unstable_flags.parse(unstable_flags_cli, true)?; } } Ok(()) } pub fn cli_unstable(&self) -> &CliUnstable { &self.unstable_flags } pub fn extra_verbose(&self) -> bool { self.extra_verbose } pub fn network_allowed(&self) -> bool { !self.frozen() && !self.offline() } pub fn offline(&self) -> bool { self.offline } pub fn frozen(&self) -> bool { self.frozen } pub fn locked(&self) -> bool { self.locked } pub fn lock_update_allowed(&self) -> bool { !self.frozen && !self.locked } /// Loads configuration from the filesystem. pub fn load_values(&self) -> CargoResult> { self.load_values_from(&self.cwd) } pub(crate) fn load_values_unmerged(&self) -> CargoResult> { let mut result = Vec::new(); let mut seen = HashSet::new(); let home = self.home_path.clone().into_path_unlocked(); self.walk_tree(&self.cwd, &home, |path| { let mut cv = self._load_file(path, &mut seen, false)?; if self.cli_unstable().config_include { self.load_unmerged_include(&mut cv, &mut seen, &mut result)?; } result.push(cv); Ok(()) }) .with_context(|| "could not load Cargo configuration")?; Ok(result) } fn load_unmerged_include( &self, cv: &mut CV, seen: &mut HashSet, output: &mut Vec, ) -> CargoResult<()> { let includes = self.include_paths(cv, false)?; for (path, abs_path, def) in includes { let mut cv = self._load_file(&abs_path, seen, false).with_context(|| { format!("failed to load config include `{}` from `{}`", path, def) })?; self.load_unmerged_include(&mut cv, seen, output)?; output.push(cv); } Ok(()) } fn load_values_from(&self, path: &Path) -> CargoResult> { // This definition path is ignored, this is just a temporary container // representing the entire file. 
let mut cfg = CV::Table(HashMap::new(), Definition::Path(PathBuf::from("."))); let home = self.home_path.clone().into_path_unlocked(); self.walk_tree(path, &home, |path| { let value = self.load_file(path, true)?; cfg.merge(value, false).with_context(|| { format!("failed to merge configuration at `{}`", path.display()) })?; Ok(()) }) .with_context(|| "could not load Cargo configuration")?; match cfg { CV::Table(map, _) => Ok(map), _ => unreachable!(), } } fn load_file(&self, path: &Path, includes: bool) -> CargoResult { self._load_file(path, &mut HashSet::new(), includes) } fn _load_file( &self, path: &Path, seen: &mut HashSet, includes: bool, ) -> CargoResult { if !seen.insert(path.to_path_buf()) { bail!( "config `include` cycle detected with path `{}`", path.display() ); } let contents = fs::read_to_string(path) .with_context(|| format!("failed to read configuration file `{}`", path.display()))?; let toml = cargo_toml::parse(&contents, path, self).with_context(|| { format!("could not parse TOML configuration in `{}`", path.display()) })?; let value = CV::from_toml(Definition::Path(path.to_path_buf()), toml).with_context(|| { format!( "failed to load TOML configuration from `{}`", path.display() ) })?; if includes { self.load_includes(value, seen) } else { Ok(value) } } /// Load any `include` files listed in the given `value`. /// /// Returns `value` with the given include files merged into it. /// /// `seen` is used to check for cyclic includes. fn load_includes(&self, mut value: CV, seen: &mut HashSet) -> CargoResult { // Get the list of files to load. let includes = self.include_paths(&mut value, true)?; // Check unstable. if !self.cli_unstable().config_include { return Ok(value); } // Accumulate all values here. 
let mut root = CV::Table(HashMap::new(), value.definition().clone()); for (path, abs_path, def) in includes { self._load_file(&abs_path, seen, true) .and_then(|include| root.merge(include, true)) .with_context(|| { format!("failed to load config include `{}` from `{}`", path, def) })?; } root.merge(value, true)?; Ok(root) } /// Converts the `include` config value to a list of absolute paths. fn include_paths( &self, cv: &mut CV, remove: bool, ) -> CargoResult> { let abs = |path: &str, def: &Definition| -> (String, PathBuf, Definition) { let abs_path = match def { Definition::Path(p) => p.parent().unwrap().join(&path), Definition::Environment(_) | Definition::Cli => self.cwd().join(&path), }; (path.to_string(), abs_path, def.clone()) }; let table = match cv { CV::Table(table, _def) => table, _ => unreachable!(), }; let owned; let include = if remove { owned = table.remove("include"); owned.as_ref() } else { table.get("include") }; let includes = match include { Some(CV::String(s, def)) => { vec![abs(s, def)] } Some(CV::List(list, _def)) => list.iter().map(|(s, def)| abs(s, def)).collect(), Some(other) => bail!( "`include` expected a string or list, but found {} in `{}`", other.desc(), other.definition() ), None => { return Ok(Vec::new()); } }; Ok(includes) } /// Parses the CLI config args and returns them as a table. pub(crate) fn cli_args_as_table(&self) -> CargoResult { let mut loaded_args = CV::Table(HashMap::new(), Definition::Cli); let cli_args = match &self.cli_config { Some(cli_args) => cli_args, None => return Ok(loaded_args), }; let mut seen = HashSet::new(); for arg in cli_args { let arg_as_path = self.cwd.join(arg); let tmp_table = if !arg.is_empty() && arg_as_path.exists() { // --config path_to_file let str_path = arg_as_path .to_str() .ok_or_else(|| { anyhow::format_err!("config path {:?} is not utf-8", arg_as_path) })? 
.to_string(); self._load_file(&self.cwd().join(&str_path), &mut seen, true) .with_context(|| format!("failed to load config from `{}`", str_path))? } else { // We only want to allow "dotted key" (see https://toml.io/en/v1.0.0#keys) // expressions followed by a value that's not an "inline table" // (https://toml.io/en/v1.0.0#inline-table). Easiest way to check for that is to // parse the value as a toml_edit::Document, and check that the (single) // inner-most table is set via dotted keys. let doc: toml_edit::Document = arg.parse().with_context(|| { format!("failed to parse value from --config argument `{arg}` as a dotted key expression") })?; fn non_empty_decor(d: &toml_edit::Decor) -> bool { d.prefix().map_or(false, |p| !p.trim().is_empty()) || d.suffix().map_or(false, |s| !s.trim().is_empty()) } let ok = { let mut got_to_value = false; let mut table = doc.as_table(); let mut is_root = true; while table.is_dotted() || is_root { is_root = false; if table.len() != 1 { break; } let (k, n) = table.iter().next().expect("len() == 1 above"); match n { Item::Table(nt) => { if table.key_decor(k).map_or(false, non_empty_decor) || non_empty_decor(nt.decor()) { bail!( "--config argument `{arg}` \ includes non-whitespace decoration" ) } table = nt; } Item::Value(v) if v.is_inline_table() => { bail!( "--config argument `{arg}` \ sets a value to an inline table, which is not accepted" ); } Item::Value(v) => { if non_empty_decor(v.decor()) { bail!( "--config argument `{arg}` \ includes non-whitespace decoration" ) } got_to_value = true; break; } Item::ArrayOfTables(_) => { bail!( "--config argument `{arg}` \ sets a value to an array of tables, which is not accepted" ); } Item::None => { bail!("--config argument `{arg}` doesn't provide a value") } } } got_to_value }; if !ok { bail!( "--config argument `{arg}` was not a TOML dotted key expression (such as `build.jobs = 2`)" ); } let toml_v: toml::Value = toml::from_document(doc).with_context(|| { format!("failed to parse value from 
--config argument `{arg}`") })?; if toml_v .get("registry") .and_then(|v| v.as_table()) .and_then(|t| t.get("token")) .is_some() { bail!("registry.token cannot be set through --config for security reasons"); } else if let Some((k, _)) = toml_v .get("registries") .and_then(|v| v.as_table()) .and_then(|t| t.iter().find(|(_, v)| v.get("token").is_some())) { bail!( "registries.{}.token cannot be set through --config for security reasons", k ); } CV::from_toml(Definition::Cli, toml_v) .with_context(|| format!("failed to convert --config argument `{arg}`"))? }; let tmp_table = self .load_includes(tmp_table, &mut HashSet::new()) .with_context(|| "failed to load --config include".to_string())?; loaded_args .merge(tmp_table, true) .with_context(|| format!("failed to merge --config argument `{arg}`"))?; } Ok(loaded_args) } /// Add config arguments passed on the command line. fn merge_cli_args(&mut self) -> CargoResult<()> { let loaded_map = match self.cli_args_as_table()? { CV::Table(table, _def) => table, _ => unreachable!(), }; // Force values to be loaded. let _ = self.values()?; let values = self.values_mut()?; for (key, value) in loaded_map.into_iter() { match values.entry(key) { Vacant(entry) => { entry.insert(value); } Occupied(mut entry) => entry.get_mut().merge(value, true).with_context(|| { format!( "failed to merge --config key `{}` into `{}`", entry.key(), entry.get().definition(), ) })?, }; } Ok(()) } /// The purpose of this function is to aid in the transition to using /// .toml extensions on Cargo's config files, which were historically not used. /// Both 'config.toml' and 'credentials.toml' should be valid with or without extension. /// When both exist, we want to prefer the one without an extension for /// backwards compatibility, but warn the user appropriately. 
fn get_file_path( &self, dir: &Path, filename_without_extension: &str, warn: bool, ) -> CargoResult> { let possible = dir.join(filename_without_extension); let possible_with_extension = dir.join(format!("{}.toml", filename_without_extension)); if possible.exists() { if warn && possible_with_extension.exists() { // We don't want to print a warning if the version // without the extension is just a symlink to the version // WITH an extension, which people may want to do to // support multiple Cargo versions at once and not // get a warning. let skip_warning = if let Ok(target_path) = fs::read_link(&possible) { target_path == possible_with_extension } else { false }; if !skip_warning { self.shell().warn(format!( "Both `{}` and `{}` exist. Using `{}`", possible.display(), possible_with_extension.display(), possible.display() ))?; } } Ok(Some(possible)) } else if possible_with_extension.exists() { Ok(Some(possible_with_extension)) } else { Ok(None) } } fn walk_tree(&self, pwd: &Path, home: &Path, mut walk: F) -> CargoResult<()> where F: FnMut(&Path) -> CargoResult<()>, { let mut stash: HashSet = HashSet::new(); for current in paths::ancestors(pwd, self.search_stop_path.as_deref()) { if let Some(path) = self.get_file_path(¤t.join(".cargo"), "config", true)? { walk(&path)?; stash.insert(path); } } // Once we're done, also be sure to walk the home directory even if it's not // in our history to be sure we pick up that standard location for // information. if let Some(path) = self.get_file_path(home, "config", true)? { if !stash.contains(&path) { walk(&path)?; } } Ok(()) } /// Gets the index for a registry. pub fn get_registry_index(&self, registry: &str) -> CargoResult { validate_package_name(registry, "registry name", "")?; if let Some(index) = self.get_string(&format!("registries.{}.index", registry))? 
{ self.resolve_registry_index(&index).with_context(|| { format!( "invalid index URL for registry `{}` defined in {}", registry, index.definition ) }) } else { bail!("no index found for registry: `{}`", registry); } } /// Returns an error if `registry.index` is set. pub fn check_registry_index_not_set(&self) -> CargoResult<()> { if self.get_string("registry.index")?.is_some() { bail!( "the `registry.index` config value is no longer supported\n\ Use `[source]` replacement to alter the default index for crates.io." ); } Ok(()) } fn resolve_registry_index(&self, index: &Value) -> CargoResult { // This handles relative file: URLs, relative to the config definition. let base = index .definition .root(self) .join("truncated-by-url_with_base"); // Parse val to check it is a URL, not a relative path without a protocol. let _parsed = index.val.into_url()?; let url = index.val.into_url_with_base(Some(&*base))?; if url.password().is_some() { bail!("registry URLs may not contain passwords"); } Ok(url) } /// Loads credentials config from the credentials file, if present. pub fn load_credentials(&mut self) -> CargoResult<()> { let home_path = self.home_path.clone().into_path_unlocked(); let credentials = match self.get_file_path(&home_path, "credentials", true)? { Some(credentials) => credentials, None => return Ok(()), }; let mut value = self.load_file(&credentials, true)?; // Backwards compatibility for old `.cargo/credentials` layout. 
{ let (value_map, def) = match value { CV::Table(ref mut value, ref def) => (value, def), _ => unreachable!(), }; if let Some(token) = value_map.remove("token") { if let Vacant(entry) = value_map.entry("registry".into()) { let map = HashMap::from([("token".into(), token)]); let table = CV::Table(map, def.clone()); entry.insert(table); } } } if let CV::Table(map, _) = value { let base_map = self.values_mut()?; for (k, v) in map { match base_map.entry(k) { Vacant(entry) => { entry.insert(v); } Occupied(mut entry) => { entry.get_mut().merge(v, true)?; } } } } Ok(()) } /// Looks for a path for `tool` in an environment variable or the given config, and returns /// `None` if it's not present. fn maybe_get_tool( &self, tool: &str, from_config: &Option, ) -> Option { let var = tool.to_uppercase(); match env::var_os(&var) { Some(tool_path) => { let maybe_relative = match tool_path.to_str() { Some(s) => s.contains('/') || s.contains('\\'), None => false, }; let path = if maybe_relative { self.cwd.join(tool_path) } else { PathBuf::from(tool_path) }; Some(path) } None => from_config.as_ref().map(|p| p.resolve_program(self)), } } /// Looks for a path for `tool` in an environment variable or config path, defaulting to `tool` /// as a path. 
fn get_tool(&self, tool: &str, from_config: &Option) -> PathBuf { self.maybe_get_tool(tool, from_config) .unwrap_or_else(|| PathBuf::from(tool)) } pub fn jobserver_from_env(&self) -> Option<&jobserver::Client> { self.jobserver.as_ref() } pub fn http(&self) -> CargoResult<&RefCell> { let http = self .easy .try_borrow_with(|| ops::http_handle(self).map(RefCell::new))?; { let mut http = http.borrow_mut(); http.reset(); let timeout = ops::configure_http_handle(self, &mut http)?; timeout.configure(&mut http)?; } Ok(http) } pub fn http_config(&self) -> CargoResult<&CargoHttpConfig> { self.http_config .try_borrow_with(|| self.get::("http")) } pub fn future_incompat_config(&self) -> CargoResult<&CargoFutureIncompatConfig> { self.future_incompat_config .try_borrow_with(|| self.get::("future-incompat-report")) } pub fn net_config(&self) -> CargoResult<&CargoNetConfig> { self.net_config .try_borrow_with(|| self.get::("net")) } pub fn build_config(&self) -> CargoResult<&CargoBuildConfig> { self.build_config .try_borrow_with(|| self.get::("build")) } pub fn progress_config(&self) -> &ProgressConfig { &self.progress_config } pub fn env_config(&self) -> CargoResult<&EnvConfig> { self.env_config .try_borrow_with(|| self.get::("env")) } /// This is used to validate the `term` table has valid syntax. /// /// This is necessary because loading the term settings happens very /// early, and in some situations (like `cargo version`) we don't want to /// fail if there are problems with the config file. pub fn validate_term_config(&self) -> CargoResult<()> { drop(self.get::("term")?); Ok(()) } /// Returns a list of [target.'cfg()'] tables. /// /// The list is sorted by the table name. pub fn target_cfgs(&self) -> CargoResult<&Vec<(String, TargetCfgConfig)>> { self.target_cfgs .try_borrow_with(|| target::load_target_cfgs(self)) } pub fn doc_extern_map(&self) -> CargoResult<&RustdocExternMap> { // Note: This does not support environment variables. 
The `Unit` // fundamentally does not have access to the registry name, so there is // nothing to query. Plumbing the name into SourceId is quite challenging. self.doc_extern_map .try_borrow_with(|| self.get::("doc.extern-map")) } /// Returns true if the `[target]` table should be applied to host targets. pub fn target_applies_to_host(&self) -> CargoResult { target::get_target_applies_to_host(self) } /// Returns the `[host]` table definition for the given target triple. pub fn host_cfg_triple(&self, target: &str) -> CargoResult { target::load_host_triple(self, target) } /// Returns the `[target]` table definition for the given target triple. pub fn target_cfg_triple(&self, target: &str) -> CargoResult { target::load_target_triple(self, target) } pub fn crates_io_source_id(&self, f: F) -> CargoResult where F: FnMut() -> CargoResult, { Ok(*(self.crates_io_source_id.try_borrow_with(f)?)) } pub fn creation_time(&self) -> Instant { self.creation_time } /// Retrieves a config variable. /// /// This supports most serde `Deserialize` types. Examples: /// /// ```rust,ignore /// let v: Option = config.get("some.nested.key")?; /// let v: Option = config.get("some.key")?; /// let v: Option> = config.get("foo")?; /// ``` /// /// The key may be a dotted key, but this does NOT support TOML key /// quoting. Avoid key components that may have dots. For example, /// `foo.'a.b'.bar" does not work if you try to fetch `foo.'a.b'". You can /// fetch `foo` if it is a map, though. 
pub fn get<'de, T: serde::de::Deserialize<'de>>(&self, key: &str) -> CargoResult { let d = Deserializer { config: self, key: ConfigKey::from_str(key), env_prefix_ok: true, }; T::deserialize(d).map_err(|e| e.into()) } pub fn assert_package_cache_locked<'a>(&self, f: &'a Filesystem) -> &'a Path { let ret = f.as_path_unlocked(); assert!( self.package_cache_lock.borrow().is_some(), "package cache lock is not currently held, Cargo forgot to call \ `acquire_package_cache_lock` before we got to this stack frame", ); assert!(ret.starts_with(self.home_path.as_path_unlocked())); ret } /// Acquires an exclusive lock on the global "package cache" /// /// This lock is global per-process and can be acquired recursively. An RAII /// structure is returned to release the lock, and if this process /// abnormally terminates the lock is also released. pub fn acquire_package_cache_lock(&self) -> CargoResult> { let mut slot = self.package_cache_lock.borrow_mut(); match *slot { // We've already acquired the lock in this process, so simply bump // the count and continue. Some((_, ref mut cnt)) => { *cnt += 1; } None => { let path = ".package-cache"; let desc = "package cache"; // First, attempt to open an exclusive lock which is in general // the purpose of this lock! // // If that fails because of a readonly filesystem or a // permission error, though, then we don't really want to fail // just because of this. All files that this lock protects are // in subfolders, so they're assumed by Cargo to also be // readonly or have invalid permissions for us to write to. If // that's the case, then we don't really need to grab a lock in // the first place here. // // Despite this we attempt to grab a readonly lock. This means // that if our read-only folder is shared read-write with // someone else on the system we should synchronize with them, // but if we can't even do that then we did our best and we just // keep on chugging elsewhere. 
match self.home_path.open_rw(path, self, desc) { Ok(lock) => *slot = Some((Some(lock), 1)), Err(e) => { if maybe_readonly(&e) { let lock = self.home_path.open_ro(path, self, desc).ok(); *slot = Some((lock, 1)); return Ok(PackageCacheLock(self)); } Err(e).with_context(|| "failed to acquire package cache lock")?; } } } } return Ok(PackageCacheLock(self)); fn maybe_readonly(err: &anyhow::Error) -> bool { err.chain().any(|err| { if let Some(io) = err.downcast_ref::() { if io.kind() == io::ErrorKind::PermissionDenied { return true; } #[cfg(unix)] return io.raw_os_error() == Some(libc::EROFS); } false }) } } pub fn release_package_cache_lock(&self) {} } /// Internal error for serde errors. #[derive(Debug)] pub struct ConfigError { error: anyhow::Error, definition: Option, } impl ConfigError { fn new(message: String, definition: Definition) -> ConfigError { ConfigError { error: anyhow::Error::msg(message), definition: Some(definition), } } fn expected(key: &ConfigKey, expected: &str, found: &ConfigValue) -> ConfigError { ConfigError { error: anyhow!( "`{}` expected {}, but found a {}", key, expected, found.desc() ), definition: Some(found.definition().clone()), } } fn missing(key: &ConfigKey) -> ConfigError { ConfigError { error: anyhow!("missing config key `{}`", key), definition: None, } } fn with_key_context(self, key: &ConfigKey, definition: Definition) -> ConfigError { ConfigError { error: anyhow::Error::from(self) .context(format!("could not load config key `{}`", key)), definition: Some(definition), } } } impl std::error::Error for ConfigError { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { self.error.source() } } impl fmt::Display for ConfigError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { if let Some(definition) = &self.definition { write!(f, "error in {}: {}", definition, self.error) } else { self.error.fmt(f) } } } impl serde::de::Error for ConfigError { fn custom(msg: T) -> Self { ConfigError { error: 
anyhow::Error::msg(msg.to_string()), definition: None, } } } impl From for ConfigError { fn from(error: anyhow::Error) -> Self { ConfigError { error, definition: None, } } } #[derive(Eq, PartialEq, Clone)] pub enum ConfigValue { Integer(i64, Definition), String(String, Definition), List(Vec<(String, Definition)>, Definition), Table(HashMap, Definition), Boolean(bool, Definition), } impl fmt::Debug for ConfigValue { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { CV::Integer(i, def) => write!(f, "{} (from {})", i, def), CV::Boolean(b, def) => write!(f, "{} (from {})", b, def), CV::String(s, def) => write!(f, "{} (from {})", s, def), CV::List(list, def) => { write!(f, "[")?; for (i, (s, def)) in list.iter().enumerate() { if i > 0 { write!(f, ", ")?; } write!(f, "{} (from {})", s, def)?; } write!(f, "] (from {})", def) } CV::Table(table, _) => write!(f, "{:?}", table), } } } impl ConfigValue { fn from_toml(def: Definition, toml: toml::Value) -> CargoResult { match toml { toml::Value::String(val) => Ok(CV::String(val, def)), toml::Value::Boolean(b) => Ok(CV::Boolean(b, def)), toml::Value::Integer(i) => Ok(CV::Integer(i, def)), toml::Value::Array(val) => Ok(CV::List( val.into_iter() .map(|toml| match toml { toml::Value::String(val) => Ok((val, def.clone())), v => bail!("expected string but found {} in list", v.type_str()), }) .collect::>()?, def, )), toml::Value::Table(val) => Ok(CV::Table( val.into_iter() .map(|(key, value)| { let value = CV::from_toml(def.clone(), value) .with_context(|| format!("failed to parse key `{}`", key))?; Ok((key, value)) }) .collect::>()?, def, )), v => bail!( "found TOML configuration value of unknown type `{}`", v.type_str() ), } } fn into_toml(self) -> toml::Value { match self { CV::Boolean(s, _) => toml::Value::Boolean(s), CV::String(s, _) => toml::Value::String(s), CV::Integer(i, _) => toml::Value::Integer(i), CV::List(l, _) => { toml::Value::Array(l.into_iter().map(|(s, _)| toml::Value::String(s)).collect()) } 
CV::Table(l, _) => { toml::Value::Table(l.into_iter().map(|(k, v)| (k, v.into_toml())).collect()) } } } /// Merge the given value into self. /// /// If `force` is true, primitive (non-container) types will override existing values. /// If false, the original will be kept and the new value ignored. /// /// Container types (tables and arrays) are merged with existing values. /// /// Container and non-container types cannot be mixed. fn merge(&mut self, from: ConfigValue, force: bool) -> CargoResult<()> { match (self, from) { (&mut CV::List(ref mut old, _), CV::List(ref mut new, _)) => { old.extend(mem::take(new).into_iter()); } (&mut CV::Table(ref mut old, _), CV::Table(ref mut new, _)) => { for (key, value) in mem::take(new) { match old.entry(key.clone()) { Occupied(mut entry) => { let new_def = value.definition().clone(); let entry = entry.get_mut(); entry.merge(value, force).with_context(|| { format!( "failed to merge key `{}` between \ {} and {}", key, entry.definition(), new_def, ) })?; } Vacant(entry) => { entry.insert(value); } }; } } // Allow switching types except for tables or arrays. 
(expected @ &mut CV::List(_, _), found) | (expected @ &mut CV::Table(_, _), found) | (expected, found @ CV::List(_, _)) | (expected, found @ CV::Table(_, _)) => { return Err(anyhow!( "failed to merge config value from `{}` into `{}`: expected {}, but found {}", found.definition(), expected.definition(), expected.desc(), found.desc() )); } (old, mut new) => { if force || new.definition().is_higher_priority(old.definition()) { mem::swap(old, &mut new); } } } Ok(()) } pub fn i64(&self, key: &str) -> CargoResult<(i64, &Definition)> { match self { CV::Integer(i, def) => Ok((*i, def)), _ => self.expected("integer", key), } } pub fn string(&self, key: &str) -> CargoResult<(&str, &Definition)> { match self { CV::String(s, def) => Ok((s, def)), _ => self.expected("string", key), } } pub fn table(&self, key: &str) -> CargoResult<(&HashMap, &Definition)> { match self { CV::Table(table, def) => Ok((table, def)), _ => self.expected("table", key), } } pub fn list(&self, key: &str) -> CargoResult<&[(String, Definition)]> { match self { CV::List(list, _) => Ok(list), _ => self.expected("list", key), } } pub fn boolean(&self, key: &str) -> CargoResult<(bool, &Definition)> { match self { CV::Boolean(b, def) => Ok((*b, def)), _ => self.expected("bool", key), } } pub fn desc(&self) -> &'static str { match *self { CV::Table(..) => "table", CV::List(..) => "array", CV::String(..) => "string", CV::Boolean(..) => "boolean", CV::Integer(..) 
=> "integer", } } pub fn definition(&self) -> &Definition { match self { CV::Boolean(_, def) | CV::Integer(_, def) | CV::String(_, def) | CV::List(_, def) | CV::Table(_, def) => def, } } fn expected(&self, wanted: &str, key: &str) -> CargoResult { bail!( "expected a {}, but found a {} for `{}` in {}", wanted, self.desc(), key, self.definition() ) } } pub fn homedir(cwd: &Path) -> Option { ::home::cargo_home_with_cwd(cwd).ok() } pub fn save_credentials( cfg: &Config, token: Option, registry: Option<&str>, ) -> CargoResult<()> { // If 'credentials.toml' exists, we should write to that, otherwise // use the legacy 'credentials'. There's no need to print the warning // here, because it would already be printed at load time. let home_path = cfg.home_path.clone().into_path_unlocked(); let filename = match cfg.get_file_path(&home_path, "credentials", false)? { Some(path) => match path.file_name() { Some(filename) => Path::new(filename).to_owned(), None => Path::new("credentials").to_owned(), }, None => Path::new("credentials").to_owned(), }; let mut file = { cfg.home_path.create_dir()?; cfg.home_path .open_rw(filename, cfg, "credentials' config file")? }; let mut contents = String::new(); file.read_to_string(&mut contents).with_context(|| { format!( "failed to read configuration file `{}`", file.path().display() ) })?; let mut toml = cargo_toml::parse(&contents, file.path(), cfg)?; // Move the old token location to the new one. 
if let Some(token) = toml.as_table_mut().unwrap().remove("token") { let map = HashMap::from([("token".to_string(), token)]); toml.as_table_mut() .unwrap() .insert("registry".into(), map.into()); } if let Some(token) = token { // login let (key, mut value) = { let key = "token".to_string(); let value = ConfigValue::String(token, Definition::Path(file.path().to_path_buf())); let map = HashMap::from([(key, value)]); let table = CV::Table(map, Definition::Path(file.path().to_path_buf())); if let Some(registry) = registry { let map = HashMap::from([(registry.to_string(), table)]); ( "registries".into(), CV::Table(map, Definition::Path(file.path().to_path_buf())), ) } else { ("registry".into(), table) } }; if registry.is_some() { if let Some(table) = toml.as_table_mut().unwrap().remove("registries") { let v = CV::from_toml(Definition::Path(file.path().to_path_buf()), table)?; value.merge(v, false)?; } } toml.as_table_mut().unwrap().insert(key, value.into_toml()); } else { // logout let table = toml.as_table_mut().unwrap(); if let Some(registry) = registry { if let Some(registries) = table.get_mut("registries") { if let Some(reg) = registries.get_mut(registry) { let rtable = reg.as_table_mut().ok_or_else(|| { format_err!("expected `[registries.{}]` to be a table", registry) })?; rtable.remove("token"); } } } else if let Some(registry) = table.get_mut("registry") { let reg_table = registry .as_table_mut() .ok_or_else(|| format_err!("expected `[registry]` to be a table"))?; reg_table.remove("token"); } } let contents = toml.to_string(); file.seek(SeekFrom::Start(0))?; file.write_all(contents.as_bytes()) .with_context(|| format!("failed to write to `{}`", file.path().display()))?; file.file().set_len(contents.len() as u64)?; set_permissions(file.file(), 0o600) .with_context(|| format!("failed to set permissions of `{}`", file.path().display()))?; return Ok(()); #[cfg(unix)] fn set_permissions(file: &File, mode: u32) -> CargoResult<()> { use std::os::unix::fs::PermissionsExt; 
let mut perms = file.metadata()?.permissions(); perms.set_mode(mode); file.set_permissions(perms)?; Ok(()) } #[cfg(not(unix))] #[allow(unused)] fn set_permissions(file: &File, mode: u32) -> CargoResult<()> { Ok(()) } } pub struct PackageCacheLock<'a>(&'a Config); impl Drop for PackageCacheLock<'_> { fn drop(&mut self) { let mut slot = self.0.package_cache_lock.borrow_mut(); let (_, cnt) = slot.as_mut().unwrap(); *cnt -= 1; if *cnt == 0 { *slot = None; } } } #[derive(Debug, Default, Deserialize, PartialEq)] #[serde(rename_all = "kebab-case")] pub struct CargoHttpConfig { pub proxy: Option, pub low_speed_limit: Option, pub timeout: Option, pub cainfo: Option, pub check_revoke: Option, pub user_agent: Option, pub debug: Option, pub multiplexing: Option, pub ssl_version: Option, } #[derive(Debug, Default, Deserialize, PartialEq)] #[serde(rename_all = "kebab-case")] pub struct CargoFutureIncompatConfig { frequency: Option, } #[derive(Debug, Deserialize, PartialEq)] #[serde(rename_all = "kebab-case")] pub enum CargoFutureIncompatFrequencyConfig { Always, Never, } impl CargoFutureIncompatConfig { pub fn should_display_message(&self) -> bool { use CargoFutureIncompatFrequencyConfig::*; let frequency = self.frequency.as_ref().unwrap_or(&Always); match frequency { Always => true, Never => false, } } } impl Default for CargoFutureIncompatFrequencyConfig { fn default() -> Self { Self::Always } } /// Configuration for `ssl-version` in `http` section /// There are two ways to configure: /// /// ```text /// [http] /// ssl-version = "tlsv1.3" /// ``` /// /// ```text /// [http] /// ssl-version.min = "tlsv1.2" /// ssl-version.max = "tlsv1.3" /// ``` #[derive(Clone, Debug, Deserialize, PartialEq)] #[serde(untagged)] pub enum SslVersionConfig { Single(String), Range(SslVersionConfigRange), } #[derive(Clone, Debug, Deserialize, PartialEq)] pub struct SslVersionConfigRange { pub min: Option, pub max: Option, } #[derive(Debug, Deserialize)] #[serde(rename_all = "kebab-case")] pub struct 
CargoNetConfig { pub retry: Option, pub offline: Option, pub git_fetch_with_cli: Option, } #[derive(Debug, Deserialize)] #[serde(rename_all = "kebab-case")] pub struct CargoBuildConfig { // deprecated, but preserved for compatibility pub pipelining: Option, pub dep_info_basedir: Option, pub target_dir: Option, pub incremental: Option, pub target: Option, pub jobs: Option, pub rustflags: Option, pub rustdocflags: Option, pub rustc_wrapper: Option, pub rustc_workspace_wrapper: Option, pub rustc: Option, pub rustdoc: Option, pub out_dir: Option, } /// Configuration for `build.target`. /// /// Accepts in the following forms: /// /// ```toml /// target = "a" /// target = ["a"] /// target = ["a", "b"] /// ``` #[derive(Debug, Deserialize)] #[serde(transparent)] pub struct BuildTargetConfig { inner: Value, } #[derive(Debug, Deserialize)] #[serde(untagged)] enum BuildTargetConfigInner { One(String), Many(Vec), } impl BuildTargetConfig { /// Gets values of `build.target` as a list of strings. pub fn values(&self, config: &Config) -> CargoResult> { let map = |s: &String| { if s.ends_with(".json") { // Path to a target specification file (in JSON). // self.inner .definition .root(config) .join(s) .to_str() .expect("must be utf-8 in toml") .to_string() } else { // A string. Probably a target triple. 
s.to_string() } }; let values = match &self.inner.val { BuildTargetConfigInner::One(s) => vec![map(s)], BuildTargetConfigInner::Many(v) => v.iter().map(map).collect(), }; Ok(values) } } #[derive(Deserialize, Default)] struct TermConfig { verbose: Option, quiet: Option, color: Option, #[serde(default)] #[serde(deserialize_with = "progress_or_string")] progress: Option, } #[derive(Debug, Default, Deserialize)] pub struct ProgressConfig { pub when: ProgressWhen, pub width: Option, } #[derive(Debug, Deserialize)] #[serde(rename_all = "lowercase")] pub enum ProgressWhen { Auto, Never, Always, } impl Default for ProgressWhen { fn default() -> ProgressWhen { ProgressWhen::Auto } } fn progress_or_string<'de, D>(deserializer: D) -> Result, D::Error> where D: serde::de::Deserializer<'de>, { struct ProgressVisitor; impl<'de> serde::de::Visitor<'de> for ProgressVisitor { type Value = Option; fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter.write_str("a string (\"auto\" or \"never\") or a table") } fn visit_str(self, s: &str) -> Result where E: serde::de::Error, { match s { "auto" => Ok(Some(ProgressConfig { when: ProgressWhen::Auto, width: None, })), "never" => Ok(Some(ProgressConfig { when: ProgressWhen::Never, width: None, })), "always" => Err(E::custom("\"always\" progress requires a `width` key")), _ => Err(E::unknown_variant(s, &["auto", "never"])), } } fn visit_none(self) -> Result where E: serde::de::Error, { Ok(None) } fn visit_some(self, deserializer: D) -> Result where D: serde::de::Deserializer<'de>, { let pc = ProgressConfig::deserialize(deserializer)?; if let ProgressConfig { when: ProgressWhen::Always, width: None, } = pc { return Err(serde::de::Error::custom( "\"always\" progress requires a `width` key", )); } Ok(Some(pc)) } } deserializer.deserialize_option(ProgressVisitor) } #[derive(Debug, Deserialize)] #[serde(untagged)] enum EnvConfigValueInner { Simple(String), WithOptions { value: String, #[serde(default)] force: bool, 
#[serde(default)] relative: bool, }, } #[derive(Debug, Deserialize)] #[serde(transparent)] pub struct EnvConfigValue { inner: Value, } impl EnvConfigValue { pub fn is_force(&self) -> bool { match self.inner.val { EnvConfigValueInner::Simple(_) => false, EnvConfigValueInner::WithOptions { force, .. } => force, } } pub fn resolve<'a>(&'a self, config: &Config) -> Cow<'a, OsStr> { match self.inner.val { EnvConfigValueInner::Simple(ref s) => Cow::Borrowed(OsStr::new(s.as_str())), EnvConfigValueInner::WithOptions { ref value, relative, .. } => { if relative { let p = self.inner.definition.root(config).join(&value); Cow::Owned(p.into_os_string()) } else { Cow::Borrowed(OsStr::new(value.as_str())) } } } } } pub type EnvConfig = HashMap; /// A type to deserialize a list of strings from a toml file. /// /// Supports deserializing either a whitespace-separated list of arguments in a /// single string or a string list itself. For example these deserialize to /// equivalent values: /// /// ```toml /// a = 'a b c' /// b = ['a', 'b', 'c'] /// ``` #[derive(Debug, Deserialize, Clone)] pub struct StringList(Vec); impl StringList { pub fn as_slice(&self) -> &[String] { &self.0 } } /// StringList automatically merges config values with environment values, /// this instead follows the precedence rules, so that eg. a string list found /// in the environment will be used instead of one in a config file. /// /// This is currently only used by `PathAndArgs` #[derive(Debug, Deserialize)] pub struct UnmergedStringList(Vec); #[macro_export] macro_rules! __shell_print { ($config:expr, $which:ident, $newline:literal, $($arg:tt)*) => ({ let mut shell = $config.shell(); let out = shell.$which(); drop(out.write_fmt(format_args!($($arg)*))); if $newline { drop(out.write_all(b"\n")); } }); } #[macro_export] macro_rules! 
drop_println { ($config:expr) => ( $crate::drop_print!($config, "\n") ); ($config:expr, $($arg:tt)*) => ( $crate::__shell_print!($config, out, true, $($arg)*) ); } #[macro_export] macro_rules! drop_eprintln { ($config:expr) => ( $crate::drop_eprint!($config, "\n") ); ($config:expr, $($arg:tt)*) => ( $crate::__shell_print!($config, err, true, $($arg)*) ); } #[macro_export] macro_rules! drop_print { ($config:expr, $($arg:tt)*) => ( $crate::__shell_print!($config, out, false, $($arg)*) ); } #[macro_export] macro_rules! drop_eprint { ($config:expr, $($arg:tt)*) => ( $crate::__shell_print!($config, err, false, $($arg)*) ); } cargo-0.66.0/src/cargo/util/config/path.rs000066400000000000000000000051721432416201200203040ustar00rootroot00000000000000use super::{Config, UnmergedStringList, Value}; use serde::{de::Error, Deserialize}; use std::path::PathBuf; /// Use with the `get` API to fetch a string that will be converted to a /// `PathBuf`. Relative paths are converted to absolute paths based on the /// location of the config file. #[derive(Debug, Deserialize, PartialEq, Clone)] #[serde(transparent)] pub struct ConfigRelativePath(Value); impl ConfigRelativePath { /// Returns the underlying value. pub fn value(&self) -> &Value { &self.0 } /// Returns the raw underlying configuration value for this key. pub fn raw_value(&self) -> &str { &self.0.val } /// Resolves this configuration-relative path to an absolute path. /// /// This will always return an absolute path where it's relative to the /// location for configuration for this value. pub fn resolve_path(&self, config: &Config) -> PathBuf { self.0.definition.root(config).join(&self.0.val) } /// Resolves this configuration-relative path to either an absolute path or /// something appropriate to execute from `PATH`. /// /// Values which don't look like a filesystem path (don't contain `/` or /// `\`) will be returned as-is, and everything else will fall through to an /// absolute path. 
pub fn resolve_program(&self, config: &Config) -> PathBuf { config.string_to_path(&self.0.val, &self.0.definition) } } /// A config type that is a program to run. /// /// This supports a list of strings like `['/path/to/program', 'somearg']` /// or a space separated string like `'/path/to/program somearg'`. /// /// This expects the first value to be the path to the program to run. /// Subsequent values are strings of arguments to pass to the program. /// /// Typically you should use `ConfigRelativePath::resolve_program` on the path /// to get the actual program. #[derive(Debug, Clone)] pub struct PathAndArgs { pub path: ConfigRelativePath, pub args: Vec, } impl<'de> serde::Deserialize<'de> for PathAndArgs { fn deserialize(deserializer: D) -> Result where D: serde::Deserializer<'de>, { let vsl = Value::::deserialize(deserializer)?; let mut strings = vsl.val.0; if strings.is_empty() { return Err(D::Error::invalid_length(0, &"at least one element")); } let first = strings.remove(0); let crp = Value { val: first, definition: vsl.definition, }; Ok(PathAndArgs { path: ConfigRelativePath(crp), args: strings, }) } } cargo-0.66.0/src/cargo/util/config/target.rs000066400000000000000000000241031432416201200206310ustar00rootroot00000000000000use super::{Config, ConfigKey, ConfigRelativePath, OptValue, PathAndArgs, StringList, CV}; use crate::core::compiler::{BuildOutput, LinkType}; use crate::util::CargoResult; use serde::Deserialize; use std::collections::{BTreeMap, HashMap}; use std::path::PathBuf; use toml_edit::easy as toml; /// Config definition of a `[target.'cfg(…)']` table. /// /// This is a subset of `TargetConfig`. #[derive(Debug, Deserialize)] pub struct TargetCfgConfig { pub runner: OptValue, pub rustflags: OptValue, // This is here just to ignore fields from normal `TargetConfig` because // all `[target]` tables are getting deserialized, whether they start with // `cfg(` or not. 
#[serde(flatten)] pub other: BTreeMap, } /// Config definition of a `[target]` table or `[host]`. #[derive(Debug, Clone)] pub struct TargetConfig { /// Process to run as a wrapper for `cargo run`, `test`, and `bench` commands. pub runner: OptValue, /// Additional rustc flags to pass. pub rustflags: OptValue, /// The path of the linker for this target. pub linker: OptValue, /// Build script override for the given library name. /// /// Any package with a `links` value for the given library name will skip /// running its build script and instead use the given output from the /// config file. pub links_overrides: BTreeMap, } /// Loads all of the `target.'cfg()'` tables. pub(super) fn load_target_cfgs(config: &Config) -> CargoResult> { // Load all [target] tables, filter out the cfg() entries. let mut result = Vec::new(); // Use a BTreeMap so the keys are sorted. This is important for // deterministic ordering of rustflags, which affects fingerprinting and // rebuilds. We may perhaps one day wish to ensure a deterministic // ordering via the order keys were defined in files perhaps. let target: BTreeMap = config.get("target")?; log::debug!("Got all targets {:#?}", target); for (key, cfg) in target { if key.starts_with("cfg(") { // Unfortunately this is not able to display the location of the // unused key. Using config::Value doesn't work. One // solution might be to create a special "Any" type, but I think // that will be quite difficult with the current design. for other_key in cfg.other.keys() { config.shell().warn(format!( "unused key `{}` in [target] config table `{}`", other_key, key ))?; } result.push((key, cfg)); } } Ok(result) } /// Returns true if the `[target]` table should be applied to host targets. 
pub(super) fn get_target_applies_to_host(config: &Config) -> CargoResult { if config.cli_unstable().target_applies_to_host { if let Ok(target_applies_to_host) = config.get::("target-applies-to-host") { Ok(target_applies_to_host) } else { Ok(!config.cli_unstable().host_config) } } else if config.cli_unstable().host_config { anyhow::bail!( "the -Zhost-config flag requires the -Ztarget-applies-to-host flag to be set" ); } else { Ok(true) } } /// Loads a single `[host]` table for the given triple. pub(super) fn load_host_triple(config: &Config, triple: &str) -> CargoResult { if config.cli_unstable().host_config { let host_triple_prefix = format!("host.{}", triple); let host_triple_key = ConfigKey::from_str(&host_triple_prefix); let host_prefix = match config.get_cv(&host_triple_key)? { Some(_) => host_triple_prefix, None => "host".to_string(), }; load_config_table(config, &host_prefix) } else { Ok(TargetConfig { runner: None, rustflags: None, linker: None, links_overrides: BTreeMap::new(), }) } } /// Loads a single `[target]` table for the given triple. pub(super) fn load_target_triple(config: &Config, triple: &str) -> CargoResult { load_config_table(config, &format!("target.{}", triple)) } /// Loads a single table for the given prefix. fn load_config_table(config: &Config, prefix: &str) -> CargoResult { // This needs to get each field individually because it cannot fetch the // struct all at once due to `links_overrides`. Can't use `serde(flatten)` // because it causes serde to use `deserialize_map` which means the config // deserializer does not know which keys to deserialize, which means // environment variables would not work. let runner: OptValue = config.get(&format!("{}.runner", prefix))?; let rustflags: OptValue = config.get(&format!("{}.rustflags", prefix))?; let linker: OptValue = config.get(&format!("{}.linker", prefix))?; // Links do not support environment variables. 
let target_key = ConfigKey::from_str(prefix); let links_overrides = match config.get_table(&target_key)? { Some(links) => parse_links_overrides(&target_key, links.val, config)?, None => BTreeMap::new(), }; Ok(TargetConfig { runner, rustflags, linker, links_overrides, }) } fn parse_links_overrides( target_key: &ConfigKey, links: HashMap, config: &Config, ) -> CargoResult> { let mut links_overrides = BTreeMap::new(); let extra_check_cfg = match config.cli_unstable().check_cfg { Some((_, _, _, output)) => output, None => false, }; for (lib_name, value) in links { // Skip these keys, it shares the namespace with `TargetConfig`. match lib_name.as_str() { // `ar` is a historical thing. "ar" | "linker" | "runner" | "rustflags" => continue, _ => {} } let mut output = BuildOutput::default(); let table = value.table(&format!("{}.{}", target_key, lib_name))?.0; // We require deterministic order of evaluation, so we must sort the pairs by key first. let mut pairs = Vec::new(); for (k, value) in table { pairs.push((k, value)); } pairs.sort_by_key(|p| p.0); for (key, value) in pairs { match key.as_str() { "rustc-flags" => { let flags = value.string(key)?; let whence = format!("target config `{}.{}` (in {})", target_key, key, flags.1); let (paths, links) = BuildOutput::parse_rustc_flags(flags.0, &whence)?; output.library_paths.extend(paths); output.library_links.extend(links); } "rustc-link-lib" => { let list = value.list(key)?; output .library_links .extend(list.iter().map(|v| v.0.clone())); } "rustc-link-search" => { let list = value.list(key)?; output .library_paths .extend(list.iter().map(|v| PathBuf::from(&v.0))); } "rustc-link-arg-cdylib" | "rustc-cdylib-link-arg" => { let args = extra_link_args(LinkType::Cdylib, key, value)?; output.linker_args.extend(args); } "rustc-link-arg-bins" => { let args = extra_link_args(LinkType::Bin, key, value)?; output.linker_args.extend(args); } "rustc-link-arg" => { let args = extra_link_args(LinkType::All, key, value)?; 
output.linker_args.extend(args); } "rustc-link-arg-tests" => { let args = extra_link_args(LinkType::Test, key, value)?; output.linker_args.extend(args); } "rustc-link-arg-benches" => { let args = extra_link_args(LinkType::Bench, key, value)?; output.linker_args.extend(args); } "rustc-link-arg-examples" => { let args = extra_link_args(LinkType::Example, key, value)?; output.linker_args.extend(args); } "rustc-cfg" => { let list = value.list(key)?; output.cfgs.extend(list.iter().map(|v| v.0.clone())); } "rustc-check-cfg" => { if extra_check_cfg { let list = value.list(key)?; output.check_cfgs.extend(list.iter().map(|v| v.0.clone())); } else { config.shell().warn(format!( "target config `{}.{}` requires -Zcheck-cfg=output flag", target_key, key ))?; } } "rustc-env" => { for (name, val) in value.table(key)?.0 { let val = val.string(name)?.0; output.env.push((name.clone(), val.to_string())); } } "warning" | "rerun-if-changed" | "rerun-if-env-changed" => { anyhow::bail!("`{}` is not supported in build script overrides", key); } _ => { let val = value.string(key)?.0; output.metadata.push((key.clone(), val.to_string())); } } } links_overrides.insert(lib_name, output); } Ok(links_overrides) } fn extra_link_args<'a>( link_type: LinkType, key: &str, value: &'a CV, ) -> CargoResult + 'a> { let args = value.list(key)?; Ok(args.iter().map(move |v| (link_type.clone(), v.0.clone()))) } cargo-0.66.0/src/cargo/util/config/value.rs000066400000000000000000000166571432416201200204760ustar00rootroot00000000000000//! Deserialization of a `Value` type which tracks where it was deserialized //! from. //! //! Often Cargo wants to report semantic error information or other sorts of //! error information about configuration keys but it also may wish to indicate //! as an error context where the key was defined as well (to help user //! debugging). The `Value` type here can be used to deserialize a `T` value //! from configuration, but also record where it was deserialized from when it //! 
was read. use crate::util::config::Config; use serde::de; use std::fmt; use std::marker; use std::mem; use std::path::{Path, PathBuf}; /// A type which can be deserialized as a configuration value which records /// where it was deserialized from. #[derive(Debug, PartialEq, Clone)] pub struct Value { /// The inner value that was deserialized. pub val: T, /// The location where `val` was defined in configuration (e.g. file it was /// defined in, env var etc). pub definition: Definition, } pub type OptValue = Option>; // Deserializing `Value` is pretty special, and serde doesn't have built-in // support for this operation. To implement this we extend serde's "data model" // a bit. We configure deserialization of `Value` to basically only work with // our one deserializer using configuration. // // We define that `Value` deserialization asks the deserializer for a very // special struct name and struct field names. In doing so the deserializer will // recognize this and synthesize a magical value for the `definition` field when // we deserialize it. This protocol is how we're able to have a channel of // information flowing from the configuration deserializer into the // deserialization implementation here. // // You'll want to also check out the implementation of `ValueDeserializer` in // `de.rs`. Also note that the names below are intended to be invalid Rust // identifiers to avoid how they might conflict with other valid structures. // Finally the `definition` field is transmitted as a tuple of i32/string, which // is effectively a tagged union of `Definition` itself. pub(crate) const VALUE_FIELD: &str = "$__cargo_private_value"; pub(crate) const DEFINITION_FIELD: &str = "$__cargo_private_definition"; pub(crate) const NAME: &str = "$__cargo_private_Value"; pub(crate) static FIELDS: [&str; 2] = [VALUE_FIELD, DEFINITION_FIELD]; /// Location where a config value is defined. 
#[derive(Clone, Debug, Eq)] pub enum Definition { /// Defined in a `.cargo/config`, includes the path to the file. Path(PathBuf), /// Defined in an environment variable, includes the environment key. Environment(String), /// Passed in on the command line. Cli, } impl Definition { /// Root directory where this is defined. /// /// If from a file, it is the directory above `.cargo/config`. /// CLI and env are the current working directory. pub fn root<'a>(&'a self, config: &'a Config) -> &'a Path { match self { Definition::Path(p) => p.parent().unwrap().parent().unwrap(), Definition::Environment(_) | Definition::Cli => config.cwd(), } } /// Returns true if self is a higher priority to other. /// /// CLI is preferred over environment, which is preferred over files. pub fn is_higher_priority(&self, other: &Definition) -> bool { matches!( (self, other), (Definition::Cli, Definition::Environment(_)) | (Definition::Cli, Definition::Path(_)) | (Definition::Environment(_), Definition::Path(_)) ) } } impl PartialEq for Definition { fn eq(&self, other: &Definition) -> bool { // configuration values are equivalent no matter where they're defined, // but they need to be defined in the same location. For example if // they're defined in the environment that's different than being // defined in a file due to path interpretations. 
mem::discriminant(self) == mem::discriminant(other) } } impl fmt::Display for Definition { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Definition::Path(p) => p.display().fmt(f), Definition::Environment(key) => write!(f, "environment variable `{}`", key), Definition::Cli => write!(f, "--config cli option"), } } } impl<'de, T> de::Deserialize<'de> for Value where T: de::Deserialize<'de>, { fn deserialize(deserializer: D) -> Result, D::Error> where D: de::Deserializer<'de>, { struct ValueVisitor { _marker: marker::PhantomData, } impl<'de, T> de::Visitor<'de> for ValueVisitor where T: de::Deserialize<'de>, { type Value = Value; fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter.write_str("a value") } fn visit_map(self, mut visitor: V) -> Result, V::Error> where V: de::MapAccess<'de>, { let value = visitor.next_key::()?; if value.is_none() { return Err(de::Error::custom("value not found")); } let val: T = visitor.next_value()?; let definition = visitor.next_key::()?; if definition.is_none() { return Err(de::Error::custom("definition not found")); } let definition: Definition = visitor.next_value()?; Ok(Value { val, definition }) } } deserializer.deserialize_struct( NAME, &FIELDS, ValueVisitor { _marker: marker::PhantomData, }, ) } } struct FieldVisitor { expected: &'static str, } impl<'de> de::Visitor<'de> for FieldVisitor { type Value = (); fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter.write_str("a valid value field") } fn visit_str(self, s: &str) -> Result<(), E> where E: de::Error, { if s == self.expected { Ok(()) } else { Err(de::Error::custom("expected field with custom name")) } } } struct ValueKey; impl<'de> de::Deserialize<'de> for ValueKey { fn deserialize(deserializer: D) -> Result where D: de::Deserializer<'de>, { deserializer.deserialize_identifier(FieldVisitor { expected: VALUE_FIELD, })?; Ok(ValueKey) } } struct DefinitionKey; impl<'de> de::Deserialize<'de> for 
DefinitionKey { fn deserialize(deserializer: D) -> Result where D: de::Deserializer<'de>, { deserializer.deserialize_identifier(FieldVisitor { expected: DEFINITION_FIELD, })?; Ok(DefinitionKey) } } impl<'de> de::Deserialize<'de> for Definition { fn deserialize(deserializer: D) -> Result where D: de::Deserializer<'de>, { let (discr, value) = <(u32, String)>::deserialize(deserializer)?; match discr { 0 => Ok(Definition::Path(value.into())), 1 => Ok(Definition::Environment(value)), 2 => Ok(Definition::Cli), _ => panic!("unexpected discriminant {} value {}", discr, value), } } } cargo-0.66.0/src/cargo/util/counter.rs000066400000000000000000000040531432416201200175570ustar00rootroot00000000000000use std::time::Instant; /// A metrics counter storing only latest `N` records. pub struct MetricsCounter { /// Slots to store metrics. slots: [(usize, Instant); N], /// The slot of the oldest record. /// Also the next slot to store the new record. index: usize, } impl MetricsCounter { /// Creates a new counter with an initial value. pub fn new(init: usize, init_at: Instant) -> Self { assert!(N > 0, "number of slots must be greater than zero"); Self { slots: [(init, init_at); N], index: 0, } } /// Adds record to the counter. pub fn add(&mut self, data: usize, added_at: Instant) { self.slots[self.index] = (data, added_at); self.index = (self.index + 1) % N; } /// Calculates per-second average rate of all slots. 
pub fn rate(&self) -> f32 { let latest = self.slots[self.index.checked_sub(1).unwrap_or(N - 1)]; let oldest = self.slots[self.index]; let duration = (latest.1 - oldest.1).as_secs_f32(); let avg = (latest.0 - oldest.0) as f32 / duration; if f32::is_nan(avg) { 0f32 } else { avg } } } #[cfg(test)] mod tests { use super::MetricsCounter; use std::time::{Duration, Instant}; #[test] fn counter() { let now = Instant::now(); let mut counter = MetricsCounter::<3>::new(0, now); assert_eq!(counter.rate(), 0f32); counter.add(1, now + Duration::from_secs(1)); assert_eq!(counter.rate(), 1f32); counter.add(4, now + Duration::from_secs(2)); assert_eq!(counter.rate(), 2f32); counter.add(7, now + Duration::from_secs(3)); assert_eq!(counter.rate(), 3f32); counter.add(12, now + Duration::from_secs(4)); assert_eq!(counter.rate(), 4f32); } #[test] #[should_panic(expected = "number of slots must be greater than zero")] fn counter_zero_slot() { let _counter = MetricsCounter::<0>::new(0, Instant::now()); } } cargo-0.66.0/src/cargo/util/cpu.rs000066400000000000000000000170531432416201200166730ustar00rootroot00000000000000use std::io; pub struct State(imp::State); impl State { /// Captures the current state of all CPUs on the system. /// /// The `State` returned here isn't too meaningful in terms of /// interpretation across platforms, but it can be compared to previous /// states to get a meaningful cross-platform number. pub fn current() -> io::Result { imp::current().map(State) } /// Returns the percentage of time CPUs were idle from the current state /// relative to the previous state, as a percentage from 0.0 to 100.0. /// /// This function will return, as a percentage, the amount of time that the /// entire system was idle between the `previous` state and this own state. /// This can be useful to compare two snapshots in time of CPU usage to see /// how the CPU usage compares between the two. 
pub fn idle_since(&self, previous: &State) -> f64 { imp::pct_idle(&previous.0, &self.0) } } #[cfg(target_os = "linux")] mod imp { use std::{fs, io}; pub struct State { user: u64, nice: u64, system: u64, idle: u64, iowait: u64, irq: u64, softirq: u64, steal: u64, guest: u64, guest_nice: u64, } pub fn current() -> io::Result { let state = fs::read_to_string("/proc/stat")?; (|| { let mut parts = state.lines().next()?.split_whitespace(); if parts.next()? != "cpu" { return None; } Some(State { user: parts.next()?.parse::().ok()?, nice: parts.next()?.parse::().ok()?, system: parts.next()?.parse::().ok()?, idle: parts.next()?.parse::().ok()?, iowait: parts.next()?.parse::().ok()?, irq: parts.next()?.parse::().ok()?, softirq: parts.next()?.parse::().ok()?, steal: parts.next()?.parse::().ok()?, guest: parts.next()?.parse::().ok()?, guest_nice: parts.next()?.parse::().ok()?, }) })() .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "first line of /proc/stat malformed")) } pub fn pct_idle(prev: &State, next: &State) -> f64 { let user = next.user - prev.user; let nice = next.nice - prev.nice; let system = next.system - prev.system; let idle = next.idle - prev.idle; let iowait = next.iowait.saturating_sub(prev.iowait); let irq = next.irq - prev.irq; let softirq = next.softirq - prev.softirq; let steal = next.steal - prev.steal; let guest = next.guest - prev.guest; let guest_nice = next.guest_nice - prev.guest_nice; let total = user + nice + system + idle + iowait + irq + softirq + steal + guest + guest_nice; (idle as f64) / (total as f64) * 100.0 } } #[cfg(target_os = "macos")] #[allow(bad_style)] mod imp { use std::io; use std::ptr; type host_t = u32; type mach_port_t = u32; type vm_map_t = mach_port_t; type vm_offset_t = usize; type vm_size_t = usize; type vm_address_t = vm_offset_t; type processor_flavor_t = i32; type natural_t = u32; type processor_info_array_t = *mut i32; type mach_msg_type_number_t = i32; type kern_return_t = i32; const PROESSOR_CPU_LOAD_INFO: 
processor_flavor_t = 2; const CPU_STATE_USER: usize = 0; const CPU_STATE_SYSTEM: usize = 1; const CPU_STATE_IDLE: usize = 2; const CPU_STATE_NICE: usize = 3; const CPU_STATE_MAX: usize = 4; extern "C" { static mut mach_task_self_: mach_port_t; fn mach_host_self() -> mach_port_t; fn host_processor_info( host: host_t, flavor: processor_flavor_t, out_processor_count: *mut natural_t, out_processor_info: *mut processor_info_array_t, out_processor_infoCnt: *mut mach_msg_type_number_t, ) -> kern_return_t; fn vm_deallocate( target_task: vm_map_t, address: vm_address_t, size: vm_size_t, ) -> kern_return_t; } pub struct State { user: u64, system: u64, idle: u64, nice: u64, } #[repr(C)] struct processor_cpu_load_info_data_t { cpu_ticks: [u32; CPU_STATE_MAX], } pub fn current() -> io::Result { // There's scant little documentation on `host_processor_info` // throughout the internet, so this is just modeled after what everyone // else is doing. For now this is modeled largely after libuv. unsafe { let mut num_cpus_u = 0; let mut cpu_info = ptr::null_mut(); let mut msg_type = 0; let err = host_processor_info( mach_host_self(), PROESSOR_CPU_LOAD_INFO, &mut num_cpus_u, &mut cpu_info, &mut msg_type, ); if err != 0 { return Err(io::Error::last_os_error()); } let mut ret = State { user: 0, system: 0, idle: 0, nice: 0, }; let mut current = cpu_info as *const processor_cpu_load_info_data_t; for _ in 0..num_cpus_u { ret.user += (*current).cpu_ticks[CPU_STATE_USER] as u64; ret.system += (*current).cpu_ticks[CPU_STATE_SYSTEM] as u64; ret.idle += (*current).cpu_ticks[CPU_STATE_IDLE] as u64; ret.nice += (*current).cpu_ticks[CPU_STATE_NICE] as u64; current = current.offset(1); } vm_deallocate(mach_task_self_, cpu_info as vm_address_t, msg_type as usize); Ok(ret) } } pub fn pct_idle(prev: &State, next: &State) -> f64 { let user = next.user - prev.user; let system = next.system - prev.system; let idle = next.idle - prev.idle; let nice = next.nice - prev.nice; let total = user + system + idle + 
nice; (idle as f64) / (total as f64) * 100.0 } } #[cfg(windows)] mod imp { use std::io; use std::mem; use winapi::shared::minwindef::*; use winapi::um::processthreadsapi::*; pub struct State { idle: FILETIME, kernel: FILETIME, user: FILETIME, } pub fn current() -> io::Result { unsafe { let mut ret = mem::zeroed::(); let r = GetSystemTimes(&mut ret.idle, &mut ret.kernel, &mut ret.user); if r != 0 { Ok(ret) } else { Err(io::Error::last_os_error()) } } } pub fn pct_idle(prev: &State, next: &State) -> f64 { fn to_u64(a: &FILETIME) -> u64 { ((a.dwHighDateTime as u64) << 32) | (a.dwLowDateTime as u64) } let idle = to_u64(&next.idle) - to_u64(&prev.idle); let kernel = to_u64(&next.kernel) - to_u64(&prev.kernel); let user = to_u64(&next.user) - to_u64(&prev.user); let total = user + kernel; (idle as f64) / (total as f64) * 100.0 } } #[cfg(not(any(target_os = "linux", target_os = "macos", windows)))] mod imp { use std::io; pub struct State; pub fn current() -> io::Result { Err(io::Error::new( io::ErrorKind::Other, "unsupported platform to learn CPU state", )) } pub fn pct_idle(_prev: &State, _next: &State) -> f64 { unimplemented!() } } cargo-0.66.0/src/cargo/util/dependency_queue.rs000066400000000000000000000217151432416201200214260ustar00rootroot00000000000000//! A graph-like structure used to represent a set of dependencies and in what //! order they should be built. //! //! This structure is used to store the dependency graph and dynamically update //! it to figure out when a dependency should be built. //! //! Dependencies in this queue are represented as a (node, edge) pair. This is //! used to model nodes which produce multiple outputs at different times but //! some nodes may only require one of the outputs and can start before the //! whole node is finished. use std::collections::{HashMap, HashSet}; use std::hash::Hash; #[derive(Debug)] pub struct DependencyQueue { /// A list of all known keys to build. 
/// /// The value of the hash map is list of dependencies which still need to be /// built before the package can be built. Note that the set is dynamically /// updated as more dependencies are built. dep_map: HashMap, V)>, /// A reverse mapping of a package to all packages that depend on that /// package. /// /// This map is statically known and does not get updated throughout the /// lifecycle of the DependencyQueue. /// /// This is sort of like a `HashMap<(N, E), HashSet>` map, but more /// easily indexable with just an `N` reverse_dep_map: HashMap>>, /// The relative priority of this package. Higher values should be scheduled sooner. priority: HashMap, /// An expected cost for building this package. Used to determine priority. cost: HashMap, } impl Default for DependencyQueue { fn default() -> DependencyQueue { DependencyQueue::new() } } impl DependencyQueue { /// Creates a new dependency queue with 0 packages. pub fn new() -> DependencyQueue { DependencyQueue { dep_map: HashMap::new(), reverse_dep_map: HashMap::new(), priority: HashMap::new(), cost: HashMap::new(), } } } impl DependencyQueue { /// Adds a new node and its dependencies to this queue. /// /// The `key` specified is a new node in the dependency graph, and the node /// depend on all the dependencies iterated by `dependencies`. Each /// dependency is a node/edge pair, where edges can be thought of as /// productions from nodes (aka if it's just `()` it's just waiting for the /// node to finish). /// /// An optional `value` can also be associated with `key` which is reclaimed /// when the node is ready to go. /// /// The cost parameter can be used to hint at the relative cost of building /// this node. This implementation does not care about the units of this value, so /// the calling code is free to use whatever they'd like. In general, higher cost /// nodes are expected to take longer to build. 
pub fn queue( &mut self, key: N, value: V, dependencies: impl IntoIterator, cost: usize, ) { assert!(!self.dep_map.contains_key(&key)); let mut my_dependencies = HashSet::new(); for (dep, edge) in dependencies { my_dependencies.insert((dep.clone(), edge.clone())); self.reverse_dep_map .entry(dep) .or_insert_with(HashMap::new) .entry(edge) .or_insert_with(HashSet::new) .insert(key.clone()); } self.dep_map.insert(key.clone(), (my_dependencies, value)); self.cost.insert(key, cost); } /// All nodes have been added, calculate some internal metadata and prepare /// for `dequeue`. pub fn queue_finished(&mut self) { let mut out = HashMap::new(); for key in self.dep_map.keys() { depth(key, &self.reverse_dep_map, &mut out); } self.priority = out .into_iter() .map(|(n, set)| { let total_cost = self.cost[&n] + set.iter().map(|key| self.cost[key]).sum::(); (n, total_cost) }) .collect(); /// Creates a flattened reverse dependency list. For a given key, finds the /// set of nodes which depend on it, including transitively. This is different /// from self.reverse_dep_map because self.reverse_dep_map only maps one level /// of reverse dependencies. fn depth<'a, N: Hash + Eq + Clone, E: Hash + Eq + Clone>( key: &N, map: &HashMap>>, results: &'a mut HashMap>, ) -> &'a HashSet { if results.contains_key(key) { let depth = &results[key]; assert!(!depth.is_empty(), "cycle in DependencyQueue"); return depth; } results.insert(key.clone(), HashSet::new()); let mut set = HashSet::new(); set.insert(key.clone()); for dep in map .get(key) .into_iter() .flat_map(|it| it.values()) .flatten() { set.extend(depth(dep, map, results).iter().cloned()) } let slot = results.get_mut(key).unwrap(); *slot = set; &*slot } } /// Dequeues a package that is ready to be built. /// /// A package is ready to be built when it has 0 un-built dependencies. If /// `None` is returned then no packages are ready to be built. 
pub fn dequeue(&mut self) -> Option<(N, V, usize)> { let (key, priority) = self .dep_map .iter() .filter(|(_, (deps, _))| deps.is_empty()) .map(|(key, _)| (key.clone(), self.priority[key])) .max_by_key(|(_, priority)| *priority)?; let (_, data) = self.dep_map.remove(&key).unwrap(); Some((key, data, priority)) } /// Returns `true` if there are remaining packages to be built. pub fn is_empty(&self) -> bool { self.dep_map.is_empty() } /// Returns the number of remaining packages to be built. pub fn len(&self) -> usize { self.dep_map.len() } /// Indicate that something has finished. /// /// Calling this function indicates that the `node` has produced `edge`. All /// remaining work items which only depend on this node/edge pair are now /// candidates to start their job. /// /// Returns the nodes that are now allowed to be dequeued as a result of /// finishing this node. pub fn finish(&mut self, node: &N, edge: &E) -> Vec<&N> { // hashset let reverse_deps = self.reverse_dep_map.get(node).and_then(|map| map.get(edge)); let reverse_deps = match reverse_deps { Some(deps) => deps, None => return Vec::new(), }; let key = (node.clone(), edge.clone()); let mut result = Vec::new(); for dep in reverse_deps.iter() { let edges = &mut self.dep_map.get_mut(dep).unwrap().0; assert!(edges.remove(&key)); if edges.is_empty() { result.push(dep); } } result } } #[cfg(test)] mod test { use super::DependencyQueue; #[test] fn deep_first_equal_cost() { let mut q = DependencyQueue::new(); q.queue(1, (), vec![], 1); q.queue(2, (), vec![(1, ())], 1); q.queue(3, (), vec![], 1); q.queue(4, (), vec![(2, ()), (3, ())], 1); q.queue(5, (), vec![(4, ()), (3, ())], 1); q.queue_finished(); assert_eq!(q.dequeue(), Some((1, (), 5))); assert_eq!(q.dequeue(), Some((3, (), 4))); assert_eq!(q.dequeue(), None); q.finish(&3, &()); assert_eq!(q.dequeue(), None); q.finish(&1, &()); assert_eq!(q.dequeue(), Some((2, (), 4))); assert_eq!(q.dequeue(), None); q.finish(&2, &()); assert_eq!(q.dequeue(), Some((4, (), 3))); 
assert_eq!(q.dequeue(), None); q.finish(&4, &()); assert_eq!(q.dequeue(), Some((5, (), 2))); } #[test] fn sort_by_highest_cost() { let mut q = DependencyQueue::new(); q.queue(1, (), vec![], 1); q.queue(2, (), vec![(1, ())], 1); q.queue(3, (), vec![], 4); q.queue(4, (), vec![(2, ()), (3, ())], 1); q.queue_finished(); assert_eq!(q.dequeue(), Some((3, (), 9))); assert_eq!(q.dequeue(), Some((1, (), 4))); assert_eq!(q.dequeue(), None); q.finish(&3, &()); assert_eq!(q.dequeue(), None); q.finish(&1, &()); assert_eq!(q.dequeue(), Some((2, (), 3))); assert_eq!(q.dequeue(), None); q.finish(&2, &()); assert_eq!(q.dequeue(), Some((4, (), 2))); assert_eq!(q.dequeue(), None); q.finish(&4, &()); assert_eq!(q.dequeue(), None); } } cargo-0.66.0/src/cargo/util/diagnostic_server.rs000066400000000000000000000235701432416201200216170ustar00rootroot00000000000000//! A small TCP server to handle collection of diagnostics information in a //! cross-platform way for the `cargo fix` command. use std::collections::HashSet; use std::env; use std::io::{BufReader, Read, Write}; use std::net::{Shutdown, SocketAddr, TcpListener, TcpStream}; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Arc; use std::thread::{self, JoinHandle}; use anyhow::{Context, Error}; use cargo_util::ProcessBuilder; use log::warn; use serde::{Deserialize, Serialize}; use crate::core::Edition; use crate::util::errors::CargoResult; use crate::util::Config; const DIAGNOSTICS_SERVER_VAR: &str = "__CARGO_FIX_DIAGNOSTICS_SERVER"; const PLEASE_REPORT_THIS_BUG: &str = "This likely indicates a bug in either rustc or cargo itself,\n\ and we would appreciate a bug report! You're likely to see \n\ a number of compiler warnings after this message which cargo\n\ attempted to fix but failed. 
If you could open an issue at\n\ https://github.com/rust-lang/rust/issues\n\ quoting the full output of this command we'd be very appreciative!\n\ Note that you may be able to make some more progress in the near-term\n\ fixing code with the `--broken-code` flag\n\n\ "; #[derive(Deserialize, Serialize, Hash, Eq, PartialEq, Clone)] pub enum Message { Migrating { file: String, from_edition: Edition, to_edition: Edition, }, Fixing { file: String, }, Fixed { file: String, fixes: u32, }, FixFailed { files: Vec, krate: Option, errors: Vec, abnormal_exit: Option, }, ReplaceFailed { file: String, message: String, }, EditionAlreadyEnabled { message: String, edition: Edition, }, } impl Message { pub fn post(&self) -> Result<(), Error> { let addr = env::var(DIAGNOSTICS_SERVER_VAR).context("diagnostics collector misconfigured")?; let mut client = TcpStream::connect(&addr).context("failed to connect to parent diagnostics target")?; let s = serde_json::to_string(self).context("failed to serialize message")?; client .write_all(s.as_bytes()) .context("failed to write message to diagnostics target")?; client .shutdown(Shutdown::Write) .context("failed to shutdown")?; client .read_to_end(&mut Vec::new()) .context("failed to receive a disconnect")?; Ok(()) } } pub struct DiagnosticPrinter<'a> { config: &'a Config, dedupe: HashSet, } impl<'a> DiagnosticPrinter<'a> { pub fn new(config: &'a Config) -> DiagnosticPrinter<'a> { DiagnosticPrinter { config, dedupe: HashSet::new(), } } pub fn print(&mut self, msg: &Message) -> CargoResult<()> { match msg { Message::Migrating { file, from_edition, to_edition, } => { if !self.dedupe.insert(msg.clone()) { return Ok(()); } self.config.shell().status( "Migrating", &format!("{} from {} edition to {}", file, from_edition, to_edition), ) } Message::Fixing { file } => self .config .shell() .verbose(|shell| shell.status("Fixing", file)), Message::Fixed { file, fixes } => { let msg = if *fixes == 1 { "fix" } else { "fixes" }; let msg = format!("{} ({} 
{})", file, fixes, msg); self.config.shell().status("Fixed", msg) } Message::ReplaceFailed { file, message } => { let msg = format!("error applying suggestions to `{}`\n", file); self.config.shell().warn(&msg)?; write!( self.config.shell().err(), "The full error message was:\n\n> {}\n\n", message, )?; write!(self.config.shell().err(), "{}", PLEASE_REPORT_THIS_BUG)?; Ok(()) } Message::FixFailed { files, krate, errors, abnormal_exit, } => { if let Some(ref krate) = *krate { self.config.shell().warn(&format!( "failed to automatically apply fixes suggested by rustc \ to crate `{}`", krate, ))?; } else { self.config .shell() .warn("failed to automatically apply fixes suggested by rustc")?; } if !files.is_empty() { writeln!( self.config.shell().err(), "\nafter fixes were automatically applied the compiler \ reported errors within these files:\n" )?; for file in files { writeln!(self.config.shell().err(), " * {}", file)?; } writeln!(self.config.shell().err())?; } write!(self.config.shell().err(), "{}", PLEASE_REPORT_THIS_BUG)?; if !errors.is_empty() { writeln!( self.config.shell().err(), "The following errors were reported:" )?; for error in errors { write!(self.config.shell().err(), "{}", error)?; if !error.ends_with('\n') { writeln!(self.config.shell().err())?; } } } if let Some(exit) = abnormal_exit { writeln!( self.config.shell().err(), "rustc exited abnormally: {}", exit )?; } writeln!( self.config.shell().err(), "Original diagnostics will follow.\n" )?; Ok(()) } Message::EditionAlreadyEnabled { message, edition } => { if !self.dedupe.insert(msg.clone()) { return Ok(()); } // Don't give a really verbose warning if it has already been issued. if self.dedupe.insert(Message::EditionAlreadyEnabled { message: "".to_string(), // Dummy, so that this only long-warns once. edition: *edition, }) { self.config.shell().warn(&format!("\ {} If you are trying to migrate from the previous edition ({prev_edition}), the process requires following these steps: 1. 
Start with `edition = \"{prev_edition}\"` in `Cargo.toml` 2. Run `cargo fix --edition` 3. Modify `Cargo.toml` to set `edition = \"{this_edition}\"` 4. Run `cargo build` or `cargo test` to verify the fixes worked More details may be found at https://doc.rust-lang.org/edition-guide/editions/transitioning-an-existing-project-to-a-new-edition.html ", message, this_edition=edition, prev_edition=edition.previous().unwrap() )) } else { self.config.shell().warn(message) } } } } } #[derive(Debug)] pub struct RustfixDiagnosticServer { listener: TcpListener, addr: SocketAddr, } pub struct StartedServer { addr: SocketAddr, done: Arc, thread: Option>, } impl RustfixDiagnosticServer { pub fn new() -> Result { let listener = TcpListener::bind("127.0.0.1:0") .with_context(|| "failed to bind TCP listener to manage locking")?; let addr = listener.local_addr()?; Ok(RustfixDiagnosticServer { listener, addr }) } pub fn configure(&self, process: &mut ProcessBuilder) { process.env(DIAGNOSTICS_SERVER_VAR, self.addr.to_string()); } pub fn start(self, on_message: F) -> Result where F: Fn(Message) + Send + 'static, { let addr = self.addr; let done = Arc::new(AtomicBool::new(false)); let done2 = done.clone(); let thread = thread::spawn(move || { self.run(&on_message, &done2); }); Ok(StartedServer { addr, thread: Some(thread), done, }) } fn run(self, on_message: &dyn Fn(Message), done: &AtomicBool) { while let Ok((client, _)) = self.listener.accept() { if done.load(Ordering::SeqCst) { break; } let mut client = BufReader::new(client); let mut s = String::new(); if let Err(e) = client.read_to_string(&mut s) { warn!("diagnostic server failed to read: {}", e); } else { match serde_json::from_str(&s) { Ok(message) => on_message(message), Err(e) => warn!("invalid diagnostics message: {}", e), } } // The client should be kept alive until after `on_message` is // called to ensure that the client doesn't exit too soon (and // Message::Finish getting posted before Message::FixDiagnostic). 
drop(client); } } } impl Drop for StartedServer { fn drop(&mut self) { self.done.store(true, Ordering::SeqCst); // Ignore errors here as this is largely best-effort if TcpStream::connect(&self.addr).is_err() { return; } drop(self.thread.take().unwrap().join()); } } cargo-0.66.0/src/cargo/util/errors.rs000066400000000000000000000147571432416201200174300ustar00rootroot00000000000000#![allow(unknown_lints)] use anyhow::Error; use std::fmt; use std::path::PathBuf; pub type CargoResult = anyhow::Result; #[derive(Debug)] pub struct HttpNot200 { pub code: u32, pub url: String, } impl fmt::Display for HttpNot200 { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!( f, "failed to get 200 response from `{}`, got {}", self.url, self.code ) } } impl std::error::Error for HttpNot200 {} // ============================================================================= // Verbose error /// An error wrapper for errors that should only be displayed with `--verbose`. /// /// This should only be used in rare cases. When emitting this error, you /// should have a normal error higher up the error-cause chain (like "could /// not compile `foo`"), so at least *something* gets printed without /// `--verbose`. pub struct VerboseError { inner: Error, } impl VerboseError { pub fn new(inner: Error) -> VerboseError { VerboseError { inner } } } impl std::error::Error for VerboseError { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { self.inner.source() } } impl fmt::Debug for VerboseError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.inner.fmt(f) } } impl fmt::Display for VerboseError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.inner.fmt(f) } } // ============================================================================= // Internal error /// An unexpected, internal error. /// /// This should only be used for unexpected errors. It prints a message asking /// the user to file a bug report. 
pub struct InternalError { inner: Error, } impl InternalError { pub fn new(inner: Error) -> InternalError { InternalError { inner } } } impl std::error::Error for InternalError { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { self.inner.source() } } impl fmt::Debug for InternalError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.inner.fmt(f) } } impl fmt::Display for InternalError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.inner.fmt(f) } } // ============================================================================= // Already printed error /// An error that does not need to be printed because it does not add any new /// information to what has already been printed. pub struct AlreadyPrintedError { inner: Error, } impl AlreadyPrintedError { pub fn new(inner: Error) -> Self { AlreadyPrintedError { inner } } } impl std::error::Error for AlreadyPrintedError { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { self.inner.source() } } impl fmt::Debug for AlreadyPrintedError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.inner.fmt(f) } } impl fmt::Display for AlreadyPrintedError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.inner.fmt(f) } } // ============================================================================= // Manifest error /// Error wrapper related to a particular manifest and providing it's path. /// /// This error adds no displayable info of it's own. pub struct ManifestError { cause: Error, manifest: PathBuf, } impl ManifestError { pub fn new>(cause: E, manifest: PathBuf) -> Self { Self { cause: cause.into(), manifest, } } pub fn manifest_path(&self) -> &PathBuf { &self.manifest } /// Returns an iterator over the `ManifestError` chain of causes. /// /// So if this error was not caused by another `ManifestError` this will be empty. 
pub fn manifest_causes(&self) -> ManifestCauses<'_> { ManifestCauses { current: self } } } impl std::error::Error for ManifestError { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { self.cause.source() } } impl fmt::Debug for ManifestError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.cause.fmt(f) } } impl fmt::Display for ManifestError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.cause.fmt(f) } } /// An iterator over the `ManifestError` chain of causes. pub struct ManifestCauses<'a> { current: &'a ManifestError, } impl<'a> Iterator for ManifestCauses<'a> { type Item = &'a ManifestError; fn next(&mut self) -> Option { self.current = self.current.cause.downcast_ref()?; Some(self.current) } } impl<'a> ::std::iter::FusedIterator for ManifestCauses<'a> {} // ============================================================================= // CLI errors pub type CliResult = Result<(), CliError>; #[derive(Debug)] /// The CLI error is the error type used at Cargo's CLI-layer. /// /// All errors from the lib side of Cargo will get wrapped with this error. /// Other errors (such as command-line argument validation) will create this /// directly. pub struct CliError { /// The error to display. This can be `None` in rare cases to exit with a /// code without displaying a message. For example `cargo run -q` where /// the resulting process exits with a nonzero code (on Windows), or an /// external subcommand that exits nonzero (we assume it printed its own /// message). pub error: Option, /// The process exit code. 
pub exit_code: i32, } impl CliError { pub fn new(error: anyhow::Error, code: i32) -> CliError { CliError { error: Some(error), exit_code: code, } } pub fn code(code: i32) -> CliError { CliError { error: None, exit_code: code, } } } impl From for CliError { fn from(err: anyhow::Error) -> CliError { CliError::new(err, 101) } } impl From for CliError { fn from(err: clap::Error) -> CliError { let code = if err.use_stderr() { 1 } else { 0 }; CliError::new(err.into(), code) } } impl From for CliError { fn from(err: std::io::Error) -> CliError { CliError::new(err.into(), 1) } } // ============================================================================= // Construction helpers pub fn internal(error: S) -> anyhow::Error { InternalError::new(anyhow::format_err!("{}", error)).into() } cargo-0.66.0/src/cargo/util/flock.rs000066400000000000000000000337601432416201200172050ustar00rootroot00000000000000use std::fs::{File, OpenOptions}; use std::io; use std::io::{Read, Seek, SeekFrom, Write}; use std::path::{Display, Path, PathBuf}; use crate::util::errors::CargoResult; use crate::util::Config; use anyhow::Context as _; use cargo_util::paths; use sys::*; use termcolor::Color::Cyan; #[derive(Debug)] pub struct FileLock { f: Option, path: PathBuf, state: State, } #[derive(PartialEq, Debug)] enum State { Unlocked, Shared, Exclusive, } impl FileLock { /// Returns the underlying file handle of this lock. pub fn file(&self) -> &File { self.f.as_ref().unwrap() } /// Returns the underlying path that this lock points to. /// /// Note that special care must be taken to ensure that the path is not /// referenced outside the lifetime of this lock. pub fn path(&self) -> &Path { assert_ne!(self.state, State::Unlocked); &self.path } /// Returns the parent path containing this file pub fn parent(&self) -> &Path { assert_ne!(self.state, State::Unlocked); self.path.parent().unwrap() } /// Removes all sibling files to this locked file. 
/// /// This can be useful if a directory is locked with a sentinel file but it /// needs to be cleared out as it may be corrupt. pub fn remove_siblings(&self) -> CargoResult<()> { let path = self.path(); for entry in path.parent().unwrap().read_dir()? { let entry = entry?; if Some(&entry.file_name()[..]) == path.file_name() { continue; } let kind = entry.file_type()?; if kind.is_dir() { paths::remove_dir_all(entry.path())?; } else { paths::remove_file(entry.path())?; } } Ok(()) } } impl Read for FileLock { fn read(&mut self, buf: &mut [u8]) -> io::Result { self.file().read(buf) } } impl Seek for FileLock { fn seek(&mut self, to: SeekFrom) -> io::Result { self.file().seek(to) } } impl Write for FileLock { fn write(&mut self, buf: &[u8]) -> io::Result { self.file().write(buf) } fn flush(&mut self) -> io::Result<()> { self.file().flush() } } impl Drop for FileLock { fn drop(&mut self) { if self.state != State::Unlocked { if let Some(f) = self.f.take() { let _ = unlock(&f); } } } } /// A "filesystem" is intended to be a globally shared, hence locked, resource /// in Cargo. /// /// The `Path` of a filesystem cannot be learned unless it's done in a locked /// fashion, and otherwise functions on this structure are prepared to handle /// concurrent invocations across multiple instances of Cargo. #[derive(Clone, Debug)] pub struct Filesystem { root: PathBuf, } impl Filesystem { /// Creates a new filesystem to be rooted at the given path. pub fn new(path: PathBuf) -> Filesystem { Filesystem { root: path } } /// Like `Path::join`, creates a new filesystem rooted at this filesystem /// joined with the given path. pub fn join>(&self, other: T) -> Filesystem { Filesystem::new(self.root.join(other)) } /// Like `Path::push`, pushes a new path component onto this filesystem. pub fn push>(&mut self, other: T) { self.root.push(other); } /// Consumes this filesystem and returns the underlying `PathBuf`. 
/// /// Note that this is a relatively dangerous operation and should be used /// with great caution!. pub fn into_path_unlocked(self) -> PathBuf { self.root } /// Returns the underlying `Path`. /// /// Note that this is a relatively dangerous operation and should be used /// with great caution!. pub fn as_path_unlocked(&self) -> &Path { &self.root } /// Creates the directory pointed to by this filesystem. /// /// Handles errors where other Cargo processes are also attempting to /// concurrently create this directory. pub fn create_dir(&self) -> CargoResult<()> { paths::create_dir_all(&self.root) } /// Returns an adaptor that can be used to print the path of this /// filesystem. pub fn display(&self) -> Display<'_> { self.root.display() } /// Opens exclusive access to a file, returning the locked version of a /// file. /// /// This function will create a file at `path` if it doesn't already exist /// (including intermediate directories), and then it will acquire an /// exclusive lock on `path`. If the process must block waiting for the /// lock, the `msg` is printed to `config`. /// /// The returned file can be accessed to look at the path and also has /// read/write access to the underlying file. pub fn open_rw

(&self, path: P, config: &Config, msg: &str) -> CargoResult where P: AsRef, { self.open( path.as_ref(), OpenOptions::new().read(true).write(true).create(true), State::Exclusive, config, msg, ) } /// Opens shared access to a file, returning the locked version of a file. /// /// This function will fail if `path` doesn't already exist, but if it does /// then it will acquire a shared lock on `path`. If the process must block /// waiting for the lock, the `msg` is printed to `config`. /// /// The returned file can be accessed to look at the path and also has read /// access to the underlying file. Any writes to the file will return an /// error. pub fn open_ro

(&self, path: P, config: &Config, msg: &str) -> CargoResult where P: AsRef, { self.open( path.as_ref(), OpenOptions::new().read(true), State::Shared, config, msg, ) } fn open( &self, path: &Path, opts: &OpenOptions, state: State, config: &Config, msg: &str, ) -> CargoResult { let path = self.root.join(path); // If we want an exclusive lock then if we fail because of NotFound it's // likely because an intermediate directory didn't exist, so try to // create the directory and then continue. let f = opts .open(&path) .or_else(|e| { if e.kind() == io::ErrorKind::NotFound && state == State::Exclusive { paths::create_dir_all(path.parent().unwrap())?; Ok(opts.open(&path)?) } else { Err(anyhow::Error::from(e)) } }) .with_context(|| format!("failed to open: {}", path.display()))?; match state { State::Exclusive => { acquire(config, msg, &path, &|| try_lock_exclusive(&f), &|| { lock_exclusive(&f) })?; } State::Shared => { acquire(config, msg, &path, &|| try_lock_shared(&f), &|| { lock_shared(&f) })?; } State::Unlocked => {} } Ok(FileLock { f: Some(f), path, state, }) } } impl PartialEq for Filesystem { fn eq(&self, other: &Path) -> bool { self.root == other } } impl PartialEq for Path { fn eq(&self, other: &Filesystem) -> bool { self == other.root } } /// Acquires a lock on a file in a "nice" manner. /// /// Almost all long-running blocking actions in Cargo have a status message /// associated with them as we're not sure how long they'll take. Whenever a /// conflicted file lock happens, this is the case (we're not sure when the lock /// will be released). /// /// This function will acquire the lock on a `path`, printing out a nice message /// to the console if we have to wait for it. It will first attempt to use `try` /// to acquire a lock on the crate, and in the case of contention it will emit a /// status message based on `msg` to `config`'s shell, and then use `block` to /// block waiting to acquire a lock. 
/// /// Returns an error if the lock could not be acquired or if any error other /// than a contention error happens. fn acquire( config: &Config, msg: &str, path: &Path, lock_try: &dyn Fn() -> io::Result<()>, lock_block: &dyn Fn() -> io::Result<()>, ) -> CargoResult<()> { // File locking on Unix is currently implemented via `flock`, which is known // to be broken on NFS. We could in theory just ignore errors that happen on // NFS, but apparently the failure mode [1] for `flock` on NFS is **blocking // forever**, even if the "non-blocking" flag is passed! // // As a result, we just skip all file locks entirely on NFS mounts. That // should avoid calling any `flock` functions at all, and it wouldn't work // there anyway. // // [1]: https://github.com/rust-lang/cargo/issues/2615 if is_on_nfs_mount(path) { return Ok(()); } match lock_try() { Ok(()) => return Ok(()), // In addition to ignoring NFS which is commonly not working we also // just ignore locking on filesystems that look like they don't // implement file locking. 
Err(e) if error_unsupported(&e) => return Ok(()), Err(e) => { if !error_contended(&e) { let e = anyhow::Error::from(e); let cx = format!("failed to lock file: {}", path.display()); return Err(e.context(cx)); } } } let msg = format!("waiting for file lock on {}", msg); config.shell().status_with_color("Blocking", &msg, Cyan)?; lock_block().with_context(|| format!("failed to lock file: {}", path.display()))?; return Ok(()); #[cfg(all(target_os = "linux", not(target_env = "musl")))] fn is_on_nfs_mount(path: &Path) -> bool { use std::ffi::CString; use std::mem; use std::os::unix::prelude::*; let path = match CString::new(path.as_os_str().as_bytes()) { Ok(path) => path, Err(_) => return false, }; unsafe { let mut buf: libc::statfs = mem::zeroed(); let r = libc::statfs(path.as_ptr(), &mut buf); r == 0 && buf.f_type as u32 == libc::NFS_SUPER_MAGIC as u32 } } #[cfg(any(not(target_os = "linux"), target_env = "musl"))] fn is_on_nfs_mount(_path: &Path) -> bool { false } } #[cfg(unix)] mod sys { use std::fs::File; use std::io::{Error, Result}; use std::os::unix::io::AsRawFd; pub(super) fn lock_shared(file: &File) -> Result<()> { flock(file, libc::LOCK_SH) } pub(super) fn lock_exclusive(file: &File) -> Result<()> { flock(file, libc::LOCK_EX) } pub(super) fn try_lock_shared(file: &File) -> Result<()> { flock(file, libc::LOCK_SH | libc::LOCK_NB) } pub(super) fn try_lock_exclusive(file: &File) -> Result<()> { flock(file, libc::LOCK_EX | libc::LOCK_NB) } pub(super) fn unlock(file: &File) -> Result<()> { flock(file, libc::LOCK_UN) } pub(super) fn error_contended(err: &Error) -> bool { err.raw_os_error().map_or(false, |x| x == libc::EWOULDBLOCK) } pub(super) fn error_unsupported(err: &Error) -> bool { match err.raw_os_error() { // Unfortunately, depending on the target, these may or may not be the same. // For targets in which they are the same, the duplicate pattern causes a warning. 
#[allow(unreachable_patterns)] Some(libc::ENOTSUP | libc::EOPNOTSUPP) => true, Some(libc::ENOSYS) => true, _ => false, } } #[cfg(not(target_os = "solaris"))] fn flock(file: &File, flag: libc::c_int) -> Result<()> { let ret = unsafe { libc::flock(file.as_raw_fd(), flag) }; if ret < 0 { Err(Error::last_os_error()) } else { Ok(()) } } #[cfg(target_os = "solaris")] fn flock(file: &File, flag: libc::c_int) -> Result<()> { // Solaris lacks flock(), so simply succeed with a no-op Ok(()) } } #[cfg(windows)] mod sys { use std::fs::File; use std::io::{Error, Result}; use std::mem; use std::os::windows::io::AsRawHandle; use winapi::shared::minwindef::DWORD; use winapi::shared::winerror::{ERROR_INVALID_FUNCTION, ERROR_LOCK_VIOLATION}; use winapi::um::fileapi::{LockFileEx, UnlockFile}; use winapi::um::minwinbase::{LOCKFILE_EXCLUSIVE_LOCK, LOCKFILE_FAIL_IMMEDIATELY}; pub(super) fn lock_shared(file: &File) -> Result<()> { lock_file(file, 0) } pub(super) fn lock_exclusive(file: &File) -> Result<()> { lock_file(file, LOCKFILE_EXCLUSIVE_LOCK) } pub(super) fn try_lock_shared(file: &File) -> Result<()> { lock_file(file, LOCKFILE_FAIL_IMMEDIATELY) } pub(super) fn try_lock_exclusive(file: &File) -> Result<()> { lock_file(file, LOCKFILE_EXCLUSIVE_LOCK | LOCKFILE_FAIL_IMMEDIATELY) } pub(super) fn error_contended(err: &Error) -> bool { err.raw_os_error() .map_or(false, |x| x == ERROR_LOCK_VIOLATION as i32) } pub(super) fn error_unsupported(err: &Error) -> bool { err.raw_os_error() .map_or(false, |x| x == ERROR_INVALID_FUNCTION as i32) } pub(super) fn unlock(file: &File) -> Result<()> { unsafe { let ret = UnlockFile(file.as_raw_handle(), 0, 0, !0, !0); if ret == 0 { Err(Error::last_os_error()) } else { Ok(()) } } } fn lock_file(file: &File, flags: DWORD) -> Result<()> { unsafe { let mut overlapped = mem::zeroed(); let ret = LockFileEx(file.as_raw_handle(), flags, 0, !0, !0, &mut overlapped); if ret == 0 { Err(Error::last_os_error()) } else { Ok(()) } } } } 
cargo-0.66.0/src/cargo/util/graph.rs000066400000000000000000000123621432416201200172030ustar00rootroot00000000000000use std::borrow::Borrow; use std::collections::BTreeSet; use std::fmt; pub struct Graph { nodes: im_rc::OrdMap>, } impl Graph { pub fn new() -> Graph { Graph { nodes: im_rc::OrdMap::new(), } } pub fn add(&mut self, node: N) { self.nodes.entry(node).or_insert_with(im_rc::OrdMap::new); } pub fn link(&mut self, node: N, child: N) -> &mut E { self.nodes .entry(node) .or_insert_with(im_rc::OrdMap::new) .entry(child) .or_insert_with(Default::default) } pub fn contains(&self, k: &Q) -> bool where N: Borrow, Q: Ord + Eq, { self.nodes.contains_key(k) } pub fn edge(&self, from: &N, to: &N) -> Option<&E> { self.nodes.get(from)?.get(to) } pub fn edges(&self, from: &N) -> impl Iterator { self.nodes.get(from).into_iter().flat_map(|x| x.iter()) } /// A topological sort of the `Graph` pub fn sort(&self) -> Vec { let mut ret = Vec::new(); let mut marks = BTreeSet::new(); for node in self.nodes.keys() { self.sort_inner_visit(node, &mut ret, &mut marks); } ret } fn sort_inner_visit(&self, node: &N, dst: &mut Vec, marks: &mut BTreeSet) { if !marks.insert(node.clone()) { return; } for child in self.nodes[node].keys() { self.sort_inner_visit(child, dst, marks); } dst.push(node.clone()); } pub fn iter(&self) -> impl Iterator { self.nodes.keys() } /// Checks if there is a path from `from` to `to`. pub fn is_path_from_to<'a>(&'a self, from: &'a N, to: &'a N) -> bool { let mut stack = vec![from]; let mut seen = BTreeSet::new(); seen.insert(from); while let Some(iter) = stack.pop().and_then(|p| self.nodes.get(p)) { for p in iter.keys() { if p == to { return true; } if seen.insert(p) { stack.push(p); } } } false } /// Resolves one of the paths from the given dependent package down to /// a leaf. /// /// Each element contains a node along with an edge except the first one. /// The representation would look like: /// /// (Node0,) -> (Node1, Edge01) -> (Node2, Edge12)... 
pub fn path_to_bottom<'a>(&'a self, mut pkg: &'a N) -> Vec<(&'a N, Option<&'a E>)> { let mut result = vec![(pkg, None)]; while let Some(p) = self.nodes.get(pkg).and_then(|p| { p.iter() // Note that we can have "cycles" introduced through dev-dependency // edges, so make sure we don't loop infinitely. .find(|&(node, _)| result.iter().all(|p| p.0 != node)) .map(|(node, edge)| (node, Some(edge))) }) { result.push(p); pkg = p.0; } result } /// Resolves one of the paths from the given dependent package up to /// the root. /// /// Each element contains a node along with an edge except the first one. /// The representation would look like: /// /// (Node0,) -> (Node1, Edge01) -> (Node2, Edge12)... pub fn path_to_top<'a>(&'a self, mut pkg: &'a N) -> Vec<(&'a N, Option<&'a E>)> { // Note that this implementation isn't the most robust per se, we'll // likely have to tweak this over time. For now though it works for what // it's used for! let mut result = vec![(pkg, None)]; let first_pkg_depending_on = |pkg, res: &[(&N, Option<&E>)]| { self.nodes .iter() .filter(|(_, adjacent)| adjacent.contains_key(pkg)) // Note that we can have "cycles" introduced through dev-dependency // edges, so make sure we don't loop infinitely. 
.find(|&(node, _)| !res.iter().any(|p| p.0 == node)) .map(|(p, adjacent)| (p, adjacent.get(pkg))) }; while let Some(p) = first_pkg_depending_on(pkg, &result) { result.push(p); pkg = p.0; } result } } impl Default for Graph { fn default() -> Graph { Graph::new() } } impl fmt::Debug for Graph { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { writeln!(fmt, "Graph {{")?; for (n, e) in &self.nodes { writeln!(fmt, " - {}", n)?; for n in e.keys() { writeln!(fmt, " - {}", n)?; } } write!(fmt, "}}")?; Ok(()) } } impl PartialEq for Graph { fn eq(&self, other: &Graph) -> bool { self.nodes.eq(&other.nodes) } } impl Eq for Graph {} impl Clone for Graph { fn clone(&self) -> Graph { Graph { nodes: self.nodes.clone(), } } } cargo-0.66.0/src/cargo/util/hasher.rs000066400000000000000000000010731432416201200173510ustar00rootroot00000000000000//! Implementation of a hasher that produces the same values across releases. //! //! The hasher should be fast and have a low chance of collisions (but is not //! sufficient for cryptographic purposes). 
#![allow(deprecated)] use std::hash::{Hasher, SipHasher}; pub struct StableHasher(SipHasher); impl StableHasher { pub fn new() -> StableHasher { StableHasher(SipHasher::new()) } } impl Hasher for StableHasher { fn finish(&self) -> u64 { self.0.finish() } fn write(&mut self, bytes: &[u8]) { self.0.write(bytes) } } cargo-0.66.0/src/cargo/util/hex.rs000066400000000000000000000012771432416201200166710ustar00rootroot00000000000000use super::StableHasher; use std::fs::File; use std::hash::{Hash, Hasher}; use std::io::Read; pub fn to_hex(num: u64) -> String { hex::encode(num.to_le_bytes()) } pub fn hash_u64(hashable: H) -> u64 { let mut hasher = StableHasher::new(); hashable.hash(&mut hasher); hasher.finish() } pub fn hash_u64_file(mut file: &File) -> std::io::Result { let mut hasher = StableHasher::new(); let mut buf = [0; 64 * 1024]; loop { let n = file.read(&mut buf)?; if n == 0 { break; } hasher.write(&buf[..n]); } Ok(hasher.finish()) } pub fn short_hash(hashable: &H) -> String { to_hex(hash_u64(hashable)) } cargo-0.66.0/src/cargo/util/important_paths.rs000066400000000000000000000026351432416201200213200ustar00rootroot00000000000000use crate::util::errors::CargoResult; use cargo_util::paths; use std::path::{Path, PathBuf}; /// Finds the root `Cargo.toml`. 
pub fn find_root_manifest_for_wd(cwd: &Path) -> CargoResult { let valid_cargo_toml_file_name = "Cargo.toml"; let invalid_cargo_toml_file_name = "cargo.toml"; let mut invalid_cargo_toml_path_exists = false; for current in paths::ancestors(cwd, None) { let manifest = current.join(valid_cargo_toml_file_name); if manifest.exists() { return Ok(manifest); } if current.join(invalid_cargo_toml_file_name).exists() { invalid_cargo_toml_path_exists = true; } } if invalid_cargo_toml_path_exists { anyhow::bail!( "could not find `{}` in `{}` or any parent directory, but found cargo.toml please try to rename it to Cargo.toml", valid_cargo_toml_file_name, cwd.display() ) } else { anyhow::bail!( "could not find `{}` in `{}` or any parent directory", valid_cargo_toml_file_name, cwd.display() ) } } /// Returns the path to the `file` in `pwd`, if it exists. pub fn find_project_manifest_exact(pwd: &Path, file: &str) -> CargoResult { let manifest = pwd.join(file); if manifest.exists() { Ok(manifest) } else { anyhow::bail!("Could not find `{}` in `{}`", file, pwd.display()) } } cargo-0.66.0/src/cargo/util/interning.rs000066400000000000000000000100261432416201200200720ustar00rootroot00000000000000use serde::{Serialize, Serializer}; use std::borrow::Borrow; use std::cmp::Ordering; use std::collections::HashSet; use std::ffi::OsStr; use std::fmt; use std::hash::{Hash, Hasher}; use std::ops::Deref; use std::path::Path; use std::ptr; use std::str; use std::sync::Mutex; fn leak(s: String) -> &'static str { Box::leak(s.into_boxed_str()) } lazy_static::lazy_static! 
{ static ref STRING_CACHE: Mutex> = Mutex::new(HashSet::new()); } #[derive(Clone, Copy)] pub struct InternedString { inner: &'static str, } impl<'a> From<&'a str> for InternedString { fn from(item: &'a str) -> Self { InternedString::new(item) } } impl<'a> From<&'a String> for InternedString { fn from(item: &'a String) -> Self { InternedString::new(item) } } impl From for InternedString { fn from(item: String) -> Self { InternedString::new(&item) } } impl PartialEq for InternedString { fn eq(&self, other: &InternedString) -> bool { ptr::eq(self.as_str(), other.as_str()) } } impl PartialEq for InternedString { fn eq(&self, other: &str) -> bool { *self == other } } impl<'a> PartialEq<&'a str> for InternedString { fn eq(&self, other: &&str) -> bool { **self == **other } } impl Eq for InternedString {} impl InternedString { pub fn new(str: &str) -> InternedString { let mut cache = STRING_CACHE.lock().unwrap(); let s = cache.get(str).cloned().unwrap_or_else(|| { let s = leak(str.to_string()); cache.insert(s); s }); InternedString { inner: s } } pub fn as_str(&self) -> &'static str { self.inner } } impl Deref for InternedString { type Target = str; fn deref(&self) -> &'static str { self.as_str() } } impl AsRef for InternedString { fn as_ref(&self) -> &str { self.as_str() } } impl AsRef for InternedString { fn as_ref(&self) -> &OsStr { self.as_str().as_ref() } } impl AsRef for InternedString { fn as_ref(&self) -> &Path { self.as_str().as_ref() } } impl Hash for InternedString { // N.B., we can't implement this as `identity(self).hash(state)`, // because we use this for on-disk fingerprints and so need // stability across Cargo invocations. fn hash(&self, state: &mut H) { self.as_str().hash(state); } } impl Borrow for InternedString { // If we implement Hash as `identity(self).hash(state)`, // then this will need to be removed. 
fn borrow(&self) -> &str { self.as_str() } } impl fmt::Debug for InternedString { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(self.as_str(), f) } } impl fmt::Display for InternedString { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(self.as_str(), f) } } impl Ord for InternedString { fn cmp(&self, other: &InternedString) -> Ordering { self.as_str().cmp(other.as_str()) } } impl PartialOrd for InternedString { fn partial_cmp(&self, other: &InternedString) -> Option { Some(self.cmp(other)) } } impl Serialize for InternedString { fn serialize(&self, serializer: S) -> Result where S: Serializer, { serializer.serialize_str(self.inner) } } struct InternedStringVisitor; impl<'de> serde::Deserialize<'de> for InternedString { fn deserialize(deserializer: D) -> Result where D: serde::Deserializer<'de>, { deserializer.deserialize_str(InternedStringVisitor) } } impl<'de> serde::de::Visitor<'de> for InternedStringVisitor { type Value = InternedString; fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter.write_str("an String like thing") } fn visit_str(self, v: &str) -> Result where E: serde::de::Error, { Ok(InternedString::new(v)) } } cargo-0.66.0/src/cargo/util/into_url.rs000066400000000000000000000013221432416201200177270ustar00rootroot00000000000000use std::path::{Path, PathBuf}; use url::Url; use crate::util::CargoResult; /// A type that can be converted to a Url pub trait IntoUrl { /// Performs the conversion fn into_url(self) -> CargoResult; } impl<'a> IntoUrl for &'a str { fn into_url(self) -> CargoResult { Url::parse(self).map_err(|s| anyhow::format_err!("invalid url `{}`: {}", self, s)) } } impl<'a> IntoUrl for &'a Path { fn into_url(self) -> CargoResult { Url::from_file_path(self) .map_err(|()| anyhow::format_err!("invalid path url `{}`", self.display())) } } impl<'a> IntoUrl for &'a PathBuf { fn into_url(self) -> CargoResult { self.as_path().into_url() } } 
cargo-0.66.0/src/cargo/util/into_url_with_base.rs000066400000000000000000000025371432416201200217650ustar00rootroot00000000000000use crate::util::{CargoResult, IntoUrl}; use url::Url; /// A type that can be interpreted as a relative Url and converted to /// a Url. pub trait IntoUrlWithBase { /// Performs the conversion fn into_url_with_base(self, base: Option) -> CargoResult; } impl<'a> IntoUrlWithBase for &'a str { fn into_url_with_base(self, base: Option) -> CargoResult { let base_url = match base { Some(base) => Some( base.into_url() .map_err(|s| anyhow::format_err!("invalid url `{}`: {}", self, s))?, ), None => None, }; Url::options() .base_url(base_url.as_ref()) .parse(self) .map_err(|s| anyhow::format_err!("invalid url `{}`: {}", self, s)) } } #[cfg(test)] mod tests { use crate::util::IntoUrlWithBase; #[test] fn into_url_with_base() { assert_eq!( "rel/path" .into_url_with_base(Some("file:///abs/path/")) .unwrap() .to_string(), "file:///abs/path/rel/path" ); assert_eq!( "rel/path" .into_url_with_base(Some("file:///abs/path/popped-file")) .unwrap() .to_string(), "file:///abs/path/rel/path" ); } } cargo-0.66.0/src/cargo/util/job.rs000066400000000000000000000115341432416201200166540ustar00rootroot00000000000000//! Job management (mostly for windows) //! //! Most of the time when you're running cargo you expect Ctrl-C to actually //! terminate the entire tree of processes in play, not just the one at the top //! (cargo). This currently works "by default" on Unix platforms because Ctrl-C //! actually sends a signal to the *process group* rather than the parent //! process, so everything will get torn down. On Windows, however, this does //! not happen and Ctrl-C just kills cargo. //! //! To achieve the same semantics on Windows we use Job Objects to ensure that //! all processes die at the same time. Job objects have a mode of operation //! where when all handles to the object are closed it causes all child //! 
processes associated with the object to be terminated immediately. //! Conveniently whenever a process in the job object spawns a new process the //! child will be associated with the job object as well. This means if we add //! ourselves to the job object we create then everything will get torn down! pub use self::imp::Setup; pub fn setup() -> Option { unsafe { imp::setup() } } #[cfg(unix)] mod imp { use std::env; pub type Setup = (); pub unsafe fn setup() -> Option<()> { // There's a test case for the behavior of // when-cargo-is-killed-subprocesses-are-also-killed, but that requires // one cargo spawned to become its own session leader, so we do that // here. if env::var("__CARGO_TEST_SETSID_PLEASE_DONT_USE_ELSEWHERE").is_ok() { libc::setsid(); } Some(()) } } #[cfg(windows)] mod imp { use std::io; use std::mem; use std::ptr; use log::info; use winapi::shared::minwindef::*; use winapi::um::handleapi::*; use winapi::um::jobapi2::*; use winapi::um::processthreadsapi::*; use winapi::um::winnt::HANDLE; use winapi::um::winnt::*; pub struct Setup { job: Handle, } pub struct Handle { inner: HANDLE, } fn last_err() -> io::Error { io::Error::last_os_error() } pub unsafe fn setup() -> Option { // Creates a new job object for us to use and then adds ourselves to it. // Note that all errors are basically ignored in this function, // intentionally. Job objects are "relatively new" in Windows, // particularly the ability to support nested job objects. Older // Windows installs don't support this ability. We probably don't want // to force Cargo to abort in this situation or force others to *not* // use job objects, so we instead just ignore errors and assume that // we're otherwise part of someone else's job object in this case. let job = CreateJobObjectW(ptr::null_mut(), ptr::null()); if job.is_null() { return None; } let job = Handle { inner: job }; // Indicate that when all handles to the job object are gone that all // process in the object should be killed. 
Note that this includes our // entire process tree by default because we've added ourselves and // our children will reside in the job once we spawn a process. let mut info: JOBOBJECT_EXTENDED_LIMIT_INFORMATION; info = mem::zeroed(); info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE; let r = SetInformationJobObject( job.inner, JobObjectExtendedLimitInformation, &mut info as *mut _ as LPVOID, mem::size_of_val(&info) as DWORD, ); if r == 0 { return None; } // Assign our process to this job object, meaning that our children will // now live or die based on our existence. let me = GetCurrentProcess(); let r = AssignProcessToJobObject(job.inner, me); if r == 0 { return None; } Some(Setup { job }) } impl Drop for Setup { fn drop(&mut self) { // On normal exits (not ctrl-c), we don't want to kill any child // processes. The destructor here configures our job object to // **not** kill everything on close, then closes the job object. unsafe { let mut info: JOBOBJECT_EXTENDED_LIMIT_INFORMATION; info = mem::zeroed(); let r = SetInformationJobObject( self.job.inner, JobObjectExtendedLimitInformation, &mut info as *mut _ as LPVOID, mem::size_of_val(&info) as DWORD, ); if r == 0 { info!("failed to configure job object to defaults: {}", last_err()); } } } } impl Drop for Handle { fn drop(&mut self) { unsafe { CloseHandle(self.inner); } } } } cargo-0.66.0/src/cargo/util/lev_distance.rs000066400000000000000000000056321432416201200205440ustar00rootroot00000000000000use std::cmp; pub fn lev_distance(me: &str, t: &str) -> usize { // Comparing the strings lowercased will result in a difference in capitalization being less distance away // than being a completely different letter. Otherwise `CHECK` is as far away from `check` as it // is from `build` (both with a distance of 5). For a single letter shortcut (e.g. `b` or `c`), they will // all be as far away from any capital single letter entry (all with a distance of 1). 
// By first lowercasing the strings, `C` and `c` are closer than `C` and `b`, for example. let me = me.to_lowercase(); let t = t.to_lowercase(); let t_len = t.chars().count(); if me.is_empty() { return t_len; } if t.is_empty() { return me.chars().count(); } let mut dcol = (0..=t_len).collect::>(); let mut t_last = 0; for (i, sc) in me.chars().enumerate() { let mut current = i; dcol[0] = current + 1; for (j, tc) in t.chars().enumerate() { let next = dcol[j + 1]; if sc == tc { dcol[j + 1] = current; } else { dcol[j + 1] = cmp::min(current, next); dcol[j + 1] = cmp::min(dcol[j + 1], dcol[j]) + 1; } current = next; t_last = j; } } dcol[t_last + 1] } /// Find the closest element from `iter` matching `choice`. The `key` callback /// is used to select a `&str` from the iterator to compare against `choice`. pub fn closest<'a, T>( choice: &str, iter: impl Iterator, key: impl Fn(&T) -> &'a str, ) -> Option { // Only consider candidates with a lev_distance of 3 or less so we don't // suggest out-of-the-blue options. iter.map(|e| (lev_distance(choice, key(&e)), e)) .filter(|&(d, _)| d < 4) .min_by_key(|t| t.0) .map(|t| t.1) } /// Version of `closest` that returns a common "suggestion" that can be tacked /// onto the end of an error message. 
pub fn closest_msg<'a, T>( choice: &str, iter: impl Iterator, key: impl Fn(&T) -> &'a str, ) -> String { match closest(choice, iter, &key) { Some(e) => format!("\n\n\tDid you mean `{}`?", key(&e)), None => String::new(), } } #[test] fn test_lev_distance() { use std::char::{from_u32, MAX}; // Test bytelength agnosticity for c in (0u32..MAX as u32) .filter_map(from_u32) .map(|i| i.to_string()) { assert_eq!(lev_distance(&c, &c), 0); } let a = "\nMΓ€ry hΓ€d Γ€ little lΓ€mb\n\nLittle lΓ€mb\n"; let b = "\nMary hΓ€d Γ€ little lΓ€mb\n\nLittle lΓ€mb\n"; let c = "Mary hΓ€d Γ€ little lΓ€mb\n\nLittle lΓ€mb\n"; assert_eq!(lev_distance(a, b), 1); assert_eq!(lev_distance(b, a), 1); assert_eq!(lev_distance(a, c), 2); assert_eq!(lev_distance(c, a), 2); assert_eq!(lev_distance(b, c), 1); assert_eq!(lev_distance(c, b), 1); } cargo-0.66.0/src/cargo/util/lockserver.rs000066400000000000000000000127141432416201200202620ustar00rootroot00000000000000//! An implementation of IPC locks, guaranteed to be released if a process dies //! //! This module implements a locking server/client where the main `cargo fix` //! process will start up a server and then all the client processes will //! connect to it. The main purpose of this file is to ensure that each crate //! (aka file entry point) is only fixed by one process at a time, currently //! concurrent fixes can't happen. //! //! The basic design here is to use a TCP server which is pretty portable across //! platforms. For simplicity it just uses threads as well. Clients connect to //! the main server, inform the server what its name is, and then wait for the //! server to give it the lock (aka write a byte). 
use std::collections::HashMap; use std::io::{BufRead, BufReader, Read, Write}; use std::net::{SocketAddr, TcpListener, TcpStream}; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::{Arc, Mutex}; use std::thread::{self, JoinHandle}; use anyhow::{Context, Error}; pub struct LockServer { listener: TcpListener, addr: SocketAddr, threads: HashMap, done: Arc, } pub struct LockServerStarted { done: Arc, addr: SocketAddr, thread: Option>, } pub struct LockServerClient { _socket: TcpStream, } struct ServerClient { thread: Option>, lock: Arc)>>, } impl LockServer { pub fn new() -> Result { let listener = TcpListener::bind("127.0.0.1:0") .with_context(|| "failed to bind TCP listener to manage locking")?; let addr = listener.local_addr()?; Ok(LockServer { listener, addr, threads: HashMap::new(), done: Arc::new(AtomicBool::new(false)), }) } pub fn addr(&self) -> &SocketAddr { &self.addr } pub fn start(self) -> Result { let addr = self.addr; let done = self.done.clone(); let thread = thread::spawn(|| { self.run(); }); Ok(LockServerStarted { addr, thread: Some(thread), done, }) } fn run(mut self) { while let Ok((client, _)) = self.listener.accept() { if self.done.load(Ordering::SeqCst) { break; } // Learn the name of our connected client to figure out if it needs // to wait for another process to release the lock. let mut client = BufReader::new(client); let mut name = String::new(); if client.read_line(&mut name).is_err() { continue; } let client = client.into_inner(); // If this "named mutex" is already registered and the thread is // still going, put it on the queue. Otherwise wait on the previous // thread and we'll replace it just below. 
if let Some(t) = self.threads.get_mut(&name) { let mut state = t.lock.lock().unwrap(); if state.0 { state.1.push(client); continue; } drop(t.thread.take().unwrap().join()); } let lock = Arc::new(Mutex::new((true, vec![client]))); let lock2 = lock.clone(); let thread = thread::spawn(move || { loop { let mut client = { let mut state = lock2.lock().unwrap(); if state.1.is_empty() { state.0 = false; break; } else { state.1.remove(0) } }; // Inform this client that it now has the lock and wait for // it to disconnect by waiting for EOF. if client.write_all(&[1]).is_err() { continue; } let mut dst = Vec::new(); drop(client.read_to_end(&mut dst)); } }); self.threads.insert( name, ServerClient { thread: Some(thread), lock, }, ); } } } impl Drop for LockServer { fn drop(&mut self) { for (_, mut client) in self.threads.drain() { if let Some(thread) = client.thread.take() { drop(thread.join()); } } } } impl Drop for LockServerStarted { fn drop(&mut self) { self.done.store(true, Ordering::SeqCst); // Ignore errors here as this is largely best-effort if TcpStream::connect(&self.addr).is_err() { return; } drop(self.thread.take().unwrap().join()); } } impl LockServerClient { pub fn lock(addr: &SocketAddr, name: impl AsRef<[u8]>) -> Result { let mut client = TcpStream::connect(&addr).with_context(|| "failed to connect to parent lock server")?; client .write_all(name.as_ref()) .and_then(|_| client.write_all(b"\n")) .with_context(|| "failed to write to lock server")?; let mut buf = [0]; client .read_exact(&mut buf) .with_context(|| "failed to acquire lock")?; Ok(LockServerClient { _socket: client }) } } cargo-0.66.0/src/cargo/util/machine_message.rs000066400000000000000000000046201432416201200212100ustar00rootroot00000000000000use std::path::{Path, PathBuf}; use serde::ser; use serde::Serialize; use serde_json::{self, json, value::RawValue}; use crate::core::{compiler::CompileMode, PackageId, Target}; pub trait Message: ser::Serialize { fn reason(&self) -> &str; fn 
to_json_string(&self) -> String { let json = serde_json::to_string(self).unwrap(); assert!(json.starts_with("{\"")); let reason = json!(self.reason()); format!("{{\"reason\":{},{}", reason, &json[1..]) } } #[derive(Serialize)] pub struct FromCompiler<'a> { pub package_id: PackageId, pub manifest_path: &'a Path, pub target: &'a Target, pub message: Box, } impl<'a> Message for FromCompiler<'a> { fn reason(&self) -> &str { "compiler-message" } } #[derive(Serialize)] pub struct Artifact<'a> { pub package_id: PackageId, pub manifest_path: PathBuf, pub target: &'a Target, pub profile: ArtifactProfile, pub features: Vec, pub filenames: Vec, pub executable: Option, pub fresh: bool, } impl<'a> Message for Artifact<'a> { fn reason(&self) -> &str { "compiler-artifact" } } /// This is different from the regular `Profile` to maintain backwards /// compatibility (in particular, `test` is no longer in `Profile`, but we /// still want it to be included here). #[derive(Serialize)] pub struct ArtifactProfile { pub opt_level: &'static str, pub debuginfo: Option, pub debug_assertions: bool, pub overflow_checks: bool, pub test: bool, } #[derive(Serialize)] pub struct BuildScript<'a> { pub package_id: PackageId, pub linked_libs: &'a [String], pub linked_paths: &'a [String], pub cfgs: &'a [String], pub env: &'a [(String, String)], pub out_dir: &'a Path, } impl<'a> Message for BuildScript<'a> { fn reason(&self) -> &str { "build-script-executed" } } #[derive(Serialize)] pub struct TimingInfo<'a> { pub package_id: PackageId, pub target: &'a Target, pub mode: CompileMode, pub duration: f64, #[serde(skip_serializing_if = "Option::is_none")] pub rmeta_time: Option, } impl<'a> Message for TimingInfo<'a> { fn reason(&self) -> &str { "timing-info" } } #[derive(Serialize)] pub struct BuildFinished { pub success: bool, } impl Message for BuildFinished { fn reason(&self) -> &str { "build-finished" } } 
cargo-0.66.0/src/cargo/util/mod.rs000066400000000000000000000055671432416201200166720ustar00rootroot00000000000000use std::fmt; use std::time::Duration; pub use self::canonical_url::CanonicalUrl; pub use self::config::{homedir, Config, ConfigValue}; pub(crate) use self::counter::MetricsCounter; pub use self::dependency_queue::DependencyQueue; pub use self::diagnostic_server::RustfixDiagnosticServer; pub use self::errors::CliError; pub use self::errors::{internal, CargoResult, CliResult}; pub use self::flock::{FileLock, Filesystem}; pub use self::graph::Graph; pub use self::hasher::StableHasher; pub use self::hex::{hash_u64, short_hash, to_hex}; pub use self::into_url::IntoUrl; pub use self::into_url_with_base::IntoUrlWithBase; pub use self::lev_distance::{closest, closest_msg, lev_distance}; pub use self::lockserver::{LockServer, LockServerClient, LockServerStarted}; pub use self::progress::{Progress, ProgressStyle}; pub use self::queue::Queue; pub use self::restricted_names::validate_package_name; pub use self::rustc::Rustc; pub use self::semver_ext::{OptVersionReq, VersionExt, VersionReqExt}; pub use self::to_semver::ToSemver; pub use self::vcs::{existing_vcs_repo, FossilRepo, GitRepo, HgRepo, PijulRepo}; pub use self::workspace::{ add_path_args, path_args, print_available_benches, print_available_binaries, print_available_examples, print_available_packages, print_available_tests, }; mod canonical_url; pub mod command_prelude; pub mod config; mod counter; pub mod cpu; mod dependency_queue; pub mod diagnostic_server; pub mod errors; mod flock; pub mod graph; mod hasher; pub mod hex; pub mod important_paths; pub mod interning; pub mod into_url; mod into_url_with_base; pub mod job; pub mod lev_distance; mod lockserver; pub mod machine_message; pub mod network; pub mod profile; mod progress; mod queue; pub mod restricted_names; pub mod rustc; mod semver_ext; pub mod to_semver; pub mod toml; mod vcs; mod workspace; pub fn elapsed(duration: Duration) -> String { let 
secs = duration.as_secs(); if secs >= 60 { format!("{}m {:02}s", secs / 60, secs % 60) } else { format!("{}.{:02}s", secs, duration.subsec_nanos() / 10_000_000) } } pub fn iter_join_onto(mut w: W, iter: I, delim: &str) -> fmt::Result where W: fmt::Write, I: IntoIterator, T: std::fmt::Display, { let mut it = iter.into_iter().peekable(); while let Some(n) = it.next() { write!(w, "{}", n)?; if it.peek().is_some() { write!(w, "{}", delim)?; } } Ok(()) } pub fn iter_join(iter: I, delim: &str) -> String where I: IntoIterator, T: std::fmt::Display, { let mut s = String::new(); let _ = iter_join_onto(&mut s, iter, delim); s } pub fn indented_lines(text: &str) -> String { text.lines() .map(|line| { if line.is_empty() { String::from("\n") } else { format!(" {}\n", line) } }) .collect() } cargo-0.66.0/src/cargo/util/network.rs000066400000000000000000000112711432416201200175710ustar00rootroot00000000000000use anyhow::Error; use crate::util::errors::{CargoResult, HttpNot200}; use crate::util::Config; use std::task::Poll; pub trait PollExt { fn expect(self, msg: &str) -> T; } impl PollExt for Poll { #[track_caller] fn expect(self, msg: &str) -> T { match self { Poll::Ready(val) => val, Poll::Pending => panic!("{}", msg), } } } pub struct Retry<'a> { config: &'a Config, remaining: u32, } impl<'a> Retry<'a> { pub fn new(config: &'a Config) -> CargoResult> { Ok(Retry { config, remaining: config.net_config()?.retry.unwrap_or(2), }) } pub fn r#try(&mut self, f: impl FnOnce() -> CargoResult) -> CargoResult> { match f() { Err(ref e) if maybe_spurious(e) && self.remaining > 0 => { let msg = format!( "spurious network error ({} tries remaining): {}", self.remaining, e.root_cause(), ); self.config.shell().warn(msg)?; self.remaining -= 1; Ok(None) } other => other.map(Some), } } } fn maybe_spurious(err: &Error) -> bool { if let Some(git_err) = err.downcast_ref::() { match git_err.class() { git2::ErrorClass::Net | git2::ErrorClass::Os | git2::ErrorClass::Zlib | git2::ErrorClass::Http => 
return true, _ => (), } } if let Some(curl_err) = err.downcast_ref::() { if curl_err.is_couldnt_connect() || curl_err.is_couldnt_resolve_proxy() || curl_err.is_couldnt_resolve_host() || curl_err.is_operation_timedout() || curl_err.is_recv_error() || curl_err.is_send_error() || curl_err.is_http2_error() || curl_err.is_http2_stream_error() || curl_err.is_ssl_connect_error() || curl_err.is_partial_file() { return true; } } if let Some(not_200) = err.downcast_ref::() { if 500 <= not_200.code && not_200.code < 600 { return true; } } false } /// Wrapper method for network call retry logic. /// /// Retry counts provided by Config object `net.retry`. Config shell outputs /// a warning on per retry. /// /// Closure must return a `CargoResult`. /// /// # Examples /// /// ``` /// # use crate::cargo::util::{CargoResult, Config}; /// # let download_something = || return Ok(()); /// # let config = Config::default().unwrap(); /// use cargo::util::network; /// let cargo_result = network::with_retry(&config, || download_something()); /// ``` pub fn with_retry(config: &Config, mut callback: F) -> CargoResult where F: FnMut() -> CargoResult, { let mut retry = Retry::new(config)?; loop { if let Some(ret) = retry.r#try(&mut callback)? 
{ return Ok(ret); } } } #[test] fn with_retry_repeats_the_call_then_works() { use crate::core::Shell; //Error HTTP codes (5xx) are considered maybe_spurious and will prompt retry let error1 = HttpNot200 { code: 501, url: "Uri".to_string(), } .into(); let error2 = HttpNot200 { code: 502, url: "Uri".to_string(), } .into(); let mut results: Vec> = vec![Ok(()), Err(error1), Err(error2)]; let config = Config::default().unwrap(); *config.shell() = Shell::from_write(Box::new(Vec::new())); let result = with_retry(&config, || results.pop().unwrap()); assert!(result.is_ok()) } #[test] fn with_retry_finds_nested_spurious_errors() { use crate::core::Shell; //Error HTTP codes (5xx) are considered maybe_spurious and will prompt retry //String error messages are not considered spurious let error1 = anyhow::Error::from(HttpNot200 { code: 501, url: "Uri".to_string(), }); let error1 = anyhow::Error::from(error1.context("A non-spurious wrapping err")); let error2 = anyhow::Error::from(HttpNot200 { code: 502, url: "Uri".to_string(), }); let error2 = anyhow::Error::from(error2.context("A second chained error")); let mut results: Vec> = vec![Ok(()), Err(error1), Err(error2)]; let config = Config::default().unwrap(); *config.shell() = Shell::from_write(Box::new(Vec::new())); let result = with_retry(&config, || results.pop().unwrap()); assert!(result.is_ok()) } #[test] fn curle_http2_stream_is_spurious() { let code = curl_sys::CURLE_HTTP2_STREAM; let err = curl::Error::new(code); assert!(maybe_spurious(&err.into())); } cargo-0.66.0/src/cargo/util/profile.rs000066400000000000000000000047571432416201200175530ustar00rootroot00000000000000use std::cell::RefCell; use std::env; use std::fmt; use std::io::{stdout, StdoutLock, Write}; use std::iter::repeat; use std::mem; use std::time; thread_local!(static PROFILE_STACK: RefCell> = RefCell::new(Vec::new())); thread_local!(static MESSAGES: RefCell> = RefCell::new(Vec::new())); type Message = (usize, u64, String); pub struct Profiler { desc: 
String, } fn enabled_level() -> Option { env::var("CARGO_PROFILE").ok().and_then(|s| s.parse().ok()) } pub fn start(desc: T) -> Profiler { if enabled_level().is_none() { return Profiler { desc: String::new(), }; } PROFILE_STACK.with(|stack| stack.borrow_mut().push(time::Instant::now())); Profiler { desc: desc.to_string(), } } impl Drop for Profiler { fn drop(&mut self) { let enabled = match enabled_level() { Some(i) => i, None => return, }; let (start, stack_len) = PROFILE_STACK.with(|stack| { let mut stack = stack.borrow_mut(); let start = stack.pop().unwrap(); (start, stack.len()) }); let duration = start.elapsed(); let duration_ms = duration.as_secs() * 1000 + u64::from(duration.subsec_millis()); let msg = (stack_len, duration_ms, mem::take(&mut self.desc)); MESSAGES.with(|msgs| msgs.borrow_mut().push(msg)); if stack_len == 0 { fn print(lvl: usize, msgs: &[Message], enabled: usize, stdout: &mut StdoutLock<'_>) { if lvl > enabled { return; } let mut last = 0; for (i, &(l, time, ref msg)) in msgs.iter().enumerate() { if l != lvl { continue; } writeln!( stdout, "{} {:6}ms - {}", repeat(" ").take(lvl + 1).collect::(), time, msg ) .expect("printing profiling info to stdout"); print(lvl + 1, &msgs[last..i], enabled, stdout); last = i; } } let stdout = stdout(); MESSAGES.with(|msgs| { let mut msgs = msgs.borrow_mut(); print(0, &msgs, enabled, &mut stdout.lock()); msgs.clear(); }); } } } cargo-0.66.0/src/cargo/util/progress.rs000066400000000000000000000332551432416201200177520ustar00rootroot00000000000000use std::cmp; use std::env; use std::time::{Duration, Instant}; use crate::core::shell::Verbosity; use crate::util::config::ProgressWhen; use crate::util::{CargoResult, Config}; use cargo_util::is_ci; use unicode_width::UnicodeWidthChar; pub struct Progress<'cfg> { state: Option>, } pub enum ProgressStyle { Percentage, Ratio, Indeterminate, } struct Throttle { first: bool, last_update: Instant, } struct State<'cfg> { config: &'cfg Config, format: Format, name: String, 
done: bool, throttle: Throttle, last_line: Option, fixed_width: Option, } struct Format { style: ProgressStyle, max_width: usize, max_print: usize, } impl<'cfg> Progress<'cfg> { pub fn with_style(name: &str, style: ProgressStyle, cfg: &'cfg Config) -> Progress<'cfg> { // report no progress when -q (for quiet) or TERM=dumb are set // or if running on Continuous Integration service like Travis where the // output logs get mangled. let dumb = match env::var("TERM") { Ok(term) => term == "dumb", Err(_) => false, }; let progress_config = cfg.progress_config(); match progress_config.when { ProgressWhen::Always => return Progress::new_priv(name, style, cfg), ProgressWhen::Never => return Progress { state: None }, ProgressWhen::Auto => {} } if cfg.shell().verbosity() == Verbosity::Quiet || dumb || is_ci() { return Progress { state: None }; } Progress::new_priv(name, style, cfg) } fn new_priv(name: &str, style: ProgressStyle, cfg: &'cfg Config) -> Progress<'cfg> { let progress_config = cfg.progress_config(); let width = progress_config .width .or_else(|| cfg.shell().err_width().progress_max_width()); Progress { state: width.map(|n| State { config: cfg, format: Format { style, max_width: n, // 50 gives some space for text after the progress bar, // even on narrow (e.g. 80 char) terminals. max_print: 50, }, name: name.to_string(), done: false, throttle: Throttle::new(), last_line: None, fixed_width: progress_config.width, }), } } pub fn disable(&mut self) { self.state = None; } pub fn is_enabled(&self) -> bool { self.state.is_some() } pub fn new(name: &str, cfg: &'cfg Config) -> Progress<'cfg> { Self::with_style(name, ProgressStyle::Percentage, cfg) } pub fn tick(&mut self, cur: usize, max: usize, msg: &str) -> CargoResult<()> { let s = match &mut self.state { Some(s) => s, None => return Ok(()), }; // Don't update too often as it can cause excessive performance loss // just putting stuff onto the terminal. 
We also want to avoid // flickering by not drawing anything that goes away too quickly. As a // result we've got two branches here: // // 1. If we haven't drawn anything, we wait for a period of time to // actually start drawing to the console. This ensures that // short-lived operations don't flicker on the console. Currently // there's a 500ms delay to when we first draw something. // 2. If we've drawn something, then we rate limit ourselves to only // draw to the console every so often. Currently there's a 100ms // delay between updates. if !s.throttle.allowed() { return Ok(()); } s.tick(cur, max, msg) } pub fn tick_now(&mut self, cur: usize, max: usize, msg: &str) -> CargoResult<()> { match self.state { Some(ref mut s) => s.tick(cur, max, msg), None => Ok(()), } } pub fn update_allowed(&mut self) -> bool { match &mut self.state { Some(s) => s.throttle.allowed(), None => false, } } pub fn print_now(&mut self, msg: &str) -> CargoResult<()> { match &mut self.state { Some(s) => s.print("", msg), None => Ok(()), } } pub fn clear(&mut self) { if let Some(ref mut s) = self.state { s.clear(); } } } impl Throttle { fn new() -> Throttle { Throttle { first: true, last_update: Instant::now(), } } fn allowed(&mut self) -> bool { if self.first { let delay = Duration::from_millis(500); if self.last_update.elapsed() < delay { return false; } } else { let interval = Duration::from_millis(100); if self.last_update.elapsed() < interval { return false; } } self.update(); true } fn update(&mut self) { self.first = false; self.last_update = Instant::now(); } } impl<'cfg> State<'cfg> { fn tick(&mut self, cur: usize, max: usize, msg: &str) -> CargoResult<()> { if self.done { return Ok(()); } if max > 0 && cur == max { self.done = true; } // Write out a pretty header, then the progress bar itself, and then // return back to the beginning of the line for the next print. 
self.try_update_max_width(); if let Some(pbar) = self.format.progress(cur, max) { self.print(&pbar, msg)?; } Ok(()) } fn print(&mut self, prefix: &str, msg: &str) -> CargoResult<()> { self.throttle.update(); self.try_update_max_width(); // make sure we have enough room for the header if self.format.max_width < 15 { return Ok(()); } let mut line = prefix.to_string(); self.format.render(&mut line, msg); while line.len() < self.format.max_width - 15 { line.push(' '); } // Only update if the line has changed. if self.config.shell().is_cleared() || self.last_line.as_ref() != Some(&line) { let mut shell = self.config.shell(); shell.set_needs_clear(false); shell.status_header(&self.name)?; write!(shell.err(), "{}\r", line)?; self.last_line = Some(line); shell.set_needs_clear(true); } Ok(()) } fn clear(&mut self) { // No need to clear if the progress is not currently being displayed. if self.last_line.is_some() && !self.config.shell().is_cleared() { self.config.shell().err_erase_line(); self.last_line = None; } } fn try_update_max_width(&mut self) { if self.fixed_width.is_none() { if let Some(n) = self.config.shell().err_width().progress_max_width() { self.format.max_width = n; } } } } impl Format { fn progress(&self, cur: usize, max: usize) -> Option { assert!(cur <= max); // Render the percentage at the far right and then figure how long the // progress bar is let pct = (cur as f64) / (max as f64); let pct = if !pct.is_finite() { 0.0 } else { pct }; let stats = match self.style { ProgressStyle::Percentage => format!(" {:6.02}%", pct * 100.0), ProgressStyle::Ratio => format!(" {}/{}", cur, max), ProgressStyle::Indeterminate => String::new(), }; let extra_len = stats.len() + 2 /* [ and ] */ + 15 /* status header */; let display_width = match self.width().checked_sub(extra_len) { Some(n) => n, None => return None, }; let mut string = String::with_capacity(self.max_width); string.push('['); let hashes = display_width as f64 * pct; let hashes = hashes as usize; // Draw the 
`===>` if hashes > 0 { for _ in 0..hashes - 1 { string.push('='); } if cur == max { string.push('='); } else { string.push('>'); } } // Draw the empty space we have left to do for _ in 0..(display_width - hashes) { string.push(' '); } string.push(']'); string.push_str(&stats); Some(string) } fn render(&self, string: &mut String, msg: &str) { let mut avail_msg_len = self.max_width - string.len() - 15; let mut ellipsis_pos = 0; if avail_msg_len <= 3 { return; } for c in msg.chars() { let display_width = c.width().unwrap_or(0); if avail_msg_len >= display_width { avail_msg_len -= display_width; string.push(c); if avail_msg_len >= 3 { ellipsis_pos = string.len(); } } else { string.truncate(ellipsis_pos); string.push_str("..."); break; } } } #[cfg(test)] fn progress_status(&self, cur: usize, max: usize, msg: &str) -> Option { let mut ret = self.progress(cur, max)?; self.render(&mut ret, msg); Some(ret) } fn width(&self) -> usize { cmp::min(self.max_width, self.max_print) } } impl<'cfg> Drop for State<'cfg> { fn drop(&mut self) { self.clear(); } } #[test] fn test_progress_status() { let format = Format { style: ProgressStyle::Ratio, max_print: 40, max_width: 60, }; assert_eq!( format.progress_status(0, 4, ""), Some("[ ] 0/4".to_string()) ); assert_eq!( format.progress_status(1, 4, ""), Some("[===> ] 1/4".to_string()) ); assert_eq!( format.progress_status(2, 4, ""), Some("[========> ] 2/4".to_string()) ); assert_eq!( format.progress_status(3, 4, ""), Some("[=============> ] 3/4".to_string()) ); assert_eq!( format.progress_status(4, 4, ""), Some("[===================] 4/4".to_string()) ); assert_eq!( format.progress_status(3999, 4000, ""), Some("[===========> ] 3999/4000".to_string()) ); assert_eq!( format.progress_status(4000, 4000, ""), Some("[=============] 4000/4000".to_string()) ); assert_eq!( format.progress_status(3, 4, ": short message"), Some("[=============> ] 3/4: short message".to_string()) ); assert_eq!( format.progress_status(3, 4, ": msg thats just fit"), 
Some("[=============> ] 3/4: msg thats just fit".to_string()) ); assert_eq!( format.progress_status(3, 4, ": msg that's just fit"), Some("[=============> ] 3/4: msg that's just...".to_string()) ); // combining diacritics have width zero and thus can fit max_width. let zalgo_msg = "zΜΈΜ§Μ’Μ—Ν‰ΜΜ¦ΝΜ±Ν§Ν¦Ν¨Μ‘Μ…ΜŒΝ₯́͒aΜ’Ν¬Ν¨Μ½Ν―Μ…Μ‘Ν₯͋̏̑ͫ̄͒͏̫̝ΜͺΜ€ΝŽΜ±Μ£ΝΜ­ΜžΜ™Μ±Ν™ΝΜ˜Μ­ΝšlΜΆΜ‘Μ›Μ₯̝̰̭̹̯̯̞ΜͺΝ‡Μ±Μ¦Ν™Ν”Μ˜ΜΌΝ‡Ν“ΜˆΝ¨Ν—Ν§Μ“Ν’Ν¦Μ€Μ‡Ν£ΜˆΝ­ΝŠΝ›ΜƒΜ‘Ν’ΜΏΜ•ΝœgΜΈΜ·Μ’Μ©Μ»Μ»ΝšΜ Ν“ΜžΜ₯ΝΝ©ΝŒΜ‘Ν₯ΜŠΜ½Ν‹ΝΜΝŒΝ›ΜΜ‡Μ‘Ν¨ΜΝ…oΝ™Μ³Μ£Ν”Μ°Μ ΜœΝ•Ν•ΜžΜ¦Μ™Μ­ΜœΜ―ΜΉΜ¬Μ»Μ“Ν‘Ν¦Ν‹ΜˆΜ‰ΝŒΜƒΝ―Μ€Μ‚Ν Ν… ΜΈΜ‘ΝŽΜ¦Μ²Μ–Μ€ΜΊΜœΜΜ±Μ°Μ₯Ν”Μ―Μ…ΜΝ¬Μ‚Ν¨Μ‹ΜƒΜ½ΜˆΜΜΎΜ”Μ‡Ν£ΜšΝœΝœhΜ‘Ν«ΜΜ…ΜΏΜΜ€Νœ‰Μ›Ν‡Μ­ΜΉΜ°Μ Ν™ΜžαΊ½ΜΆΜ™ΜΉΜ³Μ–Ν‰ΝŽΜ¦Ν‚Μ‹Μ“ΝΜ”Ν¬ΜΜ€Ν‚ΜŒΝ‘Μ’Ν†ΜšΝœΝ  Ν“Ν“ΜŸΝΜΜ¬ΜΜΜ°Ν“ΝŽΜΌΜ»Ν¦ΝΜΎΜ”Ν’ΜƒΜ“ΝŸΝŸcΜΜ¦ΝΜΊΝˆΝšΜ―Ν•Μ„Μ’ΝΜ‚ΝŠΜŠΝ—ΝŠΝ€Ν£Μ€Ν˜Μ•ΝΝžo̢͍͚͍̣ΜΝŒΝ¦Μ½Μ‘Ν©Μ…Ν̐̽̏͗́͂̅ΝͺΝ mΜ·Μ§Ν–Μ»Ν”Μ₯ΜͺΜ­Ν‰Ν‰Μ€Μ»Ν–Μ©Μ€Ν–Μ˜Ν¦Μ‚ΝŒΜ†Μ‚Ν¦Μ’ΝŠΝ―Ν¬ΝŠΜ‰ΜŒΝ¬ΝΝ‘è̡̹̣͍̜̺̯̫̹̠̀ΜΝŽΝ™Μ―ΝšΜ°ΜΌΝ—ΝΜ€Μ’Ν‚Μ‰Μ€ΜšΝΝžs̡̲͍͙͖ΜͺΝ“Ν“ΜΊΜ±Μ­Μ©Μ£Ν–Μ£Ν€Ν€Ν‚ΜŽΜˆΝ—Ν†Ν¨ΝͺΜ†ΜˆΝ—ΝΝ "; assert_eq!( format.progress_status(3, 4, zalgo_msg), Some("[=============> ] 3/4".to_string() + zalgo_msg) ); // some non-ASCII ellipsize test assert_eq!( format.progress_status(3, 4, "_123456789123456e\u{301}\u{301}8\u{301}90a"), Some("[=============> ] 3/4_123456789123456e\u{301}\u{301}...".to_string()) ); assert_eq!( format.progress_status(3, 4, "οΌšζ―ε€‹ζΌ’ε­—δ½”ζ“šδΊ†ε…©ε€‹ε­—ε…ƒ"), Some("[=============> ] 3/4οΌšζ―ε€‹ζΌ’ε­—δ½”ζ“šδΊ†...".to_string()) ); assert_eq!( // handle breaking at middle of character format.progress_status(3, 4, ":-ζ―ε€‹ζΌ’ε­—δ½”ζ“šδΊ†ε…©ε€‹ε­—ε…ƒ"), Some("[=============> ] 3/4:-ζ―ε€‹ζΌ’ε­—δ½”ζ“šδΊ†...".to_string()) ); } #[test] fn test_progress_status_percentage() { let format = Format { style: ProgressStyle::Percentage, max_print: 40, max_width: 60, }; assert_eq!( format.progress_status(0, 77, ""), Some("[ ] 0.00%".to_string()) ); assert_eq!( format.progress_status(1, 77, ""), Some("[ ] 1.30%".to_string()) ); assert_eq!( 
format.progress_status(76, 77, ""), Some("[=============> ] 98.70%".to_string()) ); assert_eq!( format.progress_status(77, 77, ""), Some("[===============] 100.00%".to_string()) ); } #[test] fn test_progress_status_too_short() { let format = Format { style: ProgressStyle::Percentage, max_print: 25, max_width: 25, }; assert_eq!( format.progress_status(1, 1, ""), Some("[] 100.00%".to_string()) ); let format = Format { style: ProgressStyle::Percentage, max_print: 24, max_width: 24, }; assert_eq!(format.progress_status(1, 1, ""), None); } cargo-0.66.0/src/cargo/util/queue.rs000066400000000000000000000043051432416201200172240ustar00rootroot00000000000000use std::collections::VecDeque; use std::sync::{Condvar, Mutex}; use std::time::Duration; /// A simple, threadsafe, queue of items of type `T` /// /// This is a sort of channel where any thread can push to a queue and any /// thread can pop from a queue. /// /// This supports both bounded and unbounded operations. `push` will never block, /// and allows the queue to grow without bounds. `push_bounded` will block if the /// queue is over capacity, and will resume once there is enough capacity. pub struct Queue { state: Mutex>, popper_cv: Condvar, bounded_cv: Condvar, bound: usize, } struct State { items: VecDeque, } impl Queue { pub fn new(bound: usize) -> Queue { Queue { state: Mutex::new(State { items: VecDeque::new(), }), popper_cv: Condvar::new(), bounded_cv: Condvar::new(), bound, } } pub fn push(&self, item: T) { self.state.lock().unwrap().items.push_back(item); self.popper_cv.notify_one(); } /// Pushes an item onto the queue, blocking if the queue is full. 
pub fn push_bounded(&self, item: T) { let locked_state = self.state.lock().unwrap(); let mut state = self .bounded_cv .wait_while(locked_state, |s| s.items.len() >= self.bound) .unwrap(); state.items.push_back(item); self.popper_cv.notify_one(); } pub fn pop(&self, timeout: Duration) -> Option { let (mut state, result) = self .popper_cv .wait_timeout_while(self.state.lock().unwrap(), timeout, |s| s.items.is_empty()) .unwrap(); if result.timed_out() { None } else { let value = state.items.pop_front()?; if state.items.len() < self.bound { // Assumes threads cannot be canceled. self.bounded_cv.notify_one(); } Some(value) } } pub fn try_pop_all(&self) -> Vec { let mut state = self.state.lock().unwrap(); let result = state.items.drain(..).collect(); self.bounded_cv.notify_all(); result } } cargo-0.66.0/src/cargo/util/restricted_names.rs000066400000000000000000000070251432416201200214350ustar00rootroot00000000000000//! Helpers for validating and checking names like package and crate names. use crate::util::CargoResult; use anyhow::bail; use std::path::Path; /// Returns `true` if the name contains non-ASCII characters. pub fn is_non_ascii_name(name: &str) -> bool { name.chars().any(|ch| ch > '\x7f') } /// A Rust keyword. pub fn is_keyword(name: &str) -> bool { // See https://doc.rust-lang.org/reference/keywords.html [ "Self", "abstract", "as", "async", "await", "become", "box", "break", "const", "continue", "crate", "do", "dyn", "else", "enum", "extern", "false", "final", "fn", "for", "if", "impl", "in", "let", "loop", "macro", "match", "mod", "move", "mut", "override", "priv", "pub", "ref", "return", "self", "static", "struct", "super", "trait", "true", "try", "type", "typeof", "unsafe", "unsized", "use", "virtual", "where", "while", "yield", ] .contains(&name) } /// These names cannot be used on Windows, even with an extension. 
pub fn is_windows_reserved(name: &str) -> bool { [ "con", "prn", "aux", "nul", "com1", "com2", "com3", "com4", "com5", "com6", "com7", "com8", "com9", "lpt1", "lpt2", "lpt3", "lpt4", "lpt5", "lpt6", "lpt7", "lpt8", "lpt9", ] .contains(&name.to_ascii_lowercase().as_str()) } /// An artifact with this name will conflict with one of Cargo's build directories. pub fn is_conflicting_artifact_name(name: &str) -> bool { ["deps", "examples", "build", "incremental"].contains(&name) } /// Check the base requirements for a package name. /// /// This can be used for other things than package names, to enforce some /// level of sanity. Note that package names have other restrictions /// elsewhere. `cargo new` has a few restrictions, such as checking for /// reserved names. crates.io has even more restrictions. pub fn validate_package_name(name: &str, what: &str, help: &str) -> CargoResult<()> { let mut chars = name.chars(); if let Some(ch) = chars.next() { if ch.is_digit(10) { // A specific error for a potentially common case. bail!( "the name `{}` cannot be used as a {}, \ the name cannot start with a digit{}", name, what, help ); } if !(unicode_xid::UnicodeXID::is_xid_start(ch) || ch == '_') { bail!( "invalid character `{}` in {}: `{}`, \ the first character must be a Unicode XID start character \ (most letters or `_`){}", ch, what, name, help ); } } for ch in chars { if !(unicode_xid::UnicodeXID::is_xid_continue(ch) || ch == '-') { bail!( "invalid character `{}` in {}: `{}`, \ characters must be Unicode XID characters \ (numbers, `-`, `_`, or most letters){}", ch, what, name, help ); } } Ok(()) } /// Check the entire path for names reserved in Windows. pub fn is_windows_reserved_path(path: &Path) -> bool { path.iter() .filter_map(|component| component.to_str()) .any(|component| { let stem = component.split('.').next().unwrap(); is_windows_reserved(stem) }) } /// Returns `true` if the name contains any glob pattern wildcards. 
pub fn is_glob_pattern>(name: T) -> bool { name.as_ref().contains(&['*', '?', '[', ']'][..]) } cargo-0.66.0/src/cargo/util/rustc.rs000066400000000000000000000301631432416201200172410ustar00rootroot00000000000000use std::collections::hash_map::HashMap; use std::env; use std::hash::{Hash, Hasher}; use std::path::{Path, PathBuf}; use std::sync::Mutex; use anyhow::Context as _; use cargo_util::{paths, ProcessBuilder, ProcessError}; use log::{debug, info, warn}; use serde::{Deserialize, Serialize}; use crate::util::interning::InternedString; use crate::util::{profile, CargoResult, StableHasher}; /// Information on the `rustc` executable #[derive(Debug)] pub struct Rustc { /// The location of the exe pub path: PathBuf, /// An optional program that will be passed the path of the rust exe as its first argument, and /// rustc args following this. pub wrapper: Option, /// An optional wrapper to be used in addition to `rustc.wrapper` for workspace crates pub workspace_wrapper: Option, /// Verbose version information (the output of `rustc -vV`) pub verbose_version: String, /// The rustc version (`1.23.4-beta.2`), this comes from verbose_version. pub version: semver::Version, /// The host triple (arch-platform-OS), this comes from verbose_version. pub host: InternedString, cache: Mutex, } impl Rustc { /// Runs the compiler at `path` to learn various pieces of information about /// it, with an optional wrapper. /// /// If successful this function returns a description of the compiler along /// with a list of its capabilities. 
pub fn new( path: PathBuf, wrapper: Option, workspace_wrapper: Option, rustup_rustc: &Path, cache_location: Option, ) -> CargoResult { let _p = profile::start("Rustc::new"); let mut cache = Cache::load( wrapper.as_deref(), workspace_wrapper.as_deref(), &path, rustup_rustc, cache_location, ); let mut cmd = ProcessBuilder::new(&path); cmd.arg("-vV"); let verbose_version = cache.cached_output(&cmd, 0)?.0; let extract = |field: &str| -> CargoResult<&str> { verbose_version .lines() .find(|l| l.starts_with(field)) .map(|l| &l[field.len()..]) .ok_or_else(|| { anyhow::format_err!( "`rustc -vV` didn't have a line for `{}`, got:\n{}", field.trim(), verbose_version ) }) }; let host = InternedString::new(extract("host: ")?); let version = semver::Version::parse(extract("release: ")?).with_context(|| { format!( "rustc version does not appear to be a valid semver version, from:\n{}", verbose_version ) })?; Ok(Rustc { path, wrapper, workspace_wrapper, verbose_version, version, host, cache: Mutex::new(cache), }) } /// Gets a process builder set up to use the found rustc version, with a wrapper if `Some`. pub fn process(&self) -> ProcessBuilder { let mut cmd = ProcessBuilder::new(self.path.as_path()).wrapped(self.wrapper.as_ref()); cmd.retry_with_argfile(true); cmd } /// Gets a process builder set up to use the found rustc version, with a wrapper if `Some`. pub fn workspace_process(&self) -> ProcessBuilder { let mut cmd = ProcessBuilder::new(self.path.as_path()) .wrapped(self.workspace_wrapper.as_ref()) .wrapped(self.wrapper.as_ref()); cmd.retry_with_argfile(true); cmd } pub fn process_no_wrapper(&self) -> ProcessBuilder { let mut cmd = ProcessBuilder::new(&self.path); cmd.retry_with_argfile(true); cmd } /// Gets the output for the given command. /// /// This will return the cached value if available, otherwise it will run /// the command and cache the output. /// /// `extra_fingerprint` is extra data to include in the cache fingerprint. 
/// Use this if there is other information about the environment that may /// affect the output that is not part of `cmd`. /// /// Returns a tuple of strings `(stdout, stderr)`. pub fn cached_output( &self, cmd: &ProcessBuilder, extra_fingerprint: u64, ) -> CargoResult<(String, String)> { self.cache .lock() .unwrap() .cached_output(cmd, extra_fingerprint) } } /// It is a well known fact that `rustc` is not the fastest compiler in the /// world. What is less known is that even `rustc --version --verbose` takes /// about a hundred milliseconds! Because we need compiler version info even /// for no-op builds, we cache it here, based on compiler's mtime and rustup's /// current toolchain. /// /// /// #[derive(Debug)] struct Cache { cache_location: Option, dirty: bool, data: CacheData, } #[derive(Serialize, Deserialize, Debug, Default)] struct CacheData { rustc_fingerprint: u64, outputs: HashMap, successes: HashMap, } #[derive(Serialize, Deserialize, Debug)] struct Output { success: bool, status: String, code: Option, stdout: String, stderr: String, } impl Cache { fn load( wrapper: Option<&Path>, workspace_wrapper: Option<&Path>, rustc: &Path, rustup_rustc: &Path, cache_location: Option, ) -> Cache { match ( cache_location, rustc_fingerprint(wrapper, workspace_wrapper, rustc, rustup_rustc), ) { (Some(cache_location), Ok(rustc_fingerprint)) => { let empty = CacheData { rustc_fingerprint, outputs: HashMap::new(), successes: HashMap::new(), }; let mut dirty = true; let data = match read(&cache_location) { Ok(data) => { if data.rustc_fingerprint == rustc_fingerprint { debug!("reusing existing rustc info cache"); dirty = false; data } else { debug!("different compiler, creating new rustc info cache"); empty } } Err(e) => { debug!("failed to read rustc info cache: {}", e); empty } }; return Cache { cache_location: Some(cache_location), dirty, data, }; fn read(path: &Path) -> CargoResult { let json = paths::read(path)?; Ok(serde_json::from_str(&json)?) 
} } (_, fingerprint) => { if let Err(e) = fingerprint { warn!("failed to calculate rustc fingerprint: {}", e); } debug!("rustc info cache disabled"); Cache { cache_location: None, dirty: false, data: CacheData::default(), } } } } fn cached_output( &mut self, cmd: &ProcessBuilder, extra_fingerprint: u64, ) -> CargoResult<(String, String)> { let key = process_fingerprint(cmd, extra_fingerprint); if self.data.outputs.contains_key(&key) { debug!("rustc info cache hit"); } else { debug!("rustc info cache miss"); debug!("running {}", cmd); let output = cmd.output()?; let stdout = String::from_utf8(output.stdout) .map_err(|e| anyhow::anyhow!("{}: {:?}", e, e.as_bytes())) .with_context(|| format!("`{}` didn't return utf8 output", cmd))?; let stderr = String::from_utf8(output.stderr) .map_err(|e| anyhow::anyhow!("{}: {:?}", e, e.as_bytes())) .with_context(|| format!("`{}` didn't return utf8 output", cmd))?; self.data.outputs.insert( key, Output { success: output.status.success(), status: if output.status.success() { String::new() } else { cargo_util::exit_status_to_string(output.status) }, code: output.status.code(), stdout, stderr, }, ); self.dirty = true; } let output = &self.data.outputs[&key]; if output.success { Ok((output.stdout.clone(), output.stderr.clone())) } else { Err(ProcessError::new_raw( &format!("process didn't exit successfully: {}", cmd), output.code, &output.status, Some(output.stdout.as_ref()), Some(output.stderr.as_ref()), ) .into()) } } } impl Drop for Cache { fn drop(&mut self) { if !self.dirty { return; } if let Some(ref path) = self.cache_location { let json = serde_json::to_string(&self.data).unwrap(); match paths::write(path, json.as_bytes()) { Ok(()) => info!("updated rustc info cache"), Err(e) => warn!("failed to update rustc info cache: {}", e), } } } } fn rustc_fingerprint( wrapper: Option<&Path>, workspace_wrapper: Option<&Path>, rustc: &Path, rustup_rustc: &Path, ) -> CargoResult { let mut hasher = StableHasher::new(); let hash_exe = 
|hasher: &mut _, path| -> CargoResult<()> { let path = paths::resolve_executable(path)?; path.hash(hasher); paths::mtime(&path)?.hash(hasher); Ok(()) }; hash_exe(&mut hasher, rustc)?; if let Some(wrapper) = wrapper { hash_exe(&mut hasher, wrapper)?; } if let Some(workspace_wrapper) = workspace_wrapper { hash_exe(&mut hasher, workspace_wrapper)?; } // Rustup can change the effective compiler without touching // the `rustc` binary, so we try to account for this here. // If we see rustup's env vars, we mix them into the fingerprint, // but we also mix in the mtime of the actual compiler (and not // the rustup shim at `~/.cargo/bin/rustup`), because `RUSTUP_TOOLCHAIN` // could be just `stable-x86_64-unknown-linux-gnu`, i.e, it could // not mention the version of Rust at all, which changes after // `rustup update`. // // If we don't see rustup env vars, but it looks like the compiler // is managed by rustup, we conservatively bail out. let maybe_rustup = rustup_rustc == rustc; match ( maybe_rustup, env::var("RUSTUP_HOME"), env::var("RUSTUP_TOOLCHAIN"), ) { (_, Ok(rustup_home), Ok(rustup_toolchain)) => { debug!("adding rustup info to rustc fingerprint"); rustup_toolchain.hash(&mut hasher); rustup_home.hash(&mut hasher); let real_rustc = Path::new(&rustup_home) .join("toolchains") .join(rustup_toolchain) .join("bin") .join("rustc") .with_extension(env::consts::EXE_EXTENSION); paths::mtime(&real_rustc)?.hash(&mut hasher); } (true, _, _) => anyhow::bail!("probably rustup rustc, but without rustup's env vars"), _ => (), } Ok(hasher.finish()) } fn process_fingerprint(cmd: &ProcessBuilder, extra_fingerprint: u64) -> u64 { let mut hasher = StableHasher::new(); extra_fingerprint.hash(&mut hasher); cmd.get_args().for_each(|arg| arg.hash(&mut hasher)); let mut env = cmd.get_envs().iter().collect::>(); env.sort_unstable(); env.hash(&mut hasher); hasher.finish() } 
cargo-0.66.0/src/cargo/util/semver_ext.rs000066400000000000000000000101631432416201200202600ustar00rootroot00000000000000use semver::{Comparator, Op, Version, VersionReq};
use std::fmt::{self, Display};

/// A version requirement that may also be "anything" or locked to one version.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
pub enum OptVersionReq {
    /// Matches every version.
    Any,
    /// An ordinary semver requirement.
    Req(VersionReq),
    /// The exact locked version and the original version requirement.
    Locked(Version, VersionReq),
}

/// Extension methods for [`semver::Version`].
pub trait VersionExt {
    /// Returns `true` if this version has a pre-release component (e.g. `-beta.1`).
    fn is_prerelease(&self) -> bool;
}

/// Extension methods for [`semver::VersionReq`].
pub trait VersionReqExt {
    /// Builds a requirement that matches exactly `version`.
    fn exact(version: &Version) -> Self;
}

impl VersionExt for Version {
    fn is_prerelease(&self) -> bool {
        !self.pre.is_empty()
    }
}

impl VersionReqExt for VersionReq {
    fn exact(version: &Version) -> Self {
        // A single `=major.minor.patch-pre` comparator pins one version.
        VersionReq {
            comparators: vec![Comparator {
                op: Op::Exact,
                major: version.major,
                minor: Some(version.minor),
                patch: Some(version.patch),
                pre: version.pre.clone(),
            }],
        }
    }
}

impl OptVersionReq {
    /// Creates a requirement matching exactly `version`.
    pub fn exact(version: &Version) -> Self {
        OptVersionReq::Req(VersionReq::exact(version))
    }

    /// Returns `true` if this requirement can only match a single version.
    pub fn is_exact(&self) -> bool {
        match self {
            OptVersionReq::Any => false,
            OptVersionReq::Req(req) => {
                // Exact means one `=` comparator with minor and patch present;
                // `=1` or `=1.2` alone would still match a range.
                req.comparators.len() == 1 && {
                    let cmp = &req.comparators[0];
                    cmp.op == Op::Exact && cmp.minor.is_some() && cmp.patch.is_some()
                }
            }
            OptVersionReq::Locked(..) => true,
        }
    }

    /// Locks this requirement to `version`, keeping the original requirement
    /// for display purposes.
    ///
    /// Panics if `version` does not satisfy the current requirement.
    pub fn lock_to(&mut self, version: &Version) {
        assert!(self.matches(version), "cannot lock {} to {}", self, version);
        use OptVersionReq::*;
        let version = version.clone();
        *self = match self {
            // `Any` has no original requirement to preserve; use `*`.
            Any => Locked(version, VersionReq::STAR),
            Req(req) => Locked(version, req.clone()),
            // Re-locking keeps the original (pre-lock) requirement.
            Locked(_, req) => Locked(version, req.clone()),
        };
    }

    /// Returns `true` if this requirement has been locked to a version.
    pub fn is_locked(&self) -> bool {
        matches!(self, OptVersionReq::Locked(..))
    }

    /// Gets the version to which this req is locked, if any.
pub fn locked_version(&self) -> Option<&Version> { match self { OptVersionReq::Locked(version, _) => Some(version), _ => None, } } pub fn matches(&self, version: &Version) -> bool { match self { OptVersionReq::Any => true, OptVersionReq::Req(req) => req.matches(version), OptVersionReq::Locked(v, _) => { v.major == version.major && v.minor == version.minor && v.patch == version.patch && v.pre == version.pre } } } } impl Display for OptVersionReq { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { OptVersionReq::Any => f.write_str("*"), OptVersionReq::Req(req) => Display::fmt(req, f), OptVersionReq::Locked(_, req) => Display::fmt(req, f), } } } impl From for OptVersionReq { fn from(req: VersionReq) -> Self { OptVersionReq::Req(req) } } #[cfg(test)] mod tests { use super::*; #[test] fn locked_has_the_same_with_exact() { fn test_versions(target_ver: &str, vers: &[&str]) { let ver = Version::parse(target_ver).unwrap(); let exact = OptVersionReq::exact(&ver); let mut locked = exact.clone(); locked.lock_to(&ver); for v in vers { let v = Version::parse(v).unwrap(); assert_eq!(exact.matches(&v), locked.matches(&v)); } } test_versions( "1.0.0", &["1.0.0", "1.0.1", "0.9.9", "0.10.0", "0.1.0", "1.0.0-pre"], ); test_versions("0.9.0", &["0.9.0", "0.9.1", "1.9.0", "0.0.9", "0.9.0-pre"]); test_versions("0.0.2", &["0.0.2", "0.0.1", "0.0.3", "0.0.2-pre"]); test_versions( "0.1.0-beta2.a", &[ "0.1.0-beta2.a", "0.9.1", "0.1.0", "0.1.1-beta2.a", "0.1.0-beta2", ], ); test_versions("0.1.0+meta", &["0.1.0", "0.1.0+meta", "0.1.0+any"]); } } cargo-0.66.0/src/cargo/util/to_semver.rs000066400000000000000000000013371432416201200201050ustar00rootroot00000000000000use crate::util::errors::CargoResult; use semver::Version; pub trait ToSemver { fn to_semver(self) -> CargoResult; } impl ToSemver for Version { fn to_semver(self) -> CargoResult { Ok(self) } } impl<'a> ToSemver for &'a str { fn to_semver(self) -> CargoResult { match Version::parse(self.trim()) { Ok(v) => Ok(v), 
Err(..) => Err(anyhow::format_err!("cannot parse '{}' as a semver", self)), } } } impl<'a> ToSemver for &'a String { fn to_semver(self) -> CargoResult { (**self).to_semver() } } impl<'a> ToSemver for &'a Version { fn to_semver(self) -> CargoResult { Ok(self.clone()) } } cargo-0.66.0/src/cargo/util/toml/000077500000000000000000000000001432416201200165035ustar00rootroot00000000000000cargo-0.66.0/src/cargo/util/toml/mod.rs000066400000000000000000003202231432416201200176320ustar00rootroot00000000000000use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; use std::fmt; use std::marker::PhantomData; use std::path::{Path, PathBuf}; use std::rc::Rc; use std::str::{self, FromStr}; use anyhow::{anyhow, bail, Context as _}; use cargo_platform::Platform; use cargo_util::paths; use lazycell::LazyCell; use log::{debug, trace}; use semver::{self, VersionReq}; use serde::de; use serde::ser; use serde::{Deserialize, Serialize}; use toml_edit::easy as toml; use url::Url; use crate::core::compiler::{CompileKind, CompileTarget}; use crate::core::dependency::{Artifact, ArtifactTarget, DepKind}; use crate::core::manifest::{ManifestMetadata, TargetSourcePath, Warnings}; use crate::core::resolver::ResolveBehavior; use crate::core::{ find_workspace_root, resolve_relative_path, Dependency, Manifest, PackageId, Summary, Target, }; use crate::core::{Edition, EitherManifest, Feature, Features, VirtualManifest, Workspace}; use crate::core::{GitReference, PackageIdSpec, SourceId, WorkspaceConfig, WorkspaceRootConfig}; use crate::sources::{CRATES_IO_INDEX, CRATES_IO_REGISTRY}; use crate::util::errors::{CargoResult, ManifestError}; use crate::util::interning::InternedString; use crate::util::{ self, config::ConfigRelativePath, validate_package_name, Config, IntoUrl, VersionReqExt, }; mod targets; use self::targets::targets; pub use toml_edit::de::Error as TomlDeError; pub use toml_edit::TomlError as TomlEditError; /// Loads a `Cargo.toml` from a file on disk. 
/// /// This could result in a real or virtual manifest being returned. /// /// A list of nested paths is also returned, one for each path dependency /// within the manifest. For virtual manifests, these paths can only /// come from patched or replaced dependencies. These paths are not /// canonicalized. pub fn read_manifest( path: &Path, source_id: SourceId, config: &Config, ) -> Result<(EitherManifest, Vec), ManifestError> { trace!( "read_manifest; path={}; source-id={}", path.display(), source_id ); let contents = paths::read(path).map_err(|err| ManifestError::new(err, path.into()))?; read_manifest_from_str(&contents, path, source_id, config) .with_context(|| format!("failed to parse manifest at `{}`", path.display())) .map_err(|err| ManifestError::new(err, path.into())) } /// Parse an already-loaded `Cargo.toml` as a Cargo manifest. /// /// This could result in a real or virtual manifest being returned. /// /// A list of nested paths is also returned, one for each path dependency /// within the manifest. For virtual manifests, these paths can only /// come from patched or replaced dependencies. These paths are not /// canonicalized. pub fn read_manifest_from_str( contents: &str, manifest_file: &Path, source_id: SourceId, config: &Config, ) -> CargoResult<(EitherManifest, Vec)> { let package_root = manifest_file.parent().unwrap(); let toml = { let pretty_filename = manifest_file .strip_prefix(config.cwd()) .unwrap_or(manifest_file); parse_document(contents, pretty_filename, config)? }; // Provide a helpful error message for a common user error. 
if let Some(package) = toml.get("package").or_else(|| toml.get("project")) { if let Some(feats) = package.get("cargo-features") { let mut feats = feats.clone(); if let Some(value) = feats.as_value_mut() { // Only keep formatting inside of the `[]` and not formatting around it value.decor_mut().clear(); } bail!( "cargo-features = {} was found in the wrong location: it \ should be set at the top of Cargo.toml before any tables", feats.to_string() ); } } let mut unused = BTreeSet::new(); let manifest: TomlManifest = serde_ignored::deserialize(toml, |path| { let mut key = String::new(); stringify(&mut key, &path); unused.insert(key); })?; let add_unused = |warnings: &mut Warnings| { for key in unused { warnings.add_warning(format!("unused manifest key: {}", key)); if key == "profiles.debug" { warnings.add_warning("use `[profile.dev]` to configure debug builds".to_string()); } } }; let manifest = Rc::new(manifest); if let Some(deps) = manifest .workspace .as_ref() .and_then(|ws| ws.dependencies.as_ref()) { for (name, dep) in deps { if dep.is_optional() { bail!( "{} is optional, but workspace dependencies cannot be optional", name ); } if let TomlDependency::Workspace(_) = dep { bail!( "{} was specified as `workspace.dependencies.{}.workspace = true`, but \ workspace dependencies cannot specify `workspace = true`", name, name ); } } } return if manifest.project.is_some() || manifest.package.is_some() { let (mut manifest, paths) = TomlManifest::to_real_manifest(&manifest, source_id, package_root, config)?; add_unused(manifest.warnings_mut()); if manifest.targets().iter().all(|t| t.is_custom_build()) { bail!( "no targets specified in the manifest\n\ either src/lib.rs, src/main.rs, a [lib] section, or \ [[bin]] section must be present" ) } Ok((EitherManifest::Real(manifest), paths)) } else { let (mut m, paths) = TomlManifest::to_virtual_manifest(&manifest, source_id, package_root, config)?; add_unused(m.warnings_mut()); Ok((EitherManifest::Virtual(m), paths)) }; fn 
stringify(dst: &mut String, path: &serde_ignored::Path<'_>) { use serde_ignored::Path; match *path { Path::Root => {} Path::Seq { parent, index } => { stringify(dst, parent); if !dst.is_empty() { dst.push('.'); } dst.push_str(&index.to_string()); } Path::Map { parent, ref key } => { stringify(dst, parent); if !dst.is_empty() { dst.push('.'); } dst.push_str(key); } Path::Some { parent } | Path::NewtypeVariant { parent } | Path::NewtypeStruct { parent } => stringify(dst, parent), } } } /// Attempts to parse a string into a [`toml::Value`]. This is not specific to any /// particular kind of TOML file. /// /// The purpose of this wrapper is to detect invalid TOML which was previously /// accepted and display a warning to the user in that case. The `file` and `config` /// parameters are only used by this fallback path. pub fn parse(toml: &str, _file: &Path, _config: &Config) -> CargoResult { // At the moment, no compatibility checks are needed. toml.parse() .map_err(|e| anyhow::Error::from(e).context("could not parse input as TOML")) } pub fn parse_document( toml: &str, _file: &Path, _config: &Config, ) -> CargoResult { // At the moment, no compatibility checks are needed. toml.parse() .map_err(|e| anyhow::Error::from(e).context("could not parse input as TOML")) } /// Warn about paths that have been deprecated and may conflict. fn warn_on_deprecated(new_path: &str, name: &str, kind: &str, warnings: &mut Vec) { let old_path = new_path.replace("-", "_"); warnings.push(format!( "conflicting between `{new_path}` and `{old_path}` in the `{name}` {kind}.\n `{old_path}` is ignored and not recommended for use in the future" )) } type TomlLibTarget = TomlTarget; type TomlBinTarget = TomlTarget; type TomlExampleTarget = TomlTarget; type TomlTestTarget = TomlTarget; type TomlBenchTarget = TomlTarget; #[derive(Clone, Debug, Serialize)] #[serde(untagged)] pub enum TomlDependency { /// In the simple format, only a version is specified, eg. 
/// `package = ""` Simple(String), /// `package.workspace = true` Workspace(TomlWorkspaceDependency), /// The simple format is equivalent to a detailed dependency /// specifying only a version, eg. /// `package = { version = "" }` Detailed(DetailedTomlDependency

), } impl<'de, P: Deserialize<'de> + Clone> de::Deserialize<'de> for TomlDependency

{ fn deserialize(deserializer: D) -> Result where D: de::Deserializer<'de>, { struct TomlDependencyVisitor

(PhantomData

); impl<'de, P: Deserialize<'de> + Clone> de::Visitor<'de> for TomlDependencyVisitor

{ type Value = TomlDependency

; fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter.write_str( "a version string like \"0.9.8\" or a \ detailed dependency like { version = \"0.9.8\" }", ) } fn visit_str(self, s: &str) -> Result where E: de::Error, { Ok(TomlDependency::Simple(s.to_owned())) } fn visit_map(self, map: V) -> Result where V: de::MapAccess<'de>, { let mvd = de::value::MapAccessDeserializer::new(map); let details: IntermediateDependency

= IntermediateDependency::deserialize(mvd)?; if let Some(workspace) = details.workspace { if workspace { Ok(TomlDependency::Workspace(TomlWorkspaceDependency { workspace: true, features: details.features, optional: details.optional, })) } else { return Err(de::Error::custom("workspace cannot be false")); } } else { Ok(TomlDependency::Detailed(DetailedTomlDependency { version: details.version, registry: details.registry, registry_index: details.registry_index, path: details.path, git: details.git, branch: details.branch, tag: details.tag, rev: details.rev, features: details.features, optional: details.optional, default_features: details.default_features, default_features2: details.default_features2, package: details.package, public: details.public, artifact: details.artifact, lib: details.lib, target: details.target, })) } } } deserializer.deserialize_any(TomlDependencyVisitor(PhantomData)) } } pub trait ResolveToPath { fn resolve(&self, config: &Config) -> PathBuf; } impl ResolveToPath for String { fn resolve(&self, _: &Config) -> PathBuf { self.into() } } impl ResolveToPath for ConfigRelativePath { fn resolve(&self, c: &Config) -> PathBuf { self.resolve_path(c) } } // This is here due to parsing of TomlDependency works. // At the time of writing it can not be derived in anyway I could find. #[derive(Deserialize, Debug)] #[serde(rename_all = "kebab-case")] pub struct IntermediateDependency

{ workspace: Option, version: Option, registry: Option, registry_index: Option, path: Option

, git: Option, branch: Option, tag: Option, rev: Option, features: Option>, optional: Option, default_features: Option, #[serde(rename = "default_features")] default_features2: Option, package: Option, public: Option, artifact: Option, lib: Option, target: Option, } #[derive(Deserialize, Serialize, Clone, Debug)] pub struct TomlWorkspaceDependency { workspace: bool, features: Option>, optional: Option, } #[derive(Deserialize, Serialize, Clone, Debug)] #[serde(rename_all = "kebab-case")] pub struct DetailedTomlDependency { version: Option, registry: Option, /// The URL of the `registry` field. /// This is an internal implementation detail. When Cargo creates a /// package, it replaces `registry` with `registry-index` so that the /// manifest contains the correct URL. All users won't have the same /// registry names configured, so Cargo can't rely on just the name for /// crates published by other users. registry_index: Option, // `path` is relative to the file it appears in. If that's a `Cargo.toml`, it'll be relative to // that TOML file, and if it's a `.cargo/config` file, it'll be relative to that file. path: Option

, git: Option, branch: Option, tag: Option, rev: Option, features: Option>, optional: Option, default_features: Option, #[serde(rename = "default_features")] default_features2: Option, package: Option, public: Option, /// One or more of `bin`, `cdylib`, `staticlib`, `bin:`. artifact: Option, /// If set, the artifact should also be a dependency lib: Option, /// A platform name, like `x86_64-apple-darwin` target: Option, } // Explicit implementation so we avoid pulling in P: Default impl Default for DetailedTomlDependency

{ fn default() -> Self { Self { version: Default::default(), registry: Default::default(), registry_index: Default::default(), path: Default::default(), git: Default::default(), branch: Default::default(), tag: Default::default(), rev: Default::default(), features: Default::default(), optional: Default::default(), default_features: Default::default(), default_features2: Default::default(), package: Default::default(), public: Default::default(), artifact: Default::default(), lib: Default::default(), target: Default::default(), } } } /// This type is used to deserialize `Cargo.toml` files. #[derive(Debug, Deserialize, Serialize)] #[serde(rename_all = "kebab-case")] pub struct TomlManifest { cargo_features: Option>, package: Option>, project: Option>, profile: Option, lib: Option, bin: Option>, example: Option>, test: Option>, bench: Option>, dependencies: Option>, dev_dependencies: Option>, #[serde(rename = "dev_dependencies")] dev_dependencies2: Option>, build_dependencies: Option>, #[serde(rename = "build_dependencies")] build_dependencies2: Option>, features: Option>>, target: Option>, replace: Option>, patch: Option>>, workspace: Option, badges: Option>>>, } #[derive(Deserialize, Serialize, Clone, Debug, Default)] pub struct TomlProfiles(BTreeMap); impl TomlProfiles { pub fn get_all(&self) -> &BTreeMap { &self.0 } pub fn get(&self, name: &str) -> Option<&TomlProfile> { self.0.get(name) } pub fn validate(&self, features: &Features, warnings: &mut Vec) -> CargoResult<()> { for (name, profile) in &self.0 { profile.validate(name, features, warnings)?; } Ok(()) } } #[derive(Clone, Debug, Eq, PartialEq)] pub struct TomlOptLevel(pub String); impl<'de> de::Deserialize<'de> for TomlOptLevel { fn deserialize(d: D) -> Result where D: de::Deserializer<'de>, { struct Visitor; impl<'de> de::Visitor<'de> for Visitor { type Value = TomlOptLevel; fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter.write_str("an optimization level") } fn 
visit_i64(self, value: i64) -> Result where E: de::Error, { Ok(TomlOptLevel(value.to_string())) } fn visit_str(self, value: &str) -> Result where E: de::Error, { if value == "s" || value == "z" { Ok(TomlOptLevel(value.to_string())) } else { Err(E::custom(format!( "must be `0`, `1`, `2`, `3`, `s` or `z`, \ but found the string: \"{}\"", value ))) } } } d.deserialize_any(Visitor) } } impl ser::Serialize for TomlOptLevel { fn serialize(&self, serializer: S) -> Result where S: ser::Serializer, { match self.0.parse::() { Ok(n) => n.serialize(serializer), Err(_) => self.0.serialize(serializer), } } } #[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)] #[serde(untagged, expecting = "expected a boolean or an integer")] pub enum U32OrBool { U32(u32), Bool(bool), } #[derive(Deserialize, Serialize, Clone, Debug, Default, Eq, PartialEq)] #[serde(default, rename_all = "kebab-case")] pub struct TomlProfile { pub opt_level: Option, pub lto: Option, pub codegen_backend: Option, pub codegen_units: Option, pub debug: Option, pub split_debuginfo: Option, pub debug_assertions: Option, pub rpath: Option, pub panic: Option, pub overflow_checks: Option, pub incremental: Option, pub dir_name: Option, pub inherits: Option, pub strip: Option, // Note that `rustflags` is used for the cargo-feature `profile_rustflags` pub rustflags: Option>, // These two fields must be last because they are sub-tables, and TOML // requires all non-tables to be listed first. 
pub package: Option>, pub build_override: Option>, } #[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd, Hash)] pub enum ProfilePackageSpec { Spec(PackageIdSpec), All, } impl ser::Serialize for ProfilePackageSpec { fn serialize(&self, s: S) -> Result where S: ser::Serializer, { self.to_string().serialize(s) } } impl<'de> de::Deserialize<'de> for ProfilePackageSpec { fn deserialize(d: D) -> Result where D: de::Deserializer<'de>, { let string = String::deserialize(d)?; if string == "*" { Ok(ProfilePackageSpec::All) } else { PackageIdSpec::parse(&string) .map_err(de::Error::custom) .map(ProfilePackageSpec::Spec) } } } impl fmt::Display for ProfilePackageSpec { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { ProfilePackageSpec::Spec(spec) => spec.fmt(f), ProfilePackageSpec::All => f.write_str("*"), } } } impl TomlProfile { pub fn validate( &self, name: &str, features: &Features, warnings: &mut Vec, ) -> CargoResult<()> { self.validate_profile(name, features)?; if let Some(ref profile) = self.build_override { profile.validate_override("build-override")?; profile.validate_profile(&format!("{name}.build-override"), features)?; } if let Some(ref packages) = self.package { for (override_name, profile) in packages { profile.validate_override("package")?; profile.validate_profile(&format!("{name}.package.{override_name}"), features)?; } } // Profile name validation Self::validate_name(name)?; if let Some(dir_name) = self.dir_name { // This is disabled for now, as we would like to stabilize named // profiles without this, and then decide in the future if it is // needed. This helps simplify the UI a little. 
bail!( "dir-name=\"{}\" in profile `{}` is not currently allowed, \ directory names are tied to the profile name for custom profiles", dir_name, name ); } // `inherits` validation if matches!(self.inherits.map(|s| s.as_str()), Some("debug")) { bail!( "profile.{}.inherits=\"debug\" should be profile.{}.inherits=\"dev\"", name, name ); } match name { "doc" => { warnings.push("profile `doc` is deprecated and has no effect".to_string()); } "test" | "bench" => { if self.panic.is_some() { warnings.push(format!("`panic` setting is ignored for `{}` profile", name)) } } _ => {} } if let Some(panic) = &self.panic { if panic != "unwind" && panic != "abort" { bail!( "`panic` setting of `{}` is not a valid setting, \ must be `unwind` or `abort`", panic ); } } if let Some(StringOrBool::String(arg)) = &self.lto { if arg == "true" || arg == "false" { bail!( "`lto` setting of string `\"{arg}\"` for `{name}` profile is not \ a valid setting, must be a boolean (`true`/`false`) or a string \ (`\"thin\"`/`\"fat\"`/`\"off\"`) or omitted.", ); } } Ok(()) } /// Validate dir-names and profile names according to RFC 2678. 
pub fn validate_name(name: &str) -> CargoResult<()> { if let Some(ch) = name .chars() .find(|ch| !ch.is_alphanumeric() && *ch != '_' && *ch != '-') { bail!( "invalid character `{}` in profile name `{}`\n\ Allowed characters are letters, numbers, underscore, and hyphen.", ch, name ); } const SEE_DOCS: &str = "See https://doc.rust-lang.org/cargo/reference/profiles.html \ for more on configuring profiles."; let lower_name = name.to_lowercase(); if lower_name == "debug" { bail!( "profile name `{}` is reserved\n\ To configure the default development profile, use the name `dev` \ as in [profile.dev]\n\ {}", name, SEE_DOCS ); } if lower_name == "build-override" { bail!( "profile name `{}` is reserved\n\ To configure build dependency settings, use [profile.dev.build-override] \ and [profile.release.build-override]\n\ {}", name, SEE_DOCS ); } // These are some arbitrary reservations. We have no plans to use // these, but it seems safer to reserve a few just in case we want to // add more built-in profiles in the future. We can also uses special // syntax like cargo:foo if needed. But it is unlikely these will ever // be used. if matches!( lower_name.as_str(), "build" | "check" | "clean" | "config" | "fetch" | "fix" | "install" | "metadata" | "package" | "publish" | "report" | "root" | "run" | "rust" | "rustc" | "rustdoc" | "target" | "tmp" | "uninstall" ) || lower_name.starts_with("cargo") { bail!( "profile name `{}` is reserved\n\ Please choose a different name.\n\ {}", name, SEE_DOCS ); } Ok(()) } /// Validates a profile. /// /// This is a shallow check, which is reused for the profile itself and any overrides. 
fn validate_profile(&self, name: &str, features: &Features) -> CargoResult<()> {
    // `codegen-backend` is unstable: require the feature gate, then make
    // sure the backend name is a plausible identifier ([A-Za-z0-9_]).
    if let Some(backend) = &self.codegen_backend {
        features.require(Feature::codegen_backend())?;
        let well_formed = backend
            .chars()
            .all(|c| c.is_ascii_alphanumeric() || c == '_');
        if !well_formed {
            bail!(
                "`profile.{}.codegen-backend` setting of `{}` is not a valid backend name.",
                name,
                backend,
            );
        }
    }
    // Profile-level `rustflags` is also gated behind an unstable feature.
    if self.rustflags.is_some() {
        features.require(Feature::profile_rustflags())?;
    }
    Ok(())
}

/// Validation that is specific to an override.
fn validate_override(&self, which: &str) -> CargoResult<()> {
    // Overrides may not recursively contain more overrides.
    if self.package.is_some() {
        bail!("package-specific profiles cannot be nested");
    }
    if self.build_override.is_some() {
        bail!("build-override profiles cannot be nested");
    }
    // These settings only make sense on the top-level profile, since they
    // affect the final link / layout of the whole build.
    if self.panic.is_some() {
        bail!("`panic` may not be specified in a `{}` profile", which)
    }
    if self.lto.is_some() {
        bail!("`lto` may not be specified in a `{}` profile", which)
    }
    if self.rpath.is_some() {
        bail!("`rpath` may not be specified in a `{}` profile", which)
    }
    Ok(())
}

/// Overwrite self's values with the given profile.
pub fn merge(&mut self, profile: &TomlProfile) {
    // Scalar settings: any value present in `profile` replaces ours.
    // Fields taken by reference here need `.clone()`; the rest are `Copy`.
    if let Some(v) = &profile.opt_level {
        self.opt_level = Some(v.clone());
    }
    if let Some(v) = &profile.lto {
        self.lto = Some(v.clone());
    }
    if let Some(v) = profile.codegen_backend {
        self.codegen_backend = Some(v);
    }
    if let Some(v) = profile.codegen_units {
        self.codegen_units = Some(v);
    }
    if let Some(v) = &profile.debug {
        self.debug = Some(v.clone());
    }
    if let Some(v) = profile.debug_assertions {
        self.debug_assertions = Some(v);
    }
    if let Some(v) = &profile.split_debuginfo {
        self.split_debuginfo = Some(v.clone());
    }
    if let Some(v) = profile.rpath {
        self.rpath = Some(v);
    }
    if let Some(v) = &profile.panic {
        self.panic = Some(v.clone());
    }
    if let Some(v) = profile.overflow_checks {
        self.overflow_checks = Some(v);
    }
    if let Some(v) = profile.incremental {
        self.incremental = Some(v);
    }
    if let Some(v) = &profile.rustflags {
        self.rustflags = Some(v.clone());
    }
    // `package` overrides merge per-spec: recurse into entries that exist
    // on both sides, copy over entries we don't have yet.
    if let Some(other_package) = &profile.package {
        match &mut self.package {
            Some(self_package) => {
                for (spec, other_pkg_profile) in other_package {
                    match self_package.get_mut(spec) {
                        Some(p) => p.merge(other_pkg_profile),
                        None => {
                            self_package.insert(spec.clone(), other_pkg_profile.clone());
                        }
                    }
                }
            }
            None => self.package = Some(other_package.clone()),
        }
    }
    // `build-override` merges recursively as well.
    if let Some(other_bo) = &profile.build_override {
        match &mut self.build_override {
            Some(self_bo) => self_bo.merge(other_bo),
            None => self.build_override = Some(other_bo.clone()),
        }
    }
    // `inherits`/`dir-name` are interned strings, so a deref copy suffices.
    if let Some(v) = &profile.inherits {
        self.inherits = Some(*v);
    }
    if let Some(v) = &profile.dir_name {
        self.dir_name = Some(*v);
    }
    if let Some(v) = &profile.strip {
        self.strip = Some(v.clone());
    }
}
}

/// A StringOrVec can be parsed from either a TOML string or array,
/// but is always stored as a vector.
#[derive(Clone, Debug, Serialize, Eq, PartialEq, PartialOrd, Ord)] pub struct StringOrVec(Vec); impl<'de> de::Deserialize<'de> for StringOrVec { fn deserialize(deserializer: D) -> Result where D: de::Deserializer<'de>, { struct Visitor; impl<'de> de::Visitor<'de> for Visitor { type Value = StringOrVec; fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter.write_str("string or list of strings") } fn visit_str(self, s: &str) -> Result where E: de::Error, { Ok(StringOrVec(vec![s.to_string()])) } fn visit_seq(self, v: V) -> Result where V: de::SeqAccess<'de>, { let seq = de::value::SeqAccessDeserializer::new(v); Vec::deserialize(seq).map(StringOrVec) } } deserializer.deserialize_any(Visitor) } } impl StringOrVec { pub fn iter<'a>(&'a self) -> std::slice::Iter<'a, String> { self.0.iter() } } #[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)] #[serde(untagged, expecting = "expected a boolean or a string")] pub enum StringOrBool { String(String), Bool(bool), } #[derive(PartialEq, Clone, Debug, Serialize)] #[serde(untagged)] pub enum VecStringOrBool { VecString(Vec), Bool(bool), } impl<'de> de::Deserialize<'de> for VecStringOrBool { fn deserialize(deserializer: D) -> Result where D: de::Deserializer<'de>, { struct Visitor; impl<'de> de::Visitor<'de> for Visitor { type Value = VecStringOrBool; fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter.write_str("a boolean or vector of strings") } fn visit_seq(self, v: V) -> Result where V: de::SeqAccess<'de>, { let seq = de::value::SeqAccessDeserializer::new(v); Vec::deserialize(seq).map(VecStringOrBool::VecString) } fn visit_bool(self, b: bool) -> Result where E: de::Error, { Ok(VecStringOrBool::Bool(b)) } } deserializer.deserialize_any(Visitor) } } fn version_trim_whitespace<'de, D>( deserializer: D, ) -> Result, D::Error> where D: de::Deserializer<'de>, { struct Visitor; impl<'de> de::Visitor<'de> for Visitor { type Value = MaybeWorkspace; fn 
expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter.write_str("SemVer version") } fn visit_str(self, string: &str) -> Result where E: de::Error, { match string.trim().parse().map_err(de::Error::custom) { Ok(parsed) => Ok(MaybeWorkspace::Defined(parsed)), Err(e) => Err(e), } } fn visit_map(self, map: V) -> Result where V: de::MapAccess<'de>, { let mvd = de::value::MapAccessDeserializer::new(map); TomlWorkspaceField::deserialize(mvd).map(MaybeWorkspace::Workspace) } } deserializer.deserialize_any(Visitor) } /// Enum that allows for the parsing of `field.workspace = true` in a Cargo.toml /// /// It allows for things to be inherited from a workspace or defined as needed #[derive(Deserialize, Serialize, Clone, Debug)] #[serde(untagged)] pub enum MaybeWorkspace { Workspace(TomlWorkspaceField), Defined(T), } impl MaybeWorkspace { fn resolve<'a>( self, label: &str, get_ws_field: impl FnOnce() -> CargoResult, ) -> CargoResult { match self { MaybeWorkspace::Defined(value) => Ok(value), MaybeWorkspace::Workspace(TomlWorkspaceField { workspace: true }) => get_ws_field() .context(format!( "error inheriting `{}` from workspace root manifest's `workspace.package.{}`", label, label )), MaybeWorkspace::Workspace(TomlWorkspaceField { workspace: false }) => Err(anyhow!( "`workspace=false` is unsupported for `package.{}`", label, )), } } fn as_defined(&self) -> Option<&T> { match self { MaybeWorkspace::Workspace(_) => None, MaybeWorkspace::Defined(defined) => Some(defined), } } } fn maybe_workspace_vec_string<'de, D>( deserializer: D, ) -> Result>>, D::Error> where D: de::Deserializer<'de>, { struct Visitor; impl<'de> de::Visitor<'de> for Visitor { type Value = Option>>; fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter.write_str("vector of strings") } fn visit_seq(self, v: V) -> Result where V: de::SeqAccess<'de>, { let seq = de::value::SeqAccessDeserializer::new(v); let defined = 
Vec::::deserialize(seq).map(MaybeWorkspace::Defined)?; Ok(Some(defined)) } fn visit_map(self, map: V) -> Result where V: de::MapAccess<'de>, { let mvd = de::value::MapAccessDeserializer::new(map); let workspace = TomlWorkspaceField::deserialize(mvd).map(MaybeWorkspace::Workspace)?; Ok(Some(workspace)) } } deserializer.deserialize_any(Visitor) } #[derive(Deserialize, Serialize, Clone, Debug)] pub struct TomlWorkspaceField { workspace: bool, } /// Represents the `package`/`project` sections of a `Cargo.toml`. /// /// Note that the order of the fields matters, since this is the order they /// are serialized to a TOML file. For example, you cannot have values after /// the field `metadata`, since it is a table and values cannot appear after /// tables. #[derive(Deserialize, Serialize, Clone, Debug)] #[serde(rename_all = "kebab-case")] pub struct TomlProject { edition: Option>, rust_version: Option>, name: InternedString, #[serde(deserialize_with = "version_trim_whitespace")] version: MaybeWorkspace, #[serde(default)] #[serde(deserialize_with = "maybe_workspace_vec_string")] authors: Option>>, build: Option, metabuild: Option, #[serde(rename = "default-target")] default_target: Option, #[serde(rename = "forced-target")] forced_target: Option, links: Option, #[serde(default)] #[serde(deserialize_with = "maybe_workspace_vec_string")] exclude: Option>>, #[serde(default)] #[serde(deserialize_with = "maybe_workspace_vec_string")] include: Option>>, publish: Option>, workspace: Option, im_a_teapot: Option, autobins: Option, autoexamples: Option, autotests: Option, autobenches: Option, default_run: Option, // Package metadata. 
description: Option>, homepage: Option>, documentation: Option>, readme: Option>, #[serde(default)] #[serde(deserialize_with = "maybe_workspace_vec_string")] keywords: Option>>, #[serde(default)] #[serde(deserialize_with = "maybe_workspace_vec_string")] categories: Option>>, license: Option>, license_file: Option>, repository: Option>, resolver: Option, // Note that this field must come last due to the way toml serialization // works which requires tables to be emitted after all values. metadata: Option, } #[derive(Debug, Deserialize, Serialize, Clone)] pub struct TomlWorkspace { members: Option>, #[serde(rename = "default-members")] default_members: Option>, exclude: Option>, resolver: Option, // Properties that can be inherited by members. package: Option, dependencies: Option>, // Note that this field must come last due to the way toml serialization // works which requires tables to be emitted after all values. metadata: Option, } /// A group of fields that are inheritable by members of the workspace #[derive(Clone, Debug, Default, Deserialize, Serialize)] pub struct InheritableFields { // We use skip here since it will never be present when deserializing // and we don't want it present when serializing #[serde(skip)] dependencies: Option>, version: Option, authors: Option>, description: Option, homepage: Option, documentation: Option, readme: Option, keywords: Option>, categories: Option>, license: Option, #[serde(rename = "license-file")] license_file: Option, repository: Option, publish: Option, edition: Option, badges: Option>>, exclude: Option>, include: Option>, #[serde(rename = "rust-version")] rust_version: Option, // We use skip here since it will never be present when deserializing // and we don't want it present when serializing #[serde(skip)] ws_root: PathBuf, } impl InheritableFields { pub fn update_deps(&mut self, deps: Option>) { self.dependencies = deps; } pub fn update_ws_path(&mut self, ws_root: PathBuf) { self.ws_root = ws_root; } pub fn 
dependencies(&self) -> CargoResult> { self.dependencies.clone().map_or( Err(anyhow!("`workspace.dependencies` was not defined")), |d| Ok(d), ) } pub fn get_dependency(&self, name: &str) -> CargoResult { self.dependencies.clone().map_or( Err(anyhow!("`workspace.dependencies` was not defined")), |deps| { deps.get(name).map_or( Err(anyhow!( "`dependency.{}` was not found in `workspace.dependencies`", name )), |dep| Ok(dep.clone()), ) }, ) } pub fn version(&self) -> CargoResult { self.version.clone().map_or( Err(anyhow!("`workspace.package.version` was not defined")), |d| Ok(d), ) } pub fn authors(&self) -> CargoResult> { self.authors.clone().map_or( Err(anyhow!("`workspace.package.authors` was not defined")), |d| Ok(d), ) } pub fn description(&self) -> CargoResult { self.description.clone().map_or( Err(anyhow!("`workspace.package.description` was not defined")), |d| Ok(d), ) } pub fn homepage(&self) -> CargoResult { self.homepage.clone().map_or( Err(anyhow!("`workspace.package.homepage` was not defined")), |d| Ok(d), ) } pub fn documentation(&self) -> CargoResult { self.documentation.clone().map_or( Err(anyhow!("`workspace.package.documentation` was not defined")), |d| Ok(d), ) } pub fn readme(&self, package_root: &Path) -> CargoResult { readme_for_project(self.ws_root.as_path(), self.readme.clone()).map_or( Err(anyhow!("`workspace.package.readme` was not defined")), |readme| { let rel_path = resolve_relative_path("readme", &self.ws_root, package_root, &readme)?; Ok(StringOrBool::String(rel_path)) }, ) } pub fn keywords(&self) -> CargoResult> { self.keywords.clone().map_or( Err(anyhow!("`workspace.package.keywords` was not defined")), |d| Ok(d), ) } pub fn categories(&self) -> CargoResult> { self.categories.clone().map_or( Err(anyhow!("`workspace.package.categories` was not defined")), |d| Ok(d), ) } pub fn license(&self) -> CargoResult { self.license.clone().map_or( Err(anyhow!("`workspace.package.license` was not defined")), |d| Ok(d), ) } pub fn license_file(&self, 
package_root: &Path) -> CargoResult { self.license_file.clone().map_or( Err(anyhow!("`workspace.package.license_file` was not defined")), |d| resolve_relative_path("license-file", &self.ws_root, package_root, &d), ) } pub fn repository(&self) -> CargoResult { self.repository.clone().map_or( Err(anyhow!("`workspace.package.repository` was not defined")), |d| Ok(d), ) } pub fn publish(&self) -> CargoResult { self.publish.clone().map_or( Err(anyhow!("`workspace.package.publish` was not defined")), |d| Ok(d), ) } pub fn edition(&self) -> CargoResult { self.edition.clone().map_or( Err(anyhow!("`workspace.package.edition` was not defined")), |d| Ok(d), ) } pub fn rust_version(&self) -> CargoResult { self.rust_version.clone().map_or( Err(anyhow!("`workspace.package.rust-version` was not defined")), |d| Ok(d), ) } pub fn badges(&self) -> CargoResult>> { self.badges.clone().map_or( Err(anyhow!("`workspace.package.badges` was not defined")), |d| Ok(d), ) } pub fn exclude(&self) -> CargoResult> { self.exclude.clone().map_or( Err(anyhow!("`workspace.package.exclude` was not defined")), |d| Ok(d), ) } pub fn include(&self) -> CargoResult> { self.include.clone().map_or( Err(anyhow!("`workspace.package.include` was not defined")), |d| Ok(d), ) } pub fn ws_root(&self) -> &PathBuf { &self.ws_root } } impl TomlProject { pub fn to_package_id( &self, source_id: SourceId, version: semver::Version, ) -> CargoResult { PackageId::new(self.name, version, source_id) } } struct Context<'a, 'b> { deps: &'a mut Vec, source_id: SourceId, nested_paths: &'a mut Vec, config: &'b Config, warnings: &'a mut Vec, platform: Option, root: &'a Path, features: &'a Features, } impl TomlManifest { /// Prepares the manifest for publishing. // - Path and git components of dependency specifications are removed. // - License path is updated to point within the package. 
pub fn prepare_for_publish( &self, ws: &Workspace<'_>, package_root: &Path, ) -> CargoResult { let config = ws.config(); let mut package = self .package .as_ref() .or_else(|| self.project.as_ref()) .unwrap() .clone(); package.workspace = None; let current_resolver = package .resolver .as_ref() .map(|r| ResolveBehavior::from_manifest(r)) .unwrap_or_else(|| { package .edition .as_ref() .and_then(|e| e.as_defined()) .map(|e| Edition::from_str(e)) .unwrap_or(Ok(Edition::Edition2015)) .map(|e| e.default_resolve_behavior()) })?; if ws.resolve_behavior() != current_resolver { // This ensures the published crate if built as a root (e.g. `cargo install`) will // use the same resolver behavior it was tested with in the workspace. // To avoid forcing a higher MSRV we don't explicitly set this if it would implicitly // result in the same thing. package.resolver = Some(ws.resolve_behavior().to_manifest()); } if let Some(license_file) = &package.license_file { let license_file = license_file .as_defined() .context("license file should have been resolved before `prepare_for_publish()`")?; let license_path = Path::new(&license_file); let abs_license_path = paths::normalize_path(&package_root.join(license_path)); if abs_license_path.strip_prefix(package_root).is_err() { // This path points outside of the package root. `cargo package` // will copy it into the root, so adjust the path to this location. package.license_file = Some(MaybeWorkspace::Defined( license_path .file_name() .unwrap() .to_str() .unwrap() .to_string(), )); } } if let Some(readme) = &package.readme { let readme = readme .as_defined() .context("readme should have been resolved before `prepare_for_publish()`")?; match readme { StringOrBool::String(readme) => { let readme_path = Path::new(&readme); let abs_readme_path = paths::normalize_path(&package_root.join(readme_path)); if abs_readme_path.strip_prefix(package_root).is_err() { // This path points outside of the package root. 
`cargo package` // will copy it into the root, so adjust the path to this location. package.readme = Some(MaybeWorkspace::Defined(StringOrBool::String( readme_path .file_name() .unwrap() .to_str() .unwrap() .to_string(), ))); } } StringOrBool::Bool(_) => {} } } let all = |_d: &TomlDependency| true; return Ok(TomlManifest { package: Some(package), project: None, profile: self.profile.clone(), lib: self.lib.clone(), bin: self.bin.clone(), example: self.example.clone(), test: self.test.clone(), bench: self.bench.clone(), dependencies: map_deps(config, self.dependencies.as_ref(), all)?, dev_dependencies: map_deps( config, self.dev_dependencies .as_ref() .or_else(|| self.dev_dependencies2.as_ref()), TomlDependency::is_version_specified, )?, dev_dependencies2: None, build_dependencies: map_deps( config, self.build_dependencies .as_ref() .or_else(|| self.build_dependencies2.as_ref()), all, )?, build_dependencies2: None, features: self.features.clone(), target: match self.target.as_ref().map(|target_map| { target_map .iter() .map(|(k, v)| { Ok(( k.clone(), TomlPlatform { dependencies: map_deps(config, v.dependencies.as_ref(), all)?, dev_dependencies: map_deps( config, v.dev_dependencies .as_ref() .or_else(|| v.dev_dependencies2.as_ref()), TomlDependency::is_version_specified, )?, dev_dependencies2: None, build_dependencies: map_deps( config, v.build_dependencies .as_ref() .or_else(|| v.build_dependencies2.as_ref()), all, )?, build_dependencies2: None, }, )) }) .collect() }) { Some(Ok(v)) => Some(v), Some(Err(e)) => return Err(e), None => None, }, replace: None, patch: None, workspace: None, badges: self.badges.clone(), cargo_features: self.cargo_features.clone(), }); fn map_deps( config: &Config, deps: Option<&BTreeMap>, filter: impl Fn(&TomlDependency) -> bool, ) -> CargoResult>> { let deps = match deps { Some(deps) => deps, None => return Ok(None), }; let deps = deps .iter() .filter(|(_k, v)| filter(v)) .map(|(k, v)| Ok((k.clone(), map_dependency(config, v)?))) 
.collect::>>()?; Ok(Some(deps)) } fn map_dependency(config: &Config, dep: &TomlDependency) -> CargoResult { match dep { TomlDependency::Detailed(d) => { let mut d = d.clone(); // Path dependencies become crates.io deps. d.path.take(); // Same with git dependencies. d.git.take(); d.branch.take(); d.tag.take(); d.rev.take(); // registry specifications are elaborated to the index URL if let Some(registry) = d.registry.take() { let src = SourceId::alt_registry(config, ®istry)?; d.registry_index = Some(src.url().to_string()); } Ok(TomlDependency::Detailed(d)) } TomlDependency::Simple(s) => Ok(TomlDependency::Detailed(DetailedTomlDependency { version: Some(s.clone()), ..Default::default() })), // Unreachable as we resolve everything before this TomlDependency::Workspace(_) => unreachable!(), } } } pub fn to_real_manifest( me: &Rc, source_id: SourceId, package_root: &Path, config: &Config, ) -> CargoResult<(Manifest, Vec)> { fn get_ws( config: &Config, resolved_path: &Path, workspace_config: &WorkspaceConfig, ) -> CargoResult { match workspace_config { WorkspaceConfig::Root(root) => Ok(root.inheritable().clone()), WorkspaceConfig::Member { root: Some(ref path_to_root), } => { let path = resolved_path .parent() .unwrap() .join(path_to_root) .join("Cargo.toml"); let root_path = paths::normalize_path(&path); inheritable_from_path(config, root_path) } WorkspaceConfig::Member { root: None } => { match find_workspace_root(&resolved_path, config)? { Some(path_to_root) => inheritable_from_path(config, path_to_root), None => Err(anyhow!("failed to find a workspace root")), } } } } let mut nested_paths = vec![]; let mut warnings = vec![]; let mut errors = vec![]; // Parse features first so they will be available when parsing other parts of the TOML. 
let empty = Vec::new(); let cargo_features = me.cargo_features.as_ref().unwrap_or(&empty); let features = Features::new(cargo_features, config, &mut warnings, source_id.is_path())?; let project = me.project.clone().or_else(|| me.package.clone()); let project = &mut project.ok_or_else(|| anyhow!("no `package` section found"))?; let workspace_config = match (me.workspace.as_ref(), project.workspace.as_ref()) { (Some(toml_config), None) => { let mut inheritable = toml_config.package.clone().unwrap_or_default(); inheritable.update_ws_path(package_root.to_path_buf()); inheritable.update_deps(toml_config.dependencies.clone()); let ws_root_config = WorkspaceRootConfig::new( package_root, &toml_config.members, &toml_config.default_members, &toml_config.exclude, &Some(inheritable), &toml_config.metadata, ); config .ws_roots .borrow_mut() .insert(package_root.to_path_buf(), ws_root_config.clone()); WorkspaceConfig::Root(ws_root_config) } (None, root) => WorkspaceConfig::Member { root: root.cloned(), }, (Some(..), Some(..)) => bail!( "cannot configure both `package.workspace` and \ `[workspace]`, only one can be specified" ), }; let package_name = project.name.trim(); if package_name.is_empty() { bail!("package name cannot be an empty string") } validate_package_name(package_name, "package name", "")?; let resolved_path = package_root.join("Cargo.toml"); let inherit_cell: LazyCell = LazyCell::new(); let inherit = || inherit_cell.try_borrow_with(|| get_ws(config, &resolved_path, &workspace_config)); let version = project .version .clone() .resolve("version", || inherit()?.version())?; project.version = MaybeWorkspace::Defined(version.clone()); let pkgid = project.to_package_id(source_id, version)?; let edition = if let Some(edition) = project.edition.clone() { let edition: Edition = edition .resolve("edition", || inherit()?.edition())? 
.parse() .with_context(|| "failed to parse the `edition` key")?; project.edition = Some(MaybeWorkspace::Defined(edition.to_string())); edition } else { Edition::Edition2015 }; // Add these lines if start a new unstable edition. // ``` // if edition == Edition::Edition20xx { // features.require(Feature::edition20xx))?; // } // ``` if !edition.is_stable() { // Guard in case someone forgets to add .require() return Err(util::errors::internal(format!( "edition {} should be gated", edition ))); } let rust_version = if let Some(rust_version) = &project.rust_version { let rust_version = rust_version .clone() .resolve("rust_version", || inherit()?.rust_version())?; let req = match semver::VersionReq::parse(&rust_version) { // Exclude semver operators like `^` and pre-release identifiers Ok(req) if rust_version.chars().all(|c| c.is_ascii_digit() || c == '.') => req, _ => bail!("`rust-version` must be a value like \"1.32\""), }; if let Some(first_version) = edition.first_version() { let unsupported = semver::Version::new(first_version.major, first_version.minor - 1, 9999); if req.matches(&unsupported) { bail!( "rust-version {} is older than first version ({}) required by \ the specified edition ({})", rust_version, first_version, edition, ) } } Some(rust_version.clone()) } else { None }; if project.metabuild.is_some() { features.require(Feature::metabuild())?; } let resolve_behavior = match ( project.resolver.as_ref(), me.workspace.as_ref().and_then(|ws| ws.resolver.as_ref()), ) { (None, None) => None, (Some(s), None) | (None, Some(s)) => Some(ResolveBehavior::from_manifest(s)?), (Some(_), Some(_)) => { bail!("cannot specify `resolver` field in both `[workspace]` and `[package]`") } }; // If we have no lib at all, use the inferred lib, if available. // If we have a lib with a path, we're done. // If we have a lib with no path, use the inferred lib or else the package name. 
let targets = targets( &features, me, package_name, package_root, edition, &project.build, &project.metabuild, &mut warnings, &mut errors, )?; if targets.is_empty() { debug!("manifest has no build targets"); } if let Err(e) = unique_build_targets(&targets, package_root) { warnings.push(format!( "file found to be present in multiple \ build targets: {}", e )); } if let Some(links) = &project.links { if !targets.iter().any(|t| t.is_custom_build()) { bail!( "package `{}` specifies that it links to `{}` but does not \ have a custom build script", pkgid, links ) } } let mut deps = Vec::new(); let mut cx = Context { deps: &mut deps, source_id, nested_paths: &mut nested_paths, config, warnings: &mut warnings, features: &features, platform: None, root: package_root, }; fn process_dependencies( cx: &mut Context<'_, '_>, new_deps: Option<&BTreeMap>, kind: Option, workspace_config: &WorkspaceConfig, inherit_cell: &LazyCell, ) -> CargoResult>> { let dependencies = match new_deps { Some(dependencies) => dependencies, None => return Ok(None), }; let inherit = || { inherit_cell.try_borrow_with(|| { get_ws(cx.config, &cx.root.join("Cargo.toml"), &workspace_config) }) }; let mut deps: BTreeMap = BTreeMap::new(); for (n, v) in dependencies.iter() { let resolved = v.clone().resolve(n, cx, || inherit())?; let dep = resolved.to_dependency(n, cx, kind)?; validate_package_name(dep.name_in_toml().as_str(), "dependency name", "")?; cx.deps.push(dep); deps.insert(n.to_string(), resolved.clone()); } Ok(Some(deps)) } // Collect the dependencies. 
let dependencies = process_dependencies( &mut cx, me.dependencies.as_ref(), None, &workspace_config, &inherit_cell, )?; if me.dev_dependencies.is_some() && me.dev_dependencies2.is_some() { warn_on_deprecated("dev-dependencies", package_name, "package", cx.warnings); } let dev_deps = me .dev_dependencies .as_ref() .or_else(|| me.dev_dependencies2.as_ref()); let dev_deps = process_dependencies( &mut cx, dev_deps, Some(DepKind::Development), &workspace_config, &inherit_cell, )?; if me.build_dependencies.is_some() && me.build_dependencies2.is_some() { warn_on_deprecated("build-dependencies", package_name, "package", cx.warnings); } let build_deps = me .build_dependencies .as_ref() .or_else(|| me.build_dependencies2.as_ref()); let build_deps = process_dependencies( &mut cx, build_deps, Some(DepKind::Build), &workspace_config, &inherit_cell, )?; let mut target: BTreeMap = BTreeMap::new(); for (name, platform) in me.target.iter().flatten() { cx.platform = { let platform: Platform = name.parse()?; platform.check_cfg_attributes(cx.warnings); Some(platform) }; let deps = process_dependencies( &mut cx, platform.dependencies.as_ref(), None, &workspace_config, &inherit_cell, ) .unwrap(); if platform.build_dependencies.is_some() && platform.build_dependencies2.is_some() { warn_on_deprecated("build-dependencies", name, "platform target", cx.warnings); } let build_deps = platform .build_dependencies .as_ref() .or_else(|| platform.build_dependencies2.as_ref()); let build_deps = process_dependencies( &mut cx, build_deps, Some(DepKind::Build), &workspace_config, &inherit_cell, ) .unwrap(); if platform.dev_dependencies.is_some() && platform.dev_dependencies2.is_some() { warn_on_deprecated("dev-dependencies", name, "platform target", cx.warnings); } let dev_deps = platform .dev_dependencies .as_ref() .or_else(|| platform.dev_dependencies2.as_ref()); let dev_deps = process_dependencies( &mut cx, dev_deps, Some(DepKind::Development), &workspace_config, &inherit_cell, ) .unwrap(); 
target.insert( name.clone(), TomlPlatform { dependencies: deps, build_dependencies: build_deps, build_dependencies2: None, dev_dependencies: dev_deps, dev_dependencies2: None, }, ); } let target = if target.is_empty() { None } else { Some(target) }; let replace = me.replace(&mut cx)?; let patch = me.patch(&mut cx)?; { let mut names_sources = BTreeMap::new(); for dep in &deps { let name = dep.name_in_toml(); let prev = names_sources.insert(name.to_string(), dep.source_id()); if prev.is_some() && prev != Some(dep.source_id()) { bail!( "Dependency '{}' has different source paths depending on the build \ target. Each dependency must have a single canonical source path \ irrespective of build target.", name ); } } } let exclude = project .exclude .clone() .map(|mw| mw.resolve("exclude", || inherit()?.exclude())) .transpose()? .unwrap_or_default(); let include = project .include .clone() .map(|mw| mw.resolve("include", || inherit()?.include())) .transpose()? .unwrap_or_default(); let empty_features = BTreeMap::new(); let summary = Summary::new( config, pkgid, deps, me.features.as_ref().unwrap_or(&empty_features), project.links.as_deref(), )?; let metadata = ManifestMetadata { description: project .description .clone() .map(|mw| mw.resolve("description", || inherit()?.description())) .transpose()?, homepage: project .homepage .clone() .map(|mw| mw.resolve("homepage", || inherit()?.homepage())) .transpose()?, documentation: project .documentation .clone() .map(|mw| mw.resolve("documentation", || inherit()?.documentation())) .transpose()?, readme: readme_for_project( package_root, project .readme .clone() .map(|mw| mw.resolve("readme", || inherit()?.readme(package_root))) .transpose()?, ), authors: project .authors .clone() .map(|mw| mw.resolve("authors", || inherit()?.authors())) .transpose()? 
.unwrap_or_default(), license: project .license .clone() .map(|mw| mw.resolve("license", || inherit()?.license())) .transpose()?, license_file: project .license_file .clone() .map(|mw| mw.resolve("license", || inherit()?.license_file(package_root))) .transpose()?, repository: project .repository .clone() .map(|mw| mw.resolve("repository", || inherit()?.repository())) .transpose()?, keywords: project .keywords .clone() .map(|mw| mw.resolve("keywords", || inherit()?.keywords())) .transpose()? .unwrap_or_default(), categories: project .categories .clone() .map(|mw| mw.resolve("categories", || inherit()?.categories())) .transpose()? .unwrap_or_default(), badges: me .badges .clone() .map(|mw| mw.resolve("badges", || inherit()?.badges())) .transpose()? .unwrap_or_default(), links: project.links.clone(), }; project.description = metadata .description .clone() .map(|description| MaybeWorkspace::Defined(description)); project.homepage = metadata .homepage .clone() .map(|homepage| MaybeWorkspace::Defined(homepage)); project.documentation = metadata .documentation .clone() .map(|documentation| MaybeWorkspace::Defined(documentation)); project.readme = metadata .readme .clone() .map(|readme| MaybeWorkspace::Defined(StringOrBool::String(readme))); project.authors = project .authors .as_ref() .map(|_| MaybeWorkspace::Defined(metadata.authors.clone())); project.license = metadata .license .clone() .map(|license| MaybeWorkspace::Defined(license)); project.license_file = metadata .license_file .clone() .map(|license_file| MaybeWorkspace::Defined(license_file)); project.repository = metadata .repository .clone() .map(|repository| MaybeWorkspace::Defined(repository)); project.keywords = project .keywords .as_ref() .map(|_| MaybeWorkspace::Defined(metadata.keywords.clone())); project.categories = project .categories .as_ref() .map(|_| MaybeWorkspace::Defined(metadata.categories.clone())); project.rust_version = rust_version.clone().map(|rv| MaybeWorkspace::Defined(rv)); project.exclude 
= project .exclude .as_ref() .map(|_| MaybeWorkspace::Defined(exclude.clone())); project.include = project .include .as_ref() .map(|_| MaybeWorkspace::Defined(include.clone())); let profiles = me.profile.clone(); if let Some(profiles) = &profiles { profiles.validate(&features, &mut warnings)?; } let publish = project .publish .clone() .map(|publish| publish.resolve("publish", || inherit()?.publish()).unwrap()); project.publish = publish.clone().map(|p| MaybeWorkspace::Defined(p)); let publish = match publish { Some(VecStringOrBool::VecString(ref vecstring)) => Some(vecstring.clone()), Some(VecStringOrBool::Bool(false)) => Some(vec![]), None | Some(VecStringOrBool::Bool(true)) => None, }; if summary.features().contains_key("default-features") { warnings.push( "`default-features = [\"..\"]` was found in [features]. \ Did you mean to use `default = [\"..\"]`?" .to_string(), ) } if let Some(run) = &project.default_run { if !targets .iter() .filter(|t| t.is_bin()) .any(|t| t.name() == run) { let suggestion = util::closest_msg(run, targets.iter().filter(|t| t.is_bin()), |t| t.name()); bail!("default-run target `{}` not found{}", run, suggestion); } } let default_kind = project .default_target .as_ref() .map(|t| CompileTarget::new(&*t)) .transpose()? .map(CompileKind::Target); let forced_kind = project .forced_target .as_ref() .map(|t| CompileTarget::new(&*t)) .transpose()? 
.map(CompileKind::Target); let custom_metadata = project.metadata.clone(); let resolved_toml = TomlManifest { cargo_features: me.cargo_features.clone(), package: Some(project.clone()), project: None, profile: me.profile.clone(), lib: me.lib.clone(), bin: me.bin.clone(), example: me.example.clone(), test: me.test.clone(), bench: me.bench.clone(), dependencies, dev_dependencies: dev_deps, dev_dependencies2: None, build_dependencies: build_deps, build_dependencies2: None, features: me.features.clone(), target, replace: me.replace.clone(), patch: me.patch.clone(), workspace: me.workspace.clone(), badges: me .badges .as_ref() .map(|_| MaybeWorkspace::Defined(metadata.badges.clone())), }; let mut manifest = Manifest::new( summary, default_kind, forced_kind, targets, exclude, include, project.links.clone(), metadata, custom_metadata, profiles, publish, replace, patch, workspace_config, features, edition, rust_version, project.im_a_teapot, project.default_run.clone(), Rc::new(resolved_toml), project.metabuild.clone().map(|sov| sov.0), resolve_behavior, ); if project.license_file.is_some() && project.license.is_some() { manifest.warnings_mut().add_warning( "only one of `license` or `license-file` is necessary\n\ `license` should be used if the package license can be expressed \ with a standard SPDX expression.\n\ `license-file` should be used if the package uses a non-standard license.\n\ See https://doc.rust-lang.org/cargo/reference/manifest.html#the-license-and-license-file-fields \ for more information." 
.to_string(), ); } for warning in warnings { manifest.warnings_mut().add_warning(warning); } for error in errors { manifest.warnings_mut().add_critical_warning(error); } manifest.feature_gate()?; Ok((manifest, nested_paths)) } fn to_virtual_manifest( me: &Rc, source_id: SourceId, root: &Path, config: &Config, ) -> CargoResult<(VirtualManifest, Vec)> { if me.project.is_some() { bail!("this virtual manifest specifies a [project] section, which is not allowed"); } if me.package.is_some() { bail!("this virtual manifest specifies a [package] section, which is not allowed"); } if me.lib.is_some() { bail!("this virtual manifest specifies a [lib] section, which is not allowed"); } if me.bin.is_some() { bail!("this virtual manifest specifies a [[bin]] section, which is not allowed"); } if me.example.is_some() { bail!("this virtual manifest specifies a [[example]] section, which is not allowed"); } if me.test.is_some() { bail!("this virtual manifest specifies a [[test]] section, which is not allowed"); } if me.bench.is_some() { bail!("this virtual manifest specifies a [[bench]] section, which is not allowed"); } if me.dependencies.is_some() { bail!("this virtual manifest specifies a [dependencies] section, which is not allowed"); } if me.dev_dependencies.is_some() || me.dev_dependencies2.is_some() { bail!("this virtual manifest specifies a [dev-dependencies] section, which is not allowed"); } if me.build_dependencies.is_some() || me.build_dependencies2.is_some() { bail!("this virtual manifest specifies a [build-dependencies] section, which is not allowed"); } if me.features.is_some() { bail!("this virtual manifest specifies a [features] section, which is not allowed"); } if me.target.is_some() { bail!("this virtual manifest specifies a [target] section, which is not allowed"); } if me.badges.is_some() { bail!("this virtual manifest specifies a [badges] section, which is not allowed"); } let mut nested_paths = Vec::new(); let mut warnings = Vec::new(); let mut deps = 
Vec::new(); let empty = Vec::new(); let cargo_features = me.cargo_features.as_ref().unwrap_or(&empty); let features = Features::new(cargo_features, config, &mut warnings, source_id.is_path())?; let (replace, patch) = { let mut cx = Context { deps: &mut deps, source_id, nested_paths: &mut nested_paths, config, warnings: &mut warnings, platform: None, features: &features, root, }; (me.replace(&mut cx)?, me.patch(&mut cx)?) }; let profiles = me.profile.clone(); if let Some(profiles) = &profiles { profiles.validate(&features, &mut warnings)?; } let resolve_behavior = me .workspace .as_ref() .and_then(|ws| ws.resolver.as_deref()) .map(|r| ResolveBehavior::from_manifest(r)) .transpose()?; let workspace_config = match me.workspace { Some(ref toml_config) => { let mut inheritable = toml_config.package.clone().unwrap_or_default(); inheritable.update_ws_path(root.to_path_buf()); inheritable.update_deps(toml_config.dependencies.clone()); let ws_root_config = WorkspaceRootConfig::new( root, &toml_config.members, &toml_config.default_members, &toml_config.exclude, &Some(inheritable), &toml_config.metadata, ); config .ws_roots .borrow_mut() .insert(root.to_path_buf(), ws_root_config.clone()); WorkspaceConfig::Root(ws_root_config) } None => { bail!("virtual manifests must be configured with [workspace]"); } }; Ok(( VirtualManifest::new( replace, patch, workspace_config, profiles, features, resolve_behavior, ), nested_paths, )) } fn replace(&self, cx: &mut Context<'_, '_>) -> CargoResult> { if self.patch.is_some() && self.replace.is_some() { bail!("cannot specify both [replace] and [patch]"); } let mut replace = Vec::new(); for (spec, replacement) in self.replace.iter().flatten() { let mut spec = PackageIdSpec::parse(spec).with_context(|| { format!( "replacements must specify a valid semver \ version to replace, but `{}` does not", spec ) })?; if spec.url().is_none() { spec.set_url(CRATES_IO_INDEX.parse().unwrap()); } if replacement.is_version_specified() { bail!( "replacements 
cannot specify a version \ requirement, but found one for `{}`", spec ); } let mut dep = replacement.to_dependency(spec.name().as_str(), cx, None)?; let version = spec.version().ok_or_else(|| { anyhow!( "replacements must specify a version \ to replace, but `{}` does not", spec ) })?; dep.set_version_req(VersionReq::exact(version)) .lock_version(version); replace.push((spec, dep)); } Ok(replace) } fn patch(&self, cx: &mut Context<'_, '_>) -> CargoResult>> { let mut patch = HashMap::new(); for (url, deps) in self.patch.iter().flatten() { let url = match &url[..] { CRATES_IO_REGISTRY => CRATES_IO_INDEX.parse().unwrap(), _ => cx .config .get_registry_index(url) .or_else(|_| url.into_url()) .with_context(|| { format!("[patch] entry `{}` should be a URL or registry name", url) })?, }; patch.insert( url, deps.iter() .map(|(name, dep)| dep.to_dependency(name, cx, None)) .collect::>>()?, ); } Ok(patch) } /// Returns the path to the build script if one exists for this crate. fn maybe_custom_build( &self, build: &Option, package_root: &Path, ) -> Option { let build_rs = package_root.join("build.rs"); match *build { // Explicitly no build script. Some(StringOrBool::Bool(false)) => None, Some(StringOrBool::Bool(true)) => Some(build_rs), Some(StringOrBool::String(ref s)) => Some(PathBuf::from(s)), None => { // If there is a `build.rs` file next to the `Cargo.toml`, assume it is // a build script. 
if build_rs.is_file() { Some(build_rs) } else { None } } } } pub fn has_profiles(&self) -> bool { self.profile.is_some() } pub fn features(&self) -> Option<&BTreeMap>> { self.features.as_ref() } } fn inheritable_from_path( config: &Config, workspace_path: PathBuf, ) -> CargoResult { // Workspace path should have Cargo.toml at the end let workspace_path_root = workspace_path.parent().unwrap(); // Let the borrow exit scope so that it can be picked up if there is a need to // read a manifest if let Some(ws_root) = config.ws_roots.borrow().get(workspace_path_root) { return Ok(ws_root.inheritable().clone()); }; let source_id = SourceId::for_path(workspace_path_root)?; let (man, _) = read_manifest(&workspace_path, source_id, config)?; match man.workspace_config() { WorkspaceConfig::Root(root) => { config .ws_roots .borrow_mut() .insert(workspace_path, root.clone()); Ok(root.inheritable().clone()) } _ => bail!( "root of a workspace inferred but wasn't a root: {}", workspace_path.display() ), } } /// Returns the name of the README file for a `TomlProject`. pub fn readme_for_project(package_root: &Path, readme: Option) -> Option { match &readme { None => default_readme_from_package_root(package_root), Some(value) => match value { StringOrBool::Bool(false) => None, StringOrBool::Bool(true) => Some("README.md".to_string()), StringOrBool::String(v) => Some(v.clone()), }, } } const DEFAULT_README_FILES: [&str; 3] = ["README.md", "README.txt", "README"]; /// Checks if a file with any of the default README file names exists in the package root. /// If so, returns a `String` representing that name. fn default_readme_from_package_root(package_root: &Path) -> Option { for &readme_filename in DEFAULT_README_FILES.iter() { if package_root.join(readme_filename).is_file() { return Some(readme_filename.to_string()); } } None } /// Checks a list of build targets, and ensures the target names are unique within a vector. /// If not, the name of the offending build target is returned. 
fn unique_build_targets(targets: &[Target], package_root: &Path) -> Result<(), String> { let mut seen = HashSet::new(); for target in targets { if let TargetSourcePath::Path(path) = target.src_path() { let full = package_root.join(path); if !seen.insert(full.clone()) { return Err(full.display().to_string()); } } } Ok(()) } impl TomlDependency

{ pub(crate) fn to_dependency_split( &self, name: &str, source_id: SourceId, nested_paths: &mut Vec, config: &Config, warnings: &mut Vec, platform: Option, root: &Path, features: &Features, kind: Option, ) -> CargoResult { self.to_dependency( name, &mut Context { deps: &mut Vec::new(), source_id, nested_paths, config, warnings, platform, root, features, }, kind, ) } fn to_dependency( &self, name: &str, cx: &mut Context<'_, '_>, kind: Option, ) -> CargoResult { match *self { TomlDependency::Simple(ref version) => DetailedTomlDependency::

{ version: Some(version.clone()), ..Default::default() } .to_dependency(name, cx, kind), TomlDependency::Detailed(ref details) => details.to_dependency(name, cx, kind), TomlDependency::Workspace(_) => unreachable!(), } } fn is_version_specified(&self) -> bool { match self { TomlDependency::Detailed(d) => d.version.is_some(), TomlDependency::Simple(..) => true, TomlDependency::Workspace(_) => unreachable!(), } } fn is_optional(&self) -> bool { match self { TomlDependency::Detailed(d) => d.optional.unwrap_or(false), TomlDependency::Simple(..) => false, TomlDependency::Workspace(w) => w.optional.unwrap_or(false), } } } impl TomlDependency { fn resolve<'a>( self, label: &str, cx: &mut Context<'_, '_>, get_inheritable: impl FnOnce() -> CargoResult<&'a InheritableFields>, ) -> CargoResult { match self { TomlDependency::Detailed(d) => Ok(TomlDependency::Detailed(d)), TomlDependency::Simple(s) => Ok(TomlDependency::Simple(s)), TomlDependency::Workspace(TomlWorkspaceDependency { workspace: true, features, optional, }) => { let inheritable = get_inheritable()?; inheritable.get_dependency(label).context(format!( "error reading `dependencies.{}` from workspace root manifest's `workspace.dependencies.{}`", label, label )).map(|dep| { match dep { TomlDependency::Simple(s) => { if optional.is_some() || features.is_some() { Ok(TomlDependency::Detailed(DetailedTomlDependency { version: Some(s), optional, features, ..Default::default() })) } else { Ok(TomlDependency::Simple(s)) } }, TomlDependency::Detailed(d) => { let mut dep = d.clone(); dep.add_features(features); dep.update_optional(optional); dep.resolve_path(label,inheritable.ws_root(), cx.root)?; Ok(TomlDependency::Detailed(dep)) }, TomlDependency::Workspace(_) => { unreachable!( "We check that no workspace defines dependencies with \ `{{ workspace = true }}` when we read a manifest from a string. \ this should not happen but did on {}", label ) }, } })? 
} TomlDependency::Workspace(TomlWorkspaceDependency { workspace: false, .. }) => Err(anyhow!( "`workspace=false` is unsupported for `package.dependencies.{}`", label, )), } } } impl DetailedTomlDependency

{ fn to_dependency( &self, name_in_toml: &str, cx: &mut Context<'_, '_>, kind: Option, ) -> CargoResult { if self.version.is_none() && self.path.is_none() && self.git.is_none() { let msg = format!( "dependency ({}) specified without \ providing a local path, Git repository, or \ version to use. This will be considered an \ error in future versions", name_in_toml ); cx.warnings.push(msg); } if let Some(version) = &self.version { if version.contains('+') { cx.warnings.push(format!( "version requirement `{}` for dependency `{}` \ includes semver metadata which will be ignored, removing the \ metadata is recommended to avoid confusion", version, name_in_toml )); } } if self.git.is_none() { let git_only_keys = [ (&self.branch, "branch"), (&self.tag, "tag"), (&self.rev, "rev"), ]; for &(key, key_name) in &git_only_keys { if key.is_some() { bail!( "key `{}` is ignored for dependency ({}).", key_name, name_in_toml ); } } } // Early detection of potentially misused feature syntax // instead of generating a "feature not found" error. if let Some(features) = &self.features { for feature in features { if feature.contains('/') { bail!( "feature `{}` in dependency `{}` is not allowed to contain slashes\n\ If you want to enable features of a transitive dependency, \ the direct dependency needs to re-export those features from \ the `[features]` table.", feature, name_in_toml ); } if feature.starts_with("dep:") { bail!( "feature `{}` in dependency `{}` is not allowed to use explicit \ `dep:` syntax\n\ If you want to enable an optional dependency, specify the name \ of the optional dependency without the `dep:` prefix, or specify \ a feature from the dependency's `[features]` table that enables \ the optional dependency.", feature, name_in_toml ); } } } let new_source_id = match ( self.git.as_ref(), self.path.as_ref(), self.registry.as_ref(), self.registry_index.as_ref(), ) { (Some(_), _, Some(_), _) | (Some(_), _, _, Some(_)) => bail!( "dependency ({}) specification is ambiguous. 
\ Only one of `git` or `registry` is allowed.", name_in_toml ), (_, _, Some(_), Some(_)) => bail!( "dependency ({}) specification is ambiguous. \ Only one of `registry` or `registry-index` is allowed.", name_in_toml ), (Some(git), maybe_path, _, _) => { if maybe_path.is_some() { bail!( "dependency ({}) specification is ambiguous. \ Only one of `git` or `path` is allowed.", name_in_toml ); } let n_details = [&self.branch, &self.tag, &self.rev] .iter() .filter(|d| d.is_some()) .count(); if n_details > 1 { bail!( "dependency ({}) specification is ambiguous. \ Only one of `branch`, `tag` or `rev` is allowed.", name_in_toml ); } let reference = self .branch .clone() .map(GitReference::Branch) .or_else(|| self.tag.clone().map(GitReference::Tag)) .or_else(|| self.rev.clone().map(GitReference::Rev)) .unwrap_or(GitReference::DefaultBranch); let loc = git.into_url()?; if let Some(fragment) = loc.fragment() { let msg = format!( "URL fragment `#{}` in git URL is ignored for dependency ({}). \ If you were trying to specify a specific git revision, \ use `rev = \"{}\"` in the dependency declaration.", fragment, name_in_toml, fragment ); cx.warnings.push(msg) } SourceId::for_git(&loc, reference)? } (None, Some(path), _, _) => { let path = path.resolve(cx.config); cx.nested_paths.push(path.clone()); // If the source ID for the package we're parsing is a path // source, then we normalize the path here to get rid of // components like `..`. // // The purpose of this is to get a canonical ID for the package // that we're depending on to ensure that builds of this package // always end up hashing to the same value no matter where it's // built from. if cx.source_id.is_path() { let path = cx.root.join(path); let path = paths::normalize_path(&path); SourceId::for_path(&path)? 
} else { cx.source_id } } (None, None, Some(registry), None) => SourceId::alt_registry(cx.config, registry)?, (None, None, None, Some(registry_index)) => { let url = registry_index.into_url()?; SourceId::for_registry(&url)? } (None, None, None, None) => SourceId::crates_io(cx.config)?, }; let (pkg_name, explicit_name_in_toml) = match self.package { Some(ref s) => (&s[..], Some(name_in_toml)), None => (name_in_toml, None), }; let version = self.version.as_deref(); let mut dep = Dependency::parse(pkg_name, version, new_source_id)?; if self.default_features.is_some() && self.default_features2.is_some() { warn_on_deprecated("default-features", name_in_toml, "dependency", cx.warnings); } dep.set_features(self.features.iter().flatten()) .set_default_features( self.default_features .or(self.default_features2) .unwrap_or(true), ) .set_optional(self.optional.unwrap_or(false)) .set_platform(cx.platform.clone()); if let Some(registry) = &self.registry { let registry_id = SourceId::alt_registry(cx.config, registry)?; dep.set_registry_id(registry_id); } if let Some(registry_index) = &self.registry_index { let url = registry_index.into_url()?; let registry_id = SourceId::for_registry(&url)?; dep.set_registry_id(registry_id); } if let Some(kind) = kind { dep.set_kind(kind); } if let Some(name_in_toml) = explicit_name_in_toml { dep.set_explicit_name_in_toml(name_in_toml); } if let Some(p) = self.public { cx.features.require(Feature::public_dependency())?; if dep.kind() != DepKind::Normal { bail!("'public' specifier can only be used on regular dependencies, not {:?} dependencies", dep.kind()); } dep.set_public(p); } if let (Some(artifact), is_lib, target) = ( self.artifact.as_ref(), self.lib.unwrap_or(false), self.target.as_deref(), ) { if cx.config.cli_unstable().bindeps { let artifact = Artifact::parse(artifact, is_lib, target)?; if dep.kind() != DepKind::Build && artifact.target() == Some(ArtifactTarget::BuildDependencyAssumeTarget) { bail!( r#"`target = "target"` in normal- or 
dev-dependencies has no effect ({})"#, name_in_toml ); } dep.set_artifact(artifact) } else { bail!("`artifact = …` requires `-Z bindeps` ({})", name_in_toml); } } else if self.lib.is_some() || self.target.is_some() { for (is_set, specifier) in [ (self.lib.is_some(), "lib"), (self.target.is_some(), "target"), ] { if !is_set { continue; } bail!( "'{}' specifier cannot be used without an 'artifact = …' value ({})", specifier, name_in_toml ) } } Ok(dep) } } impl DetailedTomlDependency { fn add_features(&mut self, features: Option>) { self.features = match (self.features.clone(), features.clone()) { (Some(dep_feat), Some(inherit_feat)) => Some( dep_feat .into_iter() .chain(inherit_feat) .collect::>(), ), (Some(dep_fet), None) => Some(dep_fet), (None, Some(inherit_feat)) => Some(inherit_feat), (None, None) => None, }; } fn update_optional(&mut self, optional: Option) { self.optional = optional; } fn resolve_path( &mut self, name: &str, root_path: &Path, package_root: &Path, ) -> CargoResult<()> { if let Some(rel_path) = &self.path { self.path = Some(resolve_relative_path( name, root_path, package_root, rel_path, )?) } Ok(()) } } #[derive(Default, Serialize, Deserialize, Debug, Clone)] struct TomlTarget { name: Option, // The intention was to only accept `crate-type` here but historical // versions of Cargo also accepted `crate_type`, so look for both. 
#[serde(rename = "crate-type")] crate_type: Option>, #[serde(rename = "crate_type")] crate_type2: Option>, path: Option, // Note that `filename` is used for the cargo-feature `different_binary_name` filename: Option, test: Option, doctest: Option, bench: Option, doc: Option, plugin: Option, #[serde(rename = "proc-macro")] proc_macro_raw: Option, #[serde(rename = "proc_macro")] proc_macro_raw2: Option, harness: Option, #[serde(rename = "required-features")] required_features: Option>, edition: Option, } #[derive(Clone)] struct PathValue(PathBuf); impl<'de> de::Deserialize<'de> for PathValue { fn deserialize(deserializer: D) -> Result where D: de::Deserializer<'de>, { Ok(PathValue(String::deserialize(deserializer)?.into())) } } impl ser::Serialize for PathValue { fn serialize(&self, serializer: S) -> Result where S: ser::Serializer, { self.0.serialize(serializer) } } /// Corresponds to a `target` entry, but `TomlTarget` is already used. #[derive(Serialize, Deserialize, Debug, Clone)] struct TomlPlatform { dependencies: Option>, #[serde(rename = "build-dependencies")] build_dependencies: Option>, #[serde(rename = "build_dependencies")] build_dependencies2: Option>, #[serde(rename = "dev-dependencies")] dev_dependencies: Option>, #[serde(rename = "dev_dependencies")] dev_dependencies2: Option>, } impl TomlTarget { fn new() -> TomlTarget { TomlTarget::default() } fn name(&self) -> String { match self.name { Some(ref name) => name.clone(), None => panic!("target name is required"), } } fn validate_proc_macro(&self, warnings: &mut Vec) { if self.proc_macro_raw.is_some() && self.proc_macro_raw2.is_some() { warn_on_deprecated( "proc-macro", self.name().as_str(), "library target", warnings, ); } } fn proc_macro(&self) -> Option { self.proc_macro_raw.or(self.proc_macro_raw2).or_else(|| { if let Some(types) = self.crate_types() { if types.contains(&"proc-macro".to_string()) { return Some(true); } } None }) } fn validate_crate_types(&self, target_kind_human: &str, warnings: 
&mut Vec) { if self.crate_type.is_some() && self.crate_type2.is_some() { warn_on_deprecated( "crate-type", self.name().as_str(), format!("{target_kind_human} target").as_str(), warnings, ); } } fn crate_types(&self) -> Option<&Vec> { self.crate_type .as_ref() .or_else(|| self.crate_type2.as_ref()) } } impl fmt::Debug for PathValue { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.0.fmt(f) } } cargo-0.66.0/src/cargo/util/toml/targets.rs000066400000000000000000000727301432416201200205330ustar00rootroot00000000000000//! This module implements Cargo conventions for directory layout: //! //! * `src/lib.rs` is a library //! * `src/main.rs` is a binary //! * `src/bin/*.rs` are binaries //! * `examples/*.rs` are examples //! * `tests/*.rs` are integration tests //! * `benches/*.rs` are benchmarks //! //! It is a bit tricky because we need match explicit information from `Cargo.toml` //! with implicit info in directory layout. use std::collections::HashSet; use std::fs::{self, DirEntry}; use std::path::{Path, PathBuf}; use super::{ PathValue, StringOrBool, StringOrVec, TomlBenchTarget, TomlBinTarget, TomlExampleTarget, TomlLibTarget, TomlManifest, TomlTarget, TomlTestTarget, }; use crate::core::compiler::CrateType; use crate::core::{Edition, Feature, Features, Target}; use crate::util::errors::CargoResult; use crate::util::restricted_names; use anyhow::Context as _; const DEFAULT_TEST_DIR_NAME: &'static str = "tests"; const DEFAULT_BENCH_DIR_NAME: &'static str = "benches"; const DEFAULT_EXAMPLE_DIR_NAME: &'static str = "examples"; const DEFAULT_BIN_DIR_NAME: &'static str = "bin"; pub fn targets( features: &Features, manifest: &TomlManifest, package_name: &str, package_root: &Path, edition: Edition, custom_build: &Option, metabuild: &Option, warnings: &mut Vec, errors: &mut Vec, ) -> CargoResult> { let mut targets = Vec::new(); let has_lib; if let Some(target) = clean_lib( manifest.lib.as_ref(), package_root, package_name, edition, warnings, )? 
{ targets.push(target); has_lib = true; } else { has_lib = false; } let package = manifest .package .as_ref() .or_else(|| manifest.project.as_ref()) .ok_or_else(|| anyhow::format_err!("manifest has no `package` (or `project`)"))?; targets.extend(clean_bins( features, manifest.bin.as_ref(), package_root, package_name, edition, package.autobins, warnings, errors, has_lib, )?); targets.extend(clean_examples( manifest.example.as_ref(), package_root, edition, package.autoexamples, warnings, errors, )?); targets.extend(clean_tests( manifest.test.as_ref(), package_root, edition, package.autotests, warnings, errors, )?); targets.extend(clean_benches( manifest.bench.as_ref(), package_root, edition, package.autobenches, warnings, errors, )?); // processing the custom build script if let Some(custom_build) = manifest.maybe_custom_build(custom_build, package_root) { if metabuild.is_some() { anyhow::bail!("cannot specify both `metabuild` and `build`"); } let name = format!( "build-script-{}", custom_build .file_stem() .and_then(|s| s.to_str()) .unwrap_or("") ); targets.push(Target::custom_build_target( &name, package_root.join(custom_build), edition, )); } if let Some(metabuild) = metabuild { // Verify names match available build deps. 
let bdeps = manifest.build_dependencies.as_ref(); for name in &metabuild.0 { if !bdeps.map_or(false, |bd| bd.contains_key(name)) { anyhow::bail!( "metabuild package `{}` must be specified in `build-dependencies`", name ); } } targets.push(Target::metabuild_target(&format!( "metabuild-{}", package.name ))); } Ok(targets) } fn clean_lib( toml_lib: Option<&TomlLibTarget>, package_root: &Path, package_name: &str, edition: Edition, warnings: &mut Vec, ) -> CargoResult> { let inferred = inferred_lib(package_root); let lib = match toml_lib { Some(lib) => { if let Some(ref name) = lib.name { // XXX: other code paths dodge this validation if name.contains('-') { anyhow::bail!("library target names cannot contain hyphens: {}", name) } } Some(TomlTarget { name: lib.name.clone().or_else(|| Some(package_name.to_owned())), ..lib.clone() }) } None => inferred.as_ref().map(|lib| TomlTarget { name: Some(package_name.to_string()), path: Some(PathValue(lib.clone())), ..TomlTarget::new() }), }; let lib = match lib { Some(ref lib) => lib, None => return Ok(None), }; lib.validate_proc_macro(warnings); lib.validate_crate_types("library", warnings); validate_target_name(lib, "library", "lib", warnings)?; let path = match (lib.path.as_ref(), inferred) { (Some(path), _) => package_root.join(&path.0), (None, Some(path)) => path, (None, None) => { let legacy_path = package_root.join("src").join(format!("{}.rs", lib.name())); if edition == Edition::Edition2015 && legacy_path.exists() { warnings.push(format!( "path `{}` was erroneously implicitly accepted for library `{}`,\n\ please rename the file to `src/lib.rs` or set lib.path in Cargo.toml", legacy_path.display(), lib.name() )); legacy_path } else { anyhow::bail!( "can't find library `{}`, \ rename file to `src/lib.rs` or specify lib.path", lib.name() ) } } }; // Per the Macros 1.1 RFC: // // > Initially if a crate is compiled with the `proc-macro` crate type // > (and possibly others) it will forbid exporting any items in the // > crate 
other than those functions tagged #[proc_macro_derive] and // > those functions must also be placed at the crate root. // // A plugin requires exporting plugin_registrar so a crate cannot be // both at once. let crate_types = match (lib.crate_types(), lib.plugin, lib.proc_macro()) { (Some(kinds), _, _) if kinds.contains(&CrateType::Dylib.as_str().to_owned()) && kinds.contains(&CrateType::Cdylib.as_str().to_owned()) => { anyhow::bail!(format!( "library `{}` cannot set the crate type of both `dylib` and `cdylib`", lib.name() )); } (Some(kinds), _, _) if kinds.contains(&"proc-macro".to_string()) => { if let Some(true) = lib.plugin { // This is a warning to retain backwards compatibility. warnings.push(format!( "proc-macro library `{}` should not specify `plugin = true`", lib.name() )); } warnings.push(format!( "library `{}` should only specify `proc-macro = true` instead of setting `crate-type`", lib.name() )); if kinds.len() > 1 { anyhow::bail!("cannot mix `proc-macro` crate type with others"); } vec![CrateType::ProcMacro] } (_, Some(true), Some(true)) => { anyhow::bail!("`lib.plugin` and `lib.proc-macro` cannot both be `true`") } (Some(kinds), _, _) => kinds.iter().map(|s| s.into()).collect(), (None, Some(true), _) => vec![CrateType::Dylib], (None, _, Some(true)) => vec![CrateType::ProcMacro], (None, _, _) => vec![CrateType::Lib], }; let mut target = Target::lib_target(&lib.name(), crate_types, path, edition); configure(lib, &mut target)?; Ok(Some(target)) } fn clean_bins( features: &Features, toml_bins: Option<&Vec>, package_root: &Path, package_name: &str, edition: Edition, autodiscover: Option, warnings: &mut Vec, errors: &mut Vec, has_lib: bool, ) -> CargoResult> { let inferred = inferred_bins(package_root, package_name); let bins = toml_targets_and_inferred( toml_bins, &inferred, package_root, autodiscover, edition, warnings, "binary", "bin", "autobins", ); // This loop performs basic checks on each of the TomlTarget in `bins`. 
for bin in &bins { // For each binary, check if the `filename` parameter is populated. If it is, // check if the corresponding cargo feature has been activated. if bin.filename.is_some() { features.require(Feature::different_binary_name())?; } validate_target_name(bin, "binary", "bin", warnings)?; let name = bin.name(); if let Some(crate_types) = bin.crate_types() { if !crate_types.is_empty() { errors.push(format!( "the target `{}` is a binary and can't have any \ crate-types set (currently \"{}\")", name, crate_types.join(", ") )); } } if bin.proc_macro() == Some(true) { errors.push(format!( "the target `{}` is a binary and can't have `proc-macro` \ set `true`", name )); } if restricted_names::is_conflicting_artifact_name(&name) { anyhow::bail!( "the binary target name `{}` is forbidden, \ it conflicts with with cargo's build directory names", name ) } } validate_unique_names(&bins, "binary")?; let mut result = Vec::new(); for bin in &bins { let path = target_path(bin, &inferred, "bin", package_root, edition, &mut |_| { if let Some(legacy_path) = legacy_bin_path(package_root, &bin.name(), has_lib) { warnings.push(format!( "path `{}` was erroneously implicitly accepted for binary `{}`,\n\ please set bin.path in Cargo.toml", legacy_path.display(), bin.name() )); Some(legacy_path) } else { None } }); let path = match path { Ok(path) => path, Err(e) => anyhow::bail!("{}", e), }; let mut target = Target::bin_target( &bin.name(), bin.filename.clone(), path, bin.required_features.clone(), edition, ); configure(bin, &mut target)?; result.push(target); } return Ok(result); fn legacy_bin_path(package_root: &Path, name: &str, has_lib: bool) -> Option { if !has_lib { let path = package_root.join("src").join(format!("{}.rs", name)); if path.exists() { return Some(path); } } let path = package_root.join("src").join("main.rs"); if path.exists() { return Some(path); } let path = package_root .join("src") .join(DEFAULT_BIN_DIR_NAME) .join("main.rs"); if path.exists() { return 
Some(path); } None } } fn clean_examples( toml_examples: Option<&Vec>, package_root: &Path, edition: Edition, autodiscover: Option, warnings: &mut Vec, errors: &mut Vec, ) -> CargoResult> { let inferred = infer_from_directory(&package_root.join(DEFAULT_EXAMPLE_DIR_NAME)); let targets = clean_targets( "example", "example", toml_examples, &inferred, package_root, edition, autodiscover, warnings, errors, "autoexamples", )?; let mut result = Vec::new(); for (path, toml) in targets { toml.validate_crate_types("example", warnings); let crate_types = match toml.crate_types() { Some(kinds) => kinds.iter().map(|s| s.into()).collect(), None => Vec::new(), }; let mut target = Target::example_target( &toml.name(), crate_types, path, toml.required_features.clone(), edition, ); configure(&toml, &mut target)?; result.push(target); } Ok(result) } fn clean_tests( toml_tests: Option<&Vec>, package_root: &Path, edition: Edition, autodiscover: Option, warnings: &mut Vec, errors: &mut Vec, ) -> CargoResult> { let inferred = infer_from_directory(&package_root.join(DEFAULT_TEST_DIR_NAME)); let targets = clean_targets( "test", "test", toml_tests, &inferred, package_root, edition, autodiscover, warnings, errors, "autotests", )?; let mut result = Vec::new(); for (path, toml) in targets { let mut target = Target::test_target(&toml.name(), path, toml.required_features.clone(), edition); configure(&toml, &mut target)?; result.push(target); } Ok(result) } fn clean_benches( toml_benches: Option<&Vec>, package_root: &Path, edition: Edition, autodiscover: Option, warnings: &mut Vec, errors: &mut Vec, ) -> CargoResult> { let mut legacy_warnings = vec![]; let targets = { let mut legacy_bench_path = |bench: &TomlTarget| { let legacy_path = package_root.join("src").join("bench.rs"); if !(bench.name() == "bench" && legacy_path.exists()) { return None; } legacy_warnings.push(format!( "path `{}` was erroneously implicitly accepted for benchmark `{}`,\n\ please set bench.path in Cargo.toml", 
legacy_path.display(), bench.name() )); Some(legacy_path) }; let inferred = infer_from_directory(&package_root.join("benches")); clean_targets_with_legacy_path( "benchmark", "bench", toml_benches, &inferred, package_root, edition, autodiscover, warnings, errors, &mut legacy_bench_path, "autobenches", )? }; warnings.append(&mut legacy_warnings); let mut result = Vec::new(); for (path, toml) in targets { let mut target = Target::bench_target(&toml.name(), path, toml.required_features.clone(), edition); configure(&toml, &mut target)?; result.push(target); } Ok(result) } fn clean_targets( target_kind_human: &str, target_kind: &str, toml_targets: Option<&Vec>, inferred: &[(String, PathBuf)], package_root: &Path, edition: Edition, autodiscover: Option, warnings: &mut Vec, errors: &mut Vec, autodiscover_flag_name: &str, ) -> CargoResult> { clean_targets_with_legacy_path( target_kind_human, target_kind, toml_targets, inferred, package_root, edition, autodiscover, warnings, errors, &mut |_| None, autodiscover_flag_name, ) } fn clean_targets_with_legacy_path( target_kind_human: &str, target_kind: &str, toml_targets: Option<&Vec>, inferred: &[(String, PathBuf)], package_root: &Path, edition: Edition, autodiscover: Option, warnings: &mut Vec, errors: &mut Vec, legacy_path: &mut dyn FnMut(&TomlTarget) -> Option, autodiscover_flag_name: &str, ) -> CargoResult> { let toml_targets = toml_targets_and_inferred( toml_targets, inferred, package_root, autodiscover, edition, warnings, target_kind_human, target_kind, autodiscover_flag_name, ); for target in &toml_targets { validate_target_name(target, target_kind_human, target_kind, warnings)?; } validate_unique_names(&toml_targets, target_kind)?; let mut result = Vec::new(); for target in toml_targets { let path = target_path( &target, inferred, target_kind, package_root, edition, legacy_path, ); let path = match path { Ok(path) => path, Err(e) => { errors.push(e); continue; } }; result.push((path, target)); } Ok(result) } fn 
inferred_lib(package_root: &Path) -> Option { let lib = package_root.join("src").join("lib.rs"); if lib.exists() { Some(lib) } else { None } } fn inferred_bins(package_root: &Path, package_name: &str) -> Vec<(String, PathBuf)> { let main = package_root.join("src").join("main.rs"); let mut result = Vec::new(); if main.exists() { result.push((package_name.to_string(), main)); } result.extend(infer_from_directory( &package_root.join("src").join(DEFAULT_BIN_DIR_NAME), )); result } fn infer_from_directory(directory: &Path) -> Vec<(String, PathBuf)> { let entries = match fs::read_dir(directory) { Err(_) => return Vec::new(), Ok(dir) => dir, }; entries .filter_map(|e| e.ok()) .filter(is_not_dotfile) .filter_map(|d| infer_any(&d)) .collect() } fn infer_any(entry: &DirEntry) -> Option<(String, PathBuf)> { if entry.path().extension().and_then(|p| p.to_str()) == Some("rs") { infer_file(entry) } else if entry.file_type().map(|t| t.is_dir()).ok() == Some(true) { infer_subdirectory(entry) } else { None } } fn infer_file(entry: &DirEntry) -> Option<(String, PathBuf)> { let path = entry.path(); path.file_stem() .and_then(|p| p.to_str()) .map(|p| (p.to_owned(), path.clone())) } fn infer_subdirectory(entry: &DirEntry) -> Option<(String, PathBuf)> { let path = entry.path(); let main = path.join("main.rs"); let name = path.file_name().and_then(|n| n.to_str()); match (name, main.exists()) { (Some(name), true) => Some((name.to_owned(), main)), _ => None, } } fn is_not_dotfile(entry: &DirEntry) -> bool { entry.file_name().to_str().map(|s| s.starts_with('.')) == Some(false) } fn toml_targets_and_inferred( toml_targets: Option<&Vec>, inferred: &[(String, PathBuf)], package_root: &Path, autodiscover: Option, edition: Edition, warnings: &mut Vec, target_kind_human: &str, target_kind: &str, autodiscover_flag_name: &str, ) -> Vec { let inferred_targets = inferred_to_toml_targets(inferred); match toml_targets { None => { if let Some(false) = autodiscover { vec![] } else { inferred_targets } } 
Some(targets) => { let mut targets = targets.clone(); let target_path = |target: &TomlTarget| target.path.clone().map(|p| package_root.join(p.0)); let mut seen_names = HashSet::new(); let mut seen_paths = HashSet::new(); for target in targets.iter() { seen_names.insert(target.name.clone()); seen_paths.insert(target_path(target)); } let mut rem_targets = vec![]; for target in inferred_targets { if !seen_names.contains(&target.name) && !seen_paths.contains(&target_path(&target)) { rem_targets.push(target); } } let autodiscover = match autodiscover { Some(autodiscover) => autodiscover, None => { if edition == Edition::Edition2015 { if !rem_targets.is_empty() { let mut rem_targets_str = String::new(); for t in rem_targets.iter() { if let Some(p) = t.path.clone() { rem_targets_str.push_str(&format!("* {}\n", p.0.display())) } } warnings.push(format!( "\ An explicit [[{section}]] section is specified in Cargo.toml which currently disables Cargo from automatically inferring other {target_kind_human} targets. This inference behavior will change in the Rust 2018 edition and the following files will be included as a {target_kind_human} target: {rem_targets_str} This is likely to break cargo build or cargo test as these files may not be ready to be compiled as a {target_kind_human} target today. You can future-proof yourself and disable this warning by adding `{autodiscover_flag_name} = false` to your [package] section. You may also move the files to a location where Cargo would not automatically infer them to be a target, such as in subfolders. 
For more information on this warning you can consult https://github.com/rust-lang/cargo/issues/5330", section = target_kind, target_kind_human = target_kind_human, rem_targets_str = rem_targets_str, autodiscover_flag_name = autodiscover_flag_name, )); }; false } else { true } } }; if autodiscover { targets.append(&mut rem_targets); } targets } } } fn inferred_to_toml_targets(inferred: &[(String, PathBuf)]) -> Vec { inferred .iter() .map(|&(ref name, ref path)| TomlTarget { name: Some(name.clone()), path: Some(PathValue(path.clone())), ..TomlTarget::new() }) .collect() } fn validate_target_name( target: &TomlTarget, target_kind_human: &str, target_kind: &str, warnings: &mut Vec, ) -> CargoResult<()> { match target.name { Some(ref name) => { if name.trim().is_empty() { anyhow::bail!("{} target names cannot be empty", target_kind_human) } if cfg!(windows) && restricted_names::is_windows_reserved(name) { warnings.push(format!( "{} target `{}` is a reserved Windows filename, \ this target will not work on Windows platforms", target_kind_human, name )); } } None => anyhow::bail!( "{} target {}.name is required", target_kind_human, target_kind ), } Ok(()) } /// Will check a list of toml targets, and make sure the target names are unique within a vector. 
fn validate_unique_names(targets: &[TomlTarget], target_kind: &str) -> CargoResult<()> { let mut seen = HashSet::new(); for name in targets.iter().map(|e| e.name()) { if !seen.insert(name.clone()) { anyhow::bail!( "found duplicate {target_kind} name {name}, \ but all {target_kind} targets must have a unique name", target_kind = target_kind, name = name ); } } Ok(()) } fn configure(toml: &TomlTarget, target: &mut Target) -> CargoResult<()> { let t2 = target.clone(); target .set_tested(toml.test.unwrap_or_else(|| t2.tested())) .set_doc(toml.doc.unwrap_or_else(|| t2.documented())) .set_doctest(toml.doctest.unwrap_or_else(|| t2.doctested())) .set_benched(toml.bench.unwrap_or_else(|| t2.benched())) .set_harness(toml.harness.unwrap_or_else(|| t2.harness())) .set_proc_macro(toml.proc_macro().unwrap_or_else(|| t2.proc_macro())) .set_for_host(match (toml.plugin, toml.proc_macro()) { (None, None) => t2.for_host(), (Some(true), _) | (_, Some(true)) => true, (Some(false), _) | (_, Some(false)) => false, }); if let Some(edition) = toml.edition.clone() { target.set_edition( edition .parse() .with_context(|| "failed to parse the `edition` key")?, ); } Ok(()) } /// Build an error message for a target path that cannot be determined either /// by auto-discovery or specifying. /// /// This function tries to detect commonly wrong paths for targets: /// /// test -> tests/*.rs, tests/*/main.rs /// bench -> benches/*.rs, benches/*/main.rs /// example -> examples/*.rs, examples/*/main.rs /// bin -> src/bin/*.rs, src/bin/*/main.rs /// /// Note that the logic need to sync with [`infer_from_directory`] if changes. 
fn target_path_not_found_error_message( package_root: &Path, target: &TomlTarget, target_kind: &str, ) -> String { fn possible_target_paths(name: &str, kind: &str, commonly_wrong: bool) -> [PathBuf; 2] { let mut target_path = PathBuf::new(); match (kind, commonly_wrong) { // commonly wrong paths ("test" | "bench" | "example", true) => target_path.push(kind), ("bin", true) => { target_path.push("src"); target_path.push("bins"); } // default inferred paths ("test", false) => target_path.push(DEFAULT_TEST_DIR_NAME), ("bench", false) => target_path.push(DEFAULT_BENCH_DIR_NAME), ("example", false) => target_path.push(DEFAULT_EXAMPLE_DIR_NAME), ("bin", false) => { target_path.push("src"); target_path.push(DEFAULT_BIN_DIR_NAME); } _ => unreachable!("invalid target kind: {}", kind), } target_path.push(name); let target_path_file = { let mut path = target_path.clone(); path.set_extension("rs"); path }; let target_path_subdir = { target_path.push("main.rs"); target_path }; return [target_path_file, target_path_subdir]; } let target_name = target.name(); let commonly_wrong_paths = possible_target_paths(&target_name, target_kind, true); let possible_paths = possible_target_paths(&target_name, target_kind, false); let existing_wrong_path_index = match ( package_root.join(&commonly_wrong_paths[0]).exists(), package_root.join(&commonly_wrong_paths[1]).exists(), ) { (true, _) => Some(0), (_, true) => Some(1), _ => None, }; if let Some(i) = existing_wrong_path_index { return format!( "\ can't find `{name}` {kind} at default paths, but found a file at `{wrong_path}`. Perhaps rename the file to `{possible_path}` for target auto-discovery, \ or specify {kind}.path if you want to use a non-default path.", name = target_name, kind = target_kind, wrong_path = commonly_wrong_paths[i].display(), possible_path = possible_paths[i].display(), ); } format!( "can't find `{name}` {kind} at `{path_file}` or `{path_dir}`. 
\ Please specify {kind}.path if you want to use a non-default path.", name = target_name, kind = target_kind, path_file = possible_paths[0].display(), path_dir = possible_paths[1].display(), ) } fn target_path( target: &TomlTarget, inferred: &[(String, PathBuf)], target_kind: &str, package_root: &Path, edition: Edition, legacy_path: &mut dyn FnMut(&TomlTarget) -> Option, ) -> Result { if let Some(ref path) = target.path { // Should we verify that this path exists here? return Ok(package_root.join(&path.0)); } let name = target.name(); let mut matching = inferred .iter() .filter(|&&(ref n, _)| n == &name) .map(|&(_, ref p)| p.clone()); let first = matching.next(); let second = matching.next(); match (first, second) { (Some(path), None) => Ok(path), (None, None) => { if edition == Edition::Edition2015 { if let Some(path) = legacy_path(target) { return Ok(path); } } Err(target_path_not_found_error_message( package_root, target, target_kind, )) } (Some(p0), Some(p1)) => { if edition == Edition::Edition2015 { if let Some(path) = legacy_path(target) { return Ok(path); } } Err(format!( "\ cannot infer path for `{}` {} Cargo doesn't know which to use because multiple target files found at `{}` and `{}`.", target.name(), target_kind, p0.strip_prefix(package_root).unwrap_or(&p0).display(), p1.strip_prefix(package_root).unwrap_or(&p1).display(), )) } (None, Some(_)) => unreachable!(), } } cargo-0.66.0/src/cargo/util/vcs.rs000066400000000000000000000053721432416201200167000ustar00rootroot00000000000000use crate::util::CargoResult; use cargo_util::paths; use cargo_util::ProcessBuilder; use std::path::Path; // Check if we are in an existing repo. We define that to be true if either: // // 1. We are in a git repo and the path to the new package is not an ignored // path in that repo. // 2. We are in an HG repo. 
pub fn existing_vcs_repo(path: &Path, cwd: &Path) -> bool { fn in_git_repo(path: &Path, cwd: &Path) -> bool { if let Ok(repo) = GitRepo::discover(path, cwd) { // Don't check if the working directory itself is ignored. if repo.workdir().map_or(false, |workdir| workdir == path) { true } else { !repo.is_path_ignored(path).unwrap_or(false) } } else { false } } in_git_repo(path, cwd) || HgRepo::discover(path, cwd).is_ok() } pub struct HgRepo; pub struct GitRepo; pub struct PijulRepo; pub struct FossilRepo; impl GitRepo { pub fn init(path: &Path, _: &Path) -> CargoResult { git2::Repository::init(path)?; Ok(GitRepo) } pub fn discover(path: &Path, _: &Path) -> Result { git2::Repository::discover(path) } } impl HgRepo { pub fn init(path: &Path, cwd: &Path) -> CargoResult { ProcessBuilder::new("hg") .cwd(cwd) .arg("init") .arg("--") .arg(path) .exec()?; Ok(HgRepo) } pub fn discover(path: &Path, cwd: &Path) -> CargoResult { ProcessBuilder::new("hg") .cwd(cwd) .arg("--cwd") .arg(path) .arg("root") .exec_with_output()?; Ok(HgRepo) } } impl PijulRepo { pub fn init(path: &Path, cwd: &Path) -> CargoResult { ProcessBuilder::new("pijul") .cwd(cwd) .arg("init") .arg("--") .arg(path) .exec()?; Ok(PijulRepo) } } impl FossilRepo { pub fn init(path: &Path, cwd: &Path) -> CargoResult { // fossil doesn't create the directory so we'll do that first paths::create_dir_all(path)?; // set up the paths we'll use let db_fname = ".fossil"; let mut db_path = path.to_owned(); db_path.push(db_fname); // then create the fossil DB in that location ProcessBuilder::new("fossil") .cwd(cwd) .arg("init") .arg("--") .arg(&db_path) .exec()?; // open it in that new directory ProcessBuilder::new("fossil") .cwd(&path) .arg("open") .arg("--") .arg(db_fname) .exec()?; Ok(FossilRepo) } } cargo-0.66.0/src/cargo/util/workspace.rs000066400000000000000000000107211432416201200200750ustar00rootroot00000000000000use crate::core::compiler::Unit; use crate::core::manifest::TargetSourcePath; use crate::core::{Target, 
Workspace}; use crate::ops::CompileOptions; use crate::util::CargoResult; use anyhow::bail; use cargo_util::ProcessBuilder; use std::fmt::Write; use std::path::PathBuf; fn get_available_targets<'a>( filter_fn: fn(&Target) -> bool, ws: &'a Workspace<'_>, options: &'a CompileOptions, ) -> CargoResult> { let packages = options.spec.get_packages(ws)?; let mut targets: Vec<_> = packages .into_iter() .flat_map(|pkg| { pkg.manifest() .targets() .iter() .filter(|target| filter_fn(target)) }) .map(Target::name) .collect(); targets.sort(); Ok(targets) } fn print_available_targets( filter_fn: fn(&Target) -> bool, ws: &Workspace<'_>, options: &CompileOptions, option_name: &str, plural_name: &str, ) -> CargoResult<()> { let targets = get_available_targets(filter_fn, ws, options)?; let mut output = String::new(); writeln!(output, "\"{}\" takes one argument.", option_name)?; if targets.is_empty() { writeln!(output, "No {} available.", plural_name)?; } else { writeln!(output, "Available {}:", plural_name)?; for target in targets { writeln!(output, " {}", target)?; } } bail!("{}", output) } pub fn print_available_packages(ws: &Workspace<'_>) -> CargoResult<()> { let packages = ws .members() .map(|pkg| pkg.name().as_str()) .collect::>(); let mut output = "\"--package \" requires a SPEC format value, \ which can be any package ID specifier in the dependency graph.\n\ Run `cargo help pkgid` for more information about SPEC format.\n\n" .to_string(); if packages.is_empty() { // This would never happen. // Just in case something regresses we covers it here. 
writeln!(output, "No packages available.")?; } else { writeln!(output, "Possible packages/workspace members:")?; for package in packages { writeln!(output, " {}", package)?; } } bail!("{}", output) } pub fn print_available_examples(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> { print_available_targets(Target::is_example, ws, options, "--example", "examples") } pub fn print_available_binaries(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> { print_available_targets(Target::is_bin, ws, options, "--bin", "binaries") } pub fn print_available_benches(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> { print_available_targets(Target::is_bench, ws, options, "--bench", "benches") } pub fn print_available_tests(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> { print_available_targets(Target::is_test, ws, options, "--test", "tests") } /// The path that we pass to rustc is actually fairly important because it will /// show up in error messages (important for readability), debug information /// (important for caching), etc. As a result we need to be pretty careful how we /// actually invoke rustc. /// /// In general users don't expect `cargo build` to cause rebuilds if you change /// directories. That could be if you just change directories in the package or /// if you literally move the whole package wholesale to a new directory. As a /// result we mostly don't factor in `cwd` to this calculation. Instead we try to /// track the workspace as much as possible and we update the current directory /// of rustc/rustdoc where appropriate. /// /// The first returned value here is the argument to pass to rustc, and the /// second is the cwd that rustc should operate in. 
pub fn path_args(ws: &Workspace<'_>, unit: &Unit) -> (PathBuf, PathBuf) { let ws_root = ws.root(); let src = match unit.target.src_path() { TargetSourcePath::Path(path) => path.to_path_buf(), TargetSourcePath::Metabuild => unit.pkg.manifest().metabuild_path(ws.target_dir()), }; assert!(src.is_absolute()); if unit.pkg.package_id().source_id().is_path() { if let Ok(path) = src.strip_prefix(ws_root) { return (path.to_path_buf(), ws_root.to_path_buf()); } } (src, unit.pkg.root().to_path_buf()) } pub fn add_path_args(ws: &Workspace<'_>, unit: &Unit, cmd: &mut ProcessBuilder) { let (arg, cwd) = path_args(ws, unit); cmd.arg(arg); cmd.cwd(cwd); } cargo-0.66.0/src/cargo/version.rs000066400000000000000000000055741432416201200166210ustar00rootroot00000000000000//! Code for representing cargo's release version number. use std::fmt; /// Information about the git repository where cargo was built from. pub struct CommitInfo { pub short_commit_hash: String, pub commit_hash: String, pub commit_date: String, } /// Cargo's version. pub struct VersionInfo { /// Cargo's version, such as "1.57.0", "1.58.0-beta.1", "1.59.0-nightly", etc. pub version: String, /// The release channel we were built for (stable/beta/nightly/dev). /// /// `None` if not built via rustuild. pub release_channel: Option, /// Information about the Git repository we may have been built from. /// /// `None` if not built from a git repo. pub commit_info: Option, } impl fmt::Display for VersionInfo { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.version)?; if let Some(ref ci) = self.commit_info { write!(f, " ({} {})", ci.short_commit_hash, ci.commit_date)?; }; Ok(()) } } /// Returns information about cargo's version. pub fn version() -> VersionInfo { macro_rules! option_env_str { ($name:expr) => { option_env!($name).map(|s| s.to_string()) }; } // This is the version set in rustbuild, which we use to match rustc. 
let version = option_env_str!("CFG_RELEASE").unwrap_or_else(|| { // If cargo is not being built by rustbuild, then we just use the // version from cargo's own `Cargo.toml`. // // There are two versions at play here: // - version of cargo-the-binary, which you see when you type `cargo --version` // - version of cargo-the-library, which you download from crates.io for use // in your packages. // // The library is permanently unstable, so it always has a 0 major // version. However, the CLI now reports a stable 1.x version // (starting in 1.26) which stays in sync with rustc's version. // // Coincidentally, the minor version for cargo-the-library is always // +1 of rustc's minor version (that is, `rustc 1.11.0` corresponds to // `cargo `0.12.0`). The versions always get bumped in lockstep, so // this should continue to hold. let minor = env!("CARGO_PKG_VERSION_MINOR").parse::().unwrap() - 1; let patch = env!("CARGO_PKG_VERSION_PATCH").parse::().unwrap(); format!("1.{}.{}", minor, patch) }); let release_channel = option_env_str!("CFG_RELEASE_CHANNEL"); let commit_info = option_env_str!("CARGO_COMMIT_HASH").map(|commit_hash| CommitInfo { short_commit_hash: option_env_str!("CARGO_COMMIT_SHORT_HASH").unwrap(), commit_hash, commit_date: option_env_str!("CARGO_COMMIT_DATE").unwrap(), }); VersionInfo { version, release_channel, commit_info, } } cargo-0.66.0/src/doc/000077500000000000000000000000001432416201200142255ustar00rootroot00000000000000cargo-0.66.0/src/doc/.gitignore000066400000000000000000000000321432416201200162100ustar00rootroot00000000000000# Ignore built book book/ cargo-0.66.0/src/doc/README.md000066400000000000000000000043741432416201200155140ustar00rootroot00000000000000# Cargo documentation This directory contains Cargo's documentation. There are two parts, [The Cargo Book] which is built with [mdbook] and the man pages, which are built with [mdman]. 
[The Cargo Book]: https://doc.rust-lang.org/cargo/ [mdBook]: https://github.com/rust-lang/mdBook [mdman]: https://github.com/rust-lang/cargo/tree/master/crates/mdman/ ### Building the book Building the book requires [mdBook]. To get it: ```console $ cargo install mdbook ``` To build the book: ```console $ mdbook build ``` `mdbook` provides a variety of different commands and options to help you work on the book: * `mdbook build --open`: Build the book and open it in a web browser. * `mdbook serve`: Launches a web server on localhost. It also automatically rebuilds the book whenever any file changes and automatically reloads your web browser. The book contents are driven by the [`SUMMARY.md`](src/SUMMARY.md) file, and every file must be linked there. ### Building the man pages The man pages use a tool called [mdman] to convert markdown to a man page format. Check out the documentation at [`mdman/doc/`](../../crates/mdman/doc/) for more details. The man pages are converted from a templated markdown (located in the [`src/doc/man/`](man) directory) to three different formats: 1. Troff-style man pages, saved in [`src/etc/man/`](../etc/man). 2. Markdown (with some HTML) for the Cargo Book, saved in [`src/doc/src/commands/`](src/commands). 3. Plain text (needed for embedded man pages on platforms without man such as Windows), saved in [`src/doc/man/generated_txt/`](man/generated_txt). To rebuild the man pages, run the script `build-man.sh` in the `src/doc` directory. ```console $ ./build-man.sh ``` ### SemVer chapter tests There is a script to verify that the examples in the SemVer chapter work as intended. To run the tests, go into the `semver-check` directory and run `cargo run`. ## Contributing We'd love your help with improving the documentation! Please feel free to [open issues](https://github.com/rust-lang/cargo/issues) about anything, and send in PRs for things you'd like to fix or change. 
If your change is large, please open an issue first, so we can make sure that it's something we'd accept before you go through the work of getting a PR together. cargo-0.66.0/src/doc/book.toml000066400000000000000000000004731432416201200160600ustar00rootroot00000000000000[book] title = "The Cargo Book" author = "Alex Crichton, Steve Klabnik and Carol Nichols, with Contributions from the Rust Community" [output.html] git-repository-url = "https://github.com/rust-lang/cargo/tree/master/src/doc/src" edit-url-template = "https://github.com/rust-lang/cargo/edit/master/src/doc/{path}" cargo-0.66.0/src/doc/build-man.sh000077500000000000000000000020771432416201200164420ustar00rootroot00000000000000#!/bin/bash # # This script builds the Cargo man pages. # # The source for the man pages are located in src/doc/man/ in markdown format. # These also are handlebars templates, see crates/mdman/README.md for details. # # The generated man pages are placed in the src/etc/man/ directory. The pages # are also expanded into markdown (after being expanded by handlebars) and # saved in the src/doc/src/commands/ directory. These are included in the # Cargo book, which is converted to HTML by mdbook. 
set -e cd "$(dirname "${BASH_SOURCE[0]}")" OPTIONS="--url https://doc.rust-lang.org/cargo/commands/ \ --man rustc:1=https://doc.rust-lang.org/rustc/index.html \ --man rustdoc:1=https://doc.rust-lang.org/rustdoc/index.html" cargo run --manifest-path=../../crates/mdman/Cargo.toml -- \ -t md -o src/commands man/cargo*.md \ $OPTIONS cargo run --manifest-path=../../crates/mdman/Cargo.toml -- \ -t txt -o man/generated_txt man/cargo*.md \ $OPTIONS cargo run --manifest-path=../../crates/mdman/Cargo.toml -- \ -t man -o ../etc/man man/cargo*.md \ $OPTIONS cargo-0.66.0/src/doc/contrib/000077500000000000000000000000001432416201200156655ustar00rootroot00000000000000cargo-0.66.0/src/doc/contrib/README.md000066400000000000000000000010401432416201200171370ustar00rootroot00000000000000# Cargo Contributor Guide This is the source of the Cargo Contributor Guide, published at . It is written in Markdown, using the [mdbook] tool to convert to HTML. If you are editing these pages, the best option to view the results is to run `mdbook serve`, which will start a web server on localhost that you can visit to view the book, and it will automatically reload each time you edit a page. This is published via GitHub Actions to GitHub Pages. 
[mdbook]: https://rust-lang.github.io/mdBook/ cargo-0.66.0/src/doc/contrib/book.toml000066400000000000000000000002521432416201200175130ustar00rootroot00000000000000[book] title = "Cargo Contributor Guide" authors = ["Eric Huss"] [output.html] git-repository-url = "https://github.com/rust-lang/cargo/tree/master/src/doc/contrib/src" cargo-0.66.0/src/doc/contrib/src/000077500000000000000000000000001432416201200164545ustar00rootroot00000000000000cargo-0.66.0/src/doc/contrib/src/SUMMARY.md000066400000000000000000000015071432416201200201360ustar00rootroot00000000000000# Summary - [Introduction](./index.md) - [Issue Tracker](./issues.md) - [Process](./process/index.md) - [Working on Cargo](./process/working-on-cargo.md) - [Release process](./process/release.md) - [Unstable features](./process/unstable.md) - [Architecture](./architecture/index.md) - [Codebase Overview](./architecture/codebase.md) - [SubCommands](./architecture/subcommands.md) - [Console Output](./architecture/console.md) - [Packages and Resolution](./architecture/packages.md) - [Compilation](./architecture/compilation.md) - [Files](./architecture/files.md) - [Tests](./tests/index.md) - [Running Tests](./tests/running.md) - [Writing Tests](./tests/writing.md) - [Benchmarking and Profiling](./tests/profiling.md) - [Crater](./tests/crater.md) - [Design Principles](./design.md) cargo-0.66.0/src/doc/contrib/src/architecture/000077500000000000000000000000001432416201200211365ustar00rootroot00000000000000cargo-0.66.0/src/doc/contrib/src/architecture/codebase.md000066400000000000000000000133741432416201200232350ustar00rootroot00000000000000# Codebase Overview This is a very high-level overview of the Cargo codebase. * [`src/bin/cargo`](https://github.com/rust-lang/cargo/tree/master/src/bin/cargo) β€” Cargo is split in a library and a binary. This is the binary side that handles argument parsing, and then calls into the library to perform the appropriate subcommand. Each Cargo subcommand is a separate module here. 
See [SubCommands](subcommands.md). * [`src/cargo/ops`](https://github.com/rust-lang/cargo/tree/master/src/cargo/ops) β€” Every major operation is implemented here. This is where the binary CLI usually calls into to perform the appropriate action. * [`src/cargo/ops/cargo_compile.rs`](https://github.com/rust-lang/cargo/blob/master/src/cargo/ops/cargo_compile.rs) β€” This is the entry point for all the compilation commands. This is a good place to start if you want to follow how compilation starts and flows to completion. * [`src/cargo/core/resolver`](https://github.com/rust-lang/cargo/tree/master/src/cargo/core/resolver) β€” This is the dependency and feature resolvers. * [`src/cargo/core/compiler`](https://github.com/rust-lang/cargo/tree/master/src/cargo/core/compiler) β€” This is the code responsible for running `rustc` and `rustdoc`. * [`src/cargo/core/compiler/build_context/mod.rs`](https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/build_context/mod.rs) β€” The `BuildContext` is the result of the "front end" of the build process. This contains the graph of work to perform and any settings necessary for `rustc`. After this is built, the next stage of building is handled in `Context`. * [`src/cargo/core/compiler/context`](https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/context/mod.rs) β€” The `Context` is the mutable state used during the build process. This is the core of the build process, and everything is coordinated through this. * [`src/cargo/core/compiler/fingerprint.rs`](https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/fingerprint.rs) β€” The `fingerprint` module contains all the code that handles detecting if a crate needs to be recompiled. * [`src/cargo/core/source`](https://github.com/rust-lang/cargo/tree/master/src/cargo/core/source) β€” The `Source` trait is an abstraction over different sources of packages. Sources are uniquely identified by a `SourceId`. 
Sources are implemented in the [`src/cargo/sources`](https://github.com/rust-lang/cargo/tree/master/src/cargo/sources) directory. * [`src/cargo/util`](https://github.com/rust-lang/cargo/tree/master/src/cargo/util) β€” This directory contains generally-useful utility modules. * [`src/cargo/util/config`](https://github.com/rust-lang/cargo/tree/master/src/cargo/util/config) β€” This directory contains the config parser. It makes heavy use of [serde](https://serde.rs/) to merge and translate config values. The `Config` is usually accessed from the [`Workspace`](https://github.com/rust-lang/cargo/blob/master/src/cargo/core/workspace.rs), though references to it are scattered around for more convenient access. * [`src/cargo/util/toml`](https://github.com/rust-lang/cargo/tree/master/src/cargo/util/toml) β€” This directory contains the code for parsing `Cargo.toml` files. * [`src/cargo/ops/lockfile.rs`](https://github.com/rust-lang/cargo/blob/master/src/cargo/ops/lockfile.rs) β€” This is where `Cargo.lock` files are loaded and saved. * [`src/doc`](https://github.com/rust-lang/cargo/tree/master/src/doc) β€” This directory contains Cargo's documentation and man pages. * [`src/etc`](https://github.com/rust-lang/cargo/tree/master/src/etc) β€” These are files that get distributed in the `etc` directory in the Rust release. The man pages are auto-generated by a script in the `src/doc` directory. * [`crates`](https://github.com/rust-lang/cargo/tree/master/crates) β€” A collection of independent crates used by Cargo. ## Extra crates Some functionality is split off into separate crates, usually in the [`crates`](https://github.com/rust-lang/cargo/tree/master/crates) directory. * [`cargo-platform`](https://github.com/rust-lang/cargo/tree/master/crates/cargo-platform) β€” This library handles parsing `cfg` expressions. * [`cargo-test-macro`](https://github.com/rust-lang/cargo/tree/master/crates/cargo-test-macro) β€” This is a proc-macro used by the test suite to define tests. 
More information can be found at [`cargo_test` attribute](../tests/writing.md#cargo_test-attribute). * [`cargo-test-support`](https://github.com/rust-lang/cargo/tree/master/crates/cargo-test-support) β€” This contains a variety of code to support [writing tests](../tests/writing.md). * [`cargo-util`](https://github.com/rust-lang/cargo/tree/master/crates/cargo-util) β€” This contains general utility code that is shared between cargo and the testsuite. * [`crates-io`](https://github.com/rust-lang/cargo/tree/master/crates/crates-io) β€” This contains code for accessing the crates.io API. * [`credential`](https://github.com/rust-lang/cargo/tree/master/crates/credential) β€” This subdirectory contains several packages for implementing the experimental [credential-process](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#credential-process) feature. * [`mdman`](https://github.com/rust-lang/cargo/tree/master/crates/mdman) β€” This is a utility for generating cargo's man pages. See [Building the man pages](https://github.com/rust-lang/cargo/tree/master/src/doc#building-the-man-pages) for more information. * [`resolver-tests`](https://github.com/rust-lang/cargo/tree/master/crates/resolver-tests) β€” This is a dedicated package that defines tests for the [dependency resolver](../architecture/packages.md#resolver). cargo-0.66.0/src/doc/contrib/src/architecture/compilation.md000066400000000000000000000050711432416201200240010ustar00rootroot00000000000000# Compilation The [`Unit`] is the primary data structure representing a single execution of the compiler. It (mostly) contains all the information needed to determine which flags to pass to the compiler. The entry to the compilation process is located in the [`cargo_compile`] module. The compilation can be conceptually broken into these steps: 1. Perform dependency resolution (see [the resolution chapter]). 2. Generate the root `Unit`s, the things the user requested to compile on the command-line. 
This is done in [`generate_targets`]. 3. Starting from the root `Unit`s, generate the [`UnitGraph`] by walking the dependency graph from the resolver. The `UnitGraph` contains all of the `Unit` structs, and information about the dependency relationships between units. This is done in the [`unit_dependencies`] module. 4. Construct the [`BuildContext`] with all of the information collected so far. This is the end of the "front end" of compilation. 5. Create a [`Context`], a large, mutable data structure that coordinates the compilation process. 6. The [`Context`] will create a [`JobQueue`], a data structure that tracks which units need to be built. 7. [`drain_the_queue`] does the compilation process. This is the only point in Cargo that currently uses threads. 8. The result of the compilation is stored in the [`Compilation`] struct. This can be used for various things, such as running tests after the compilation has finished. [`cargo_compile`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/ops/cargo_compile.rs [`generate_targets`]: https://github.com/rust-lang/cargo/blob/e4b65bdc80f2a293447f2f6a808fa7c84bf9a357/src/cargo/ops/cargo_compile.rs#L725-L739 [`UnitGraph`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/unit_graph.rs [the resolution chapter]: packages.md [`Unit`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/unit.rs [`unit_dependencies`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/unit_dependencies.rs [`BuildContext`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/build_context/mod.rs [`Context`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/context/mod.rs [`JobQueue`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/job_queue.rs [`drain_the_queue`]: https://github.com/rust-lang/cargo/blob/e4b65bdc80f2a293447f2f6a808fa7c84bf9a357/src/cargo/core/compiler/job_queue.rs#L623-L634 [`Compilation`]: 
https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/compilation.rs cargo-0.66.0/src/doc/contrib/src/architecture/console.md000066400000000000000000000071371432416201200231320ustar00rootroot00000000000000# Console Output All of Cargo's output should go through the [`Shell`] struct. You can normally obtain the `Shell` instance from the [`Config`] struct. Do **not** use the std `println!` macros. Most of Cargo's output goes to stderr. When running in JSON mode, the output goes to stdout. It is important to properly handle errors when writing to the console. Informational commands, like `cargo list`, should ignore any errors writing the output. There are some [`drop_print`] macros that are intended to make this easier. Messages written during compilation should handle errors, and abort the build if they are unable to be displayed. This is generally automatically handled in the [`JobQueue`] as it processes each message. [`Shell`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/shell.rs [`Config`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/util/config/mod.rs [`drop_print`]: https://github.com/rust-lang/cargo/blob/e4b65bdc80f2a293447f2f6a808fa7c84bf9a357/src/cargo/util/config/mod.rs#L1820-L1848 [`JobQueue`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/job_queue.rs ## Errors Cargo uses [`anyhow`] for managing errors. This makes it convenient to "chain" errors together, so that Cargo can report how an error originated, and what it was trying to do at the time. Error helpers are implemented in the [`errors`] module. Use the `InternalError` error type for errors that are not expected to happen. This will print a message to the user to file a bug report. The binary side of Cargo uses the `CliError` struct to wrap the process exit code. Usually Cargo exits with 101 for an error, but some commands like `cargo test` will exit with different codes. 
[`errors`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/util/errors.rs ## Style Some guidelines for Cargo's output: * Keep the normal output brief. Cargo is already fairly noisy, so try to keep the output as brief and clean as possible. * Good error messages are very important! Try to keep them brief and to the point, but good enough that a beginner can understand what is wrong and can figure out how to fix. It is a difficult balance to hit! Err on the side of providing extra information. * When using any low-level routines, such as `std::fs`, *always* add error context about what it is doing. For example, reading from a file should include context about which file is being read if there is an error. * Cargo's error style is usually a phrase, starting with a lowercase letter. If there is a longer error message that needs multiple sentences, go ahead and use multiple sentences. This should probably be improved sometime in the future to be more structured. ## Debug logging Cargo uses the [`env_logger`] crate to display debug log messages. The `CARGO_LOG` environment variable can be set to enable debug logging, with a value such as `trace`, `debug`, or `warn`. It also supports filtering for specific modules. Feel free to use the standard [`log`] macros to help with diagnosing problems. ```sh # Outputs all logs with levels debug and higher CARGO_LOG=debug cargo generate-lockfile # Don't forget that you can filter by module as well CARGO_LOG=cargo::core::resolver=trace cargo generate-lockfile # This will print lots of info about the download process. `trace` prints even more. CARGO_HTTP_DEBUG=true CARGO_LOG=cargo::ops::registry=debug cargo fetch # This is an important command for diagnosing fingerprint issues. 
CARGO_LOG=cargo::core::compiler::fingerprint=trace cargo build ``` [`env_logger`]: https://docs.rs/env_logger [`log`]: https://docs.rs/log [`anyhow`]: https://docs.rs/anyhow cargo-0.66.0/src/doc/contrib/src/architecture/files.md000066400000000000000000000100601432416201200225570ustar00rootroot00000000000000# Files This chapter gives some pointers on where to start looking at Cargo's on-disk data file structures. * [`Layout`] is the abstraction for the `target` directory. It handles locking the target directory, and providing paths to the parts inside. There is a separate `Layout` for each "target". * [`Resolve`] contains the contents of the `Cargo.lock` file. See the [`encode`] module for the different `Cargo.lock` formats. * [`TomlManifest`] contains the contents of the `Cargo.toml` file. It is translated to a [`Manifest`] object for some simplification, and the `Manifest` is stored in a [`Package`]. * The [`fingerprint`] module deals with the fingerprint information stored in `target/debug/.fingerprint`. This tracks whether or not a crate needs to be rebuilt. * `cargo install` tracks its installed files with some metadata in `$CARGO_HOME`. The metadata is managed in the [`common_for_install_and_uninstall`] module. * Git sources are cached in `$CARGO_HOME/git`. The code for this cache is in the [`git`] source module. * Registries are cached in `$CARGO_HOME/registry`. There are three parts, the index, the compressed `.crate` files, and the extracted sources of those crate files. * Management of the registry cache can be found in the [`registry`] source module. Note that this includes an on-disk cache as an optimization for accessing the git repository. * Saving of `.crate` files is handled by the [`RemoteRegistry`]. * Extraction of `.crate` files is handled by the [`RegistrySource`]. * There is a lock for the package cache. Code must be careful, because this lock must be obtained manually. See [`Config::acquire_package_cache_lock`]. 
[`Layout`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/layout.rs [`Resolve`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/resolver/resolve.rs [`encode`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/resolver/encode.rs [`TomlManifest`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/util/toml/mod.rs [`Manifest`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/manifest.rs [`Package`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/package.rs [`common_for_install_and_uninstall`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/ops/common_for_install_and_uninstall.rs [`git`]: https://github.com/rust-lang/cargo/tree/master/src/cargo/sources/git [`registry`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/sources/registry/mod.rs [`RemoteRegistry`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/sources/registry/remote.rs [`RegistrySource`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/sources/registry/mod.rs [`Config::acquire_package_cache_lock`]: https://github.com/rust-lang/cargo/blob/e4b65bdc80f2a293447f2f6a808fa7c84bf9a357/src/cargo/util/config/mod.rs#L1261-L1266 ## Filesystems Cargo tends to get run on a very wide array of file systems. Different file systems can have a wide range of capabilities, and Cargo should strive to do its best to handle them. Some examples of issues to deal with: * Not all file systems support locking. Cargo tries to detect if locking is supported, and if not, will ignore lock errors. This isn't ideal, but it is difficult to deal with. * The [`fs::canonicalize`] function doesn't work on all file systems (particularly some Windows file systems). If that function is used, there should be a fallback if it fails. This function will also return `\\?\` style paths on Windows, which can have some issues (such as some tools not supporting them, or having issues with relative paths). 
* Timestamps can be unreliable. The [`fingerprint`] module has a deeper discussion of this. One example is that Docker cache layers will erase the fractional part of the time stamp. * Symlinks are not always supported, particularly on Windows. [`fingerprint`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/fingerprint.rs [`fs::canonicalize`]: https://doc.rust-lang.org/std/fs/fn.canonicalize.html cargo-0.66.0/src/doc/contrib/src/architecture/index.md000066400000000000000000000004401432416201200225650ustar00rootroot00000000000000# Architecture Overview This chapter gives a very high-level overview of Cargo's architecture. This is intended to give you links into the code which is hopefully commented with more in-depth information. If you feel something is missing that would help you, feel free to ask on Zulip. cargo-0.66.0/src/doc/contrib/src/architecture/packages.md000066400000000000000000000130511432416201200232360ustar00rootroot00000000000000# Packages and Resolution ## Workspaces The [`Workspace`] object is usually created very early by calling the [`workspace`][ws-method] helper method. This discovers the root of the workspace, and loads all the workspace members as a [`Package`] object. Each package corresponds to a single `Cargo.toml` (which is deserialized into a [`Manifest`]), and may define several [`Target`]s, such as the library, binaries, integration test or examples. Targets are crates (each target defines a crate root, like `src/lib.rs` or `examples/foo.rs`) and are what is actually compiled by `rustc`. ## Packages and Sources There are several data structures that are important to understand how packages are found and loaded: * [`Package`] β€” A package, which is a `Cargo.toml` manifest and its associated source files. * [`PackageId`] β€” A unique identifier for a package. * [`Source`] β€” An abstraction for something that can fetch packages (a remote registry, a git repo, the local filesystem, etc.). 
Check out the [source implementations] for all the details about registries, indexes, git dependencies, etc. * [`SourceId`] β€” A unique identifier for a source. * [`SourceMap`] β€” Map of all available sources. * [`PackageRegistry`] β€” This is the main interface for how the dependency resolver finds packages. It contains the `SourceMap`, and handles things like the `[patch]` table. The `Registry` trait provides a generic interface to the `PackageRegistry`, but this is only used for providing an alternate implementation of the `PackageRegistry` for testing. The dependency resolver sends a query to the `PackageRegistry` to "get me all packages that match this dependency declaration". * [`Summary`] β€” A summary is a subset of a [`Manifest`], and is essentially the information that can be found in a registry index. Queries against the `PackageRegistry` yields a `Summary`. The resolver uses the summary information to build the dependency graph. * [`PackageSet`] β€” Contains all of the `Package` objects. This works with the [`Downloads`] struct to coordinate downloading packages. It has a reference to the `SourceMap` to get the `Source` objects which tell the `Downloads` struct which URLs to fetch. All of these come together in the [`ops::resolve`] module. This module contains the primary functions for performing resolution (described below). It also handles downloading of packages. It is essentially where all of the data structures above come together. ## Resolver [`Resolve`] is the representation of a directed graph of package dependencies, which uses [`PackageId`]s for nodes. This is the data structure that is saved to the `Cargo.lock` file. If there is no lock file, Cargo constructs a resolve by finding a graph of packages which matches declared dependency specification according to SemVer. [`ops::resolve`] is the front-end for creating a `Resolve`. It handles loading the `Cargo.lock` file, checking if it needs updating, etc. 
Resolution is currently performed twice. It is performed once with all features enabled. This is the resolve that gets saved to `Cargo.lock`. It then runs again with only the specific features the user selected on the command-line. Ideally this second run will get removed in the future when transitioning to the new feature resolver. ### Feature resolver A new feature-specific resolver was added in 2020 which adds more sophisticated feature resolution. It is located in the [`resolver::features`] module. The original dependency resolver still performs feature unification, as it can help reduce the dependencies it has to consider during resolution (rather than assuming every optional dependency of every package is enabled). Checking if a feature is enabled must go through the new feature resolver. [`Workspace`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/workspace.rs [ws-method]: https://github.com/rust-lang/cargo/blob/e4b65bdc80f2a293447f2f6a808fa7c84bf9a357/src/cargo/util/command_prelude.rs#L298-L318 [`Package`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/package.rs [`Target`]: https://github.com/rust-lang/cargo/blob/e4b65bdc80f2a293447f2f6a808fa7c84bf9a357/src/cargo/core/manifest.rs#L181-L206 [`Manifest`]: https://github.com/rust-lang/cargo/blob/e4b65bdc80f2a293447f2f6a808fa7c84bf9a357/src/cargo/core/manifest.rs#L27-L51 [`Source`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/source/mod.rs [`SourceId`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/source/source_id.rs [`SourceMap`]: https://github.com/rust-lang/cargo/blob/e4b65bdc80f2a293447f2f6a808fa7c84bf9a357/src/cargo/core/source/mod.rs#L245-L249 [`PackageRegistry`]: https://github.com/rust-lang/cargo/blob/e4b65bdc80f2a293447f2f6a808fa7c84bf9a357/src/cargo/core/registry.rs#L36-L81 [`ops::resolve`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/ops/resolve.rs [`resolver::features`]: 
https://github.com/rust-lang/cargo/blob/master/src/cargo/core/resolver/features.rs#L259 [source implementations]: https://github.com/rust-lang/cargo/tree/master/src/cargo/sources [`PackageId`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/package_id.rs [`Summary`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/summary.rs [`PackageSet`]: https://github.com/rust-lang/cargo/blob/e4b65bdc80f2a293447f2f6a808fa7c84bf9a357/src/cargo/core/package.rs#L283-L296 [`Downloads`]: https://github.com/rust-lang/cargo/blob/e4b65bdc80f2a293447f2f6a808fa7c84bf9a357/src/cargo/core/package.rs#L298-L352 [`Resolve`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/resolver/resolve.rs cargo-0.66.0/src/doc/contrib/src/architecture/subcommands.md000066400000000000000000000023201432416201200237700ustar00rootroot00000000000000# SubCommands Cargo is a single binary composed of a set of [`clap`] subcommands. All subcommands live in [`src/bin/cargo/commands`] directory. [`src/bin/cargo/main.rs`] is the entry point. Each subcommand, such as [`src/bin/cargo/commands/build.rs`], usually performs the following: 1. Parse the CLI flags. See the [`command_prelude`] module for some helpers to make this easier. 2. Load the config files. 3. Discover and load the workspace. 4. Calls the actual implementation of the subcommand which resides in [`src/cargo/ops`]. If the subcommand is not found in the built-in list, then Cargo will automatically search for a subcommand named `cargo-{NAME}` in the users `PATH` to execute the subcommand. 
[`clap`]: https://clap.rs/ [`src/bin/cargo/commands/build.rs`]: https://github.com/rust-lang/cargo/tree/master/src/bin/cargo/commands/build.rs [`src/cargo/ops`]: https://github.com/rust-lang/cargo/tree/master/src/cargo/ops [`src/bin/cargo/commands`]: https://github.com/rust-lang/cargo/tree/master/src/bin/cargo/commands [`src/bin/cargo/main.rs`]: https://github.com/rust-lang/cargo/blob/master/src/bin/cargo/main.rs [`command_prelude`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/util/command_prelude.rs cargo-0.66.0/src/doc/contrib/src/design.md000066400000000000000000000117211432416201200202510ustar00rootroot00000000000000# Design Principles The purpose of Cargo is to formalize a canonical Rust workflow, by automating the standard tasks associated with distributing software. Cargo simplifies structuring a new project, adding dependencies, writing and running unit tests, and more. Cargo is not intended to be a general-purpose build tool. Ideally, it should be easy to integrate it within another build tool, though admittedly that is not as seamless as desired. ## Stability and compatibility ### Backwards compatibility Cargo strives to remain backwards compatible with projects created in previous versions. The CLI interface also strives to remain backwards compatible, such that the commands and options behave the same. That being said, changes in behavior, and even outright breakage are sometimes done in limited situations. The following outlines some situations where backwards-incompatible changes are made: * Anything that addresses a security concern. * Dropping support for older platforms and tooling. Cargo follows the Rust [tiered platform support]. * Changes to resolve possibly unsafe or unreliable behavior. None of these changes should be taken lightly, and should be avoided if possible, or possibly with some transition period to alert the user of the potential change. 
Behavior is sometimes changed in ways that have a high confidence that it won't break existing workflows. Almost every change carries this risk, so it is often a judgment call balancing the benefit of the change with the perceived possibility of its negative consequences. At times, some changes fall in the gray area, where the current behavior is undocumented, or not working as intended. These are more difficult judgment calls. The general preference is to balance towards avoiding breaking existing workflows. Support for older registry APIs and index formats may be dropped, if there is high confidence that there aren't any active registries that may be affected. This has never (to my knowledge) happened so far, and is unlikely to happen in the future, but remains a possibility. In all of the above, a transition period may be employed if a change is known to cause breakage. A warning can be issued to alert the user that something will change, and provide them with an alternative to resolve the issue (preferably in a way that is compatible across versions if possible). Cargo is only expected to work with the version of the related Rust tools (`rustc`, `rustdoc`, etc.) that it is released with. As a matter of choice, the latest nightly works with the most recent stable release, but that is mostly to accommodate development of Cargo itself, and should not be expected by users. ### Forwards compatibility Additionally, Cargo strives a limited degree of *forwards compatibility*. Changes should not egregiously prevent older versions from working. This is mostly relevant for persistent data, such as on-disk files and the registry interface and index. It also applies to a lesser degree to the registry API. Changes to `Cargo.lock` require a transition time, where the new format is not automatically written when the lock file is updated. The transition time should not be less than 6 months, though preferably longer. New projects may use the new format in a shorter time frame. 
Changes to `Cargo.toml` can be made in any release. This is because the user must manually modify the file, and opt-in to any new changes. Additionally, Cargo will usually only issue a warning about new fields it doesn't understand, but otherwise continue to function. Changes to cache files (such as artifacts in the `target` directory, or cached data in Cargo's home directory) should not *prevent* older versions from running, but they may cause older versions to recreate the cache, which may result in a performance impact. Changes to the registry index should not prevent older versions from working. Generally, older versions ignore new fields, so the format should be easily extensible. Changes to the format or interpretation of existing fields should be done very carefully to avoid preventing older versions of Cargo from working. In some cases, this may mean that older versions of Cargo will not be able to *select* a newly published crate, but it shouldn't prevent them from working at all. This level of compatibility may not last forever, but the exact time frame for such a change has not yet been decided. The registry API may be changed in such a way to prevent older versions of Cargo from working. Generally, compatibility should be retained for as long as possible, but the exact length of time is not specified. ## Simplicity and layers Standard workflows should be easy and consistent. Each knob that is added has a high cost, regardless if it is intended for a small audience. Layering and defaults can help avoid the surface area that the user needs to be concerned with. Try to avoid small functionalities that may have complex interactions with one another. [tiered platform support]: https://doc.rust-lang.org/nightly/rustc/platform-support.html cargo-0.66.0/src/doc/contrib/src/index.md000066400000000000000000000023321432416201200201050ustar00rootroot00000000000000# Introduction Thank you for your interest in contributing to [Cargo]! 
This guide provides an overview of how to contribute to Cargo, how to dive into the code, and how the testing infrastructure works. There are many ways to contribute, such as [helping other users], [filing issues], [improving the documentation], [fixing bugs], and working on [small] and [large features]. If you have a general question about Cargo or its internals, feel free to ask on [Zulip]. This guide assumes you have some familiarity with Rust, and how to use Cargo, [rustup], and general development tools like [git]. Please also read the [Rust Code of Conduct]. [Cargo]: https://doc.rust-lang.org/cargo/ [Zulip]: https://rust-lang.zulipchat.com/#narrow/stream/246057-t-cargo [Rust Code of Conduct]: https://www.rust-lang.org/policies/code-of-conduct [helping other users]: https://users.rust-lang.org/ [filing issues]: issues.md [rustup]: https://rust-lang.github.io/rustup/ [git]: https://git-scm.com/ [improving the documentation]: https://github.com/rust-lang/cargo/tree/master/src/doc [fixing bugs]: process/index.md#working-on-small-bugs [small]: process/index.md#working-on-small-features [large features]: process/index.md#working-on-large-features cargo-0.66.0/src/doc/contrib/src/issues.md000066400000000000000000000123551432416201200203170ustar00rootroot00000000000000# Issue Tracker Cargo's issue tracker is located at . This is the primary spot where we track bugs and small feature requests. See [Process] for more about our process for proposing changes. ## Filing issues We can't fix what we don't know about, so please report problems liberally. This includes problems with understanding the documentation, unhelpful error messages, and unexpected behavior. **If you think that you have identified an issue with Cargo that might compromise its users' security, please do not open a public issue on GitHub. Instead, we ask you to refer to Rust's [security policy].** Opening an issue is as easy as following [this link][new-issues]. 
There are several templates for different issue kinds, but if none of them fit your issue, don't hesitate to modify one of the templates, or click the [Open a blank issue] link. The Rust tools are spread across multiple repositories in the Rust organization. It may not always be clear where to file an issue. No worries! If you file in the wrong tracker, someone will either transfer it to the correct one or ask you to move it. Some other repositories that may be relevant are: * [`rust-lang/rust`] β€” Home for the [`rustc`] compiler and [`rustdoc`]. * [`rust-lang/rustup`] β€” Home for the [`rustup`] toolchain installer. * [`rust-lang/rustfmt`] β€” Home for the `rustfmt` tool, which also includes `cargo fmt`. * [`rust-lang/rust-clippy`] β€” Home for the `clippy` tool, which also includes `cargo clippy`. * [`rust-lang/crates.io`] β€” Home for the [crates.io] website. Issues with [`cargo fix`] can be tricky to know where they should be filed, since the fixes are driven by `rustc`, processed by [`rustfix`], and the front-interface is implemented in Cargo. Feel free to file in the Cargo issue tracker, and it will get moved to one of the other issue trackers if necessary. 
[Process]: process/index.md [security policy]: https://www.rust-lang.org/security.html [new-issues]: https://github.com/rust-lang/cargo/issues/new/choose [Open a blank issue]: https://github.com/rust-lang/cargo/issues/new [`rust-lang/rust`]: https://github.com/rust-lang/rust [`rust-lang/rustup`]: https://github.com/rust-lang/rustup [`rust-lang/rustfmt`]: https://github.com/rust-lang/rustfmt [`rust-lang/rust-clippy`]: https://github.com/rust-lang/rust-clippy [`rustc`]: https://doc.rust-lang.org/rustc/ [`rustdoc`]: https://doc.rust-lang.org/rustdoc/ [`rustup`]: https://rust-lang.github.io/rustup/ [`rust-lang/crates.io`]: https://github.com/rust-lang/crates.io [crates.io]: https://crates.io/ [`rustfix`]: https://github.com/rust-lang/rustfix/ [`cargo fix`]: https://doc.rust-lang.org/cargo/commands/cargo-fix.html ## Issue labels [Issue labels] are very helpful to identify the types of issues and which category they are related to. The Cargo team typically manages assigning labels. The labels use a naming convention with short prefixes and colors to indicate the kind of label: * Yellow, **A**-prefixed labels state which **area** of the project an issue relates to. * Light purple, **C**-prefixed labels represent the **category** of an issue. In particular, **[C-feature-request]** marks *proposals* for new features. If an issue is **C-feature-request**, but is not **[Feature accepted]** or **[I-nominated]**, then it was not thoroughly discussed, and might need some additional design or perhaps should be implemented as an external subcommand first. Ping @rust-lang/cargo if you want to send a PR for such issue. * Dark purple, **Command**-prefixed labels mean the issue has to do with a specific cargo command. * Green, **E**-prefixed labels indicate the level of **experience** or **effort** necessary to fix the issue. **[E-mentor]** issues also have some instructions on how to get started. 
Generally, all of the **E**-prefixed labels are issues that are ready for someone to contribute to! * Red, **I**-prefixed labels indicate the **importance** of the issue. The **[I-nominated]** label indicates that an issue has been nominated for prioritizing at the next triage meeting. * Purple gray, **O**-prefixed labels are the **operating system** or platform that this issue is specific to. * Orange, **P**-prefixed labels indicate a bug's **priority**. * **S**-prefixed labels are "status" labels, typically used for PRs, but can also indicate an issue is **[S-blocked]**. * The light orange **[relnotes]** label marks issues that should be highlighted in the [Rust release notes] of the next release. * Dark blue, **Z**-prefixed labels are for unstable, [nightly features]. [Issue labels]: https://github.com/rust-lang/cargo/labels [E-easy]: https://github.com/rust-lang/cargo/labels/E-easy [E-mentor]: https://github.com/rust-lang/cargo/labels/E-mentor [I-nominated]: https://github.com/rust-lang/cargo/labels/I-nominated [C-feature-request]: https://github.com/rust-lang/cargo/labels/C-feature-request [Feature accepted]: https://github.com/rust-lang/cargo/labels/Feature%20accepted [S-blocked]: https://github.com/rust-lang/cargo/labels/S-blocked [Rust release notes]: https://github.com/rust-lang/rust/blob/master/RELEASES.md [nightly features]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html [relnotes]: https://github.com/rust-lang/cargo/issues?q=label%3Arelnotes cargo-0.66.0/src/doc/contrib/src/process/000077500000000000000000000000001432416201200201325ustar00rootroot00000000000000cargo-0.66.0/src/doc/contrib/src/process/index.md000066400000000000000000000127051432416201200215700ustar00rootroot00000000000000# Process This chapter gives an overview of how Cargo comes together, and how you can be a part of that process. See the [Working on Cargo] chapter for an overview of the contribution process. 
Please read the guidelines below before working on an issue or new feature. **Due to limited review capacity, the Cargo team is not accepting new features or major changes at this time. Please consult with the team before opening a new PR. Only issues that have been explicitly marked as accepted will be reviewed.** [Working on Cargo]: working-on-cargo.md ## Cargo team Cargo is managed by a [team] of volunteers. The Cargo Team reviews all changes, and sets the direction for the project. The team meets on a weekly basis on a video chat. If you are interested in participating, feel free to contact us on [Zulip]. ## Roadmap The [Roadmap Project Board] is used for tracking major initiatives. This gives an overview of the things the team is interested in and thinking about. The [RFC Project Board] is used for tracking [RFCs]. [the 2020 roadmap]: https://blog.rust-lang.org/inside-rust/2020/01/10/cargo-in-2020.html [Roadmap Project Board]: https://github.com/rust-lang/cargo/projects/1 [RFC Project Board]: https://github.com/rust-lang/cargo/projects/2 [RFCs]: https://github.com/rust-lang/rfcs/ ## Working on small bugs Issues labeled with the [E-help-wanted], [E-easy], or [E-mentor] [labels] are typically issues that the Cargo team wants to see addressed, and are relatively easy to get started with. If you are interested in one of those, and it has not already been assigned to someone, leave a comment. See [Issue assignment](#issue-assignment) below for assigning yourself. If there is a specific issue that you are interested in, but it doesn't have one of the `E-` labels, leave a comment on the issue. If a Cargo team member has the time to help out, they will respond to help with the next steps. 
[E-help-wanted]: https://github.com/rust-lang/cargo/labels/E-help-wanted [E-easy]: https://github.com/rust-lang/cargo/labels/E-easy [E-mentor]: https://github.com/rust-lang/cargo/labels/E-mentor [labels]: ../issues.md#issue-labels ## Working on large bugs Some issues may be difficult to fix. They may require significant code changes, or major design decisions. The [E-medium] and [E-hard] [labels] can be used to tag such issues. These will typically involve some discussion with the Cargo team on how to tackle it. [E-medium]: https://github.com/rust-lang/cargo/labels/E-medium [E-hard]: https://github.com/rust-lang/cargo/labels/E-hard ## Working on small features Small feature requests are typically managed on the [issue tracker][issue-feature-request]. Features that the Cargo team have approved will have the [Feature accepted] label or the [E-mentor] label. If there is a feature request that you are interested in, feel free to leave a comment expressing your interest. If a Cargo team member has the time to help out, they will respond to help with the next steps. Keep in mind that the Cargo team has limited time, and may not be able to help with every feature request. Most of them require some design work, which can be difficult. Check out the [design principles chapter] for some guidance. ## Working on large features Cargo follows the Rust model of evolution. Major features usually go through an [RFC process]. Therefore, before opening a feature request issue create a Pre-RFC thread on the [internals][irlo] forum to get preliminary feedback. Implementing a feature as a [custom subcommand][subcommands] is encouraged as it helps demonstrate the demand for the functionality and is a great way to deliver a working solution faster as it can iterate outside of Cargo's release cadence. See the [unstable chapter] for how new major features are typically implemented. 
[unstable chapter]: unstable.md ## Bots and infrastructure The Cargo project uses several bots: * [GitHub Actions] are used to automatically run all tests for each PR. * [rust-highfive] automatically assigns reviewers for PRs. * [bors] is used to merge PRs. See [The merging process]. * [triagebot] is used for assigning issues to non-members, see [Issue assignment](#issue-assignment). * [rfcbot] is used for making asynchronous decisions by team members. [rust-highfive]: https://github.com/rust-highfive [bors]: https://buildbot2.rust-lang.org/homu/ [The merging process]: working-on-cargo.md#the-merging-process [GitHub Actions]: https://github.com/features/actions [triagebot]: https://github.com/rust-lang/triagebot/wiki [rfcbot]: https://github.com/rust-lang/rfcbot-rs ## Issue assignment Normally, if you plan to work on an issue that has been marked with one of the `E-` tags or [Feature accepted], it is sufficient just to leave a comment that you are working on it. We also have a bot that allows you to formally "claim" an issue by entering the text `@rustbot claim` in a comment. See the [Assignment] docs on how this works. [Assignment]: https://github.com/rust-lang/triagebot/wiki/Assignment [team]: https://www.rust-lang.org/governance/teams/dev-tools#cargo [Zulip]: https://rust-lang.zulipchat.com/#narrow/stream/246057-t-cargo [issue-feature-request]: https://github.com/rust-lang/cargo/labels/C-feature-request [Feature accepted]: https://github.com/rust-lang/cargo/labels/Feature%20accepted [design principles chapter]: ../design.md [RFC process]: https://github.com/rust-lang/rfcs/ [irlo]: https://internals.rust-lang.org/ [subcommands]: https://doc.rust-lang.org/cargo/reference/external-tools.html#custom-subcommands cargo-0.66.0/src/doc/contrib/src/process/release.md000066400000000000000000000172401432416201200221000ustar00rootroot00000000000000# Release process Cargo is released with `rustc` using a ["train model"][choochoo]. 
After a change lands in Cargo's master branch, it will be synced with the [rust-lang/rust] repository by a Cargo team member, which happens about once a week. If there are complications, it can take longer. After it is synced and merged, the changes will appear in the next nightly release, which is usually published around 00:30 UTC. After changes are in the nightly release, they will make their way to the stable release anywhere from 6 to 12 weeks later, depending on when during the cycle it landed. The current release schedule is posted on the [Rust Forge]. See the [release process] for more details on how Rust's releases are created. Rust releases are managed by the [Release team]. [Rust Forge]: https://forge.rust-lang.org/ ## Build process The build process for Cargo is handled as part of building Rust. Every PR on the [rust-lang/rust] repository creates a full collection of release artifacts for every platform. The code for this is in the [`dist` bootstrap module]. Every night at 00:00 UTC, the artifacts from the most recently merged PR are promoted to the nightly release channel. A similar process happens for beta and stable releases. [`dist` bootstrap module]: https://github.com/rust-lang/rust/blob/master/src/bootstrap/dist.rs ## Submodule updates Cargo is tracked in the [rust-lang/rust] repository using a [git submodule]. It is updated manually about once a week by a Cargo team member. However, anyone is welcome to update it as needed. [@ehuss] has a tool called [subup](https://github.com/ehuss/subup) to automate the process of updating the submodule, updating the lockfile, running tests, and creating a PR. Running the tests ahead-of-time helps avoid long cycle times waiting for bors if there are any errors. Subup will also provide a message to include in the PR with a list of all PRs it covers. Posting this in the PR message also helps create reference links on each Cargo PR to the submodule update PR to help track when it gets merged. 
The following is an example of the command to run in a local clone of rust-lang/rust to run a certain set of tests of things that are likely to get broken by a Cargo update: ```bash subup --up-branch update-cargo \ --commit-message "Update cargo" \ --test="src/tools/linkchecker tidy \ src/tools/cargo \ src/tools/rustfmt \ src/tools/rls" \ src/tools/cargo ``` If doing a [beta backport](#beta-backports), the command is similar, but needs to point to the correct branches: ```bash subup --up-branch update-beta-cargo \ --rust-branch beta \ --set-config rust.channel=beta \ --commit-message "[beta] Update cargo" \ --test="src/tools/linkchecker tidy \ src/tools/cargo \ src/tools/rustfmt \ src/tools/rls" \ rust-1.63.0:src/tools/cargo ``` [@ehuss]: https://github.com/ehuss/ [git submodule]: https://git-scm.com/book/en/v2/Git-Tools-Submodules ## Version updates Shortly after each major release, a Cargo team member will post a PR to update Cargo's version in `Cargo.toml`. Cargo's library is permanently unstable, so its version number starts with a `0`. The minor version is always 1 greater than the Rust release it is a part of, so cargo 0.49.0 is part of the 1.48 Rust release. The [CHANGELOG] is also usually updated at this time. Also, any version-specific checks that are no longer needed can be removed. For example, some tests are disabled on stable if they require some nightly behavior. Once that behavior is available on the new stable release, the checks are no longer necessary. (I usually search for the word "nightly" in the testsuite directory, and read the comments to see if any of those nightly checks can be removed.) Sometimes Cargo will have a runtime check to probe `rustc` if it supports a specific feature. This is usually stored in the [`TargetInfo`] struct. If this behavior is now stable, those checks should be removed. Cargo has several other packages in the [`crates/` directory]. 
If any of these packages have changed, the version should be bumped **before the beta release**. It is rare that these get updated. Bumping these as-needed helps avoid churning incompatible version numbers. This process should be improved in the future! [@ehuss] has a tool called [cargo-new-release] to automate the process of doing a version bump. It runs through several steps: 1. Creates a branch 2. Updates the version numbers 3. Creates a changelog for anything on the master branch that is not part of beta 4. Creates a changelog for anything on the beta branch It opens a browser tab for every PR in order to review each change. It places each PR in the changelog with its title, but usually every PR should be rewritten to explain the change from the user's perspective. Each PR should also be categorized as an Addition, Change, Fix, or Nightly-only change. Most PRs are deleted, since they are not relevant to users of Cargo. For example, remove all PRs related to Cargo internals, infrastructure, documentation, error changes, refactorings, etc. Usually about half of the PRs get removed. This process usually takes @ehuss about an hour to finish. [@ehuss]: https://github.com/ehuss/ [cargo-new-release]: https://github.com/ehuss/cargo-new-release [`crates/` directory]: https://github.com/rust-lang/cargo/tree/master/crates ## Docs publishing Docs are automatically published during the Rust release process. The nightly channel's docs appear at . Once nightly is promoted to beta, those docs will appear at . Once the stable release is made, it will appear on (which is the "current" stable) and the release-specific URL such as . The code that builds the documentation is located in the [`doc` bootstrap module]. [`doc` bootstrap module]: https://github.com/rust-lang/rust/blob/master/src/bootstrap/doc.rs ## crates.io publishing Cargo's library is published to [crates.io] as part of the stable release process. This is handled by the [Release team] as part of their process. 
There is a [`publish.py` script] that in theory should help with this process. The test and build tool crates aren't published. [`publish.py` script]: https://github.com/rust-lang/cargo/blob/master/publish.py ## Beta backports If there is a regression or major problem detected during the beta phase, it may be necessary to backport a fix to beta. The process is documented in the [Beta Backporting] page. [Beta Backporting]: https://forge.rust-lang.org/release/beta-backporting.html ## Stable backports In (hopefully!) very rare cases, a major regression or problem may be reported after the stable release. Decisions about this are usually coordinated between the [Release team] and the Cargo team. There is usually a high bar for making a stable patch release, and the decision may be influenced by whether or not there are other changes that need a new stable release. The process here is similar to the beta-backporting process. The [rust-lang/cargo] branch is the same as beta (`rust-1.XX.0`). The [rust-lang/rust] branch is called `stable`. [choochoo]: https://doc.rust-lang.org/book/appendix-07-nightly-rust.html [rust-lang/rust]: https://github.com/rust-lang/rust/ [rust-lang/cargo]: https://github.com/rust-lang/cargo/ [CHANGELOG]: https://github.com/rust-lang/cargo/blob/master/CHANGELOG.md [release process]: https://forge.rust-lang.org/release/process.html [`TargetInfo`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/build_context/target_info.rs [crates.io]: https://crates.io/ [release team]: https://www.rust-lang.org/governance/teams/operations#release cargo-0.66.0/src/doc/contrib/src/process/unstable.md000066400000000000000000000132211432416201200222700ustar00rootroot00000000000000# Unstable features Most new features should go through the unstable process. This means that the feature will only be usable on the nightly channel, and requires a specific opt-in by the user. 
Small changes can skip this process, but please consult with the Cargo team first. ## Unstable feature opt-in For features that require behavior changes or new syntax in `Cargo.toml`, then it will need a `cargo-features` value placed at the top of `Cargo.toml` to enable it. The process for doing adding a new feature is described in the [`features` module]. Code that implements the feature will need to manually check that the feature is enabled for the current manifest. For features that add new command-line flags, config options, or environment variables, then the `-Z` flags will be needed to enable them. The [`features` module] also describes how to add these. New flags should use the `fail_if_stable_opt` method to check if the `-Z unstable-options` flag has been passed. ## Unstable documentation Every unstable feature should have a section added to the [unstable chapter] describing how to use the feature. `-Z` CLI flags should be documented in the built-in help in the [`cli` module]. [unstable chapter]: https://github.com/rust-lang/cargo/blob/master/src/doc/src/reference/unstable.md [`cli` module]: https://github.com/rust-lang/cargo/blob/master/src/bin/cargo/cli.rs ## Tracking issues Each unstable feature should get a [tracking issue]. These issues are typically created when a PR is close to being merged, or soon after it is merged. Use the [tracking issue template] when creating a tracking issue. Larger features should also get a new label in the issue tracker so that when issues are filed, they can be easily tied together. [tracking issue]: https://github.com/rust-lang/cargo/labels/C-tracking-issue [tracking issue template]: https://github.com/rust-lang/cargo/issues/new?labels=C-tracking-issue&template=tracking_issue.md ## Pre-Stabilization Once an unstable feature is "complete", the search for users to test and give feedback begins. Testing notes should be written up to give users an idea of how to test the new feature. 
An example being the [workspace inheritance testing notes] for workspace inheritance. Once testing notes have been written up you should make posts in various rust communities ([rust subreddit], [users], [internals], etc). Example posts made for workspace inheritance: [reddit post], [users post], [internals post]. The unstable feature should also be added to [This Week in Rust]. This should be done by adding the label `call-for-testing` to the RFC for the feature and making a comment with a link to the testing notes and the tracking issue (as needed). If there is not an RFC, a pull request should be made to the [TWiR repo] adding the feature to the `Call for Testing` section ([example]). [workspace inheritance testing notes]: https://github.com/rust-lang/cargo/blob/6d6dd9d9be9c91390da620adf43581619c2fa90e/src/doc/src/reference/unstable.md#testing-notes [rust subreddit]: https://www.reddit.com/r/rust/ [users]: https://users.rust-lang.org/ [internals]: https://internals.rust-lang.org/ [reddit post]: https://www.reddit.com/r/rust/comments/uo8zeh/help_test_workspace_inheritance_in_preparation/ [users post]: https://users.rust-lang.org/t/help-test-workspace-inheritance-in-preparation-for-stablization/75582 [internals post]: https://internals.rust-lang.org/t/help-test-workspace-inheritance-in-preparation-for-stablization/16618 [This Week in Rust]: https://this-week-in-rust.org/ [TWiR repo]: https://github.com/rust-lang/this-week-in-rust [example]: https://github.com/rust-lang/this-week-in-rust/pull/3256 ## Stabilization After some period of time, typically measured in months, the feature can be considered to be stabilized. The feature should not have any significant known bugs or issues, and any design concerns should be resolved. The stabilization process depends on the kind of feature. For smaller features, you can leave a comment on the tracking issue expressing interest in stabilizing it. 
It can usually help to indicate that the feature has received some real-world testing, and has exhibited some demand for broad use. For larger features that have not gone through the [RFC process], then an RFC to call for stabilization might be warranted. This gives the community a final chance to provide feedback about the proposed design. For a small feature, or one that has already gone through the RFC process, a Cargo Team member may decide to call for a "final comment period" using [rfcbot]. This is a public signal that a major change is being made, and gives the Cargo Team members an opportunity to confirm or block the change. This process can take a few days or weeks, or longer if a concern is raised. Once the stabilization has been approved, the person who called for stabilization should prepare a PR to stabilize the feature. This PR should: * Flip the feature to stable in the [`features` module]. * Remove any unstable checks that aren't automatically handled by the feature system. * Move the documentation from the [unstable chapter] into the appropriate places in the Cargo book and man pages. * Remove the `-Z` flags and help message if applicable. * Update all tests to remove nightly checks. * Tag the PR with [relnotes] label if it seems important enough to highlight in the [Rust release notes]. [`features` module]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/features.rs [RFC process]: https://github.com/rust-lang/rfcs/ [rfcbot]: https://github.com/rust-lang/rfcbot-rs [Rust release notes]: https://github.com/rust-lang/rust/blob/master/RELEASES.md [relnotes]: https://github.com/rust-lang/cargo/issues?q=label%3Arelnotes cargo-0.66.0/src/doc/contrib/src/process/working-on-cargo.md000066400000000000000000000201411432416201200236350ustar00rootroot00000000000000# Working on Cargo This chapter gives an overview of how to build Cargo, make a change, and submit a Pull Request. 0. [Before hacking on Cargo.](#before-hacking-on-cargo) 1. 
[Check out the Cargo source.](#checkout-out-the-source) 2. [Building Cargo.](#building-cargo) 3. [Making a change.](#making-a-change) 4. [Writing and running tests.](../tests/index.md) 5. [Submitting a Pull Request.](#submitting-a-pull-request) 6. [The merging process.](#the-merging-process) ## Before hacking on Cargo We encourage people to discuss their design before hacking on code. This gives the Cargo team a chance to know your idea more. Sometimes after a discussion, we even find a way to solve the problem without coding! Typically, you [file an issue] or start a thread on the [internals forum] before submitting a pull request. Please read [the process] of how features and bugs are managed in Cargo. ## Checkout out the source We use the "fork and pull" model [described here][development-models], where contributors push changes to their personal fork and [create pull requests] to bring those changes into the source repository. Cargo uses [git] and [GitHub] for all development. 1. Fork the [`rust-lang/cargo`] repository on GitHub to your personal account (see [GitHub docs][how-to-fork]). 2. Clone your fork to your local machine using `git clone` (see [GitHub docs][how-to-clone]) 3. It is recommended to start a new branch for the change you want to make. All Pull Requests are made against the master branch. ## Building Cargo Cargo is built by...running `cargo`! There are a few prerequisites that you need to have installed: * `rustc` and `cargo` need to be installed. Cargo is expected to build and test with the current stable, beta, and nightly releases. It is your choice which to use. Nightly is recommended, since some nightly-specific tests are disabled when using the stable release. But using stable is fine if you aren't working on those. * A C compiler (typically gcc, clang, or MSVC). 
* [git] * Unix: * pkg-config * OpenSSL (`libssl-dev` on Ubuntu, `openssl-devel` on Fedora) * macOS: * OpenSSL ([homebrew] is recommended to install the `openssl` package) If you can successfully run `cargo build`, you should be good to go! [homebrew]: https://brew.sh/ ## Running Cargo You can use `cargo run` to run cargo itself, or you can use the path directly to the cargo binary, such as `target/debug/cargo`. If you are using [`rustup`], beware that running the binary directly can cause issues with rustup overrides. Usually, when `cargo` is executed as part of rustup, the toolchain becomes sticky (via an environment variable), and all calls to `rustc` will use the same toolchain. But when `cargo` is not run via rustup, the toolchain may change based on the directory. Since Cargo changes the directory for each compilation, this can cause different calls to `rustc` to use different versions. There are a few workarounds: * Don't use rustup overrides. * Use `rustup run target/debug/cargo` to execute `cargo`. * Set the `RUSTC` environment variable to a specific `rustc` executable (not the rustup wrapper). * Create a [custom toolchain]. This is a bit of a hack, but you can create a directory in the rustup `toolchains` directory, and create symlinks for all the files and directories in there to your toolchain of choice (such as nightly), except for the `cargo` binary, which you can symlink to your `target/debug/cargo` binary in your project directory. *Normally*, all development is done by running Cargo's test suite, so running it directly usually isn't required. But it can be useful for testing Cargo on more complex projects. [`rustup`]: https://rust-lang.github.io/rustup/ [custom toolchain]: https://rust-lang.github.io/rustup/concepts/toolchains.html#custom-toolchains ## Making a change Some guidelines on working on a change: * All code changes are expected to comply with the formatting suggested by `rustfmt`. 
You can use `rustup component add rustfmt` to install `rustfmt` and use `cargo fmt` to automatically format your code. * Include tests that cover all non-trivial code. See the [Testing chapter] for more about writing and running tests. * All code should be warning-free. This is checked during tests. ## Submitting a Pull Request After you have committed your work, and pushed it to GitHub, you can open a Pull Request * Push your commits to GitHub and create a pull request against Cargo's `master` branch. * Include a clear description of what the change is and why it is being made. * Use [GitHub's keywords] in the description to automatically link to an issue if the PR resolves the issue. For example `Closes #1234` will link issue #1234 to the PR. When the PR is merged, GitHub will automatically close the issue. The [rust-highfive] bot will automatically assign a reviewer for the PR. It may take at least a few days for someone to respond. If you don't get a response in over a week, feel free to ping the assigned reviewer. When your PR is submitted, GitHub automatically runs all tests. The GitHub interface will show a green checkmark if it passes, or a red X if it fails. There are links to the logs on the PR page to diagnose any issues. The tests typically finish in under 30 minutes. The reviewer might point out changes deemed necessary. Large or tricky changes may require several passes of review and changes. ### Status labeling PRs will get marked with [labels] like [`S-waiting-on-review`] or [`S-waiting-on-author`] to indicate their status. The [`@rustbot`] bot can be used by anyone to adjust the labels. If a PR gets marked as `S-waiting-on-author`, and you have pushed new changes that you would like to be reviewed, you can write a comment on the PR with the text `@rustbot ready`. The bot will switch the labels on the PR. More information about these commands can be found at the [shortcuts documentation]. 
[labels]: https://github.com/rust-lang/cargo/labels [`S-waiting-on-review`]: https://github.com/rust-lang/cargo/labels/S-waiting-on-review [`S-waiting-on-author`]: https://github.com/rust-lang/cargo/labels/S-waiting-on-author [`@rustbot`]: https://github.com/rustbot [shortcuts documentation]: https://github.com/rust-lang/triagebot/wiki/Shortcuts ## The merging process After a reviewer has approved your PR, they will issue a command to the [bors] bot (also known as "Homu", the software that powers [`@bors`]). Bors will create a temporary branch with your PR, and run all tests. Only if all tests pass will it merge the PR to master. If it fails, the bot will leave a comment on the PR. This system ensures that the master branch is always in a good state, and that merges are processed one at a time. The [Homu queue dashboard][homu-cargo] shows the current merge queue. Cargo's queue is rarely busy, but a busy project like the [rust repo][homu-rust] is constantly full. Assuming everything works, congratulations! It may take at least a week for the changes to arrive on the nightly channel. See the [release chapter] for more information on how Cargo releases are made. 
[development-models]: https://help.github.com/articles/about-collaborative-development-models/ [create pull requests]: https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request [how-to-fork]: https://docs.github.com/en/github/getting-started-with-github/fork-a-repo [`rust-lang/cargo`]: https://github.com/rust-lang/cargo/ [git]: https://git-scm.com/ [GitHub]: https://github.com/ [how-to-clone]: https://docs.github.com/en/github/creating-cloning-and-archiving-repositories/cloning-a-repository [Testing chapter]: ../tests/index.md [GitHub's keywords]: https://docs.github.com/en/github/managing-your-work-on-github/linking-a-pull-request-to-an-issue [rust-highfive]: https://github.com/rust-highfive [bors]: https://buildbot2.rust-lang.org/homu/ [`@bors`]: https://github.com/bors [homu-cargo]: https://buildbot2.rust-lang.org/homu/queue/cargo [homu-rust]: https://buildbot2.rust-lang.org/homu/queue/rust [release chapter]: release.md [internals forum]: https://internals.rust-lang.org/c/tools-and-infrastructure/cargo [file an issue]: https://github.com/rust-lang/cargo/issues [the process]: index.md cargo-0.66.0/src/doc/contrib/src/tests/000077500000000000000000000000001432416201200176165ustar00rootroot00000000000000cargo-0.66.0/src/doc/contrib/src/tests/crater.md000066400000000000000000000140541432416201200214240ustar00rootroot00000000000000# Crater [Crater](https://github.com/rust-lang/crater) is a tool for compiling and running tests for _every_ crate on [crates.io](https://crates.io) (and a few on GitHub). It is mainly used for checking the extent of breakage when implementing potentially breaking changes and ensuring lack of breakage by running beta vs stable compiler versions. Essentially it runs some `cargo` command on every crate twice; once against the "start" toolchain and again against the "end" toolchain. For example, "start" could be the stable release, and "end" could be beta. 
If it passes in "start" but fails with "end", then that is reported as a regression.

There is a bot called [craterbot] which is used to run crater on hardware managed by the rust-lang organization.

Crater is run by the release team during the beta cycle. If there are any regressions that look like they are caused by Cargo, they should contact the Cargo team to decide how to handle it.

## Running crater

If you have a change that you want to test before the beta release, or you want to test behavior that is not normally exercised by crater, you can do a manual run of crater. Roughly the steps are:

1. Create a branch with your changes. In your clone of cargo, make the changes to incorporate whatever new thing you want to test and push it to a branch on your fork on GitHub.
2. Get a clone of [rust-lang/rust](https://github.com/rust-lang/rust/).
3. Create a branch in your rust-lang/rust clone to add your changes.
4. Change the `src/tools/cargo` submodule to point to your new branch. Modify `.gitmodules` to point to your clone and branch of cargo with the changes you want to test. For example:

   ```bash
   git submodule set-url src/tools/cargo https://github.com/ehuss/cargo.git
   git submodule set-branch --branch my-awesome-feature src/tools/cargo
   git submodule update --remote src/tools/cargo
   git add .gitmodules src/tools/cargo
   git commit
   ```

5. Create a PR on rust-lang/rust. Push your submodule changes to GitHub and make a PR. Start the PR title with `[EXPERIMENT]` to make it clear what the PR is for and assign yourself or @ghost.
6. Make a "try" build. A "try" build creates a full release of x86_64-unknown-linux-gnu and stores it on rust-lang servers. This can be done with a comment `@bors try` on the PR (all Cargo team members should have permission to do this).
7. Run crater. Look at the [craterbot] docs to determine the command that you want to run. There are different modes like `check-only`, `build-and-test`, `rustdoc`, etc. You can also choose how many crates to run against.
If you are uncertain if your cargo changes will work correctly, it might be a good idea to run against `top-100` first to check its behavior. This will run much faster. You can do a full run afterwards. After the try build finishes (which should take a couple hours), ask someone to make a crater run. The Cargo team does not have that permission, so just ask someone on Zulip. They will need to write a comment to `@craterbot` with the command that you have specified. 8. Wait. Crater can take anywhere from a few hours to a few weeks to run depending on how long the [craterbot queue](https://crater.rust-lang.org/) is and which mode you picked and the priority of your job. When the crater run finishes, craterbot will post a comment to the PR with a link to a report of the results. 9. Investigate the report. Look through the report which contains links to build logs for any regressions or errors. 10. Close the PR. Whenever you are done doing crater runs, close your PR. [craterbot]: https://github.com/rust-lang/crater/blob/master/docs/bot-usage.md ## Advanced crater modes Crater only has a few built-in modes, such as running `cargo check` or `cargo test`. You can pass extra flags with `+cargoflags`. More complex tests can be accomplished by customizing Cargo to perform whatever actions you want. Since crater essentially runs `cargo check`, you can modify the `check` command to perform whichever actions you want. For example, to test `cargo fix --edition`, [this commit](https://github.com/ehuss/cargo/commit/6901690a6f8d519efb4fabf48c1c2b94af0c3bd8) intercepted `cargo check` and modified it to instead: 1. Only run on crates with the 2018 edition. 2. Run `cargo fix --edition`. 3. Modify the manifest to switch to the 2021 edition. 4. Run `cargo check` to verify. If you need to compare the before and after of a command that is not part of crater's built-in modes, that can be more difficult. Two possible options: * Work with the infra team to add a new mode. 
* Build two custom try builds. Each one should modify the `cargo check` command as described above. The "start" build should perform whichever action you want with an otherwise unmodified cargo. The "end" build should perform whichever action you want with your modified cargo. Then, in the `@craterbot` command, specify the start and end hashes of the two try builds. ## Limitations There are some limitations of crater to consider when running Cargo: * A crater run without regressions is not a green light to move forward. * A large portion of Rust code is not tested, such as closed-source projects or things otherwise not collected by crater. * Many crates can't build in crater's environment or are otherwise broken. * Some crates have flaky tests. * Crater runs in an isolated environment. * It only runs on Linux x86-64. * It does not have network access. * The crate source is in a read-only mount. * Crater does several steps before running the test (using its own copy of the stable toolchain): * It generates a lockfile using `generate-lockfile` and includes `-Zno-index-update` to prevent index updates (which makes it run much faster). * All dependencies are downloaded ahead-of-time with `cargo fetch`. * The built-in modes pass several flags to cargo such as `--frozen` or `--message-format=json`. It will sometimes use `--all-targets` and sometimes not. Check the [crater source](https://github.com/rust-lang/crater/blob/master/src/runner/test.rs) for more details on how it works. cargo-0.66.0/src/doc/contrib/src/tests/index.md000066400000000000000000000021201432416201200212420ustar00rootroot00000000000000# Tests Cargo has an extensive test suite. Most of it is implemented as integration tests in the [`testsuite`] directory. There are several other tests: * Unit tests are scattered throughout. * The dependency resolver has its own set of tests in the [`resolver-tests`] directory. * All of the packages in the [`crates`] directory have their own set of tests. 
* The [`build-std`] test is for the [build-std feature]. It is separate since it has some special requirements. * Documentation has a variety of tests, such as link validation, and the [SemVer chapter validity checks]. [`testsuite`]: https://github.com/rust-lang/cargo/tree/master/tests/testsuite/ [`resolver-tests`]: https://github.com/rust-lang/cargo/tree/master/crates/resolver-tests [`crates`]: https://github.com/rust-lang/cargo/tree/master/crates [`build-std`]: https://github.com/rust-lang/cargo/blob/master/tests/build-std/main.rs [build-std feature]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#build-std [SemVer chapter validity checks]: https://github.com/rust-lang/cargo/tree/master/src/doc/semver-check cargo-0.66.0/src/doc/contrib/src/tests/profiling.md000066400000000000000000000024011432416201200221260ustar00rootroot00000000000000# Benchmarking and Profiling ## Internal profiler Cargo has a basic, hierarchical profiler built-in. The environment variable `CARGO_PROFILE` can be set to an integer which specifies how deep in the profile stack to print results for. ```sh # Output first three levels of profiling info CARGO_PROFILE=3 cargo generate-lockfile ``` ## Benchmarking ### Benchsuite Head over to the [`benches` directory](https://github.com/rust-lang/cargo/tree/master/benches) for more information about the benchmarking suite. ### Informal benchmarking The overhead for starting a build should be kept as low as possible (preferably, well under 0.5 seconds on most projects and systems). Currently, the primary parts that affect this are: * Running the resolver. * Querying the index. * Checking git dependencies. * Scanning the local project. * Building the unit dependency graph. One way to test this is to use [hyperfine]. This is a tool that can be used to measure the difference between different commands and settings. 
Usually this is done by measuring the time it takes for `cargo build` to finish in a large project where the build is fresh (no actual compilation is performed). Just run `cargo build` once before using hyperfine. [hyperfine]: https://github.com/sharkdp/hyperfine cargo-0.66.0/src/doc/contrib/src/tests/running.md000066400000000000000000000036231432416201200216240ustar00rootroot00000000000000# Running Tests Using `cargo test` is usually sufficient for running the full test suite. This can take a few minutes, so you may want to use more targeted flags to pick the specific test you want to run, such as `cargo test --test testsuite -- check::check_success`. ## Running nightly tests Some tests only run on the nightly toolchain, and will be ignored on other channels. It is recommended that you run tests with both nightly and stable to ensure everything is working as expected. Some of the nightly tests require the `rustc-dev` and `llvm-tools-preview` rustup components installed. These components include the compiler as a library. This may already be installed with your nightly toolchain, but if it isn't, run `rustup component add rustc-dev llvm-tools-preview --toolchain=nightly`. ## Running cross tests Some tests exercise cross compiling to a different target. This will require you to install the appropriate target. This typically is the 32-bit target of your host platform. For example, if your host is a 64-bit `x86_64-unknown-linux-gnu`, then you should install the 32-bit target with `rustup target add i686-unknown-linux-gnu`. If you don't have the alternate target installed, there should be an error message telling you what to do. You may also need to install additional tools for the target. For example, on Ubuntu you should install the `gcc-multilib` package. If you can't install an alternate target, you can set the `CFG_DISABLE_CROSS_TESTS=1` environment variable to disable these tests. The Windows cross tests only support the MSVC toolchain. 
## Running build-std tests The `build-std` tests are disabled by default, but you can run them by setting the `CARGO_RUN_BUILD_STD_TESTS=1` environment variable and running `cargo test --test build-std`. This requires the nightly channel, and also requires the `rust-src` component installed with `rustup component add rust-src --toolchain=nightly`. cargo-0.66.0/src/doc/contrib/src/tests/writing.md000066400000000000000000000261331432416201200216300ustar00rootroot00000000000000# Writing Tests The following focuses on writing an integration test. However, writing unit tests is also encouraged! ## Testsuite Cargo has a wide variety of integration tests that execute the `cargo` binary and verify its behavior, located in the [`testsuite`] directory. The [`support`] crate and [`snapbox`] contain many helpers to make this process easy. There are two styles of tests that can roughly be categorized as - functional tests - The fixture is programmatically defined - The assertions are regular string comparisons - Easier to share in an issue as a code block is completely self-contained - More resilient to insignificant changes though ui tests are easy to update when a change does occur - ui tests - The fixture is file-based - The assertions use file-backed snapshots that can be updated with an env variable - Easier to review the expected behavior of the command as more details are included - Easier to get up and running from an existing project - Easier to reason about as everything is just files in the repo These tests typically work by creating a temporary "project" with a `Cargo.toml` file, executing the `cargo` binary process, and checking the stdout and stderr output against the expected output. 
### Functional Tests

Generally, a functional test will be placed in `tests/testsuite/<command>.rs` and will look roughly like:

```rust,ignore
#[cargo_test]
fn <test_name>() {
    let p = project()
        .file("src/main.rs", r#"fn main() { println!("hi!"); }"#)
        .build();

    p.cargo("run --bin foo")
        .with_stderr(
            "\
[COMPILING] foo [..]
[FINISHED] [..]
[RUNNING] `target/debug/foo`
",
        )
        .with_stdout("hi!")
        .run();
}
```

The [`#[cargo_test]` attribute](#cargo_test-attribute) is used in place of `#[test]` to inject some setup code.

[`ProjectBuilder`] via `project()`:
- Each project is in a separate directory in the sandbox
- If you do not specify a `Cargo.toml` manifest using `file()`, one is automatically created with a project name of `foo` using `basic_manifest()`.

[`Execs`] via `p.cargo(...)`:
- This executes the command and evaluates different assertions
- See [`support::compare`] for an explanation of the string pattern matching. Patterns are used to make it easier to match against the expected output.

#### `#[cargo_test]` attribute

The `#[cargo_test]` attribute injects code which does some setup before starting the test. It will create a filesystem "sandbox" under the "cargo integration test" directory for each test, such as `/path/to/cargo/target/tmp/cit/t123/`. The sandbox will contain a `home` directory that will be used instead of your normal home directory.

The `#[cargo_test]` attribute takes several options that will affect how the test is generated. They are listed in parentheses separated with commas, such as:

```rust,ignore
#[cargo_test(nightly, reason = "-Zfoo is unstable")]
```

The options it supports are:

* `nightly` — This will cause the test to be ignored if not running on the nightly toolchain. This is useful for tests that use unstable options in `rustc` or `rustdoc`. These tests are run in Cargo's CI, but are disabled in rust-lang/rust's CI due to the difficulty of updating both repos simultaneously. A `reason` field is required to explain why it is nightly-only.
* `build_std_real` — This is a "real" `-Zbuild-std` test (in the `build_std` integration test). This only runs on nightly, and only if the environment variable `CARGO_RUN_BUILD_STD_TESTS` is set (these tests only run on Linux).
* `build_std_mock` — This is a "mock" `-Zbuild-std` test (which uses a mock standard library). This only runs on nightly, and is disabled for windows-gnu.
* `requires_<cmd>` — This indicates a command that is required to be installed to be run. For example, `requires_rustfmt` means the test will only run if the executable `rustfmt` is installed. These tests are *always* run on CI. This is mainly used to avoid requiring contributors to have every dependency installed.
* `>=1.64` — This indicates that the test will only run with the given version of `rustc` or newer. This can be used when a new `rustc` feature has been stabilized that the test depends on. If this is specified, a `reason` is required to explain why it is being checked.

#### Testing Nightly Features

If you are testing a Cargo feature that only works on "nightly" Cargo, then you need to call `masquerade_as_nightly_cargo` on the process builder and pass the name of the feature as the reason, like this:

```rust,ignore
p.cargo("build").masquerade_as_nightly_cargo(&["print-im-a-teapot"])
```

If you are testing a feature that only works on *nightly rustc* (such as benchmarks), then you should use the `nightly` option of the `cargo_test` attribute, like this:

```rust,ignore
#[cargo_test(nightly, reason = "-Zfoo is unstable")]
```

This will cause the test to be ignored if not running on the nightly toolchain.

#### Specifying Dependencies

You should not write any tests that use the network such as contacting crates.io. Typically, simple path dependencies are the easiest way to add a dependency.
Example:

```rust,ignore
let p = project()
    .file("Cargo.toml", r#"
        [package]
        name = "foo"
        version = "1.0.0"

        [dependencies]
        bar = {path = "bar"}
    "#)
    .file("src/lib.rs", "extern crate bar;")
    .file("bar/Cargo.toml", &basic_manifest("bar", "1.0.0"))
    .file("bar/src/lib.rs", "")
    .build();
```

If you need to test with registry dependencies, see [`support::registry::Package`] for creating packages you can depend on.

If you need to test git dependencies, see [`support::git`] to create a git dependency.

### UI Tests

UI Tests are a bit more spread out and generally look like:

`tests/testsuite/<command>/mod.rs`:
```rust,ignore
mod <case>;
```

`tests/testsuite/<command>/<case>/mod.rs`:
```rust,ignore
use cargo_test_support::prelude::*;
use cargo_test_support::compare::assert_ui;
use cargo_test_support::Project;
use cargo_test_support::curr_dir;

#[cargo_test]
fn <case>() {
    let project = Project::from_template(curr_dir!().join("in"));
    let project_root = project.root();
    let cwd = &project_root;

    snapbox::cmd::Command::cargo_ui()
        .arg("run")
        .arg_line("--bin foo")
        .current_dir(cwd)
        .assert()
        .success()
        .stdout_matches_path(curr_dir!().join("stdout.log"))
        .stderr_matches_path(curr_dir!().join("stderr.log"));

    assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
}
```

Then populate
- `tests/testsuite/<command>/<case>/in` with the project's directory structure
- `tests/testsuite/<command>/<case>/out` with the files you want verified
- `tests/testsuite/<command>/<case>/stdout.log` with nothing
- `tests/testsuite/<command>/<case>/stderr.log` with nothing

`#[cargo_test]`:
- This is used in place of `#[test]`
- This attribute injects code which does some setup before starting the test, creating a filesystem "sandbox" under the "cargo integration test" directory for each test such as `/path/to/cargo/target/cit/t123/`
- The sandbox will contain a `home` directory that will be used instead of your normal home directory

`Project`:
- The project is copied from a directory in the repo
- Each project is in a separate directory in the sandbox

[`Command`] via
`Command::cargo_ui()`: - Set up and run a command. [`OutputAssert`] via `Command::assert()`: - Perform assertions on the result of the [`Command`] [`Assert`] via `assert_ui()`: - Verify the command modified the file system as expected #### Updating Snapshots The project, stdout, and stderr snapshots can be updated by running with the `SNAPSHOTS=overwrite` environment variable, like: ```console $ SNAPSHOTS=overwrite cargo test ``` Be sure to check the snapshots to make sure they make sense. #### Testing Nightly Features If you are testing a Cargo feature that only works on "nightly" Cargo, then you need to call `masquerade_as_nightly_cargo` on the process builder and pass the name of the feature as the reason, like this: ```rust,ignore snapbox::cmd::Command::cargo() .masquerade_as_nightly_cargo(&["print-im-a-teapot"]) ``` If you are testing a feature that only works on *nightly rustc* (such as benchmarks), then you should use the `nightly` option of the `cargo_test` attribute, like this: ```rust,ignore #[cargo_test(nightly, reason = "-Zfoo is unstable")] ``` This will cause the test to be ignored if not running on the nightly toolchain. ### Platform-specific Notes When checking output, use `/` for paths even on Windows: the actual output of `\` on Windows will be replaced with `/`. Be careful when executing binaries on Windows. You should not rename, delete, or overwrite a binary immediately after running it. Under some conditions Windows will fail with errors like "directory not empty" or "failed to remove" or "access is denied". ## Debugging tests In some cases, you may need to dig into a test that is not working as you expect, or you just generally want to experiment within the sandbox environment. The general process is: 1. Build the sandbox for the test you want to investigate. For example: `cargo test --test testsuite -- features2::inactivate_targets`. 2. In another terminal, head into the sandbox directory to inspect the files and run `cargo` directly. 1. 
The sandbox directories start with `t0` for the first test. `cd target/tmp/cit/t0` 2. Set up the environment so that the sandbox configuration takes effect: `export CARGO_HOME=$(pwd)/home/.cargo` 3. Most tests create a `foo` project, so head into that: `cd foo` 3. Run whatever cargo command you want. See [Running Cargo] for more details on running the correct `cargo` process. Some examples: * `/path/to/my/cargo/target/debug/cargo check` * Using a debugger like `lldb` or `gdb`: 1. `lldb /path/to/my/cargo/target/debug/cargo` 2. Set a breakpoint, for example: `b generate_targets` 3. Run with arguments: `r check` [`testsuite`]: https://github.com/rust-lang/cargo/tree/master/tests/testsuite/ [`ProjectBuilder`]: https://github.com/rust-lang/cargo/blob/d847468768446168b596f721844193afaaf9d3f2/crates/cargo-test-support/src/lib.rs#L196-L202 [`Execs`]: https://github.com/rust-lang/cargo/blob/d847468768446168b596f721844193afaaf9d3f2/crates/cargo-test-support/src/lib.rs#L531-L550 [`support`]: https://github.com/rust-lang/cargo/blob/master/crates/cargo-test-support/src/lib.rs [`support::compare`]: https://github.com/rust-lang/cargo/blob/master/crates/cargo-test-support/src/compare.rs [`support::registry::Package`]: https://github.com/rust-lang/cargo/blob/d847468768446168b596f721844193afaaf9d3f2/crates/cargo-test-support/src/registry.rs#L311-L389 [`support::git`]: https://github.com/rust-lang/cargo/blob/master/crates/cargo-test-support/src/git.rs [Running Cargo]: ../process/working-on-cargo.md#running-cargo [`snapbox`]: https://docs.rs/snapbox/latest/snapbox/ [`Command`]: https://docs.rs/snapbox/latest/snapbox/cmd/struct.Command.html [`OutputAssert`]: https://docs.rs/snapbox/latest/snapbox/cmd/struct.OutputAssert.html [`Assert`]: https://docs.rs/snapbox/latest/snapbox/struct.Assert.html 
cargo-0.66.0/src/doc/man/000077500000000000000000000000001432416201200150005ustar00rootroot00000000000000cargo-0.66.0/src/doc/man/cargo-add.md000066400000000000000000000075161432416201200171540ustar00rootroot00000000000000# cargo-add(1) {{*set actionverb="Add"}} {{*set nouns="adds"}} ## NAME cargo-add - Add dependencies to a Cargo.toml manifest file ## SYNOPSIS `cargo add` [_options_] _crate_...\ `cargo add` [_options_] `--path` _path_\ `cargo add` [_options_] `--git` _url_ [_crate_...]\ ## DESCRIPTION This command can add or modify dependencies. The source for the dependency can be specified with: * _crate_`@`_version_: Fetch from a registry with a version constraint of "_version_" * `--path` _path_: Fetch from the specified _path_ * `--git` _url_: Pull from a git repo at _url_ If no source is specified, then a best effort will be made to select one, including: * Existing dependencies in other tables (like `dev-dependencies`) * Workspace members * Latest release in the registry When you add a package that is already present, the existing entry will be updated with the flags specified. Upon successful invocation, the enabled (`+`) and disabled (`-`) [features] of the specified dependency will be listed in the command's output. [features]: ../reference/features.md ## OPTIONS ### Source options {{#options}} {{#option "`--git` _url_" }} [Git URL to add the specified crate from](../reference/specifying-dependencies.html#specifying-dependencies-from-git-repositories). {{/option}} {{#option "`--branch` _branch_" }} Branch to use when adding from git. {{/option}} {{#option "`--tag` _tag_" }} Tag to use when adding from git. {{/option}} {{#option "`--rev` _sha_" }} Specific commit to use when adding from git. {{/option}} {{#option "`--path` _path_" }} [Filesystem path](../reference/specifying-dependencies.html#specifying-path-dependencies) to local crate to add. 
{{/option}} {{> options-registry }} {{/options}} ### Section options {{#options}} {{#option "`--dev`" }} Add as a [development dependency](../reference/specifying-dependencies.html#development-dependencies). {{/option}} {{#option "`--build`" }} Add as a [build dependency](../reference/specifying-dependencies.html#build-dependencies). {{/option}} {{#option "`--target` _target_" }} Add as a dependency to the [given target platform](../reference/specifying-dependencies.html#platform-specific-dependencies). {{/option}} {{/options}} ### Dependency options {{#options}} {{#option "`--rename` _name_" }} [Rename](../reference/specifying-dependencies.html#renaming-dependencies-in-cargotoml) the dependency. {{/option}} {{#option "`--optional`" }} Mark the dependency as [optional](../reference/features.html#optional-dependencies). {{/option}} {{#option "`--no-optional`" }} Mark the dependency as [required](../reference/features.html#optional-dependencies). {{/option}} {{#option "`--no-default-features`" }} Disable the [default features](../reference/features.html#dependency-features). {{/option}} {{#option "`--default-features`" }} Re-enable the [default features](../reference/features.html#dependency-features). {{/option}} {{#option "`--features` _features_" }} Space or comma separated list of [features to activate](../reference/features.html#dependency-features). When adding multiple crates, the features for a specific crate may be enabled with `package-name/feature-name` syntax. This flag may be specified multiple times, which enables all specified features. {{/option}} {{/options}} ### Display Options {{#options}} {{> options-display }} {{/options}} ### Manifest Options {{#options}} {{> options-manifest-path }} {{/options}} {{> section-options-common }} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. Add `regex` as a dependency cargo add regex 2. Add `trybuild` as a dev-dependency cargo add --dev trybuild 3. 
Add an older version of `nom` as a dependency cargo add nom@5 4. Add support for serializing data structures to json with `derive`s cargo add serde serde_json -F serde/derive ## SEE ALSO {{man "cargo" 1}} cargo-0.66.0/src/doc/man/cargo-bench.md000066400000000000000000000107411432416201200174750ustar00rootroot00000000000000# cargo-bench(1) {{*set actionverb="Benchmark"}} {{*set nouns="benchmarks"}} {{*set multitarget=true}} ## NAME cargo-bench - Execute benchmarks of a package ## SYNOPSIS `cargo bench` [_options_] [_benchname_] [`--` _bench-options_] ## DESCRIPTION Compile and execute benchmarks. The benchmark filtering argument _benchname_ and all the arguments following the two dashes (`--`) are passed to the benchmark binaries and thus to _libtest_ (rustc's built in unit-test and micro-benchmarking framework). If you are passing arguments to both Cargo and the binary, the ones after `--` go to the binary, the ones before go to Cargo. For details about libtest's arguments see the output of `cargo bench -- --help` and check out the rustc book's chapter on how tests work at . As an example, this will run only the benchmark named `foo` (and skip other similarly named benchmarks like `foobar`): cargo bench -- foo --exact Benchmarks are built with the `--test` option to `rustc` which creates a special executable by linking your code with libtest. The executable automatically runs all functions annotated with the `#[bench]` attribute. Cargo passes the `--bench` flag to the test harness to tell it to run only benchmarks. The libtest harness may be disabled by setting `harness = false` in the target manifest settings, in which case your code will need to provide its own `main` function to handle running benchmarks. > **Note**: The > [`#[bench]` attribute](https://doc.rust-lang.org/nightly/unstable-book/library-features/test.html) > is currently unstable and only available on the > [nightly channel](https://doc.rust-lang.org/book/appendix-07-nightly-rust.html). 
> There are some packages available on > [crates.io](https://crates.io/keywords/benchmark) that may help with > running benchmarks on the stable channel, such as > [Criterion](https://crates.io/crates/criterion). By default, `cargo bench` uses the [`bench` profile], which enables optimizations and disables debugging information. If you need to debug a benchmark, you can use the `--profile=dev` command-line option to switch to the dev profile. You can then run the debug-enabled benchmark within a debugger. [`bench` profile]: ../reference/profiles.html#bench ## OPTIONS ### Benchmark Options {{> options-test }} {{> section-package-selection }} ### Target Selection When no target selection options are given, `cargo bench` will build the following targets of the selected packages: - lib β€” used to link with binaries and benchmarks - bins (only if benchmark targets are built and required features are available) - lib as a benchmark - bins as benchmarks - benchmark targets The default behavior can be changed by setting the `bench` flag for the target in the manifest settings. Setting examples to `bench = true` will build and run the example as a benchmark. Setting targets to `bench = false` will stop them from being benchmarked by default. Target selection options that take a target by name ignore the `bench` flag and will always benchmark the given target. {{> options-targets-bin-auto-built }} {{> options-targets }} {{> section-features }} ### Compilation Options {{#options}} {{> options-target-triple }} {{> options-profile }} {{> options-ignore-rust-version }} {{> options-timings }} {{/options}} ### Output Options {{#options}} {{> options-target-dir }} {{/options}} ### Display Options By default the Rust test harness hides output from benchmark execution to keep results readable. 
Benchmark output can be recovered (e.g., for debugging) by passing `--nocapture` to the benchmark binaries: cargo bench -- --nocapture {{#options}} {{> options-display }} {{> options-message-format }} {{/options}} ### Manifest Options {{#options}} {{> options-manifest-path }} {{> options-locked }} {{/options}} {{> section-options-common }} ### Miscellaneous Options The `--jobs` argument affects the building of the benchmark executable but does not affect how many threads are used when running the benchmarks. The Rust test harness runs benchmarks serially in a single thread. {{#options}} {{> options-jobs }} {{> options-keep-going }} {{/options}} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. Build and execute all the benchmarks of the current package: cargo bench 2. Run only a specific benchmark within a specific benchmark target: cargo bench --bench bench_name -- modname::some_benchmark ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-test" 1}} cargo-0.66.0/src/doc/man/cargo-build.md000066400000000000000000000042631432416201200175170ustar00rootroot00000000000000# cargo-build(1) {{*set actionverb="Build"}} {{*set multitarget=true}} ## NAME cargo-build - Compile the current package ## SYNOPSIS `cargo build` [_options_] ## DESCRIPTION Compile local packages and all of their dependencies. ## OPTIONS {{> section-package-selection }} ### Target Selection When no target selection options are given, `cargo build` will build all binary and library targets of the selected packages. Binaries are skipped if they have `required-features` that are missing. {{> options-targets-bin-auto-built }} {{> options-targets }} {{> section-features }} ### Compilation Options {{#options}} {{> options-target-triple }} {{> options-release }} {{> options-profile }} {{> options-ignore-rust-version }} {{> options-timings }} {{/options}} ### Output Options {{#options}} {{> options-target-dir }} {{#option "`--out-dir` _directory_" }} Copy final artifacts to this directory. 
This option is unstable and available only on the [nightly channel](https://doc.rust-lang.org/book/appendix-07-nightly-rust.html) and requires the `-Z unstable-options` flag to enable. See for more information. {{/option}} {{/options}} ### Display Options {{#options}} {{> options-display }} {{> options-message-format }} {{#option "`--build-plan`" }} Outputs a series of JSON messages to stdout that indicate the commands to run the build. This option is unstable and available only on the [nightly channel](https://doc.rust-lang.org/book/appendix-07-nightly-rust.html) and requires the `-Z unstable-options` flag to enable. See for more information. {{/option}} {{/options}} ### Manifest Options {{#options}} {{> options-manifest-path }} {{> options-locked }} {{/options}} {{> section-options-common }} ### Miscellaneous Options {{#options}} {{> options-jobs }} {{> options-keep-going }} {{> options-future-incompat }} {{/options}} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. Build the local package and all of its dependencies: cargo build 2. Build with optimizations: cargo build --release ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-rustc" 1}} cargo-0.66.0/src/doc/man/cargo-check.md000066400000000000000000000035071432416201200174750ustar00rootroot00000000000000# cargo-check(1) {{*set actionverb="Check"}} {{*set multitarget=true}} ## NAME cargo-check - Check the current package ## SYNOPSIS `cargo check` [_options_] ## DESCRIPTION Check a local package and all of its dependencies for errors. This will essentially compile the packages without performing the final step of code generation, which is faster than running `cargo build`. The compiler will save metadata files to disk so that future runs will reuse them if the source has not been modified. Some diagnostics and errors are only emitted during code generation, so they inherently won't be reported with `cargo check`. 
## OPTIONS {{> section-package-selection }} ### Target Selection When no target selection options are given, `cargo check` will check all binary and library targets of the selected packages. Binaries are skipped if they have `required-features` that are missing. {{> options-targets }} {{> section-features }} ### Compilation Options {{#options}} {{> options-target-triple }} {{> options-release }} {{> options-profile-legacy-check }} {{> options-ignore-rust-version }} {{> options-timings }} {{/options}} ### Output Options {{#options}} {{> options-target-dir }} {{/options}} ### Display Options {{#options}} {{> options-display }} {{> options-message-format }} {{/options}} ### Manifest Options {{#options}} {{> options-manifest-path }} {{> options-locked }} {{/options}} {{> section-options-common }} ### Miscellaneous Options {{#options}} {{> options-jobs }} {{> options-keep-going }} {{> options-future-incompat }} {{/options}} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. Check the local package for errors: cargo check 2. Check all targets, including unit tests: cargo check --all-targets --profile=test ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-build" 1}} cargo-0.66.0/src/doc/man/cargo-clean.md000066400000000000000000000030551432416201200175000ustar00rootroot00000000000000# cargo-clean(1) {{*set actionverb="Clean"}} {{*set multitarget=true}} ## NAME cargo-clean - Remove generated artifacts ## SYNOPSIS `cargo clean` [_options_] ## DESCRIPTION Remove artifacts from the target directory that Cargo has generated in the past. With no options, `cargo clean` will delete the entire target directory. ## OPTIONS ### Package Selection When no packages are selected, all packages and all dependencies in the workspace are cleaned. {{#options}} {{#option "`-p` _spec_..." "`--package` _spec_..." }} Clean only the specified packages. This flag may be specified multiple times. See {{man "cargo-pkgid" 1}} for the SPEC format. 
{{/option}} {{/options}} ### Clean Options {{#options}} {{#option "`--doc`" }} This option will cause `cargo clean` to remove only the `doc` directory in the target directory. {{/option}} {{#option "`--release`" }} Remove all artifacts in the `release` directory. {{/option}} {{#option "`--profile` _name_" }} Remove all artifacts in the directory with the given profile name. {{/option}} {{> options-target-dir }} {{> options-target-triple }} {{/options}} ### Display Options {{#options}} {{> options-display }} {{/options}} ### Manifest Options {{#options}} {{> options-manifest-path }} {{> options-locked }} {{/options}} {{> section-options-common }} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. Remove the entire target directory: cargo clean 2. Remove only the release artifacts: cargo clean --release ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-build" 1}} cargo-0.66.0/src/doc/man/cargo-doc.md000066400000000000000000000050771432416201200171710ustar00rootroot00000000000000# cargo-doc(1) {{*set actionverb="Document"}} {{*set multitarget=true}} ## NAME cargo-doc - Build a package's documentation ## SYNOPSIS `cargo doc` [_options_] ## DESCRIPTION Build the documentation for the local package and all dependencies. The output is placed in `target/doc` in rustdoc's usual format. ## OPTIONS ### Documentation Options {{#options}} {{#option "`--open`" }} Open the docs in a browser after building them. This will use your default browser unless you define another one in the `BROWSER` environment variable or use the [`doc.browser`](../reference/config.html#docbrowser) configuration option. {{/option}} {{#option "`--no-deps`" }} Do not build documentation for dependencies. {{/option}} {{#option "`--document-private-items`" }} Include non-public items in the documentation. This will be enabled by default if documenting a binary target. 
{{/option}} {{/options}} {{> section-package-selection }} ### Target Selection When no target selection options are given, `cargo doc` will document all binary and library targets of the selected package. The binary will be skipped if its name is the same as the lib target. Binaries are skipped if they have `required-features` that are missing. The default behavior can be changed by setting `doc = false` for the target in the manifest settings. Using target selection options will ignore the `doc` flag and will always document the given target. {{#options}} {{> options-targets-lib-bin }} {{#option "`--example` _name_..." }} {{actionverb}} the specified example. This flag may be specified multiple times and supports common Unix glob patterns. {{/option}} {{#option "`--examples`" }} {{actionverb}} all example targets. {{/option}} {{/options}} {{> section-features }} ### Compilation Options {{#options}} {{> options-target-triple }} {{> options-release }} {{> options-profile }} {{> options-ignore-rust-version }} {{> options-timings }} {{/options}} ### Output Options {{#options}} {{> options-target-dir }} {{/options}} ### Display Options {{#options}} {{> options-display }} {{> options-message-format }} {{/options}} ### Manifest Options {{#options}} {{> options-manifest-path }} {{> options-locked }} {{/options}} {{> section-options-common }} ### Miscellaneous Options {{#options}} {{> options-jobs }} {{> options-keep-going }} {{/options}} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. Build the local package documentation and its dependencies and output to `target/doc`. 
cargo doc ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-rustdoc" 1}}, {{man "rustdoc" 1}} cargo-0.66.0/src/doc/man/cargo-fetch.md000066400000000000000000000026131432416201200175060ustar00rootroot00000000000000# cargo-fetch(1) {{*set actionverb="Fetch"}} {{*set target-default-to-all-arch=true}} {{*set multitarget=true}} ## NAME cargo-fetch - Fetch dependencies of a package from the network ## SYNOPSIS `cargo fetch` [_options_] ## DESCRIPTION If a `Cargo.lock` file is available, this command will ensure that all of the git dependencies and/or registry dependencies are downloaded and locally available. Subsequent Cargo commands will be able to run offline after a `cargo fetch` unless the lock file changes. If the lock file is not available, then this command will generate the lock file before fetching the dependencies. If `--target` is not specified, then all target dependencies are fetched. See also the [cargo-prefetch](https://crates.io/crates/cargo-prefetch) plugin which adds a command to download popular crates. This may be useful if you plan to use Cargo without a network with the `--offline` flag. ## OPTIONS ### Fetch options {{#options}} {{> options-target-triple }} {{/options}} ### Display Options {{#options}} {{> options-display }} {{/options}} ### Manifest Options {{#options}} {{> options-manifest-path }} {{> options-locked }} {{/options}} {{> section-options-common }} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. 
Fetch all dependencies: cargo fetch ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-update" 1}}, {{man "cargo-generate-lockfile" 1}} cargo-0.66.0/src/doc/man/cargo-fix.md000066400000000000000000000114441432416201200172050ustar00rootroot00000000000000# cargo-fix(1) {{*set actionverb="Fix"}} {{*set multitarget=true}} ## NAME cargo-fix - Automatically fix lint warnings reported by rustc ## SYNOPSIS `cargo fix` [_options_] ## DESCRIPTION This Cargo subcommand will automatically take rustc's suggestions from diagnostics like warnings and apply them to your source code. This is intended to help automate tasks that rustc itself already knows how to tell you to fix! Executing `cargo fix` will under the hood execute {{man "cargo-check" 1}}. Any warnings applicable to your crate will be automatically fixed (if possible) and all remaining warnings will be displayed when the check process is finished. For example if you'd like to apply all fixes to the current package, you can run: cargo fix which behaves the same as `cargo check --all-targets`. `cargo fix` is only capable of fixing code that is normally compiled with `cargo check`. If code is conditionally enabled with optional features, you will need to enable those features for that code to be analyzed: cargo fix --features foo Similarly, other `cfg` expressions like platform-specific code will need to pass `--target` to fix code for the given target. cargo fix --target x86_64-pc-windows-gnu If you encounter any problems with `cargo fix` or otherwise have any questions or feature requests please don't hesitate to file an issue at . ### Edition migration The `cargo fix` subcommand can also be used to migrate a package from one [edition] to the next. The general procedure is: 1. Run `cargo fix --edition`. Consider also using the `--all-features` flag if your project has multiple features. 
You may also want to run `cargo fix --edition` multiple times with different `--target` flags if your project has platform-specific code gated by `cfg` attributes. 2. Modify `Cargo.toml` to set the [edition field] to the new edition. 3. Run your project tests to verify that everything still works. If new warnings are issued, you may want to consider running `cargo fix` again (without the `--edition` flag) to apply any suggestions given by the compiler. And hopefully that's it! Just keep in mind the caveats mentioned above that `cargo fix` cannot update code for inactive features or `cfg` expressions. Also, in some rare cases the compiler is unable to automatically migrate all code to the new edition, and this may require manual changes after building with the new edition. [edition]: https://doc.rust-lang.org/edition-guide/editions/transitioning-an-existing-project-to-a-new-edition.html [edition field]: ../reference/manifest.html#the-edition-field ## OPTIONS ### Fix options {{#options}} {{#option "`--broken-code`" }} Fix code even if it already has compiler errors. This is useful if `cargo fix` fails to apply the changes. It will apply the changes and leave the broken code in the working directory for you to inspect and manually fix. {{/option}} {{#option "`--edition`" }} Apply changes that will update the code to the next edition. This will not update the edition in the `Cargo.toml` manifest, which must be updated manually after `cargo fix --edition` has finished. {{/option}} {{#option "`--edition-idioms`" }} Apply suggestions that will update code to the preferred style for the current edition. {{/option}} {{#option "`--allow-no-vcs`" }} Fix code even if a VCS was not detected. {{/option}} {{#option "`--allow-dirty`" }} Fix code even if the working directory has changes. {{/option}} {{#option "`--allow-staged`" }} Fix code even if the working directory has staged changes. 
{{/option}} {{/options}} {{> section-package-selection }} ### Target Selection When no target selection options are given, `cargo fix` will fix all targets (`--all-targets` implied). Binaries are skipped if they have `required-features` that are missing. {{> options-targets }} {{> section-features }} ### Compilation Options {{#options}} {{> options-target-triple }} {{> options-release }} {{> options-profile-legacy-check }} {{> options-ignore-rust-version }} {{> options-timings }} {{/options}} ### Output Options {{#options}} {{> options-target-dir }} {{/options}} ### Display Options {{#options}} {{> options-display }} {{> options-message-format }} {{/options}} ### Manifest Options {{#options}} {{> options-manifest-path }} {{> options-locked }} {{/options}} {{> section-options-common }} ### Miscellaneous Options {{#options}} {{> options-jobs }} {{> options-keep-going }} {{/options}} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. Apply compiler suggestions to the local package: cargo fix 2. Update a package to prepare it for the next edition: cargo fix --edition 3. Apply suggested idioms for the current edition: cargo fix --edition-idioms ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-check" 1}} cargo-0.66.0/src/doc/man/cargo-generate-lockfile.md000066400000000000000000000016721432416201200220010ustar00rootroot00000000000000# cargo-generate-lockfile(1) ## NAME cargo-generate-lockfile - Generate the lockfile for a package ## SYNOPSIS `cargo generate-lockfile` [_options_] ## DESCRIPTION This command will create the `Cargo.lock` lockfile for the current package or workspace. If the lockfile already exists, it will be rebuilt with the latest available version of every package. See also {{man "cargo-update" 1}} which is also capable of creating a `Cargo.lock` lockfile and has more options for controlling update behavior. 
## OPTIONS ### Display Options {{#options}} {{> options-display }} {{/options}} ### Manifest Options {{#options}} {{> options-manifest-path }} {{> options-locked }} {{/options}} {{> section-options-common }} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. Create or update the lockfile for the current package or workspace: cargo generate-lockfile ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-update" 1}} cargo-0.66.0/src/doc/man/cargo-help.md000066400000000000000000000005331432416201200173440ustar00rootroot00000000000000# cargo-help(1) ## NAME cargo-help - Get help for a Cargo command ## SYNOPSIS `cargo help` [_subcommand_] ## DESCRIPTION Prints a help message for the given command. ## EXAMPLES 1. Get help for a command: cargo help build 2. Help is also available with the `--help` flag: cargo build --help ## SEE ALSO {{man "cargo" 1}} cargo-0.66.0/src/doc/man/cargo-init.md000066400000000000000000000020221432416201200173520ustar00rootroot00000000000000# cargo-init(1) ## NAME cargo-init - Create a new Cargo package in an existing directory ## SYNOPSIS `cargo init` [_options_] [_path_] ## DESCRIPTION This command will create a new Cargo manifest in the current directory. Give a path as an argument to create in the given directory. If there are typically-named Rust source files already in the directory, those will be used. If not, then a sample `src/main.rs` file will be created, or `src/lib.rs` if `--lib` is passed. If the directory is not already in a VCS repository, then a new repository is created (see `--vcs` below). See {{man "cargo-new" 1}} for a similar command which will create a new package in a new directory. ## OPTIONS ### Init Options {{> options-new }} ### Display Options {{#options}} {{> options-display }} {{/options}} {{> section-options-common }} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. 
Create a binary Cargo package in the current directory: cargo init ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-new" 1}} cargo-0.66.0/src/doc/man/cargo-install.md000066400000000000000000000145501432416201200200660ustar00rootroot00000000000000# cargo-install(1) {{*set actionverb="Install"}} {{*set temp-target-dir=true}} ## NAME cargo-install - Build and install a Rust binary ## SYNOPSIS `cargo install` [_options_] _crate_[@_version_]...\ `cargo install` [_options_] `--path` _path_\ `cargo install` [_options_] `--git` _url_ [_crate_...]\ `cargo install` [_options_] `--list` ## DESCRIPTION This command manages Cargo's local set of installed binary crates. Only packages which have executable `[[bin]]` or `[[example]]` targets can be installed, and all executables are installed into the installation root's `bin` folder. {{> description-install-root }} There are multiple sources from which a crate can be installed. The default location is crates.io but the `--git`, `--path`, and `--registry` flags can change this source. If the source contains more than one package (such as crates.io or a git repository with multiple crates) the _crate_ argument is required to indicate which crate should be installed. Crates from crates.io can optionally specify the version they wish to install via the `--version` flags, and similarly packages from git repositories can optionally specify the branch, tag, or revision that should be installed. If a crate has multiple binaries, the `--bin` argument can selectively install only one of them, and if you'd rather install examples the `--example` argument can be used as well. If the package is already installed, Cargo will reinstall it if the installed version does not appear to be up-to-date. If any of the following values change, then Cargo will reinstall the package: - The package version and source. - The set of binary names installed. - The chosen features. - The profile (`--profile`). - The target (`--target`). 
Installing with `--path` will always build and install, unless there are conflicting binaries from another package. The `--force` flag may be used to force Cargo to always reinstall the package. If the source is crates.io or `--git` then by default the crate will be built in a temporary target directory. To avoid this, the target directory can be specified by setting the `CARGO_TARGET_DIR` environment variable to a relative path. In particular, this can be useful for caching build artifacts on continuous integration systems. By default, the `Cargo.lock` file that is included with the package will be ignored. This means that Cargo will recompute which versions of dependencies to use, possibly using newer versions that have been released since the package was published. The `--locked` flag can be used to force Cargo to use the packaged `Cargo.lock` file if it is available. This may be useful for ensuring reproducible builds, to use the exact same set of dependencies that were available when the package was published. It may also be useful if a newer version of a dependency is published that no longer builds on your system, or has other problems. The downside to using `--locked` is that you will not receive any fixes or updates to any dependency. Note that Cargo did not start publishing `Cargo.lock` files until version 1.37, which means packages published with prior versions will not have a `Cargo.lock` file available. ## OPTIONS ### Install Options {{#options}} {{#option "`--vers` _version_" "`--version` _version_" }} Specify a version to install. This may be a [version requirement](../reference/specifying-dependencies.md), like `~1.2`, to have Cargo select the newest version from the given requirement. If the version does not have a requirement operator (such as `^` or `~`), then it must be in the form _MAJOR.MINOR.PATCH_, and will install exactly that version; it is *not* treated as a caret requirement like Cargo dependencies are. 
{{/option}} {{#option "`--git` _url_" }} Git URL to install the specified crate from. {{/option}} {{#option "`--branch` _branch_" }} Branch to use when installing from git. {{/option}} {{#option "`--tag` _tag_" }} Tag to use when installing from git. {{/option}} {{#option "`--rev` _sha_" }} Specific commit to use when installing from git. {{/option}} {{#option "`--path` _path_" }} Filesystem path to local crate to install. {{/option}} {{#option "`--list`" }} List all installed packages and their versions. {{/option}} {{#option "`-f`" "`--force`" }} Force overwriting existing crates or binaries. This can be used if a package has installed a binary with the same name as another package. This is also useful if something has changed on the system that you want to rebuild with, such as a newer version of `rustc`. {{/option}} {{#option "`--no-track`" }} By default, Cargo keeps track of the installed packages with a metadata file stored in the installation root directory. This flag tells Cargo not to use or create that file. With this flag, Cargo will refuse to overwrite any existing files unless the `--force` flag is used. This also disables Cargo's ability to protect against multiple concurrent invocations of Cargo installing at the same time. {{/option}} {{#option "`--bin` _name_..." }} Install only the specified binary. {{/option}} {{#option "`--bins`" }} Install all binaries. {{/option}} {{#option "`--example` _name_..." }} Install only the specified example. {{/option}} {{#option "`--examples`" }} Install all examples. {{/option}} {{#option "`--root` _dir_" }} Directory to install packages into. {{/option}} {{> options-registry }} {{> options-index }} {{/options}} {{> section-features }} ### Compilation Options {{#options}} {{> options-target-triple }} {{> options-target-dir }} {{#option "`--debug`" }} Build with the `dev` profile instead of the `release` profile. See also the `--profile` option for choosing a specific profile by name. 
{{/option}} {{> options-profile }} {{> options-timings }} {{/options}} ### Manifest Options {{#options}} {{> options-locked }} {{/options}} ### Miscellaneous Options {{#options}} {{> options-jobs }} {{> options-keep-going }} {{/options}} ### Display Options {{#options}} {{> options-display }} {{> options-message-format }} {{/options}} {{> section-options-common }} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. Install or upgrade a package from crates.io: cargo install ripgrep 2. Install or reinstall the package in the current directory: cargo install --path . 3. View the list of installed packages: cargo install --list ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-uninstall" 1}}, {{man "cargo-search" 1}}, {{man "cargo-publish" 1}} cargo-0.66.0/src/doc/man/cargo-locate-project.md000066400000000000000000000021021432416201200213210ustar00rootroot00000000000000# cargo-locate-project(1) ## NAME cargo-locate-project - Print a JSON representation of a Cargo.toml file's location ## SYNOPSIS `cargo locate-project` [_options_] ## DESCRIPTION This command will print a JSON object to stdout with the full path to the `Cargo.toml` manifest. ## OPTIONS {{#options}} {{#option "`--workspace`" }} Locate the `Cargo.toml` at the root of the workspace, as opposed to the current workspace member. {{/option}} {{/options}} ### Display Options {{#options}} {{#option "`--message-format` _fmt_" }} The representation in which to print the project location. Valid values: - `json` (default): JSON object with the path under the key "root". - `plain`: Just the path. {{/option}} {{> options-display }} {{/options}} ### Manifest Options {{#options}} {{> options-manifest-path }} {{/options}} {{> section-options-common }} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. 
Display the path to the manifest based on the current directory: cargo locate-project ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-metadata" 1}} cargo-0.66.0/src/doc/man/cargo-login.md000066400000000000000000000017671432416201200175360ustar00rootroot00000000000000# cargo-login(1) ## NAME cargo-login - Save an API token from the registry locally ## SYNOPSIS `cargo login` [_options_] [_token_] ## DESCRIPTION This command will save the API token to disk so that commands that require authentication, such as {{man "cargo-publish" 1}}, will be automatically authenticated. The token is saved in `$CARGO_HOME/credentials.toml`. `CARGO_HOME` defaults to `.cargo` in your home directory. If the _token_ argument is not specified, it will be read from stdin. The API token for crates.io may be retrieved from . Take care to keep the token secret, it should not be shared with anyone else. ## OPTIONS ### Login Options {{#options}} {{> options-registry }} {{/options}} ### Display Options {{#options}} {{> options-display }} {{/options}} {{> section-options-common }} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. Save the API token to disk: cargo login ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-publish" 1}} cargo-0.66.0/src/doc/man/cargo-metadata.md000066400000000000000000000325751432416201200202070ustar00rootroot00000000000000# cargo-metadata(1) ## NAME cargo-metadata - Machine-readable metadata about the current package ## SYNOPSIS `cargo metadata` [_options_] ## DESCRIPTION Output JSON to stdout containing information about the workspace members and resolved dependencies of the current package. It is recommended to include the `--format-version` flag to future-proof your code to ensure the output is in the format you are expecting. See the [cargo_metadata crate](https://crates.io/crates/cargo_metadata) for a Rust API for reading the metadata. ## OUTPUT FORMAT The output has the following format: ```javascript { /* Array of all packages in the workspace. 
It also includes all feature-enabled dependencies unless --no-deps is used. */ "packages": [ { /* The name of the package. */ "name": "my-package", /* The version of the package. */ "version": "0.1.0", /* The Package ID, a unique identifier for referring to the package. */ "id": "my-package 0.1.0 (path+file:///path/to/my-package)", /* The license value from the manifest, or null. */ "license": "MIT/Apache-2.0", /* The license-file value from the manifest, or null. */ "license_file": "LICENSE", /* The description value from the manifest, or null. */ "description": "Package description.", /* The source ID of the package. This represents where a package is retrieved from. This is null for path dependencies and workspace members. For other dependencies, it is a string with the format: - "registry+URL" for registry-based dependencies. Example: "registry+https://github.com/rust-lang/crates.io-index" - "git+URL" for git-based dependencies. Example: "git+https://github.com/rust-lang/cargo?rev=5e85ba14aaa20f8133863373404cb0af69eeef2c#5e85ba14aaa20f8133863373404cb0af69eeef2c" */ "source": null, /* Array of dependencies declared in the package's manifest. */ "dependencies": [ { /* The name of the dependency. */ "name": "bitflags", /* The source ID of the dependency. May be null, see description for the package source. */ "source": "registry+https://github.com/rust-lang/crates.io-index", /* The version requirement for the dependency. Dependencies without a version requirement have a value of "*". */ "req": "^1.0", /* The dependency kind. "dev", "build", or null for a normal dependency. */ "kind": null, /* If the dependency is renamed, this is the new name for the dependency as a string. null if it is not renamed. */ "rename": null, /* Boolean of whether or not this is an optional dependency. */ "optional": false, /* Boolean of whether or not default features are enabled. */ "uses_default_features": true, /* Array of features enabled. 
*/ "features": [], /* The target platform for the dependency. null if not a target dependency. */ "target": "cfg(windows)", /* The file system path for a local path dependency. not present if not a path dependency. */ "path": "/path/to/dep", /* A string of the URL of the registry this dependency is from. If not specified or null, the dependency is from the default registry (crates.io). */ "registry": null } ], /* Array of Cargo targets. */ "targets": [ { /* Array of target kinds. - lib targets list the `crate-type` values from the manifest such as "lib", "rlib", "dylib", "proc-macro", etc. (default ["lib"]) - binary is ["bin"] - example is ["example"] - integration test is ["test"] - benchmark is ["bench"] - build script is ["custom-build"] */ "kind": [ "bin" ], /* Array of crate types. - lib and example libraries list the `crate-type` values from the manifest such as "lib", "rlib", "dylib", "proc-macro", etc. (default ["lib"]) - all other target kinds are ["bin"] */ "crate_types": [ "bin" ], /* The name of the target. */ "name": "my-package", /* Absolute path to the root source file of the target. */ "src_path": "/path/to/my-package/src/main.rs", /* The Rust edition of the target. Defaults to the package edition. */ "edition": "2018", /* Array of required features. This property is not included if no required features are set. */ "required-features": ["feat1"], /* Whether the target should be documented by `cargo doc`. */ "doc": true, /* Whether or not this target has doc tests enabled, and the target is compatible with doc testing. */ "doctest": false, /* Whether or not this target should be built and run with `--test` */ "test": true } ], /* Set of features defined for the package. Each feature maps to an array of features or dependencies it enables. */ "features": { "default": [ "feat1" ], "feat1": [], "feat2": [] }, /* Absolute path to this package's manifest. */ "manifest_path": "/path/to/my-package/Cargo.toml", /* Package metadata. 
This is null if no metadata is specified. */ "metadata": { "docs": { "rs": { "all-features": true } } }, /* List of registries to which this package may be published. Publishing is unrestricted if null, and forbidden if an empty array. */ "publish": [ "crates-io" ], /* Array of authors from the manifest. Empty array if no authors specified. */ "authors": [ "Jane Doe " ], /* Array of categories from the manifest. */ "categories": [ "command-line-utilities" ], /* Optional string that is the default binary picked by cargo run. */ "default_run": null, /* Optional string that is the minimum supported rust version */ "rust_version": "1.56", /* Array of keywords from the manifest. */ "keywords": [ "cli" ], /* The readme value from the manifest or null if not specified. */ "readme": "README.md", /* The repository value from the manifest or null if not specified. */ "repository": "https://github.com/rust-lang/cargo", /* The homepage value from the manifest or null if not specified. */ "homepage": "https://rust-lang.org", /* The documentation value from the manifest or null if not specified. */ "documentation": "https://doc.rust-lang.org/stable/std", /* The default edition of the package. Note that individual targets may have different editions. */ "edition": "2018", /* Optional string that is the name of a native library the package is linking to. */ "links": null, } ], /* Array of members of the workspace. Each entry is the Package ID for the package. */ "workspace_members": [ "my-package 0.1.0 (path+file:///path/to/my-package)", ], // The resolved dependency graph for the entire workspace. The enabled // features are based on the enabled features for the "current" package. // Inactivated optional dependencies are not listed. // // This is null if --no-deps is specified. // // By default, this includes all dependencies for all target platforms. // The `--filter-platform` flag may be used to narrow to a specific // target triple. 
"resolve": { /* Array of nodes within the dependency graph. Each node is a package. */ "nodes": [ { /* The Package ID of this node. */ "id": "my-package 0.1.0 (path+file:///path/to/my-package)", /* The dependencies of this package, an array of Package IDs. */ "dependencies": [ "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" ], /* The dependencies of this package. This is an alternative to "dependencies" which contains additional information. In particular, this handles renamed dependencies. */ "deps": [ { /* The name of the dependency's library target. If this is a renamed dependency, this is the new name. */ "name": "bitflags", /* The Package ID of the dependency. */ "pkg": "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", /* Array of dependency kinds. Added in Cargo 1.40. */ "dep_kinds": [ { /* The dependency kind. "dev", "build", or null for a normal dependency. */ "kind": null, /* The target platform for the dependency. null if not a target dependency. */ "target": "cfg(windows)" } ] } ], /* Array of features enabled on this package. */ "features": [ "default" ] } ], /* The root package of the workspace. This is null if this is a virtual workspace. Otherwise it is the Package ID of the root package. */ "root": "my-package 0.1.0 (path+file:///path/to/my-package)" }, /* The absolute path to the build directory where Cargo places its output. */ "target_directory": "/path/to/my-package/target", /* The version of the schema for this metadata structure. This will be changed if incompatible changes are ever made. */ "version": 1, /* The absolute path to the root of the workspace. */ "workspace_root": "/path/to/my-package" /* Workspace metadata. This is null if no metadata is specified. */ "metadata": { "docs": { "rs": { "all-features": true } } } } ```` ## OPTIONS ### Output Options {{#options}} {{#option "`--no-deps`" }} Output information only about the workspace members and don't fetch dependencies. 
{{/option}} {{#option "`--format-version` _version_" }} Specify the version of the output format to use. Currently `1` is the only possible value. {{/option}} {{#option "`--filter-platform` _triple_" }} This filters the `resolve` output to only include dependencies for the given target triple. Without this flag, the resolve includes all targets. Note that the dependencies listed in the "packages" array still includes all dependencies. Each package definition is intended to be an unaltered reproduction of the information within `Cargo.toml`. {{/option}} {{/options}} {{> section-features }} ### Display Options {{#options}} {{> options-display }} {{/options}} ### Manifest Options {{#options}} {{> options-manifest-path }} {{> options-locked }} {{/options}} {{> section-options-common }} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. Output JSON about the current package: cargo metadata --format-version=1 ## SEE ALSO {{man "cargo" 1}} cargo-0.66.0/src/doc/man/cargo-new.md000066400000000000000000000015471432416201200172130ustar00rootroot00000000000000# cargo-new(1) ## NAME cargo-new - Create a new Cargo package ## SYNOPSIS `cargo new` [_options_] _path_ ## DESCRIPTION This command will create a new Cargo package in the given directory. This includes a simple template with a `Cargo.toml` manifest, sample source file, and a VCS ignore file. If the directory is not already in a VCS repository, then a new repository is created (see `--vcs` below). See {{man "cargo-init" 1}} for a similar command which will create a new manifest in an existing directory. ## OPTIONS ### New Options {{> options-new }} ### Display Options {{#options}} {{> options-display }} {{/options}} {{> section-options-common }} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. 
Create a binary Cargo package in the given directory: cargo new foo ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-init" 1}} cargo-0.66.0/src/doc/man/cargo-owner.md000066400000000000000000000031561432416201200175520ustar00rootroot00000000000000# cargo-owner(1) ## NAME cargo-owner - Manage the owners of a crate on the registry ## SYNOPSIS `cargo owner` [_options_] `--add` _login_ [_crate_]\ `cargo owner` [_options_] `--remove` _login_ [_crate_]\ `cargo owner` [_options_] `--list` [_crate_] ## DESCRIPTION This command will modify the owners for a crate on the registry. Owners of a crate can upload new versions and yank old versions. Non-team owners can also modify the set of owners, so take care! This command requires you to be authenticated with either the `--token` option or using {{man "cargo-login" 1}}. If the crate name is not specified, it will use the package name from the current directory. See [the reference](../reference/publishing.html#cargo-owner) for more information about owners and publishing. ## OPTIONS ### Owner Options {{#options}} {{#option "`-a`" "`--add` _login_..." }} Invite the given user or team as an owner. {{/option}} {{#option "`-r`" "`--remove` _login_..." }} Remove the given user or team as an owner. {{/option}} {{#option "`-l`" "`--list`" }} List owners of a crate. {{/option}} {{> options-token }} {{> options-index }} {{> options-registry }} {{/options}} ### Display Options {{#options}} {{> options-display }} {{/options}} {{> section-options-common }} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. List owners of a package: cargo owner --list foo 2. Invite an owner to a package: cargo owner --add username foo 3. 
Remove an owner from a package: cargo owner --remove username foo ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-login" 1}}, {{man "cargo-publish" 1}} cargo-0.66.0/src/doc/man/cargo-package.md000066400000000000000000000064111432416201200200100ustar00rootroot00000000000000# cargo-package(1) {{*set actionverb="Package"}} {{*set noall=true}} {{*set multitarget=true}} ## NAME cargo-package - Assemble the local package into a distributable tarball ## SYNOPSIS `cargo package` [_options_] ## DESCRIPTION This command will create a distributable, compressed `.crate` file with the source code of the package in the current directory. The resulting file will be stored in the `target/package` directory. This performs the following steps: 1. Load and check the current workspace, performing some basic checks. - Path dependencies are not allowed unless they have a version key. Cargo will ignore the path key for dependencies in published packages. `dev-dependencies` do not have this restriction. 2. Create the compressed `.crate` file. - The original `Cargo.toml` file is rewritten and normalized. - `[patch]`, `[replace]`, and `[workspace]` sections are removed from the manifest. - `Cargo.lock` is automatically included if the package contains an executable binary or example target. {{man "cargo-install" 1}} will use the packaged lock file if the `--locked` flag is used. - A `.cargo_vcs_info.json` file is included that contains information about the current VCS checkout hash if available (not included with `--allow-dirty`). 3. Extract the `.crate` file and build it to verify it can build. - This will rebuild your package from scratch to ensure that it can be built from a pristine state. The `--no-verify` flag can be used to skip this step. 4. Check that build scripts did not modify any source files. The list of files included can be controlled with the `include` and `exclude` fields in the manifest. 
See [the reference](../reference/publishing.html) for more details about packaging and publishing. ### .cargo_vcs_info.json format Will generate a `.cargo_vcs_info.json` in the following format ```javascript { "git": { "sha1": "aac20b6e7e543e6dd4118b246c77225e3a3a1302" }, "path_in_vcs": "" } ``` `path_in_vcs` will be set to a repo-relative path for packages in subdirectories of the version control repository. ## OPTIONS ### Package Options {{#options}} {{#option "`-l`" "`--list`" }} Print files included in a package without making one. {{/option}} {{#option "`--no-verify`" }} Don't verify the contents by building them. {{/option}} {{#option "`--no-metadata`" }} Ignore warnings about a lack of human-usable metadata (such as the description or the license). {{/option}} {{#option "`--allow-dirty`" }} Allow working directories with uncommitted VCS changes to be packaged. {{/option}} {{/options}} {{> section-package-selection }} ### Compilation Options {{#options}} {{> options-target-triple }} {{> options-target-dir }} {{/options}} {{> section-features }} ### Manifest Options {{#options}} {{> options-manifest-path }} {{> options-locked }} {{/options}} ### Miscellaneous Options {{#options}} {{> options-jobs }} {{> options-keep-going }} {{/options}} ### Display Options {{#options}} {{> options-display }} {{/options}} {{> section-options-common }} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. Create a compressed `.crate` file of the current package: cargo package ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-publish" 1}} cargo-0.66.0/src/doc/man/cargo-pkgid.md000066400000000000000000000043371432416201200175200ustar00rootroot00000000000000# cargo-pkgid(1) ## NAME cargo-pkgid - Print a fully qualified package specification ## SYNOPSIS `cargo pkgid` [_options_] [_spec_] ## DESCRIPTION Given a _spec_ argument, print out the fully qualified package ID specifier for a package or dependency in the current workspace. 
This command will generate an error if _spec_ is ambiguous as to which package it refers to in the dependency graph. If no _spec_ is given, then the specifier for the local package is printed. This command requires that a lockfile is available and dependencies have been fetched. A package specifier consists of a name, version, and source URL. You are allowed to use partial specifiers to succinctly match a specific package as long as it matches only one package. The format of a _spec_ can be one of the following: SPEC Structure | Example SPEC ---------------------------|-------------- _name_ | `bitflags` _name_`@`_version_ | `bitflags@1.0.4` _url_ | `https://github.com/rust-lang/cargo` _url_`#`_version_ | `https://github.com/rust-lang/cargo#0.33.0` _url_`#`_name_ | `https://github.com/rust-lang/crates.io-index#bitflags` _url_`#`_name_`:`_version_ | `https://github.com/rust-lang/cargo#crates-io@0.21.0` ## OPTIONS ### Package Selection {{#options}} {{#option "`-p` _spec_" "`--package` _spec_" }} Get the package ID for the given package instead of the current package. {{/option}} {{/options}} ### Display Options {{#options}} {{> options-display }} {{/options}} ### Manifest Options {{#options}} {{> options-manifest-path }} {{> options-locked }} {{/options}} {{> section-options-common }} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. Retrieve package specification for `foo` package: cargo pkgid foo 2. Retrieve package specification for version 1.0.0 of `foo`: cargo pkgid foo@1.0.0 3. Retrieve package specification for `foo` from crates.io: cargo pkgid https://github.com/rust-lang/crates.io-index#foo 4. 
Retrieve package specification for `foo` from a local package: cargo pkgid file:///path/to/local/package#foo ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-generate-lockfile" 1}}, {{man "cargo-metadata" 1}} cargo-0.66.0/src/doc/man/cargo-publish.md000066400000000000000000000050231432416201200200610ustar00rootroot00000000000000# cargo-publish(1) {{*set actionverb="Publish"}} {{*set multitarget=true}} ## NAME cargo-publish - Upload a package to the registry ## SYNOPSIS `cargo publish` [_options_] ## DESCRIPTION This command will create a distributable, compressed `.crate` file with the source code of the package in the current directory and upload it to a registry. The default registry is . This performs the following steps: 1. Performs a few checks, including: - Checks the `package.publish` key in the manifest for restrictions on which registries you are allowed to publish to. 2. Create a `.crate` file by following the steps in {{man "cargo-package" 1}}. 3. Upload the crate to the registry. Note that the server will perform additional checks on the crate. This command requires you to be authenticated with either the `--token` option or using {{man "cargo-login" 1}}. See [the reference](../reference/publishing.html) for more details about packaging and publishing. ## OPTIONS ### Publish Options {{#options}} {{#option "`--dry-run`" }} Perform all checks without uploading. {{/option}} {{> options-token }} {{#option "`--no-verify`" }} Don't verify the contents by building them. {{/option}} {{#option "`--allow-dirty`" }} Allow working directories with uncommitted VCS changes to be packaged. {{/option}} {{> options-index }} {{#option "`--registry` _registry_"}} Name of the registry to publish to. Registry names are defined in [Cargo config files](../reference/config.html). If not specified, and there is a [`package.publish`](../reference/manifest.html#the-publish-field) field in `Cargo.toml` with a single registry, then it will publish to that registry. 
Otherwise it will use the default registry, which is defined by the [`registry.default`](../reference/config.html#registrydefault) config key which defaults to `crates-io`. {{/option}} {{/options}} {{> section-options-package }} ### Compilation Options {{#options}} {{> options-target-triple }} {{> options-target-dir }} {{/options}} {{> section-features }} ### Manifest Options {{#options}} {{> options-manifest-path }} {{> options-locked }} {{/options}} ### Miscellaneous Options {{#options}} {{> options-jobs }} {{> options-keep-going }} {{/options}} ### Display Options {{#options}} {{> options-display }} {{/options}} {{> section-options-common }} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. Publish the current package: cargo publish ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-package" 1}}, {{man "cargo-login" 1}} cargo-0.66.0/src/doc/man/cargo-report.md000066400000000000000000000014611432416201200177300ustar00rootroot00000000000000# cargo-report(1) ## NAME cargo-report - Generate and display various kinds of reports ## SYNOPSIS `cargo report` _type_ [_options_] ## DESCRIPTION Displays a report of the given _type_ - currently, only `future-incompat` is supported ## OPTIONS {{#options}} {{#option "`--id` _id_" }} Show the report with the specified Cargo-generated id {{/option}} {{#option "`-p` _spec_..." "`--package` _spec_..." }} Only display a report for the specified package {{/option}} {{/options}} ## EXAMPLES 1. Display the latest future-incompat report: cargo report future-incompat 2. 
Display the latest future-incompat report for a specific package: cargo report future-incompat --package my-dep:0.0.1 ## SEE ALSO [Future incompat report](../reference/future-incompat-report.html) {{man "cargo" 1}} cargo-0.66.0/src/doc/man/cargo-run.md000066400000000000000000000035641432416201200172270ustar00rootroot00000000000000# cargo-run(1) {{*set actionverb="Run"}} ## NAME cargo-run - Run the current package ## SYNOPSIS `cargo run` [_options_] [`--` _args_] ## DESCRIPTION Run a binary or example of the local package. All the arguments following the two dashes (`--`) are passed to the binary to run. If you're passing arguments to both Cargo and the binary, the ones after `--` go to the binary, the ones before go to Cargo. ## OPTIONS {{> section-options-package }} ### Target Selection When no target selection options are given, `cargo run` will run the binary target. If there are multiple binary targets, you must pass a target flag to choose one. Or, the `default-run` field may be specified in the `[package]` section of `Cargo.toml` to choose the name of the binary to run by default. {{#options}} {{#option "`--bin` _name_" }} Run the specified binary. {{/option}} {{#option "`--example` _name_" }} Run the specified example. {{/option}} {{/options}} {{> section-features }} ### Compilation Options {{#options}} {{> options-target-triple }} {{> options-release }} {{> options-profile }} {{> options-ignore-rust-version }} {{> options-timings }} {{/options}} ### Output Options {{#options}} {{> options-target-dir }} {{/options}} ### Display Options {{#options}} {{> options-display }} {{> options-message-format }} {{/options}} ### Manifest Options {{#options}} {{> options-manifest-path }} {{> options-locked }} {{/options}} {{> section-options-common }} ### Miscellaneous Options {{#options}} {{> options-jobs }} {{> options-keep-going }} {{/options}} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. 
Build the local package and run its main target (assuming only one binary): cargo run 2. Run an example with extra arguments: cargo run --example exname -- --exoption exarg1 exarg2 ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-build" 1}} cargo-0.66.0/src/doc/man/cargo-rustc.md000066400000000000000000000071771432416201200175670ustar00rootroot00000000000000# cargo-rustc(1) {{*set actionverb="Build"}} {{*set multitarget=true}} ## NAME cargo-rustc - Compile the current package, and pass extra options to the compiler ## SYNOPSIS `cargo rustc` [_options_] [`--` _args_] ## DESCRIPTION The specified target for the current package (or package specified by `-p` if provided) will be compiled along with all of its dependencies. The specified _args_ will all be passed to the final compiler invocation, not any of the dependencies. Note that the compiler will still unconditionally receive arguments such as `-L`, `--extern`, and `--crate-type`, and the specified _args_ will simply be added to the compiler invocation. See for documentation on rustc flags. {{> description-one-target }} To pass flags to all compiler processes spawned by Cargo, use the `RUSTFLAGS` [environment variable](../reference/environment-variables.html) or the `build.rustflags` [config value](../reference/config.html). ## OPTIONS {{> section-options-package }} ### Target Selection When no target selection options are given, `cargo rustc` will build all binary and library targets of the selected package. {{> options-targets-bin-auto-built }} {{> options-targets }} {{> section-features }} ### Compilation Options {{#options}} {{> options-target-triple }} {{> options-release }} {{#option "`--profile` _name_" }} Build with the given profile. The `rustc` subcommand will treat the following named profiles with special behaviors: * `check` β€” Builds in the same way as the {{man "cargo-check" 1}} command with the `dev` profile. 
* `test` β€” Builds in the same way as the {{man "cargo-test" 1}} command, enabling building in test mode which will enable tests and enable the `test` cfg option. See [rustc tests](https://doc.rust-lang.org/rustc/tests/index.html) for more detail. * `bench` β€” Builds in the same way as the {{man "cargo-bench" 1}} command, similar to the `test` profile. See [the reference](../reference/profiles.html) for more details on profiles. {{/option}} {{> options-ignore-rust-version }} {{> options-timings }} {{#option "`--crate-type` _crate-type_"}} Build for the given crate type. This flag accepts a comma-separated list of 1 or more crate types, of which the allowed values are the same as `crate-type` field in the manifest for configuring a Cargo target. See [`crate-type` field](../reference/cargo-targets.html#the-crate-type-field) for possible values. If the manifest contains a list, and `--crate-type` is provided, the command-line argument value will override what is in the manifest. This flag only works when building a `lib` or `example` library target. {{/option}} {{/options}} ### Output Options {{#options}} {{> options-target-dir }} {{/options}} ### Display Options {{#options}} {{> options-display }} {{> options-message-format }} {{/options}} ### Manifest Options {{#options}} {{> options-manifest-path }} {{> options-locked }} {{/options}} {{> section-options-common }} ### Miscellaneous Options {{#options}} {{> options-jobs }} {{> options-keep-going }} {{> options-future-incompat }} {{/options}} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. Check if your package (not including dependencies) uses unsafe code: cargo rustc --lib -- -D unsafe-code 2. Try an experimental flag on the nightly compiler, such as this which prints the size of every type: cargo rustc --lib -- -Z print-type-sizes 3. 
Override `crate-type` field in Cargo.toml with command-line option: cargo rustc --lib --crate-type lib,cdylib ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-build" 1}}, {{man "rustc" 1}} cargo-0.66.0/src/doc/man/cargo-rustdoc.md000066400000000000000000000050151432416201200200770ustar00rootroot00000000000000# cargo-rustdoc(1) {{*set actionverb="Document"}} {{*set multitarget=true}} ## NAME cargo-rustdoc - Build a package's documentation, using specified custom flags ## SYNOPSIS `cargo rustdoc` [_options_] [`--` _args_] ## DESCRIPTION The specified target for the current package (or package specified by `-p` if provided) will be documented with the specified _args_ being passed to the final rustdoc invocation. Dependencies will not be documented as part of this command. Note that rustdoc will still unconditionally receive arguments such as `-L`, `--extern`, and `--crate-type`, and the specified _args_ will simply be added to the rustdoc invocation. See for documentation on rustdoc flags. {{> description-one-target }} To pass flags to all rustdoc processes spawned by Cargo, use the `RUSTDOCFLAGS` [environment variable](../reference/environment-variables.html) or the `build.rustdocflags` [config value](../reference/config.html). ## OPTIONS ### Documentation Options {{#options}} {{#option "`--open`" }} Open the docs in a browser after building them. This will use your default browser unless you define another one in the `BROWSER` environment variable or use the [`doc.browser`](../reference/config.html#docbrowser) configuration option. {{/option}} {{/options}} {{> section-options-package }} ### Target Selection When no target selection options are given, `cargo rustdoc` will document all binary and library targets of the selected package. The binary will be skipped if its name is the same as the lib target. Binaries are skipped if they have `required-features` that are missing. 
{{> options-targets }} {{> section-features }} ### Compilation Options {{#options}} {{> options-target-triple }} {{> options-release }} {{> options-profile }} {{> options-ignore-rust-version }} {{> options-timings }} {{/options}} ### Output Options {{#options}} {{> options-target-dir }} {{/options}} ### Display Options {{#options}} {{> options-display }} {{> options-message-format }} {{/options}} ### Manifest Options {{#options}} {{> options-manifest-path }} {{> options-locked }} {{/options}} {{> section-options-common }} ### Miscellaneous Options {{#options}} {{> options-jobs }} {{> options-keep-going }} {{/options}} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. Build documentation with custom CSS included from a given file: cargo rustdoc --lib -- --extend-css extra.css ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-doc" 1}}, {{man "rustdoc" 1}} cargo-0.66.0/src/doc/man/cargo-search.md000066400000000000000000000015501432416201200176610ustar00rootroot00000000000000# cargo-search(1) ## NAME cargo-search - Search packages in crates.io ## SYNOPSIS `cargo search` [_options_] [_query_...] ## DESCRIPTION This performs a textual search for crates on . The matching crates will be displayed along with their description in TOML format suitable for copying into a `Cargo.toml` manifest. ## OPTIONS ### Search Options {{#options}} {{#option "`--limit` _limit_" }} Limit the number of results (default: 10, max: 100). {{/option}} {{> options-index }} {{> options-registry }} {{/options}} ### Display Options {{#options}} {{> options-display }} {{/options}} {{> section-options-common }} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. 
Search for a package from crates.io: cargo search serde ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-install" 1}}, {{man "cargo-publish" 1}} cargo-0.66.0/src/doc/man/cargo-test.md000066400000000000000000000122771432416201200174030ustar00rootroot00000000000000# cargo-test(1) {{*set actionverb="Test"}} {{*set nouns="tests"}} {{*set multitarget=true}} ## NAME cargo-test - Execute unit and integration tests of a package ## SYNOPSIS `cargo test` [_options_] [_testname_] [`--` _test-options_] ## DESCRIPTION Compile and execute unit, integration, and documentation tests. The test filtering argument `TESTNAME` and all the arguments following the two dashes (`--`) are passed to the test binaries and thus to _libtest_ (rustc's built in unit-test and micro-benchmarking framework). If you're passing arguments to both Cargo and the binary, the ones after `--` go to the binary, the ones before go to Cargo. For details about libtest's arguments see the output of `cargo test -- --help` and check out the rustc book's chapter on how tests work at . As an example, this will filter for tests with `foo` in their name and run them on 3 threads in parallel: cargo test foo -- --test-threads 3 Tests are built with the `--test` option to `rustc` which creates a special executable by linking your code with libtest. The executable automatically runs all functions annotated with the `#[test]` attribute in multiple threads. `#[bench]` annotated functions will also be run with one iteration to verify that they are functional. If the package contains multiple test targets, each target compiles to a special executable as aforementioned, and then is run serially. The libtest harness may be disabled by setting `harness = false` in the target manifest settings, in which case your code will need to provide its own `main` function to handle running tests. ### Documentation tests Documentation tests are also run by default, which is handled by `rustdoc`. 
It extracts code samples from documentation comments of the library target, and then executes them. Different from normal test targets, each code block compiles to a doctest executable on the fly with `rustc`. These executables run in parallel in separate processes. The compilation of a code block is in fact a part of test function controlled by libtest, so some options such as `--jobs` might not take effect. Note that this execution model of doctests is not guaranteed and may change in the future; beware of depending on it. See the [rustdoc book](https://doc.rust-lang.org/rustdoc/) for more information on writing doc tests. ## OPTIONS ### Test Options {{> options-test }} {{> section-package-selection }} ### Target Selection When no target selection options are given, `cargo test` will build the following targets of the selected packages: - lib β€” used to link with binaries, examples, integration tests, and doc tests - bins (only if integration tests are built and required features are available) - examples β€” to ensure they compile - lib as a unit test - bins as unit tests - integration tests - doc tests for the lib target The default behavior can be changed by setting the `test` flag for the target in the manifest settings. Setting examples to `test = true` will build and run the example as a test. Setting targets to `test = false` will stop them from being tested by default. Target selection options that take a target by name ignore the `test` flag and will always test the given target. Doc tests for libraries may be disabled by setting `doctest = false` for the library in the manifest. {{> options-targets-bin-auto-built }} {{> options-targets }} {{#options}} {{#option "`--doc`" }} Test only the library's documentation. This cannot be mixed with other target options. 
{{/option}} {{/options}} {{> section-features }} ### Compilation Options {{#options}} {{> options-target-triple }} {{> options-release }} {{> options-profile }} {{> options-ignore-rust-version }} {{> options-timings }} {{/options}} ### Output Options {{#options}} {{> options-target-dir }} {{/options}} ### Display Options By default the Rust test harness hides output from test execution to keep results readable. Test output can be recovered (e.g., for debugging) by passing `--nocapture` to the test binaries: cargo test -- --nocapture {{#options}} {{> options-display }} {{> options-message-format }} {{/options}} ### Manifest Options {{#options}} {{> options-manifest-path }} {{> options-locked }} {{/options}} {{> section-options-common }} ### Miscellaneous Options The `--jobs` argument affects the building of the test executable but does not affect how many threads are used when running the tests. The Rust test harness includes an option to control the number of threads used: cargo test -j 2 -- --test-threads=2 {{#options}} {{> options-jobs }} {{> options-keep-going }} {{> options-future-incompat }} {{/options}} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. Execute all the unit and integration tests of the current package: cargo test 2. Run only tests whose names match against a filter string: cargo test name_filter 3. 
Run only a specific test within a specific integration test: cargo test --test int_test_name -- modname::test_name ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-bench" 1}}, [types of tests](../reference/cargo-targets.html#tests), [how to write tests](https://doc.rust-lang.org/rustc/tests/index.html) cargo-0.66.0/src/doc/man/cargo-tree.md000066400000000000000000000177321432416201200173640ustar00rootroot00000000000000# cargo-tree(1) {{*set actionverb="Display"}} {{*set noall=true}} ## NAME cargo-tree - Display a tree visualization of a dependency graph ## SYNOPSIS `cargo tree` [_options_] ## DESCRIPTION This command will display a tree of dependencies to the terminal. An example of a simple project that depends on the "rand" package: ``` myproject v0.1.0 (/myproject) └── rand v0.7.3 β”œβ”€β”€ getrandom v0.1.14 β”‚ β”œβ”€β”€ cfg-if v0.1.10 β”‚ └── libc v0.2.68 β”œβ”€β”€ libc v0.2.68 (*) β”œβ”€β”€ rand_chacha v0.2.2 β”‚ β”œβ”€β”€ ppv-lite86 v0.2.6 β”‚ └── rand_core v0.5.1 β”‚ └── getrandom v0.1.14 (*) └── rand_core v0.5.1 (*) [build-dependencies] └── cc v1.0.50 ``` Packages marked with `(*)` have been "de-duplicated". The dependencies for the package have already been shown elsewhere in the graph, and so are not repeated. Use the `--no-dedupe` option to repeat the duplicates. The `-e` flag can be used to select the dependency kinds to display. The "features" kind changes the output to display the features enabled by each dependency. For example, `cargo tree -e features`: ``` myproject v0.1.0 (/myproject) └── log feature "serde" └── log v0.4.8 β”œβ”€β”€ serde v1.0.106 └── cfg-if feature "default" └── cfg-if v0.1.10 ``` In this tree, `myproject` depends on `log` with the `serde` feature. `log` in turn depends on `cfg-if` with "default" features. When using `-e features` it can be helpful to use `-i` flag to show how the features flow into a package. See the examples below for more detail. 
## OPTIONS ### Tree Options {{#options}} {{#option "`-i` _spec_" "`--invert` _spec_" }} Show the reverse dependencies for the given package. This flag will invert the tree and display the packages that depend on the given package. Note that in a workspace, by default it will only display the package's reverse dependencies inside the tree of the workspace member in the current directory. The `--workspace` flag can be used to extend it so that it will show the package's reverse dependencies across the entire workspace. The `-p` flag can be used to display the package's reverse dependencies only with the subtree of the package given to `-p`. {{/option}} {{#option "`--prune` _spec_" }} Prune the given package from the display of the dependency tree. {{/option}} {{#option "`--depth` _depth_" }} Maximum display depth of the dependency tree. A depth of 1 displays the direct dependencies, for example. {{/option}} {{#option "`--no-dedupe`" }} Do not de-duplicate repeated dependencies. Usually, when a package has already displayed its dependencies, further occurrences will not re-display its dependencies, and will include a `(*)` to indicate it has already been shown. This flag will cause those duplicates to be repeated. {{/option}} {{#option "`-d`" "`--duplicates`" }} Show only dependencies which come in multiple versions (implies `--invert`). When used with the `-p` flag, only shows duplicates within the subtree of the given package. It can be beneficial for build times and executable sizes to avoid building that same package multiple times. This flag can help identify the offending packages. You can then investigate if the package that depends on the duplicate with the older version can be updated to the newer version so that only one instance is built. {{/option}} {{#option "`-e` _kinds_" "`--edges` _kinds_" }} The dependency kinds to display. Takes a comma separated list of values: - `all` β€” Show all edge kinds. - `normal` β€” Show normal dependencies. 
- `build` — Show build dependencies.
- `dev` — Show development dependencies.
- `features` — Show features enabled by each dependency. If this is the only kind given, then it will automatically include the other dependency kinds.
- `no-normal` — Do not include normal dependencies.
- `no-build` — Do not include build dependencies.
- `no-dev` — Do not include development dependencies.
- `no-proc-macro` — Do not include procedural macro dependencies.

The `normal`, `build`, `dev`, and `all` dependency kinds cannot be mixed with
`no-normal`, `no-build`, or `no-dev` dependency kinds.

The default is `normal,build,dev`.
{{/option}}

{{#option "`--target` _triple_" }}
Filter dependencies matching the given target-triple. The default is the host
platform. Use the value `all` to include *all* targets.
{{/option}}

{{/options}}

### Tree Formatting Options

{{#options}}

{{#option "`--charset` _charset_" }}
Chooses the character set to use for the tree. Valid values are "utf8" or
"ascii". Default is "utf8".
{{/option}}

{{#option "`-f` _format_" "`--format` _format_" }}
Set the format string for each package. The default is "{p}".

This is an arbitrary string which will be used to display each package. The following
strings will be replaced with the corresponding value:

- `{p}` — The package name.
- `{l}` — The package license.
- `{r}` — The package repository URL.
- `{f}` — Comma-separated list of package features that are enabled.
- `{lib}` — The name, as used in a `use` statement, of the package's library.
{{/option}}

{{#option "`--prefix` _prefix_" }}
Sets how each line is displayed. The _prefix_ value can be one of:

- `indent` (default) — Shows each line indented as a tree.
- `depth` — Show as a list, with the numeric depth printed before each entry.
- `none` — Show as a flat list.
{{/option}} {{/options}} {{> section-package-selection }} ### Manifest Options {{#options}} {{> options-manifest-path }} {{> options-locked }} {{/options}} {{> section-features }} ### Display Options {{#options}} {{> options-display }} {{/options}} {{> section-options-common }} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. Display the tree for the package in the current directory: cargo tree 2. Display all the packages that depend on the `syn` package: cargo tree -i syn 3. Show the features enabled on each package: cargo tree --format "{p} {f}" 4. Show all packages that are built multiple times. This can happen if multiple semver-incompatible versions appear in the tree (like 1.0.0 and 2.0.0). cargo tree -d 5. Explain why features are enabled for the `syn` package: cargo tree -e features -i syn The `-e features` flag is used to show features. The `-i` flag is used to invert the graph so that it displays the packages that depend on `syn`. An example of what this would display: ``` syn v1.0.17 β”œβ”€β”€ syn feature "clone-impls" β”‚ └── syn feature "default" β”‚ └── rustversion v1.0.2 β”‚ └── rustversion feature "default" β”‚ └── myproject v0.1.0 (/myproject) β”‚ └── myproject feature "default" (command-line) β”œβ”€β”€ syn feature "default" (*) β”œβ”€β”€ syn feature "derive" β”‚ └── syn feature "default" (*) β”œβ”€β”€ syn feature "full" β”‚ └── rustversion v1.0.2 (*) β”œβ”€β”€ syn feature "parsing" β”‚ └── syn feature "default" (*) β”œβ”€β”€ syn feature "printing" β”‚ └── syn feature "default" (*) β”œβ”€β”€ syn feature "proc-macro" β”‚ └── syn feature "default" (*) └── syn feature "quote" β”œβ”€β”€ syn feature "printing" (*) └── syn feature "proc-macro" (*) ``` To read this graph, you can follow the chain for each feature from the root to see why it is included. 
For example, the "full" feature is added by the `rustversion` crate which is included from `myproject` (with the default features), and `myproject` is the package selected on the command-line. All of the other `syn` features are added by the "default" feature ("quote" is added by "printing" and "proc-macro", both of which are default features). If you're having difficulty cross-referencing the de-duplicated `(*)` entries, try with the `--no-dedupe` flag to get the full output. ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-metadata" 1}} cargo-0.66.0/src/doc/man/cargo-uninstall.md000066400000000000000000000021141432416201200204220ustar00rootroot00000000000000# cargo-uninstall(1) ## NAME cargo-uninstall - Remove a Rust binary ## SYNOPSIS `cargo uninstall` [_options_] [_spec_...] ## DESCRIPTION This command removes a package installed with {{man "cargo-install" 1}}. The _spec_ argument is a package ID specification of the package to remove (see {{man "cargo-pkgid" 1}}). By default all binaries are removed for a crate but the `--bin` and `--example` flags can be used to only remove particular binaries. {{> description-install-root }} ## OPTIONS ### Install Options {{#options}} {{#option "`-p`" "`--package` _spec_..." }} Package to uninstall. {{/option}} {{#option "`--bin` _name_..." }} Only uninstall the binary _name_. {{/option}} {{#option "`--root` _dir_" }} Directory to uninstall packages from. {{/option}} {{/options}} ### Display Options {{#options}} {{> options-display }} {{/options}} {{> section-options-common }} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. Uninstall a previously installed package. 
cargo uninstall ripgrep ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-install" 1}} cargo-0.66.0/src/doc/man/cargo-update.md000066400000000000000000000045231432416201200177010ustar00rootroot00000000000000# cargo-update(1) ## NAME cargo-update - Update dependencies as recorded in the local lock file ## SYNOPSIS `cargo update` [_options_] ## DESCRIPTION This command will update dependencies in the `Cargo.lock` file to the latest version. If the `Cargo.lock` file does not exist, it will be created with the latest available versions. ## OPTIONS ### Update Options {{#options}} {{#option "`-p` _spec_..." "`--package` _spec_..." }} Update only the specified packages. This flag may be specified multiple times. See {{man "cargo-pkgid" 1}} for the SPEC format. If packages are specified with the `-p` flag, then a conservative update of the lockfile will be performed. This means that only the dependency specified by SPEC will be updated. Its transitive dependencies will be updated only if SPEC cannot be updated without updating dependencies. All other dependencies will remain locked at their currently recorded versions. If `-p` is not specified, all dependencies are updated. {{/option}} {{#option "`--aggressive`" }} When used with `-p`, dependencies of _spec_ are forced to update as well. Cannot be used with `--precise`. {{/option}} {{#option "`--precise` _precise_" }} When used with `-p`, allows you to specify a specific version number to set the package to. If the package comes from a git repository, this can be a git revision (such as a SHA hash or tag). {{/option}} {{#option "`-w`" "`--workspace`" }} Attempt to update only packages defined in the workspace. Other packages are updated only if they don't already exist in the lockfile. This option is useful for updating `Cargo.lock` after you've changed version numbers in `Cargo.toml`. {{/option}} {{#option "`--dry-run`" }} Displays what would be updated, but doesn't actually write the lockfile. 
{{/option}} {{/options}} ### Display Options {{#options}} {{> options-display }} {{/options}} ### Manifest Options {{#options}} {{> options-manifest-path }} {{> options-locked }} {{/options}} {{> section-options-common }} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. Update all dependencies in the lockfile: cargo update 2. Update only specific dependencies: cargo update -p foo -p bar 3. Set a specific dependency to a specific version: cargo update -p foo --precise 1.2.3 ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-generate-lockfile" 1}} cargo-0.66.0/src/doc/man/cargo-vendor.md000066400000000000000000000043251432416201200177140ustar00rootroot00000000000000# cargo-vendor(1) ## NAME cargo-vendor - Vendor all dependencies locally ## SYNOPSIS `cargo vendor` [_options_] [_path_] ## DESCRIPTION This cargo subcommand will vendor all crates.io and git dependencies for a project into the specified directory at ``. After this command completes the vendor directory specified by `` will contain all remote sources from dependencies specified. Additional manifests beyond the default one can be specified with the `-s` option. The `cargo vendor` command will also print out the configuration necessary to use the vendored sources, which you will need to add to `.cargo/config.toml`. ## OPTIONS ### Vendor Options {{#options}} {{#option "`-s` _manifest_" "`--sync` _manifest_" }} Specify an extra `Cargo.toml` manifest to workspaces which should also be vendored and synced to the output. May be specified multiple times. 
{{/option}} {{#option "`--no-delete`" }} Don't delete the "vendor" directory when vendoring, but rather keep all existing contents of the vendor directory {{/option}} {{#option "`--respect-source-config`" }} Instead of ignoring `[source]` configuration by default in `.cargo/config.toml` read it and use it when downloading crates from crates.io, for example {{/option}} {{#option "`--versioned-dirs`" }} Normally versions are only added to disambiguate multiple versions of the same package. This option causes all directories in the "vendor" directory to be versioned, which makes it easier to track the history of vendored packages over time, and can help with the performance of re-vendoring when only a subset of the packages have changed. {{/option}} {{/options}} ### Manifest Options {{#options}} {{> options-manifest-path }} {{> options-locked }} {{/options}} ### Display Options {{#options}} {{> options-display }} {{/options}} {{> section-options-common }} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. Vendor all dependencies into a local "vendor" folder cargo vendor 2. Vendor all dependencies into a local "third-party/vendor" folder cargo vendor third-party/vendor 3. Vendor the current workspace as well as another to "vendor" cargo vendor -s ../path/to/Cargo.toml ## SEE ALSO {{man "cargo" 1}} cargo-0.66.0/src/doc/man/cargo-verify-project.md000066400000000000000000000015631432416201200213700ustar00rootroot00000000000000# cargo-verify-project(1) ## NAME cargo-verify-project - Check correctness of crate manifest ## SYNOPSIS `cargo verify-project` [_options_] ## DESCRIPTION This command will parse the local manifest and check its validity. It emits a JSON object with the result. 
A successful validation will display: {"success":"true"} An invalid workspace will display: {"invalid":"human-readable error message"} ## OPTIONS ### Display Options {{#options}} {{> options-display }} {{/options}} ### Manifest Options {{#options}} {{> options-manifest-path }} {{> options-locked }} {{/options}} {{> section-options-common }} {{> section-environment }} ## EXIT STATUS * `0`: The workspace is OK. * `1`: The workspace is invalid. ## EXAMPLES 1. Check the current workspace for errors: cargo verify-project ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-package" 1}} cargo-0.66.0/src/doc/man/cargo-version.md000066400000000000000000000010101432416201200200700ustar00rootroot00000000000000# cargo-version(1) ## NAME cargo-version - Show version information ## SYNOPSIS `cargo version` [_options_] ## DESCRIPTION Displays the version of Cargo. ## OPTIONS {{#options}} {{#option "`-v`" "`--verbose`" }} Display additional version information. {{/option}} {{/options}} ## EXAMPLES 1. Display the version: cargo version 2. The version is also available via flags: cargo --version cargo -V 3. Display extra version information: cargo -Vv ## SEE ALSO {{man "cargo" 1}} cargo-0.66.0/src/doc/man/cargo-yank.md000066400000000000000000000026671432416201200173700ustar00rootroot00000000000000# cargo-yank(1) ## NAME cargo-yank - Remove a pushed crate from the index ## SYNOPSIS `cargo yank` [_options_] _crate_@_version_\ `cargo yank` [_options_] `--version` _version_ [_crate_] ## DESCRIPTION The yank command removes a previously published crate's version from the server's index. This command does not delete any data, and the crate will still be available for download via the registry's download link. Note that existing crates locked to a yanked version will still be able to download the yanked version to use it. Cargo will, however, not allow any new crates to be locked to any yanked version. 
This command requires you to be authenticated with either the `--token` option or using {{man "cargo-login" 1}}. If the crate name is not specified, it will use the package name from the current directory. ## OPTIONS ### Yank Options {{#options}} {{#option "`--vers` _version_" "`--version` _version_" }} The version to yank or un-yank. {{/option}} {{#option "`--undo`" }} Undo a yank, putting a version back into the index. {{/option}} {{> options-token }} {{> options-index }} {{> options-registry }} {{/options}} ### Display Options {{#options}} {{> options-display }} {{/options}} {{> section-options-common }} {{> section-environment }} {{> section-exit-status }} ## EXAMPLES 1. Yank a crate from the index: cargo yank foo@1.0.7 ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-login" 1}}, {{man "cargo-publish" 1}} cargo-0.66.0/src/doc/man/cargo.md000066400000000000000000000137571432416201200164320ustar00rootroot00000000000000# cargo(1) ## NAME cargo - The Rust package manager ## SYNOPSIS `cargo` [_options_] _command_ [_args_]\ `cargo` [_options_] `--version`\ `cargo` [_options_] `--list`\ `cargo` [_options_] `--help`\ `cargo` [_options_] `--explain` _code_ ## DESCRIPTION This program is a package manager and build tool for the Rust language, available at . ## COMMANDS ### Build Commands {{man "cargo-bench" 1}}\     Execute benchmarks of a package. {{man "cargo-build" 1}}\     Compile a package. {{man "cargo-check" 1}}\     Check a local package and all of its dependencies for errors. {{man "cargo-clean" 1}}\     Remove artifacts that Cargo has generated in the past. {{man "cargo-doc" 1}}\     Build a package's documentation. {{man "cargo-fetch" 1}}\     Fetch dependencies of a package from the network. {{man "cargo-fix" 1}}\     Automatically fix lint warnings reported by rustc. {{man "cargo-run" 1}}\     Run a binary or example of the local package. {{man "cargo-rustc" 1}}\     Compile a package, and pass extra options to the compiler. 
{{man "cargo-rustdoc" 1}}\     Build a package's documentation, using specified custom flags. {{man "cargo-test" 1}}\     Execute unit and integration tests of a package. ### Manifest Commands {{man "cargo-generate-lockfile" 1}}\     Generate `Cargo.lock` for a project. {{man "cargo-locate-project" 1}}\     Print a JSON representation of a `Cargo.toml` file's location. {{man "cargo-metadata" 1}}\     Output the resolved dependencies of a package in machine-readable format. {{man "cargo-pkgid" 1}}\     Print a fully qualified package specification. {{man "cargo-tree" 1}}\     Display a tree visualization of a dependency graph. {{man "cargo-update" 1}}\     Update dependencies as recorded in the local lock file. {{man "cargo-vendor" 1}}\     Vendor all dependencies locally. {{man "cargo-verify-project" 1}}\     Check correctness of crate manifest. ### Package Commands {{man "cargo-init" 1}}\     Create a new Cargo package in an existing directory. {{man "cargo-install" 1}}\     Build and install a Rust binary. {{man "cargo-new" 1}}\     Create a new Cargo package. {{man "cargo-search" 1}}\     Search packages in crates.io. {{man "cargo-uninstall" 1}}\     Remove a Rust binary. ### Publishing Commands {{man "cargo-login" 1}}\     Save an API token from the registry locally. {{man "cargo-owner" 1}}\     Manage the owners of a crate on the registry. {{man "cargo-package" 1}}\     Assemble the local package into a distributable tarball. {{man "cargo-publish" 1}}\     Upload a package to the registry. {{man "cargo-yank" 1}}\     Remove a pushed crate from the index. ### General Commands {{man "cargo-help" 1}}\     Display help information about Cargo. {{man "cargo-version" 1}}\     Show version information. ## OPTIONS ### Special Options {{#options}} {{#option "`-V`" "`--version`" }} Print version info and exit. If used with `--verbose`, prints extra information. {{/option}} {{#option "`--list`" }} List all installed Cargo subcommands. 
If used with `--verbose`, prints extra information. {{/option}} {{#option "`--explain` _code_" }} Run `rustc --explain CODE` which will print out a detailed explanation of an error message (for example, `E0004`). {{/option}} {{/options}} ### Display Options {{#options}} {{> options-display }} {{/options}} ### Manifest Options {{#options}} {{> options-locked }} {{/options}} {{> section-options-common }} {{> section-environment }} {{> section-exit-status }} ## FILES `~/.cargo/`\     Default location for Cargo's "home" directory where it stores various files. The location can be changed with the `CARGO_HOME` environment variable. `$CARGO_HOME/bin/`\     Binaries installed by {{man "cargo-install" 1}} will be located here. If using [rustup], executables distributed with Rust are also located here. `$CARGO_HOME/config.toml`\     The global configuration file. See [the reference](../reference/config.html) for more information about configuration files. `.cargo/config.toml`\     Cargo automatically searches for a file named `.cargo/config.toml` in the current directory, and all parent directories. These configuration files will be merged with the global configuration file. `$CARGO_HOME/credentials.toml`\     Private authentication information for logging in to a registry. `$CARGO_HOME/registry/`\     This directory contains cached downloads of the registry index and any downloaded dependencies. `$CARGO_HOME/git/`\     This directory contains cached downloads of git dependencies. Please note that the internal structure of the `$CARGO_HOME` directory is not stable yet and may be subject to change. [rustup]: https://rust-lang.github.io/rustup/ ## EXAMPLES 1. Build a local package and all of its dependencies: cargo build 2. Build a package with optimizations: cargo build --release 3. Run tests for a cross-compiled target: cargo test --target i686-unknown-linux-gnu 4. Create a new package that builds an executable: cargo new foobar 5. 
Create a package in the current directory: mkdir foo && cd foo cargo init . 6. Learn about a command's options and usage: cargo help clean ## BUGS See for issues. ## SEE ALSO {{man "rustc" 1}}, {{man "rustdoc" 1}} cargo-0.66.0/src/doc/man/generated_txt/000077500000000000000000000000001432416201200176355ustar00rootroot00000000000000cargo-0.66.0/src/doc/man/generated_txt/cargo-add.txt000066400000000000000000000151211432416201200222170ustar00rootroot00000000000000CARGO-ADD(1) NAME cargo-add - Add dependencies to a Cargo.toml manifest file SYNOPSIS cargo add [options] crate... cargo add [options] --path path cargo add [options] --git url [crate...] DESCRIPTION This command can add or modify dependencies. The source for the dependency can be specified with: o crate@version: Fetch from a registry with a version constraint of "version" o --path path: Fetch from the specified path o --git url: Pull from a git repo at url If no source is specified, then a best effort will be made to select one, including: o Existing dependencies in other tables (like dev-dependencies) o Workspace members o Latest release in the registry When you add a package that is already present, the existing entry will be updated with the flags specified. Upon successful invocation, the enabled (+) and disabled (-) features of the specified dependency will be listed in the command's output. OPTIONS Source options --git url Git URL to add the specified crate from . --branch branch Branch to use when adding from git. --tag tag Tag to use when adding from git. --rev sha Specific commit to use when adding from git. --path path Filesystem path to local crate to add. --registry registry Name of the registry to use. Registry names are defined in Cargo config files . If not specified, the default registry is used, which is defined by the registry.default config key which defaults to crates-io. Section options --dev Add as a development dependency . --build Add as a build dependency . 
--target target Add as a dependency to the given target platform . Dependency options --rename name Rename the dependency. --optional Mark the dependency as optional . --no-optional Mark the dependency as required . --no-default-features Disable the default features . --default-features Re-enable the default features . --features features Space or comma separated list of features to activate . When adding multiple crates, the features for a specific crate may be enabled with package-name/feature-name syntax. This flag may be specified multiple times, which enables all specified features. Display Options -v, --verbose Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . -q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . Manifest Options --manifest-path path Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory. Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. 
-Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. Add regex as a dependency cargo add regex 2. Add trybuild as a dev-dependency cargo add --dev trybuild 3. Add an older version of nom as a dependency cargo add nom@5 4. Add support for serializing data structures to json with derives cargo add serde serde_json -F serde/derive SEE ALSO cargo(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-bench.txt000066400000000000000000000442001432416201200225460ustar00rootroot00000000000000CARGO-BENCH(1) NAME cargo-bench - Execute benchmarks of a package SYNOPSIS cargo bench [options] [benchname] [-- bench-options] DESCRIPTION Compile and execute benchmarks. The benchmark filtering argument benchname and all the arguments following the two dashes (--) are passed to the benchmark binaries and thus to libtest (rustc's built in unit-test and micro-benchmarking framework). If you are passing arguments to both Cargo and the binary, the ones after -- go to the binary, the ones before go to Cargo. For details about libtest's arguments see the output of cargo bench -- --help and check out the rustc book's chapter on how tests work at . As an example, this will run only the benchmark named foo (and skip other similarly named benchmarks like foobar): cargo bench -- foo --exact Benchmarks are built with the --test option to rustc which creates a special executable by linking your code with libtest. The executable automatically runs all functions annotated with the #[bench] attribute. Cargo passes the --bench flag to the test harness to tell it to run only benchmarks. The libtest harness may be disabled by setting harness = false in the target manifest settings, in which case your code will need to provide its own main function to handle running benchmarks. 
Note: The #[bench] attribute is currently unstable and only available on the nightly channel . There are some packages available on crates.io that may help with running benchmarks on the stable channel, such as Criterion . By default, cargo bench uses the bench profile , which enables optimizations and disables debugging information. If you need to debug a benchmark, you can use the --profile=dev command-line option to switch to the dev profile. You can then run the debug-enabled benchmark within a debugger. OPTIONS Benchmark Options --no-run Compile, but don't run benchmarks. --no-fail-fast Run all benchmarks regardless of failure. Without this flag, Cargo will exit after the first executable fails. The Rust test harness will run all benchmarks within the executable to completion, this flag only applies to the executable as a whole. Package Selection By default, when no package selection options are given, the packages selected depend on the selected manifest file (based on the current working directory if --manifest-path is not given). If the manifest is the root of a workspace then the workspaces default members are selected, otherwise only the package defined by the manifest will be selected. The default members of a workspace can be set explicitly with the workspace.default-members key in the root manifest. If this is not set, a virtual workspace will include all workspace members (equivalent to passing --workspace), and a non-virtual workspace will include only the root crate itself. -p spec..., --package spec... Benchmark only the specified packages. See cargo-pkgid(1) for the SPEC format. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. --workspace Benchmark all members in the workspace. --all Deprecated alias for --workspace. --exclude SPEC... 
Exclude the specified packages. Must be used in conjunction with the --workspace flag. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. Target Selection When no target selection options are given, cargo bench will build the following targets of the selected packages: o lib β€” used to link with binaries and benchmarks o bins (only if benchmark targets are built and required features are available) o lib as a benchmark o bins as benchmarks o benchmark targets The default behavior can be changed by setting the bench flag for the target in the manifest settings. Setting examples to bench = true will build and run the example as a benchmark. Setting targets to bench = false will stop them from being benchmarked by default. Target selection options that take a target by name ignore the bench flag and will always benchmark the given target. Binary targets are automatically built if there is an integration test or benchmark being selected to benchmark. This allows an integration test to execute the binary to exercise and test its behavior. The CARGO_BIN_EXE_ environment variable is set when the integration test is built so that it can use the env macro to locate the executable. Passing target selection flags will benchmark only the specified targets. Note that --bin, --example, --test and --bench flags also support common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each glob pattern. --lib Benchmark the package's library. --bin name... Benchmark the specified binary. This flag may be specified multiple times and supports common Unix glob patterns. --bins Benchmark all binary targets. --example name... Benchmark the specified example. 
This flag may be specified multiple times and supports common Unix glob patterns. --examples Benchmark all example targets. --test name... Benchmark the specified integration test. This flag may be specified multiple times and supports common Unix glob patterns. --tests Benchmark all targets in test mode that have the test = true manifest flag set. By default this includes the library and binaries built as unittests, and integration tests. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a unittest, and once as a dependency for binaries, integration tests, etc.). Targets may be enabled or disabled by setting the test flag in the manifest settings for the target. --bench name... Benchmark the specified benchmark. This flag may be specified multiple times and supports common Unix glob patterns. --benches Benchmark all targets in benchmark mode that have the bench = true manifest flag set. By default this includes the library and binaries built as benchmarks, and bench targets. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a benchmark, and once as a dependency for binaries, benchmarks, etc.). Targets may be enabled or disabled by setting the bench flag in the manifest settings for the target. --all-targets Benchmark all targets. This is equivalent to specifying --lib --bins --tests --benches --examples. Feature Selection The feature flags allow you to control which features are enabled. When no feature options are given, the default feature is activated for every selected package. See the features documentation for more details. -F features, --features features Space or comma separated list of features to activate. Features of workspace members may be enabled with package-name/feature-name syntax. This flag may be specified multiple times, which enables all specified features. --all-features Activate all available features of all selected packages. 
--no-default-features Do not activate the default feature of the selected packages. Compilation Options --target triple Benchmark for the given architecture. The default is the host architecture. The general format of the triple is ---. Run rustc --print target-list for a list of supported targets. This flag may be specified multiple times. This may also be specified with the build.target config value . Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details. --profile name Benchmark with the given profile. See the the reference for more details on profiles. --ignore-rust-version Benchmark the target even if the selected Rust compiler is older than the required Rust version as configured in the project's rust-version field. --timings=fmts Output information how long each compilation takes, and track concurrency information over time. Accepts an optional comma-separated list of output formats; --timings without an argument will default to --timings=html. Specifying an output format (rather than the default) is unstable and requires -Zunstable-options. Valid output formats: o html (unstable, requires -Zunstable-options): Write a human-readable file cargo-timing.html to the target/cargo-timings directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine-readable timing data. o json (unstable, requires -Zunstable-options): Emit machine-readable JSON information about timing information. Output Options --target-dir directory Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the build.target-dir config value . Defaults to target in the root of the workspace. 
Display Options By default the Rust test harness hides output from benchmark execution to keep results readable. Benchmark output can be recovered (e.g., for debugging) by passing --nocapture to the benchmark binaries: cargo bench -- --nocapture -v, --verbose Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . -q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . --message-format fmt The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values: o human (default): Display in a human-readable text format. Conflicts with short and json. o short: Emit shorter, human-readable text messages. Conflicts with human and json. o json: Emit JSON messages to stdout. See the reference for more details. Conflicts with human and short. o json-diagnostic-short: Ensure the rendered field of JSON messages contains the "short" rendering from rustc. Cannot be used with human or short. o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with human or short. o json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with human or short. Manifest Options --manifest-path path Path to the Cargo.toml file. 
By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory. --frozen, --locked Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date. These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access. --offline Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline. May also be specified with the net.offline config value . Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. 
Miscellaneous Options The --jobs argument affects the building of the benchmark executable but does not affect how many threads are used when running the benchmarks. The Rust test harness runs benchmarks serially in a single thread. -j N, --jobs N Number of parallel jobs to run. May also be specified with the build.jobs config value . Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0. --keep-going Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires -Zunstable-options. ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. Build and execute all the benchmarks of the current package: cargo bench 2. Run only a specific benchmark within a specific benchmark target: cargo bench --bench bench_name -- modname::some_benchmark SEE ALSO cargo(1), cargo-test(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-build.txt000066400000000000000000000370231432416201200225730ustar00rootroot00000000000000CARGO-BUILD(1) NAME cargo-build - Compile the current package SYNOPSIS cargo build [options] DESCRIPTION Compile local packages and all of their dependencies. OPTIONS Package Selection By default, when no package selection options are given, the packages selected depend on the selected manifest file (based on the current working directory if --manifest-path is not given). If the manifest is the root of a workspace then the workspaces default members are selected, otherwise only the package defined by the manifest will be selected. The default members of a workspace can be set explicitly with the workspace.default-members key in the root manifest. 
If this is not set, a virtual workspace will include all workspace members (equivalent to passing --workspace), and a non-virtual workspace will include only the root crate itself. -p spec..., --package spec... Build only the specified packages. See cargo-pkgid(1) for the SPEC format. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. --workspace Build all members in the workspace. --all Deprecated alias for --workspace. --exclude SPEC... Exclude the specified packages. Must be used in conjunction with the --workspace flag. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. Target Selection When no target selection options are given, cargo build will build all binary and library targets of the selected packages. Binaries are skipped if they have required-features that are missing. Binary targets are automatically built if there is an integration test or benchmark being selected to build. This allows an integration test to execute the binary to exercise and test its behavior. The CARGO_BIN_EXE_ environment variable is set when the integration test is built so that it can use the env macro to locate the executable. Passing target selection flags will build only the specified targets. Note that --bin, --example, --test and --bench flags also support common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each glob pattern. --lib Build the package's library. --bin name... Build the specified binary. 
This flag may be specified multiple times and supports common Unix glob patterns. --bins Build all binary targets. --example name... Build the specified example. This flag may be specified multiple times and supports common Unix glob patterns. --examples Build all example targets. --test name... Build the specified integration test. This flag may be specified multiple times and supports common Unix glob patterns. --tests Build all targets in test mode that have the test = true manifest flag set. By default this includes the library and binaries built as unittests, and integration tests. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a unittest, and once as a dependency for binaries, integration tests, etc.). Targets may be enabled or disabled by setting the test flag in the manifest settings for the target. --bench name... Build the specified benchmark. This flag may be specified multiple times and supports common Unix glob patterns. --benches Build all targets in benchmark mode that have the bench = true manifest flag set. By default this includes the library and binaries built as benchmarks, and bench targets. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a benchmark, and once as a dependency for binaries, benchmarks, etc.). Targets may be enabled or disabled by setting the bench flag in the manifest settings for the target. --all-targets Build all targets. This is equivalent to specifying --lib --bins --tests --benches --examples. Feature Selection The feature flags allow you to control which features are enabled. When no feature options are given, the default feature is activated for every selected package. See the features documentation for more details. -F features, --features features Space or comma separated list of features to activate. Features of workspace members may be enabled with package-name/feature-name syntax. 
This flag may be specified multiple times, which enables all specified features. --all-features Activate all available features of all selected packages. --no-default-features Do not activate the default feature of the selected packages. Compilation Options --target triple Build for the given architecture. The default is the host architecture. The general format of the triple is ---. Run rustc --print target-list for a list of supported targets. This flag may be specified multiple times. This may also be specified with the build.target config value . Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details. -r, --release Build optimized artifacts with the release profile. See also the --profile option for choosing a specific profile by name. --profile name Build with the given profile. See the the reference for more details on profiles. --ignore-rust-version Build the target even if the selected Rust compiler is older than the required Rust version as configured in the project's rust-version field. --timings=fmts Output information how long each compilation takes, and track concurrency information over time. Accepts an optional comma-separated list of output formats; --timings without an argument will default to --timings=html. Specifying an output format (rather than the default) is unstable and requires -Zunstable-options. Valid output formats: o html (unstable, requires -Zunstable-options): Write a human-readable file cargo-timing.html to the target/cargo-timings directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine-readable timing data. o json (unstable, requires -Zunstable-options): Emit machine-readable JSON information about timing information. 
Output Options --target-dir directory Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the build.target-dir config value . Defaults to target in the root of the workspace. --out-dir directory Copy final artifacts to this directory. This option is unstable and available only on the nightly channel and requires the -Z unstable-options flag to enable. See for more information. Display Options -v, --verbose Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . -q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . --message-format fmt The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values: o human (default): Display in a human-readable text format. Conflicts with short and json. o short: Emit shorter, human-readable text messages. Conflicts with human and json. o json: Emit JSON messages to stdout. See the reference for more details. Conflicts with human and short. o json-diagnostic-short: Ensure the rendered field of JSON messages contains the "short" rendering from rustc. Cannot be used with human or short. o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with human or short. 
o json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with human or short. --build-plan Outputs a series of JSON messages to stdout that indicate the commands to run the build. This option is unstable and available only on the nightly channel and requires the -Z unstable-options flag to enable. See for more information. Manifest Options --manifest-path path Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory. --frozen, --locked Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date. These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access. --offline Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline. May also be specified with the net.offline config value . Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). 
See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. Miscellaneous Options -j N, --jobs N Number of parallel jobs to run. May also be specified with the build.jobs config value . Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0. --keep-going Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires -Zunstable-options. --future-incompat-report Displays a future-incompat report for any future-incompatible warnings produced during execution of this command See cargo-report(1) ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. Build the local package and all of its dependencies: cargo build 2. Build with optimizations: cargo build --release SEE ALSO cargo(1), cargo-rustc(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-check.txt000066400000000000000000000355751432416201200225630ustar00rootroot00000000000000CARGO-CHECK(1) NAME cargo-check - Check the current package SYNOPSIS cargo check [options] DESCRIPTION Check a local package and all of its dependencies for errors. This will essentially compile the packages without performing the final step of code generation, which is faster than running cargo build. The compiler will save metadata files to disk so that future runs will reuse them if the source has not been modified. 
Some diagnostics and errors are only emitted during code generation, so they inherently won't be reported with cargo check. OPTIONS Package Selection By default, when no package selection options are given, the packages selected depend on the selected manifest file (based on the current working directory if --manifest-path is not given). If the manifest is the root of a workspace then the workspaces default members are selected, otherwise only the package defined by the manifest will be selected. The default members of a workspace can be set explicitly with the workspace.default-members key in the root manifest. If this is not set, a virtual workspace will include all workspace members (equivalent to passing --workspace), and a non-virtual workspace will include only the root crate itself. -p spec..., --package spec... Check only the specified packages. See cargo-pkgid(1) for the SPEC format. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. --workspace Check all members in the workspace. --all Deprecated alias for --workspace. --exclude SPEC... Exclude the specified packages. Must be used in conjunction with the --workspace flag. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. Target Selection When no target selection options are given, cargo check will check all binary and library targets of the selected packages. Binaries are skipped if they have required-features that are missing. Passing target selection flags will check only the specified targets. Note that --bin, --example, --test and --bench flags also support common Unix glob patterns like *, ? 
and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each glob pattern. --lib Check the package's library. --bin name... Check the specified binary. This flag may be specified multiple times and supports common Unix glob patterns. --bins Check all binary targets. --example name... Check the specified example. This flag may be specified multiple times and supports common Unix glob patterns. --examples Check all example targets. --test name... Check the specified integration test. This flag may be specified multiple times and supports common Unix glob patterns. --tests Check all targets in test mode that have the test = true manifest flag set. By default this includes the library and binaries built as unittests, and integration tests. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a unittest, and once as a dependency for binaries, integration tests, etc.). Targets may be enabled or disabled by setting the test flag in the manifest settings for the target. --bench name... Check the specified benchmark. This flag may be specified multiple times and supports common Unix glob patterns. --benches Check all targets in benchmark mode that have the bench = true manifest flag set. By default this includes the library and binaries built as benchmarks, and bench targets. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a benchmark, and once as a dependency for binaries, benchmarks, etc.). Targets may be enabled or disabled by setting the bench flag in the manifest settings for the target. --all-targets Check all targets. This is equivalent to specifying --lib --bins --tests --benches --examples. Feature Selection The feature flags allow you to control which features are enabled. 
When no feature options are given, the default feature is activated for every selected package. See the features documentation for more details. -F features, --features features Space or comma separated list of features to activate. Features of workspace members may be enabled with package-name/feature-name syntax. This flag may be specified multiple times, which enables all specified features. --all-features Activate all available features of all selected packages. --no-default-features Do not activate the default feature of the selected packages. Compilation Options --target triple Check for the given architecture. The default is the host architecture. The general format of the triple is ---. Run rustc --print target-list for a list of supported targets. This flag may be specified multiple times. This may also be specified with the build.target config value . Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details. -r, --release Check optimized artifacts with the release profile. See also the --profile option for choosing a specific profile by name. --profile name Check with the given profile. As a special case, specifying the test profile will also enable checking in test mode which will enable checking tests and enable the test cfg option. See rustc tests for more detail. See the the reference for more details on profiles. --ignore-rust-version Check the target even if the selected Rust compiler is older than the required Rust version as configured in the project's rust-version field. --timings=fmts Output information how long each compilation takes, and track concurrency information over time. Accepts an optional comma-separated list of output formats; --timings without an argument will default to --timings=html. Specifying an output format (rather than the default) is unstable and requires -Zunstable-options. 
Valid output formats: o html (unstable, requires -Zunstable-options): Write a human-readable file cargo-timing.html to the target/cargo-timings directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine-readable timing data. o json (unstable, requires -Zunstable-options): Emit machine-readable JSON information about timing information. Output Options --target-dir directory Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the build.target-dir config value . Defaults to target in the root of the workspace. Display Options -v, --verbose Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . -q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . --message-format fmt The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values: o human (default): Display in a human-readable text format. Conflicts with short and json. o short: Emit shorter, human-readable text messages. Conflicts with human and json. o json: Emit JSON messages to stdout. See the reference for more details. Conflicts with human and short. o json-diagnostic-short: Ensure the rendered field of JSON messages contains the "short" rendering from rustc. Cannot be used with human or short. 
o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with human or short. o json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with human or short. Manifest Options --manifest-path path Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory. --frozen, --locked Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date. These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access. --offline Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline. May also be specified with the net.offline config value . Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). 
See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. Miscellaneous Options -j N, --jobs N Number of parallel jobs to run. May also be specified with the build.jobs config value . Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0. --keep-going Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires -Zunstable-options. --future-incompat-report Displays a future-incompat report for any future-incompatible warnings produced during execution of this command See cargo-report(1) ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. Check the local package for errors: cargo check 2. Check all targets, including unit tests: cargo check --all-targets --profile=test SEE ALSO cargo(1), cargo-build(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-clean.txt000066400000000000000000000136361432416201200225620ustar00rootroot00000000000000CARGO-CLEAN(1) NAME cargo-clean - Remove generated artifacts SYNOPSIS cargo clean [options] DESCRIPTION Remove artifacts from the target directory that Cargo has generated in the past. With no options, cargo clean will delete the entire target directory. OPTIONS Package Selection When no packages are selected, all packages and all dependencies in the workspace are cleaned. -p spec..., --package spec... 
Clean only the specified packages. This flag may be specified multiple times. See cargo-pkgid(1) for the SPEC format. Clean Options --doc This option will cause cargo clean to remove only the doc directory in the target directory. --release Remove all artifacts in the release directory. --profile name Remove all artifacts in the directory with the given profile name. --target-dir directory Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the build.target-dir config value . Defaults to target in the root of the workspace. --target triple Clean for the given architecture. The default is the host architecture. The general format of the triple is ---. Run rustc --print target-list for a list of supported targets. This flag may be specified multiple times. This may also be specified with the build.target config value . Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details. Display Options -v, --verbose Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . -q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . Manifest Options --manifest-path path Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory. --frozen, --locked Either of these flags requires that the Cargo.lock file is up-to-date. 
If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date. These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access. --offline Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline. May also be specified with the net.offline config value . Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. Remove the entire target directory: cargo clean 2. 
Remove only the release artifacts: cargo clean --release SEE ALSO cargo(1), cargo-build(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-doc.txt000066400000000000000000000320331432416201200222350ustar00rootroot00000000000000CARGO-DOC(1) NAME cargo-doc - Build a package's documentation SYNOPSIS cargo doc [options] DESCRIPTION Build the documentation for the local package and all dependencies. The output is placed in target/doc in rustdoc's usual format. OPTIONS Documentation Options --open Open the docs in a browser after building them. This will use your default browser unless you define another one in the BROWSER environment variable or use the doc.browser configuration option. --no-deps Do not build documentation for dependencies. --document-private-items Include non-public items in the documentation. This will be enabled by default if documenting a binary target. Package Selection By default, when no package selection options are given, the packages selected depend on the selected manifest file (based on the current working directory if --manifest-path is not given). If the manifest is the root of a workspace then the workspaces default members are selected, otherwise only the package defined by the manifest will be selected. The default members of a workspace can be set explicitly with the workspace.default-members key in the root manifest. If this is not set, a virtual workspace will include all workspace members (equivalent to passing --workspace), and a non-virtual workspace will include only the root crate itself. -p spec..., --package spec... Document only the specified packages. See cargo-pkgid(1) for the SPEC format. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. --workspace Document all members in the workspace. 
--all Deprecated alias for --workspace. --exclude SPEC... Exclude the specified packages. Must be used in conjunction with the --workspace flag. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. Target Selection When no target selection options are given, cargo doc will document all binary and library targets of the selected package. The binary will be skipped if its name is the same as the lib target. Binaries are skipped if they have required-features that are missing. The default behavior can be changed by setting doc = false for the target in the manifest settings. Using target selection options will ignore the doc flag and will always document the given target. --lib Document the package's library. --bin name... Document the specified binary. This flag may be specified multiple times and supports common Unix glob patterns. --bins Document all binary targets. --example name... Document the specified example. This flag may be specified multiple times and supports common Unix glob patterns. --examples Document all example targets. Feature Selection The feature flags allow you to control which features are enabled. When no feature options are given, the default feature is activated for every selected package. See the features documentation for more details. -F features, --features features Space or comma separated list of features to activate. Features of workspace members may be enabled with package-name/feature-name syntax. This flag may be specified multiple times, which enables all specified features. --all-features Activate all available features of all selected packages. --no-default-features Do not activate the default feature of the selected packages. Compilation Options --target triple Document for the given architecture. 
The default is the host architecture. The general format of the triple is ---. Run rustc --print target-list for a list of supported targets. This flag may be specified multiple times. This may also be specified with the build.target config value . Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details. -r, --release Document optimized artifacts with the release profile. See also the --profile option for choosing a specific profile by name. --profile name Document with the given profile. See the the reference for more details on profiles. --ignore-rust-version Document the target even if the selected Rust compiler is older than the required Rust version as configured in the project's rust-version field. --timings=fmts Output information how long each compilation takes, and track concurrency information over time. Accepts an optional comma-separated list of output formats; --timings without an argument will default to --timings=html. Specifying an output format (rather than the default) is unstable and requires -Zunstable-options. Valid output formats: o html (unstable, requires -Zunstable-options): Write a human-readable file cargo-timing.html to the target/cargo-timings directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine-readable timing data. o json (unstable, requires -Zunstable-options): Emit machine-readable JSON information about timing information. Output Options --target-dir directory Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the build.target-dir config value . Defaults to target in the root of the workspace. Display Options -v, --verbose Use verbose output. 
May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . -q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . --message-format fmt The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values: o human (default): Display in a human-readable text format. Conflicts with short and json. o short: Emit shorter, human-readable text messages. Conflicts with human and json. o json: Emit JSON messages to stdout. See the reference for more details. Conflicts with human and short. o json-diagnostic-short: Ensure the rendered field of JSON messages contains the "short" rendering from rustc. Cannot be used with human or short. o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with human or short. o json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with human or short. Manifest Options --manifest-path path Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory. --frozen, --locked Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. 
The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date. These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access. --offline Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline. May also be specified with the net.offline config value . Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. Miscellaneous Options -j N, --jobs N Number of parallel jobs to run. May also be specified with the build.jobs config value . Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0. --keep-going Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. 
Unstable, requires -Zunstable-options. ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. Build the local package documentation and its dependencies and output to target/doc. cargo doc SEE ALSO cargo(1), cargo-rustdoc(1), rustdoc(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-fetch.txt000066400000000000000000000127311432416201200225640ustar00rootroot00000000000000CARGO-FETCH(1) NAME cargo-fetch - Fetch dependencies of a package from the network SYNOPSIS cargo fetch [options] DESCRIPTION If a Cargo.lock file is available, this command will ensure that all of the git dependencies and/or registry dependencies are downloaded and locally available. Subsequent Cargo commands will be able to run offline after a cargo fetch unless the lock file changes. If the lock file is not available, then this command will generate the lock file before fetching the dependencies. If --target is not specified, then all target dependencies are fetched. See also the cargo-prefetch plugin which adds a command to download popular crates. This may be useful if you plan to use Cargo without a network with the --offline flag. OPTIONS Fetch options --target triple Fetch for the given architecture. The default is all architectures. The general format of the triple is ---. Run rustc --print target-list for a list of supported targets. This flag may be specified multiple times. This may also be specified with the build.target config value . Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details. Display Options -v, --verbose Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . 
-q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . Manifest Options --manifest-path path Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory. --frozen, --locked Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date. These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access. --offline Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline. May also be specified with the net.offline config value . Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. 
The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. Fetch all dependencies: cargo fetch SEE ALSO cargo(1), cargo-update(1), cargo-generate-lockfile(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-fix.txt000066400000000000000000000435041432416201200222630ustar00rootroot00000000000000CARGO-FIX(1) NAME cargo-fix - Automatically fix lint warnings reported by rustc SYNOPSIS cargo fix [options] DESCRIPTION This Cargo subcommand will automatically take rustc's suggestions from diagnostics like warnings and apply them to your source code. This is intended to help automate tasks that rustc itself already knows how to tell you to fix! Executing cargo fix will under the hood execute cargo-check(1). Any warnings applicable to your crate will be automatically fixed (if possible) and all remaining warnings will be displayed when the check process is finished. For example if you'd like to apply all fixes to the current package, you can run: cargo fix which behaves the same as cargo check --all-targets. cargo fix is only capable of fixing code that is normally compiled with cargo check. If code is conditionally enabled with optional features, you will need to enable those features for that code to be analyzed: cargo fix --features foo Similarly, other cfg expressions like platform-specific code will need to pass --target to fix code for the given target. cargo fix --target x86_64-pc-windows-gnu If you encounter any problems with cargo fix or otherwise have any questions or feature requests please don't hesitate to file an issue at . 
Edition migration The cargo fix subcommand can also be used to migrate a package from one edition to the next. The general procedure is: 1. Run cargo fix --edition. Consider also using the --all-features flag if your project has multiple features. You may also want to run cargo fix --edition multiple times with different --target flags if your project has platform-specific code gated by cfg attributes. 2. Modify Cargo.toml to set the edition field to the new edition. 3. Run your project tests to verify that everything still works. If new warnings are issued, you may want to consider running cargo fix again (without the --edition flag) to apply any suggestions given by the compiler. And hopefully that's it! Just keep in mind of the caveats mentioned above that cargo fix cannot update code for inactive features or cfg expressions. Also, in some rare cases the compiler is unable to automatically migrate all code to the new edition, and this may require manual changes after building with the new edition. OPTIONS Fix options --broken-code Fix code even if it already has compiler errors. This is useful if cargo fix fails to apply the changes. It will apply the changes and leave the broken code in the working directory for you to inspect and manually fix. --edition Apply changes that will update the code to the next edition. This will not update the edition in the Cargo.toml manifest, which must be updated manually after cargo fix --edition has finished. --edition-idioms Apply suggestions that will update code to the preferred style for the current edition. --allow-no-vcs Fix code even if a VCS was not detected. --allow-dirty Fix code even if the working directory has changes. --allow-staged Fix code even if the working directory has staged changes. Package Selection By default, when no package selection options are given, the packages selected depend on the selected manifest file (based on the current working directory if --manifest-path is not given). 
If the manifest is the root of a workspace then the workspaces default members are selected, otherwise only the package defined by the manifest will be selected. The default members of a workspace can be set explicitly with the workspace.default-members key in the root manifest. If this is not set, a virtual workspace will include all workspace members (equivalent to passing --workspace), and a non-virtual workspace will include only the root crate itself. -p spec..., --package spec... Fix only the specified packages. See cargo-pkgid(1) for the SPEC format. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. --workspace Fix all members in the workspace. --all Deprecated alias for --workspace. --exclude SPEC... Exclude the specified packages. Must be used in conjunction with the --workspace flag. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. Target Selection When no target selection options are given, cargo fix will fix all targets (--all-targets implied). Binaries are skipped if they have required-features that are missing. Passing target selection flags will fix only the specified targets. Note that --bin, --example, --test and --bench flags also support common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each glob pattern. --lib Fix the package's library. --bin name... Fix the specified binary. This flag may be specified multiple times and supports common Unix glob patterns. --bins Fix all binary targets. 
--example name... Fix the specified example. This flag may be specified multiple times and supports common Unix glob patterns. --examples Fix all example targets. --test name... Fix the specified integration test. This flag may be specified multiple times and supports common Unix glob patterns. --tests Fix all targets in test mode that have the test = true manifest flag set. By default this includes the library and binaries built as unittests, and integration tests. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a unittest, and once as a dependency for binaries, integration tests, etc.). Targets may be enabled or disabled by setting the test flag in the manifest settings for the target. --bench name... Fix the specified benchmark. This flag may be specified multiple times and supports common Unix glob patterns. --benches Fix all targets in benchmark mode that have the bench = true manifest flag set. By default this includes the library and binaries built as benchmarks, and bench targets. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a benchmark, and once as a dependency for binaries, benchmarks, etc.). Targets may be enabled or disabled by setting the bench flag in the manifest settings for the target. --all-targets Fix all targets. This is equivalent to specifying --lib --bins --tests --benches --examples. Feature Selection The feature flags allow you to control which features are enabled. When no feature options are given, the default feature is activated for every selected package. See the features documentation for more details. -F features, --features features Space or comma separated list of features to activate. Features of workspace members may be enabled with package-name/feature-name syntax. This flag may be specified multiple times, which enables all specified features. 
--all-features Activate all available features of all selected packages. --no-default-features Do not activate the default feature of the selected packages. Compilation Options --target triple Fix for the given architecture. The default is the host architecture. The general format of the triple is ---. Run rustc --print target-list for a list of supported targets. This flag may be specified multiple times. This may also be specified with the build.target config value . Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details. -r, --release Fix optimized artifacts with the release profile. See also the --profile option for choosing a specific profile by name. --profile name Fix with the given profile. As a special case, specifying the test profile will also enable checking in test mode which will enable checking tests and enable the test cfg option. See rustc tests for more detail. See the the reference for more details on profiles. --ignore-rust-version Fix the target even if the selected Rust compiler is older than the required Rust version as configured in the project's rust-version field. --timings=fmts Output information how long each compilation takes, and track concurrency information over time. Accepts an optional comma-separated list of output formats; --timings without an argument will default to --timings=html. Specifying an output format (rather than the default) is unstable and requires -Zunstable-options. Valid output formats: o html (unstable, requires -Zunstable-options): Write a human-readable file cargo-timing.html to the target/cargo-timings directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine-readable timing data. 
o json (unstable, requires -Zunstable-options): Emit machine-readable JSON information about timing information. Output Options --target-dir directory Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the build.target-dir config value . Defaults to target in the root of the workspace. Display Options -v, --verbose Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . -q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . --message-format fmt The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values: o human (default): Display in a human-readable text format. Conflicts with short and json. o short: Emit shorter, human-readable text messages. Conflicts with human and json. o json: Emit JSON messages to stdout. See the reference for more details. Conflicts with human and short. o json-diagnostic-short: Ensure the rendered field of JSON messages contains the "short" rendering from rustc. Cannot be used with human or short. o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with human or short. o json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. 
Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with human or short. Manifest Options --manifest-path path Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory. --frozen, --locked Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date. These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access. --offline Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline. May also be specified with the net.offline config value . Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. 
Run cargo -Z help for details. Miscellaneous Options -j N, --jobs N Number of parallel jobs to run. May also be specified with the build.jobs config value . Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0. --keep-going Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires -Zunstable-options. ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. Apply compiler suggestions to the local package: cargo fix 2. Update a package to prepare it for the next edition: cargo fix --edition 3. Apply suggested idioms for the current edition: cargo fix --edition-idioms SEE ALSO cargo(1), cargo-check(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-generate-lockfile.txt000066400000000000000000000106041432416201200250500ustar00rootroot00000000000000CARGO-GENERATE-LOCKFILE(1) NAME cargo-generate-lockfile - Generate the lockfile for a package SYNOPSIS cargo generate-lockfile [options] DESCRIPTION This command will create the Cargo.lock lockfile for the current package or workspace. If the lockfile already exists, it will be rebuilt with the latest available version of every package. See also cargo-update(1) which is also capable of creating a Cargo.lock lockfile and has more options for controlling update behavior. OPTIONS Display Options -v, --verbose Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . -q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. 
Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . Manifest Options --manifest-path path Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory. --frozen, --locked Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date. These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access. --offline Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline. May also be specified with the net.offline config value . Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. 
See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. Create or update the lockfile for the current package or workspace: cargo generate-lockfile SEE ALSO cargo(1), cargo-update(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-help.txt000066400000000000000000000005601432416201200224200ustar00rootroot00000000000000CARGO-HELP(1) NAME cargo-help - Get help for a Cargo command SYNOPSIS cargo help [subcommand] DESCRIPTION Prints a help message for the given command. EXAMPLES 1. Get help for a command: cargo help build 2. Help is also available with the --help flag: cargo build --help SEE ALSO cargo(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-init.txt000066400000000000000000000105601432416201200224340ustar00rootroot00000000000000CARGO-INIT(1) NAME cargo-init - Create a new Cargo package in an existing directory SYNOPSIS cargo init [options] [path] DESCRIPTION This command will create a new Cargo manifest in the current directory. Give a path as an argument to create in the given directory. If there are typically-named Rust source files already in the directory, those will be used. If not, then a sample src/main.rs file will be created, or src/lib.rs if --lib is passed. If the directory is not already in a VCS repository, then a new repository is created (see --vcs below). See cargo-new(1) for a similar command which will create a new package in a new directory. OPTIONS Init Options --bin Create a package with a binary target (src/main.rs). This is the default behavior. --lib Create a package with a library target (src/lib.rs). --edition edition Specify the Rust edition to use. Default is 2021. Possible values: 2015, 2018, 2021 --name name Set the package name. 
Defaults to the directory name. --vcs vcs Initialize a new VCS repository for the given version control system (git, hg, pijul, or fossil) or do not initialize any version control at all (none). If not specified, defaults to git or the configuration value cargo-new.vcs, or none if already inside a VCS repository. --registry registry This sets the publish field in Cargo.toml to the given registry name which will restrict publishing only to that registry. Registry names are defined in Cargo config files . If not specified, the default registry defined by the registry.default config key is used. If the default registry is not set and --registry is not used, the publish field will not be set which means that publishing will not be restricted. Display Options -v, --verbose Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . -q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. 
-Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. Create a binary Cargo package in the current directory: cargo init SEE ALSO cargo(1), cargo-new(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-install.txt000066400000000000000000000367451432416201200231540ustar00rootroot00000000000000CARGO-INSTALL(1) NAME cargo-install - Build and install a Rust binary SYNOPSIS cargo install [options] crate[@version]... cargo install [options] --path path cargo install [options] --git url [crate...] cargo install [options] --list DESCRIPTION This command manages Cargo's local set of installed binary crates. Only packages which have executable [[bin]] or [[example]] targets can be installed, and all executables are installed into the installation root's bin folder. The installation root is determined, in order of precedence: o --root option o CARGO_INSTALL_ROOT environment variable o install.root Cargo config value o CARGO_HOME environment variable o $HOME/.cargo There are multiple sources from which a crate can be installed. The default location is crates.io but the --git, --path, and --registry flags can change this source. If the source contains more than one package (such as crates.io or a git repository with multiple crates) the crate argument is required to indicate which crate should be installed. Crates from crates.io can optionally specify the version they wish to install via the --version flags, and similarly packages from git repositories can optionally specify the branch, tag, or revision that should be installed. If a crate has multiple binaries, the --bin argument can selectively install only one of them, and if you'd rather install examples the --example argument can be used as well. 
If the package is already installed, Cargo will reinstall it if the installed version does not appear to be up-to-date. If any of the following values change, then Cargo will reinstall the package: o The package version and source. o The set of binary names installed. o The chosen features. o The profile (--profile). o The target (--target). Installing with --path will always build and install, unless there are conflicting binaries from another package. The --force flag may be used to force Cargo to always reinstall the package. If the source is crates.io or --git then by default the crate will be built in a temporary target directory. To avoid this, the target directory can be specified by setting the CARGO_TARGET_DIR environment variable to a relative path. In particular, this can be useful for caching build artifacts on continuous integration systems. By default, the Cargo.lock file that is included with the package will be ignored. This means that Cargo will recompute which versions of dependencies to use, possibly using newer versions that have been released since the package was published. The --locked flag can be used to force Cargo to use the packaged Cargo.lock file if it is available. This may be useful for ensuring reproducible builds, to use the exact same set of dependencies that were available when the package was published. It may also be useful if a newer version of a dependency is published that no longer builds on your system, or has other problems. The downside to using --locked is that you will not receive any fixes or updates to any dependency. Note that Cargo did not start publishing Cargo.lock files until version 1.37, which means packages published with prior versions will not have a Cargo.lock file available. OPTIONS Install Options --vers version, --version version Specify a version to install. This may be a version requirement , like ~1.2, to have Cargo select the newest version from the given requirement. 
If the version does not have a requirement operator (such as ^ or ~), then it must be in the form MAJOR.MINOR.PATCH, and will install exactly that version; it is not treated as a caret requirement like Cargo dependencies are. --git url Git URL to install the specified crate from. --branch branch Branch to use when installing from git. --tag tag Tag to use when installing from git. --rev sha Specific commit to use when installing from git. --path path Filesystem path to local crate to install. --list List all installed packages and their versions. -f, --force Force overwriting existing crates or binaries. This can be used if a package has installed a binary with the same name as another package. This is also useful if something has changed on the system that you want to rebuild with, such as a newer version of rustc. --no-track By default, Cargo keeps track of the installed packages with a metadata file stored in the installation root directory. This flag tells Cargo not to use or create that file. With this flag, Cargo will refuse to overwrite any existing files unless the --force flag is used. This also disables Cargo's ability to protect against multiple concurrent invocations of Cargo installing at the same time. --bin name... Install only the specified binary. --bins Install all binaries. --example name... Install only the specified example. --examples Install all examples. --root dir Directory to install packages into. --registry registry Name of the registry to use. Registry names are defined in Cargo config files . If not specified, the default registry is used, which is defined by the registry.default config key which defaults to crates-io. --index index The URL of the registry index to use. Feature Selection The feature flags allow you to control which features are enabled. When no feature options are given, the default feature is activated for every selected package. See the features documentation for more details. 
 -F features, --features features Space or comma separated list of features to activate. Features of workspace members may be enabled with package-name/feature-name syntax. This flag may be specified multiple times, which enables all specified features. --all-features Activate all available features of all selected packages. --no-default-features Do not activate the default feature of the selected packages. Compilation Options --target triple Install for the given architecture. The default is the host architecture. The general format of the triple is <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a list of supported targets. This may also be specified with the build.target config value . Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details. --target-dir directory Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the build.target-dir config value . Defaults to a new temporary folder located in the temporary directory of the platform. When using --path, by default it will use the target directory in the workspace of the local crate unless --target-dir is specified. --debug Build with the dev profile instead of the release profile. See also the --profile option for choosing a specific profile by name. --profile name Install with the given profile. See the reference for more details on profiles. --timings=fmts Output information about how long each compilation takes, and track concurrency information over time. Accepts an optional comma-separated list of output formats; --timings without an argument will default to --timings=html. Specifying an output format (rather than the default) is unstable and requires -Zunstable-options. 
Valid output formats: o html (unstable, requires -Zunstable-options): Write a human-readable file cargo-timing.html to the target/cargo-timings directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine-readable timing data. o json (unstable, requires -Zunstable-options): Emit machine-readable JSON information about timing information. Manifest Options --frozen, --locked Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date. These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access. --offline Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline. May also be specified with the net.offline config value . Miscellaneous Options -j N, --jobs N Number of parallel jobs to run. May also be specified with the build.jobs config value . Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0. 
--keep-going Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires -Zunstable-options. Display Options -v, --verbose Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . -q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . --message-format fmt The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values: o human (default): Display in a human-readable text format. Conflicts with short and json. o short: Emit shorter, human-readable text messages. Conflicts with human and json. o json: Emit JSON messages to stdout. See the reference for more details. Conflicts with human and short. o json-diagnostic-short: Ensure the rendered field of JSON messages contains the "short" rendering from rustc. Cannot be used with human or short. o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with human or short. o json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with human or short. 
Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. Install or upgrade a package from crates.io: cargo install ripgrep 2. Install or reinstall the package in the current directory: cargo install --path . 3. View the list of installed packages: cargo install --list SEE ALSO cargo(1), cargo-uninstall(1), cargo-search(1), cargo-publish(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-locate-project.txt000066400000000000000000000063121432416201200244040ustar00rootroot00000000000000CARGO-LOCATE-PROJECT(1) NAME cargo-locate-project - Print a JSON representation of a Cargo.toml file's location SYNOPSIS cargo locate-project [options] DESCRIPTION This command will print a JSON object to stdout with the full path to the Cargo.toml manifest. OPTIONS --workspace Locate the Cargo.toml at the root of the workspace, as opposed to the current workspace member. Display Options --message-format fmt The representation in which to print the project location. Valid values: o json (default): JSON object with the path under the key "root". o plain: Just the path. -v, --verbose Use verbose output. 
May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . -q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . Manifest Options --manifest-path path Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory. Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. 
Display the path to the manifest based on the current directory: cargo locate-project SEE ALSO cargo(1), cargo-metadata(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-login.txt000066400000000000000000000065171432416201200226100ustar00rootroot00000000000000CARGO-LOGIN(1) NAME cargo-login - Save an API token from the registry locally SYNOPSIS cargo login [options] [token] DESCRIPTION This command will save the API token to disk so that commands that require authentication, such as cargo-publish(1), will be automatically authenticated. The token is saved in $CARGO_HOME/credentials.toml. CARGO_HOME defaults to .cargo in your home directory. If the token argument is not specified, it will be read from stdin. The API token for crates.io may be retrieved from . Take care to keep the token secret, it should not be shared with anyone else. OPTIONS Login Options --registry registry Name of the registry to use. Registry names are defined in Cargo config files . If not specified, the default registry is used, which is defined by the registry.default config key which defaults to crates-io. Display Options -v, --verbose Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . -q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. 
--config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. Save the API token to disk: cargo login SEE ALSO cargo(1), cargo-publish(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-metadata.txt000066400000000000000000000510251432416201200232520ustar00rootroot00000000000000CARGO-METADATA(1) NAME cargo-metadata - Machine-readable metadata about the current package SYNOPSIS cargo metadata [options] DESCRIPTION Output JSON to stdout containing information about the workspace members and resolved dependencies of the current package. It is recommended to include the --format-version flag to future-proof your code to ensure the output is in the format you are expecting. See the cargo_metadata crate for a Rust API for reading the metadata. OUTPUT FORMAT The output has the following format: { /* Array of all packages in the workspace. It also includes all feature-enabled dependencies unless --no-deps is used. */ "packages": [ { /* The name of the package. */ "name": "my-package", /* The version of the package. */ "version": "0.1.0", /* The Package ID, a unique identifier for referring to the package. */ "id": "my-package 0.1.0 (path+file:///path/to/my-package)", /* The license value from the manifest, or null. */ "license": "MIT/Apache-2.0", /* The license-file value from the manifest, or null. */ "license_file": "LICENSE", /* The description value from the manifest, or null. */ "description": "Package description.", /* The source ID of the package. 
This represents where a package is retrieved from. This is null for path dependencies and workspace members. For other dependencies, it is a string with the format: - "registry+URL" for registry-based dependencies. Example: "registry+https://github.com/rust-lang/crates.io-index" - "git+URL" for git-based dependencies. Example: "git+https://github.com/rust-lang/cargo?rev=5e85ba14aaa20f8133863373404cb0af69eeef2c#5e85ba14aaa20f8133863373404cb0af69eeef2c" */ "source": null, /* Array of dependencies declared in the package's manifest. */ "dependencies": [ { /* The name of the dependency. */ "name": "bitflags", /* The source ID of the dependency. May be null, see description for the package source. */ "source": "registry+https://github.com/rust-lang/crates.io-index", /* The version requirement for the dependency. Dependencies without a version requirement have a value of "*". */ "req": "^1.0", /* The dependency kind. "dev", "build", or null for a normal dependency. */ "kind": null, /* If the dependency is renamed, this is the new name for the dependency as a string. null if it is not renamed. */ "rename": null, /* Boolean of whether or not this is an optional dependency. */ "optional": false, /* Boolean of whether or not default features are enabled. */ "uses_default_features": true, /* Array of features enabled. */ "features": [], /* The target platform for the dependency. null if not a target dependency. */ "target": "cfg(windows)", /* The file system path for a local path dependency. not present if not a path dependency. */ "path": "/path/to/dep", /* A string of the URL of the registry this dependency is from. If not specified or null, the dependency is from the default registry (crates.io). */ "registry": null } ], /* Array of Cargo targets. */ "targets": [ { /* Array of target kinds. - lib targets list the `crate-type` values from the manifest such as "lib", "rlib", "dylib", "proc-macro", etc. 
(default ["lib"]) - binary is ["bin"] - example is ["example"] - integration test is ["test"] - benchmark is ["bench"] - build script is ["custom-build"] */ "kind": [ "bin" ], /* Array of crate types. - lib and example libraries list the `crate-type` values from the manifest such as "lib", "rlib", "dylib", "proc-macro", etc. (default ["lib"]) - all other target kinds are ["bin"] */ "crate_types": [ "bin" ], /* The name of the target. */ "name": "my-package", /* Absolute path to the root source file of the target. */ "src_path": "/path/to/my-package/src/main.rs", /* The Rust edition of the target. Defaults to the package edition. */ "edition": "2018", /* Array of required features. This property is not included if no required features are set. */ "required-features": ["feat1"], /* Whether the target should be documented by `cargo doc`. */ "doc": true, /* Whether or not this target has doc tests enabled, and the target is compatible with doc testing. */ "doctest": false, /* Whether or not this target should be built and run with `--test` */ "test": true } ], /* Set of features defined for the package. Each feature maps to an array of features or dependencies it enables. */ "features": { "default": [ "feat1" ], "feat1": [], "feat2": [] }, /* Absolute path to this package's manifest. */ "manifest_path": "/path/to/my-package/Cargo.toml", /* Package metadata. This is null if no metadata is specified. */ "metadata": { "docs": { "rs": { "all-features": true } } }, /* List of registries to which this package may be published. Publishing is unrestricted if null, and forbidden if an empty array. */ "publish": [ "crates-io" ], /* Array of authors from the manifest. Empty array if no authors specified. */ "authors": [ "Jane Doe " ], /* Array of categories from the manifest. */ "categories": [ "command-line-utilities" ], /* Optional string that is the default binary picked by cargo run. 
*/ "default_run": null, /* Optional string that is the minimum supported rust version */ "rust_version": "1.56", /* Array of keywords from the manifest. */ "keywords": [ "cli" ], /* The readme value from the manifest or null if not specified. */ "readme": "README.md", /* The repository value from the manifest or null if not specified. */ "repository": "https://github.com/rust-lang/cargo", /* The homepage value from the manifest or null if not specified. */ "homepage": "https://rust-lang.org", /* The documentation value from the manifest or null if not specified. */ "documentation": "https://doc.rust-lang.org/stable/std", /* The default edition of the package. Note that individual targets may have different editions. */ "edition": "2018", /* Optional string that is the name of a native library the package is linking to. */ "links": null, } ], /* Array of members of the workspace. Each entry is the Package ID for the package. */ "workspace_members": [ "my-package 0.1.0 (path+file:///path/to/my-package)", ], // The resolved dependency graph for the entire workspace. The enabled // features are based on the enabled features for the "current" package. // Inactivated optional dependencies are not listed. // // This is null if --no-deps is specified. // // By default, this includes all dependencies for all target platforms. // The `--filter-platform` flag may be used to narrow to a specific // target triple. "resolve": { /* Array of nodes within the dependency graph. Each node is a package. */ "nodes": [ { /* The Package ID of this node. */ "id": "my-package 0.1.0 (path+file:///path/to/my-package)", /* The dependencies of this package, an array of Package IDs. */ "dependencies": [ "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" ], /* The dependencies of this package. This is an alternative to "dependencies" which contains additional information. In particular, this handles renamed dependencies. 
*/ "deps": [ { /* The name of the dependency's library target. If this is a renamed dependency, this is the new name. */ "name": "bitflags", /* The Package ID of the dependency. */ "pkg": "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", /* Array of dependency kinds. Added in Cargo 1.40. */ "dep_kinds": [ { /* The dependency kind. "dev", "build", or null for a normal dependency. */ "kind": null, /* The target platform for the dependency. null if not a target dependency. */ "target": "cfg(windows)" } ] } ], /* Array of features enabled on this package. */ "features": [ "default" ] } ], /* The root package of the workspace. This is null if this is a virtual workspace. Otherwise it is the Package ID of the root package. */ "root": "my-package 0.1.0 (path+file:///path/to/my-package)" }, /* The absolute path to the build directory where Cargo places its output. */ "target_directory": "/path/to/my-package/target", /* The version of the schema for this metadata structure. This will be changed if incompatible changes are ever made. */ "version": 1, /* The absolute path to the root of the workspace. */ "workspace_root": "/path/to/my-package" /* Workspace metadata. This is null if no metadata is specified. */ "metadata": { "docs": { "rs": { "all-features": true } } } } OPTIONS Output Options --no-deps Output information only about the workspace members and don't fetch dependencies. --format-version version Specify the version of the output format to use. Currently 1 is the only possible value. --filter-platform triple This filters the resolve output to only include dependencies for the given target triple. Without this flag, the resolve includes all targets. Note that the dependencies listed in the "packages" array still includes all dependencies. Each package definition is intended to be an unaltered reproduction of the information within Cargo.toml. Feature Selection The feature flags allow you to control which features are enabled. 
When no feature options are given, the default feature is activated for every selected package. See the features documentation for more details. -F features, --features features Space or comma separated list of features to activate. Features of workspace members may be enabled with package-name/feature-name syntax. This flag may be specified multiple times, which enables all specified features. --all-features Activate all available features of all selected packages. --no-default-features Do not activate the default feature of the selected packages. Display Options -v, --verbose Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . -q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . Manifest Options --manifest-path path Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory. --frozen, --locked Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date. These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access. --offline Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. 
With this flag, Cargo will attempt to proceed without the network if possible. Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline. May also be specified with the net.offline config value . Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. Output JSON about the current package: cargo metadata --format-version=1 SEE ALSO cargo(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-new.txt000066400000000000000000000102641432416201200222630ustar00rootroot00000000000000CARGO-NEW(1) NAME cargo-new - Create a new Cargo package SYNOPSIS cargo new [options] path DESCRIPTION This command will create a new Cargo package in the given directory. This includes a simple template with a Cargo.toml manifest, sample source file, and a VCS ignore file. If the directory is not already in a VCS repository, then a new repository is created (see --vcs below). See cargo-init(1) for a similar command which will create a new manifest in an existing directory. 
OPTIONS New Options --bin Create a package with a binary target (src/main.rs). This is the default behavior. --lib Create a package with a library target (src/lib.rs). --edition edition Specify the Rust edition to use. Default is 2021. Possible values: 2015, 2018, 2021 --name name Set the package name. Defaults to the directory name. --vcs vcs Initialize a new VCS repository for the given version control system (git, hg, pijul, or fossil) or do not initialize any version control at all (none). If not specified, defaults to git or the configuration value cargo-new.vcs, or none if already inside a VCS repository. --registry registry This sets the publish field in Cargo.toml to the given registry name which will restrict publishing only to that registry. Registry names are defined in Cargo config files . If not specified, the default registry defined by the registry.default config key is used. If the default registry is not set and --registry is not used, the publish field will not be set which means that publishing will not be restricted. Display Options -v, --verbose Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . -q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. 
--config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. Create a binary Cargo package in the given directory: cargo new foo SEE ALSO cargo(1), cargo-init(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-owner.txt000066400000000000000000000112131432416201200226170ustar00rootroot00000000000000CARGO-OWNER(1) NAME cargo-owner - Manage the owners of a crate on the registry SYNOPSIS cargo owner [options] --add login [crate] cargo owner [options] --remove login [crate] cargo owner [options] --list [crate] DESCRIPTION This command will modify the owners for a crate on the registry. Owners of a crate can upload new versions and yank old versions. Non-team owners can also modify the set of owners, so take care! This command requires you to be authenticated with either the --token option or using cargo-login(1). If the crate name is not specified, it will use the package name from the current directory. See the reference for more information about owners and publishing. OPTIONS Owner Options -a, --add login... Invite the given user or team as an owner. -r, --remove login... Remove the given user or team as an owner. -l, --list List owners of a crate. --token token API token to use when authenticating. This overrides the token stored in the credentials file (which is created by cargo-login(1)). Cargo config environment variables can be used to override the tokens stored in the credentials file. The token for crates.io may be specified with the CARGO_REGISTRY_TOKEN environment variable. 
Tokens for other registries may be specified with environment variables of the form CARGO_REGISTRIES_NAME_TOKEN where NAME is the name of the registry in all capital letters. --index index The URL of the registry index to use. --registry registry Name of the registry to use. Registry names are defined in Cargo config files . If not specified, the default registry is used, which is defined by the registry.default config key which defaults to crates-io. Display Options -v, --verbose Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . -q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. List owners of a package: cargo owner --list foo 2. 
Invite an owner to a package: cargo owner --add username foo 3. Remove an owner from a package: cargo owner --remove username foo SEE ALSO cargo(1), cargo-login(1), cargo-publish(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-package.txt000066400000000000000000000254441432416201200230730ustar00rootroot00000000000000CARGO-PACKAGE(1) NAME cargo-package - Assemble the local package into a distributable tarball SYNOPSIS cargo package [options] DESCRIPTION This command will create a distributable, compressed .crate file with the source code of the package in the current directory. The resulting file will be stored in the target/package directory. This performs the following steps: 1. Load and check the current workspace, performing some basic checks. o Path dependencies are not allowed unless they have a version key. Cargo will ignore the path key for dependencies in published packages. dev-dependencies do not have this restriction. 2. Create the compressed .crate file. o The original Cargo.toml file is rewritten and normalized. o [patch], [replace], and [workspace] sections are removed from the manifest. o Cargo.lock is automatically included if the package contains an executable binary or example target. cargo-install(1) will use the packaged lock file if the --locked flag is used. o A .cargo_vcs_info.json file is included that contains information about the current VCS checkout hash if available (not included with --allow-dirty). 3. Extract the .crate file and build it to verify it can build. o This will rebuild your package from scratch to ensure that it can be built from a pristine state. The --no-verify flag can be used to skip this step. 4. Check that build scripts did not modify any source files. The list of files included can be controlled with the include and exclude fields in the manifest. See the reference for more details about packaging and publishing. 
If the manifest is the root of a workspace then the workspace's default members are selected, otherwise only the package defined by the manifest will be selected.
The general format of the triple is <arch><sub>-<vendor>-<sys>-<abi>.
These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access. --offline Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline. May also be specified with the net.offline config value . Miscellaneous Options -j N, --jobs N Number of parallel jobs to run. May also be specified with the build.jobs config value . Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0. --keep-going Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires -Zunstable-options. Display Options -v, --verbose Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . -q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . 
Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. Create a compressed .crate file of the current package: cargo package SEE ALSO cargo(1), cargo-publish(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-pkgid.txt000066400000000000000000000150411432416201200225660ustar00rootroot00000000000000CARGO-PKGID(1) NAME cargo-pkgid - Print a fully qualified package specification SYNOPSIS cargo pkgid [options] [spec] DESCRIPTION Given a spec argument, print out the fully qualified package ID specifier for a package or dependency in the current workspace. This command will generate an error if spec is ambiguous as to which package it refers to in the dependency graph. If no spec is given, then the specifier for the local package is printed. This command requires that a lockfile is available and dependencies have been fetched. A package specifier consists of a name, version, and source URL. You are allowed to use partial specifiers to succinctly match a specific package as long as it matches only one package. 
| url#name         | https://github.com/rust-lang/crates.io-index#bitflags |
+-----------------+--------------------------------------------------+
| url#name:version | https://github.com/rust-lang/cargo#crates-io@0.21.0 |
+-----------------+--------------------------------------------------+
If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date. These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access. --offline Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline. May also be specified with the net.offline config value . Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. Retrieve package specification for foo package: cargo pkgid foo 2. Retrieve package specification for version 1.0.0 of foo: cargo pkgid foo@1.0.0 3. 
The default registry is <https://crates.io>.
The general format of the triple is <arch><sub>-<vendor>-<sys>-<abi>.
--no-default-features Do not activate the default feature of the selected packages. Manifest Options --manifest-path path Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory. --frozen, --locked Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date. These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access. --offline Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline. May also be specified with the net.offline config value . Miscellaneous Options -j N, --jobs N Number of parallel jobs to run. May also be specified with the build.jobs config value . Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0. --keep-going Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires -Zunstable-options. Display Options -v, --verbose Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. 
May also be specified with the term.verbose config value . -q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. Publish the current package: cargo publish SEE ALSO cargo(1), cargo-package(1), cargo-login(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-report.txt000066400000000000000000000014661432416201200230110ustar00rootroot00000000000000CARGO-REPORT(1) NAME cargo-report - Generate and display various kinds of reports SYNOPSIS cargo report type [options] DESCRIPTION Displays a report of the given type - currently, only future-incompat is supported OPTIONS --id id Show the report with the specified Cargo-generated id -p spec..., --package spec... Only display a report for the specified package EXAMPLES 1. Display the latest future-incompat report: cargo report future-incompat 2. 
Display the latest future-incompat report for a specific package: cargo report future-incompat --package my-dep:0.0.1 SEE ALSO Future incompat report cargo(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-run.txt000066400000000000000000000251661432416201200223050ustar00rootroot00000000000000CARGO-RUN(1) NAME cargo-run - Run the current package SYNOPSIS cargo run [options] [-- args] DESCRIPTION Run a binary or example of the local package. All the arguments following the two dashes (--) are passed to the binary to run. If you're passing arguments to both Cargo and the binary, the ones after -- go to the binary, the ones before go to Cargo. OPTIONS Package Selection By default, the package in the current working directory is selected. The -p flag can be used to choose a different package in a workspace. -p spec, --package spec The package to run. See cargo-pkgid(1) for the SPEC format. Target Selection When no target selection options are given, cargo run will run the binary target. If there are multiple binary targets, you must pass a target flag to choose one. Or, the default-run field may be specified in the [package] section of Cargo.toml to choose the name of the binary to run by default. --bin name Run the specified binary. --example name Run the specified example. Feature Selection The feature flags allow you to control which features are enabled. When no feature options are given, the default feature is activated for every selected package. See the features documentation for more details. -F features, --features features Space or comma separated list of features to activate. Features of workspace members may be enabled with package-name/feature-name syntax. This flag may be specified multiple times, which enables all specified features. --all-features Activate all available features of all selected packages. --no-default-features Do not activate the default feature of the selected packages. Compilation Options --target triple Run for the given architecture. 
The general format of the triple is <arch><sub>-<vendor>-<sys>-<abi>.
May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . -q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . --message-format fmt The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values: o human (default): Display in a human-readable text format. Conflicts with short and json. o short: Emit shorter, human-readable text messages. Conflicts with human and json. o json: Emit JSON messages to stdout. See the reference for more details. Conflicts with human and short. o json-diagnostic-short: Ensure the rendered field of JSON messages contains the "short" rendering from rustc. Cannot be used with human or short. o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with human or short. o json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with human or short. Manifest Options --manifest-path path Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory. --frozen, --locked Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. 
The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date. These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access. --offline Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline. May also be specified with the net.offline config value . Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. Miscellaneous Options -j N, --jobs N Number of parallel jobs to run. May also be specified with the build.jobs config value . Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0. --keep-going Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. 
Unstable, requires -Zunstable-options. ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. Build the local package and run its main target (assuming only one binary): cargo run 2. Run an example with extra arguments: cargo run --example exname -- --exoption exarg1 exarg2 SEE ALSO cargo(1), cargo-build(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-rustc.txt000066400000000000000000000376041432416201200226410ustar00rootroot00000000000000CARGO-RUSTC(1) NAME cargo-rustc - Compile the current package, and pass extra options to the compiler SYNOPSIS cargo rustc [options] [-- args] DESCRIPTION The specified target for the current package (or package specified by -p if provided) will be compiled along with all of its dependencies. The specified args will all be passed to the final compiler invocation, not any of the dependencies. Note that the compiler will still unconditionally receive arguments such as -L, --extern, and --crate-type, and the specified args will simply be added to the compiler invocation. See for documentation on rustc flags. This command requires that only one target is being compiled when additional arguments are provided. If more than one target is available for the current package the filters of --lib, --bin, etc, must be used to select which target is compiled. To pass flags to all compiler processes spawned by Cargo, use the RUSTFLAGS environment variable or the build.rustflags config value . OPTIONS Package Selection By default, the package in the current working directory is selected. The -p flag can be used to choose a different package in a workspace. -p spec, --package spec The package to build. See cargo-pkgid(1) for the SPEC format. Target Selection When no target selection options are given, cargo rustc will build all binary and library targets of the selected package. 
Binary targets are automatically built if there is an integration test or benchmark being selected to build. This allows an integration test to execute the binary to exercise and test its behavior. The CARGO_BIN_EXE_ environment variable is set when the integration test is built so that it can use the env macro to locate the executable. Passing target selection flags will build only the specified targets. Note that --bin, --example, --test and --bench flags also support common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each glob pattern. --lib Build the package's library. --bin name... Build the specified binary. This flag may be specified multiple times and supports common Unix glob patterns. --bins Build all binary targets. --example name... Build the specified example. This flag may be specified multiple times and supports common Unix glob patterns. --examples Build all example targets. --test name... Build the specified integration test. This flag may be specified multiple times and supports common Unix glob patterns. --tests Build all targets in test mode that have the test = true manifest flag set. By default this includes the library and binaries built as unittests, and integration tests. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a unittest, and once as a dependency for binaries, integration tests, etc.). Targets may be enabled or disabled by setting the test flag in the manifest settings for the target. --bench name... Build the specified benchmark. This flag may be specified multiple times and supports common Unix glob patterns. --benches Build all targets in benchmark mode that have the bench = true manifest flag set. By default this includes the library and binaries built as benchmarks, and bench targets. 
Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a benchmark, and once as a dependency for binaries, benchmarks, etc.). Targets may be enabled or disabled by setting the bench flag in the manifest settings for the target. --all-targets Build all targets. This is equivalent to specifying --lib --bins --tests --benches --examples. Feature Selection The feature flags allow you to control which features are enabled. When no feature options are given, the default feature is activated for every selected package. See the features documentation for more details. -F features, --features features Space or comma separated list of features to activate. Features of workspace members may be enabled with package-name/feature-name syntax. This flag may be specified multiple times, which enables all specified features. --all-features Activate all available features of all selected packages. --no-default-features Do not activate the default feature of the selected packages. Compilation Options --target triple Build for the given architecture. The default is the host architecture. The general format of the triple is ---. Run rustc --print target-list for a list of supported targets. This flag may be specified multiple times. This may also be specified with the build.target config value . Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details. -r, --release Build optimized artifacts with the release profile. See also the --profile option for choosing a specific profile by name. --profile name Build with the given profile. The rustc subcommand will treat the following named profiles with special behaviors: o check β€” Builds in the same way as the cargo-check(1) command with the dev profile. 
o test β€” Builds in the same way as the cargo-test(1) command, enabling building in test mode which will enable tests and enable the test cfg option. See rustc tests for more detail. o bench β€” Builds in the same was as the cargo-bench(1) command, similar to the test profile. See the the reference for more details on profiles. --ignore-rust-version Build the target even if the selected Rust compiler is older than the required Rust version as configured in the project's rust-version field. --timings=fmts Output information how long each compilation takes, and track concurrency information over time. Accepts an optional comma-separated list of output formats; --timings without an argument will default to --timings=html. Specifying an output format (rather than the default) is unstable and requires -Zunstable-options. Valid output formats: o html (unstable, requires -Zunstable-options): Write a human-readable file cargo-timing.html to the target/cargo-timings directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine-readable timing data. o json (unstable, requires -Zunstable-options): Emit machine-readable JSON information about timing information. --crate-type crate-type Build for the given crate type. This flag accepts a comma-separated list of 1 or more crate types, of which the allowed values are the same as crate-type field in the manifest for configurating a Cargo target. See crate-type field for possible values. If the manifest contains a list, and --crate-type is provided, the command-line argument value will override what is in the manifest. This flag only works when building a lib or example library target. Output Options --target-dir directory Directory for all generated artifacts and intermediate files. 
May also be specified with the CARGO_TARGET_DIR environment variable, or the build.target-dir config value . Defaults to target in the root of the workspace. Display Options -v, --verbose Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . -q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . --message-format fmt The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values: o human (default): Display in a human-readable text format. Conflicts with short and json. o short: Emit shorter, human-readable text messages. Conflicts with human and json. o json: Emit JSON messages to stdout. See the reference for more details. Conflicts with human and short. o json-diagnostic-short: Ensure the rendered field of JSON messages contains the "short" rendering from rustc. Cannot be used with human or short. o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with human or short. o json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with human or short. Manifest Options --manifest-path path Path to the Cargo.toml file. 
By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory. --frozen, --locked Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date. These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access. --offline Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline. May also be specified with the net.offline config value . Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. Miscellaneous Options -j N, --jobs N Number of parallel jobs to run. May also be specified with the build.jobs config value . 
Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0. --keep-going Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires -Zunstable-options. --future-incompat-report Displays a future-incompat report for any future-incompatible warnings produced during execution of this command See cargo-report(1) ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. Check if your package (not including dependencies) uses unsafe code: cargo rustc --lib -- -D unsafe-code 2. Try an experimental flag on the nightly compiler, such as this which prints the size of every type: cargo rustc --lib -- -Z print-type-sizes 3. Override crate-type field in Cargo.toml with command-line option: cargo rustc --lib --crate-type lib,cdylib SEE ALSO cargo(1), cargo-build(1), rustc(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-rustdoc.txt000066400000000000000000000341161432416201200231570ustar00rootroot00000000000000CARGO-RUSTDOC(1) NAME cargo-rustdoc - Build a package's documentation, using specified custom flags SYNOPSIS cargo rustdoc [options] [-- args] DESCRIPTION The specified target for the current package (or package specified by -p if provided) will be documented with the specified args being passed to the final rustdoc invocation. Dependencies will not be documented as part of this command. Note that rustdoc will still unconditionally receive arguments such as -L, --extern, and --crate-type, and the specified args will simply be added to the rustdoc invocation. See for documentation on rustdoc flags. This command requires that only one target is being compiled when additional arguments are provided. 
If more than one target is available for the current package the filters of --lib, --bin, etc, must be used to select which target is compiled. To pass flags to all rustdoc processes spawned by Cargo, use the RUSTDOCFLAGS environment variable or the build.rustdocflags config value . OPTIONS Documentation Options --open Open the docs in a browser after building them. This will use your default browser unless you define another one in the BROWSER environment variable or use the doc.browser configuration option. Package Selection By default, the package in the current working directory is selected. The -p flag can be used to choose a different package in a workspace. -p spec, --package spec The package to document. See cargo-pkgid(1) for the SPEC format. Target Selection When no target selection options are given, cargo rustdoc will document all binary and library targets of the selected package. The binary will be skipped if its name is the same as the lib target. Binaries are skipped if they have required-features that are missing. Passing target selection flags will document only the specified targets. Note that --bin, --example, --test and --bench flags also support common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each glob pattern. --lib Document the package's library. --bin name... Document the specified binary. This flag may be specified multiple times and supports common Unix glob patterns. --bins Document all binary targets. --example name... Document the specified example. This flag may be specified multiple times and supports common Unix glob patterns. --examples Document all example targets. --test name... Document the specified integration test. This flag may be specified multiple times and supports common Unix glob patterns. --tests Document all targets in test mode that have the test = true manifest flag set. 
By default this includes the library and binaries built as unittests, and integration tests. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a unittest, and once as a dependency for binaries, integration tests, etc.). Targets may be enabled or disabled by setting the test flag in the manifest settings for the target. --bench name... Document the specified benchmark. This flag may be specified multiple times and supports common Unix glob patterns. --benches Document all targets in benchmark mode that have the bench = true manifest flag set. By default this includes the library and binaries built as benchmarks, and bench targets. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a benchmark, and once as a dependency for binaries, benchmarks, etc.). Targets may be enabled or disabled by setting the bench flag in the manifest settings for the target. --all-targets Document all targets. This is equivalent to specifying --lib --bins --tests --benches --examples. Feature Selection The feature flags allow you to control which features are enabled. When no feature options are given, the default feature is activated for every selected package. See the features documentation for more details. -F features, --features features Space or comma separated list of features to activate. Features of workspace members may be enabled with package-name/feature-name syntax. This flag may be specified multiple times, which enables all specified features. --all-features Activate all available features of all selected packages. --no-default-features Do not activate the default feature of the selected packages. Compilation Options --target triple Document for the given architecture. The default is the host architecture. The general format of the triple is ---. Run rustc --print target-list for a list of supported targets. This flag may be specified multiple times. 
This may also be specified with the build.target config value . Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details. -r, --release Document optimized artifacts with the release profile. See also the --profile option for choosing a specific profile by name. --profile name Document with the given profile. See the the reference for more details on profiles. --ignore-rust-version Document the target even if the selected Rust compiler is older than the required Rust version as configured in the project's rust-version field. --timings=fmts Output information how long each compilation takes, and track concurrency information over time. Accepts an optional comma-separated list of output formats; --timings without an argument will default to --timings=html. Specifying an output format (rather than the default) is unstable and requires -Zunstable-options. Valid output formats: o html (unstable, requires -Zunstable-options): Write a human-readable file cargo-timing.html to the target/cargo-timings directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine-readable timing data. o json (unstable, requires -Zunstable-options): Emit machine-readable JSON information about timing information. Output Options --target-dir directory Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the build.target-dir config value . Defaults to target in the root of the workspace. Display Options -v, --verbose Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . 
-q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . --message-format fmt The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values: o human (default): Display in a human-readable text format. Conflicts with short and json. o short: Emit shorter, human-readable text messages. Conflicts with human and json. o json: Emit JSON messages to stdout. See the reference for more details. Conflicts with human and short. o json-diagnostic-short: Ensure the rendered field of JSON messages contains the "short" rendering from rustc. Cannot be used with human or short. o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with human or short. o json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with human or short. Manifest Options --manifest-path path Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory. --frozen, --locked Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date. 
These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access. --offline Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline. May also be specified with the net.offline config value . Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. Miscellaneous Options -j N, --jobs N Number of parallel jobs to run. May also be specified with the build.jobs config value . Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0. --keep-going Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires -Zunstable-options. 
ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. Build documentation with custom CSS included from a given file: cargo rustdoc --lib -- --extend-css extra.css SEE ALSO cargo(1), cargo-doc(1), rustdoc(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-search.txt000066400000000000000000000062761432416201200227470ustar00rootroot00000000000000CARGO-SEARCH(1) NAME cargo-search - Search packages in crates.io SYNOPSIS cargo search [options] [query...] DESCRIPTION This performs a textual search for crates on . The matching crates will be displayed along with their description in TOML format suitable for copying into a Cargo.toml manifest. OPTIONS Search Options --limit limit Limit the number of results (default: 10, max: 100). --index index The URL of the registry index to use. --registry registry Name of the registry to use. Registry names are defined in Cargo config files . If not specified, the default registry is used, which is defined by the registry.default config key which defaults to crates-io. Display Options -v, --verbose Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . -q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). 
See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. Search for a package from crates.io: cargo search serde SEE ALSO cargo(1), cargo-install(1), cargo-publish(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-test.txt000066400000000000000000000461601432416201200224550ustar00rootroot00000000000000CARGO-TEST(1) NAME cargo-test - Execute unit and integration tests of a package SYNOPSIS cargo test [options] [testname] [-- test-options] DESCRIPTION Compile and execute unit, integration, and documentation tests. The test filtering argument TESTNAME and all the arguments following the two dashes (--) are passed to the test binaries and thus to libtest (rustc's built in unit-test and micro-benchmarking framework). If you're passing arguments to both Cargo and the binary, the ones after -- go to the binary, the ones before go to Cargo. For details about libtest's arguments see the output of cargo test -- --help and check out the rustc book's chapter on how tests work at . As an example, this will filter for tests with foo in their name and run them on 3 threads in parallel: cargo test foo -- --test-threads 3 Tests are built with the --test option to rustc which creates a special executable by linking your code with libtest. The executable automatically runs all functions annotated with the #[test] attribute in multiple threads. 
#[bench] annotated functions will also be run with one iteration to verify that they are functional. If the package contains multiple test targets, each target compiles to a special executable as aforementioned, and then is run serially. The libtest harness may be disabled by setting harness = false in the target manifest settings, in which case your code will need to provide its own main function to handle running tests. Documentation tests Documentation tests are also run by default, which is handled by rustdoc. It extracts code samples from documentation comments of the library target, and then executes them. Different from normal test targets, each code block compiles to a doctest executable on the fly with rustc. These executables run in parallel in separate processes. The compilation of a code block is in fact a part of test function controlled by libtest, so some options such as --jobs might not take effect. Note that this execution model of doctests is not guaranteed and may change in the future; beware of depending on it. See the rustdoc book for more information on writing doc tests. OPTIONS Test Options --no-run Compile, but don't run tests. --no-fail-fast Run all tests regardless of failure. Without this flag, Cargo will exit after the first executable fails. The Rust test harness will run all tests within the executable to completion, this flag only applies to the executable as a whole. Package Selection By default, when no package selection options are given, the packages selected depend on the selected manifest file (based on the current working directory if --manifest-path is not given). If the manifest is the root of a workspace then the workspaces default members are selected, otherwise only the package defined by the manifest will be selected. The default members of a workspace can be set explicitly with the workspace.default-members key in the root manifest. 
If this is not set, a virtual workspace will include all workspace members (equivalent to passing --workspace), and a non-virtual workspace will include only the root crate itself. -p spec..., --package spec... Test only the specified packages. See cargo-pkgid(1) for the SPEC format. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. --workspace Test all members in the workspace. --all Deprecated alias for --workspace. --exclude SPEC... Exclude the specified packages. Must be used in conjunction with the --workspace flag. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. Target Selection When no target selection options are given, cargo test will build the following targets of the selected packages: o lib β€” used to link with binaries, examples, integration tests, and doc tests o bins (only if integration tests are built and required features are available) o examples β€” to ensure they compile o lib as a unit test o bins as unit tests o integration tests o doc tests for the lib target The default behavior can be changed by setting the test flag for the target in the manifest settings. Setting examples to test = true will build and run the example as a test. Setting targets to test = false will stop them from being tested by default. Target selection options that take a target by name ignore the test flag and will always test the given target. Doc tests for libraries may be disabled by setting doctest = false for the library in the manifest. Binary targets are automatically built if there is an integration test or benchmark being selected to test. 
This allows an integration test to execute the binary to exercise and test its behavior. The CARGO_BIN_EXE_ environment variable is set when the integration test is built so that it can use the env macro to locate the executable. Passing target selection flags will test only the specified targets. Note that --bin, --example, --test and --bench flags also support common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each glob pattern. --lib Test the package's library. --bin name... Test the specified binary. This flag may be specified multiple times and supports common Unix glob patterns. --bins Test all binary targets. --example name... Test the specified example. This flag may be specified multiple times and supports common Unix glob patterns. --examples Test all example targets. --test name... Test the specified integration test. This flag may be specified multiple times and supports common Unix glob patterns. --tests Test all targets in test mode that have the test = true manifest flag set. By default this includes the library and binaries built as unittests, and integration tests. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a unittest, and once as a dependency for binaries, integration tests, etc.). Targets may be enabled or disabled by setting the test flag in the manifest settings for the target. --bench name... Test the specified benchmark. This flag may be specified multiple times and supports common Unix glob patterns. --benches Test all targets in benchmark mode that have the bench = true manifest flag set. By default this includes the library and binaries built as benchmarks, and bench targets. 
Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a benchmark, and once as a dependency for binaries, benchmarks, etc.). Targets may be enabled or disabled by setting the bench flag in the manifest settings for the target. --all-targets Test all targets. This is equivalent to specifying --lib --bins --tests --benches --examples. --doc Test only the library's documentation. This cannot be mixed with other target options. Feature Selection The feature flags allow you to control which features are enabled. When no feature options are given, the default feature is activated for every selected package. See the features documentation for more details. -F features, --features features Space or comma separated list of features to activate. Features of workspace members may be enabled with package-name/feature-name syntax. This flag may be specified multiple times, which enables all specified features. --all-features Activate all available features of all selected packages. --no-default-features Do not activate the default feature of the selected packages. Compilation Options --target triple Test for the given architecture. The default is the host architecture. The general format of the triple is ---. Run rustc --print target-list for a list of supported targets. This flag may be specified multiple times. This may also be specified with the build.target config value . Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details. -r, --release Test optimized artifacts with the release profile. See also the --profile option for choosing a specific profile by name. --profile name Test with the given profile. See the the reference for more details on profiles. 
--ignore-rust-version Test the target even if the selected Rust compiler is older than the required Rust version as configured in the project's rust-version field. --timings=fmts Output information how long each compilation takes, and track concurrency information over time. Accepts an optional comma-separated list of output formats; --timings without an argument will default to --timings=html. Specifying an output format (rather than the default) is unstable and requires -Zunstable-options. Valid output formats: o html (unstable, requires -Zunstable-options): Write a human-readable file cargo-timing.html to the target/cargo-timings directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine-readable timing data. o json (unstable, requires -Zunstable-options): Emit machine-readable JSON information about timing information. Output Options --target-dir directory Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the build.target-dir config value . Defaults to target in the root of the workspace. Display Options By default the Rust test harness hides output from test execution to keep results readable. Test output can be recovered (e.g., for debugging) by passing --nocapture to the test binaries: cargo test -- --nocapture -v, --verbose Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . -q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. 
o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . --message-format fmt The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values: o human (default): Display in a human-readable text format. Conflicts with short and json. o short: Emit shorter, human-readable text messages. Conflicts with human and json. o json: Emit JSON messages to stdout. See the reference for more details. Conflicts with human and short. o json-diagnostic-short: Ensure the rendered field of JSON messages contains the "short" rendering from rustc. Cannot be used with human or short. o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with human or short. o json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with human or short. Manifest Options --manifest-path path Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory. --frozen, --locked Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date. These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access. --offline Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. 
With this flag, Cargo will attempt to proceed without the network if possible. Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline. May also be specified with the net.offline config value . Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. Miscellaneous Options The --jobs argument affects the building of the test executable but does not affect how many threads are used when running the tests. The Rust test harness includes an option to control the number of threads used: cargo test -j 2 -- --test-threads=2 -j N, --jobs N Number of parallel jobs to run. May also be specified with the build.jobs config value . Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0. --keep-going Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires -Zunstable-options. 
--future-incompat-report Displays a future-incompat report for any future-incompatible warnings produced during execution of this command See cargo-report(1) ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. Execute all the unit and integration tests of the current package: cargo test 2. Run only tests whose names match against a filter string: cargo test name_filter 3. Run only a specific test within a specific integration test: cargo test --test int_test_name -- modname::test_name SEE ALSO cargo(1), cargo-bench(1), types of tests , how to write tests cargo-0.66.0/src/doc/man/generated_txt/cargo-tree.txt000066400000000000000000000353231432416201200224340ustar00rootroot00000000000000CARGO-TREE(1) NAME cargo-tree - Display a tree visualization of a dependency graph SYNOPSIS cargo tree [options] DESCRIPTION This command will display a tree of dependencies to the terminal. An example of a simple project that depends on the "rand" package: myproject v0.1.0 (/myproject) └── rand v0.7.3 β”œβ”€β”€ getrandom v0.1.14 β”‚ β”œβ”€β”€ cfg-if v0.1.10 β”‚ └── libc v0.2.68 β”œβ”€β”€ libc v0.2.68 (*) β”œβ”€β”€ rand_chacha v0.2.2 β”‚ β”œβ”€β”€ ppv-lite86 v0.2.6 β”‚ └── rand_core v0.5.1 β”‚ └── getrandom v0.1.14 (*) └── rand_core v0.5.1 (*) [build-dependencies] └── cc v1.0.50 Packages marked with (*) have been "de-duplicated". The dependencies for the package have already been shown elsewhere in the graph, and so are not repeated. Use the --no-dedupe option to repeat the duplicates. The -e flag can be used to select the dependency kinds to display. The "features" kind changes the output to display the features enabled by each dependency. 
For example, cargo tree -e features: myproject v0.1.0 (/myproject) └── log feature "serde" └── log v0.4.8 β”œβ”€β”€ serde v1.0.106 └── cfg-if feature "default" └── cfg-if v0.1.10 In this tree, myproject depends on log with the serde feature. log in turn depends on cfg-if with "default" features. When using -e features it can be helpful to use -i flag to show how the features flow into a package. See the examples below for more detail. OPTIONS Tree Options -i spec, --invert spec Show the reverse dependencies for the given package. This flag will invert the tree and display the packages that depend on the given package. Note that in a workspace, by default it will only display the package's reverse dependencies inside the tree of the workspace member in the current directory. The --workspace flag can be used to extend it so that it will show the package's reverse dependencies across the entire workspace. The -p flag can be used to display the package's reverse dependencies only with the subtree of the package given to -p. --prune spec Prune the given package from the display of the dependency tree. --depth depth Maximum display depth of the dependency tree. A depth of 1 displays the direct dependencies, for example. --no-dedupe Do not de-duplicate repeated dependencies. Usually, when a package has already displayed its dependencies, further occurrences will not re-display its dependencies, and will include a (*) to indicate it has already been shown. This flag will cause those duplicates to be repeated. -d, --duplicates Show only dependencies which come in multiple versions (implies --invert). When used with the -p flag, only shows duplicates within the subtree of the given package. It can be beneficial for build times and executable sizes to avoid building that same package multiple times. This flag can help identify the offending packages. 
You can then investigate if the package that depends on the duplicate with the older version can be updated to the newer version so that only one instance is built. -e kinds, --edges kinds The dependency kinds to display. Takes a comma separated list of values: o all β€” Show all edge kinds. o normal β€” Show normal dependencies. o build β€” Show build dependencies. o dev β€” Show development dependencies. o features β€” Show features enabled by each dependency. If this is the only kind given, then it will automatically include the other dependency kinds. o no-normal β€” Do not include normal dependencies. o no-build β€” Do not include build dependencies. o no-dev β€” Do not include development dependencies. o no-proc-macro β€” Do not include procedural macro dependencies. The normal, build, dev, and all dependency kinds cannot be mixed with no-normal, no-build, or no-dev dependency kinds. The default is normal,build,dev. --target triple Filter dependencies matching the given target-triple. The default is the host platform. Use the value all to include all targets. Tree Formatting Options --charset charset Chooses the character set to use for the tree. Valid values are "utf8" or "ascii". Default is "utf8". -f format, --format format Set the format string for each package. The default is "{p}". This is an arbitrary string which will be used to display each package. The following strings will be replaced with the corresponding value: o {p} β€” The package name. o {l} β€” The package license. o {r} β€” The package repository URL. o {f} β€” Comma-separated list of package features that are enabled. o {lib} β€” The name, as used in a use statement, of the package's library. --prefix prefix Sets how each line is displayed. The prefix value can be one of: o indent (default) β€” Shows each line indented as a tree. o depth β€” Show as a list, with the numeric depth printed before each entry. o none β€” Show as a flat list. 
Package Selection By default, when no package selection options are given, the packages selected depend on the selected manifest file (based on the current working directory if --manifest-path is not given). If the manifest is the root of a workspace then the workspaces default members are selected, otherwise only the package defined by the manifest will be selected. The default members of a workspace can be set explicitly with the workspace.default-members key in the root manifest. If this is not set, a virtual workspace will include all workspace members (equivalent to passing --workspace), and a non-virtual workspace will include only the root crate itself. -p spec..., --package spec... Display only the specified packages. See cargo-pkgid(1) for the SPEC format. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. --workspace Display all members in the workspace. --exclude SPEC... Exclude the specified packages. Must be used in conjunction with the --workspace flag. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. Manifest Options --manifest-path path Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory. --frozen, --locked Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date. 
These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access. --offline Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline. May also be specified with the net.offline config value . Feature Selection The feature flags allow you to control which features are enabled. When no feature options are given, the default feature is activated for every selected package. See the features documentation for more details. -F features, --features features Space or comma separated list of features to activate. Features of workspace members may be enabled with package-name/feature-name syntax. This flag may be specified multiple times, which enables all specified features. --all-features Activate all available features of all selected packages. --no-default-features Do not activate the default feature of the selected packages. Display Options -v, --verbose Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . -q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. 
May also be specified with the term.color config value . Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. Display the tree for the package in the current directory: cargo tree 2. Display all the packages that depend on the syn package: cargo tree -i syn 3. Show the features enabled on each package: cargo tree --format "{p} {f}" 4. Show all packages that are built multiple times. This can happen if multiple semver-incompatible versions appear in the tree (like 1.0.0 and 2.0.0). cargo tree -d 5. Explain why features are enabled for the syn package: cargo tree -e features -i syn The -e features flag is used to show features. The -i flag is used to invert the graph so that it displays the packages that depend on syn. 
An example of what this would display: syn v1.0.17 β”œβ”€β”€ syn feature "clone-impls" β”‚ └── syn feature "default" β”‚ └── rustversion v1.0.2 β”‚ └── rustversion feature "default" β”‚ └── myproject v0.1.0 (/myproject) β”‚ └── myproject feature "default" (command-line) β”œβ”€β”€ syn feature "default" (*) β”œβ”€β”€ syn feature "derive" β”‚ └── syn feature "default" (*) β”œβ”€β”€ syn feature "full" β”‚ └── rustversion v1.0.2 (*) β”œβ”€β”€ syn feature "parsing" β”‚ └── syn feature "default" (*) β”œβ”€β”€ syn feature "printing" β”‚ └── syn feature "default" (*) β”œβ”€β”€ syn feature "proc-macro" β”‚ └── syn feature "default" (*) └── syn feature "quote" β”œβ”€β”€ syn feature "printing" (*) └── syn feature "proc-macro" (*) To read this graph, you can follow the chain for each feature from the root to see why it is included. For example, the "full" feature is added by the rustversion crate which is included from myproject (with the default features), and myproject is the package selected on the command-line. All of the other syn features are added by the "default" feature ("quote" is added by "printing" and "proc-macro", both of which are default features). If you're having difficulty cross-referencing the de-duplicated (*) entries, try with the --no-dedupe flag to get the full output. SEE ALSO cargo(1), cargo-metadata(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-uninstall.txt000066400000000000000000000064571432416201200235140ustar00rootroot00000000000000CARGO-UNINSTALL(1) NAME cargo-uninstall - Remove a Rust binary SYNOPSIS cargo uninstall [options] [spec...] DESCRIPTION This command removes a package installed with cargo-install(1). The spec argument is a package ID specification of the package to remove (see cargo-pkgid(1)). By default all binaries are removed for a crate but the --bin and --example flags can be used to only remove particular binaries. 
The installation root is determined, in order of precedence: o --root option o CARGO_INSTALL_ROOT environment variable o install.root Cargo config value o CARGO_HOME environment variable o $HOME/.cargo OPTIONS Install Options -p, --package spec... Package to uninstall. --bin name... Only uninstall the binary name. --root dir Directory to uninstall packages from. Display Options -v, --verbose Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . -q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. Uninstall a previously installed package. 
cargo uninstall ripgrep SEE ALSO cargo(1), cargo-install(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-update.txt000066400000000000000000000135341432416201200227570ustar00rootroot00000000000000CARGO-UPDATE(1) NAME cargo-update - Update dependencies as recorded in the local lock file SYNOPSIS cargo update [options] DESCRIPTION This command will update dependencies in the Cargo.lock file to the latest version. If the Cargo.lock file does not exist, it will be created with the latest available versions. OPTIONS Update Options -p spec..., --package spec... Update only the specified packages. This flag may be specified multiple times. See cargo-pkgid(1) for the SPEC format. If packages are specified with the -p flag, then a conservative update of the lockfile will be performed. This means that only the dependency specified by SPEC will be updated. Its transitive dependencies will be updated only if SPEC cannot be updated without updating dependencies. All other dependencies will remain locked at their currently recorded versions. If -p is not specified, all dependencies are updated. --aggressive When used with -p, dependencies of spec are forced to update as well. Cannot be used with --precise. --precise precise When used with -p, allows you to specify a specific version number to set the package to. If the package comes from a git repository, this can be a git revision (such as a SHA hash or tag). -w, --workspace Attempt to update only packages defined in the workspace. Other packages are updated only if they don't already exist in the lockfile. This option is useful for updating Cargo.lock after you've changed version numbers in Cargo.toml. --dry-run Displays what would be updated, but doesn't actually write the lockfile. Display Options -v, --verbose Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . 
-q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . Manifest Options --manifest-path path Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory. --frozen, --locked Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date. These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access. --offline Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline. May also be specified with the net.offline config value . Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. 
The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. Update all dependencies in the lockfile: cargo update 2. Update only specific dependencies: cargo update -p foo -p bar 3. Set a specific dependency to a specific version: cargo update -p foo --precise 1.2.3 SEE ALSO cargo(1), cargo-generate-lockfile(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-vendor.txt000066400000000000000000000133541432416201200227720ustar00rootroot00000000000000CARGO-VENDOR(1) NAME cargo-vendor - Vendor all dependencies locally SYNOPSIS cargo vendor [options] [path] DESCRIPTION This cargo subcommand will vendor all crates.io and git dependencies for a project into the specified directory at . After this command completes the vendor directory specified by will contain all remote sources from dependencies specified. Additional manifests beyond the default one can be specified with the -s option. The cargo vendor command will also print out the configuration necessary to use the vendored sources, which you will need to add to .cargo/config.toml. OPTIONS Vendor Options -s manifest, --sync manifest Specify an extra Cargo.toml manifest to workspaces which should also be vendored and synced to the output. May be specified multiple times. 
--no-delete Don't delete the "vendor" directory when vendoring, but rather keep all existing contents of the vendor directory --respect-source-config Instead of ignoring [source] configuration by default in .cargo/config.toml read it and use it when downloading crates from crates.io, for example --versioned-dirs Normally versions are only added to disambiguate multiple versions of the same package. This option causes all directories in the "vendor" directory to be versioned, which makes it easier to track the history of vendored packages over time, and can help with the performance of re-vendoring when only a subset of the packages have changed. Manifest Options --manifest-path path Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory. --frozen, --locked Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date. These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access. --offline Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline. May also be specified with the net.offline config value . Display Options -v, --verbose Use verbose output. 
May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . -q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. Vendor all dependencies into a local "vendor" folder cargo vendor 2. Vendor all dependencies into a local "third-party/vendor" folder cargo vendor third-party/vendor 3. 
Vendor the current workspace as well as another to "vendor" cargo vendor -s ../path/to/Cargo.toml SEE ALSO cargo(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-verify-project.txt000066400000000000000000000104351432416201200244420ustar00rootroot00000000000000CARGO-VERIFY-PROJECT(1) NAME cargo-verify-project - Check correctness of crate manifest SYNOPSIS cargo verify-project [options] DESCRIPTION This command will parse the local manifest and check its validity. It emits a JSON object with the result. A successful validation will display: {"success":"true"} An invalid workspace will display: {"invalid":"human-readable error message"} OPTIONS Display Options -v, --verbose Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . -q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . Manifest Options --manifest-path path Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory. --frozen, --locked Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date. These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access. --offline Prevents Cargo from accessing the network for any reason. 
Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline. May also be specified with the net.offline config value . Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: The workspace is OK. o 1: The workspace is invalid. EXAMPLES 1. Check the current workspace for errors: cargo verify-project SEE ALSO cargo(1), cargo-package(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-version.txt000066400000000000000000000010061432416201200231510ustar00rootroot00000000000000CARGO-VERSION(1) NAME cargo-version - Show version information SYNOPSIS cargo version [options] DESCRIPTION Displays the version of Cargo. OPTIONS -v, --verbose Display additional version information. EXAMPLES 1. Display the version: cargo version 2. The version is also available via flags: cargo --version cargo -V 3. 
Display extra version information: cargo -Vv SEE ALSO cargo(1) cargo-0.66.0/src/doc/man/generated_txt/cargo-yank.txt000066400000000000000000000106571432416201200224420ustar00rootroot00000000000000CARGO-YANK(1) NAME cargo-yank - Remove a pushed crate from the index SYNOPSIS cargo yank [options] crate@version cargo yank [options] --version version [crate] DESCRIPTION The yank command removes a previously published crate's version from the server's index. This command does not delete any data, and the crate will still be available for download via the registry's download link. Note that existing crates locked to a yanked version will still be able to download the yanked version to use it. Cargo will, however, not allow any new crates to be locked to any yanked version. This command requires you to be authenticated with either the --token option or using cargo-login(1). If the crate name is not specified, it will use the package name from the current directory. OPTIONS Yank Options --vers version, --version version The version to yank or un-yank. --undo Undo a yank, putting a version back into the index. --token token API token to use when authenticating. This overrides the token stored in the credentials file (which is created by cargo-login(1)). Cargo config environment variables can be used to override the tokens stored in the credentials file. The token for crates.io may be specified with the CARGO_REGISTRY_TOKEN environment variable. Tokens for other registries may be specified with environment variables of the form CARGO_REGISTRIES_NAME_TOKEN where NAME is the name of the registry in all capital letters. --index index The URL of the registry index to use. --registry registry Name of the registry to use. Registry names are defined in Cargo config files . If not specified, the default registry is used, which is defined by the registry.default config key which defaults to crates-io. Display Options -v, --verbose Use verbose output. 
May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . -q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. EXAMPLES 1. Yank a crate from the index: cargo yank foo@1.0.7 SEE ALSO cargo(1), cargo-login(1), cargo-publish(1) cargo-0.66.0/src/doc/man/generated_txt/cargo.txt000066400000000000000000000217401432416201200214750ustar00rootroot00000000000000CARGO(1) NAME cargo - The Rust package manager SYNOPSIS cargo [options] command [args] cargo [options] --version cargo [options] --list cargo [options] --help cargo [options] --explain code DESCRIPTION This program is a package manager and build tool for the Rust language, available at . COMMANDS Build Commands cargo-bench(1) Β Β Β Β Execute benchmarks of a package. 
cargo-build(1) Β Β Β Β Compile a package. cargo-check(1) Β Β Β Β Check a local package and all of its dependencies for errors. cargo-clean(1) Β Β Β Β Remove artifacts that Cargo has generated in the past. cargo-doc(1) Β Β Β Β Build a package's documentation. cargo-fetch(1) Β Β Β Β Fetch dependencies of a package from the network. cargo-fix(1) Β Β Β Β Automatically fix lint warnings reported by rustc. cargo-run(1) Β Β Β Β Run a binary or example of the local package. cargo-rustc(1) Β Β Β Β Compile a package, and pass extra options to the compiler. cargo-rustdoc(1) Β Β Β Β Build a package's documentation, using specified custom flags. cargo-test(1) Β Β Β Β Execute unit and integration tests of a package. Manifest Commands cargo-generate-lockfile(1) Β Β Β Β Generate Cargo.lock for a project. cargo-locate-project(1) Β Β Β Β Print a JSON representation of a Cargo.toml file's location. cargo-metadata(1) Β Β Β Β Output the resolved dependencies of a package in machine-readable format. cargo-pkgid(1) Β Β Β Β Print a fully qualified package specification. cargo-tree(1) Β Β Β Β Display a tree visualization of a dependency graph. cargo-update(1) Β Β Β Β Update dependencies as recorded in the local lock file. cargo-vendor(1) Β Β Β Β Vendor all dependencies locally. cargo-verify-project(1) Β Β Β Β Check correctness of crate manifest. Package Commands cargo-init(1) Β Β Β Β Create a new Cargo package in an existing directory. cargo-install(1) Β Β Β Β Build and install a Rust binary. cargo-new(1) Β Β Β Β Create a new Cargo package. cargo-search(1) Β Β Β Β Search packages in crates.io. cargo-uninstall(1) Β Β Β Β Remove a Rust binary. Publishing Commands cargo-login(1) Β Β Β Β Save an API token from the registry locally. cargo-owner(1) Β Β Β Β Manage the owners of a crate on the registry. cargo-package(1) Β Β Β Β Assemble the local package into a distributable tarball. cargo-publish(1) Β Β Β Β Upload a package to the registry. 
cargo-yank(1) Β Β Β Β Remove a pushed crate from the index. General Commands cargo-help(1) Β Β Β Β Display help information about Cargo. cargo-version(1) Β Β Β Β Show version information. OPTIONS Special Options -V, --version Print version info and exit. If used with --verbose, prints extra information. --list List all installed Cargo subcommands. If used with --verbose, prints extra information. --explain code Run rustc --explain CODE which will print out a detailed explanation of an error message (for example, E0004). Display Options -v, --verbose Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value . -q, --quiet Do not print cargo log messages. May also be specified with the term.quiet config value . --color when Control when colored output is used. Valid values: o auto (default): Automatically detect if color support is available on the terminal. o always: Always display colors. o never: Never display colors. May also be specified with the term.color config value . Manifest Options --frozen, --locked Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date. These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access. --offline Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. Beware that this may result in different dependency resolution than online mode. 
Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline. May also be specified with the net.offline config value . Common Options +toolchain If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work. --config KEY=VALUE or PATH Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information. -h, --help Prints help information. -Z flag Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details. ENVIRONMENT See the reference for details on environment variables that Cargo reads. EXIT STATUS o 0: Cargo succeeded. o 101: Cargo failed to complete. FILES ~/.cargo/ Β Β Β Β Default location for Cargo's "home" directory where it stores various files. The location can be changed with the CARGO_HOME environment variable. $CARGO_HOME/bin/ Β Β Β Β Binaries installed by cargo-install(1) will be located here. If using rustup , executables distributed with Rust are also located here. $CARGO_HOME/config.toml Β Β Β Β The global configuration file. See the reference for more information about configuration files. .cargo/config.toml Β Β Β Β Cargo automatically searches for a file named .cargo/config.toml in the current directory, and all parent directories. These configuration files will be merged with the global configuration file. $CARGO_HOME/credentials.toml Β Β Β Β Private authentication information for logging in to a registry. 
$CARGO_HOME/registry/ Β Β Β Β This directory contains cached downloads of the registry index and any downloaded dependencies. $CARGO_HOME/git/ Β Β Β Β This directory contains cached downloads of git dependencies. Please note that the internal structure of the $CARGO_HOME directory is not stable yet and may be subject to change. EXAMPLES 1. Build a local package and all of its dependencies: cargo build 2. Build a package with optimizations: cargo build --release 3. Run tests for a cross-compiled target: cargo test --target i686-unknown-linux-gnu 4. Create a new package that builds an executable: cargo new foobar 5. Create a package in the current directory: mkdir foo && cd foo cargo init . 6. Learn about a command's options and usage: cargo help clean BUGS See for issues. SEE ALSO rustc(1), rustdoc(1) cargo-0.66.0/src/doc/man/includes/000077500000000000000000000000001432416201200166065ustar00rootroot00000000000000cargo-0.66.0/src/doc/man/includes/description-install-root.md000066400000000000000000000003611432416201200241000ustar00rootroot00000000000000The installation root is determined, in order of precedence: - `--root` option - `CARGO_INSTALL_ROOT` environment variable - `install.root` Cargo [config value](../reference/config.html) - `CARGO_HOME` environment variable - `$HOME/.cargo` cargo-0.66.0/src/doc/man/includes/description-one-target.md000066400000000000000000000003711432416201200235170ustar00rootroot00000000000000This command requires that only one target is being compiled when additional arguments are provided. If more than one target is available for the current package the filters of `--lib`, `--bin`, etc, must be used to select which target is compiled. cargo-0.66.0/src/doc/man/includes/options-display.md000066400000000000000000000014231432416201200222660ustar00rootroot00000000000000{{#option "`-v`" "`--verbose`"}} Use verbose output. 
May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the `term.verbose` [config value](../reference/config.html). {{/option}} {{#option "`-q`" "`--quiet`"}} Do not print cargo log messages. May also be specified with the `term.quiet` [config value](../reference/config.html). {{/option}} {{#option "`--color` _when_"}} Control when colored output is used. Valid values: - `auto` (default): Automatically detect if color support is available on the terminal. - `always`: Always display colors. - `never`: Never display colors. May also be specified with the `term.color` [config value](../reference/config.html). {{/option}} cargo-0.66.0/src/doc/man/includes/options-future-incompat.md000066400000000000000000000003041432416201200237400ustar00rootroot00000000000000{{#option "`--future-incompat-report`"}} Displays a future-incompat report for any future-incompatible warnings produced during execution of this command See {{man "cargo-report" 1}} {{/option}} cargo-0.66.0/src/doc/man/includes/options-ignore-rust-version.md000066400000000000000000000003141432416201200245600ustar00rootroot00000000000000{{#option "`--ignore-rust-version`"}} {{actionverb}} the target even if the selected Rust compiler is older than the required Rust version as configured in the project's `rust-version` field. {{/option}} cargo-0.66.0/src/doc/man/includes/options-index.md000066400000000000000000000001221432416201200217230ustar00rootroot00000000000000{{#option "`--index` _index_"}} The URL of the registry index to use. {{/option}} cargo-0.66.0/src/doc/man/includes/options-jobs.md000066400000000000000000000005151432416201200215570ustar00rootroot00000000000000{{#option "`-j` _N_" "`--jobs` _N_"}} Number of parallel jobs to run. May also be specified with the `build.jobs` [config value](../reference/config.html). Defaults to the number of logical CPUs. 
If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus the provided value.
By default, Cargo searches for the `Cargo.toml` file in the current directory or any parent directory. {{/option}} cargo-0.66.0/src/doc/man/includes/options-message-format.md000066400000000000000000000022651432416201200235400ustar00rootroot00000000000000{{#option "`--message-format` _fmt_" }} The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values: - `human` (default): Display in a human-readable text format. Conflicts with `short` and `json`. - `short`: Emit shorter, human-readable text messages. Conflicts with `human` and `json`. - `json`: Emit JSON messages to stdout. See [the reference](../reference/external-tools.html#json-messages) for more details. Conflicts with `human` and `short`. - `json-diagnostic-short`: Ensure the `rendered` field of JSON messages contains the "short" rendering from rustc. Cannot be used with `human` or `short`. - `json-diagnostic-rendered-ansi`: Ensure the `rendered` field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with `human` or `short`. - `json-render-diagnostics`: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with `human` or `short`. {{/option}} cargo-0.66.0/src/doc/man/includes/options-new.md000066400000000000000000000024201432416201200214100ustar00rootroot00000000000000{{#options}} {{#option "`--bin`" }} Create a package with a binary target (`src/main.rs`). This is the default behavior. {{/option}} {{#option "`--lib`" }} Create a package with a library target (`src/lib.rs`). {{/option}} {{#option "`--edition` _edition_" }} Specify the Rust edition to use. Default is 2021. Possible values: 2015, 2018, 2021 {{/option}} {{#option "`--name` _name_" }} Set the package name. 
Defaults to the directory name. {{/option}} {{#option "`--vcs` _vcs_" }} Initialize a new VCS repository for the given version control system (git, hg, pijul, or fossil) or do not initialize any version control at all (none). If not specified, defaults to `git` or the configuration value `cargo-new.vcs`, or `none` if already inside a VCS repository. {{/option}} {{#option "`--registry` _registry_" }} This sets the `publish` field in `Cargo.toml` to the given registry name which will restrict publishing only to that registry. Registry names are defined in [Cargo config files](../reference/config.html). If not specified, the default registry defined by the `registry.default` config key is used. If the default registry is not set and `--registry` is not used, the `publish` field will not be set which means that publishing will not be restricted. {{/option}} {{/options}} cargo-0.66.0/src/doc/man/includes/options-profile-legacy-check.md000066400000000000000000000006311432416201200245760ustar00rootroot00000000000000{{#option "`--profile` _name_" }} {{actionverb}} with the given profile. As a special case, specifying the `test` profile will also enable checking in test mode which will enable checking tests and enable the `test` cfg option. See [rustc tests](https://doc.rust-lang.org/rustc/tests/index.html) for more detail. See the [the reference](../reference/profiles.html) for more details on profiles. {{/option}} cargo-0.66.0/src/doc/man/includes/options-profile.md000066400000000000000000000002471432416201200222640ustar00rootroot00000000000000{{#option "`--profile` _name_" }} {{actionverb}} with the given profile. See the [the reference](../reference/profiles.html) for more details on profiles. {{/option}} cargo-0.66.0/src/doc/man/includes/options-registry.md000066400000000000000000000004421432416201200224710ustar00rootroot00000000000000{{#option "`--registry` _registry_"}} Name of the registry to use. 
Registry names are defined in [Cargo config files](../reference/config.html). If not specified, the default registry is used, which is defined by the `registry.default` config key which defaults to `crates-io`. {{/option}} cargo-0.66.0/src/doc/man/includes/options-release.md000066400000000000000000000002651432416201200222440ustar00rootroot00000000000000{{#option "`-r`" "`--release`"}} {{actionverb}} optimized artifacts with the `release` profile. See also the `--profile` option for choosing a specific profile by name. {{/option}} cargo-0.66.0/src/doc/man/includes/options-target-dir.md000066400000000000000000000010731432416201200226640ustar00rootroot00000000000000{{#option "`--target-dir` _directory_"}} Directory for all generated artifacts and intermediate files. May also be specified with the `CARGO_TARGET_DIR` environment variable, or the `build.target-dir` [config value](../reference/config.html). {{#if temp-target-dir}} Defaults to a new temporary folder located in the temporary directory of the platform. When using `--path`, by default it will use `target` directory in the workspace of the local crate unless `--target-dir` is specified. {{else}} Defaults to `target` in the root of the workspace. {{/if}} {{/option}} cargo-0.66.0/src/doc/man/includes/options-target-triple.md000066400000000000000000000013351432416201200234060ustar00rootroot00000000000000{{#option "`--target` _triple_"}} {{actionverb}} for the given architecture. {{~#if target-default-to-all-arch}} The default is all architectures. {{~else}} The default is the host architecture. {{~/if}} The general format of the triple is `---`. Run `rustc --print target-list` for a list of supported targets. {{~#if multitarget }} This flag may be specified multiple times. {{~/if}} This may also be specified with the `build.target` [config value](../reference/config.html). Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. 
See the [build cache](../guide/build-cache.html) documentation for more details. {{/option}} cargo-0.66.0/src/doc/man/includes/options-targets-bin-auto-built.md000066400000000000000000000007641432416201200251320ustar00rootroot00000000000000Binary targets are automatically built if there is an integration test or benchmark being selected to {{lower actionverb}}. This allows an integration test to execute the binary to exercise and test its behavior. The `CARGO_BIN_EXE_` [environment variable](../reference/environment-variables.html#environment-variables-cargo-sets-for-crates) is set when the integration test is built so that it can use the [`env` macro](https://doc.rust-lang.org/std/macro.env.html) to locate the executable. cargo-0.66.0/src/doc/man/includes/options-targets-lib-bin.md000066400000000000000000000004661432416201200236120ustar00rootroot00000000000000{{#option "`--lib`" }} {{actionverb}} the package's library. {{/option}} {{#option "`--bin` _name_..." }} {{actionverb}} the specified binary. This flag may be specified multiple times and supports common Unix glob patterns. {{/option}} {{#option "`--bins`" }} {{actionverb}} all binary targets. {{/option}} cargo-0.66.0/src/doc/man/includes/options-targets.md000066400000000000000000000042201432416201200222700ustar00rootroot00000000000000Passing target selection flags will {{lower actionverb}} only the specified targets. Note that `--bin`, `--example`, `--test` and `--bench` flags also support common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each glob pattern. {{#options}} {{> options-targets-lib-bin }} {{#option "`--example` _name_..." }} {{actionverb}} the specified example. This flag may be specified multiple times and supports common Unix glob patterns. {{/option}} {{#option "`--examples`" }} {{actionverb}} all example targets. 
{{/option}} {{#option "`--test` _name_..." }} {{actionverb}} the specified integration test. This flag may be specified multiple times and supports common Unix glob patterns. {{/option}} {{#option "`--tests`" }} {{actionverb}} all targets in test mode that have the `test = true` manifest flag set. By default this includes the library and binaries built as unittests, and integration tests. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a unittest, and once as a dependency for binaries, integration tests, etc.). Targets may be enabled or disabled by setting the `test` flag in the manifest settings for the target. {{/option}} {{#option "`--bench` _name_..." }} {{actionverb}} the specified benchmark. This flag may be specified multiple times and supports common Unix glob patterns. {{/option}} {{#option "`--benches`" }} {{actionverb}} all targets in benchmark mode that have the `bench = true` manifest flag set. By default this includes the library and binaries built as benchmarks, and bench targets. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a benchmark, and once as a dependency for binaries, benchmarks, etc.). Targets may be enabled or disabled by setting the `bench` flag in the manifest settings for the target. {{/option}} {{#option "`--all-targets`" }} {{actionverb}} all targets. This is equivalent to specifying `--lib --bins --tests --benches --examples`. {{/option}} {{/options}} cargo-0.66.0/src/doc/man/includes/options-test.md000066400000000000000000000006061432416201200216020ustar00rootroot00000000000000{{#options}} {{#option "`--no-run`" }} Compile, but don't run {{nouns}}. {{/option}} {{#option "`--no-fail-fast`" }} Run all {{nouns}} regardless of failure. Without this flag, Cargo will exit after the first executable fails. 
The Rust test harness will run all {{nouns}} within the executable to completion, this flag only applies to the executable as a whole. {{/option}} {{/options}} cargo-0.66.0/src/doc/man/includes/options-timings.md000066400000000000000000000016131432416201200222740ustar00rootroot00000000000000{{#option "`--timings=`_fmts_"}} Output information how long each compilation takes, and track concurrency information over time. Accepts an optional comma-separated list of output formats; `--timings` without an argument will default to `--timings=html`. Specifying an output format (rather than the default) is unstable and requires `-Zunstable-options`. Valid output formats: - `html` (unstable, requires `-Zunstable-options`): Write a human-readable file `cargo-timing.html` to the `target/cargo-timings` directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine-readable timing data. - `json` (unstable, requires `-Zunstable-options`): Emit machine-readable JSON information about timing information. {{/option}} cargo-0.66.0/src/doc/man/includes/options-token.md000066400000000000000000000011171432416201200217410ustar00rootroot00000000000000{{#option "`--token` _token_" }} API token to use when authenticating. This overrides the token stored in the credentials file (which is created by {{man "cargo-login" 1}}). [Cargo config](../reference/config.html) environment variables can be used to override the tokens stored in the credentials file. The token for crates.io may be specified with the `CARGO_REGISTRY_TOKEN` environment variable. Tokens for other registries may be specified with environment variables of the form `CARGO_REGISTRIES_NAME_TOKEN` where `NAME` is the name of the registry in all capital letters. 
{{/option}} cargo-0.66.0/src/doc/man/includes/section-environment.md000066400000000000000000000002041432416201200231320ustar00rootroot00000000000000## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. cargo-0.66.0/src/doc/man/includes/section-exit-status.md000066400000000000000000000001131432416201200230570ustar00rootroot00000000000000## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. cargo-0.66.0/src/doc/man/includes/section-features.md000066400000000000000000000015041432416201200224100ustar00rootroot00000000000000### Feature Selection The feature flags allow you to control which features are enabled. When no feature options are given, the `default` feature is activated for every selected package. See [the features documentation](../reference/features.html#command-line-feature-options) for more details. {{#options}} {{#option "`-F` _features_" "`--features` _features_" }} Space or comma separated list of features to activate. Features of workspace members may be enabled with `package-name/feature-name` syntax. This flag may be specified multiple times, which enables all specified features. {{/option}} {{#option "`--all-features`" }} Activate all available features of all selected packages. {{/option}} {{#option "`--no-default-features`" }} Do not activate the `default` feature of the selected packages. {{/option}} {{/options}} cargo-0.66.0/src/doc/man/includes/section-options-common.md000066400000000000000000000016631432416201200235610ustar00rootroot00000000000000### Common Options {{#options}} {{#option "`+`_toolchain_"}} If Cargo has been installed with rustup, and the first argument to `cargo` begins with `+`, it will be interpreted as a rustup toolchain name (such as `+stable` or `+nightly`). See the [rustup documentation](https://rust-lang.github.io/rustup/overrides.html) for more information about how toolchain overrides work. 
{{/option}} {{#option "`--config` _KEY=VALUE_ or _PATH_"}} Overrides a Cargo configuration value. The argument should be in TOML syntax of `KEY=VALUE`, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the [command-line overrides section](../reference/config.html#command-line-overrides) for more information. {{/option}} {{#option "`-h`" "`--help`"}} Prints help information. {{/option}} {{#option "`-Z` _flag_"}} Unstable (nightly-only) flags to Cargo. Run `cargo -Z help` for details. {{/option}} {{/options}} cargo-0.66.0/src/doc/man/includes/section-options-package.md000066400000000000000000000005241432416201200236570ustar00rootroot00000000000000### Package Selection By default, the package in the current working directory is selected. The `-p` flag can be used to choose a different package in a workspace. {{#options}} {{#option "`-p` _spec_" "`--package` _spec_" }} The package to {{lower actionverb}}. See {{man "cargo-pkgid" 1}} for the SPEC format. {{/option}} {{/options}} cargo-0.66.0/src/doc/man/includes/section-package-selection.md000066400000000000000000000032731432416201200241550ustar00rootroot00000000000000### Package Selection By default, when no package selection options are given, the packages selected depend on the selected manifest file (based on the current working directory if `--manifest-path` is not given). If the manifest is the root of a workspace then the workspaces default members are selected, otherwise only the package defined by the manifest will be selected. The default members of a workspace can be set explicitly with the `workspace.default-members` key in the root manifest. If this is not set, a virtual workspace will include all workspace members (equivalent to passing `--workspace`), and a non-virtual workspace will include only the root crate itself. {{#options}} {{#option "`-p` _spec_..." "`--package` _spec_..."}} {{actionverb}} only the specified packages. 
See {{man "cargo-pkgid" 1}} for the SPEC format. This flag may be specified multiple times and supports common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. {{/option}} {{#option "`--workspace`" }} {{actionverb}} all members in the workspace. {{/option}} {{#unless noall}} {{#option "`--all`" }} Deprecated alias for `--workspace`. {{/option}} {{/unless}} {{#option "`--exclude` _SPEC_..." }} Exclude the specified packages. Must be used in conjunction with the `--workspace` flag. This flag may be specified multiple times and supports common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. {{/option}} {{/options}} cargo-0.66.0/src/doc/semver-check/000077500000000000000000000000001432416201200166015ustar00rootroot00000000000000cargo-0.66.0/src/doc/semver-check/Cargo.toml000066400000000000000000000003401432416201200205260ustar00rootroot00000000000000[package] name = "semver-check" version = "0.1.0" authors = ["Eric Huss"] edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] tempfile = "3.1.0" cargo-0.66.0/src/doc/semver-check/src/000077500000000000000000000000001432416201200173705ustar00rootroot00000000000000cargo-0.66.0/src/doc/semver-check/src/main.rs000066400000000000000000000207111432416201200206630ustar00rootroot00000000000000//! Test runner for the semver compatibility doc chapter. //! //! This extracts all the "rust" annotated code blocks and tests that they //! either fail or succeed as expected. This also checks that the examples are //! formatted correctly. //! //! An example with the word "MINOR" at the top is expected to successfully //! build against the before and after. 
Otherwise it should fail. A comment of //! "// Error:" will check that the given message appears in the error output. use std::error::Error; use std::fs; use std::path::Path; use std::process::{Command, Output}; fn main() { if let Err(e) = doit() { println!("error: {}", e); std::process::exit(1); } } const SEPARATOR: &str = "///////////////////////////////////////////////////////////"; fn doit() -> Result<(), Box> { let filename = std::env::args() .nth(1) .unwrap_or_else(|| "../src/reference/semver.md".to_string()); let contents = fs::read_to_string(filename)?; let mut lines = contents.lines().enumerate(); loop { // Find a rust block. let (block_start, run_program) = loop { match lines.next() { Some((lineno, line)) => { if line.trim().starts_with("```rust") && !line.contains("skip") { break (lineno + 1, line.contains("run-fail")); } } None => return Ok(()), } }; // Read in the code block. let mut block = Vec::new(); loop { match lines.next() { Some((_, line)) => { if line.trim() == "```" { break; } block.push(line); } None => { return Err(format!( "rust block did not end for example starting on line {}", block_start ) .into()); } } } // Split it into the separate source files. 
let parts: Vec<_> = block.split(|line| line.trim() == SEPARATOR).collect(); if parts.len() != 4 { return Err(format!( "expected 4 sections in example starting on line {}, got {}:\n{:?}", block_start, parts.len(), parts ) .into()); } let join = |part: &[&str]| { let mut result = String::new(); result.push_str("#![allow(unused)]\n#![deny(warnings)]\n"); result.push_str(&part.join("\n")); if !result.ends_with('\n') { result.push('\n'); } result }; let expect_success = parts[0][0].contains("MINOR"); println!("Running test from line {}", block_start); let result = run_test( join(parts[1]), join(parts[2]), join(parts[3]), expect_success, run_program, ); if let Err(e) = result { return Err(format!( "test failed for example starting on line {}: {}", block_start, e ) .into()); } } } const CRATE_NAME: &str = "updated_crate"; fn run_test( before: String, after: String, example: String, expect_success: bool, run_program: bool, ) -> Result<(), Box> { let tempdir = tempfile::TempDir::new()?; let before_p = tempdir.path().join("before.rs"); let after_p = tempdir.path().join("after.rs"); let example_p = tempdir.path().join("example.rs"); let check_fn = if run_program { run_check } else { compile_check }; compile_check(before, &before_p, CRATE_NAME, false, true)?; check_fn(example.clone(), &example_p, "example", true, true)?; compile_check(after, &after_p, CRATE_NAME, false, true)?; check_fn(example, &example_p, "example", true, expect_success)?; Ok(()) } fn check_formatting(path: &Path) -> Result<(), Box> { match Command::new("rustfmt") .args(&["--edition=2018", "--check"]) .arg(path) .status() { Ok(status) => { if !status.success() { return Err(format!("failed to run rustfmt: {}", status).into()); } Ok(()) } Err(e) => Err(format!("failed to run rustfmt: {}", e).into()), } } fn compile( contents: &str, path: &Path, crate_name: &str, extern_path: bool, ) -> Result> { let crate_type = if contents.contains("fn main()") { "bin" } else { "rlib" }; fs::write(path, &contents)?; 
check_formatting(path)?; let out_dir = path.parent().unwrap(); let mut cmd = Command::new("rustc"); cmd.args(&[ "--edition=2021", "--crate-type", crate_type, "--crate-name", crate_name, "--out-dir", ]); cmd.arg(&out_dir); if extern_path { let epath = out_dir.join(format!("lib{}.rlib", CRATE_NAME)); cmd.arg("--extern") .arg(format!("{}={}", CRATE_NAME, epath.display())); } cmd.arg(path); cmd.output().map_err(Into::into) } fn compile_check( mut contents: String, path: &Path, crate_name: &str, extern_path: bool, expect_success: bool, ) -> Result<(), Box> { // If the example has an error message, remove it so that it can be // compared with the actual output, and also to avoid issues with rustfmt // moving it around. let expected_error = match contents.find("// Error:") { Some(index) => { let start = contents[..index].rfind(|ch| ch != ' ').unwrap(); let end = contents[index..].find('\n').unwrap(); let error = contents[index + 9..index + end].trim().to_string(); contents.replace_range(start + 1..index + end, ""); Some(error) } None => None, }; let output = compile(&contents, path, crate_name, extern_path)?; let stderr = std::str::from_utf8(&output.stderr).unwrap(); match (output.status.success(), expect_success) { (true, true) => Ok(()), (true, false) => Err(format!( "expected failure, got success {}\n===== Contents:\n{}\n===== Output:\n{}\n", path.display(), contents, stderr ) .into()), (false, true) => Err(format!( "expected success, got error {}\n===== Contents:\n{}\n===== Output:\n{}\n", path.display(), contents, stderr ) .into()), (false, false) => { if expected_error.is_none() { return Err("failing test should have an \"// Error:\" annotation ".into()); } let expected_error = expected_error.unwrap(); if !stderr.contains(&expected_error) { Err(format!( "expected error message not found in compiler output\nExpected: {}\nGot:\n{}\n", expected_error, stderr ) .into()) } else { Ok(()) } } } } fn run_check( contents: String, path: &Path, crate_name: &str, extern_path: 
bool, expect_success: bool, ) -> Result<(), Box> { let compile_output = compile(&contents, path, crate_name, extern_path)?; if !compile_output.status.success() { let stderr = std::str::from_utf8(&compile_output.stderr).unwrap(); return Err(format!( "expected success, got error {}\n===== Contents:\n{}\n===== Output:\n{}\n", path.display(), contents, stderr ) .into()); } let binary_path = path.parent().unwrap().join(crate_name); let output = Command::new(binary_path).output()?; let stderr = std::str::from_utf8(&output.stderr).unwrap(); match (output.status.success(), expect_success) { (true, false) => Err(format!( "expected panic, got success {}\n===== Contents:\n{}\n===== Output:\n{}\n", path.display(), contents, stderr ) .into()), (false, true) => Err(format!( "expected success, got panic {}\n===== Contents:\n{}\n===== Output:\n{}\n", path.display(), contents, stderr, ) .into()), (_, _) => Ok(()), } } cargo-0.66.0/src/doc/src/000077500000000000000000000000001432416201200150145ustar00rootroot00000000000000cargo-0.66.0/src/doc/src/SUMMARY.md000066400000000000000000000101331432416201200164710ustar00rootroot00000000000000# Summary [Introduction](index.md) * [Getting Started](getting-started/index.md) * [Installation](getting-started/installation.md) * [First Steps with Cargo](getting-started/first-steps.md) * [Cargo Guide](guide/index.md) * [Why Cargo Exists](guide/why-cargo-exists.md) * [Creating a New Package](guide/creating-a-new-project.md) * [Working on an Existing Package](guide/working-on-an-existing-project.md) * [Dependencies](guide/dependencies.md) * [Package Layout](guide/project-layout.md) * [Cargo.toml vs Cargo.lock](guide/cargo-toml-vs-cargo-lock.md) * [Tests](guide/tests.md) * [Continuous Integration](guide/continuous-integration.md) * [Cargo Home](guide/cargo-home.md) * [Build Cache](guide/build-cache.md) * [Cargo Reference](reference/index.md) * [Specifying Dependencies](reference/specifying-dependencies.md) * [Overriding 
Dependencies](reference/overriding-dependencies.md) * [The Manifest Format](reference/manifest.md) * [Cargo Targets](reference/cargo-targets.md) * [Workspaces](reference/workspaces.md) * [Features](reference/features.md) * [Features Examples](reference/features-examples.md) * [Profiles](reference/profiles.md) * [Configuration](reference/config.md) * [Environment Variables](reference/environment-variables.md) * [Build Scripts](reference/build-scripts.md) * [Build Script Examples](reference/build-script-examples.md) * [Publishing on crates.io](reference/publishing.md) * [Package ID Specifications](reference/pkgid-spec.md) * [Source Replacement](reference/source-replacement.md) * [External Tools](reference/external-tools.md) * [Registries](reference/registries.md) * [Dependency Resolution](reference/resolver.md) * [SemVer Compatibility](reference/semver.md) * [Future incompat report](reference/future-incompat-report.md) * [Reporting build timings](reference/timings.md) * [Unstable Features](reference/unstable.md) * [Cargo Commands](commands/index.md) * [General Commands](commands/general-commands.md) * [cargo](commands/cargo.md) * [cargo help](commands/cargo-help.md) * [cargo version](commands/cargo-version.md) * [Build Commands](commands/build-commands.md) * [cargo bench](commands/cargo-bench.md) * [cargo build](commands/cargo-build.md) * [cargo check](commands/cargo-check.md) * [cargo clean](commands/cargo-clean.md) * [cargo doc](commands/cargo-doc.md) * [cargo fetch](commands/cargo-fetch.md) * [cargo fix](commands/cargo-fix.md) * [cargo run](commands/cargo-run.md) * [cargo rustc](commands/cargo-rustc.md) * [cargo rustdoc](commands/cargo-rustdoc.md) * [cargo test](commands/cargo-test.md) * [cargo report](commands/cargo-report.md) * [Manifest Commands](commands/manifest-commands.md) * [cargo add](commands/cargo-add.md) * [cargo generate-lockfile](commands/cargo-generate-lockfile.md) * [cargo locate-project](commands/cargo-locate-project.md) * [cargo 
metadata](commands/cargo-metadata.md) * [cargo pkgid](commands/cargo-pkgid.md) * [cargo tree](commands/cargo-tree.md) * [cargo update](commands/cargo-update.md) * [cargo vendor](commands/cargo-vendor.md) * [cargo verify-project](commands/cargo-verify-project.md) * [Package Commands](commands/package-commands.md) * [cargo init](commands/cargo-init.md) * [cargo install](commands/cargo-install.md) * [cargo new](commands/cargo-new.md) * [cargo search](commands/cargo-search.md) * [cargo uninstall](commands/cargo-uninstall.md) * [Publishing Commands](commands/publishing-commands.md) * [cargo login](commands/cargo-login.md) * [cargo owner](commands/cargo-owner.md) * [cargo package](commands/cargo-package.md) * [cargo publish](commands/cargo-publish.md) * [cargo yank](commands/cargo-yank.md) * [FAQ](faq.md) * [Appendix: Glossary](appendix/glossary.md) * [Appendix: Git Authentication](appendix/git-authentication.md) cargo-0.66.0/src/doc/src/appendix/000077500000000000000000000000001432416201200166245ustar00rootroot00000000000000cargo-0.66.0/src/doc/src/appendix/git-authentication.md000066400000000000000000000055551432416201200227600ustar00rootroot00000000000000# Git Authentication Cargo supports some forms of authentication when using git dependencies and registries. This appendix contains some information for setting up git authentication in a way that works with Cargo. If you need other authentication methods, the [`net.git-fetch-with-cli`] config value can be set to cause Cargo to execute the `git` executable to handle fetching remote repositories instead of using the built-in support. This can be enabled with the `CARGO_NET_GIT_FETCH_WITH_CLI=true` environment variable. ## HTTPS authentication HTTPS authentication requires the [`credential.helper`] mechanism. There are multiple credential helpers, and you specify the one you want to use in your global git configuration file. 
```ini # ~/.gitconfig [credential] helper = store ``` Cargo does not ask for passwords, so for most helpers you will need to give the helper the initial username/password before running Cargo. One way to do this is to run `git clone` of the private git repo and enter the username/password. > **Tip:**
> macOS users may want to consider using the osxkeychain helper.
> Windows users may want to consider using the [GCM] helper. > **Note:** Windows users will need to make sure that the `sh` shell is > available in your `PATH`. This typically is available with the Git for > Windows installation. ## SSH authentication SSH authentication requires `ssh-agent` to be running to acquire the SSH key. Make sure the appropriate environment variables are set up (`SSH_AUTH_SOCK` on most Unix-like systems), and that the correct keys are added (with `ssh-add`). Windows can use Pageant (part of [PuTTY]) or `ssh-agent`. To use `ssh-agent`, Cargo needs to use the OpenSSH that is distributed as part of Windows, as Cargo does not support the simulated Unix-domain sockets used by MinGW or Cygwin. More information about installing with Windows can be found at the [Microsoft installation documentation] and the page on [key management] has instructions on how to start `ssh-agent` and to add keys. > **Note:** Cargo does not support git's shorthand SSH URLs like > `git@example.com:user/repo.git`. Use a full SSH URL like > `ssh://git@example.com/user/repo.git`. > **Note:** SSH configuration files (like OpenSSH's `~/.ssh/config`) are not > used by Cargo's built-in SSH library. More advanced requirements should use > [`net.git-fetch-with-cli`]. 
[`credential.helper`]: https://git-scm.com/book/en/v2/Git-Tools-Credential-Storage [`net.git-fetch-with-cli`]: ../reference/config.md#netgit-fetch-with-cli [GCM]: https://github.com/microsoft/Git-Credential-Manager-Core/ [PuTTY]: https://www.chiark.greenend.org.uk/~sgtatham/putty/ [Microsoft installation documentation]: https://docs.microsoft.com/en-us/windows-server/administration/openssh/openssh_install_firstuse [key management]: https://docs.microsoft.com/en-us/windows-server/administration/openssh/openssh_keymanagement cargo-0.66.0/src/doc/src/appendix/glossary.md000066400000000000000000000266321432416201200210220ustar00rootroot00000000000000# Glossary ### Artifact An *artifact* is the file or set of files created as a result of the compilation process. This includes linkable libraries, executable binaries, and generated documentation. ### Cargo *Cargo* is the Rust [*package manager*](#package-manager), and the primary topic of this book. ### Cargo.lock See [*lock file*](#lock-file). ### Cargo.toml See [*manifest*](#manifest). ### Crate A Rust *crate* is either a library or an executable program, referred to as either a *library crate* or a *binary crate*, respectively. Every [target](#target) defined for a Cargo [package](#package) is a *crate*. Loosely, the term *crate* may refer to either the source code of the target or to the compiled artifact that the target produces. It may also refer to a compressed package fetched from a [registry](#registry). The source code for a given crate may be subdivided into [*modules*](#module). ### Edition A *Rust edition* is a developmental landmark of the Rust language. The [edition of a package][edition-field] is specified in the `Cargo.toml` [manifest](#manifest), and individual targets can specify which edition they use. See the [Edition Guide] for more information. ### Feature The meaning of *feature* depends on the context: - A [*feature*][feature] is a named flag which allows for conditional compilation. 
A feature can refer to an optional dependency, or an arbitrary name defined in a `Cargo.toml` [manifest](#manifest) that can be checked within source code. - Cargo has [*unstable feature flags*][cargo-unstable] which can be used to enable experimental behavior of Cargo itself. - The Rust compiler and Rustdoc have their own unstable feature flags (see [The Unstable Book][unstable-book] and [The Rustdoc Book][rustdoc-unstable]). - CPU targets have [*target features*][target-feature] which specify capabilities of a CPU. ### Index The *index* is the searchable list of [*crates*](#crate) in a [*registry*](#registry). ### Lock file The `Cargo.lock` *lock file* is a file that captures the exact version of every dependency used in a [*workspace*](#workspace) or [*package*](#package). It is automatically generated by Cargo. See [Cargo.toml vs Cargo.lock]. ### Manifest A [*manifest*][manifest] is a description of a [package](#package) or a [workspace](#workspace) in a file named `Cargo.toml`. A [*virtual manifest*][virtual] is a `Cargo.toml` file that only describes a workspace, and does not include a package. ### Member A *member* is a [*package*](#package) that belongs to a [*workspace*](#workspace). ### Module Rust's module system is used to organize code into logical units called *modules*, which provide isolated namespaces within the code. The source code for a given [crate](#crate) may be subdivided into one or more separate modules. This is usually done to organize the code into areas of related functionality or to control the visible scope (public/private) of symbols within the source (structs, functions, and so on). A [`Cargo.toml`](#manifest) file is primarily concerned with the [package](#package) it defines, its crates, and the packages of the crates on which they depend. Nevertheless, you will see the term "module" often when working with Rust, so you should understand its relationship to a given crate. 
### Package A *package* is a collection of source files and a `Cargo.toml` [*manifest*](#manifest) file which describes the package. A package has a name and version which is used for specifying dependencies between packages. A package contains multiple [*targets*](#target), each of which is a [*crate*](#crate). The `Cargo.toml` file describes the type of the crates (binary or library) within the package, along with some metadata about each one -- how each is to be built, what their direct dependencies are, etc., as described throughout this book. The *package root* is the directory where the package's `Cargo.toml` manifest is located. (Compare with [*workspace root*](#workspace).) The [*package ID specification*][pkgid-spec], or *SPEC*, is a string used to uniquely reference a specific version of a package from a specific source. Small to medium sized Rust projects will only need a single package, though it is common for them to have multiple crates. Larger projects may involve multiple packages, in which case Cargo [*workspaces*](#workspace) can be used to manage common dependencies and other related metadata between the packages. ### Package manager Broadly speaking, a *package manager* is a program (or collection of related programs) in a software ecosystem that automates the process of obtaining, installing, and upgrading artifacts. Within a programming language ecosystem, a package manager is a developer-focused tool whose primary functionality is to download library artifacts and their dependencies from some central repository; this capability is often combined with the ability to perform software builds (by invoking the language-specific compiler). [*Cargo*](#cargo) is the package manager within the Rust ecosystem. 
Cargo downloads your Rust [package](#package)’s dependencies ([*artifacts*](#artifact) known as [*crates*](#crate)), compiles your packages, makes distributable packages, and (optionally) uploads them to [crates.io][], the Rust community’s [*package registry*](#registry). ### Package registry See [*registry*](#registry). ### Project Another name for a [package](#package). ### Registry A *registry* is a service that contains a collection of downloadable [*crates*](#crate) that can be installed or used as dependencies for a [*package*](#package). The default registry in the Rust ecosystem is [crates.io](https://crates.io). The registry has an [*index*](#index) which contains a list of all crates, and tells Cargo how to download the crates that are needed. ### Source A *source* is a provider that contains [*crates*](#crate) that may be included as dependencies for a [*package*](#package). There are several kinds of sources: - **Registry source** β€” See [registry](#registry). - **Local registry source** β€” A set of crates stored as compressed files on the filesystem. See [Local Registry Sources]. - **Directory source** β€” A set of crates stored as uncompressed files on the filesystem. See [Directory Sources]. - **Path source** β€” An individual package located on the filesystem (such as a [path dependency]) or a set of multiple packages (such as [path overrides]). - **Git source** β€” Packages located in a git repository (such as a [git dependency] or [git source]). See [Source Replacement] for more information. ### Spec See [package ID specification](#package). ### Target The meaning of the term *target* depends on the context: - **Cargo Target** β€” Cargo [*packages*](#package) consist of *targets* which correspond to [*artifacts*](#artifact) that will be produced. Packages can have library, binary, example, test, and benchmark targets. 
The [list of targets][targets] are configured in the `Cargo.toml` [*manifest*](#manifest), often inferred automatically by the [directory layout] of the source files. - **Target Directory** β€” Cargo places all built artifacts and intermediate files in the *target* directory. By default this is a directory named `target` at the [*workspace*](#workspace) root, or the package root if not using a workspace. The directory may be changed with the `--target-dir` command-line option, the `CARGO_TARGET_DIR` [environment variable], or the `build.target-dir` [config option]. - **Target Architecture** β€” The OS and machine architecture for the built artifacts are typically referred to as a *target*. - **Target Triple** β€” A triple is a specific format for specifying a target architecture. Triples may be referred to as a *target triple* which is the architecture for the artifact produced, and the *host triple* which is the architecture that the compiler is running on. The target triple can be specified with the `--target` command-line option or the `build.target` [config option]. The general format of the triple is `---` where: - `arch` = The base CPU architecture, for example `x86_64`, `i686`, `arm`, `thumb`, `mips`, etc. - `sub` = The CPU sub-architecture, for example `arm` has `v7`, `v7s`, `v5te`, etc. - `vendor` = The vendor, for example `unknown`, `apple`, `pc`, `nvidia`, etc. - `sys` = The system name, for example `linux`, `windows`, `darwin`, etc. `none` is typically used for bare-metal without an OS. - `abi` = The ABI, for example `gnu`, `android`, `eabi`, etc. Some parameters may be omitted. Run `rustc --print target-list` for a list of supported targets. ### Test Targets Cargo *test targets* generate binaries which help verify proper operation and correctness of code. There are two types of test artifacts: * **Unit test** β€” A *unit test* is an executable binary compiled directly from a library or a binary target. 
It contains the entire contents of the library or binary code, and runs `#[test]` annotated functions, intended to verify individual units of code. * **Integration test target** β€” An [*integration test target*][integration-tests] is an executable binary compiled from a *test target* which is a distinct [*crate*](#crate) whose source is located in the `tests` directory or specified by the [`[[test]]` table][targets] in the `Cargo.toml` [*manifest*](#manifest). It is intended to only test the public API of a library, or execute a binary to verify its operation. ### Workspace A [*workspace*][workspace] is a collection of one or more [*packages*](#package) that share common dependency resolution (with a shared `Cargo.lock` [*lock file*](#lock-file)), output directory, and various settings such as profiles. A [*virtual workspace*][virtual] is a workspace where the root `Cargo.toml` [*manifest*](#manifest) does not define a package, and only lists the workspace [*members*](#member). The *workspace root* is the directory where the workspace's `Cargo.toml` manifest is located. (Compare with [*package root*](#package).) 
[Cargo.toml vs Cargo.lock]: ../guide/cargo-toml-vs-cargo-lock.md [Directory Sources]: ../reference/source-replacement.md#directory-sources [Local Registry Sources]: ../reference/source-replacement.md#local-registry-sources [Source Replacement]: ../reference/source-replacement.md [cargo-unstable]: ../reference/unstable.md [config option]: ../reference/config.md [crates.io]: https://crates.io/ [directory layout]: ../guide/project-layout.md [edition guide]: ../../edition-guide/index.html [edition-field]: ../reference/manifest.md#the-edition-field [environment variable]: ../reference/environment-variables.md [feature]: ../reference/features.md [git dependency]: ../reference/specifying-dependencies.md#specifying-dependencies-from-git-repositories [git source]: ../reference/source-replacement.md [integration-tests]: ../reference/cargo-targets.md#integration-tests [manifest]: ../reference/manifest.md [path dependency]: ../reference/specifying-dependencies.md#specifying-path-dependencies [path overrides]: ../reference/overriding-dependencies.md#paths-overrides [pkgid-spec]: ../reference/pkgid-spec.md [rustdoc-unstable]: https://doc.rust-lang.org/nightly/rustdoc/unstable-features.html [target-feature]: ../../reference/attributes/codegen.html#the-target_feature-attribute [targets]: ../reference/cargo-targets.md#configuring-a-target [unstable-book]: https://doc.rust-lang.org/nightly/unstable-book/index.html [virtual]: ../reference/workspaces.md [workspace]: ../reference/workspaces.md cargo-0.66.0/src/doc/src/commands/000077500000000000000000000000001432416201200166155ustar00rootroot00000000000000cargo-0.66.0/src/doc/src/commands/build-commands.md000066400000000000000000000006111432416201200220330ustar00rootroot00000000000000# Build Commands * [cargo bench](cargo-bench.md) * [cargo build](cargo-build.md) * [cargo check](cargo-check.md) * [cargo clean](cargo-clean.md) * [cargo doc](cargo-doc.md) * [cargo fetch](cargo-fetch.md) * [cargo fix](cargo-fix.md) * [cargo 
run](cargo-run.md) * [cargo rustc](cargo-rustc.md) * [cargo rustdoc](cargo-rustdoc.md) * [cargo test](cargo-test.md) * [cargo report](cargo-report.md) cargo-0.66.0/src/doc/src/commands/cargo-add.md000066400000000000000000000236231432416201200207660ustar00rootroot00000000000000# cargo-add(1) ## NAME cargo-add - Add dependencies to a Cargo.toml manifest file ## SYNOPSIS `cargo add` [_options_] _crate_...\ `cargo add` [_options_] `--path` _path_\ `cargo add` [_options_] `--git` _url_ [_crate_...]\ ## DESCRIPTION This command can add or modify dependencies. The source for the dependency can be specified with: * _crate_`@`_version_: Fetch from a registry with a version constraint of "_version_" * `--path` _path_: Fetch from the specified _path_ * `--git` _url_: Pull from a git repo at _url_ If no source is specified, then a best effort will be made to select one, including: * Existing dependencies in other tables (like `dev-dependencies`) * Workspace members * Latest release in the registry When you add a package that is already present, the existing entry will be updated with the flags specified. Upon successful invocation, the enabled (`+`) and disabled (`-`) [features] of the specified dependency will be listed in the command's output. [features]: ../reference/features.md ## OPTIONS ### Source options

--git url
Git URL to add the specified crate from.
--branch branch
Branch to use when adding from git.
--tag tag
Tag to use when adding from git.
--rev sha
Specific commit to use when adding from git.
--path path
Filesystem path to local crate to add.
--registry registry
Name of the registry to use. Registry names are defined in Cargo config files. If not specified, the default registry is used, which is defined by the registry.default config key which defaults to crates-io.
### Section options
--dev
Add as a development dependency.
--build
Add as a build dependency.
--target target
Add as a dependency to the given target platform.
### Dependency options
--rename name
Rename the dependency.
--optional
Mark the dependency as optional.
--no-optional
Mark the dependency as required.
--no-default-features
Disable the default features.
--default-features
Re-enable the default features.
--features features
Space or comma separated list of features to activate. When adding multiple crates, the features for a specific crate may be enabled with package-name/feature-name syntax. This flag may be specified multiple times, which enables all specified features.
### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

### Manifest Options
--manifest-path path
Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory.
### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Add `regex` as a dependency cargo add regex 2. Add `trybuild` as a dev-dependency cargo add --dev trybuild 3. Add an older version of `nom` as a dependency cargo add nom@5 4. Add support for serializing data structures to json with `derive`s cargo add serde serde_json -F serde/derive ## SEE ALSO [cargo(1)](cargo.html) cargo-0.66.0/src/doc/src/commands/cargo-bench.md000066400000000000000000000575611432416201200213250ustar00rootroot00000000000000# cargo-bench(1) ## NAME cargo-bench - Execute benchmarks of a package ## SYNOPSIS `cargo bench` [_options_] [_benchname_] [`--` _bench-options_] ## DESCRIPTION Compile and execute benchmarks. The benchmark filtering argument _benchname_ and all the arguments following the two dashes (`--`) are passed to the benchmark binaries and thus to _libtest_ (rustc's built in unit-test and micro-benchmarking framework). If you are passing arguments to both Cargo and the binary, the ones after `--` go to the binary, the ones before go to Cargo. For details about libtest's arguments see the output of `cargo bench -- --help` and check out the rustc book's chapter on how tests work at . As an example, this will run only the benchmark named `foo` (and skip other similarly named benchmarks like `foobar`): cargo bench -- foo --exact Benchmarks are built with the `--test` option to `rustc` which creates a special executable by linking your code with libtest. The executable automatically runs all functions annotated with the `#[bench]` attribute. Cargo passes the `--bench` flag to the test harness to tell it to run only benchmarks. The libtest harness may be disabled by setting `harness = false` in the target manifest settings, in which case your code will need to provide its own `main` function to handle running benchmarks. 
> **Note**: The > [`#[bench]` attribute](https://doc.rust-lang.org/nightly/unstable-book/library-features/test.html) > is currently unstable and only available on the > [nightly channel](https://doc.rust-lang.org/book/appendix-07-nightly-rust.html). > There are some packages available on > [crates.io](https://crates.io/keywords/benchmark) that may help with > running benchmarks on the stable channel, such as > [Criterion](https://crates.io/crates/criterion). By default, `cargo bench` uses the [`bench` profile], which enables optimizations and disables debugging information. If you need to debug a benchmark, you can use the `--profile=dev` command-line option to switch to the dev profile. You can then run the debug-enabled benchmark within a debugger. [`bench` profile]: ../reference/profiles.html#bench ## OPTIONS ### Benchmark Options
--no-run
Compile, but don't run benchmarks.
--no-fail-fast
Run all benchmarks regardless of failure. Without this flag, Cargo will exit after the first executable fails. The Rust test harness will run all benchmarks within the executable to completion, this flag only applies to the executable as a whole.
### Package Selection By default, when no package selection options are given, the packages selected depend on the selected manifest file (based on the current working directory if `--manifest-path` is not given). If the manifest is the root of a workspace then the workspace's default members are selected, otherwise only the package defined by the manifest will be selected. The default members of a workspace can be set explicitly with the `workspace.default-members` key in the root manifest. If this is not set, a virtual workspace will include all workspace members (equivalent to passing `--workspace`), and a non-virtual workspace will include only the root crate itself.
-p spec...
--package spec...
Benchmark only the specified packages. See cargo-pkgid(1) for the SPEC format. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern.
--workspace
Benchmark all members in the workspace.
--all
Deprecated alias for --workspace.
--exclude SPEC...
Exclude the specified packages. Must be used in conjunction with the --workspace flag. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern.
### Target Selection When no target selection options are given, `cargo bench` will build the following targets of the selected packages: - lib β€” used to link with binaries and benchmarks - bins (only if benchmark targets are built and required features are available) - lib as a benchmark - bins as benchmarks - benchmark targets The default behavior can be changed by setting the `bench` flag for the target in the manifest settings. Setting examples to `bench = true` will build and run the example as a benchmark. Setting targets to `bench = false` will stop them from being benchmarked by default. Target selection options that take a target by name ignore the `bench` flag and will always benchmark the given target. Binary targets are automatically built if there is an integration test or benchmark being selected to benchmark. This allows an integration test to execute the binary to exercise and test its behavior. The `CARGO_BIN_EXE_` [environment variable](../reference/environment-variables.html#environment-variables-cargo-sets-for-crates) is set when the integration test is built so that it can use the [`env` macro](https://doc.rust-lang.org/std/macro.env.html) to locate the executable. Passing target selection flags will benchmark only the specified targets. Note that `--bin`, `--example`, `--test` and `--bench` flags also support common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each glob pattern.
--lib
Benchmark the package's library.
--bin name...
Benchmark the specified binary. This flag may be specified multiple times and supports common Unix glob patterns.
--bins
Benchmark all binary targets.
--example name...
Benchmark the specified example. This flag may be specified multiple times and supports common Unix glob patterns.
--examples
Benchmark all example targets.
--test name...
Benchmark the specified integration test. This flag may be specified multiple times and supports common Unix glob patterns.
--tests
Benchmark all targets in test mode that have the test = true manifest flag set. By default this includes the library and binaries built as unittests, and integration tests. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a unittest, and once as a dependency for binaries, integration tests, etc.). Targets may be enabled or disabled by setting the test flag in the manifest settings for the target.
--bench name...
Benchmark the specified benchmark. This flag may be specified multiple times and supports common Unix glob patterns.
--benches
Benchmark all targets in benchmark mode that have the bench = true manifest flag set. By default this includes the library and binaries built as benchmarks, and bench targets. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a benchmark, and once as a dependency for binaries, benchmarks, etc.). Targets may be enabled or disabled by setting the bench flag in the manifest settings for the target.
--all-targets
Benchmark all targets. This is equivalent to specifying --lib --bins --tests --benches --examples.
### Feature Selection The feature flags allow you to control which features are enabled. When no feature options are given, the `default` feature is activated for every selected package. See [the features documentation](../reference/features.html#command-line-feature-options) for more details.
-F features
--features features
Space or comma separated list of features to activate. Features of workspace members may be enabled with package-name/feature-name syntax. This flag may be specified multiple times, which enables all specified features.
--all-features
Activate all available features of all selected packages.
--no-default-features
Do not activate the default feature of the selected packages.
### Compilation Options
--target triple
Benchmark for the given architecture. The default is the host architecture. The general format of the triple is <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a list of supported targets. This flag may be specified multiple times.

This may also be specified with the build.target config value.

Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details.

--profile name
Benchmark with the given profile. See the reference for more details on profiles.
--ignore-rust-version
Benchmark the target even if the selected Rust compiler is older than the required Rust version as configured in the project's rust-version field.
--timings=fmts
Output information about how long each compilation takes, and track concurrency information over time. Accepts an optional comma-separated list of output formats; --timings without an argument will default to --timings=html. Specifying an output format (rather than the default) is unstable and requires -Zunstable-options. Valid output formats:

  • html (unstable, requires -Zunstable-options): Write a human-readable file cargo-timing.html to the target/cargo-timings directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine-readable timing data.
  • json (unstable, requires -Zunstable-options): Emit machine-readable JSON information about timing information.
### Output Options
--target-dir directory
Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the build.target-dir config value. Defaults to target in the root of the workspace.
### Display Options By default the Rust test harness hides output from benchmark execution to keep results readable. Benchmark output can be recovered (e.g., for debugging) by passing `--nocapture` to the benchmark binaries: cargo bench -- --nocapture
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

--message-format fmt
The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values:

  • human (default): Display in a human-readable text format. Conflicts with short and json.
  • short: Emit shorter, human-readable text messages. Conflicts with human and json.
  • json: Emit JSON messages to stdout. See the reference for more details. Conflicts with human and short.
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains the "short" rendering from rustc. Cannot be used with human or short.
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with human or short.
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with human or short.
### Manifest Options
--manifest-path path
Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory.
--frozen
--locked
Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date.

These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access.

--offline
Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible.

Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the net.offline config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
### Miscellaneous Options The `--jobs` argument affects the building of the benchmark executable but does not affect how many threads are used when running the benchmarks. The Rust test harness runs benchmarks serially in a single thread.
-j N
--jobs N
Number of parallel jobs to run. May also be specified with the build.jobs config value. Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus the provided value. Should not be 0.
--keep-going
Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires -Zunstable-options.
## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Build and execute all the benchmarks of the current package: cargo bench 2. Run only a specific benchmark within a specific benchmark target: cargo bench --bench bench_name -- modname::some_benchmark ## SEE ALSO [cargo(1)](cargo.html), [cargo-test(1)](cargo-test.html) cargo-0.66.0/src/doc/src/commands/cargo-build.md000066400000000000000000000542241432416201200213360ustar00rootroot00000000000000# cargo-build(1) ## NAME cargo-build - Compile the current package ## SYNOPSIS `cargo build` [_options_] ## DESCRIPTION Compile local packages and all of their dependencies. ## OPTIONS ### Package Selection By default, when no package selection options are given, the packages selected depend on the selected manifest file (based on the current working directory if `--manifest-path` is not given). If the manifest is the root of a workspace then the workspace's default members are selected, otherwise only the package defined by the manifest will be selected. The default members of a workspace can be set explicitly with the `workspace.default-members` key in the root manifest. If this is not set, a virtual workspace will include all workspace members (equivalent to passing `--workspace`), and a non-virtual workspace will include only the root crate itself.
-p spec...
--package spec...
Build only the specified packages. See cargo-pkgid(1) for the SPEC format. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern.
--workspace
Build all members in the workspace.
--all
Deprecated alias for --workspace.
--exclude SPEC...
Exclude the specified packages. Must be used in conjunction with the --workspace flag. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern.
### Target Selection When no target selection options are given, `cargo build` will build all binary and library targets of the selected packages. Binaries are skipped if they have `required-features` that are missing. Binary targets are automatically built if there is an integration test or benchmark being selected to build. This allows an integration test to execute the binary to exercise and test its behavior. The `CARGO_BIN_EXE_` [environment variable](../reference/environment-variables.html#environment-variables-cargo-sets-for-crates) is set when the integration test is built so that it can use the [`env` macro](https://doc.rust-lang.org/std/macro.env.html) to locate the executable. Passing target selection flags will build only the specified targets. Note that `--bin`, `--example`, `--test` and `--bench` flags also support common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each glob pattern.
--lib
Build the package's library.
--bin name...
Build the specified binary. This flag may be specified multiple times and supports common Unix glob patterns.
--bins
Build all binary targets.
--example name...
Build the specified example. This flag may be specified multiple times and supports common Unix glob patterns.
--examples
Build all example targets.
--test name...
Build the specified integration test. This flag may be specified multiple times and supports common Unix glob patterns.
--tests
Build all targets in test mode that have the test = true manifest flag set. By default this includes the library and binaries built as unittests, and integration tests. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a unittest, and once as a dependency for binaries, integration tests, etc.). Targets may be enabled or disabled by setting the test flag in the manifest settings for the target.
--bench name...
Build the specified benchmark. This flag may be specified multiple times and supports common Unix glob patterns.
--benches
Build all targets in benchmark mode that have the bench = true manifest flag set. By default this includes the library and binaries built as benchmarks, and bench targets. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a benchmark, and once as a dependency for binaries, benchmarks, etc.). Targets may be enabled or disabled by setting the bench flag in the manifest settings for the target.
--all-targets
Build all targets. This is equivalent to specifying --lib --bins --tests --benches --examples.
### Feature Selection The feature flags allow you to control which features are enabled. When no feature options are given, the `default` feature is activated for every selected package. See [the features documentation](../reference/features.html#command-line-feature-options) for more details.
-F features
--features features
Space or comma separated list of features to activate. Features of workspace members may be enabled with package-name/feature-name syntax. This flag may be specified multiple times, which enables all specified features.
--all-features
Activate all available features of all selected packages.
--no-default-features
Do not activate the default feature of the selected packages.
### Compilation Options
--target triple
Build for the given architecture. The default is the host architecture. The general format of the triple is <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a list of supported targets. This flag may be specified multiple times.

This may also be specified with the build.target config value.

Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details.

-r
--release
Build optimized artifacts with the release profile. See also the --profile option for choosing a specific profile by name.
--profile name
Build with the given profile. See the reference for more details on profiles.
--ignore-rust-version
Build the target even if the selected Rust compiler is older than the required Rust version as configured in the project's rust-version field.
--timings=fmts
Output information about how long each compilation takes, and track concurrency information over time. Accepts an optional comma-separated list of output formats; --timings without an argument will default to --timings=html. Specifying an output format (rather than the default) is unstable and requires -Zunstable-options. Valid output formats:

  • html (unstable, requires -Zunstable-options): Write a human-readable file cargo-timing.html to the target/cargo-timings directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine-readable timing data.
  • json (unstable, requires -Zunstable-options): Emit machine-readable JSON information about timing information.
### Output Options
--target-dir directory
Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the build.target-dir config value. Defaults to target in the root of the workspace.
--out-dir directory
Copy final artifacts to this directory.

This option is unstable and available only on the nightly channel and requires the -Z unstable-options flag to enable. See https://github.com/rust-lang/cargo/issues/6790 for more information.

### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

--message-format fmt
The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values:

  • human (default): Display in a human-readable text format. Conflicts with short and json.
  • short: Emit shorter, human-readable text messages. Conflicts with human and json.
  • json: Emit JSON messages to stdout. See the reference for more details. Conflicts with human and short.
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains the "short" rendering from rustc. Cannot be used with human or short.
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with human or short.
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with human or short.
--build-plan
Outputs a series of JSON messages to stdout that indicate the commands to run the build.

This option is unstable and available only on the nightly channel and requires the -Z unstable-options flag to enable. See https://github.com/rust-lang/cargo/issues/5579 for more information.

### Manifest Options
--manifest-path path
Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory.
--frozen
--locked
Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date.

These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access.

--offline
Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible.

Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the net.offline config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
### Miscellaneous Options
-j N
--jobs N
Number of parallel jobs to run. May also be specified with the build.jobs config value. Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus the provided value. Should not be 0.
--keep-going
Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires -Zunstable-options.
--future-incompat-report
Displays a future-incompat report for any future-incompatible warnings produced during execution of this command

See cargo-report(1)

## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Build the local package and all of its dependencies: cargo build 2. Build with optimizations: cargo build --release ## SEE ALSO [cargo(1)](cargo.html), [cargo-rustc(1)](cargo-rustc.html) cargo-0.66.0/src/doc/src/commands/cargo-check.md000066400000000000000000000522411432416201200213110ustar00rootroot00000000000000# cargo-check(1) ## NAME cargo-check - Check the current package ## SYNOPSIS `cargo check` [_options_] ## DESCRIPTION Check a local package and all of its dependencies for errors. This will essentially compile the packages without performing the final step of code generation, which is faster than running `cargo build`. The compiler will save metadata files to disk so that future runs will reuse them if the source has not been modified. Some diagnostics and errors are only emitted during code generation, so they inherently won't be reported with `cargo check`. ## OPTIONS ### Package Selection By default, when no package selection options are given, the packages selected depend on the selected manifest file (based on the current working directory if `--manifest-path` is not given). If the manifest is the root of a workspace then the workspace's default members are selected, otherwise only the package defined by the manifest will be selected. The default members of a workspace can be set explicitly with the `workspace.default-members` key in the root manifest. If this is not set, a virtual workspace will include all workspace members (equivalent to passing `--workspace`), and a non-virtual workspace will include only the root crate itself.
-p spec...
--package spec...
Check only the specified packages. See cargo-pkgid(1) for the SPEC format. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern.
--workspace
Check all members in the workspace.
--all
Deprecated alias for --workspace.
--exclude SPEC...
Exclude the specified packages. Must be used in conjunction with the --workspace flag. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern.
### Target Selection When no target selection options are given, `cargo check` will check all binary and library targets of the selected packages. Binaries are skipped if they have `required-features` that are missing. Passing target selection flags will check only the specified targets. Note that `--bin`, `--example`, `--test` and `--bench` flags also support common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each glob pattern.
--lib
Check the package's library.
--bin name...
Check the specified binary. This flag may be specified multiple times and supports common Unix glob patterns.
--bins
Check all binary targets.
--example name...
Check the specified example. This flag may be specified multiple times and supports common Unix glob patterns.
--examples
Check all example targets.
--test name...
Check the specified integration test. This flag may be specified multiple times and supports common Unix glob patterns.
--tests
Check all targets in test mode that have the test = true manifest flag set. By default this includes the library and binaries built as unittests, and integration tests. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a unittest, and once as a dependency for binaries, integration tests, etc.). Targets may be enabled or disabled by setting the test flag in the manifest settings for the target.
--bench name...
Check the specified benchmark. This flag may be specified multiple times and supports common Unix glob patterns.
--benches
Check all targets in benchmark mode that have the bench = true manifest flag set. By default this includes the library and binaries built as benchmarks, and bench targets. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a benchmark, and once as a dependency for binaries, benchmarks, etc.). Targets may be enabled or disabled by setting the bench flag in the manifest settings for the target.
--all-targets
Check all targets. This is equivalent to specifying --lib --bins --tests --benches --examples.
### Feature Selection The feature flags allow you to control which features are enabled. When no feature options are given, the `default` feature is activated for every selected package. See [the features documentation](../reference/features.html#command-line-feature-options) for more details.
-F features
--features features
Space or comma separated list of features to activate. Features of workspace members may be enabled with package-name/feature-name syntax. This flag may be specified multiple times, which enables all specified features.
--all-features
Activate all available features of all selected packages.
--no-default-features
Do not activate the default feature of the selected packages.
### Compilation Options
--target triple
Check for the given architecture. The default is the host architecture. The general format of the triple is <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a list of supported targets. This flag may be specified multiple times.

This may also be specified with the build.target config value.

Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details.

-r
--release
Check optimized artifacts with the release profile. See also the --profile option for choosing a specific profile by name.
--profile name
Check with the given profile.

As a special case, specifying the test profile will also enable checking in test mode which will enable checking tests and enable the test cfg option. See rustc tests for more detail.

See the reference for more details on profiles.

--ignore-rust-version
Check the target even if the selected Rust compiler is older than the required Rust version as configured in the project's rust-version field.
--timings=fmts
Output information about how long each compilation takes, and track concurrency information over time. Accepts an optional comma-separated list of output formats; --timings without an argument will default to --timings=html. Specifying an output format (rather than the default) is unstable and requires -Zunstable-options. Valid output formats:

  • html (unstable, requires -Zunstable-options): Write a human-readable file cargo-timing.html to the target/cargo-timings directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine-readable timing data.
  • json (unstable, requires -Zunstable-options): Emit machine-readable JSON information about timing information.
### Output Options
--target-dir directory
Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the build.target-dir config value. Defaults to target in the root of the workspace.
### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

--message-format fmt
The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values:

  • human (default): Display in a human-readable text format. Conflicts with short and json.
  • short: Emit shorter, human-readable text messages. Conflicts with human and json.
  • json: Emit JSON messages to stdout. See the reference for more details. Conflicts with human and short.
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains the "short" rendering from rustc. Cannot be used with human or short.
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with human or short.
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with human or short.
### Manifest Options
--manifest-path path
Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory.
--frozen
--locked
Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date.

These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access.

--offline
Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible.

Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the net.offline config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
### Miscellaneous Options
-j N
--jobs N
Number of parallel jobs to run. May also be specified with the build.jobs config value. Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus the provided value. Should not be 0.
--keep-going
Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires -Zunstable-options.
--future-incompat-report
Displays a future-incompat report for any future-incompatible warnings produced during execution of this command

See cargo-report(1)

## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Check the local package for errors: cargo check 2. Check all targets, including unit tests: cargo check --all-targets --profile=test ## SEE ALSO [cargo(1)](cargo.html), [cargo-build(1)](cargo-build.html) cargo-0.66.0/src/doc/src/commands/cargo-clean.md000066400000000000000000000214631432416201200213200ustar00rootroot00000000000000# cargo-clean(1) ## NAME cargo-clean - Remove generated artifacts ## SYNOPSIS `cargo clean` [_options_] ## DESCRIPTION Remove artifacts from the target directory that Cargo has generated in the past. With no options, `cargo clean` will delete the entire target directory. ## OPTIONS ### Package Selection When no packages are selected, all packages and all dependencies in the workspace are cleaned.
-p spec...
--package spec...
Clean only the specified packages. This flag may be specified multiple times. See cargo-pkgid(1) for the SPEC format.
### Clean Options
--doc
This option will cause cargo clean to remove only the doc directory in the target directory.
--release
Remove all artifacts in the release directory.
--profile name
Remove all artifacts in the directory with the given profile name.
--target-dir directory
Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the build.target-dir config value. Defaults to target in the root of the workspace.
--target triple
Clean for the given architecture. The default is the host architecture. The general format of the triple is <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a list of supported targets. This flag may be specified multiple times.

This may also be specified with the build.target config value.

Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details.

### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

### Manifest Options
--manifest-path path
Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory.
--frozen
--locked
Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date.

These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access.

--offline
Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible.

Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the net.offline config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Remove the entire target directory: cargo clean 2. Remove only the release artifacts: cargo clean --release ## SEE ALSO [cargo(1)](cargo.html), [cargo-build(1)](cargo-build.html) cargo-0.66.0/src/doc/src/commands/cargo-doc.md000066400000000000000000000455041432416201200210050ustar00rootroot00000000000000# cargo-doc(1) ## NAME cargo-doc - Build a package's documentation ## SYNOPSIS `cargo doc` [_options_] ## DESCRIPTION Build the documentation for the local package and all dependencies. The output is placed in `target/doc` in rustdoc's usual format. ## OPTIONS ### Documentation Options
--open
Open the docs in a browser after building them. This will use your default browser unless you define another one in the BROWSER environment variable or use the doc.browser configuration option.
--no-deps
Do not build documentation for dependencies.
--document-private-items
Include non-public items in the documentation. This will be enabled by default if documenting a binary target.
### Package Selection By default, when no package selection options are given, the packages selected depend on the selected manifest file (based on the current working directory if `--manifest-path` is not given). If the manifest is the root of a workspace then the workspaces default members are selected, otherwise only the package defined by the manifest will be selected. The default members of a workspace can be set explicitly with the `workspace.default-members` key in the root manifest. If this is not set, a virtual workspace will include all workspace members (equivalent to passing `--workspace`), and a non-virtual workspace will include only the root crate itself.
-p spec...
--package spec...
Document only the specified packages. See cargo-pkgid(1) for the SPEC format. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern.
--workspace
Document all members in the workspace.
--all
Deprecated alias for --workspace.
--exclude SPEC...
Exclude the specified packages. Must be used in conjunction with the --workspace flag. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern.
### Target Selection When no target selection options are given, `cargo doc` will document all binary and library targets of the selected package. The binary will be skipped if its name is the same as the lib target. Binaries are skipped if they have `required-features` that are missing. The default behavior can be changed by setting `doc = false` for the target in the manifest settings. Using target selection options will ignore the `doc` flag and will always document the given target.
--lib
Document the package's library.
--bin name...
Document the specified binary. This flag may be specified multiple times and supports common Unix glob patterns.
--bins
Document all binary targets.
--example name...
Document the specified example. This flag may be specified multiple times and supports common Unix glob patterns.
--examples
Document all example targets.
### Feature Selection The feature flags allow you to control which features are enabled. When no feature options are given, the `default` feature is activated for every selected package. See [the features documentation](../reference/features.html#command-line-feature-options) for more details.
-F features
--features features
Space or comma separated list of features to activate. Features of workspace members may be enabled with package-name/feature-name syntax. This flag may be specified multiple times, which enables all specified features.
--all-features
Activate all available features of all selected packages.
--no-default-features
Do not activate the default feature of the selected packages.
### Compilation Options
--target triple
Document for the given architecture. The default is the host architecture. The general format of the triple is <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a list of supported targets. This flag may be specified multiple times.

This may also be specified with the build.target config value.

Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details.

-r
--release
Document optimized artifacts with the release profile. See also the --profile option for choosing a specific profile by name.
--profile name
Document with the given profile. See the reference for more details on profiles.
--ignore-rust-version
Document the target even if the selected Rust compiler is older than the required Rust version as configured in the project's rust-version field.
--timings=fmts
Output information about how long each compilation takes, and track concurrency information over time. Accepts an optional comma-separated list of output formats; --timings without an argument will default to --timings=html. Specifying an output format (rather than the default) is unstable and requires -Zunstable-options. Valid output formats:

  • html (unstable, requires -Zunstable-options): Write a human-readable file cargo-timing.html to the target/cargo-timings directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine-readable timing data.
  • json (unstable, requires -Zunstable-options): Emit machine-readable JSON information about timing information.
### Output Options
--target-dir directory
Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the build.target-dir config value. Defaults to target in the root of the workspace.
### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

--message-format fmt
The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values:

  • human (default): Display in a human-readable text format. Conflicts with short and json.
  • short: Emit shorter, human-readable text messages. Conflicts with human and json.
  • json: Emit JSON messages to stdout. See the reference for more details. Conflicts with human and short.
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains the "short" rendering from rustc. Cannot be used with human or short.
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with human or short.
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with human or short.
### Manifest Options
--manifest-path path
Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory.
--frozen
--locked
Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date.

These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access.

--offline
Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible.

Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the net.offline config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
### Miscellaneous Options
-j N
--jobs N
Number of parallel jobs to run. May also be specified with the build.jobs config value. Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus the provided value. Should not be 0.
--keep-going
Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires -Zunstable-options.
## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Build the local package documentation and its dependencies and output to `target/doc`. cargo doc ## SEE ALSO [cargo(1)](cargo.html), [cargo-rustdoc(1)](cargo-rustdoc.html), [rustdoc(1)](https://doc.rust-lang.org/rustdoc/index.html) cargo-0.66.0/src/doc/src/commands/cargo-fetch.md000066400000000000000000000166561432416201200213370ustar00rootroot00000000000000# cargo-fetch(1) ## NAME cargo-fetch - Fetch dependencies of a package from the network ## SYNOPSIS `cargo fetch` [_options_] ## DESCRIPTION If a `Cargo.lock` file is available, this command will ensure that all of the git dependencies and/or registry dependencies are downloaded and locally available. Subsequent Cargo commands will be able to run offline after a `cargo fetch` unless the lock file changes. If the lock file is not available, then this command will generate the lock file before fetching the dependencies. If `--target` is not specified, then all target dependencies are fetched. See also the [cargo-prefetch](https://crates.io/crates/cargo-prefetch) plugin which adds a command to download popular crates. This may be useful if you plan to use Cargo without a network with the `--offline` flag. ## OPTIONS ### Fetch options
--target triple
Fetch for the given architecture. The default is all architectures. The general format of the triple is <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a list of supported targets. This flag may be specified multiple times.

This may also be specified with the build.target config value.

Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details.

### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

### Manifest Options
--manifest-path path
Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory.
--frozen
--locked
Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date.

These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access.

--offline
Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible.

Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the net.offline config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Fetch all dependencies: cargo fetch ## SEE ALSO [cargo(1)](cargo.html), [cargo-update(1)](cargo-update.html), [cargo-generate-lockfile(1)](cargo-generate-lockfile.html) cargo-0.66.0/src/doc/src/commands/cargo-fix.md000066400000000000000000000607161432416201200210300ustar00rootroot00000000000000# cargo-fix(1) ## NAME cargo-fix - Automatically fix lint warnings reported by rustc ## SYNOPSIS `cargo fix` [_options_] ## DESCRIPTION This Cargo subcommand will automatically take rustc's suggestions from diagnostics like warnings and apply them to your source code. This is intended to help automate tasks that rustc itself already knows how to tell you to fix! Executing `cargo fix` will under the hood execute [cargo-check(1)](cargo-check.html). Any warnings applicable to your crate will be automatically fixed (if possible) and all remaining warnings will be displayed when the check process is finished. For example if you'd like to apply all fixes to the current package, you can run: cargo fix which behaves the same as `cargo check --all-targets`. `cargo fix` is only capable of fixing code that is normally compiled with `cargo check`. If code is conditionally enabled with optional features, you will need to enable those features for that code to be analyzed: cargo fix --features foo Similarly, other `cfg` expressions like platform-specific code will need to pass `--target` to fix code for the given target. cargo fix --target x86_64-pc-windows-gnu If you encounter any problems with `cargo fix` or otherwise have any questions or feature requests please don't hesitate to file an issue at . ### Edition migration The `cargo fix` subcommand can also be used to migrate a package from one [edition] to the next. The general procedure is: 1. Run `cargo fix --edition`. 
Consider also using the `--all-features` flag if your project has multiple features. You may also want to run `cargo fix --edition` multiple times with different `--target` flags if your project has platform-specific code gated by `cfg` attributes. 2. Modify `Cargo.toml` to set the [edition field] to the new edition. 3. Run your project tests to verify that everything still works. If new warnings are issued, you may want to consider running `cargo fix` again (without the `--edition` flag) to apply any suggestions given by the compiler. And hopefully that's it! Just keep in mind the caveats mentioned above that `cargo fix` cannot update code for inactive features or `cfg` expressions. Also, in some rare cases the compiler is unable to automatically migrate all code to the new edition, and this may require manual changes after building with the new edition. [edition]: https://doc.rust-lang.org/edition-guide/editions/transitioning-an-existing-project-to-a-new-edition.html [edition field]: ../reference/manifest.html#the-edition-field ## OPTIONS ### Fix options
--broken-code
Fix code even if it already has compiler errors. This is useful if cargo fix fails to apply the changes. It will apply the changes and leave the broken code in the working directory for you to inspect and manually fix.
--edition
Apply changes that will update the code to the next edition. This will not update the edition in the Cargo.toml manifest, which must be updated manually after cargo fix --edition has finished.
--edition-idioms
Apply suggestions that will update code to the preferred style for the current edition.
--allow-no-vcs
Fix code even if a VCS was not detected.
--allow-dirty
Fix code even if the working directory has changes.
--allow-staged
Fix code even if the working directory has staged changes.
### Package Selection By default, when no package selection options are given, the packages selected depend on the selected manifest file (based on the current working directory if `--manifest-path` is not given). If the manifest is the root of a workspace then the workspace's default members are selected, otherwise only the package defined by the manifest will be selected. The default members of a workspace can be set explicitly with the `workspace.default-members` key in the root manifest. If this is not set, a virtual workspace will include all workspace members (equivalent to passing `--workspace`), and a non-virtual workspace will include only the root crate itself.
-p spec...
--package spec...
Fix only the specified packages. See cargo-pkgid(1) for the SPEC format. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern.
--workspace
Fix all members in the workspace.
--all
Deprecated alias for --workspace.
--exclude SPEC...
Exclude the specified packages. Must be used in conjunction with the --workspace flag. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern.
### Target Selection When no target selection options are given, `cargo fix` will fix all targets (`--all-targets` implied). Binaries are skipped if they have `required-features` that are missing. Passing target selection flags will fix only the specified targets. Note that `--bin`, `--example`, `--test` and `--bench` flags also support common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each glob pattern.
--lib
Fix the package's library.
--bin name...
Fix the specified binary. This flag may be specified multiple times and supports common Unix glob patterns.
--bins
Fix all binary targets.
--example name...
Fix the specified example. This flag may be specified multiple times and supports common Unix glob patterns.
--examples
Fix all example targets.
--test name...
Fix the specified integration test. This flag may be specified multiple times and supports common Unix glob patterns.
--tests
Fix all targets in test mode that have the test = true manifest flag set. By default this includes the library and binaries built as unittests, and integration tests. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a unittest, and once as a dependency for binaries, integration tests, etc.). Targets may be enabled or disabled by setting the test flag in the manifest settings for the target.
--bench name...
Fix the specified benchmark. This flag may be specified multiple times and supports common Unix glob patterns.
--benches
Fix all targets in benchmark mode that have the bench = true manifest flag set. By default this includes the library and binaries built as benchmarks, and bench targets. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a benchmark, and once as a dependency for binaries, benchmarks, etc.). Targets may be enabled or disabled by setting the bench flag in the manifest settings for the target.
--all-targets
Fix all targets. This is equivalent to specifying --lib --bins --tests --benches --examples.
### Feature Selection The feature flags allow you to control which features are enabled. When no feature options are given, the `default` feature is activated for every selected package. See [the features documentation](../reference/features.html#command-line-feature-options) for more details.
-F features
--features features
Space or comma separated list of features to activate. Features of workspace members may be enabled with package-name/feature-name syntax. This flag may be specified multiple times, which enables all specified features.
--all-features
Activate all available features of all selected packages.
--no-default-features
Do not activate the default feature of the selected packages.
### Compilation Options
--target triple
Fix for the given architecture. The default is the host architecture. The general format of the triple is <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a list of supported targets. This flag may be specified multiple times.

This may also be specified with the build.target config value.

Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details.

-r
--release
Fix optimized artifacts with the release profile. See also the --profile option for choosing a specific profile by name.
--profile name
Fix with the given profile.

As a special case, specifying the test profile will also enable checking in test mode which will enable checking tests and enable the test cfg option. See rustc tests for more detail.

See the reference for more details on profiles.

--ignore-rust-version
Fix the target even if the selected Rust compiler is older than the required Rust version as configured in the project's rust-version field.
--timings=fmts
Output information about how long each compilation takes, and track concurrency information over time. Accepts an optional comma-separated list of output formats; --timings without an argument will default to --timings=html. Specifying an output format (rather than the default) is unstable and requires -Zunstable-options. Valid output formats:

  • html (unstable, requires -Zunstable-options): Write a human-readable file cargo-timing.html to the target/cargo-timings directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine-readable timing data.
  • json (unstable, requires -Zunstable-options): Emit machine-readable JSON information about timing information.
### Output Options
--target-dir directory
Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the build.target-dir config value. Defaults to target in the root of the workspace.
### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

--message-format fmt
The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values:

  • human (default): Display in a human-readable text format. Conflicts with short and json.
  • short: Emit shorter, human-readable text messages. Conflicts with human and json.
  • json: Emit JSON messages to stdout. See the reference for more details. Conflicts with human and short.
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains the "short" rendering from rustc. Cannot be used with human or short.
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with human or short.
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with human or short.
### Manifest Options
--manifest-path path
Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory.
--frozen
--locked
Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date.

These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access.

--offline
Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible.

Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the net.offline config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
### Miscellaneous Options
-j N
--jobs N
Number of parallel jobs to run. May also be specified with the build.jobs config value. Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0.
--keep-going
Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires -Zunstable-options.
## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Apply compiler suggestions to the local package: cargo fix 2. Update a package to prepare it for the next edition: cargo fix --edition 3. Apply suggested idioms for the current edition: cargo fix --edition-idioms ## SEE ALSO [cargo(1)](cargo.html), [cargo-check(1)](cargo-check.html) cargo-0.66.0/src/doc/src/commands/cargo-generate-lockfile.md000066400000000000000000000151331432416201200236130ustar00rootroot00000000000000# cargo-generate-lockfile(1) ## NAME cargo-generate-lockfile - Generate the lockfile for a package ## SYNOPSIS `cargo generate-lockfile` [_options_] ## DESCRIPTION This command will create the `Cargo.lock` lockfile for the current package or workspace. If the lockfile already exists, it will be rebuilt with the latest available version of every package. See also [cargo-update(1)](cargo-update.html) which is also capable of creating a `Cargo.lock` lockfile and has more options for controlling update behavior. ## OPTIONS ### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

### Manifest Options
--manifest-path path
Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory.
--frozen
--locked
Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date.

These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access.

--offline
Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible.

Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the net.offline config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Create or update the lockfile for the current package or workspace: cargo generate-lockfile ## SEE ALSO [cargo(1)](cargo.html), [cargo-update(1)](cargo-update.html) cargo-0.66.0/src/doc/src/commands/cargo-help.md000066400000000000000000000005401432416201200211570ustar00rootroot00000000000000# cargo-help(1) ## NAME cargo-help - Get help for a Cargo command ## SYNOPSIS `cargo help` [_subcommand_] ## DESCRIPTION Prints a help message for the given command. ## EXAMPLES 1. Get help for a command: cargo help build 2. Help is also available with the `--help` flag: cargo build --help ## SEE ALSO [cargo(1)](cargo.html) cargo-0.66.0/src/doc/src/commands/cargo-init.md000066400000000000000000000150641432416201200212010ustar00rootroot00000000000000# cargo-init(1) ## NAME cargo-init - Create a new Cargo package in an existing directory ## SYNOPSIS `cargo init` [_options_] [_path_] ## DESCRIPTION This command will create a new Cargo manifest in the current directory. Give a path as an argument to create in the given directory. If there are typically-named Rust source files already in the directory, those will be used. If not, then a sample `src/main.rs` file will be created, or `src/lib.rs` if `--lib` is passed. If the directory is not already in a VCS repository, then a new repository is created (see `--vcs` below). See [cargo-new(1)](cargo-new.html) for a similar command which will create a new package in a new directory. ## OPTIONS ### Init Options
--bin
Create a package with a binary target (src/main.rs). This is the default behavior.
--lib
Create a package with a library target (src/lib.rs).
--edition edition
Specify the Rust edition to use. Default is 2021. Possible values: 2015, 2018, 2021
--name name
Set the package name. Defaults to the directory name.
--vcs vcs
Initialize a new VCS repository for the given version control system (git, hg, pijul, or fossil) or do not initialize any version control at all (none). If not specified, defaults to git or the configuration value cargo-new.vcs, or none if already inside a VCS repository.
--registry registry
This sets the publish field in Cargo.toml to the given registry name which will restrict publishing only to that registry.

Registry names are defined in Cargo config files. If not specified, the default registry defined by the registry.default config key is used. If the default registry is not set and --registry is not used, the publish field will not be set which means that publishing will not be restricted.

### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Create a binary Cargo package in the current directory: cargo init ## SEE ALSO [cargo(1)](cargo.html), [cargo-new(1)](cargo-new.html) cargo-0.66.0/src/doc/src/commands/cargo-install.md000066400000000000000000000530111432416201200216760ustar00rootroot00000000000000# cargo-install(1) ## NAME cargo-install - Build and install a Rust binary ## SYNOPSIS `cargo install` [_options_] _crate_[@_version_]...\ `cargo install` [_options_] `--path` _path_\ `cargo install` [_options_] `--git` _url_ [_crate_...]\ `cargo install` [_options_] `--list` ## DESCRIPTION This command manages Cargo's local set of installed binary crates. Only packages which have executable `[[bin]]` or `[[example]]` targets can be installed, and all executables are installed into the installation root's `bin` folder. The installation root is determined, in order of precedence: - `--root` option - `CARGO_INSTALL_ROOT` environment variable - `install.root` Cargo [config value](../reference/config.html) - `CARGO_HOME` environment variable - `$HOME/.cargo` There are multiple sources from which a crate can be installed. The default location is crates.io but the `--git`, `--path`, and `--registry` flags can change this source. If the source contains more than one package (such as crates.io or a git repository with multiple crates) the _crate_ argument is required to indicate which crate should be installed. Crates from crates.io can optionally specify the version they wish to install via the `--version` flags, and similarly packages from git repositories can optionally specify the branch, tag, or revision that should be installed. 
If a crate has multiple binaries, the `--bin` argument can selectively install only one of them, and if you'd rather install examples the `--example` argument can be used as well. If the package is already installed, Cargo will reinstall it if the installed version does not appear to be up-to-date. If any of the following values change, then Cargo will reinstall the package: - The package version and source. - The set of binary names installed. - The chosen features. - The profile (`--profile`). - The target (`--target`). Installing with `--path` will always build and install, unless there are conflicting binaries from another package. The `--force` flag may be used to force Cargo to always reinstall the package. If the source is crates.io or `--git` then by default the crate will be built in a temporary target directory. To avoid this, the target directory can be specified by setting the `CARGO_TARGET_DIR` environment variable to a relative path. In particular, this can be useful for caching build artifacts on continuous integration systems. By default, the `Cargo.lock` file that is included with the package will be ignored. This means that Cargo will recompute which versions of dependencies to use, possibly using newer versions that have been released since the package was published. The `--locked` flag can be used to force Cargo to use the packaged `Cargo.lock` file if it is available. This may be useful for ensuring reproducible builds, to use the exact same set of dependencies that were available when the package was published. It may also be useful if a newer version of a dependency is published that no longer builds on your system, or has other problems. The downside to using `--locked` is that you will not receive any fixes or updates to any dependency. Note that Cargo did not start publishing `Cargo.lock` files until version 1.37, which means packages published with prior versions will not have a `Cargo.lock` file available. ## OPTIONS ### Install Options
--vers version
--version version
Specify a version to install. This may be a version requirement, like ~1.2, to have Cargo select the newest version from the given requirement. If the version does not have a requirement operator (such as ^ or ~), then it must be in the form MAJOR.MINOR.PATCH, and will install exactly that version; it is not treated as a caret requirement like Cargo dependencies are.
--git url
Git URL to install the specified crate from.
--branch branch
Branch to use when installing from git.
--tag tag
Tag to use when installing from git.
--rev sha
Specific commit to use when installing from git.
--path path
Filesystem path to local crate to install.
--list
List all installed packages and their versions.
-f
--force
Force overwriting existing crates or binaries. This can be used if a package has installed a binary with the same name as another package. This is also useful if something has changed on the system that you want to rebuild with, such as a newer version of rustc.
--no-track
By default, Cargo keeps track of the installed packages with a metadata file stored in the installation root directory. This flag tells Cargo not to use or create that file. With this flag, Cargo will refuse to overwrite any existing files unless the --force flag is used. This also disables Cargo's ability to protect against multiple concurrent invocations of Cargo installing at the same time.
--bin name...
Install only the specified binary.
--bins
Install all binaries.
--example name...
Install only the specified example.
--examples
Install all examples.
--root dir
Directory to install packages into.
--registry registry
Name of the registry to use. Registry names are defined in Cargo config files. If not specified, the default registry is used, which is defined by the registry.default config key which defaults to crates-io.
--index index
The URL of the registry index to use.
### Feature Selection The feature flags allow you to control which features are enabled. When no feature options are given, the `default` feature is activated for every selected package. See [the features documentation](../reference/features.html#command-line-feature-options) for more details.
-F features
--features features
Space or comma separated list of features to activate. Features of workspace members may be enabled with package-name/feature-name syntax. This flag may be specified multiple times, which enables all specified features.
--all-features
Activate all available features of all selected packages.
--no-default-features
Do not activate the default feature of the selected packages.
### Compilation Options
--target triple
Install for the given architecture. The default is the host architecture. The general format of the triple is <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a list of supported targets.

This may also be specified with the build.target config value.

Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details.

--target-dir directory
Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the build.target-dir config value. Defaults to a new temporary folder located in the temporary directory of the platform.

When using --path, by default it will use the target directory in the workspace of the local crate unless --target-dir is specified.

--debug
Build with the dev profile instead of the release profile. See also the --profile option for choosing a specific profile by name.
--profile name
Install with the given profile. See the reference for more details on profiles.
--timings=fmts
Output information about how long each compilation takes, and track concurrency information over time. Accepts an optional comma-separated list of output formats; --timings without an argument will default to --timings=html. Specifying an output format (rather than the default) is unstable and requires -Zunstable-options. Valid output formats:

  • html (unstable, requires -Zunstable-options): Write a human-readable file cargo-timing.html to the target/cargo-timings directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine-readable timing data.
  • json (unstable, requires -Zunstable-options): Emit machine-readable JSON information about timing information.
### Manifest Options
--frozen
--locked
Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date.

These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access.

--offline
Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible.

Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the net.offline config value.

### Miscellaneous Options
-j N
--jobs N
Number of parallel jobs to run. May also be specified with the build.jobs config value. Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0.
--keep-going
Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires -Zunstable-options.
### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

--message-format fmt
The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values:

  • human (default): Display in a human-readable text format. Conflicts with short and json.
  • short: Emit shorter, human-readable text messages. Conflicts with human and json.
  • json: Emit JSON messages to stdout. See the reference for more details. Conflicts with human and short.
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains the "short" rendering from rustc. Cannot be used with human or short.
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with human or short.
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with human or short.
### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Install or upgrade a package from crates.io: cargo install ripgrep 2. Install or reinstall the package in the current directory: cargo install --path . 3. View the list of installed packages: cargo install --list ## SEE ALSO [cargo(1)](cargo.html), [cargo-uninstall(1)](cargo-uninstall.html), [cargo-search(1)](cargo-search.html), [cargo-publish(1)](cargo-publish.html) cargo-0.66.0/src/doc/src/commands/cargo-locate-project.md000066400000000000000000000125301432416201200231440ustar00rootroot00000000000000# cargo-locate-project(1) ## NAME cargo-locate-project - Print a JSON representation of a Cargo.toml file's location ## SYNOPSIS `cargo locate-project` [_options_] ## DESCRIPTION This command will print a JSON object to stdout with the full path to the `Cargo.toml` manifest. ## OPTIONS
--workspace
Locate the Cargo.toml at the root of the workspace, as opposed to the current workspace member.
### Display Options
--message-format fmt
The representation in which to print the project location. Valid values:

  • json (default): JSON object with the path under the key "root".
  • plain: Just the path.
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

### Manifest Options
--manifest-path path
Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory.
### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Display the path to the manifest based on the current directory: cargo locate-project ## SEE ALSO [cargo(1)](cargo.html), [cargo-metadata(1)](cargo-metadata.html) cargo-0.66.0/src/doc/src/commands/cargo-login.md000066400000000000000000000114571432416201200213500ustar00rootroot00000000000000# cargo-login(1) ## NAME cargo-login - Save an API token from the registry locally ## SYNOPSIS `cargo login` [_options_] [_token_] ## DESCRIPTION This command will save the API token to disk so that commands that require authentication, such as [cargo-publish(1)](cargo-publish.html), will be automatically authenticated. The token is saved in `$CARGO_HOME/credentials.toml`. `CARGO_HOME` defaults to `.cargo` in your home directory. If the _token_ argument is not specified, it will be read from stdin. The API token for crates.io may be retrieved from . Take care to keep the token secret, it should not be shared with anyone else. ## OPTIONS ### Login Options
--registry registry
Name of the registry to use. Registry names are defined in Cargo config files. If not specified, the default registry is used, which is defined by the registry.default config key which defaults to crates-io.
### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Save the API token to disk: cargo login ## SEE ALSO [cargo(1)](cargo.html), [cargo-publish(1)](cargo-publish.html) cargo-0.66.0/src/doc/src/commands/cargo-metadata.md000066400000000000000000000512761432416201200220230ustar00rootroot00000000000000# cargo-metadata(1) ## NAME cargo-metadata - Machine-readable metadata about the current package ## SYNOPSIS `cargo metadata` [_options_] ## DESCRIPTION Output JSON to stdout containing information about the workspace members and resolved dependencies of the current package. It is recommended to include the `--format-version` flag to future-proof your code to ensure the output is in the format you are expecting. See the [cargo_metadata crate](https://crates.io/crates/cargo_metadata) for a Rust API for reading the metadata. ## OUTPUT FORMAT The output has the following format: ```javascript { /* Array of all packages in the workspace. It also includes all feature-enabled dependencies unless --no-deps is used. */ "packages": [ { /* The name of the package. */ "name": "my-package", /* The version of the package. */ "version": "0.1.0", /* The Package ID, a unique identifier for referring to the package. */ "id": "my-package 0.1.0 (path+file:///path/to/my-package)", /* The license value from the manifest, or null. */ "license": "MIT/Apache-2.0", /* The license-file value from the manifest, or null. */ "license_file": "LICENSE", /* The description value from the manifest, or null. */ "description": "Package description.", /* The source ID of the package. This represents where a package is retrieved from. This is null for path dependencies and workspace members. For other dependencies, it is a string with the format: - "registry+URL" for registry-based dependencies. 
Example: "registry+https://github.com/rust-lang/crates.io-index" - "git+URL" for git-based dependencies. Example: "git+https://github.com/rust-lang/cargo?rev=5e85ba14aaa20f8133863373404cb0af69eeef2c#5e85ba14aaa20f8133863373404cb0af69eeef2c" */ "source": null, /* Array of dependencies declared in the package's manifest. */ "dependencies": [ { /* The name of the dependency. */ "name": "bitflags", /* The source ID of the dependency. May be null, see description for the package source. */ "source": "registry+https://github.com/rust-lang/crates.io-index", /* The version requirement for the dependency. Dependencies without a version requirement have a value of "*". */ "req": "^1.0", /* The dependency kind. "dev", "build", or null for a normal dependency. */ "kind": null, /* If the dependency is renamed, this is the new name for the dependency as a string. null if it is not renamed. */ "rename": null, /* Boolean of whether or not this is an optional dependency. */ "optional": false, /* Boolean of whether or not default features are enabled. */ "uses_default_features": true, /* Array of features enabled. */ "features": [], /* The target platform for the dependency. null if not a target dependency. */ "target": "cfg(windows)", /* The file system path for a local path dependency. not present if not a path dependency. */ "path": "/path/to/dep", /* A string of the URL of the registry this dependency is from. If not specified or null, the dependency is from the default registry (crates.io). */ "registry": null } ], /* Array of Cargo targets. */ "targets": [ { /* Array of target kinds. - lib targets list the `crate-type` values from the manifest such as "lib", "rlib", "dylib", "proc-macro", etc. (default ["lib"]) - binary is ["bin"] - example is ["example"] - integration test is ["test"] - benchmark is ["bench"] - build script is ["custom-build"] */ "kind": [ "bin" ], /* Array of crate types. 
- lib and example libraries list the `crate-type` values from the manifest such as "lib", "rlib", "dylib", "proc-macro", etc. (default ["lib"]) - all other target kinds are ["bin"] */ "crate_types": [ "bin" ], /* The name of the target. */ "name": "my-package", /* Absolute path to the root source file of the target. */ "src_path": "/path/to/my-package/src/main.rs", /* The Rust edition of the target. Defaults to the package edition. */ "edition": "2018", /* Array of required features. This property is not included if no required features are set. */ "required-features": ["feat1"], /* Whether the target should be documented by `cargo doc`. */ "doc": true, /* Whether or not this target has doc tests enabled, and the target is compatible with doc testing. */ "doctest": false, /* Whether or not this target should be built and run with `--test` */ "test": true } ], /* Set of features defined for the package. Each feature maps to an array of features or dependencies it enables. */ "features": { "default": [ "feat1" ], "feat1": [], "feat2": [] }, /* Absolute path to this package's manifest. */ "manifest_path": "/path/to/my-package/Cargo.toml", /* Package metadata. This is null if no metadata is specified. */ "metadata": { "docs": { "rs": { "all-features": true } } }, /* List of registries to which this package may be published. Publishing is unrestricted if null, and forbidden if an empty array. */ "publish": [ "crates-io" ], /* Array of authors from the manifest. Empty array if no authors specified. */ "authors": [ "Jane Doe " ], /* Array of categories from the manifest. */ "categories": [ "command-line-utilities" ], /* Optional string that is the default binary picked by cargo run. */ "default_run": null, /* Optional string that is the minimum supported rust version */ "rust_version": "1.56", /* Array of keywords from the manifest. */ "keywords": [ "cli" ], /* The readme value from the manifest or null if not specified. 
*/ "readme": "README.md", /* The repository value from the manifest or null if not specified. */ "repository": "https://github.com/rust-lang/cargo", /* The homepage value from the manifest or null if not specified. */ "homepage": "https://rust-lang.org", /* The documentation value from the manifest or null if not specified. */ "documentation": "https://doc.rust-lang.org/stable/std", /* The default edition of the package. Note that individual targets may have different editions. */ "edition": "2018", /* Optional string that is the name of a native library the package is linking to. */ "links": null, } ], /* Array of members of the workspace. Each entry is the Package ID for the package. */ "workspace_members": [ "my-package 0.1.0 (path+file:///path/to/my-package)", ], // The resolved dependency graph for the entire workspace. The enabled // features are based on the enabled features for the "current" package. // Inactivated optional dependencies are not listed. // // This is null if --no-deps is specified. // // By default, this includes all dependencies for all target platforms. // The `--filter-platform` flag may be used to narrow to a specific // target triple. "resolve": { /* Array of nodes within the dependency graph. Each node is a package. */ "nodes": [ { /* The Package ID of this node. */ "id": "my-package 0.1.0 (path+file:///path/to/my-package)", /* The dependencies of this package, an array of Package IDs. */ "dependencies": [ "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" ], /* The dependencies of this package. This is an alternative to "dependencies" which contains additional information. In particular, this handles renamed dependencies. */ "deps": [ { /* The name of the dependency's library target. If this is a renamed dependency, this is the new name. */ "name": "bitflags", /* The Package ID of the dependency. */ "pkg": "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", /* Array of dependency kinds. 
Added in Cargo 1.40. */ "dep_kinds": [ { /* The dependency kind. "dev", "build", or null for a normal dependency. */ "kind": null, /* The target platform for the dependency. null if not a target dependency. */ "target": "cfg(windows)" } ] } ], /* Array of features enabled on this package. */ "features": [ "default" ] } ], /* The root package of the workspace. This is null if this is a virtual workspace. Otherwise it is the Package ID of the root package. */ "root": "my-package 0.1.0 (path+file:///path/to/my-package)" }, /* The absolute path to the build directory where Cargo places its output. */ "target_directory": "/path/to/my-package/target", /* The version of the schema for this metadata structure. This will be changed if incompatible changes are ever made. */ "version": 1, /* The absolute path to the root of the workspace. */ "workspace_root": "/path/to/my-package", /* Workspace metadata. This is null if no metadata is specified. */ "metadata": { "docs": { "rs": { "all-features": true } } } } ``` ## OPTIONS ### Output Options
--no-deps
Output information only about the workspace members and don't fetch dependencies.
--format-version version
Specify the version of the output format to use. Currently 1 is the only possible value.
--filter-platform triple
This filters the resolve output to only include dependencies for the given target triple. Without this flag, the resolve includes all targets.

Note that the dependencies listed in the "packages" array still include all dependencies. Each package definition is intended to be an unaltered reproduction of the information within Cargo.toml.

### Feature Selection The feature flags allow you to control which features are enabled. When no feature options are given, the `default` feature is activated for every selected package. See [the features documentation](../reference/features.html#command-line-feature-options) for more details.
-F features
--features features
Space or comma separated list of features to activate. Features of workspace members may be enabled with package-name/feature-name syntax. This flag may be specified multiple times, which enables all specified features.
--all-features
Activate all available features of all selected packages.
--no-default-features
Do not activate the default feature of the selected packages.
### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

### Manifest Options
--manifest-path path
Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory.
--frozen
--locked
Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date.

These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access.

--offline
Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible.

Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the net.offline config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Output JSON about the current package: cargo metadata --format-version=1 ## SEE ALSO [cargo(1)](cargo.html) cargo-0.66.0/src/doc/src/commands/cargo-new.md000066400000000000000000000145531432416201200210310ustar00rootroot00000000000000# cargo-new(1) ## NAME cargo-new - Create a new Cargo package ## SYNOPSIS `cargo new` [_options_] _path_ ## DESCRIPTION This command will create a new Cargo package in the given directory. This includes a simple template with a `Cargo.toml` manifest, sample source file, and a VCS ignore file. If the directory is not already in a VCS repository, then a new repository is created (see `--vcs` below). See [cargo-init(1)](cargo-init.html) for a similar command which will create a new manifest in an existing directory. ## OPTIONS ### New Options
--bin
Create a package with a binary target (src/main.rs). This is the default behavior.
--lib
Create a package with a library target (src/lib.rs).
--edition edition
Specify the Rust edition to use. Default is 2021. Possible values: 2015, 2018, 2021
--name name
Set the package name. Defaults to the directory name.
--vcs vcs
Initialize a new VCS repository for the given version control system (git, hg, pijul, or fossil) or do not initialize any version control at all (none). If not specified, defaults to git or the configuration value cargo-new.vcs, or none if already inside a VCS repository.
--registry registry
This sets the publish field in Cargo.toml to the given registry name which will restrict publishing only to that registry.

Registry names are defined in Cargo config files. If not specified, the default registry defined by the registry.default config key is used. If the default registry is not set and --registry is not used, the publish field will not be set which means that publishing will not be restricted.

### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Create a binary Cargo package in the given directory: cargo new foo ## SEE ALSO [cargo(1)](cargo.html), [cargo-init(1)](cargo-init.html) cargo-0.66.0/src/doc/src/commands/cargo-owner.md000066400000000000000000000162641432416201200213730ustar00rootroot00000000000000# cargo-owner(1) ## NAME cargo-owner - Manage the owners of a crate on the registry ## SYNOPSIS `cargo owner` [_options_] `--add` _login_ [_crate_]\ `cargo owner` [_options_] `--remove` _login_ [_crate_]\ `cargo owner` [_options_] `--list` [_crate_] ## DESCRIPTION This command will modify the owners for a crate on the registry. Owners of a crate can upload new versions and yank old versions. Non-team owners can also modify the set of owners, so take care! This command requires you to be authenticated with either the `--token` option or using [cargo-login(1)](cargo-login.html). If the crate name is not specified, it will use the package name from the current directory. See [the reference](../reference/publishing.html#cargo-owner) for more information about owners and publishing. ## OPTIONS ### Owner Options
-a
--add login...
Invite the given user or team as an owner.
-r
--remove login...
Remove the given user or team as an owner.
-l
--list
List owners of a crate.
--token token
API token to use when authenticating. This overrides the token stored in the credentials file (which is created by cargo-login(1)).

Cargo config environment variables can be used to override the tokens stored in the credentials file. The token for crates.io may be specified with the CARGO_REGISTRY_TOKEN environment variable. Tokens for other registries may be specified with environment variables of the form CARGO_REGISTRIES_NAME_TOKEN where NAME is the name of the registry in all capital letters.

--index index
The URL of the registry index to use.
--registry registry
Name of the registry to use. Registry names are defined in Cargo config files. If not specified, the default registry is used, which is defined by the registry.default config key which defaults to crates-io.
### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. List owners of a package: cargo owner --list foo 2. Invite an owner to a package: cargo owner --add username foo 3. Remove an owner from a package: cargo owner --remove username foo ## SEE ALSO [cargo(1)](cargo.html), [cargo-login(1)](cargo-login.html), [cargo-publish(1)](cargo-publish.html) cargo-0.66.0/src/doc/src/commands/cargo-package.md000066400000000000000000000361221432416201200216270ustar00rootroot00000000000000# cargo-package(1) ## NAME cargo-package - Assemble the local package into a distributable tarball ## SYNOPSIS `cargo package` [_options_] ## DESCRIPTION This command will create a distributable, compressed `.crate` file with the source code of the package in the current directory. The resulting file will be stored in the `target/package` directory. This performs the following steps: 1. Load and check the current workspace, performing some basic checks. - Path dependencies are not allowed unless they have a version key. Cargo will ignore the path key for dependencies in published packages. `dev-dependencies` do not have this restriction. 2. Create the compressed `.crate` file. - The original `Cargo.toml` file is rewritten and normalized. - `[patch]`, `[replace]`, and `[workspace]` sections are removed from the manifest. - `Cargo.lock` is automatically included if the package contains an executable binary or example target. [cargo-install(1)](cargo-install.html) will use the packaged lock file if the `--locked` flag is used. - A `.cargo_vcs_info.json` file is included that contains information about the current VCS checkout hash if available (not included with `--allow-dirty`). 3. Extract the `.crate` file and build it to verify it can build. 
- This will rebuild your package from scratch to ensure that it can be built from a pristine state. The `--no-verify` flag can be used to skip this step. 4. Check that build scripts did not modify any source files. The list of files included can be controlled with the `include` and `exclude` fields in the manifest. See [the reference](../reference/publishing.html) for more details about packaging and publishing. ### .cargo_vcs_info.json format This command will generate a `.cargo_vcs_info.json` in the following format ```javascript { "git": { "sha1": "aac20b6e7e543e6dd4118b246c77225e3a3a1302" }, "path_in_vcs": "" } ``` `path_in_vcs` will be set to a repo-relative path for packages in subdirectories of the version control repository. ## OPTIONS ### Package Options
-l
--list
Print files included in a package without making one.
--no-verify
Don't verify the contents by building them.
--no-metadata
Ignore warnings about a lack of human-usable metadata (such as the description or the license).
--allow-dirty
Allow working directories with uncommitted VCS changes to be packaged.
### Package Selection By default, when no package selection options are given, the packages selected depend on the selected manifest file (based on the current working directory if `--manifest-path` is not given). If the manifest is the root of a workspace then the workspace's default members are selected, otherwise only the package defined by the manifest will be selected. The default members of a workspace can be set explicitly with the `workspace.default-members` key in the root manifest. If this is not set, a virtual workspace will include all workspace members (equivalent to passing `--workspace`), and a non-virtual workspace will include only the root crate itself.
-p spec...
--package spec...
Package only the specified packages. See cargo-pkgid(1) for the SPEC format. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern.
--workspace
Package all members in the workspace.
--exclude SPEC...
Exclude the specified packages. Must be used in conjunction with the --workspace flag. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern.
### Compilation Options
--target triple
Package for the given architecture. The default is the host architecture. The general format of the triple is <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a list of supported targets. This flag may be specified multiple times.

This may also be specified with the build.target config value.

Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details.

--target-dir directory
Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the build.target-dir config value. Defaults to target in the root of the workspace.
### Feature Selection The feature flags allow you to control which features are enabled. When no feature options are given, the `default` feature is activated for every selected package. See [the features documentation](../reference/features.html#command-line-feature-options) for more details.
-F features
--features features
Space or comma separated list of features to activate. Features of workspace members may be enabled with package-name/feature-name syntax. This flag may be specified multiple times, which enables all specified features.
--all-features
Activate all available features of all selected packages.
--no-default-features
Do not activate the default feature of the selected packages.
### Manifest Options
--manifest-path path
Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory.
--frozen
--locked
Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date.

These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access.

--offline
Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible.

Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the net.offline config value.

### Miscellaneous Options
-j N
--jobs N
Number of parallel jobs to run. May also be specified with the build.jobs config value. Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus the provided value. Should not be 0.
--keep-going
Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires -Zunstable-options.
### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Create a compressed `.crate` file of the current package: cargo package ## SEE ALSO [cargo(1)](cargo.html), [cargo-publish(1)](cargo-publish.html) cargo-0.66.0/src/doc/src/commands/cargo-pkgid.md000066400000000000000000000175071432416201200213400ustar00rootroot00000000000000# cargo-pkgid(1) ## NAME cargo-pkgid - Print a fully qualified package specification ## SYNOPSIS `cargo pkgid` [_options_] [_spec_] ## DESCRIPTION Given a _spec_ argument, print out the fully qualified package ID specifier for a package or dependency in the current workspace. This command will generate an error if _spec_ is ambiguous as to which package it refers to in the dependency graph. If no _spec_ is given, then the specifier for the local package is printed. This command requires that a lockfile is available and dependencies have been fetched. A package specifier consists of a name, version, and source URL. You are allowed to use partial specifiers to succinctly match a specific package as long as it matches only one package. The format of a _spec_ can be one of the following: SPEC Structure | Example SPEC ---------------------------|-------------- _name_ | `bitflags` _name_`@`_version_ | `bitflags@1.0.4` _url_ | `https://github.com/rust-lang/cargo` _url_`#`_version_ | `https://github.com/rust-lang/cargo#0.33.0` _url_`#`_name_ | `https://github.com/rust-lang/crates.io-index#bitflags` _url_`#`_name_`:`_version_ | `https://github.com/rust-lang/cargo#crates-io@0.21.0` ## OPTIONS ### Package Selection
-p spec
--package spec
Get the package ID for the given package instead of the current package.
### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

### Manifest Options
--manifest-path path
Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory.
--frozen
--locked
Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date.

These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access.

--offline
Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible.

Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the net.offline config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Retrieve package specification for `foo` package: cargo pkgid foo 2. Retrieve package specification for version 1.0.0 of `foo`: cargo pkgid foo@1.0.0 3. Retrieve package specification for `foo` from crates.io: cargo pkgid https://github.com/rust-lang/crates.io-index#foo 4. Retrieve package specification for `foo` from a local package: cargo pkgid file:///path/to/local/package#foo ## SEE ALSO [cargo(1)](cargo.html), [cargo-generate-lockfile(1)](cargo-generate-lockfile.html), [cargo-metadata(1)](cargo-metadata.html) cargo-0.66.0/src/doc/src/commands/cargo-publish.md000066400000000000000000000333111432416201200216770ustar00rootroot00000000000000# cargo-publish(1) ## NAME cargo-publish - Upload a package to the registry ## SYNOPSIS `cargo publish` [_options_] ## DESCRIPTION This command will create a distributable, compressed `.crate` file with the source code of the package in the current directory and upload it to a registry. The default registry is . This performs the following steps: 1. Performs a few checks, including: - Checks the `package.publish` key in the manifest for restrictions on which registries you are allowed to publish to. 2. Create a `.crate` file by following the steps in [cargo-package(1)](cargo-package.html). 3. Upload the crate to the registry. Note that the server will perform additional checks on the crate. This command requires you to be authenticated with either the `--token` option or using [cargo-login(1)](cargo-login.html). See [the reference](../reference/publishing.html) for more details about packaging and publishing. ## OPTIONS ### Publish Options
--dry-run
Perform all checks without uploading.
--token token
API token to use when authenticating. This overrides the token stored in the credentials file (which is created by cargo-login(1)).

Cargo config environment variables can be used to override the tokens stored in the credentials file. The token for crates.io may be specified with the CARGO_REGISTRY_TOKEN environment variable. Tokens for other registries may be specified with environment variables of the form CARGO_REGISTRIES_NAME_TOKEN where NAME is the name of the registry in all capital letters.

--no-verify
Don't verify the contents by building them.
--allow-dirty
Allow working directories with uncommitted VCS changes to be packaged.
--index index
The URL of the registry index to use.
--registry registry
Name of the registry to publish to. Registry names are defined in Cargo config files. If not specified, and there is a package.publish field in Cargo.toml with a single registry, then it will publish to that registry. Otherwise it will use the default registry, which is defined by the registry.default config key which defaults to crates-io.
### Package Selection By default, the package in the current working directory is selected. The `-p` flag can be used to choose a different package in a workspace.
-p spec
--package spec
The package to publish. See cargo-pkgid(1) for the SPEC format.
### Compilation Options
--target triple
Publish for the given architecture. The default is the host architecture. The general format of the triple is <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a list of supported targets. This flag may be specified multiple times.

This may also be specified with the build.target config value.

Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details.

--target-dir directory
Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the build.target-dir config value. Defaults to target in the root of the workspace.
### Feature Selection The feature flags allow you to control which features are enabled. When no feature options are given, the `default` feature is activated for every selected package. See [the features documentation](../reference/features.html#command-line-feature-options) for more details.
-F features
--features features
Space or comma separated list of features to activate. Features of workspace members may be enabled with package-name/feature-name syntax. This flag may be specified multiple times, which enables all specified features.
--all-features
Activate all available features of all selected packages.
--no-default-features
Do not activate the default feature of the selected packages.
### Manifest Options
--manifest-path path
Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory.
--frozen
--locked
Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date.

These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access.

--offline
Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible.

Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the net.offline config value.

### Miscellaneous Options
-j N
--jobs N
Number of parallel jobs to run. May also be specified with the build.jobs config value. Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0.
--keep-going
Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires -Zunstable-options.
### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Publish the current package: cargo publish ## SEE ALSO [cargo(1)](cargo.html), [cargo-package(1)](cargo-package.html), [cargo-login(1)](cargo-login.html) cargo-0.66.0/src/doc/src/commands/cargo-report.md000066400000000000000000000023221432416201200215420ustar00rootroot00000000000000# cargo-report(1) ## NAME cargo-report - Generate and display various kinds of reports ## SYNOPSIS `cargo report` _type_ [_options_] ### DESCRIPTION Displays a report of the given _type_ - currently, only `future-incompat` is supported ## OPTIONS
--id id
Show the report with the specified Cargo-generated id
-p spec...
--package spec...
Only display a report for the specified package
## EXAMPLES 1. Display the latest future-incompat report: cargo report future-incompat 2. Display the latest future-incompat report for a specific package: cargo report future-incompat --package my-dep:0.0.1 ## SEE ALSO [Future incompat report](../reference/future-incompat-report.html) [cargo(1)](cargo.html) cargo-0.66.0/src/doc/src/commands/cargo-run.md000066400000000000000000000362121432416201200210400ustar00rootroot00000000000000# cargo-run(1) ## NAME cargo-run - Run the current package ## SYNOPSIS `cargo run` [_options_] [`--` _args_] ## DESCRIPTION Run a binary or example of the local package. All the arguments following the two dashes (`--`) are passed to the binary to run. If you're passing arguments to both Cargo and the binary, the ones after `--` go to the binary, the ones before go to Cargo. ## OPTIONS ### Package Selection By default, the package in the current working directory is selected. The `-p` flag can be used to choose a different package in a workspace.
-p spec
--package spec
The package to run. See cargo-pkgid(1) for the SPEC format.
### Target Selection When no target selection options are given, `cargo run` will run the binary target. If there are multiple binary targets, you must pass a target flag to choose one. Or, the `default-run` field may be specified in the `[package]` section of `Cargo.toml` to choose the name of the binary to run by default.
--bin name
Run the specified binary.
--example name
Run the specified example.
### Feature Selection The feature flags allow you to control which features are enabled. When no feature options are given, the `default` feature is activated for every selected package. See [the features documentation](../reference/features.html#command-line-feature-options) for more details.
-F features
--features features
Space or comma separated list of features to activate. Features of workspace members may be enabled with package-name/feature-name syntax. This flag may be specified multiple times, which enables all specified features.
--all-features
Activate all available features of all selected packages.
--no-default-features
Do not activate the default feature of the selected packages.
### Compilation Options
--target triple
Run for the given architecture. The default is the host architecture. The general format of the triple is <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a list of supported targets.

This may also be specified with the build.target config value.

Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details.

-r
--release
Run optimized artifacts with the release profile. See also the --profile option for choosing a specific profile by name.
--profile name
Run with the given profile. See the reference for more details on profiles.
--ignore-rust-version
Run the target even if the selected Rust compiler is older than the required Rust version as configured in the project's rust-version field.
--timings=fmts
Output information about how long each compilation takes, and track concurrency information over time. Accepts an optional comma-separated list of output formats; --timings without an argument will default to --timings=html. Specifying an output format (rather than the default) is unstable and requires -Zunstable-options. Valid output formats:

  • html (unstable, requires -Zunstable-options): Write a human-readable file cargo-timing.html to the target/cargo-timings directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine-readable timing data.
  • json (unstable, requires -Zunstable-options): Emit machine-readable JSON information about timing information.
### Output Options
--target-dir directory
Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the build.target-dir config value. Defaults to target in the root of the workspace.
### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

--message-format fmt
The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values:

  • human (default): Display in a human-readable text format. Conflicts with short and json.
  • short: Emit shorter, human-readable text messages. Conflicts with human and json.
  • json: Emit JSON messages to stdout. See the reference for more details. Conflicts with human and short.
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains the "short" rendering from rustc. Cannot be used with human or short.
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with human or short.
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with human or short.
### Manifest Options
--manifest-path path
Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory.
--frozen
--locked
Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date.

These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access.

--offline
Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible.

Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the net.offline config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
### Miscellaneous Options
-j N
--jobs N
Number of parallel jobs to run. May also be specified with the build.jobs config value. Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0.
--keep-going
Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires -Zunstable-options.
## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Build the local package and run its main target (assuming only one binary): cargo run 2. Run an example with extra arguments: cargo run --example exname -- --exoption exarg1 exarg2 ## SEE ALSO [cargo(1)](cargo.html), [cargo-build(1)](cargo-build.html) cargo-0.66.0/src/doc/src/commands/cargo-rustc.md000066400000000000000000000535261432416201200214030ustar00rootroot00000000000000# cargo-rustc(1) ## NAME cargo-rustc - Compile the current package, and pass extra options to the compiler ## SYNOPSIS `cargo rustc` [_options_] [`--` _args_] ## DESCRIPTION The specified target for the current package (or package specified by `-p` if provided) will be compiled along with all of its dependencies. The specified _args_ will all be passed to the final compiler invocation, not any of the dependencies. Note that the compiler will still unconditionally receive arguments such as `-L`, `--extern`, and `--crate-type`, and the specified _args_ will simply be added to the compiler invocation. See for documentation on rustc flags. This command requires that only one target is being compiled when additional arguments are provided. If more than one target is available for the current package the filters of `--lib`, `--bin`, etc, must be used to select which target is compiled. To pass flags to all compiler processes spawned by Cargo, use the `RUSTFLAGS` [environment variable](../reference/environment-variables.html) or the `build.rustflags` [config value](../reference/config.html). ## OPTIONS ### Package Selection By default, the package in the current working directory is selected. The `-p` flag can be used to choose a different package in a workspace.
-p spec
--package spec
The package to build. See cargo-pkgid(1) for the SPEC format.
### Target Selection When no target selection options are given, `cargo rustc` will build all binary and library targets of the selected package. Binary targets are automatically built if there is an integration test or benchmark being selected to build. This allows an integration test to execute the binary to exercise and test its behavior. The `CARGO_BIN_EXE_` [environment variable](../reference/environment-variables.html#environment-variables-cargo-sets-for-crates) is set when the integration test is built so that it can use the [`env` macro](https://doc.rust-lang.org/std/macro.env.html) to locate the executable. Passing target selection flags will build only the specified targets. Note that `--bin`, `--example`, `--test` and `--bench` flags also support common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each glob pattern.
--lib
Build the package's library.
--bin name...
Build the specified binary. This flag may be specified multiple times and supports common Unix glob patterns.
--bins
Build all binary targets.
--example name...
Build the specified example. This flag may be specified multiple times and supports common Unix glob patterns.
--examples
Build all example targets.
--test name...
Build the specified integration test. This flag may be specified multiple times and supports common Unix glob patterns.
--tests
Build all targets in test mode that have the test = true manifest flag set. By default this includes the library and binaries built as unittests, and integration tests. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a unittest, and once as a dependency for binaries, integration tests, etc.). Targets may be enabled or disabled by setting the test flag in the manifest settings for the target.
--bench name...
Build the specified benchmark. This flag may be specified multiple times and supports common Unix glob patterns.
--benches
Build all targets in benchmark mode that have the bench = true manifest flag set. By default this includes the library and binaries built as benchmarks, and bench targets. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a benchmark, and once as a dependency for binaries, benchmarks, etc.). Targets may be enabled or disabled by setting the bench flag in the manifest settings for the target.
--all-targets
Build all targets. This is equivalent to specifying --lib --bins --tests --benches --examples.
### Feature Selection The feature flags allow you to control which features are enabled. When no feature options are given, the `default` feature is activated for every selected package. See [the features documentation](../reference/features.html#command-line-feature-options) for more details.
-F features
--features features
Space or comma separated list of features to activate. Features of workspace members may be enabled with package-name/feature-name syntax. This flag may be specified multiple times, which enables all specified features.
--all-features
Activate all available features of all selected packages.
--no-default-features
Do not activate the default feature of the selected packages.
### Compilation Options
--target triple
Build for the given architecture. The default is the host architecture. The general format of the triple is <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a list of supported targets. This flag may be specified multiple times.

This may also be specified with the build.target config value.

Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details.

-r
--release
Build optimized artifacts with the release profile. See also the --profile option for choosing a specific profile by name.
--profile name
Build with the given profile.

The rustc subcommand will treat the following named profiles with special behaviors:

  • check β€” Builds in the same way as the cargo-check(1) command with the dev profile.
  • test β€” Builds in the same way as the cargo-test(1) command, enabling building in test mode which will enable tests and enable the test cfg option. See rustc tests for more detail.
  • bench β€” Builds in the same way as the cargo-bench(1) command, similar to the test profile.

See the reference for more details on profiles.

--ignore-rust-version
Build the target even if the selected Rust compiler is older than the required Rust version as configured in the project's rust-version field.
--timings=fmts
Output information about how long each compilation takes, and track concurrency information over time. Accepts an optional comma-separated list of output formats; --timings without an argument will default to --timings=html. Specifying an output format (rather than the default) is unstable and requires -Zunstable-options. Valid output formats:

  • html (unstable, requires -Zunstable-options): Write a human-readable file cargo-timing.html to the target/cargo-timings directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine-readable timing data.
  • json (unstable, requires -Zunstable-options): Emit machine-readable JSON information about timing information.
--crate-type crate-type
Build for the given crate type. This flag accepts a comma-separated list of 1 or more crate types, of which the allowed values are the same as crate-type field in the manifest for configuring a Cargo target. See crate-type field for possible values.

If the manifest contains a list, and --crate-type is provided, the command-line argument value will override what is in the manifest.

This flag only works when building a lib or example library target.

### Output Options
--target-dir directory
Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the build.target-dir config value. Defaults to target in the root of the workspace.
### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

--message-format fmt
The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values:

  • human (default): Display in a human-readable text format. Conflicts with short and json.
  • short: Emit shorter, human-readable text messages. Conflicts with human and json.
  • json: Emit JSON messages to stdout. See the reference for more details. Conflicts with human and short.
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains the "short" rendering from rustc. Cannot be used with human or short.
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with human or short.
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with human or short.
### Manifest Options
--manifest-path path
Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory.
--frozen
--locked
Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date.

These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access.

--offline
Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible.

Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the net.offline config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
### Miscellaneous Options
-j N
--jobs N
Number of parallel jobs to run. May also be specified with the build.jobs config value. Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0.
--keep-going
Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires -Zunstable-options.
--future-incompat-report
Displays a future-incompat report for any future-incompatible warnings produced during execution of this command.

See cargo-report(1)

## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Check if your package (not including dependencies) uses unsafe code: cargo rustc --lib -- -D unsafe-code 2. Try an experimental flag on the nightly compiler, such as this which prints the size of every type: cargo rustc --lib -- -Z print-type-sizes 3. Override `crate-type` field in Cargo.toml with command-line option: cargo rustc --lib --crate-type lib,cdylib ## SEE ALSO [cargo(1)](cargo.html), [cargo-build(1)](cargo-build.html), [rustc(1)](https://doc.rust-lang.org/rustc/index.html) cargo-0.66.0/src/doc/src/commands/cargo-rustdoc.md000066400000000000000000000500061432416201200217140ustar00rootroot00000000000000# cargo-rustdoc(1) ## NAME cargo-rustdoc - Build a package's documentation, using specified custom flags ## SYNOPSIS `cargo rustdoc` [_options_] [`--` _args_] ## DESCRIPTION The specified target for the current package (or package specified by `-p` if provided) will be documented with the specified _args_ being passed to the final rustdoc invocation. Dependencies will not be documented as part of this command. Note that rustdoc will still unconditionally receive arguments such as `-L`, `--extern`, and `--crate-type`, and the specified _args_ will simply be added to the rustdoc invocation. See for documentation on rustdoc flags. This command requires that only one target is being compiled when additional arguments are provided. If more than one target is available for the current package the filters of `--lib`, `--bin`, etc, must be used to select which target is compiled. To pass flags to all rustdoc processes spawned by Cargo, use the `RUSTDOCFLAGS` [environment variable](../reference/environment-variables.html) or the `build.rustdocflags` [config value](../reference/config.html). ## OPTIONS ### Documentation Options
--open
Open the docs in a browser after building them. This will use your default browser unless you define another one in the BROWSER environment variable or use the doc.browser configuration option.
### Package Selection By default, the package in the current working directory is selected. The `-p` flag can be used to choose a different package in a workspace.
-p spec
--package spec
The package to document. See cargo-pkgid(1) for the SPEC format.
### Target Selection When no target selection options are given, `cargo rustdoc` will document all binary and library targets of the selected package. The binary will be skipped if its name is the same as the lib target. Binaries are skipped if they have `required-features` that are missing. Passing target selection flags will document only the specified targets. Note that `--bin`, `--example`, `--test` and `--bench` flags also support common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each glob pattern.
--lib
Document the package's library.
--bin name...
Document the specified binary. This flag may be specified multiple times and supports common Unix glob patterns.
--bins
Document all binary targets.
--example name...
Document the specified example. This flag may be specified multiple times and supports common Unix glob patterns.
--examples
Document all example targets.
--test name...
Document the specified integration test. This flag may be specified multiple times and supports common Unix glob patterns.
--tests
Document all targets in test mode that have the test = true manifest flag set. By default this includes the library and binaries built as unittests, and integration tests. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a unittest, and once as a dependency for binaries, integration tests, etc.). Targets may be enabled or disabled by setting the test flag in the manifest settings for the target.
--bench name...
Document the specified benchmark. This flag may be specified multiple times and supports common Unix glob patterns.
--benches
Document all targets in benchmark mode that have the bench = true manifest flag set. By default this includes the library and binaries built as benchmarks, and bench targets. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a benchmark, and once as a dependency for binaries, benchmarks, etc.). Targets may be enabled or disabled by setting the bench flag in the manifest settings for the target.
--all-targets
Document all targets. This is equivalent to specifying --lib --bins --tests --benches --examples.
### Feature Selection The feature flags allow you to control which features are enabled. When no feature options are given, the `default` feature is activated for every selected package. See [the features documentation](../reference/features.html#command-line-feature-options) for more details.
-F features
--features features
Space or comma separated list of features to activate. Features of workspace members may be enabled with package-name/feature-name syntax. This flag may be specified multiple times, which enables all specified features.
--all-features
Activate all available features of all selected packages.
--no-default-features
Do not activate the default feature of the selected packages.
### Compilation Options
--target triple
Document for the given architecture. The default is the host architecture. The general format of the triple is <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a list of supported targets. This flag may be specified multiple times.

This may also be specified with the build.target config value.

Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details.

-r
--release
Document optimized artifacts with the release profile. See also the --profile option for choosing a specific profile by name.
--profile name
Document with the given profile. See the reference for more details on profiles.
--ignore-rust-version
Document the target even if the selected Rust compiler is older than the required Rust version as configured in the project's rust-version field.
--timings=fmts
Output information about how long each compilation takes, and track concurrency information over time. Accepts an optional comma-separated list of output formats; --timings without an argument will default to --timings=html. Specifying an output format (rather than the default) is unstable and requires -Zunstable-options. Valid output formats:

  • html (unstable, requires -Zunstable-options): Write a human-readable file cargo-timing.html to the target/cargo-timings directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine-readable timing data.
  • json (unstable, requires -Zunstable-options): Emit machine-readable JSON information about timing information.
### Output Options
--target-dir directory
Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the build.target-dir config value. Defaults to target in the root of the workspace.
### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

--message-format fmt
The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values:

  • human (default): Display in a human-readable text format. Conflicts with short and json.
  • short: Emit shorter, human-readable text messages. Conflicts with human and json.
  • json: Emit JSON messages to stdout. See the reference for more details. Conflicts with human and short.
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains the "short" rendering from rustc. Cannot be used with human or short.
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with human or short.
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with human or short.
### Manifest Options
--manifest-path path
Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory.
--frozen
--locked
Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date.

These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access.

--offline
Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible.

Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the net.offline config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
### Miscellaneous Options
-j N
--jobs N
Number of parallel jobs to run. May also be specified with the build.jobs config value. Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0.
--keep-going
Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires -Zunstable-options.
## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Build documentation with custom CSS included from a given file: cargo rustdoc --lib -- --extend-css extra.css ## SEE ALSO [cargo(1)](cargo.html), [cargo-doc(1)](cargo-doc.html), [rustdoc(1)](https://doc.rust-lang.org/rustdoc/index.html) cargo-0.66.0/src/doc/src/commands/cargo-search.md000066400000000000000000000120351432416201200214760ustar00rootroot00000000000000# cargo-search(1) ## NAME cargo-search - Search packages in crates.io ## SYNOPSIS `cargo search` [_options_] [_query_...] ## DESCRIPTION This performs a textual search for crates on . The matching crates will be displayed along with their description in TOML format suitable for copying into a `Cargo.toml` manifest. ## OPTIONS ### Search Options
--limit limit
Limit the number of results (default: 10, max: 100).
--index index
The URL of the registry index to use.
--registry registry
Name of the registry to use. Registry names are defined in Cargo config files. If not specified, the default registry is used, which is defined by the registry.default config key which defaults to crates-io.
### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Search for a package from crates.io: cargo search serde ## SEE ALSO [cargo(1)](cargo.html), [cargo-install(1)](cargo-install.html), [cargo-publish(1)](cargo-publish.html) cargo-0.66.0/src/doc/src/commands/cargo-test.md000066400000000000000000000624661432416201200212250ustar00rootroot00000000000000# cargo-test(1) ## NAME cargo-test - Execute unit and integration tests of a package ## SYNOPSIS `cargo test` [_options_] [_testname_] [`--` _test-options_] ## DESCRIPTION Compile and execute unit, integration, and documentation tests. The test filtering argument `TESTNAME` and all the arguments following the two dashes (`--`) are passed to the test binaries and thus to _libtest_ (rustc's built in unit-test and micro-benchmarking framework). If you're passing arguments to both Cargo and the binary, the ones after `--` go to the binary, the ones before go to Cargo. For details about libtest's arguments see the output of `cargo test -- --help` and check out the rustc book's chapter on how tests work at . As an example, this will filter for tests with `foo` in their name and run them on 3 threads in parallel: cargo test foo -- --test-threads 3 Tests are built with the `--test` option to `rustc` which creates a special executable by linking your code with libtest. The executable automatically runs all functions annotated with the `#[test]` attribute in multiple threads. `#[bench]` annotated functions will also be run with one iteration to verify that they are functional. If the package contains multiple test targets, each target compiles to a special executable as aforementioned, and then is run serially. 
The libtest harness may be disabled by setting `harness = false` in the target manifest settings, in which case your code will need to provide its own `main` function to handle running tests. ### Documentation tests Documentation tests are also run by default, which is handled by `rustdoc`. It extracts code samples from documentation comments of the library target, and then executes them. Different from normal test targets, each code block compiles to a doctest executable on the fly with `rustc`. These executables run in parallel in separate processes. The compilation of a code block is in fact a part of test function controlled by libtest, so some options such as `--jobs` might not take effect. Note that this execution model of doctests is not guaranteed and may change in the future; beware of depending on it. See the [rustdoc book](https://doc.rust-lang.org/rustdoc/) for more information on writing doc tests. ## OPTIONS ### Test Options
--no-run
Compile, but don't run tests.
--no-fail-fast
Run all tests regardless of failure. Without this flag, Cargo will exit after the first executable fails. The Rust test harness will run all tests within the executable to completion; this flag only applies to the executable as a whole.
### Package Selection By default, when no package selection options are given, the packages selected depend on the selected manifest file (based on the current working directory if `--manifest-path` is not given). If the manifest is the root of a workspace then the workspaces default members are selected, otherwise only the package defined by the manifest will be selected. The default members of a workspace can be set explicitly with the `workspace.default-members` key in the root manifest. If this is not set, a virtual workspace will include all workspace members (equivalent to passing `--workspace`), and a non-virtual workspace will include only the root crate itself.
-p spec...
--package spec...
Test only the specified packages. See cargo-pkgid(1) for the SPEC format. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern.
--workspace
Test all members in the workspace.
--all
Deprecated alias for --workspace.
--exclude SPEC...
Exclude the specified packages. Must be used in conjunction with the --workspace flag. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern.
### Target Selection When no target selection options are given, `cargo test` will build the following targets of the selected packages: - lib β€” used to link with binaries, examples, integration tests, and doc tests - bins (only if integration tests are built and required features are available) - examples β€” to ensure they compile - lib as a unit test - bins as unit tests - integration tests - doc tests for the lib target The default behavior can be changed by setting the `test` flag for the target in the manifest settings. Setting examples to `test = true` will build and run the example as a test. Setting targets to `test = false` will stop them from being tested by default. Target selection options that take a target by name ignore the `test` flag and will always test the given target. Doc tests for libraries may be disabled by setting `doctest = false` for the library in the manifest. Binary targets are automatically built if there is an integration test or benchmark being selected to test. This allows an integration test to execute the binary to exercise and test its behavior. The `CARGO_BIN_EXE_` [environment variable](../reference/environment-variables.html#environment-variables-cargo-sets-for-crates) is set when the integration test is built so that it can use the [`env` macro](https://doc.rust-lang.org/std/macro.env.html) to locate the executable. Passing target selection flags will test only the specified targets. Note that `--bin`, `--example`, `--test` and `--bench` flags also support common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each glob pattern.
--lib
Test the package's library.
--bin name...
Test the specified binary. This flag may be specified multiple times and supports common Unix glob patterns.
--bins
Test all binary targets.
--example name...
Test the specified example. This flag may be specified multiple times and supports common Unix glob patterns.
--examples
Test all example targets.
--test name...
Test the specified integration test. This flag may be specified multiple times and supports common Unix glob patterns.
--tests
Test all targets in test mode that have the test = true manifest flag set. By default this includes the library and binaries built as unittests, and integration tests. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a unittest, and once as a dependency for binaries, integration tests, etc.). Targets may be enabled or disabled by setting the test flag in the manifest settings for the target.
--bench name...
Test the specified benchmark. This flag may be specified multiple times and supports common Unix glob patterns.
--benches
Test all targets in benchmark mode that have the bench = true manifest flag set. By default this includes the library and binaries built as benchmarks, and bench targets. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a benchmark, and once as a dependency for binaries, benchmarks, etc.). Targets may be enabled or disabled by setting the bench flag in the manifest settings for the target.
--all-targets
Test all targets. This is equivalent to specifying --lib --bins --tests --benches --examples.
--doc
Test only the library's documentation. This cannot be mixed with other target options.
### Feature Selection The feature flags allow you to control which features are enabled. When no feature options are given, the `default` feature is activated for every selected package. See [the features documentation](../reference/features.html#command-line-feature-options) for more details.
-F features
--features features
Space or comma separated list of features to activate. Features of workspace members may be enabled with package-name/feature-name syntax. This flag may be specified multiple times, which enables all specified features.
--all-features
Activate all available features of all selected packages.
--no-default-features
Do not activate the default feature of the selected packages.
### Compilation Options
--target triple
Test for the given architecture. The default is the host architecture. The general format of the triple is <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a list of supported targets. This flag may be specified multiple times.

This may also be specified with the build.target config value.

Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details.

-r
--release
Test optimized artifacts with the release profile. See also the --profile option for choosing a specific profile by name.
--profile name
Test with the given profile. See the reference for more details on profiles.
--ignore-rust-version
Test the target even if the selected Rust compiler is older than the required Rust version as configured in the project's rust-version field.
--timings=fmts
Output information about how long each compilation takes, and track concurrency information over time. Accepts an optional comma-separated list of output formats; --timings without an argument will default to --timings=html. Specifying an output format (rather than the default) is unstable and requires -Zunstable-options. Valid output formats:

  • html (unstable, requires -Zunstable-options): Write a human-readable file cargo-timing.html to the target/cargo-timings directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine-readable timing data.
  • json (unstable, requires -Zunstable-options): Emit machine-readable JSON information about timing information.
### Output Options
--target-dir directory
Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the build.target-dir config value. Defaults to target in the root of the workspace.
### Display Options By default the Rust test harness hides output from test execution to keep results readable. Test output can be recovered (e.g., for debugging) by passing `--nocapture` to the test binaries: cargo test -- --nocapture
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

--message-format fmt
The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values:

  • human (default): Display in a human-readable text format. Conflicts with short and json.
  • short: Emit shorter, human-readable text messages. Conflicts with human and json.
  • json: Emit JSON messages to stdout. See the reference for more details. Conflicts with human and short.
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains the "short" rendering from rustc. Cannot be used with human or short.
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with human or short.
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with human or short.
### Manifest Options
--manifest-path path
Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory.
--frozen
--locked
Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date.

These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access.

--offline
Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible.

Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the net.offline config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
### Miscellaneous Options The `--jobs` argument affects the building of the test executable but does not affect how many threads are used when running the tests. The Rust test harness includes an option to control the number of threads used: cargo test -j 2 -- --test-threads=2
-j N
--jobs N
Number of parallel jobs to run. May also be specified with the build.jobs config value. Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0.
--keep-going
Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires -Zunstable-options.
--future-incompat-report
Displays a future-incompat report for any future-incompatible warnings produced during execution of this command.

See cargo-report(1)

## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Execute all the unit and integration tests of the current package: cargo test 2. Run only tests whose names match against a filter string: cargo test name_filter 3. Run only a specific test within a specific integration test: cargo test --test int_test_name -- modname::test_name ## SEE ALSO [cargo(1)](cargo.html), [cargo-bench(1)](cargo-bench.html), [types of tests](../reference/cargo-targets.html#tests), [how to write tests](https://doc.rust-lang.org/rustc/tests/index.html) cargo-0.66.0/src/doc/src/commands/cargo-tree.md000066400000000000000000000463751432416201200212060ustar00rootroot00000000000000# cargo-tree(1) ## NAME cargo-tree - Display a tree visualization of a dependency graph ## SYNOPSIS `cargo tree` [_options_] ## DESCRIPTION This command will display a tree of dependencies to the terminal. An example of a simple project that depends on the "rand" package: ``` myproject v0.1.0 (/myproject) └── rand v0.7.3 β”œβ”€β”€ getrandom v0.1.14 β”‚ β”œβ”€β”€ cfg-if v0.1.10 β”‚ └── libc v0.2.68 β”œβ”€β”€ libc v0.2.68 (*) β”œβ”€β”€ rand_chacha v0.2.2 β”‚ β”œβ”€β”€ ppv-lite86 v0.2.6 β”‚ └── rand_core v0.5.1 β”‚ └── getrandom v0.1.14 (*) └── rand_core v0.5.1 (*) [build-dependencies] └── cc v1.0.50 ``` Packages marked with `(*)` have been "de-duplicated". The dependencies for the package have already been shown elsewhere in the graph, and so are not repeated. Use the `--no-dedupe` option to repeat the duplicates. The `-e` flag can be used to select the dependency kinds to display. The "features" kind changes the output to display the features enabled by each dependency. 
For example, `cargo tree -e features`: ``` myproject v0.1.0 (/myproject) └── log feature "serde" └── log v0.4.8 β”œβ”€β”€ serde v1.0.106 └── cfg-if feature "default" └── cfg-if v0.1.10 ``` In this tree, `myproject` depends on `log` with the `serde` feature. `log` in turn depends on `cfg-if` with "default" features. When using `-e features` it can be helpful to use `-i` flag to show how the features flow into a package. See the examples below for more detail. ## OPTIONS ### Tree Options
-i spec
--invert spec
Show the reverse dependencies for the given package. This flag will invert the tree and display the packages that depend on the given package.

Note that in a workspace, by default it will only display the package's reverse dependencies inside the tree of the workspace member in the current directory. The --workspace flag can be used to extend it so that it will show the package's reverse dependencies across the entire workspace. The -p flag can be used to display the package's reverse dependencies only with the subtree of the package given to -p.

--prune spec
Prune the given package from the display of the dependency tree.
--depth depth
Maximum display depth of the dependency tree. A depth of 1 displays the direct dependencies, for example.
--no-dedupe
Do not de-duplicate repeated dependencies. Usually, when a package has already displayed its dependencies, further occurrences will not re-display its dependencies, and will include a (*) to indicate it has already been shown. This flag will cause those duplicates to be repeated.
-d
--duplicates
Show only dependencies which come in multiple versions (implies --invert). When used with the -p flag, only shows duplicates within the subtree of the given package.

It can be beneficial for build times and executable sizes to avoid building that same package multiple times. This flag can help identify the offending packages. You can then investigate if the package that depends on the duplicate with the older version can be updated to the newer version so that only one instance is built.

-e kinds
--edges kinds
The dependency kinds to display. Takes a comma separated list of values:

  • all β€” Show all edge kinds.
  • normal β€” Show normal dependencies.
  • build β€” Show build dependencies.
  • dev β€” Show development dependencies.
  • features β€” Show features enabled by each dependency. If this is the only kind given, then it will automatically include the other dependency kinds.
  • no-normal β€” Do not include normal dependencies.
  • no-build β€” Do not include build dependencies.
  • no-dev β€” Do not include development dependencies.
  • no-proc-macro β€” Do not include procedural macro dependencies.

The normal, build, dev, and all dependency kinds cannot be mixed with no-normal, no-build, or no-dev dependency kinds.

The default is normal,build,dev.

--target triple
Filter dependencies matching the given target-triple. The default is the host platform. Use the value all to include all targets.
### Tree Formatting Options
--charset charset
Chooses the character set to use for the tree. Valid values are "utf8" or "ascii". Default is "utf8".
-f format
--format format
Set the format string for each package. The default is "{p}".

This is an arbitrary string which will be used to display each package. The following strings will be replaced with the corresponding value:

  • {p} β€” The package name.
  • {l} β€” The package license.
  • {r} β€” The package repository URL.
  • {f} β€” Comma-separated list of package features that are enabled.
  • {lib} β€” The name, as used in a use statement, of the package's library.
--prefix prefix
Sets how each line is displayed. The prefix value can be one of:

  • indent (default) β€” Shows each line indented as a tree.
  • depth β€” Show as a list, with the numeric depth printed before each entry.
  • none β€” Show as a flat list.
### Package Selection By default, when no package selection options are given, the packages selected depend on the selected manifest file (based on the current working directory if `--manifest-path` is not given). If the manifest is the root of a workspace then the workspace's default members are selected, otherwise only the package defined by the manifest will be selected. The default members of a workspace can be set explicitly with the `workspace.default-members` key in the root manifest. If this is not set, a virtual workspace will include all workspace members (equivalent to passing `--workspace`), and a non-virtual workspace will include only the root crate itself.
-p spec...
--package spec...
Display only the specified packages. See cargo-pkgid(1) for the SPEC format. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern.
--workspace
Display all members in the workspace.
--exclude SPEC...
Exclude the specified packages. Must be used in conjunction with the --workspace flag. This flag may be specified multiple times and supports common Unix glob patterns like *, ? and []. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern.
### Manifest Options
--manifest-path path
Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory.
--frozen
--locked
Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date.

These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access.

--offline
Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible.

Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the net.offline config value.

### Feature Selection The feature flags allow you to control which features are enabled. When no feature options are given, the `default` feature is activated for every selected package. See [the features documentation](../reference/features.html#command-line-feature-options) for more details.
-F features
--features features
Space or comma separated list of features to activate. Features of workspace members may be enabled with package-name/feature-name syntax. This flag may be specified multiple times, which enables all specified features.
--all-features
Activate all available features of all selected packages.
--no-default-features
Do not activate the default feature of the selected packages.
### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Display the tree for the package in the current directory: cargo tree 2. Display all the packages that depend on the `syn` package: cargo tree -i syn 3. Show the features enabled on each package: cargo tree --format "{p} {f}" 4. Show all packages that are built multiple times. This can happen if multiple semver-incompatible versions appear in the tree (like 1.0.0 and 2.0.0). cargo tree -d 5. Explain why features are enabled for the `syn` package: cargo tree -e features -i syn The `-e features` flag is used to show features. The `-i` flag is used to invert the graph so that it displays the packages that depend on `syn`. An example of what this would display: ``` syn v1.0.17 β”œβ”€β”€ syn feature "clone-impls" β”‚ └── syn feature "default" β”‚ └── rustversion v1.0.2 β”‚ └── rustversion feature "default" β”‚ └── myproject v0.1.0 (/myproject) β”‚ └── myproject feature "default" (command-line) β”œβ”€β”€ syn feature "default" (*) β”œβ”€β”€ syn feature "derive" β”‚ └── syn feature "default" (*) β”œβ”€β”€ syn feature "full" β”‚ └── rustversion v1.0.2 (*) β”œβ”€β”€ syn feature "parsing" β”‚ └── syn feature "default" (*) β”œβ”€β”€ syn feature "printing" β”‚ └── syn feature "default" (*) β”œβ”€β”€ syn feature "proc-macro" β”‚ └── syn feature "default" (*) └── syn feature "quote" β”œβ”€β”€ syn feature "printing" (*) └── syn feature "proc-macro" (*) ``` To read this graph, you can follow the chain for each feature from the root to see why it is included. For example, the "full" feature is added by the `rustversion` crate which is included from `myproject` (with the default features), and `myproject` is the package selected on the command-line. 
All of the other `syn` features are added by the "default" feature ("quote" is added by "printing" and "proc-macro", both of which are default features). If you're having difficulty cross-referencing the de-duplicated `(*)` entries, try with the `--no-dedupe` flag to get the full output. ## SEE ALSO [cargo(1)](cargo.html), [cargo-metadata(1)](cargo-metadata.html) cargo-0.66.0/src/doc/src/commands/cargo-uninstall.md000066400000000000000000000125031432416201200222420ustar00rootroot00000000000000# cargo-uninstall(1) ## NAME cargo-uninstall - Remove a Rust binary ## SYNOPSIS `cargo uninstall` [_options_] [_spec_...] ## DESCRIPTION This command removes a package installed with [cargo-install(1)](cargo-install.html). The _spec_ argument is a package ID specification of the package to remove (see [cargo-pkgid(1)](cargo-pkgid.html)). By default all binaries are removed for a crate but the `--bin` and `--example` flags can be used to only remove particular binaries. The installation root is determined, in order of precedence: - `--root` option - `CARGO_INSTALL_ROOT` environment variable - `install.root` Cargo [config value](../reference/config.html) - `CARGO_HOME` environment variable - `$HOME/.cargo` ## OPTIONS ### Install Options
-p
--package spec...
Package to uninstall.
--bin name...
Only uninstall the binary name.
--root dir
Directory to uninstall packages from.
### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Uninstall a previously installed package. cargo uninstall ripgrep ## SEE ALSO [cargo(1)](cargo.html), [cargo-install(1)](cargo-install.html) cargo-0.66.0/src/doc/src/commands/cargo-update.md000066400000000000000000000214261432416201200215170ustar00rootroot00000000000000# cargo-update(1) ## NAME cargo-update - Update dependencies as recorded in the local lock file ## SYNOPSIS `cargo update` [_options_] ## DESCRIPTION This command will update dependencies in the `Cargo.lock` file to the latest version. If the `Cargo.lock` file does not exist, it will be created with the latest available versions. ## OPTIONS ### Update Options
-p spec...
--package spec...
Update only the specified packages. This flag may be specified multiple times. See cargo-pkgid(1) for the SPEC format.

If packages are specified with the -p flag, then a conservative update of the lockfile will be performed. This means that only the dependency specified by SPEC will be updated. Its transitive dependencies will be updated only if SPEC cannot be updated without updating dependencies. All other dependencies will remain locked at their currently recorded versions.

If -p is not specified, all dependencies are updated.

--aggressive
When used with -p, dependencies of spec are forced to update as well. Cannot be used with --precise.
--precise precise
When used with -p, allows you to specify a specific version number to set the package to. If the package comes from a git repository, this can be a git revision (such as a SHA hash or tag).
-w
--workspace
Attempt to update only packages defined in the workspace. Other packages are updated only if they don't already exist in the lockfile. This option is useful for updating Cargo.lock after you've changed version numbers in Cargo.toml.
--dry-run
Displays what would be updated, but doesn't actually write the lockfile.
### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

### Manifest Options
--manifest-path path
Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory.
--frozen
--locked
Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date.

These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access.

--offline
Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible.

Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the net.offline config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Update all dependencies in the lockfile: cargo update 2. Update only specific dependencies: cargo update -p foo -p bar 3. Set a specific dependency to a specific version: cargo update -p foo --precise 1.2.3 ## SEE ALSO [cargo(1)](cargo.html), [cargo-generate-lockfile(1)](cargo-generate-lockfile.html) cargo-0.66.0/src/doc/src/commands/cargo-vendor.md000066400000000000000000000205101432416201200215230ustar00rootroot00000000000000# cargo-vendor(1) ## NAME cargo-vendor - Vendor all dependencies locally ## SYNOPSIS `cargo vendor` [_options_] [_path_] ## DESCRIPTION This cargo subcommand will vendor all crates.io and git dependencies for a project into the specified directory at ``. After this command completes the vendor directory specified by `` will contain all remote sources from dependencies specified. Additional manifests beyond the default one can be specified with the `-s` option. The `cargo vendor` command will also print out the configuration necessary to use the vendored sources, which you will need to add to `.cargo/config.toml`. ## OPTIONS ### Vendor Options
-s manifest
--sync manifest
Specify an extra Cargo.toml manifest to workspaces which should also be vendored and synced to the output. May be specified multiple times.
--no-delete
Don't delete the "vendor" directory when vendoring, but rather keep all existing contents of the vendor directory.
--respect-source-config
Instead of ignoring [source] configuration by default in .cargo/config.toml, read it and use it when downloading crates from crates.io, for example
--versioned-dirs
Normally versions are only added to disambiguate multiple versions of the same package. This option causes all directories in the "vendor" directory to be versioned, which makes it easier to track the history of vendored packages over time, and can help with the performance of re-vendoring when only a subset of the packages have changed.
### Manifest Options
--manifest-path path
Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory.
--frozen
--locked
Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date.

These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access.

--offline
Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible.

Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the net.offline config value.

### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Vendor all dependencies into a local "vendor" folder cargo vendor 2. Vendor all dependencies into a local "third-party/vendor" folder cargo vendor third-party/vendor 3. Vendor the current workspace as well as another to "vendor" cargo vendor -s ../path/to/Cargo.toml ## SEE ALSO [cargo(1)](cargo.html) cargo-0.66.0/src/doc/src/commands/cargo-verify-project.md000066400000000000000000000146041432416201200232050ustar00rootroot00000000000000# cargo-verify-project(1) ## NAME cargo-verify-project - Check correctness of crate manifest ## SYNOPSIS `cargo verify-project` [_options_] ## DESCRIPTION This command will parse the local manifest and check its validity. It emits a JSON object with the result. A successful validation will display: {"success":"true"} An invalid workspace will display: {"invalid":"human-readable error message"} ## OPTIONS ### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

### Manifest Options
--manifest-path path
Path to the Cargo.toml file. By default, Cargo searches for the Cargo.toml file in the current directory or any parent directory.
--frozen
--locked
Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date.

These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access.

--offline
Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible.

Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the net.offline config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: The workspace is OK. * `1`: The workspace is invalid. ## EXAMPLES 1. Check the current workspace for errors: cargo verify-project ## SEE ALSO [cargo(1)](cargo.html), [cargo-package(1)](cargo-package.html) cargo-0.66.0/src/doc/src/commands/cargo-version.md000066400000000000000000000014211432416201200217130ustar00rootroot00000000000000# cargo-version(1) ## NAME cargo-version - Show version information ## SYNOPSIS `cargo version` [_options_] ## DESCRIPTION Displays the version of Cargo. ## OPTIONS
-v
--verbose
Display additional version information.
## EXAMPLES 1. Display the version: cargo version 2. The version is also available via flags: cargo --version cargo -V 3. Display extra version information: cargo -Vv ## SEE ALSO [cargo(1)](cargo.html) cargo-0.66.0/src/doc/src/commands/cargo-yank.md000066400000000000000000000151501432416201200211740ustar00rootroot00000000000000# cargo-yank(1) ## NAME cargo-yank - Remove a pushed crate from the index ## SYNOPSIS `cargo yank` [_options_] _crate_@_version_\ `cargo yank` [_options_] `--version` _version_ [_crate_] ## DESCRIPTION The yank command removes a previously published crate's version from the server's index. This command does not delete any data, and the crate will still be available for download via the registry's download link. Note that existing crates locked to a yanked version will still be able to download the yanked version to use it. Cargo will, however, not allow any new crates to be locked to any yanked version. This command requires you to be authenticated with either the `--token` option or using [cargo-login(1)](cargo-login.html). If the crate name is not specified, it will use the package name from the current directory. ## OPTIONS ### Yank Options
--vers version
--version version
The version to yank or un-yank.
--undo
Undo a yank, putting a version back into the index.
--token token
API token to use when authenticating. This overrides the token stored in the credentials file (which is created by cargo-login(1)).

Cargo config environment variables can be used to override the tokens stored in the credentials file. The token for crates.io may be specified with the CARGO_REGISTRY_TOKEN environment variable. Tokens for other registries may be specified with environment variables of the form CARGO_REGISTRIES_NAME_TOKEN where NAME is the name of the registry in all capital letters.

--index index
The URL of the registry index to use.
--registry registry
Name of the registry to use. Registry names are defined in Cargo config files. If not specified, the default registry is used, which is defined by the registry.default config key which defaults to crates-io.
### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## EXAMPLES 1. Yank a crate from the index: cargo yank foo@1.0.7 ## SEE ALSO [cargo(1)](cargo.html), [cargo-login(1)](cargo-login.html), [cargo-publish(1)](cargo-publish.html) cargo-0.66.0/src/doc/src/commands/cargo.md000066400000000000000000000274331432416201200202430ustar00rootroot00000000000000# cargo(1) ## NAME cargo - The Rust package manager ## SYNOPSIS `cargo` [_options_] _command_ [_args_]\ `cargo` [_options_] `--version`\ `cargo` [_options_] `--list`\ `cargo` [_options_] `--help`\ `cargo` [_options_] `--explain` _code_ ## DESCRIPTION This program is a package manager and build tool for the Rust language, available at . ## COMMANDS ### Build Commands [cargo-bench(1)](cargo-bench.html)\     Execute benchmarks of a package. [cargo-build(1)](cargo-build.html)\     Compile a package. [cargo-check(1)](cargo-check.html)\     Check a local package and all of its dependencies for errors. [cargo-clean(1)](cargo-clean.html)\     Remove artifacts that Cargo has generated in the past. [cargo-doc(1)](cargo-doc.html)\     Build a package's documentation. [cargo-fetch(1)](cargo-fetch.html)\     Fetch dependencies of a package from the network. [cargo-fix(1)](cargo-fix.html)\     Automatically fix lint warnings reported by rustc. [cargo-run(1)](cargo-run.html)\     Run a binary or example of the local package. [cargo-rustc(1)](cargo-rustc.html)\     Compile a package, and pass extra options to the compiler. [cargo-rustdoc(1)](cargo-rustdoc.html)\     Build a package's documentation, using specified custom flags. [cargo-test(1)](cargo-test.html)\     Execute unit and integration tests of a package. ### Manifest Commands [cargo-generate-lockfile(1)](cargo-generate-lockfile.html)\     Generate `Cargo.lock` for a project. 
[cargo-locate-project(1)](cargo-locate-project.html)\     Print a JSON representation of a `Cargo.toml` file's location. [cargo-metadata(1)](cargo-metadata.html)\     Output the resolved dependencies of a package in machine-readable format. [cargo-pkgid(1)](cargo-pkgid.html)\     Print a fully qualified package specification. [cargo-tree(1)](cargo-tree.html)\     Display a tree visualization of a dependency graph. [cargo-update(1)](cargo-update.html)\     Update dependencies as recorded in the local lock file. [cargo-vendor(1)](cargo-vendor.html)\     Vendor all dependencies locally. [cargo-verify-project(1)](cargo-verify-project.html)\     Check correctness of crate manifest. ### Package Commands [cargo-init(1)](cargo-init.html)\     Create a new Cargo package in an existing directory. [cargo-install(1)](cargo-install.html)\     Build and install a Rust binary. [cargo-new(1)](cargo-new.html)\     Create a new Cargo package. [cargo-search(1)](cargo-search.html)\     Search packages in crates.io. [cargo-uninstall(1)](cargo-uninstall.html)\     Remove a Rust binary. ### Publishing Commands [cargo-login(1)](cargo-login.html)\     Save an API token from the registry locally. [cargo-owner(1)](cargo-owner.html)\     Manage the owners of a crate on the registry. [cargo-package(1)](cargo-package.html)\     Assemble the local package into a distributable tarball. [cargo-publish(1)](cargo-publish.html)\     Upload a package to the registry. [cargo-yank(1)](cargo-yank.html)\     Remove a pushed crate from the index. ### General Commands [cargo-help(1)](cargo-help.html)\     Display help information about Cargo. [cargo-version(1)](cargo-version.html)\     Show version information. ## OPTIONS ### Special Options
-V
--version
Print version info and exit. If used with --verbose, prints extra information.
--list
List all installed Cargo subcommands. If used with --verbose, prints extra information.
--explain code
Run rustc --explain CODE which will print out a detailed explanation of an error message (for example, E0004).
### Display Options
-v
--verbose
Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the term.verbose config value.
-q
--quiet
Do not print cargo log messages. May also be specified with the term.quiet config value.
--color when
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.

### Manifest Options
--frozen
--locked
Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date.

These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access.

--offline
Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible.

Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the net.offline config value.

### Common Options
+toolchain
If Cargo has been installed with rustup, and the first argument to cargo begins with +, it will be interpreted as a rustup toolchain name (such as +stable or +nightly). See the rustup documentation for more information about how toolchain overrides work.
--config KEY=VALUE or PATH
Overrides a Cargo configuration value. The argument should be in TOML syntax of KEY=VALUE, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the command-line overrides section for more information.
-h
--help
Prints help information.
-Z flag
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
## ENVIRONMENT See [the reference](../reference/environment-variables.html) for details on environment variables that Cargo reads. ## EXIT STATUS * `0`: Cargo succeeded. * `101`: Cargo failed to complete. ## FILES `~/.cargo/`\     Default location for Cargo's "home" directory where it stores various files. The location can be changed with the `CARGO_HOME` environment variable. `$CARGO_HOME/bin/`\     Binaries installed by [cargo-install(1)](cargo-install.html) will be located here. If using [rustup], executables distributed with Rust are also located here. `$CARGO_HOME/config.toml`\     The global configuration file. See [the reference](../reference/config.html) for more information about configuration files. `.cargo/config.toml`\     Cargo automatically searches for a file named `.cargo/config.toml` in the current directory, and all parent directories. These configuration files will be merged with the global configuration file. `$CARGO_HOME/credentials.toml`\     Private authentication information for logging in to a registry. `$CARGO_HOME/registry/`\     This directory contains cached downloads of the registry index and any downloaded dependencies. `$CARGO_HOME/git/`\     This directory contains cached downloads of git dependencies. Please note that the internal structure of the `$CARGO_HOME` directory is not stable yet and may be subject to change. [rustup]: https://rust-lang.github.io/rustup/ ## EXAMPLES 1. Build a local package and all of its dependencies: cargo build 2. Build a package with optimizations: cargo build --release 3. Run tests for a cross-compiled target: cargo test --target i686-unknown-linux-gnu 4. Create a new package that builds an executable: cargo new foobar 5. Create a package in the current directory: mkdir foo && cd foo cargo init . 6. Learn about a command's options and usage: cargo help clean ## BUGS See for issues. 
## SEE ALSO [rustc(1)](https://doc.rust-lang.org/rustc/index.html), [rustdoc(1)](https://doc.rust-lang.org/rustdoc/index.html) cargo-0.66.0/src/doc/src/commands/general-commands.md000066400000000000000000000001511432416201200223500ustar00rootroot00000000000000# General Commands * [cargo](cargo.md) * [cargo help](cargo-help.md) * [cargo version](cargo-version.md) cargo-0.66.0/src/doc/src/commands/index.md000066400000000000000000000003471432416201200202520ustar00rootroot00000000000000# Cargo Commands * [General Commands](general-commands.md) * [Build Commands](build-commands.md) * [Manifest Commands](manifest-commands.md) * [Package Commands](package-commands.md) * [Publishing Commands](publishing-commands.md) cargo-0.66.0/src/doc/src/commands/manifest-commands.md000066400000000000000000000005641432416201200225510ustar00rootroot00000000000000# Manifest Commands * [cargo add](cargo-add.md) * [cargo generate-lockfile](cargo-generate-lockfile.md) * [cargo locate-project](cargo-locate-project.md) * [cargo metadata](cargo-metadata.md) * [cargo pkgid](cargo-pkgid.md) * [cargo tree](cargo-tree.md) * [cargo update](cargo-update.md) * [cargo vendor](cargo-vendor.md) * [cargo verify-project](cargo-verify-project.md) cargo-0.66.0/src/doc/src/commands/package-commands.md000066400000000000000000000002731432416201200223330ustar00rootroot00000000000000# Package Commands * [cargo init](cargo-init.md) * [cargo install](cargo-install.md) * [cargo new](cargo-new.md) * [cargo search](cargo-search.md) * [cargo uninstall](cargo-uninstall.md) cargo-0.66.0/src/doc/src/commands/publishing-commands.md000066400000000000000000000002741432416201200231050ustar00rootroot00000000000000# Publishing Commands * [cargo login](cargo-login.md) * [cargo owner](cargo-owner.md) * [cargo package](cargo-package.md) * [cargo publish](cargo-publish.md) * [cargo yank](cargo-yank.md) cargo-0.66.0/src/doc/src/faq.md000066400000000000000000000306511432416201200161120ustar00rootroot00000000000000## Frequently Asked 
Questions ### Is the plan to use GitHub as a package repository? No. The plan for Cargo is to use [crates.io], like npm or Rubygems do with [npmjs.com][1] and [rubygems.org][3]. We plan to support git repositories as a source of packages forever, because they can be used for early development and temporary patches, even when people use the registry as the primary source of packages. ### Why build crates.io rather than use GitHub as a registry? We think that it’s very important to support multiple ways to download packages, including downloading from GitHub and copying packages into your package itself. That said, we think that [crates.io] offers a number of important benefits, and will likely become the primary way that people download packages in Cargo. For precedent, both Node.js’s [npm][1] and Ruby’s [bundler][2] support both a central registry model as well as a Git-based model, and most packages are downloaded through the registry in those ecosystems, with an important minority of packages making use of git-based packages. [1]: https://www.npmjs.com [2]: https://bundler.io [3]: https://rubygems.org Some of the advantages that make a central registry popular in other languages include: * **Discoverability**. A central registry provides an easy place to look for existing packages. Combined with tagging, this also makes it possible for a registry to provide ecosystem-wide information, such as a list of the most popular or most-depended-on packages. * **Speed**. A central registry makes it possible to easily fetch just the metadata for packages quickly and efficiently, and then to efficiently download just the published package, and not other bloat that happens to exist in the repository. This adds up to a significant improvement in the speed of dependency resolution and fetching. As dependency graphs scale up, downloading all of the git repositories bogs down fast. Also remember that not everybody has a high-speed, low-latency Internet connection. 
### Will Cargo work with C code (or other languages)? Yes! Cargo handles compiling Rust code, but we know that many Rust packages link against C code. We also know that there are decades of tooling built up around compiling languages other than Rust. Our solution: Cargo allows a package to [specify a script](reference/build-scripts.md) (written in Rust) to run before invoking `rustc`. Rust is leveraged to implement platform-specific configuration and refactor out common build functionality among packages. ### Can Cargo be used inside of `make` (or `ninja`, or ...) Indeed. While we intend Cargo to be useful as a standalone way to compile Rust packages at the top-level, we know that some people will want to invoke Cargo from other build tools. We have designed Cargo to work well in those contexts, paying attention to things like error codes and machine-readable output modes. We still have some work to do on those fronts, but using Cargo in the context of conventional scripts is something we designed for from the beginning and will continue to prioritize. ### Does Cargo handle multi-platform packages or cross-compilation? Rust itself provides facilities for configuring sections of code based on the platform. Cargo also supports [platform-specific dependencies][target-deps], and we plan to support more per-platform configuration in `Cargo.toml` in the future. [target-deps]: reference/specifying-dependencies.md#platform-specific-dependencies In the longer-term, we’re looking at ways to conveniently cross-compile packages using Cargo. ### Does Cargo support environments, like `production` or `test`? We support environments through the use of [profiles] to support: [profiles]: reference/profiles.md * environment-specific flags (like `-g --opt-level=0` for development and `--opt-level=3` for production). * environment-specific dependencies (like `hamcrest` for test assertions). * environment-specific `#[cfg]` * a `cargo test` command ### Does Cargo work on Windows? Yes! 
All commits to Cargo are required to pass the local test suite on Windows. If, however, you find a Windows issue, we consider it a bug, so [please file an issue][cargo-issues]. [cargo-issues]: https://github.com/rust-lang/cargo/issues ### Why do binaries have `Cargo.lock` in version control, but not libraries? The purpose of a `Cargo.lock` lockfile is to describe the state of the world at the time of a successful build. Cargo uses the lockfile to provide deterministic builds on different times and different systems, by ensuring that the exact same dependencies and versions are used as when the `Cargo.lock` file was originally generated. This property is most desirable from applications and packages which are at the very end of the dependency chain (binaries). As a result, it is recommended that all binaries check in their `Cargo.lock`. For libraries the situation is somewhat different. A library is not only used by the library developers, but also any downstream consumers of the library. Users dependent on the library will not inspect the library’s `Cargo.lock` (even if it exists). This is precisely because a library should **not** be deterministically recompiled for all users of the library. If a library ends up being used transitively by several dependencies, it’s likely that just a single copy of the library is desired (based on semver compatibility). If Cargo used all of the dependencies' `Cargo.lock` files, then multiple copies of the library could be used, and perhaps even a version conflict. In other words, libraries specify SemVer requirements for their dependencies but cannot see the full picture. Only end products like binaries have a full picture to decide what versions of dependencies should be used. ### Can libraries use `*` as a version for their dependencies? **As of January 22nd, 2016, [crates.io] rejects all packages (not just libraries) with wildcard dependency constraints.** While libraries _can_, strictly speaking, they should not. 
A version requirement of `*` says “This will work with every version ever”, which is never going to be true. Libraries should always specify the range that they do work with, even if it’s something as general as “every 1.x.y version”. ### Why `Cargo.toml`? As one of the most frequent interactions with Cargo, the question of why the configuration file is named `Cargo.toml` arises from time to time. The leading capital-`C` was chosen to ensure that the manifest was grouped with other similar configuration files in directory listings. Sorting files often puts capital letters before lowercase letters, ensuring files like `Makefile` and `Cargo.toml` are placed together. The trailing `.toml` was chosen to emphasize the fact that the file is in the [TOML configuration format](https://toml.io/). Cargo does not allow other names such as `cargo.toml` or `Cargofile` to emphasize the ease of how a Cargo repository can be identified. An option of many possible names has historically led to confusion where one case was handled but others were accidentally forgotten. [crates.io]: https://crates.io/ ### How can Cargo work offline? Cargo is often used in situations with limited or no network access such as airplanes, CI environments, or embedded in large production deployments. Users are often surprised when Cargo attempts to fetch resources from the network, and hence the request for Cargo to work offline comes up frequently. Cargo, at its heart, will not attempt to access the network unless told to do so. That is, if no crates come from crates.io, a git repository, or some other network location, Cargo will never attempt to make a network connection. As a result, if Cargo attempts to touch the network, then it's because it needs to fetch a required resource. Cargo is also quite aggressive about caching information to minimize the amount of network activity. 
It will guarantee, for example, that if `cargo build` (or an equivalent) is run to completion then the next `cargo build` is guaranteed to not touch the network so long as `Cargo.toml` has not been modified in the meantime. This avoidance of the network boils down to a `Cargo.lock` existing and a populated cache of the crates reflected in the lock file. If either of these components are missing, then they're required for the build to succeed and must be fetched remotely. As of Rust 1.11.0, Cargo understands a new flag, `--frozen`, which is an assertion that it shouldn't touch the network. When passed, Cargo will immediately return an error if it would otherwise attempt a network request. The error should include contextual information about why the network request is being made in the first place to help debug as well. Note that this flag *does not change the behavior of Cargo*, it simply asserts that Cargo shouldn't touch the network as a previous command has been run to ensure that network activity shouldn't be necessary. The `--offline` flag was added in Rust 1.36.0. This flag tells Cargo to not access the network, and try to proceed with available cached data if possible. You can use [`cargo fetch`] in one project to download dependencies before going offline, and then use those same dependencies in another project with the `--offline` flag (or [configuration value][offline config]). For more information about vendoring, see documentation on [source replacement][replace]. [replace]: reference/source-replacement.md [`cargo fetch`]: commands/cargo-fetch.md [offline config]: reference/config.md#netoffline ### Why is Cargo rebuilding my code? Cargo is responsible for incrementally compiling crates in your project. This means that if you type `cargo build` twice the second one shouldn't rebuild your crates.io dependencies, for example. Nevertheless bugs arise and Cargo can sometimes rebuild code when you're not expecting it! 
We've long [wanted to provide better diagnostics about this](https://github.com/rust-lang/cargo/issues/2904) but unfortunately haven't been able to make progress on that issue in quite some time. In the meantime, however, you can debug a rebuild at least a little by setting the `CARGO_LOG` environment variable: ```sh $ CARGO_LOG=cargo::core::compiler::fingerprint=info cargo build ``` This will cause Cargo to print out a lot of information about diagnostics and rebuilding. This can often contain clues as to why your project is getting rebuilt, although you'll often need to connect some dots yourself since this output isn't super easy to read just yet. Note that the `CARGO_LOG` needs to be set for the command that rebuilds when you think it should not. Unfortunately Cargo has no way right now of after-the-fact debugging "why was that rebuilt?" Some issues we've seen historically which can cause crates to get rebuilt are: * A build script prints `cargo:rerun-if-changed=foo` where `foo` is a file that doesn't exist and nothing generates it. In this case Cargo will keep running the build script thinking it will generate the file but nothing ever does. The fix is to avoid printing `rerun-if-changed` in this scenario. * Two successive Cargo builds may differ in the set of features enabled for some dependencies. For example if the first build command builds the whole workspace and the second command builds only one crate, this may cause a dependency on crates.io to have a different set of features enabled, causing it and everything that depends on it to get rebuilt. There's unfortunately not really a great fix for this, although if possible it's best to have the set of features enabled on a crate constant regardless of what you're building in your workspace. * Some filesystems exhibit unusual behavior around timestamps. 
Cargo primarily uses timestamps on files to govern whether rebuilding needs to happen, but if you're using a nonstandard filesystem it may be affecting the timestamps somehow (e.g. truncating them, causing them to drift, etc). In this scenario, feel free to open an issue and we can see if we can accommodate the filesystem somehow. * A concurrent build process is either deleting artifacts or modifying files. Sometimes you might have a background process that either tries to build or check your project. These background processes might surprisingly delete some build artifacts or touch files (or maybe just by accident), which can cause rebuilds to look spurious! The best fix here would be to wrangle the background process to avoid clashing with your work. If after trying to debug your issue, however, you're still running into problems then feel free to [open an issue](https://github.com/rust-lang/cargo/issues/new)! cargo-0.66.0/src/doc/src/getting-started/000077500000000000000000000000001432416201200201215ustar00rootroot00000000000000cargo-0.66.0/src/doc/src/getting-started/first-steps.md000066400000000000000000000036041432416201200227310ustar00rootroot00000000000000## First Steps with Cargo This section provides a quick sense for the `cargo` command line tool. We demonstrate its ability to generate a new [***package***][def-package] for us, its ability to compile the [***crate***][def-crate] within the package, and its ability to run the resulting program. To start a new package with Cargo, use `cargo new`: ```console $ cargo new hello_world ``` Cargo defaults to `--bin` to make a binary program. To make a library, we would pass `--lib`, instead. Let’s check out what Cargo has generated for us: ```console $ cd hello_world $ tree . . β”œβ”€β”€ Cargo.toml └── src └── main.rs 1 directory, 2 files ``` This is all we need to get started. 
First, let’s check out `Cargo.toml`: ```toml [package] name = "hello_world" version = "0.1.0" edition = "2021" [dependencies] ``` This is called a [***manifest***][def-manifest], and it contains all of the metadata that Cargo needs to compile your package. Here’s what’s in `src/main.rs`: ```rust fn main() { println!("Hello, world!"); } ``` Cargo generated a β€œhello world” program for us, otherwise known as a [***binary crate***][def-crate]. Let’s compile it: ```console $ cargo build Compiling hello_world v0.1.0 (file:///path/to/package/hello_world) ``` And then run it: ```console $ ./target/debug/hello_world Hello, world! ``` We can also use `cargo run` to compile and then run it, all in one step: ```console $ cargo run Fresh hello_world v0.1.0 (file:///path/to/package/hello_world) Running `target/hello_world` Hello, world! ``` ### Going further For more details on using Cargo, check out the [Cargo Guide](../guide/index.md) [def-crate]: ../appendix/glossary.md#crate '"crate" (glossary entry)' [def-manifest]: ../appendix/glossary.md#manifest '"manifest" (glossary entry)' [def-package]: ../appendix/glossary.md#package '"package" (glossary entry)' cargo-0.66.0/src/doc/src/getting-started/index.md000066400000000000000000000004131432416201200215500ustar00rootroot00000000000000## Getting Started To get started with Cargo, install Cargo (and Rust) and set up your first [*crate*][def-crate]. * [Installation](installation.md) * [First steps with Cargo](first-steps.md) [def-crate]: ../appendix/glossary.md#crate '"crate" (glossary entry)' cargo-0.66.0/src/doc/src/getting-started/installation.md000066400000000000000000000022301432416201200231410ustar00rootroot00000000000000## Installation ### Install Rust and Cargo The easiest way to get Cargo is to install the current stable release of [Rust] by using [rustup]. Installing Rust using `rustup` will also install `cargo`. 
On Linux and macOS systems, this is done as follows: ```console curl https://sh.rustup.rs -sSf | sh ``` It will download a script, and start the installation. If everything goes well, you’ll see this appear: ```console Rust is installed now. Great! ``` On Windows, download and run [rustup-init.exe]. It will start the installation in a console and present the above message on success. After this, you can use the `rustup` command to also install `beta` or `nightly` channels for Rust and Cargo. For other installation options and information, visit the [install][install-rust] page of the Rust website. ### Build and Install Cargo from Source Alternatively, you can [build Cargo from source][compiling-from-source]. [rust]: https://www.rust-lang.org/ [rustup]: https://rustup.rs/ [rustup-init.exe]: https://win.rustup.rs/ [install-rust]: https://www.rust-lang.org/tools/install [compiling-from-source]: https://github.com/rust-lang/cargo#compiling-from-source cargo-0.66.0/src/doc/src/guide/000077500000000000000000000000001432416201200161115ustar00rootroot00000000000000cargo-0.66.0/src/doc/src/guide/build-cache.md000066400000000000000000000126371432416201200206040ustar00rootroot00000000000000## Build cache Cargo stores the output of a build into the "target" directory. By default, this is the directory named `target` in the root of your [*workspace*][def-workspace]. To change the location, you can set the `CARGO_TARGET_DIR` [environment variable], the [`build.target-dir`] config value, or the `--target-dir` command-line flag. The directory layout depends on whether or not you are using the `--target` flag to build for a specific platform. If `--target` is not specified, Cargo runs in a mode where it builds for the host architecture. The output goes into the root of the target directory, with each [profile] stored in a separate subdirectory: Directory | Description ----------|------------ target/debug/ | Contains output for the `dev` profile. 
target/release/ | Contains output for the `release` profile (with the `--release` option). target/foo/ | Contains build output for the `foo` profile (with the `--profile=foo` option). For historical reasons, the `dev` and `test` profiles are stored in the `debug` directory, and the `release` and `bench` profiles are stored in the `release` directory. User-defined profiles are stored in a directory with the same name as the profile. When building for another target with `--target`, the output is placed in a directory with the name of the target: Directory | Example ----------|-------- target/<triple>/debug/ | target/thumbv7em-none-eabihf/debug/ target/<triple>/release/ | target/thumbv7em-none-eabihf/release/ > **Note**: When not using `--target`, this has a consequence that Cargo will > share your dependencies with build scripts and proc macros. [`RUSTFLAGS`] > will be shared with every `rustc` invocation. With the `--target` flag, > build scripts and proc macros are built separately (for the host > architecture), and do not share `RUSTFLAGS`. Within the profile directory (such as `debug` or `release`), artifacts are placed into the following directories: Directory | Description ----------|------------ target/debug/ | Contains the output of the package being built (the [binary executables] and [library targets]). target/debug/examples/ | Contains [example targets]. Some commands place their output in dedicated directories in the top level of the `target` directory: Directory | Description ----------|------------ target/doc/ | Contains rustdoc documentation ([`cargo doc`]). target/package/ | Contains the output of the [`cargo package`] and [`cargo publish`] commands. Cargo also creates several other directories and files needed for the build process. Their layout is considered internal to Cargo, and is subject to change. Some of these directories are: Directory | Description ----------|------------ target/debug/deps/ | Dependencies and other artifacts. 
target/debug/incremental/ | `rustc` [incremental output], a cache used to speed up subsequent builds. target/debug/build/ | Output from [build scripts]. ### Dep-info files Next to each compiled artifact is a file called a "dep info" file with a `.d` suffix. This file is a Makefile-like syntax that indicates all of the file dependencies required to rebuild the artifact. These are intended to be used with external build systems so that they can detect if Cargo needs to be re-executed. The paths in the file are absolute by default. See the [`build.dep-info-basedir`] config option to use relative paths. ```Makefile # Example dep-info file found in target/debug/foo.d /path/to/myproj/target/debug/foo: /path/to/myproj/src/lib.rs /path/to/myproj/src/main.rs ``` ### Shared cache A third party tool, [sccache], can be used to share built dependencies across different workspaces. To setup `sccache`, install it with `cargo install sccache` and set `RUSTC_WRAPPER` environmental variable to `sccache` before invoking Cargo. If you use bash, it makes sense to add `export RUSTC_WRAPPER=sccache` to `.bashrc`. Alternatively, you can set [`build.rustc-wrapper`] in the [Cargo configuration][config]. Refer to sccache documentation for more details. 
[`RUSTFLAGS`]: ../reference/config.md#buildrustflags [`build.dep-info-basedir`]: ../reference/config.md#builddep-info-basedir [`build.rustc-wrapper`]: ../reference/config.md#buildrustc-wrapper [`build.target-dir`]: ../reference/config.md#buildtarget-dir [`cargo doc`]: ../commands/cargo-doc.md [`cargo package`]: ../commands/cargo-package.md [`cargo publish`]: ../commands/cargo-publish.md [build scripts]: ../reference/build-scripts.md [config]: ../reference/config.md [def-workspace]: ../appendix/glossary.md#workspace '"workspace" (glossary entry)' [environment variable]: ../reference/environment-variables.md [incremental output]: ../reference/profiles.md#incremental [sccache]: https://github.com/mozilla/sccache [profile]: ../reference/profiles.md [binary executables]: ../reference/cargo-targets.md#binaries [library targets]: ../reference/cargo-targets.md#library [example targets]: ../reference/cargo-targets.md#examples cargo-0.66.0/src/doc/src/guide/cargo-home.md000066400000000000000000000102041432416201200204510ustar00rootroot00000000000000## Cargo Home The "Cargo home" functions as a download and source cache. When building a [crate][def-crate], Cargo stores downloaded build dependencies in the Cargo home. You can alter the location of the Cargo home by setting the `CARGO_HOME` [environmental variable][env]. The [home](https://crates.io/crates/home) crate provides an API for getting this location if you need this information inside your Rust crate. By default, the Cargo home is located in `$HOME/.cargo/`. Please note that the internal structure of the Cargo home is not stabilized and may be subject to change at any time. The Cargo home consists of following components: ## Files: * `config.toml` Cargo's global configuration file, see the [config entry in the reference][config]. * `credentials.toml` Private login credentials from [`cargo login`] in order to log in to a [registry][def-registry]. 
* `.crates.toml`, `.crates2.json` These hidden files contain [package][def-package] information of crates installed via [`cargo install`]. Do NOT edit by hand! ## Directories: * `bin` The bin directory contains executables of crates that were installed via [`cargo install`] or [`rustup`](https://rust-lang.github.io/rustup/). To be able to make these binaries accessible, add the path of the directory to your `$PATH` environment variable. * `git` Git sources are stored here: * `git/db` When a crate depends on a git repository, Cargo clones the repo as a bare repo into this directory and updates it if necessary. * `git/checkouts` If a git source is used, the required commit of the repo is checked out from the bare repo inside `git/db` into this directory. This provides the compiler with the actual files contained in the repo of the commit specified for that dependency. Multiple checkouts of different commits of the same repo are possible. * `registry` Packages and metadata of crate registries (such as [crates.io](https://crates.io/)) are located here. * `registry/index` The index is a bare git repository which contains the metadata (versions, dependencies etc) of all available crates of a registry. * `registry/cache` Downloaded dependencies are stored in the cache. The crates are compressed gzip archives named with a `.crate` extension. * `registry/src` If a downloaded `.crate` archive is required by a package, it is unpacked into `registry/src` folder where rustc will find the `.rs` files. ## Caching the Cargo home in CI To avoid redownloading all crate dependencies during continuous integration, you can cache the `$CARGO_HOME` directory. However, caching the entire directory is often inefficient as it will contain downloaded sources twice. 
If we depend on a crate such as `serde 1.0.92` and cache the entire `$CARGO_HOME` we would actually cache the sources twice, the `serde-1.0.92.crate` inside `registry/cache` and the extracted `.rs` files of serde inside `registry/src`. That can unnecessarily slow down the build as downloading, extracting, recompressing and reuploading the cache to the CI servers can take some time. It should be sufficient to only cache the following directories across builds: * `bin/` * `registry/index/` * `registry/cache/` * `git/db/` ## Vendoring all dependencies of a project See the [`cargo vendor`] subcommand. ## Clearing the cache In theory, you can always remove any part of the cache and Cargo will do its best to restore sources if a crate needs them either by reextracting an archive or checking out a bare repo or by simply redownloading the sources from the web. Alternatively, the [cargo-cache](https://crates.io/crates/cargo-cache) crate provides a simple CLI tool to only clear selected parts of the cache or show sizes of its components in your command-line. [`cargo install`]: ../commands/cargo-install.md [`cargo login`]: ../commands/cargo-login.md [`cargo vendor`]: ../commands/cargo-vendor.md [config]: ../reference/config.md [def-crate]: ../appendix/glossary.md#crate '"crate" (glossary entry)' [def-package]: ../appendix/glossary.md#package '"package" (glossary entry)' [def-registry]: ../appendix/glossary.md#registry '"registry" (glossary entry)' [env]: ../reference/environment-variables.md cargo-0.66.0/src/doc/src/guide/cargo-toml-vs-cargo-lock.md000066400000000000000000000100131432416201200231370ustar00rootroot00000000000000## Cargo.toml vs Cargo.lock `Cargo.toml` and `Cargo.lock` serve two different purposes. Before we talk about them, here’s a summary: * `Cargo.toml` is about describing your dependencies in a broad sense, and is written by you. * `Cargo.lock` contains exact information about your dependencies. It is maintained by Cargo and should not be manually edited. 
If you’re building a non-end product, such as a Rust library that other
Rust [packages][def-package] will depend on, put `Cargo.lock` in your
`.gitignore`. If you’re building an end product, which is an executable like a
command-line tool or an application, or a system library with crate-type of
`staticlib` or `cdylib`, check `Cargo.lock` into `git`. If you're curious
about why that is, see
["Why do binaries have `Cargo.lock` in version control, but not libraries?" in the
FAQ](../faq.md#why-do-binaries-have-cargolock-in-version-control-but-not-libraries).

Let’s dig in a little bit more.

`Cargo.toml` is a [**manifest**][def-manifest] file in which we can specify a
bunch of different metadata about our package. For example, we can say that we
depend on another package:

```toml
[package]
name = "hello_world"
version = "0.1.0"

[dependencies]
regex = { git = "https://github.com/rust-lang/regex.git" }
```

This package has a single dependency, on the `regex` library. We’ve stated in
this case that we’re relying on a particular Git repository that lives on
GitHub. Since we haven’t specified any other information, Cargo assumes that
we intend to use the latest commit on the `master` branch to build our
package.

Sound good? Well, there’s one problem: If you build this package today, and
then you send a copy to me, and I build this package tomorrow, something bad
could happen. There could be more commits to `regex` in the meantime, and my
build would include new commits while yours would not. Therefore, we would get
different builds. This would be bad because we want reproducible builds.

We could fix this problem by defining a specific `rev` value in our
`Cargo.toml`, so Cargo could know exactly which revision to use when building
the package:

```toml
[dependencies]
regex = { git = "https://github.com/rust-lang/regex.git", rev = "9f9f693" }
```

Now our builds will be the same.
But there’s a big drawback: now we have to manually think about SHA-1s every time we want to update our library. This is both tedious and error prone. Enter the `Cargo.lock`. Because of its existence, we don’t need to manually keep track of the exact revisions: Cargo will do it for us. When we have a manifest like this: ```toml [package] name = "hello_world" version = "0.1.0" [dependencies] regex = { git = "https://github.com/rust-lang/regex.git" } ``` Cargo will take the latest commit and write that information out into our `Cargo.lock` when we build for the first time. That file will look like this: ```toml [[package]] name = "hello_world" version = "0.1.0" dependencies = [ "regex 1.5.0 (git+https://github.com/rust-lang/regex.git#9f9f693768c584971a4d53bc3c586c33ed3a6831)", ] [[package]] name = "regex" version = "1.5.0" source = "git+https://github.com/rust-lang/regex.git#9f9f693768c584971a4d53bc3c586c33ed3a6831" ``` You can see that there’s a lot more information here, including the exact revision we used to build. Now when you give your package to someone else, they’ll use the exact same SHA, even though we didn’t specify it in our `Cargo.toml`. When we’re ready to opt in to a new version of the library, Cargo can re-calculate the dependencies and update things for us: ```console $ cargo update # updates all dependencies $ cargo update -p regex # updates just β€œregex” ``` This will write out a new `Cargo.lock` with the new version information. Note that the argument to `cargo update` is actually a [Package ID Specification](../reference/pkgid-spec.md) and `regex` is just a short specification. 
[def-manifest]: ../appendix/glossary.md#manifest '"manifest" (glossary entry)' [def-package]: ../appendix/glossary.md#package '"package" (glossary entry)' cargo-0.66.0/src/doc/src/guide/continuous-integration.md000066400000000000000000000062011432416201200231610ustar00rootroot00000000000000## Continuous Integration ### Travis CI To test your [package][def-package] on Travis CI, here is a sample `.travis.yml` file: ```yaml language: rust rust: - stable - beta - nightly matrix: allow_failures: - rust: nightly ``` This will test all three release channels, but any breakage in nightly will not fail your overall build. Please see the [Travis CI Rust documentation](https://docs.travis-ci.com/user/languages/rust/) for more information. ### GitHub Actions To test your package on GitHub Actions, here is a sample `.github/workflows/ci.yml` file: ```yaml name: Cargo Build & Test on: push: pull_request: env: CARGO_TERM_COLOR: always jobs: build_and_test: name: Rust project - latest runs-on: ubuntu-latest strategy: matrix: toolchain: - stable - beta - nightly steps: - uses: actions/checkout@v3 - run: rustup update ${{ matrix.toolchain }} && rustup default ${{ matrix.toolchain }} - run: cargo build --verbose - run: cargo test --verbose ``` This will test all three release channels (note a failure in any toolchain version will fail the entire job). You can also click `"Actions" > "new workflow"` in the GitHub UI and select Rust to add the [default configuration](https://github.com/actions/starter-workflows/blob/main/ci/rust.yml) to your repo. See [GitHub Actions documentation](https://docs.github.com/en/actions) for more information. 
### GitLab CI

To test your package on GitLab CI, here is a sample `.gitlab-ci.yml` file:

```yaml
stages:
  - build

rust-latest:
  stage: build
  image: rust:latest
  script:
    - cargo build --verbose
    - cargo test --verbose

rust-nightly:
  stage: build
  image: rustlang/rust:nightly
  script:
    - cargo build --verbose
    - cargo test --verbose
  allow_failure: true
```

This will test on the stable channel and nightly channel, but any
breakage in nightly will not fail your overall build. Please see the
[GitLab CI documentation](https://docs.gitlab.com/ce/ci/yaml/index.html) for
more information.

### builds.sr.ht

To test your package on sr.ht, here is a sample `.build.yml` file.
Be sure to change `<your repo>` and `<your project>` to the repo to clone and
the directory where it was cloned.

```yaml
image: archlinux
packages:
  - rustup
sources:
  - <your repo>
tasks:
  - setup: |
      rustup toolchain install nightly stable
      cd <your project>/
      rustup run stable cargo fetch
  - stable: |
      rustup default stable
      cd <your project>/
      cargo build --verbose
      cargo test --verbose
  - nightly: |
      rustup default nightly
      cd <your project>/
      cargo build --verbose ||:
      cargo test --verbose ||:
  - docs: |
      cd <your project>/
      rustup run stable cargo doc --no-deps
      rustup run nightly cargo doc --no-deps ||:
```

This will test and build documentation on the stable channel and nightly
channel, but any breakage in nightly will not fail your overall build. Please
see the [builds.sr.ht documentation](https://man.sr.ht/builds.sr.ht/) for more
information.

[def-package]: ../appendix/glossary.md#package '"package" (glossary entry)'
cargo-0.66.0/src/doc/src/guide/creating-a-new-project.md000066400000000000000000000050101432416201200226740ustar00rootroot00000000000000
## Creating a New Package

To start a new [package][def-package] with Cargo, use `cargo new`:

```console
$ cargo new hello_world --bin
```

We’re passing `--bin` because we’re making a binary program: if we were
making a library, we’d pass `--lib`. This also initializes a new `git`
repository by default. If you don't want it to do that, pass `--vcs none`.
Let’s check out what Cargo has generated for us: ```console $ cd hello_world $ tree . . β”œβ”€β”€ Cargo.toml └── src └── main.rs 1 directory, 2 files ``` Let’s take a closer look at `Cargo.toml`: ```toml [package] name = "hello_world" version = "0.1.0" edition = "2021" [dependencies] ``` This is called a [***manifest***][def-manifest], and it contains all of the metadata that Cargo needs to compile your package. This file is written in the [TOML] format (pronounced /tΙ‘mΙ™l/). Here’s what’s in `src/main.rs`: ```rust fn main() { println!("Hello, world!"); } ``` Cargo generated a β€œhello world” program for us, otherwise known as a [*binary crate*][def-crate]. Let’s compile it: ```console $ cargo build Compiling hello_world v0.1.0 (file:///path/to/package/hello_world) ``` And then run it: ```console $ ./target/debug/hello_world Hello, world! ``` We can also use `cargo run` to compile and then run it, all in one step (You won't see the `Compiling` line if you have not made any changes since you last compiled): ```console $ cargo run Compiling hello_world v0.1.0 (file:///path/to/package/hello_world) Running `target/debug/hello_world` Hello, world! ``` You’ll now notice a new file, `Cargo.lock`. It contains information about our dependencies. Since we don’t have any yet, it’s not very interesting. Once you’re ready for release, you can use `cargo build --release` to compile your files with optimizations turned on: ```console $ cargo build --release Compiling hello_world v0.1.0 (file:///path/to/package/hello_world) ``` `cargo build --release` puts the resulting binary in `target/release` instead of `target/debug`. Compiling in debug mode is the default for development. Compilation time is shorter since the compiler doesn't do optimizations, but the code will run slower. Release mode takes longer to compile, but the code will run faster. 
[TOML]: https://toml.io/ [def-crate]: ../appendix/glossary.md#crate '"crate" (glossary entry)' [def-manifest]: ../appendix/glossary.md#manifest '"manifest" (glossary entry)' [def-package]: ../appendix/glossary.md#package '"package" (glossary entry)' cargo-0.66.0/src/doc/src/guide/dependencies.md000066400000000000000000000052331432416201200210640ustar00rootroot00000000000000## Dependencies [crates.io] is the Rust community's central [*package registry*][def-package-registry] that serves as a location to discover and download [packages][def-package]. `cargo` is configured to use it by default to find requested packages. To depend on a library hosted on [crates.io], add it to your `Cargo.toml`. [crates.io]: https://crates.io/ ### Adding a dependency If your `Cargo.toml` doesn't already have a `[dependencies]` section, add that, then list the [crate][def-crate] name and version that you would like to use. This example adds a dependency of the `time` crate: ```toml [dependencies] time = "0.1.12" ``` The version string is a [SemVer] version requirement. The [specifying dependencies](../reference/specifying-dependencies.md) docs have more information about the options you have here. [SemVer]: https://semver.org If we also wanted to add a dependency on the `regex` crate, we would not need to add `[dependencies]` for each crate listed. 
Here's what your whole `Cargo.toml` file would look like with dependencies on the `time` and `regex` crates: ```toml [package] name = "hello_world" version = "0.1.0" edition = "2021" [dependencies] time = "0.1.12" regex = "0.1.41" ``` Re-run `cargo build`, and Cargo will fetch the new dependencies and all of their dependencies, compile them all, and update the `Cargo.lock`: ```console $ cargo build Updating crates.io index Downloading memchr v0.1.5 Downloading libc v0.1.10 Downloading regex-syntax v0.2.1 Downloading memchr v0.1.5 Downloading aho-corasick v0.3.0 Downloading regex v0.1.41 Compiling memchr v0.1.5 Compiling libc v0.1.10 Compiling regex-syntax v0.2.1 Compiling memchr v0.1.5 Compiling aho-corasick v0.3.0 Compiling regex v0.1.41 Compiling hello_world v0.1.0 (file:///path/to/package/hello_world) ``` Our `Cargo.lock` contains the exact information about which revision of all of these dependencies we used. Now, if `regex` gets updated, we will still build with the same revision until we choose to `cargo update`. You can now use the `regex` library in `main.rs`. ```rust,ignore use regex::Regex; fn main() { let re = Regex::new(r"^\d{4}-\d{2}-\d{2}$").unwrap(); println!("Did our date match? {}", re.is_match("2014-01-01")); } ``` Running it will show: ```console $ cargo run Running `target/hello_world` Did our date match? true ``` [def-crate]: ../appendix/glossary.md#crate '"crate" (glossary entry)' [def-package]: ../appendix/glossary.md#package '"package" (glossary entry)' [def-package-registry]: ../appendix/glossary.md#package-registry '"package-registry" (glossary entry)' cargo-0.66.0/src/doc/src/guide/index.md000066400000000000000000000010531432416201200175410ustar00rootroot00000000000000## Cargo Guide This guide will give you all that you need to know about how to use Cargo to develop Rust packages. 
* [Why Cargo Exists](why-cargo-exists.md) * [Creating a New Package](creating-a-new-project.md) * [Working on an Existing Cargo Package](working-on-an-existing-project.md) * [Dependencies](dependencies.md) * [Package Layout](project-layout.md) * [Cargo.toml vs Cargo.lock](cargo-toml-vs-cargo-lock.md) * [Tests](tests.md) * [Continuous Integration](continuous-integration.md) * [Cargo Home](cargo-home.md) * [Build Cache](build-cache.md) cargo-0.66.0/src/doc/src/guide/project-layout.md000066400000000000000000000044571432416201200214260ustar00rootroot00000000000000## Package Layout Cargo uses conventions for file placement to make it easy to dive into a new Cargo [package][def-package]: ```text . β”œβ”€β”€ Cargo.lock β”œβ”€β”€ Cargo.toml β”œβ”€β”€ src/ β”‚Β Β  β”œβ”€β”€ lib.rs β”‚Β Β  β”œβ”€β”€ main.rs β”‚Β Β  └── bin/ β”‚ β”œβ”€β”€ named-executable.rs β”‚Β Β  Β Β  β”œβ”€β”€ another-executable.rs β”‚Β Β  Β Β  └── multi-file-executable/ β”‚Β Β  Β Β  β”œβ”€β”€ main.rs β”‚Β Β  Β Β  └── some_module.rs β”œβ”€β”€ benches/ β”‚Β Β  β”œβ”€β”€ large-input.rs β”‚Β Β  └── multi-file-bench/ β”‚Β Β  β”œβ”€β”€ main.rs β”‚Β Β  └── bench_module.rs β”œβ”€β”€ examples/ β”‚Β Β  β”œβ”€β”€ simple.rs β”‚Β Β  └── multi-file-example/ β”‚Β Β  β”œβ”€β”€ main.rs β”‚Β Β  └── ex_module.rs └── tests/ β”œβ”€β”€ some-integration-tests.rs └── multi-file-test/ β”œβ”€β”€ main.rs └── test_module.rs ``` * `Cargo.toml` and `Cargo.lock` are stored in the root of your package (*package root*). * Source code goes in the `src` directory. * The default library file is `src/lib.rs`. * The default executable file is `src/main.rs`. * Other executables can be placed in `src/bin/`. * Benchmarks go in the `benches` directory. * Examples go in the `examples` directory. * Integration tests go in the `tests` directory. 
If a binary, example, bench, or integration test consists of multiple source files, place a `main.rs` file along with the extra [*modules*][def-module] within a subdirectory of the `src/bin`, `examples`, `benches`, or `tests` directory. The name of the executable will be the directory name. You can learn more about Rust's module system in [the book][book-modules]. See [Configuring a target] for more details on manually configuring targets. See [Target auto-discovery] for more information on controlling how Cargo automatically infers target names. [book-modules]: ../../book/ch07-00-managing-growing-projects-with-packages-crates-and-modules.html [Configuring a target]: ../reference/cargo-targets.md#configuring-a-target [def-package]: ../appendix/glossary.md#package '"package" (glossary entry)' [def-module]: ../appendix/glossary.md#module '"module" (glossary entry)' [Target auto-discovery]: ../reference/cargo-targets.md#target-auto-discovery cargo-0.66.0/src/doc/src/guide/tests.md000066400000000000000000000032501432416201200175750ustar00rootroot00000000000000## Tests Cargo can run your tests with the `cargo test` command. Cargo looks for tests to run in two places: in each of your `src` files and any tests in `tests/`. Tests in your `src` files should be unit tests and [documentation tests]. Tests in `tests/` should be integration-style tests. As such, you’ll need to import your crates into the files in `tests`. Here's an example of running `cargo test` in our [package][def-package], which currently has no tests: ```console $ cargo test Compiling regex v1.5.0 (https://github.com/rust-lang/regex.git#9f9f693) Compiling hello_world v0.1.0 (file:///path/to/package/hello_world) Running target/test/hello_world-9c2b65bbb79eabce running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out ``` If our package had tests, we would see more output with the correct number of tests. 
You can also run a specific test by passing a filter:

```console
$ cargo test foo
```

This will run any test with `foo` in its name.

`cargo test` runs additional checks as well. It will compile any examples
you’ve included to ensure they still compile. It also runs documentation tests
to ensure your code samples from documentation comments compile. Please see
the [testing guide][testing] in the Rust documentation for a general view of
writing and organizing tests.

See [Cargo Targets: Tests] to learn more about different styles of tests in
Cargo.

[documentation tests]: ../../rustdoc/write-documentation/documentation-tests.html
[def-package]: ../appendix/glossary.md#package '"package" (glossary entry)'
[testing]: ../../book/ch11-00-testing.html
[Cargo Targets: Tests]: ../reference/cargo-targets.html#tests
cargo-0.66.0/src/doc/src/guide/why-cargo-exists.md000066400000000000000000000060551432416201200216560ustar00rootroot00000000000000
## Why Cargo Exists

### Preliminaries

In Rust, as you may know, a library or executable program is called a
[*crate*][def-crate]. Crates are compiled using the Rust compiler,
`rustc`. When starting with Rust, the first source code most people encounter
is that of the venerable “hello world” program, which they compile by invoking
`rustc` directly:

```console
$ rustc hello.rs
$ ./hello
Hello, world!
```

Note that the above command required that we specify the file name explicitly.
If we were to directly use `rustc` to compile a different program, a different
command line invocation would be required. If we needed to specify any specific
compiler flags or include external dependencies, then the
needed command would be even more specific (and elaborate).

Furthermore, most non-trivial programs will likely have dependencies on
external libraries, and will therefore also depend transitively on *their*
dependencies.
Obtaining the correct versions of all the necessary dependencies and keeping them up to date would be laborious and error-prone if done by hand. Rather than work only with crates and `rustc`, we can avoid the manual tedium involved with performing the above tasks by introducing a higher-level ["*package*"][def-package] abstraction and by using a [*package manager*][def-package-manager]. ### Enter: Cargo *Cargo* is the Rust package manager. It is a tool that allows Rust [*packages*][def-package] to declare their various dependencies and ensure that you’ll always get a repeatable build. To accomplish this goal, Cargo does four things: * Introduces two metadata files with various bits of package information. * Fetches and builds your package’s dependencies. * Invokes `rustc` or another build tool with the correct parameters to build your package. * Introduces conventions to make working with Rust packages easier. To a large extent, Cargo normalizes the commands needed to build a given program or library; this is one aspect to the above mentioned conventions. As we show later, the same command can be used to build different [*artifacts*][def-artifact], regardless of their names. Rather than invoke `rustc` directly, we can instead invoke something generic such as `cargo build` and let cargo worry about constructing the correct `rustc` invocation. Furthermore, Cargo will automatically fetch from a [*registry*][def-registry] any dependencies we have defined for our artifact, and arrange for them to be incorporated into our build as needed. It is only a slight exaggeration to say that once you know how to build one Cargo-based project, you know how to build *all* of them. 
[def-artifact]: ../appendix/glossary.md#artifact '"artifact" (glossary entry)' [def-crate]: ../appendix/glossary.md#crate '"crate" (glossary entry)' [def-package]: ../appendix/glossary.md#package '"package" (glossary entry)' [def-package-manager]: ../appendix/glossary.md#package-manager '"package manager" (glossary entry)' [def-registry]: ../appendix/glossary.md#registry '"registry" (glossary entry)' cargo-0.66.0/src/doc/src/guide/working-on-an-existing-project.md000066400000000000000000000011521432416201200244140ustar00rootroot00000000000000## Working on an Existing Cargo Package If you download an existing [package][def-package] that uses Cargo, it’s really easy to get going. First, get the package from somewhere. In this example, we’ll use `regex` cloned from its repository on GitHub: ```console $ git clone https://github.com/rust-lang/regex.git $ cd regex ``` To build, use `cargo build`: ```console $ cargo build Compiling regex v1.5.0 (file:///path/to/package/regex) ``` This will fetch all of the dependencies and then build them, along with the package. 
[def-package]: ../appendix/glossary.md#package '"package" (glossary entry)' cargo-0.66.0/src/doc/src/images/000077500000000000000000000000001432416201200162615ustar00rootroot00000000000000cargo-0.66.0/src/doc/src/images/Cargo-Logo-Small.png000066400000000000000000001614701432416201200217770ustar00rootroot00000000000000‰PNG  IHDR2΅6ŠVβIDATxμΑ Β ϋ§ΆΗ €š³c°$AΖΏϬ΢mΫΆmΫFlŸβœmΫΆmϋωΣU·:αλ_ς-f;WjfψΓgΌη€ΓξΪϋ‘Οξάχ¬{YiδΑ£³vό_₯RŸ¦RœXzΰ¨υξ>π;χΈ}Ό}'Έ}gόŽέγΐ]ϋ=:pοaŽ<|ό νψ―₯RκΌό’ΔΜζ-Zίξ(kμ"k.Š΅ΐ ’E¬AyžƒΚ˜ΚΓ*UYυό¬T{ˆ$ΙΏ ΡGO&Iώ­‹Ι‰ =Φχ°’±Ηζ$x ‹νΜΘƒ‘ΦoBΐ!'d%\%WVyJyυ’W―>'IώΎ΄‘%œDl.ι±±«¬Ύ½[sN+šE a9Μ{οξΰτ„’0s’υ>ηyŽB§¦‘zC^ž―¬ςπίΩ’DνηόMIΪΎ¦“5Ά”5χπΨX9’Εx7HΰξνΐqB™S΄“ “PΗpp „ ΅C9!/»«όœ‡κEΚ«WŸ’$@ƒG’όI9Φ\…ΨΨέccK+κΣ0Ό‚ αΈ;Et$A χώΰ’C€vΰ€;ΰ B˜;?pDς¦ςΈ²ΚM!―^@(=΄H’Ÿ ‘‡~;Θ’d°΄ςœS4_ήΪό`ΧV«±΄M2E„Νˆ.w ΜΫ'd?mdΡAΦv掷‚~8‹9Δώσ΄, ½m-›„|²Yž{ύ‹y^|ιύζ½_Χγΐ8Id§žvI²ήΚ ΥjρΓυ&©Ώ²Ghlθ±5Ήx›AϋBχ‡Ž»Σ‚ΊαЊό4Έ¬FnŽy`ζtτߐ·ˆžOFiςΩΪΝNV›–R^⛆σώ@ύ“ί½ϊ‚ΛnΎxš +ΡZk­ΝΔ–l»Ξά‹μ±ρά;;ϊΞNήόj> ΐ‰±@8ξΦ²Ld Έ φΧ+s'BќH€t;iξύa%Jyΐέ1ι‚,€›aQV%Τfθ°¬6M°BVκžiy•ΊOZΌςξΨc7ίυΔ…=ϊτ ΐJ’e–]މ'©ΕΟ§Ϊzy7Ϋiƒωχ(λ»5-6ΚΒ)гξΦ»­“(ΜΊοί³sVQr$ɚώΜά2³J*5J₯ζKCΛΜΜΜΜΜΜΜπΆ/Λ8<—™™™‡©™„U‰α°uνψρSZζ­ξI›φ‡τΜ E ΎωσίB2 ’B SXΕjzgο“’ΑΛήs=2‚½(TDΐΐE"‘Έξ ―ΕUπs$RΨbΛΙ:IqmΗlvΐlqH;ΏέΌ|;ήόΖoώφwό‡·½λ―§μλ’ό΅ϋZφυ‰Q?αΣzύΑ=γΧ/Σoύi?ςΗό*βpΌέ.™Ζ#B―Rr_¦²0•}ΔφΘrΜ%δZφcf {YφU±ͺ–΄\[ ”Α₯~Nsx πΪ]±½v°½ƒJ"Ηΐn ŒSDΤΡw σYƒkZpž˜›ΥšΝΛΟ2ή~ϊŸrνΟψ ςΣ~α·½χΖ۟xδ‘we_―λ’oηοζυ]ϋzβϊΡq˜ΆΏ:‡α·>pΙύψaΨΚz½1υ%d γ*pΞχ©°bJH LRm¦Αμ\³Ύ@°ΪΖ3*˜ͺ“(3λ˜“YC?ΠΤ—τ"Ϊ‘βˆΔδ‰œ#»!0Œ ΄ήΡu ή7ˆλίƒ΄μvkΦ·Ÿguλip‹Y+ΜfύٚΡχ3¦μ7Sτ_†λώ½Ίώ+ ―ΗΪb_9²}M!Ά)ξ~¦cϊνΔέ/ŽΣpΔΙ,£–ΎU)SA"R@„U}Ÿ €s!@ ΅yΩl€«{h)%ΘU₯™αאω1ΪBJfHq lw1Ω΅θΫ†φl‰zΠρx}ς*Λ3xλWhedή7:§JΖ Š ¨+a[Χ’ύpvύ»Πξΐ‡xέΤΎδζ7ό^W΅Ÿwό”vΏ!…έoJqx“δ Rι{‘q%ή€@=,!fκ©dJP„ΛΎˆ’ 5:‘²ύ\N`”ΣI[1Dœd\Ϋγfγ~Θ:φ ΞA H¦‘XG‘ρ@ΙƒkQ?#‹²[έεδΦ³lξ<aΙΑΜq0οLρεΧHΩŠ-6XHH™ hpΎΫfνΎνϋR`ΝkΌφ Ϋ+²ΧΌβ₯vΏ@ΣπΫcΨύœ†Y JΔ+@K ­Š­”ξQdΆηJ˜5Ζ; vΆξύ*θ (―œ \B"‹ΓuWLy5Χ?7hεΈCςD )Όˆ)›ul[s΄Α73SQΣ°cyηNo>CΨά’k"—gφω’5«6ψ|Ε’™Ζlp=„0ͺkPί’₯ϋ˜ψώέhχvΰύΌFk²} φ5[9…Γξ7Ηp6ο‡ΗrΙι|d"Χ—SΘ;5j€Βμ€νΧΎTUƒA0e|Q>ή )—†)ΊlφΡ>ίΞ‘ΩUάⳑ"bπJa°Aς#Σ 
1™ΥlgJΙySJˆŸΩu7§7Ή{γi†Σ—hdΗΑΌ±&Fͺ’)―ƒΤ=Pƒr ξΪ~ΞeEΤ~mD»]φkAώέμ±_ψΕΐŠ}½fJVύ<φυΪ©ζςSžώΰΏό•aXΆt6ο˜γθ…€’Θ) rQH“ςJFΥJEͺ £ξΨ2pbSΨ>"%ΈJ-•„Sθξ§ύ‘ΠκΑΓ֌7p…’¬qoΦq ‰”M}Ρ4ή–ϊηf¦ΰΆΫ§7Ÿcsη9˜ξ2ο„Ω¬)π*ι‘{& p©ί5%/’pTΔφ2ΤλΤΓ b*Νω–Œ¦½οMοiϊ1o~€}]ψ’νσ_ΓΎ.~e=±}φΛς΄|ξWε°y€lαP ­ζ”«Q©½’˜lΟ †Pν%ŒκέωQ!*τκP€©-Θ”Οζb3Π\BΗ΄‡Χqν!`{rLΖ” ΐΤΤ—7ϋθMyu ašXή}…εΝ§Χ―2oσΎ1Θ•ͺ*J”Ϊ S1%YTV½G hάϋP@TΘΙφΛΟI΅ΪΨ) TνϊβtjŸψΪζροΫΜξsi·dΊσAφuΑK\ϋ·“4n—Ώ£qιJγAs †ΓIr΅€Ήόƒτ^kΏ¨hCP„˜ͺΝR)JM(Ά3Χp1(D£HŽvρ0ν₯G‘ξ~W¬γλeƒΓν~oΉ/Όokl"ea³ΌΓςφsŒ§Ο£iΝ¬uΉΖξYΚ­ €Ζ`κHTNάϋ°ύͺʜ³ϋ/½±²_@ν½όR2(—ί“bŸBRSgιν°Ašƒμύԟάd_dσρ/`_»άΑ#ΧWzΟ{·Γxy·ΎΛ΄=ΑibΦwxηΘi$—'R!b. ͺš’œ(B ’ͺzlG+Ψ„”’ΩBŸέg³ŽΊΈŠΊžœ&βd‰{T ^μΖH΄ή—0ο[‹M 6JD’†qΨ±Ύϋ"λΫΟΆ·˜΅ΙΤ—χΎͺ«”j―ξή‘τ ¬€ Ÿ³¬Tp‰P`–Κw¬ϋιT!-υ}{‡oZ»gmη$i²h#)³ΜΓΙΧ¦°;vΎωά¬έ>h{J^ύš?ΒΎ.v]zΣο<^ώΰΏxo=ςν‚q‚έvΝvuΛ¬\ίυΜη3„Df9 ”€šϋͺMςb₯jΓΏτ’5$kΘH³ ;<.‰ϋ#Μ–Ε-’F$Ϋ‰#»q"†"τ§kΔ5¨λΐu¦ϊΦ'7YΑkXΎ„gΛΌσΉΆqH}2•\ͺυ<Τl©H΅’υπ‘φΚ΄°XO["΅x~³‚Π{-ͺ΄δγΤƒΆh3Λβz2N„Δ4nmΖS™μ;fQ"ΝνΎ²νζoΥ¦r`ΙΟΪ?κzΏΈθ5ςWίωόή0펼oΑΟiηχ‘€³ΓκδΣξ―0›Νqή#9@!GΜf)5ί ’eΟΰEN Hvrθg ZζK΄5Ε'i@δ°Δ}‰Mτ­7λθΌiμΤq »§·žg}λYΒp‡y§,f­Α«ΨΓj‘ͺ«šOK€+[œk֐Sy<ΦώXι R•VLΩVQbl‘XLη<ͺž,ΧΜΘΪ“΅Ν9'‘4‘Β–§rX ]ΑŸ”,Ξa‡Ϊ=ΆŸφοΎ—}ύPd_ϋ§ΈΨ΅―ΓOύ΅Η·ΏγοΌ7 §G dœ;Q45Y@§u =ΖΪΧ*'™ηN*ΟYK©}ΆςΑβΑ5]Ž£ΎQ‰a"ΗΑ–d˜Yλδ„+1©@4HjQsbŠ.¨οΎ νή&ϋόύS8ώί•άΨ?!φΒΧΑ'ͺγΫίώwή;mοy‰„”hšŽ˜ί-H΄΄³ΛΠ"Ν!DΦΛی›”Θ|>§λZƒVš›ΧˆkζΨ¨ΠβQ\ŸA€΄ƒΈ35’bΐNK^¬mΌ-Q:³aδ¬XγώΦ3lN^@βŠEη `%qO(ΦW@Υ*J @ θ u?ηͺΘjO«(ΈΪΨ/Ν|ΫΣ:λy^βPuϋ€}V?£iMΝJŽ;0€μs°ί§l`LuX^Tˆͺ"υκ}G«C7kΆ7ΔυŸ+gPΓυίώoΦ>ΩΉΨ΅―ωSΏμψφ·ύν3έ=ς Tή{λeyί΄5¦Ρf6‡Έώ2hΗv³f·ΎΝ΄[Ρw-σŁA«9| ·ΈfŸΙacJΣ†G¦ib'R„¦Q,2Q¬£ΑΟ5LΓ–ΣΫ/pχΥ§IΓm]Άq‘¦$ξ«…­ƒζœόu΅€€Y‘ΊW§ js_ΔbŠ»7/eοό©d6€:u`£Wt–}Ώ0›Œ¨„iGwHqS`9EϋύL)AΉΏΒWΚΦ½λίQΚ瑞¨"dΞΩτ@Χ}_’ξβϊϞe_ŸˆŠl_‹Oϊ•Ƿ΁,Ζd`™¦`§hΉ«υ ¦–όmpέ%\{Ι>³Yή’»ϊ“hψαtΓη5qΈK—ΔqΓ°Ϋ0MB€υb0pΎΑ·fΑ¬ρΎ>ΉaπΪ-_’Ӂƒycχ ’5*Ρ8½η{Κ”άΔL…š(Rσ_˜²Ι5­οœVkx^‰―œ)Uβ&(%‘ψώ(KsΙ’!$Lƒυϊˆ΅οEJ―bΑŸ›Ϋj ŝ³ΑηM!V«υ8Έδω1ϋ‰ϊ~©ΎϋRάμ­Λζ _lΩΧ‘’§Ώυ_p±k_=ϊfΩ΄;™˜"3 3 DiOZΏˆN§Hιk₯\η5€60Χš‚fl/$κΘT«ΞŒ¨ύ:‹Ή€2«šPMvς›βξSϊιζ_λόΉΤ}έΊ}Γ[Χξ±/ξ°―ι’χ~ληr±k_O<ςΐρΝoύ›gŠμδΘΛT‚žŽχ~|IJπ†Ηζ,f cˆˆ(KξŠ’θΧϊhžξ±_Lwι1ΒΈΒ‚ŸΪ΅g½vA†υ ί† 
7iudΡCηA$A±ƒ)f2η+γTAJ,T;VΗ€ΞΫ@©ŸεZ"KZO οι±ΩKͺU#kKΧ/²[gwπΈθόͺdaX3lξ§Ž/ ( ςYδœ#as“ατ9ςφU4δ,E1I͝΅^λˆV•iPOJΛˆ NU*؁š}«=5ͺΥΖ{‡¨#£φΧΏ4ΆO|φΆyό­ωʏύ. ²―‘’|ΰϋΉΨ΅―k§Η7ΎεoΎ7w”` »uyϊε-dxβZΗ΅ϋgΦSuφΎ«}ͺŒΑ-'ƒQ{ύηΣ^#loγ4“|wνξ£Y<ΔΛ7ξςΚΎ˜Λ݈ζ‰Yίυ=j‰azΞ.¦³΄ϊŸ5Γλσύ…ϊΜ°ϊZ@κX”+*€a³)9λΧΡ΄³L3Ο178ί¨ΎwιIΛ§₯ρ„4œ§γ°a·έΒ„H¦oΥ†ΥΪ ΝUoy±Έ~™iυΣζ6BΒ9o*­ΪαήΛ½Γη@J|ϋ η\­kˆ%Σ¦ΰŠE>?:₯fU-ΰ’̏ΏγΫήwσνμίψΩΐKμλΏYξγ?πε|ΩΎgΏ.πϊuΏφWnžϋš?ΒWIΔh½- 3L.4(@ ‹B¦FlΓ]zΒz_iΌc§•γn…¦‘wˆ@œϊ|ƒΊf'Ÿγ”X­NΗΡ†¦›α΄1He2Τ§OhΘ.U§€:X¬$”Ϋ-ύzYTœ€x»Χ¦[dν.eΧεδζ’šΓFΞ`ΜζΖpςΒw³Ίσ<)&\33 8Ι΄ΊFH91ΨU°ζΎJDHNˆ³ΰo{ωq{nšsΑ-L[(κ…:γY υ„΅fΥπε{œ?uU5ΰ•οE-U±epK©δΦ"9gΝΣι#ŸrΥβίϊKήό»ίψΤoΉ³Χ_ρ-O?Κ­M<[ά»φΛ cΌΰ7Ή_τχ–3}ν‰aΧ ‘œ3]λΨn‹™η‘#o ©³’φΏ:²c{[γŸD›9qX"y N»ϊ8UΗf³fχκχΡ°2³ƒΛΜ°ŸYo6lΦ+ƒUΣφ–Ε-ͺ%ƒ€« ψ@©φ*\mim¨ηϊΰCΔYZή7=$=ƒ—Ά—ΙxIqΠ°=‘4ž i€'‰FΨΎΜζφGYήz–qΐΝpέ!β[Ό:f3«hΟ:Ϋ†q‚œP ΈωΚ_C?σ₯?τ~μν³Ε~ΩΒgφu‘«eυπcζg,Kρ@LΣdΔc¦hY}μΩChΐάm{A·ן‘»/‘Έ΅{ˆ*8qˆΤμJOe[e(ͺj…rUGJήžΙ)‚ύο ‡’'²Ύ{šώ―‹ο’ϊξ_ίπΞξγλ yn..€β$‘K’βκZ˜υ•ϊ$εoŠg8ξMΆ€F?h@Δ#ΎjO-Ž|) A 0uΪ“&e#Sr€ξυ½ΉhœŸ?@/žpΨoΨoo8 ΛεΩϊή‰o%©wSE‡Σl7dε/ͺe ’AIΙMT”BΆΝ€AQLΙΞkη€τ±ΐ³˜ζφϊ3ΆŸ}ʍ[Ρ­>`•KOίFΒ΄‡80εMOϋaFΪ04ήοΑ‚τ~€ŸNάΏd\Šξ_(;cτ;b:°BΦ€μN/…:TPˆ) ™βBHΝŠw DŒΗΗΓΈϋ]β›ίB_]Σ$ό0πŒoh‰ώˆϋHšpf' @έG©εw%g, Ορά}ςOx`ωώ― »όi γKϊΩ§˜Œ w΄€T‡M*h“#ι8“CΖ;’΄„ξŒΩμ‚ωβΛΞ$›4&t:JŠf̈CˆΞcŸΣ€σRέ]•’ueΊC~N,ͺΩ"ΔκVagΑ€Ό!w’BΖγ₯­Pέ3l7Χ7?Jκ1»ψˆεΕSϊΩ ŸBξ§Ω•qb˜Žx7o †x\hmΈΠ,ή72ξ΄}n*ΖΌ(ˆ/φβŠΰOξ= \εάuβ xWn§ϊ<Υι6ƒ*¨Fœ›π2‰Nγ/ˆiψΖu—³{οΧόYΰηoο̟ω3„σŸό5Ύξ/€Ymžύ£ίΗC/$’5υ™—υΖk$—(U§Xί{\c½«Ν–=μ_‹Ωß νγ` Sδπβ?J*O*w’ήK&άϊςΥ`mg%ͺ}BΥΝC§-ΔƒHΪ‹θ4ƒ‘Yθ [Χγ\C*†ˆ H0ˆ ;jYV³drvN9/ͺƒoB…”θkO}+„΄f\ΖζκSΆΫ κzBNθΜ°Δφmπ–υσiCS,F–iίΨ@₯9ϋΨ~&uL†G\½Ο“{―’¦ZŠ’°C Υώ?fšωqΤ’[\ζαu(žΓqj7›ΝΗ»/^ώμϊ;aωaό±‹oΜυέΎήqϋ¨ΫΑ‘83δŸuQκΞΗBίπ€ψ€β¬4τϋ/)²ϋδr˜f«ΘΓ_TU@υ S’@Ž’‘3F ΠΜ.…Ÿ£Xωiΐ&jΰKΚ€(n@Δ{ζxogx…›¨hšVΣΚFρ"Γ:‘%ΥΒωΚX¨+ΥA¦ «SAΑ²4ζ!«ŽίcχΩX‡ [Ά2?—Ωό’† ί²adYoŽ8?Ώ―Ӂw$‚Y*υέΓ#i‚ρξS7β|qP§šΕH¦œejͺ *”6CT2α6@ތTΜ…ννš):šΩ‹ΥεE;[ύͺξ›VfTωϊΗ}(ΐ)p‘(”Υfωχμ–Z#χΞ Θ%ΐˆ$<ˆ2nΏΒ₯Θ4>gΪ­iŸc‘φΫμ‚Βc‹@BΒ‚fφמ‘•4BΪΫηI‘˜οΟ9Ε!6Z9—@ΖήG|‹¦9£KxΪΩ §š&’]©kά‰˜œ‚-DMhƒ7Ξ•~’J*Οδ½P=Η*h€O Π6‰o_ssυΏYϟذ€_=’iVZ6νΑΐψ0dnΪa"4BΧ4˜‚ kCέό=fΛθ† i9Σζ 
¦Γ].©}1’΄ϋΔ~ΕƒOdΛ’Lν0"­•·1*›νŽύαˆψž~ρˆΛ³sˆG$­‰ϋΓ€šί°pͺ‰―χuUπ’0S\—χ@5€NΎjδΧN4_`ΒeΙτ…υ·i}ΜΒηΣšHq\Iwρ“iΞ~2έόŽH<ή17Vb’".χͺ!εμB€Λ"4k~If$(εu*† œX飜dυ3PGύβ`/ͺ\#‚ρΎσGψφ+_β`ύ―1΅'ŠΛ`Q-yκΞLΥ“ HeZ'(Ζ53<ΰ3UœΗeεΈœB’οgˆχ―8k|ΪΫNO$ŽΈ’ΝιVΊ²ϋx›a•N&†œ”–εχ"vωL€Ύ_ΰ΄΄ΜεΎ‘Y<¦?Β5KΝBς=’ ‡fΒiYΒQ@ΜAuTuFΪ,<ܐΗΫ2wJaŒΌΌ;Ω„>εs/‘Ζ ψ₯5Κύκ#“ αœs4σGΖΩ:ή=Ηο>3{šŽ$Mε%{η+`@2Ψ‰ "εόMNV’š;z7τ’τ€Π4 :!₯ ϋΫΞ‹—ί2`Z>ψ˜Ωω;6ΐΠx Δ#]Ζh`;ξ'Bp΄}"ΈHpνbϋ>Γόηqϋω₯iώ=?ΒI‚iΓ΄~ΖιF5‰$˜FΤΉ,U;]Αg!χ@vo5WMΌP” F”ή€³2¨’¦<―Oδ/##&”ξWΆπ·›?0Π4˜άF4’)1f’₯wR}Γκ~I!Ι₯€FμžF;°χ“*Σ&Ϊΰ Lr[†ށGI40{‚m{š?4X³Z5!Ξ΅!ω9ξμ§2τ3I‡+†ΫϐΈ3ŽiΚC†ω·αέωǜ=ϊ˜~y‰G‰Cb³ΩlΧ―f_~v1£m…»W7 wί#8EΗ=IΣ‰λTŸ΄Ο_ξΝ|σ£§sT3Iyk)Ω=έΗ©όΘ²)€ ”DΚ`£•₯ΥΒΐΛB·€_=%τψΠdFϊΞώ>i‚Β4w(T’¦*XΜ)(΅„uR’πŸ4S@ ŸMIsΖ€•QŒ“Mη$τtΛιVβgHκHρ`tDcΔ€Ac^p–³ŽΠt6=•¦7€Ήώβ;ά½ό>γέg,ϋ‰Ε²¦Ψ’јχx‰HJΤΉά²g ΄xν+ TώκΫO]W@]S–y"”²Z Ng] mΪήƒΩ―ΏΝσWίfπθήωeFΛθB❠ϬθΈΖ wLcΗ΄M<ά ΑƒFpAOL,Γβ0&ξvwΫ‰Gη­-KN“ίRάΩ}μχϋ *dpͺ“ΘBT=탑2Σ?»ΉΧkΆzBΫ/³ώΑ–μ¦ρŒΔ ΅ίV3²Άq₯„-μσ:•D3H½QJζή“e]^hδl2’Βμ‚~ωΉ²ζ€8Ϊ~MtΔι„9i'xο˜χυ $o9ίρσϊ‹o±½ϊ>nxΕ²‹œχ!΄θ”pΎΙdΣ†”Ϊl)4 n$ˆΕ"ηΤU£P8—9iδ¬gŒŠP]`5©](ΥΕΒUo°υzψ`΄ ‡½-AξBGš^ΈΌ˜α§k+ŸΣfΓ4νύ„Δš¦<°ΰΘYΉμυνΐqHˆΐυzd΅θ²διΎGvo1~ίοϋ}όωίσ“y±½ΒB,ΠζJΙD~~Ζ|ρ˜Ωκ±e_€‘iΨj…ΌΜ"ΗΠ ΰ€—€JΜΰ}Ι6†˜2‰”²φ,₯βH‘Ήc2ΈΖ8&ΫTή.žΰ"ύ@ T9ή L$3C9ŽTΜ¦h±θŠ!³μ-ͺγφζš›WΓ<Α:Ωσpζi H—hD5§#ˆΡ8pπm‡Ξ\"0Z‚DœS„"η*₯¦s5γI(B₯d ZˆΉI+‰ eM§™Rv¦w4ΰΪ­o³jα‚‹'F9lοhΒ?¦²HΗkD€¨ˆ.ς3E€ °” {]ΟAΉ]΄AxϊhΖΣ‡©΄ξ3²ϋx‹q{{ R8!²’Š`688?cvφσ»ΪΩNΤl{tΪ!$ΔJ£LΝ@‚Q$ΚDΞ²+jΟHiΚΣDW¨΅^½ΐπΞvžmy’ΉD΄³ΈΕΈω»–MY©7ν+«υtLb9οq^ΑΐO|Λaΰϊ«°»~†―XΝ„εE‹s ’j%Ζύ$ε¦a ©§iLΚƒ:OπΛ¬V0±7) €}€’j£V‘ΆH‘¬Ψ•3S»¦Ξ›†e`SL&: Ρ$Ο.>`ΎXΨ$XΗ׌Ϋ;ΖΓY~D€1›ρ4npDP ωYDNJHDͺg"vΏΏ» k…&ψb€€ _—Έ²{­eeς[φΓHJjžϊ«³'6}tΎ±2ΝXέ’’³†Dυ³½Š‚WΤΜ «’κv.Yοgζ†Ή„Rΰt‘-8Ζ¨8ށkzlςxφ!‘€ F=`Έ…42L#ΓKΧ4eΫδ ͺΓ΅sΠ››Χ\}υ}¦έs–Ν‘'Λοϋβκ1M±Lζ/%ϋ° dΙΑΏjg™˜H ¨uˆ›Y¦§q(j%(ˆšœ]Ž1‚σ‚Οœ!O_νΌ΅Ύˆγp8²»½A₯aΆ|ΔƒKΪFχ·Œw―leι€ΡEΖΙ~4F(ƒμ"—ήSΚ]*οΉ°½·}™(%³¬’{ {Ϋqb)χ›œX㞨 0ΏxΚlυŽ5ρˆΣxάγB‘Y‰άh‡’EΥD€R ςϋ“TͺhΙΖ² CΥt&Mh‚ΰΑa{5ΝNΪYολ©Ig„Ι §haΐ3Œ‘˜Τ²†Ω¬+βρΠΜPΧΪΖ¦λ/ΎΝφκήr1ƒώA‹σ‹ Ύΰδ„³–Ÿ·Ί\ Y‘²Ex‘O$R:2Gt8Π +9SκΑH2PcšF’FΘ§σuΓΊŠlλή5φ\›Ν# wKΞ?΅Α„KG¦γW ›iQ- MBBS„’ P°κvoW™<`’XcM€ώž 
IΖ;‘γS½²ϋx;!(‚Ξ'Ϊω%ΛΛχ ύ%mΣ‚NΖ›R#)•ΝΧEK˜Œ6 ΰΐIfαK±Ύ)N ]SMs)›}φ₯”€1h~' bέΏxjv7ΎΏDΑhρp•{_Ρϊ^γ˜ge‹Ά±¦Όδ ΰØxρκ·/ΏξΏbΡyﬡ쫬SΛ¨όΕΔμ[όΙRΙF C₯ˆΑU;Eu"ŽΡˆΏΞ\θ,TZλ5*GœΪ΄“8EQuί4ί“°Ϋ8·Vw‹wXΝWˆγν5’– :&Lzε Έͺ'YαPp!.ΨίPΞt§(žՎ‹:ͺπW 0ύυΗ¨΄M•­Ι=ύβ-Η}]iŽΛ‹§fBhΰ•Ž0­mΊ'N'ΰ³ΕMως{κΞDr_«YΤ/@DAΚτ.3ά•”¨Ž¬š'κ‘ώ1νκΣŠλ¬Ήž†;H#)m‚q²μΛJžΕ’£ 6y4#ΗD`½ήpσκ[ο>₯ηŽ‡½Π\6(3Tλf!ρW*ΪD½G ›$’οΌœ‚1ΕΌeiΣq@άήΔωŽ_ Ί΄LWυ@h>؞vΫ-1‰ρπΞΏoΐ‘†5zxn%€c`:ls=€ΙX)χ”ρΰβZT&$8D.Μ-»Ώ›–R.ͺKv}6U[Ψμ½7mgRϋ[ω;ϊM2ε>Ύζ‘$3-lB‘G‰"# '«Lά ΕT<ΐΌΖ¨4ž“ύŠcLP—ΖR ¨_ώ)—΄0§½xί²/Ϊ3 θ¦=78‰r‡a²ζ½β˜uΛΎšρβ{ΛΞ^~υ‚νλg€ΓW,Ϋ‘‹Eΐϋ.8A€„ž#ζaBρζ§4❝e)¨2°ƒΖΜσ T–ΏRΆœOΑΪ7ΙΦί΅ΛwwλλύέένθWοΟζxF¦γ ΣέΖΆL9Fβp: ΰ=š8_ΌΕ€S2« –™JœαTπm‹Π°`»Ή%&pumϋ›Zά’φ—eWHu©…«πM Η}|ν#₯ΔκαO`υθc†IΉ½]³ΩPίαΫψ“zΐα#x‡BeΫ‹TYP>I«Ώ}p(Φ/Μπ't)™g(Άuhφή/eωρo’}ψsPΧ“ŽwθpΔ­MGonwάnŽ€«ΕŒΛσ…Il|6ηFόδ{›OΧ?δψό_rξΎΰΙΉΨ{ChJΖ8ΕT”΅_‡WΦt–g©f„3΅Q‹<―c+«δς€• vBR!Ώ1χΪvΆ2“0#'9ߟdzGρπρ»τ!ύcΌϋ>qσ:Ό²lΤ&’iΝ,ΰ Ϋ‘_‹έ‡`³ ΅]ϊ3λοGτ+6―Ÿ±ώφΏϊˆx Œ‘"‘*l}€žηΜԝ3:Ύ‘W@ΰ>ΎώΗ5.\\œ1­ΞΩoΧ¬ΧWΖΑšΟzΪn‰#a"oqβ‡E*ŽŠΛε')δloΰ•9eΩΪ¦]–οΫεΪ3Τ†D7ˆNŒΓΔώ8)|Φv B@3οkœ"·―^°~υ}³{^΄O–-ήΟ(N(UTsY€pmβ‹Pšπ1Υιlu°ΧΥp2Ώί{HJφ s8η­οe₯›‘\‡ ΑδQrxEJ‚sŠK/i`Ϊ½&7€ι€“h|4΅L(β]ΐ•ςœΪ«+Λz±©η™“€':α°yΑα³L€‘s{ϊ xΉ+CT(!XhΎPς‘Τ²S³@δΎGvo3„4ξw/’Ή‚—,V?‘γρΐξξ5›«ΪΖGΙ·=iIiΔKΚΉΔXΔΉdԜ%Pόχ½SΪΠ ύSdώΝβΔ„δGΜέ4LΖϋŠXι¨ΠwΑV¬‰dβi3#i`»ΎγζΥwΩί|ΒΜmΉœyϊ‡-"-)Uz(>—*Ζδ—ο³”»«/Y牝e· )`'dŽ\"gnBΫdάbΧγ°=•β3―¬?G}ƒΑίqm۟†ύ^&ϋΎ1T°MQ‡«ΊMέξΫ²δα °*XsΚu}]N:†I8|χο·6θέΖ@1Έ–) ˆ¦J·¨l΄€Ά@E2H€+’5{f}ΌΥHiΌCβŏΧΰη΄ύ9ύ£§Lι)ΗύšυνkHGf³9m·@$™…²Σ ηΑZ@q"‰ ³3άό=ΛΎ$,„€= 4MŒΣdMξ1©Ι¬oiŒ6Ρ ‘·k&Ώ|Ξν‹οΓρ%ησΔγ-"sSHYD΅’.Yυγ‡*sŠ#Ep’ωjT·‡wš₯! 
Ώ~{Vq/Ξ”*βgΈvžeYqχ‰{¦γΡΑθjΰθΑ9°>αΑ€]ˆ™kζαtΏθi;«όώfˆ?ϋϋ΄γΜΌ XΠBJ·ΰΝ¨Š1fJ ›Τ]χΜώ·χ.±#X3?·ao-C“fΙrώ€Ωβ'1Ϋ»Wμnnhš`|M73Βg2.WΔ‘h$‚ο0«εΥGfc-βν‹š¦»L LFHF›XυΑϊY.t–}ΕδΈ»»ζκ«o1m>gξwΌ3σ4«6χδ²*ΐKζ‘ea΅Λ₯‘«ΫŽT¨ηS*Ό±fβͺΟV-Ρ¨” žœν9Δ@Χ.Α5DΫΰ΄aάέ@ά“Ζ56Α™1 ( TD²ΏsΊ%όΛ«ƒΉV<8 XΤΜ‹\βζg/δ—‹τœ˜7†GΟ›Q½€ίαœ Ι―OKYΈη‘έΗΫ‰ίJ ’ ]IqΔΔΔΓρ·θρ†δzsu½xπβίgswΝvsΕf³f1ŸΣus3iV„εG6yt흀aCšΦV’FΫ°m‚m‚ρΛ]Χβ|c΄ \g‚νΧϟ±½~F˜^ΫrŽωe πΨύfρ΄P}hΩžζ©Ίs§~_$”μΞZ=uΤΒm“Zb֞™sΰΕ‘ τ4ΝŒ“&Lρ€ · ;+ε˜†-Ž„¦δ1š΄ͺ–i§`bQ© ΉίφςζΘυzΰby†―― }jΟ¬ώ·Zο_υΝ,N‘7ο£’bOχy–Θ χ=²ϋx»QJŠ~]‡IιhΩx†qΊŽn~ΑbΆβμβ§pΨνΩ_pά{.~Κο°EΆϋxC<\cWq0ήΧα8c²ΎjΡΆ-x#­2FαϊϊŠυλaλΜ:ΩρdΡX©–’0%ΚnFΠΒοrΞeΏ|Θ_ϊμQ–Ο€˜Θ„ΥΒ’/Ω1faΆ3Ɂjφ «ΦΩ(Ω$Q<¦Θΰ₯%¦Ι¦¬L™&aΌ±…4"ή3ι,W‹huΌ™•ι!ω6ϋ‰Γ!2₯Δ«›§œFΎΏκ…– d¨9j¬ό|34Q"k>λΰ¦U±sNΐOο3²ϋx»QΜSf iφĊ)s¨ͺ£‘:}šΐœJwLΪΠΝΟaΙΓGOaώœύdn^!:1k‘usF&Žγ€€ΐ|–·‰Γ….g_nž€ύMlχJwΦ τ™ΫUJ—μγ―„œιiΜ'@ •b‘³et¨ά7κqΑyGR£šΖ”f $Il1ιψΖ΅Ω95Π'D°BU$—eW¨ϊΣπ&Π8Wγε]ZΪΖU— κ{…S"+P4°§fΌ±NΛΔ)‚χΰΌ Όͺθ€{cΕϋx»QR’€]x)%ž}yΰΓ'3{-EΞLjυz’2±GyMjf8 œ=έΣ­Η#ΆNμκ8†–σΥc:?‘QΔχ·7/Ή{υ?˜Ά_0σ‡l—ΣfΦ?e_%Τr± XD-<ΆΜΜ—7ίο°σ²ω;8©ύ±šK̜uΕBL βπ>Π†&χ η¨D’M™6ΖΆOΣ’νryξβ*V•)£P”œ‚–]U©š?g6H,fwυœ-:œ·Δ T0ΚΜϊSφJΝ`SͺbqxΔ’Ύ ŠJŽ2‰ΟPIΈχSΛ·χ‘T‘vœ(ΧΫΘWΧ—«†Λ³pςeσ./IΡXχ)©‘ViΘ6ΟIӎΦ{.;¦EΗ~€λm"„>νΨέ|Βξκψρ‹.2»π8™‘δŒiͺξ°P—\™ N π Τx2ηΩsΣΊ’bΎw/Τr1;‰ΈΓήU7²kΛ֌)΄˜Νς^ώΜΜΜΜΜΜΜΜΜΜL33ΓεΗo²w“lb;†™‘τνιͺS*† Οͺ:5FΡf£vŸ£Φ‘V‘β*wƒΟRέ™blyΞ:/Θ~m`ŽχbΏ&Πχ˜+ Μνn,še4Ίo—7F¬ΪκΌΖYξ:Ύ‡ΰ™*fb¦WQv/`Ό}qp{Ϊc¬\ΐΚήxλƒ―ΒζΫ_€₯ψΠ€n/φξ[EΦXBςšePP MΔqQΛ*Xo; ŽK:1D°-…¨pšΨΚϋOΛΣκ&γc@ζ§ΩκPŸ΅φ5δ ‡€ΌWυPυ.NμΥφΥ`ΥxƒΨ‰vˆ%% †υ*φπc+45ΫΉ ]Ӂaš0ωςC`dzΖ«κjpSŸuΟ«y>λ՟ΩwjR‘(ψ1gΩ£`sγfitΟ–:9Ση\οOτ€j‡i’ Πj6ƒ§ΌΒ}f¦  υ/`΄}~ΌGǏμΗmοϊ>hΕVw„ΝŒΛ8M Θ Ρ΅P…L™`™TZ%LΎ³<γ5€ά vrAqοΥZ…ΠqR7Sπjρ”qŸ­ ΆW)Ι;{Ή8ᇓροœCΥ»€j°ΞμͺΎθ3ζκ,ZΝΣUΪ*©ή!Π€―[ R Ε{γͺιŒPB\_΄™Νή+WΈ ”ήI¨Ο6VΦ!z£§KΨH²™<]n‡‡}Ώψά΅œ—`’Ώ>0ν4ŽlaT,/δTΙ/΄3)f0ε󷕁.˜g2Ώ© QΟΌ[.oπ„kΗXμμΗΚή[01μ_GϊΪΝiώ°΄[ ”*TŘ.έH4#MΊ&Χ½χΑδΛΰjPφΚi6­gπ+LaγšKh6Ρσ™ΓU»“k W"Cΐcɚ’όŠ‘’=*+]FΠ’β]GΩ™pΈ‰…ρΉΑ’˜ά;½9νοp»Sε½XkΒ#)Άeάƌ5ͺW!PZΧS€d‹3«²(@4dΑ²΅Ή…ΗZΙ¦¨GΆΝ HώπkυΈΔ•ΗV[Xl;Tή 5ΕΘ’όώ¦ž πΖ@Ι£φ EοŠΣπ½3ΘCVΧpπθ­h,ΒN„­ ¨ 
Ǎφh2œ*fΚ²Q•Ά1‚ŠΤόdŽΎh9™skSΐTh»Ν©Pwbh,4Γπ*BΚή21?ΎΞρ’}ΑLΞ ‚²‚X}•™δ|nbU°4n¦>}0n¦ΐRνδrή«•ή \h,ΚξaλX ‹ ?M%ιΡΰ΄°{˜rΞkω‘ρ˜³μ?ΘΉι8% 4n €Γ¨Ηk—-ψ4ΏΏ–β₯2¬ƒŸBΰ±iˆeŸϊ2?ΈŠ’{γν·ΓP΅Ύzδ&μ]» UlΠzύuZ­Ξ ςΦ"5h1*>C`γxθώ(sEΖπ?‚Λ'ΦD6e^K«h­…λd‚Aκάv/ΐOΖΰ—Ζ›π£.έγθ+MV―Uμο)i) ?ΨŠMm›ΰ΅Ÿ’f^²ν΅·RcPΙ,TτάΟφϋΫ늁Y7Y₯f}<­Y5˜fߍΓcΙ²Gώ η ›³§¦Χ`&’]e3Jt—ΛΨ:›)]\%=ψα&ͺή₯‰‘5cϋβΰ±[ >έ~­ν.³`δΝe4Ϊ+Τ …¨IWΗΐrϊjŒ˜Χΰv‘fg™WFφ΅ιpeο,ͺώfΜΕφΔ¦μ« ϋ‚ΩΟ837W΄R ©ebbS˜YΜΔ—λ§Μ ΨΈ”N{WLQ§ΞTŸΔΫl1ŸYˆP]2.Υ­Y  \Υ_DfCͺŸη#›—‡£8H[dΩA ΗW›¬ΐiΕ0>™Λ³‰Qδ —1”Ι _ƒ@D£Q6ZΏ ΟΟ‡PvΟ£˜°΄8ZΗβBLσ₯݈qhN­‡ώξΑ΅Π^Ψƒf{‘μͺ L'ΝEƒΖΒA‚Wsq•―Qξ κGΩ=Λ•G?Ϊΰ!΅πΓϊLΗh΄g³uQi½bϊjηhqvŠ 7ά¨ dPQ•f%Ρ.. z€V• €Š=9p‰ iΡsk°ΜΟΌλ¬Nνα,s ›—H«™˜β1u@½IΉCL'°©ž@ΐδ‹ν½Θκ„ŒŽϋ&κΜ­-ΐ5Œ^MO3₯ TΚ]ψαͺήeΫΣXΪY4cϋΒ‘γ·19`wwŒΝλ]ŒJǐ­ΞΚ&\g6‰―1›GΓΧμkβΚ’}UΕπ½a€FŒϊ±¨΄>¦@'χ1κϋ;K6zΫg <˜‘€ΤΨ―VmŒS%KΐH‡λYl3¨³δ0‰ˆmΪ#»xρpcΩΘζΕlM b\:PDΐ&“›i&Ÿak‘Bwύ~T»Χ¦{²’| ( ξIœX ,5λ¦oήSh[PκFŠ₯]ΰA`,­΅c7cωΐ)”±ή(ƒ[<‚|ρςFŽjΈ‰²?e_η&ΧΛπ£MΔ’ΗD‘•2]ΠΔn,c› ψφYL@`6πYIƒ4\f΅/ƒφŽJ« eaε•"σY& ?NYxmφV±A[GΣ΄5‚ΖzΗ:φ―Œ³ώPί[›§Ί~˜Λœ“™KD@Lΐ€U¨ΑΞΩw‚q-]eο"F瞊έK/F9ΌŽώΞ5ξ2n,¬ςT¦¬ΉXΟls~f0`YΧZ½@ΐΓ9¦,mp Υξ%Œ'€VtΟ A–v‡¦D_Βοƒ’ΥώE„α5„b‡I#J‚΄tZšΈ²ŒyΕjI&±,OξΣΤΟzO}ΫkD:αλδ“Ί―?KτjRωΗτΩ°`Υρ=Ύo‚ρ¦­Šκ ƒ€CEπg¦O3.WoΝcdw™ο£ Ž NFτ*Ε€‘VθhꇧόΈ‡ώι§’Έτ< Χ_kΦΫΰΚ!Χ‡Ζ:@„dς PIΗΰ}Κ$JΖ¦,BΫρT°:ΪDmLκΦΉpΰΛΣηΔX©1‹Ωp!ΉΥxE'ΥγδE€Eο²D#΅;iΟμ4aF=ŸΥ·9³‹› Ώυ0½·ΰg$Q&›Ζ³ϋ£4Γ° –φπ–9ΝKΰΉͺAΚωΤΝΣ7w;‘™o{¦Ία3ΉH€fo ΆsΫoϊgtίrFέΟΣ|l<Ɯp.3qMrQ$°F_Q¨κΗ]ƒ-”ƒZ21$ϋΚR<ƒpΞ&“m‚γˆ¬HŠΨŒn`ϊΉ—Œ‘Υ aVΖΛ~vͺΨΨ`„KY–Ω+VgΗ€ί)Ϋ©Fρ3ν–H\€O7·[ε‘Ϋ8²‡³ΜΛΚςBUz’“• £ ?λΠτŒFG&W(J49΅²‡fΡ?ϋ ,”η™?˜υ½˜žέƒ8ϋι˜ ΐ¦΄)Ζ%|9†gΰ>e:6 mΨƒ”ξH7L»4Q‘²W¨>§IY (IιOλ4³1ی˾/ζ+ f•Αΰ’–/pά6]·ΪςP]’ΝΥΐk\W=£!J[&SΏΖέ}7‹Οl^xΒ·έςν_ώ‘ŸRψζUfMγe‘F±1+»ΎΜdMζ_…¨x PηξΚό.{ƒΔ€υ5-LλΉ·gΦ‡—‘ΏPχy­ͺΘ—# q |㲋­Zΐ„)ŠKi ΠΌΐC ΪDΜͺ§ωš‘?Y›έΟY§ωVλ”™1ΎgYΊΖšwde^ξΎλΫ–ξΎύλ>ϋžωŠΗ•ƒkχ~Τ{όλ₯ΕΞM!F³₯F DCF’‘v‰xVͺz~Nδͺiζ²ohabnΦ΄g«WβϊNΙ3+Αύ”S t1ε ΗΔι;΄nœ]₯ΤΈΕ4fΛ,’mDfσyo°έ―ΨΟΥ­‚ύ„ϊgš~R†—‚¦QΠGMμΚΖΞΤBE©Ό“ΕBΉψfυRηRژ›q5 8Ϋ]J³Ύt―±9ιHuWο?ΦlžΖηd―»ύίγžΫΏϊ—βhγ>?Ϊ|b9ΨϊΌj΄³4μoΎhsRTܚπb— 8ιY =#Υ€\Φ³198›©΅;£‹odLΙƒ+›cN¦νSd“‘ 
€`bu)“±:*˜xœ%ͺŽev’AUiΓιυϊσpαΪ―Ά8FoRχΆ‹SΦΘU?F3t^ŽmŒžΞΛη=bΠπλV Σ6μέ h μTΜΚ&ί•;,$ΣΈa·OY½_ Zΐ•λ;Oγ3·»χρίzΰή;Ώξ«_χO_φ\?Ϊxm1ΨόΡr΄ss¬v™’ΩŊ›ΊCπbTζΠ‹D'fξ} ‰‘ 4TψͺMmY =œ°ύ³?λΪ*μΑ•dAƒq0¬M(Š©ΈΩΰ€ύІX“L­bdΓ"`«W‘^ήγΝΐڞάχv=Α,„ˆ7_b£[b88}eˆ’ ΌΫ~8*νFm=7ΐ)ϋ͜»©383ιΚ΄kfι‡r“%Ϊ/™V+λ6PQ»D'–©ί©-³}ƒιΠ•JhξZΞΛΌLΑλ;ς{ξόΖ»ϋ_ΎςͺαΖƒγݍΏ)‡[οͺ~;#ž§θ«Š‰ u²”ωAίΠ° f‘ΐ$Ό—©ŽύΑ™ >μι@biKπ<ΦΧ ΰΰž–;Žμo‘ͺ,±«D7 Β¬P Έ2«°9Δθ%CcΉΆ]β-€±n Vt# JΟgo»4ΔΖvΙώJŸ¬Œ*hgN1‡bWV»₯|eΦ₯ΣΨΈ5€€+ fΕQiŠΝe°%mΟώR­˜κm!ΰύΟΛΘζvΧ7Ώηφ―ω x½²lΌΤΆΎΥ•έ#Y ‹"…₯ΎNcLe70XξƒΔ©φψ4λΆρΰ3}NbQKγe‰+';3ύ˜ύ4+Šυ3εΪΗώ₯ ν&Η[ϋξΞπ?Šή»ψ’ͺ"T%™OSYRγ}ξ8)@ Λ`Wο―IσaE“ξGŠ~Α¬ΙSeήυ^})†³ιX€A«Ψ§&Ψ^ΔαύΉ9γΡΈ±†=(π ± ΐΆ1±@SΛ±θw¦φ~γΉήzqˆΚƒ€γ‘6ήγΖ%œZkγΠή&Žh©KΊ½>Dφ}ό`'΅ Xίs‘ έtφg¦ΫΪ±Μ:¦M hά‘I‰,Θ C4š/“έ"S“₯Φm΄ρGΥλ'ͺ~Ύj9ΓζφΤ§>5{ή~χ^φ/ίωΫk;w>Έ²ϋŠΙFη?u4,ε˜{3`<ƒώP›u6O%<ςX§Υ0ΟΆ6ΐOc{/V”9Γ΄FΣzE!,λ’„zW@¨mFfε‘LPΖ{1«wR,LSŒΙΧ 4R `vΪγΨhb©1œ¦F7όΆ‹pjOλΉθpλρ4Χ{Υ$°?"θ½ωΒ݁G–ƒμ,ΛΘ€ΩίΚBτά ξ hi™Ηζή‡!ύŠia–λ)A«9½6?™Υ΄ϊ},8ιw¨½œ° *~6Δb^Rφυ€Ώ?όβ;~ς›χξxρΑώ³_ΩΌε{F£ώ‰©ΫHζ+d5(”Ω ”AA΅’^+‚)_ΉΨX€ΆUΐίκΐμV#±1Ή`zΖb\=˜•Υv‰huϋ •IΆ-Π›ΓΖάT'—SΩkλϋ”ehz%ͺΥBυήͺ.o#D™Ψα-uE@Zί*πΰΩ8³‹+clu+. 
LV+ΉjPx,/dφΦβΐΩ«cώ€uyεΞ ŒT§qκήΈ Q°d©\ԘSΆfV‰€Φ΅O΄“eΚΰ„ρF‹6²yωλΏό³ζ‹Ÿτ»χͺΫΏύ―ΧΊOx`iχΥv―|ψh<ΞK4ΰ‘Χl₯ ζ«Ωλ­LκΆv‚VΧ8±"Cδ0~ίͺYS« ͺKV)mΆ ΥkO€G/&f€I^5©LߎŸ\216Ή3†‰€nU&T†…ΗιΛC\Ω,ΈByl΅“«-λ:wu4‰νR/6―ˆX»εΰiFƒQ¨Υώž¬lJCΉΦψΩΊ–iLυΦEvΙft»ΐb!+6€mFάκΰmv Θ‡ŠX¬(ςš–Μτ-Λ4^΅s {‡-―ϊ§―ΏωΟς“ψ}žΪενg=½·~M9κ­z_!sžμ‹jφ ?dιHp Γ‘ό@ΰ₯νAΣΆŽ&ΕΉ€#Λ7KƒΝvk ψŒbg ?ψŒWX4?/ ε‚ ψlŽ,":`ι2kiαΒj2KkfΗ“¬lDLŽ`Φn‰±Fμ[nΰΨΑ6Ϊ.ρΖσd_8uΜΥKΊ ΉΔΆΪ†ετE@Υ•­bŠ£ι˜Ή°41―Έ-&@chs’Iά+“_ Z}^©ιαΌ6W5εlκφ3r²ρΉ~Žκf»ΌNm6―_φ΅j9/―Ώγ[–οΎγλ>χ΅π₯γ­ϋ>δ]:ΏR ·ήΛWrW!†²­zΚt m0^± ύΡ{²΄gτRp`Π:hŸ ϋ@`(ͺ€vhuD2soŸKΤ$y€9AIοΨ΄?oŒL¦An& p ploέ]šbx23F΅s§>[v–0Ε¨ιI€ΊΊE&†<#θ3ξυΖσC\Ϋ)Py»7rΪΘΝα”‘ο,v² Άσό};2΅7Mΐ°(Φ€K BͺGe[#x@βzͺXμΰ…]IŒΜ2)ϋ)NΆO­rγyUλ${mdοεu·ύ{½ϊΎό—ΛΑΖ½Υ`γρ±Ψώ\ηϋKΓAΡΌ&FwŒ€ΣΘ2Ε•1›iit_y–4]ŽΓ€JY'R‰…z›ΨQ°£φ6,ΐ σ₯i²‡Τ7i¬Q―…Η°k9g_w~σΑΧσΧ|Νkώώ‹Ÿ?°Χ„ρ֏Έͺ{3ͺCcnz&²\ω£"‚€y&Va2~R―D°‹`=΄ΜNƒ#C#pΥg)62LkΩ7Ϊ«X8ό~X<υρΰA΅Ρ‹ ‘£‰SΕΤD…ΐ{Ήh©|CwiŸmc»±2 ΉΔκEί€#$έ0χ  hΕΈlfXΠΜΨT(γ΅ pτ6b·G½ΧΝ;Ά@Π ±^ρ…£HφδZ»ήMΰP!οTω|•ϊ2X ±γ±αͺ4OšΥάΙ₯…3mμΒΑμ¬­ι)J–Υ‘}yή •c”Ύ,βΤ²YKβc—‰ΝμξΫΏ>Ÿ°―ν?|ωŸTƒkϊΡΖ_ΗρφΗΊͺίςՉ«ƒυΞό‘y1―\€πΐ•ŽI`'zΚλδm ΥΘxE¬¨πΩ"šϋί{oόX,ψ4VNэ ΥXΞ™&†δR¦.›΄ešx=1.Κ'‚a :π•€`W‘¬†Ο¬Ύ-Ω¨αωσ`Kz4š€ΔŠc)Δ-KΛ>³m²*‚RoΐMα̏VχG»ΙAql—6ƌ―εΉΓΈΜ”‘YκΊ΄`Δ₯,›©!] 6EχΙφ&šmc]튧¨ϋΦ>ΑκΩ8~ N#ΞWύ›£ΩΣϊ(s {Ϋ}/ψP1«ˆF:1;™1{4<ζ™X••VΨ³!gΔγδƈKΩΙ£qi(¬KSI[wΨ-Όαά€Αyζh!KήίTxΛΕα$πίΗΉυP,ξz―Δ₯k#ƒ)}ͺzVΫ λ*–I)O›-Ϊΐn‹€Νx )π,b§˜’σ²XΔͺ=€rνΣΠ9υθ¬a˜Έ(lJοX+ό ;³_rΑG؝Js {–³gήΎη/‡ΟΈη ίχΟΫ―ύ΅Κk―ωνθwίΏu3žŸθKA«jφΤ zωή‘μŠ •gdRυJ“uΙλΩ^ cΝκ‚ ͺ §ζZ{±ΈφήX>5―΅:«ε(Uεxέή;ύιΔ b]!›%–fAK–ž~€Ο’'¨ήΚ6Δ δ6ΦΫxŒIdΪˍMOu’x6 „ηJώ—Nrε3쓇Ϊ8΄―Ι€ύΪΎ„£*—7 \Ό6fzžΜ¬ ko³[ΥY0vGΪώtx 7ιˆ]¦ϊ1ΨΣtΡ)(ύ{Ω=ΜΚ§ŠΪ]τΘPΒ7 X~O”KΗήΦnΔβς BΩC4 }+yΡΏ7QlXΠ³Ωgέȍ μΑΧdλ―ψΥμΎυIw•½s_]cWήB…F o\YζFυv!O ²z0NˆR’U³2©­E.‚ο‹ΙψPq‘ Dήμ ½΄~8Ž}$²•›ΐSΆ«Έ‰…j»ƒvzCμŽ*΄š μΫ³„V«©}}5+’84V}P½Mz { ω„R>λ=³ORΩ0AΙnJχrW‘fiHd7wb‡3χ(rλΠΎ•&Z ‡c«-~πί­uΦY:*ηδriξς:.΅fo:?Δpμ‘1TZ€IJ{εΞ+ʘ-τϋ„г Σ¦˜Nχ*μ{θbˆ}ΘφŸό μ=ς.@±‰jΈ0ξςΛOύRG¦ρX†›φkwQΔ`Σ2=ΪΚȊ{?5ψς»ϋ#ΔnΔ?θ¬Ή€˜/ςHθ2IΞutMh‚”™ƒ* κj Qɜτl‚ŸX]Έ@ΰb…fž£΅Ό†ΞαχGλθG£ΉϊήΘ[{?€«ΆŠύ]lο ΠνyΨλςb{–Ρ^XFΦήd-ϋG(Α©fτ «μ7. 
Ηn‚KζΜJ›λΜ%L”@.΅ϊ¬Eΰj_²γΒρ=ˆρͺ½Ϊ¨P‘?I”Θg·‘ρ―vΰ  ²γƒόοτ―μ½…“δΨ=vξ•Tά<έΓ=<³τΰγο3c ΩfΖΓ 2ƒΜφω1Ώ·»oq¨g§±P½ΧUΏŠ5ΣtŒ&rUR©U*mλtΒΙ“·Ίψ`»‡ε^FYωc²Θ•RρtDœD5Φ;ΫΗκX„t€lXΘa& ΏMu’DίΔ.. 3zνoΓΚ£έni²Tž@Ό°z„§@ͺ9‡Σ1O₯XgδΊ9ρ*›”Ι δYZωy»ΥΊ˜.“yn\NK±΄σΈ΄²ΊΪι/_Ε,89a<™!ΉΎY™H‘ ¨Α%ˆ§!LwΨ~Έ‘ύ’P2«‡S―Fˆ5ΌΘZIά·Νh]ύK·α}Π1”§(§C\\,Ό―©δlΪνΛK]τϊ}΄Ί+Θ{@ΦΓωρFη‡πή[ 2Κτ˜Ρ6‘ΆH`ϋ#­_R°΄|I)Πi?Κm£$W0΄ή-Δ ΗΙτQ:ji5†eΌ:œAš€„Y™πθfG4Ζžςςo]iγ[χϋΒ[]Κ±‘τ ΫΣhΉYύn†Η·{θwΌzU Ρ”8bGΜπ`3θWΆν a;ΥYƒμ!EŸ,’ΘP‘Β¨'HΥ)”bαΕήΠCδ܎¦ολΉ₯bŽθrΌ>Λρτd-V·fWn>zψόέίΉ?7\&σάΈ”&Ρf;2š?‹§XY]Ζϊ΅{ΘΊΈΧ8=Bς*ynˆΙJ©Π‡ U 1½„Ÿ v’ ‡(DYŸεπύ[hmύ1Ί7V΄7>@Άπ« ΔΩ1κιF£ .†SL¦΅€Σ ƒ₯~έξZύ+ΘΪ«˜L§Ψ}ϊk|ύΛ?…“g¨Ξαξ™\˜•»φήΞ€L²‹jκΥΩHΚ_3ηΓPͺ‡~ΎρLΨQ ϋ­D3 ž|2hEχovΡmgς+9VϊΉΌOΟ— λdςyoΞB€όœ‘…Φ0[? N7– •Q,ž2Ϋ’’±iϋά°)4gθ ηYυ ±#ΥS©TΖ…EΫ„ƒ’¬EhŽSΐΛ{Σq†Οv³τ‹₯ς`vsά[Ώ;]Y])ZY΅Ω.ζ†Λdž—Σb(QOOP χ09y†ΩιWˆγWθΆ€+ΧΆ1XΏΪuqv1†xiΎ’³Ÿw‘!ιC&žš—œΉY."s.% ΅Š|ύ[θήϊΫΠ»ώ‡(›26Lί šœb6]δΎ¦Έ—"έN KK]τKhχ֐uΧQ#Η›½ηxφρ_Δξo,βΩ§ΨθΝpu£"Ο4±Ne «Σo­M$-zI ΩΦ€dτφΟ'Ζ‘iΙώK4z‚τΓmΝΈ6υZ`9Ze%€ υnl ¬,UψΝ³1žνMδgz/$Χ(χ@Όι±\€—^’„•eιQΙ}:©Όf‹€<ΐΞLi"’•Φ! yG,JAh6)T^P8Cΐ>6—T‡G2μ{όόE+|q²9έϋ“ϋχ¦χξmtΆ–&ύv½ηγx)Lέάp™,_όη/ς@Κ/G=‘‡ΎFβ%wˆΌ½ —χ±²²Žjiεl†ρθ“ΙHΒΌVΡGα£b+xABP”<[ΦΏΦςmd5ΐ±š LOΰQ#Δ€ͺ (+Iάχ»Š’€Ο;@Φ–ux~Œσ£η˜ΏFމP Ϊ«Όk±’ηΤ£`&Γ=ρΥlsxΤ'&冑v](…mXκά§ η›³Ιˆ*H¬ΝΣŠζyˆόnBξτΆΡ$βΛWcνίt U™H7•Ϊγa²¬±2hΙΙ»m+ΛΕ|ΐϊ Γϊόuζ(Κ"% '―o΄Pδ±ΩΖκξ1D»$¨°€žCΒS¨¬«¦¬:~G½Η²ΝςθΑ ω ^XΆ^ΆI I*)ώ7Ξ'―O.ͺ~•΅Vκ΅«+X_iύ’ΞR}κ²j‚zzŽθΤ ”—mΙν—ΊΜ€Ah(kΙ/ Œ‘P U'Hƒvwνυ«β…‡§˜Ž†(<Πι΄ΠκtPΟ†Ι£5Έ‰Ξς=d½k€Λ‘Β‘Α©ξ~UΥ(ηCDž/ΐ«5_/ΐ«ψ˜Ž‡8σ9FΗ/ΰλ3΄‹„A<λΒͺ€j euT1ΦλS-@₯Tœ`g Κ@GŒάΦͺ*0π!\œ+σFP‚˜3‰.΅Φ"I4τόlsb €Ιjkλm‘ν©κˆ™φB^L‚ ‘φ—™QIΓφΥ6nm9I!,¦)§QΞεEŒ±‹kmω$ΕζγY8α·nΌŒ‰€₯!Ό<`ύδΠΡ{fβž^ΑgΊ?\ Ξ/,C‚‚Ήχ¨’Ηξ™Γαy§XͺϊKkρΞ­Ύ΄Ρiϋ© ³7pa*© ΈˆPM‘εm€ΊΊ”Ίdσ—Ω΄eH&Šʈ΅jŠjvŽ0>B9|ςό)¦'_³#τϋ]¬mm£la4.F₯TW?ϊη±ςή?ŠΦΌς]1Œ8‘σ§%ΞηΟΚEžce₯εε%΄z«pνuΤθΰτx;Ÿ}―?ύ3˜όKΕ96!Ο +?#ά. 1'—{}ΐ£>¬h²ζ\ ΪV ›ΫΦμάΚΞΛԟ³€§εx,eoL₯Τ; "ΒJfds»Q6Ε+Um₯Qό—_ … 8Gή- /’<2χs΄r'³NΛcΠυz_!!eƒΖ˜i² Χ©Αΐ§J…έΠρ· =>ΞΕTTφ9΅ΰ³Ά€6²’#ž{ΦκIž5Λ<†S/rόμE―~5Ί>Y½ϊhφώ£νμρ­vχJϋΌ—;.ŽvPφPΚ!B=•. 
!½cφΏ•‹Ψ‘ ν@•jΙ£…ΩΥψa|€κβ%fΗ_‘ΎxŽΨΟ6‰±†2„%‰‚PiiΒϋšΥ2R$χN*QΐΠϊ1’‚_·΅JμΰΓ;=άΏή…wΰ²–Ο“ύνΒ Έ­2΄ hf{6½(ξύζIη‰M£0 ΗγxRλ=š&H—ΓeνΉuΰŠžTΣσΞ*Šή:¦uŽΧ{ΗψυΛVόxu6+ξNξέ?|ΈΩΉΉ^wϋΨΟέdaό qr€Xža§βžvΪΠ;Ÿ·rαCNƒΞBMœξ’΄…„j‚zvj|$‚ΩΩS”§_ ·§ΐ+έ©Ύΐρρ1vίLp>m£;Έ‚΅- –7ΰ;pΕ:BΚqrψ;ΏύΛσΔύŸA}ς ΦΊ‹Δ}k+=dY&ΐUΧμΣlε €kάς\Έlκ%,œl ΐ5ͺ–Ž9/;=Ι€  ‹Ό?Π¨Λχœ%»κξyΝ6.Vƒ yΖω›Νϋν kύ\?— !χ NΌ―‡7Ί2)ιώ–{Ήqȝςό&eΔ—;cQ·7‰ΎΪ ƒN©"‡ŒΧc-ψ¬ͺ-ψ}šKΦσš²=Ύ€Λ»πΕΪύ δύ+8{όςן₯ού₯?UοΎ~>]Ύϊ€|Ιέμα­Nw½sήΚΛWβ}…ρβτ±!HΧH`˜ξ47χvλψΌ2Ϋ[(¬tyHŽΞ"Ξ†pgΖP(©’_Œzv†j|ˆrΈƒzΈW`©cs9a₯D™ΰθ"β΄μcŠUρΎvŸ Ο?ώ³8ίωΊι[k-¬―φ%Ω?«ͺ:¨¨e}€€£]Θ 64#q/C=`ͺΊa[Š€fλΟiBΈΟ@΄°γΌ‡-Š3’‰½fq ob2—GϊJVτΠξ_A*Vρboˆοπρη?ύQ9ΜξέZ‹<ΌΦΊΉ:½xgΣ—HγWH³C€ςL(1Hξ‹7(Χ,:xgΉmΈ„ζΉq)91z.Β/CΒω8¨Ρλ€GuΔ©žJ’Ώžœ νczφBΒΞ’>ΔF―ŽS«ΣΓ_|/~Ά˜όφ5ΆΦΊΈ~} ƒ₯e ‚’HΫ…πΣμŒBνΕτ~a”r^hΜΡΨ‘ΌF+Œkδ0MΨ’+!pQ©Βhϊ»fΜ9GδgΑ{Π»s€½^Vι`θΜM™! ά}xZ*αXΞ*ΥΘ­΅oΞ*<έ•v#€HΜ΄7φ|\Λΰ‘ƒ³ 1Ε9JE0-IϊΩΝ…|<l.ηύpο¬GΟ“‹VmωyT£U!‚<Γx–πρ§_β―ύ•ΏžρλΩr'V<Όš=ή^ι¬w†­VωΚΗα „Ικ‰ Υ3 “6‘ηͺ@‰ΔοωNΖην^LΚIΩΊSΈEΘ!IδΘ¦9#Υ“z1@H‹³ Δι” ^ΪιsΜΞ“],΅f"Φwχή]¬nήB•ΪRS§E{€ΌΥƒsb b¦2εgUPο„ω1½JΕ0TCLΰ΄$ Ύo”a›`ŽdŽ 6μDδηΑψ—5ύH Θϊ Ός‚XFφ»σ–―½­ϊ»w½+€UUΤ€Ό]IxYΉƒj™o/rk3@²ΠηΟ΄p}£ουε\ΤΠ—Ο‚χ–•― `?Υ]v8=Ž-XΚ&£υ)mžΰ±‘γ³έyξληί+O_rv{«ί°ΩΊ³ιΫ}e˜ΌD=|…zrˆ0;j),‘Bc*Άz:Ϊ6Ψ6°w“ΖίB;??ΥΦJ1o.;τ;…8tƒαU$Q4Q1λZfUNO¦G¨F’O«†»BΆυi†ξ` ­ώ*ΧΗΕ$a\1λ’h/I‘ 9‘[΅₯<4·₯2@œ<θd#…οΐ„όΖ„>M½(ΠΥ5`+Ό|Ž:εF±ΜfΖΡQŸ?Y/LΆΩΌ‰|vΡ«$lˆ%υ’nnΆ9-€δΞ:Z„ά{Η2Yi‘|!χΛ―DΖGe±vK@Cͺ–M™zΊδv™ό•ρΒΨQΐQΙ@–Q]VUnΉ-šuΪ’Θ0­3<SΰΧ―{υΛαΥYΚƒϊΙ½Ήχ5hoφΗEQξΈ4YTw¦Η‚PPO²Λx]Ρ~'6ΑsI—πŸOΈάώόŸ θυϊ€ )8«}•žeΌ;²ΡΉ6*(ͺi+Δj†Ί °ΕςLΐ¬<{ŠϊόkΙat;ϊ+›5­[N#B*P΄ϊb>AΑpςŒΪfZPh£x!Ζ…½—œ¨“x,š’>”λahDΎOς+ΟΡΜωˆ1τ$ 2i<` *[œrγ©άΙ‡y6κ‹τυΉrΚΎΨ™ΚH·9¨Io¦†Γ"oφπώ>–z™Iβ; rPΩάmπŽ™ 6Ϊϋ’)}&ΑγπΒγ“Χ­τ›έΥjθοΜnn?JοέίjέZ­޲Eε1ŽwΖ ολ I+ή[±GήΓ}8Π…œ=ζΛfž—ΣR²ςΣ‘γϋ9ϊŒΐ²”«I6Cσn§ ! 
¨₯€Iͺβμυx³³˜ž|…κβŠ4]©ΞBΆŒαΜcZ{ΐw‘§xiΞ{½.ρ  ΥloςΔlλ4ΉNPŒrε@ŠΆαœ―&5ζ{Ίp’?‡Χ!Χ%­\vjˆ ―\ x@Ψλ ’°=‘¦Ζxρbj5Ώ(§$}–ƒN&΄ŒεAŽυ₯EΖA pž!™3” {Ί€ou1•εL½³q•αιΗΗ»ƒπόμjΩZΉ_=ΌΏνέκΆ7ΊgE>ΫqiτR*aς±ŠΗ/•Ηda½G³$jZ7δ}ИΡyωΜsγršN‚w²†‚ςLζ @‹cuŠ+ƒ€ηĐ€mD+`β€%„: †Z€©c5”Δl5|ιι3Μζ 'ϋΒβο-­£θ_Α4v1.="Zh­NΞ0ΠΠ6σͺ@ Έτ\ε;2Nσ}Z΅υmΤ)WYυ.τώΨδ}‘Σ1ΗχΕ’ΙΩΙ’kd$X1€k,&INΗSϋ\κi)Ίs΅#O:-Ι%…%U±XžlχpηZGΗδΡKρΖφ¨6d|Œ¬6‹J‘ij€ymr.Γρhα}ει“ύ΅ϊΜm—WoέOο=Ψ*n―‡ΦΐdN*―§β} ο+ΕZ%™\ΓΣγ$,†΄`›—ύ>`(FͺάςΈΛgž—Σΐ0ΚxgK@+ξ4œTβ© «v!Α λZz$ί‚ΟΫH.—|ΫJ@j "Α€ΧνLsi;˜>EyφY8G§ΫF{°‰―b\·0©s‘ΚΫ}δ­ΰ2Θ|DN ·”TΠΎ€RLδZzWcυύ“ΚΙuΧ‘\",PΩΥ±O³ΑS`4±IόσΙ}M―ΖβžkVΦ¨!±‡7»Bk_^ \]“ρn;9Ο› Ψm,θ΅39޲Ϊj ryέoA”χšΐeœ4½ΧΝΖχYΘπς8Η―v:αιωυͺολφΒϋjm΄ΟςbΑϋ/Ό―}Tγ7’Ž™ž ’ΊΝκ)τ=Λm³’α™g‘A‹'`ΪΐΑgm\B»ό-J@CAηZ…ƒ£Ί©L6— υ²φ2²Ξа]Ρ‡/zΒΔFΆΛηζ οzΊ4C,ΥK-ΘΆΟ1=ώ iΌ‹ΒΝΠλ― λ¬£rŒ«BZš|ΡGw°*}rδmΙy5ΌΥ΄³ρd“t>tB˜άΌσlῊλΏϋ― jέB=‹)›&φNRן ?(%σ₯ΏaUimΘή }Ω―I‰jE.ίƒn†'·{XιBn½£+dw ηLόέΆ'―MŒ‰ό˜Xo%8ζΆκšιή`ˆH"Zyt8dψlo‘ϋZ‡ΩΉχυpžϋΪΜ·7b«—Ό›.χ―¦‡’φΚ1RοΛΘό0Μn*ΚRΌΡ‚¨~7έ°Π"Ώ SΖί΅(‘±a “,oL₯› —Jώ₯€“η?ΔΩ‹Ώ&`–χΆΰΫλs @λ"‰χy^;ZΧECΚιυdΡAπJΌ΄κόμ :ν­ή€?3[ΑΗO‡Γ—{§Ρ{φώAALCKxΗk%’ͺςΩqvŠNo€’ΥΖ΅Η;Ϋ;Ί·NLΛΔŒτgKήYƒ+Ζ| {'ΑχΑ‰T`ε ήτ£Ϋqj:Ύ­%­F03Ϝ([¬ $‡aΕ{6’s²—mψΖ;sΰϋ§()ΐ{±lneνρς8›{_½ψβb«*VΤχοέρs mmφΞsιyοkW„4γlˆPΞ ΪόžΧ$kΫi!!3sŸt˜sθR€‡ξ~žχrό;BμΫi PCŒ ΥπXυ#;]Ν‘*§οόEŒχ~ŒΡλŸββΰsLΟ_£έΏŠ’·Χ^†σ9†ƒqΕ@@Q‹@ @œI@ΘΆγ}”η/Pž~pρ…£έια—Ÿ|έ??;ψσ&©«ŒvnR+AΑO†šT“7˜žοŠfΫϊ͏°±ύϋθ^cdˏPϋUρx½JβοΓ $k“ŸρΞQΉΦ Wδm₯’©ΫόͺΣΪκ(x³α–k|_σ l!€#=$ˁ½“ZΊA^!}vΈNΣv΅uσA||okξ}…bΰ} x!NΚ eέKψ―ΘΘλmV>ΡΈξΜ<Τx<ΟΕDžμ2ΧTγuοxdo«±g+”τΎ(B(ޚζ$ψk₯S€T2Œφ~„Ωξ_ΖψΥχ0>}†γWΏD˜ % "T©‚Š­°“·­w–kΥ5ŽXž£žaΆPγ8{ςb·62·Ύέ³”υ€•\™ΟBδδoΣεΡg›ŠμP5»@9~ƒΩ…pΰpγώοαζού³(σ-5AΘ`ˆ³ϊGΐ4™;ΨΉ™Ž^ €W@’υ§χzό qb€Ιι±ζhdήN χr^ΝYyj>ψ²r€μ7Α jf¨cŽN©XMΕΝϊή½;n>ΝΌΨμ^δ-ιy|…4έG˜#UC@XχB›IΞΠPΟ^cf+ν$ιςšΰ,?MΦdή)κ»lQ’5ΝgξRϊ+ώ²;dδ<ΩΦ˜Κ§Ι11ΒzNDΖ¨{&‰Ϊ‹/ŒŸΟ8ςΏAœ©^΄Τ¨G˜ΐσκυΐYq=€€XWΣ‘ΚΉυΫλkXΏσ7 Ϋϊ›0*c—ε:D{Œΰh\GYT(²š!Vcΰjzr²Θέ\υX@•Έ&·Υ&ΐιΫjΌώHqFCή…Άyώ¬iξΛύHK}C%7Λ «f½Lf#Ιε3­mκTΏΛ‘½½‹ΥΉtLM=wΊM@d!!ΛrŒΏŽΎ‡,+ΠΜ'Y`Ψ7šΦ(« ٞIEC*T•z|8ΓΪr›ΫίB\ϋ#ϋpRo‘Š="½~]y-ƒšθUΕz‚Ί£Ζpˆˆ=JΟd CoΆ%1Οceqœ™τ©Φ,Μ 
4sGφηΉ`χ“Baχ›6.ςΗЉή}†“aΒ§;~ω<ΗIΌ‰ε­ΨάΌ§¨FG"•SΥϋͺG’±Ÿ΄ηQ=Θ† {1mE²Ή0Ÿ7œDQh‰1απtFΊE“λΖܚk #+―Όο—Ξ<7.©„ΖœY4πXφ(ς=’D1z)¨τ=jˆi’v˜KμΥQ-Ϊρ ΗτΪb4Sœk`²‡VΉƒΝφάΏ–γώƒGθ^ϋ\toβ]Lb_χ9“,Ο2™ϊŽ*zΌ<¬ρΛg5Ύ<μ£½φOήOξmb₯Έ€›ΌF˜‰»ύ΄0Qœ`j‚f―ο›ΐYVvΪΊ•¨Ω–Οφ¦2ΒξŁ˜τ™~ωj"*·Ξ5ο₯εΩv3Ο‘uο’ύo£­-υ¨“efέfE‘ΏΕ4U‘•ΓGš‰ο˜@ΰΧ!2ΩA©$‰γα$βtT#δδu₯:e”ΰ ΅ΟΙ!€ŸsϊΛ~·W'xxg 7Ώjεpδ?Δq΅!ΝκPŽhZ„°©±™—mε„©δ΅ͺΑrœ.>$s‰c^;ΰΰ‘6j!R¨1Dσ@r„Έ€o’³Ιyslγ$“Ιrœ>~^βWΟ3œ„λΈqο;ψφGογލ.Ί8@>C=|)χ]τΎ(F €9¨oς¬μ9ž*ΌΤυŽυwaX }φbŒ³QΐΡy…WoJ”U”αΒ'΅΄Ή}υj"ϋΌs†W¨ηΣ96΄Ž:-ϋκκ₯3ύΛiάΫΒ΄¬TΧ«βιΊAψ4^T£ξγ@[’_ω[н!¨ι>QgpΞΙ/πt&ηVΠ”΅ž“Us¨šm=A€\&{Xp”ϊρυρθf··ο£Ές8mύ>λmL@šζ8Iˆ½y Y^.‡χ9ΰΌ™¦Ο˜a8‰‰ύΠ:rΑΜ}mθΨλ†χ$sς<€ 0΄$HΑzxμπμl™4m#ΐγεQŸ]ασƒ%δ+ρή‡ίΑn`³?…›ΎρυpWΪ†dXMœ!†Ππ(έΝΕŒζ~² ψvŽJ|ςl$ͺΈHε!(λύ Gšρʎ/jyΏ¬“&Ω"%kή3Kt:«πwόώΓKgž—Σ~π›ηΒΛ‘ί@½ΰkUΒ0Œ~J'«!’0ΚπT‰Ψ%`CZ*jθ˜~\_Ο°άb₯ό•U/Ζ^θbAC΅$:i1€z|ˆ8zV΅ƒUΏ‡{WJάίήΐκάK+—ϋρ ΞΒ¦¨Φϊ,"o&šπ=ΡƒwyYΦ‚ΛΠ„4Jω ‚±J:³ (‘Xa?@π6ΔΨΔ<ŸώFξFAK«“ ―ŠΉ2 f¬>Β΄W©FY&*$ŸΌ,ρ³―=ŽͺΈzη[sο뉨Λvγ!‚x_;"wg'B‹‘›MŠ<±l4CEgޱ O~CΐƒΣJί‡“€I)ορδvWΖΩe袜πζ<;/ rίΞαλέ^L œ*CΔϋhΓƒ^ٟϊΩ₯3ύKiŽ'¦OΡ4bΨ ‹›€ΖπS^'Ϋ pˆΌΗpU ‘ͺRqΜpc=C―₯RΡό,H>Κ ΔΖΎxm’{+λ!dΨΔx~φ ύπ ·–NρπF ·Ά ίόCϊοβ`²iεΡ\mΦ^BήZBΦκ Έyί’Ο!πŒκ¨Ό³” zaMi VΩΤKθ„Ν‘}σbε¬y>Dξeζ( ™υψ“/J|ΊΏΏτO>ψ6披ΨκO‘Ν^ŠHζœ”¬9° ‘§Έ$yKVnωΙΌ>VQIΒeϊ@spμΩ$ DΐX~7ŸνNρΕΞ“™άoBά[4Λχπνϋ}|t―‡y›“x:Θ"ήΩαY‰vαMη…ύCA~HHzΩ,_ό—ŸΩΟ0Ε†iͺv² ΅θαB”}žη'a4$d™| X‘ΤΎD―‡υ€?tlάβύŒ‘^§·W'ηςV‘z|¨$©! Π4D m\i °ΆΩΗxνއΐΗΏόϊΛλΈ²±*Σ’rΜΔλώΡ…‡ζ=sWž•Tfyο¨P«O΅¬|Σ‰"Ή˜Η 4TIΪε§Ιƒί7©†Ζ{™‹H0¬x=ΎŠSΗΈΎ}Ž­% Β…Eξλ‹0}¬(ZΓΙƒαΌfηΐŒ ‡΄h^ή(ψ*χ-6Α8‚ΰh%₯"”š²ήΛ₯πstVβώυžΜγ„χΘ}†ΥAŽ•žΓήIΐ‹ƒ‰Άmy©j~ωjŠ[›-tΫ@0sxμβΛ8Ξ%\jΣgΘ + GuT‚)TΘ‡š27$…jHI`PŒ&œ…†Έτμ$\4έΓJQξ°Ήz•ŒnLƒΈC”žΚj:BPI!?{%ΌΖνε3άYŸ’U`ηω—ψςΩ!. 
€ΞU΄—ΆPt–xW@€T[›τήxͺ*¨ΗΙ‡TωAΨNz©ΙT!MΌωώ±€`Η°Ρ»ˆ.Γξx??ώ?ΏψL–&l?όή»Χ—'Θg;Γˆ“}Ι1¦z¨Τ‰¨`κ$ŸxrQ±“ |7Π«‚lΛ>†Ϊ–ŒΚk”cδά’FMƒ(Ψ~½;E@rxp£‹ξ³6eΖΓυ+m$9“pRŽ+ΌΜκάm6ΠTdΪ­ŒδWΣw)Δaςεd%4n\Nΐ\V„₯$ψΠ‘©g/!Ψ’#%Λ‹#Xρ΅ƒXC Q©΄#2,MnΜ δ–©ΌvRΆ“λ…„}±ΦιηgΧιg{θΕ=\οŸβήϊλνcœμ‰Ο>Οw‡˜Ζ%Qυ(²ουA"¨8ΐθ‹YΡF†6ς6Ϋb(mΈd¦ €[zbfijƒ)u"Η$tρΩι-όπθwρEωGXΊφ;ψξ‡wρ{ο­ΰΞ ‡¨/^Jθ(Γ:”τ›b΄Κ:½κιλ!ΞGr*H ‹₯€8j“Ρ{·Χ ΗπΟ9•6O(η–yΰΡΝ.n\iΙuάήjΟ·;rόΩE―_Oπ۝ >ίKΕ 2ο³Θ€ΡήΛ4)­Œ3 6Ώ#ΝΔ₯3ύΛi o§ω«5ίμΑŒM"-†£Že³&ϊεx­l*0‘S&§‰`” Šθδ yήl«²Χ‘‰7œ /Υ¬LωRΒ‡«₯―ε’β9=@Qξb£8/νFﳃ_ΰ³_ώΌ)ΧqΆ1 @&ρܚ›³]Hδ)χŽ;°:ηLXΣψŽ-Cb1 ΐP_Bͺ˜2Ό<(ρW>ΛρΣ‹Ώη½Ώ}ˆ?ϊhήvΨhΒOχPKβώ Q$s¦@ X*Tb¦ωΜa8©1žΌ>š@fΥ›kζζτΏV—LŽuΖΫδχI8:­Δ‹J’‰g·΅Z ©τN’(έώζΩΏy> ο“‹€ύγJ(Σ2©ζqχj[ΐφυ›ŸΟίΣΒF³1ή(kΌV|­3AσK4:=0‚χ'γΑ…`[8X͌‘η”m ΫbC^1ΫFE H€w–ΑΞ0Τ<`z~†s z‰α bΊœ š ½ΐMχΡΗ! Np«ϋ wΆ:θn}Η­?ΐNωgΥBτπ.B9fFζΗ 4œαwiz0jΆ‘SΗy ΄χUFΈIερ||ς’ƏΎςΨ›έΔΪ΅'ψξ·ήΓΌΏŽνυΈ0|‰Ωp΅ΞzMΈ8T₯‘ΒΑ‚βΡΩ p>”Ή§λaŠ_Œ B°d Α•πΖΕ$ΰ·/'Ψ?)ε³n_νˆ'V§πιλ ~υΥpξ}Mqr^!%θ\Sˆ§“Z¦m,’šcBYE)…£ˆ€4'iυΜ€w½–o‘1,ϊΙbA‰DΠUδΪ™_£n;ܨښhrΒλτ†‡Υ`ΝΑΡσ3* °²θΙseΪ=Pa„0=A °ΡΪΗƒυs<ΉΩΖΥ[Pό>^ŏpPήDΫjΛ‰-7ςα–ΚΰΧ‚‡]š‰ϋΜ£ΘsΔδρκMďΏŒ˜k~!ταΡ{βΫOΆpw3`£7…/P^ΟνεδD=°„¦CKFe¨§χOkŒ'5znΠiyuΝ°ζσHΆΪjΠ°₯˜©'ΈΎœγΪZ!ΣΣ?y>χΎžρς°\€)MB°\Οh*š [©#δΌK½Lήγp¦ ΄νQ}K—wzdδxyzCsR7D!ˆδWIδ«R,˜xgxEpδΆ•ΞŽ‰\1N7¬¦’‡Ζ°RΦ@³Ά¬ Ε~P½n–ψY`±ΓˆzPΊH§CΕςB&Cωr½ψ·ϊGxr­Ζύν-t6Ώƒ}χ;Ψ­β¬\FΠ¦u‚+C@#αcΊ ώΧ{,σΜΟ-Γp₯ηρϋ_xΌ_Γϊυχρογρ­«Ω!π9κΡdšUyŽXŽt`Ga’r^A’ΆΏ“Δ;Ρ‰½Šοέ]ΖΚ G·•A§Μ5=wζΑΨt Δ”8ςœ–ˆ:’Ν;Μl,φζΌΦ™œ7Θ2 η MθΠb™C°΅Βr?“σg2‘ώΣgcœjx3JiΠ ΌoυςΘθύ3”„n[z€ΗD= δ’Ιq¦pκh‘―Ζ|˜)ψ8ψC©Žό4υήLnΖ~.9YV?CX˜IR³Α―‘ί‰ΕωΩͺ”\Z˜³C΄λ]lδ{x°qŽ'Ϋ]lέxŒ‹ξοβu˜{i³«˜Ε€zik8ΚΙγFΙ‚E„<ΟΤϋͺΕϋϊΩΞfνGΈp`OΆpkeŒNύi΄#•G”':*mΔJΐD€i‚€Έ#±J †–ν–Sο ¬JX’kdXͺή!++±’χ’„ύ‹ύ΄˜$Μό‹q€'Α •Ϋ[[νy³νωš’’Κ1λf2Lε½νnl΄‘Ε'ΩίkgΌδDu`‚ψ; {;ΏS2 {Y5 Ω;πLdkή„\—zY⅁Ϋz¬&hιΥ$+P(―%ŸdC~ƒϋλ8ήŽΞƒ^Ÿ#δZŒF=ΓeΕA4.fΗπ³},a·—πΑΝξ\‡[ϋφπ»x5»‡³r ΓX2ΟΠΪ™Δ>y}[Ό aέO1On/Ό―_Ÿ]ΓκάϋϊφϋwρήνkωςΙB°π5€ηQ*/[ ςΈ<858‚P£±Ϋ|ΘIΗIFH‚Uc8΄!¦lšALr…RaœƒLƒΈλ ϋo\‘!ΒΈ­³Θ©δ8δsΫE&ΐ&ςΪbցψό=b|3©˜ϋήΖεGΦ υκZ ϊθ’fΎΚΜ΄<©hTRω—zY Z!€Z€Vόξ,ΏΑ·n%lίΎ·ϊmμ„οΰωhΥ@,θδν 
Γ)πΙΛ€ο}žαΩΕu,m=Α·ί»‹χoΈ>BQξ“]$i:‡‹௏JCsq™ΘŒΓȊ6|ށ˺b>οΞ‹‡ΘŠsOfΡκΧζZGϊ™ŠPc‘Ω,l8‹iρλ―G8:«•ό aιo­ΆL€λ9.iWϊυΣ±4WuRuYΰxX‰*t8ΜΕ€ΦaΔd˜V¬b$La=PΰΩΫiΡΑ„3C+0'Kξ$P$Ά‘Q @‚ΐE?l8“Qlhζ͘θwM)““ Tίΰϋ Ώ zάηPε„ΐ–~ΜωΩ ­ΒŒεXB=W‘›φp΅½χ―ρd»ε«pRό.^ΞξcX πζ"ΰO#~ϊb Γμξ?|Œο>^Ηݍ Ίι΅(y(λυlρYm,QKΞ2ΙΚ/ΐ+ο#k dUΚz¨“\Α*"«;yŠ}΅ΝΕMμΒ;μ@oXG:ΰδ·VϊΦ–ŠδεΑ ―Δ³ΐœγ0θ£³ ϊϋ%²?ϊGύN6Α»hΞpάbβωΑφύβm4(œθΓ>Η—iΎKφ[ ζ·`=!YΣoJj@δό¦μL@T.μLJc±Ή€Fq!Ζ&7ΞόŒ ³ma0ͺb¬:ςši“|›zi©‰—VΤXΛφπ`νάNΈ}c θ\ΗώxK›πΡ{ΫψθN­ξ΄κWΐt­ϋ Έ8RΠ?4Žy/Χ4¨vΏΟΪ2/Α·–ΰΫ«~³…‹ύίΰτΣiό.+Œn½Χy ΤΞ λ,[˜G"±H&Φΰξq?@Φ!Iρ`e •γφVΉ2ςε{>Ξ7@b¨ΔpXΌ»9ωUd|ΞΗAΑ‰I¬·(ή₯}ήz•λwτ ΞXŒΜc9d oš|‰§FO ϊΧ#(Γ?°8ΑœΎ~Ά!ΐBɞЍτ εΏˆbsŸn8©β‘ΝΟIhbΑ––ιƒeΗχσΏTΥOB: u…X‰—_‘»Θ₯u°}%ΰΡ­ΆΧ¦XΒΎ(tΔΙ‘Œ¦KΥX8_`HO  K­ƒΔPYϊ#[ƒΉ­ΐχa2βόωΌ;αι_Ξ?A'£ސ\FRUXτΠmgFΤ‘?g牆v(,šΌKŸWjŸ>‘Žΐ­+mωl˜^Y9^ψ`w―weΨp»εMΘο<۝˜ρ²!Ίv‹*(Ι¦ R|doύ’pΖ-‚“a₯3d΄ο;Η$8_C"‹OfΤSΐπΕ‚’),ά bвŸ‘ΡΎ0Χ&fs‘ΧΜΟ±90&.$γΰχv‹h-v MθΗπ8ΤN©"ΝNκYύi²'£by.œ―kύt’!+sͺzδ.ΐ·PΰθΛ?ƒγ?DΉCtβ:Σί’εΰrD‚˜!·rLΨνoάŠ›؞Fσ BL’„―C«κˆΗ·ΊθuI΅ηAϊ,Ώ} !θ΅ΕΔτεΌ!ĞNVMo^‘c_#`‡ΌχΆš”KFΎiα‘ηΕ~L&ώ9@ƒήN3_ζ0έ*ϋhμΠFlngΊ G°βρ@0•Ej―ΗςβB°l8VzΟ_£Σ‚ ?ΧVx<`C₯Ί:@=΅z hξΛYα?Ϋƒ»ν™Έ&pΦ8ϋκO!~ χζϋhΕS #rΔΔΠ*%R# θM0‚χΜr!ΰ93άE―Q@‘CiŠάΙaηΤpm­^Ϋs‰IΜCxb™w’τ―ƒδΎ`‘ά0-PaΠΝψŠάƒͺΊ\§ Ζό0€Khž—ΣlΨEBΜSΠΛ°μt]LŒ€G`ΠΔAy*~N£hΤSν° °"κτ„Οοz„`Ψσq΅…Σq‰ηͺNB:­ !P&‘υ³ άΧ‹Ό²ήAύΌHχpΦ)ρϋΗIŒ[―’9 ƒU]ŽΡ™~* ·yήœ7δfA}πΥ`†οΪα½0dXΓ3Œ~ή_[έ„3ήΟ¬Œ†―'χΫžηΝy…ŸπυήI“ψΒγοœf—ϋ9τ§„ͺ1šF[­$“ŸdXΣw‘ε₯ΰ’Ι’Ή)&΄“‘ ¨5Ζ·ρ80fψW /-09pΪ9“Α¦€ kRΚ U£)€ΘΒbN?τ"廍¦Ql8YLί)EEaχM…£J¦7-ΆwηλaΤŠΦ[μ…†uΰ΅`ψLχρ}&ωΰšΏV?ίγψΌ„›Ώyq†~Α…ŸcUg φZΟ²Ω&eΊ'τ«gθεϊ4μτ²νΘί‚ξ—ϋώό`ο<ΆAy@ύ°½c‘½F}έΞBP±€σ~±Ν²οφΡieˆΙΙy:-oζκ5θ=·…οΌΎΖ[gο<2˜_D±šκ¬φήΓ;ΓχΧLΨ[m~°ŠΕͺ#Ο£€Δά=!›λ²CR˜Ÿ"pšJ™ΔEΰ΄'>€ς=ΏxUα|0ž%Ό9€Γσ  ϊβ¨Βλγ…¬L­›N}‹¦/ΤΈ­`C·°όƒ$= †””ψ9½¨1-ς9cΉξζ" €όŒζ\Mzιa1?Δ?.T³•Χ Ϋ\N—ηε‹1ΗFo΄Θ(Tθyδ{9ν»Ό˜° -D½~H˜yοzοoχ$|¬bšƒύTh)[k>Ί?Κy6ξ sЬRΏΛ‘½…Ζ‡[Μ€CzS04 ˜(αc €W4hμι#š2·Δc­τσ£M{€” ~ͺ\ΩbΒ³ƒZŽ*$œΘ—”ΡŸͺ*!y™ˆΟvfxΎWΙ΅δL’˜‹γΥΨΓΜwqΌΟ°ͺ$ΓΣ)œκΙOfy3“s³σ)]ΣΥΟm2 ξbUg”FˆτΒLί­ν΅σ$ "n’/v!²Τόκλ‘Hωπ3AΨ; u(4ϊm/ze 2Qƒ-5qsΓe0ύ·ίώυ{mούŠsnin ¨K6¬>šσ±ΤSιδΐ ‚δ`(ζ½ν_τηΝ€„&i—ϋΈ­ήŽu Ÿί¨"zαΝyPb^O@ ‡―eR 
xyXacΰε‘ͺ€‘~ZFζꍒC<&Π%†#lΓ5‰ζ½C«Θ”> "„™&©½IX‹±*kMθάφbΰΒ½―F>“@˜Ο•yž_+πό6Ν OžήφΔίκΙ:1'+ήΧκ Γ½kέ…&£ήΦ— xHώ–…Νσ eΐˆ„₯gσΧέΆ—΅Χqτ" ΩΠ †˜uάάππαCάΈqΓΝ o»ωΛπ%NOO[ggg+ήϋ₯ΉY–an.'}΅\«[n5ΓΤ³βbrf‚Qς']ΆaΠΦΨ*E°S/Ξ„ΏΌ6˜qψ€ƒzˆ¬€"™!ΊkΤ‘Mδϋ'Ÿξ”ψjw‘ό* Š Έ½™γΖF†ϋΧ άΏ–‘ί‘‡Xšœ?yQΚΪ;PB¬ΌκΜD^+ϋTΠƒd^Π@/"Adρ7©¦ή¦™|n« ο΅"•Ψ1R΄iΌ)\s‘Χ $’:"‰¨ΜUιw8Υ8Φ†*³Ά”Ο¬‡I±{\B”Xτ$zZ₯ή@şνOυ‰δ·Ύ'F“zΧ $π«έUUΉv»Α`ΰ¦ΣιΒπΆš›/ώγ?Ξ?ύτΣe―67,,Λ2ŸηΉχs“Η^z«Ωo‰Υ4r₯ ‰Φ*ΑͺGe5ΌΘ§ŠΝήΜΔ†n>ΘbM‚m4ޜ ˜œ iœεvI’α9°>π€6k‡g_Ό*ρυ~%ό²AΗαښ—στΫnzXκ³:α|”0šQ)Dο§$q:er>šP΄IBΥ·ΐκΪ΅θn @Y’Yi4,wJβπX‡lε—ΧΐŸgEšθΕ° ηMtCν<υŽmΑAΑYBΗE8¨i ™]ωΫγΘIΈXSIVCO`4“ΒžΎž"Θu^YΙρΡ½>n]ι0χΥό^†sΗ1uζ»dsƒŸ/Ξ9_Χ΅[^^v½^ΟΝ‚…αm36^τW_}•Ν­ŸeΩ"„,ΌάΌ6-–gY‰ (˜"¬΄u"ψπ—ή /Ϊ_Ωf΅Δ±!7ErΌŒ%±™?cRΞό ΟΝ&vp’˜^·™h­η—’\Ζ½« ₯‡nΫώ₯’DρΩN‰Ο…_Fν³vαpj!λ„$€IΙ‡Βέ%nσ>ΖΨP+54ο- ­-΅€‘Xνm<œΰb‡ΐ²U“e»'σ3ρΏι…yΟ”χŽ=h“μ’›Λ-ά»ΡSvΏμO¬ΣςŽδ3σqϋ|1IΒ#ϋβΥΏώj$Ό2 ƒε^.us‘sτ»,2ΠZ3Œ–frXo-¦δ£|Yœ.(ŠΒέΉs @›ή&σoΣΕωΉu ΌΜφ‡S[¬zbbEQdyQδ1ω<ΖΘYΔΫ1™s‚ 1Td"°‚MS8%wΜ_e†^μQdίz{b©IΜ4η'𓳦η]ν9t HΕlηM-γΓ,y\]Ν@υZΞι<F|ρΊšZ%ΥΚ:&t[ΐέ­E©b~ςb&y7}@ Wξ…]σZήW6ƒΚcCCΓ= ΡτΜb‚ςY₯γμ_h~ΛT#9X˜ GΉΉΠ«άwΖΒͺ Α?ž…`x]1A$|΄Β) ύα$`2‹xΊ;ΕožŽπι‹…e I₯Σκsk -#Θ0~§&ΧΞ‚ΉεΞA‹Θώ“aΗ§”²c6_‹gct‹psss“'xK·`yρβ…Ÿ[@@Λ9Y’‚˜SoLώΊΜmdΩb)ςΌ 9•\uθ¬m¦εΒa#¦Iϊ³6bϋ*ͺͺ’ρlΞEΨͺΥ3cˆΗ@Κ%ƒ,Œ!-Œ‘ [ C_ώ§U Yއ€xY(2r§HaωώlΠ>{Yb8BΟΨZυr½Σ*J5ԁδ^ζοΈ­Ή€HΉn£ΡŽ†.h ‘œxΡ^2œ§vΎμ7ž Cp*ΐ²hΐΑΜ]ΪͺŒσ¬/XΖ°rΩZ2o•©Ξύœ‘ίΓΦZu”Κ₯Tϋr,ο—΅ΜͺDY‘³žg"cχοτ±2Ș—l $ͺw0μ΄‘<42©κ˜;η–yΊenΎ[Ϊϊϊ:ή‚0σν-ηΰεζΦΠ ΫBΌ£ΙυΘ²\?ίDB&(I¨†”ΚΔ3ΩΠT~YΦΑΝίωgQΊ5$ί‘ Bδ}Ρƒ“΅˜κω¨tˆm“ύ”_U&¨‘ wjτe‡ ϊϋς:χ™S†―ζxοVŽ•΄ΖΟnΐϊ`q<Μd¨³QΔΧ{•T3――g’CC’'© h+»ͺΫUΧβ‘kcξ©ευ ό/οΈS5Άl₯˜ ΰ†„Jκ…ψ¦j«έ$‚€eA³ ­£Ι‡#`Κΰ9;mA'Γxd ά•εB<ζͺV s@  Xδ`«σuQxš‰Ζ!π)+—Mξ\2χXHΗ9ς…GΆ°†- ΐΒ)€>cοͺ–WM΄˜! 
Z“Ε{Ÿι’/^‹9Ÿ§”ΪνpΔ;s`4«Ι―[ϊK3ΓςΪ&όρ?‹;τ―’Ψό#Μf₯(;Δ>Je" Ψρ‚&HaΘ ΐ τ&(δψ—_€ςΦ• yυ>΁Ό8±Μw6³9Π-½rή’wI{k䀇§Ώέ©d¦’vA0‹`ο!=TΣKJ€h’ šŠŸ™†‹‘š_ς>©ΔX†3ηeωXκj1΅†\CPž“|,Cp&7Žηβ:5¦{Χ1 CύΫ=tΫ^rdΧΧ[ˆϊ/žW‘{―Μ{\ΏwηZ[&‘;Cς%‘7ΛͺΏ¦@3Ό₯άi (ζ&@BX„˜zΌΣ₯ΣιΰΩ³gnnψ»ω?]ΜΌιζVθ*€Y¦(0Ν‹A’―Ίνt '«όGŸψ“―ρg]֊yf&5Έ\ΜGi―!μ`ΫΩθ½ώ ς’…kχ~ΛΫ Φύ½¨²uˆμs]!%†§ M‚Ω$Ε5·Β†k5ΐrΛψG$zF`O8H8:P<9rœ²δΔ$‰/ω³^ΫαΑ΅§)W0“c€p|±6― uˆβ­Mf”8b‘χΥ‚UnνΓnΓΝ%Α˜ ΡΥ†ηSΪόš3βˆΜ[‘χΠxT1Z• MyUAŒ’kζξΈΆΆ„AΏEαDΚηΙνžδ½Ί-y-> nΨ­ΝΞ|»+€CM H&ΫSA­Ά™ΎJΝΣ™C³ΩŸϊρΩόƒ›y—AAK.[—€'(ΛRž΅΅΅·Βό_.d2™δΪ –`—&‘YMηθω:Δcš\ίθ]ΫXYΦov·ρ«ΓoγωΩ-”±‹"‡Lψ!σή6cσ“M˜'ZZ³ρLΞχD(pkϋΫX½ώ6όπΛα—‘ηq ΊMRfθμs2·£H!Rγγ°6mλm3«s2SR% 93†Ώ&Η#Η|΅WγεQ…I™°Ύδ±άŸœδYZδΛ΄υ |ΡΘy³5†ά…‘p¬Ί¬¨Αd a ς\I¬Ά ‡L)5ŽϋΝ(7+‡²¦+JΥV=ΖͺTˆρx} ω}?S.š·”‘f5°€§ζφΎ2Θq-ις\ιJ2»μβfzτ1/33333οοeζύ˜ωfžyLΝά]Μΰr™QΥJW‘qΓΡϋ¦czΰ"2$Λ²ε’εS 'O<€˜)»¨$<Χ"Bl!c“'vz6ρΖ(ΉΟžχd½ƒ©/ό½ƒ‡ …κšh$¬πέ*n,y…S•βo˜―x³~¨ϊ!εμX₯,ύM‘Ϊolω|ώmaΦΧϋμξξΪ±y€ar1χ1xE¦²…›/=ύ'~γ3ίϋ7Ώw}ίΩόΤΙΚo%k©ω^Ϊ|·φO ζιΗc[ΞiŽ—Ω˜ Z4 Œ{GŒ{ ;»4·0=³€§Υ_;σmόqZεΤΚf₯1}ŽΙ’ζ(£ Απz&/Ο £σ³QΜYΙcJ ƒ΅ύΒ@vκ #ΐκ~€‘Ÿz[<hDd‘βφ§‘Οα―1|ΝΈO“ΓQπc^Μ/ώ‘” γ±Ω$Nˆsd†Š+{Χζ{πΉτSΝ”pφ’Ω«3R *ΚόΉBΚS £σ¬MΤ…”³MθΡ₯dŠ…Q0ηΞθ}€΄pΨ–ψμuz5Ώ ǎχgώθŸώμόΎozι/ώ–§ώγΩΉBΡTȞ­Α‚—ΡΫΒ¬―Χ‰—––¬Ψ\ `β-Ό―‰}άΘ&k^4φ§£`τۚήΜ8PδžrΣΙ¨ωβ vϋ—πβϊ7፭“h τ₯Ϋ·!₯‰U˜! 
و‚1‚ρ€Νv0΄ck"w‘ός>“cMν~Ν‘νΤ[P&h².Zď7d^ƒ_&­ΔXPqu/ ΎIzKΑ Ν’3DΥ οk0Ney8w7&~gιΉύFˆ­Zώςψ…Cπ φRM€ΐή "& šβ”0yy|myW.ΝpΡh0ίΜδ§Ο›ͺΌςS‹‘ϋ2iΰΤ‚)Όh„™τ‡ό†"hm’<NφΊ’ˆ=±΄‰άshάΦ%>uΕΓ'.ηqОA₯XΖμTŜΛΆH.©ΣΜ9jψϋ»ƒ±ςiu]H)q||όΆ0λk}Βεεe+6 @>>W…Œ6y‘J©P…!zCΏ~PοτομβφςβmϊΒσžK ±[κT΅αyΌΊύΝxsλjύr hFυ(bgI·#ˆb ‚Β` ϊ}6β™™ύ›μ˜ur½bOΚ¬˜F0“ΖΜO2ωfbB·;ŒΠθ) FΗ½K»ŽZZqA6W&!ŒEœ3Ε΅|ΓG)eό€ςiΈ° QΙ Ϊ_Μ @˜MοόΈ]IΟΰ™ ΦΉ ,ΆήgπΎˆ/ώΥN4ŠΓP„}ΤbxXz›―» J&‚Mο| `9$½ιDl 󡀋ΤχžΪ6n p£gΑ!4ΐ¨pΛΖƒ]‰O\ΙΖ^X‡ν2NΞVbΛaΊ”E.γuΪ?jαΞΚ6nμ£Ωι2Ω$“φž₯ŽmΫQβ$t:·…Y_«Ε &b£j W΅αΉ*―Τ’0$R„n-ν`η°‘ΌŽΎΤrΑΕβLsΥ)tΤY\ΩΌ΄~{έ*„p%·ζPςίτ¦ho|BΠπŒ`Œ06fβG`b«©²ͺŒ sΖΜ0Σ–34)$*δήNέ{Hΐ4[Ϋί Œ&>ΓΙͺEUΛŊ₯―)‹BΞ•mβ‘e\ΟŽ;!+{ΟΕaηSX=žFŽŒΈ `IXdvΪ8œέδ#γΗMΫ|zύύ+²ρq.ΞΜαfηρΰψi|iνά9\„p+Θ«π²%H―ΗΛCΨYXj"X©V‡?ΨΨCηΔΨΨƒ3Θ²‘ˆŒδ>­ΎG³˜@Νε… pzΪQ'θσΡœΕΓ¦B½£ˆόšqY’ΖχihI*ΈHύ—9WΠΉΚ9Aα¨{›Β Ώ 6φMVˆ83oNŠ7ۜXcŸΑ‚±ίl¬Χ^σψlc’§αα―άfφŠt,‚1(v‘&ε‡ΐΐ%μ#v3ζα eQ˜Z€—«ΒM@ΝΝΓr²l“΅Οα$˜_ΕΥ6ύ`tΊŒxΕS™ ,Ϊkx`¬D–·ϋt NT-œΤ€ΖD]ΞE9Ά)KΣΡg’„a3D¨; ΊCEf±$ͺA]` qΠbςΎΌΟ„Π‘1(B²Ν¦Ž9bΒ2ζ€ωρq¬ΥO hNβ|•9ΝH°NσΛΐ v£ψ“™r‘AΞΰ”=z1{& w Žccϋ؎«ο9ƒ΅Γ2f*UœšΝc’C!λη:nuρ`c«Ϋ5΄»L— ψφη.β;ž»„S ³Τ1j‚7σθ,δ3^ϊƒyh )=‘ΘŠή•zdρE±=2Φτ syœc)Κd0£N~@Y‘R‘e[Θf2†ΠΔΛ΄ΝΕs:F>ͺRleΊ)β*ξ―οS₯σ°Ρ¦/<“ZΩ‹A/‡aλ/>:ήρ;ΈΊΑwO"_^„—›"MX’υμΑlz#χΒ ΕμMpXdδΚτb&œ™’Α=ΔU0[‘\$’˜ΡV-Δfl7Ρ³c@³‘u΄ώKD*­Œ‘7$Χ^C‘Σ'π"!ΐΝZ@ή&ŒεΙ\` ~ΒΤQΣΐ`Β0š8"~djηO&τx`Θΐ93žΨΔ2;ΌπρˆΈ Μμ&0sgϊ±©#–’q™<ΉΙs=Κ+%n™v Z6>ϊΊ‹½žΓqoggpa±LΘR>8¨·ρ`};‡-Š(lΐ.ΰ[c/lqΆB΄Sπ²θ~ Ι’Ha<αΰθXΔ†V«)bU™„Σ™0ϊ£Ψπv3λ+|›υψIόΗθ-ΒLK‡š–φΠlέό§›OΐL_ ―ƒ0)Y(…¬cΕ@U P+f\tϋ#ςZ›GUΜOΟ&F³CΗ' ΅΄yˆε­CβƒE*ΐ Σΐαώ)Ο•ΛfΰH A―EΧ@:[)…0L,Œ- ‹"•V*Ieχ †+·β¨Υ-ΟVK₯Ψ@ΛΫȞ|ω€ί[Ώ§Ή¦δ>xm(Ε@s}σ υ’+νΘ–61ΣWv8lυaII€%₯―]ΨΆΕr8 ! 
ygdτŸl”TF °XΙΰ».ΝΗΕ³(fδΖ#@»»²‡£δ½…HH‰Τ€{z6κΤ,φ†ρΚΦsΈΌ}ΗΓ"HDΟζά‰Eήƒ‘x$WτtyίL’«G4nGœ“C)'p~ήΒL‘γ›DŸe?$1:"2[€ςi»υ[GaZιKΉ΄Ισ¦φG‘Φή‡ΩΌ.8!nJ!i‘Δπ=”˜ž¬Xb‚ΕoΠQΉ‡ΌφΖ,Ϊo ¨RΚτ―h™πy˜0ƒ$Μ3³˜ΐ#Ϊ’4Oήώځ…ΎjΕUΘ,ΆeT§Κ8=_@₯”AΉΰ€―[δ}m4ΡΧ‚ύφ1ŽvΠο4ΰYŠx4μ ϋq0"-ŒΪΣξ<Β=™ GΗMά]ήΔ½ε-΄Ί}8’ˆάcώ.ήY‘₯ψ:"³0kπνυ₯mχG?yε―~ρκΪ߈›ΖߐΆˆB•΄Ωτ±qΨΑqk€΄q{”ΈέA ΑŒxTHL™Οd2d PUs6fK¦‹Yκ ŽΗXέ:$κΖ~½… ύSE—¨Σ•*Zκ^ν•σ¨ ¦θFsl}>aͺK D+ΠC|4e…”nŸ;~τϋJ ŒΊEμόεύtώ]EZ!)’nL—6τΗJ_`*oΕ`nλ©K&¨ςp,\ σAMκ™#CςΖP―`p0Ξe "2­‚rŠ•C hH§~›ͺΠԌΙΧΒDXş;`Δ‰·₯`γ_zIβodΠT‰U~±„Ω©ς°W‹lύΫ‡MΚ‡…γ!ΊΗ5΄Žφ1θΆ ₯”°l‹"©Ό‡ιRŽŠN‘ ιz;’h?”#σ2― \W7ŠΦqύξ2ξl‘Ωξ₯(>xλ°ύ“ϋ[±€ΒΫx±Ύήφ<Ί€C½/  ³Skόs—Wώ‹76ώA£3ΈξI›\ργ6Ϊθ }ΧψHΥfi[&εnΊΘ€$‡έN›{5¬ν·Π% uωŒΣά]DϊE΅”EΑ“T…θŸq£έο»UάYήD³K\Ηξ-oύπςΪχώǟyρΟέ\ή»rωώΆΓ‘ΜΫ{‘Aψz.Ρ—Q΅ωXΪΞhΟl€ΗΘ ₯’huηψσ{ΝWΟ-Nύϊo:?χgcWύιΔvκ}4{c̐vεΘΘθ|5ϋθ FTΘdrhvWμ}όΖςώηΫ½Qžm[Y₯’…ΝΛ±wvνΔLρΉ§OWϋb΅π½΅Fۍ«˜*εko£@nΰJΓ¬ΔΐΟαn½‚ϋ‡Η8[©γΒLŽ@n$η%ž’V‘ΥksT>‡œμΉ… ΙIΔ›Κ š΅w Ψi§!7GDž΅bότΙH}–žG @ŠΙ—ΰ…Ξ0ΨρζΒ2>&aΦ 1υ6― )#˜Έ9œP}1)«pΓ¬'fX μH€5ρ‡–iΑA‡ίlKΠ΅©΅ξl ,mΫ€Θ£ZΞ`nΪA.λRˆΠιίΦ€ή εΡοΆΘ{²<$€I-4ΐ&ΔNIΤX΄/ŸI*—eδsΉΐΦpάκ€ΊRAu―?άύΕWooύίΫ«Wτ;{ϊ†ŽΜ;Θ)-m¬Ÿ|’Ÿί±rv<$\Ή”ΐ|πώ2Eͺ³²sόαύζKNT~Ϋ³ggX)ο]θτΗθτG˜)η‘υ\ΆfgPΨΰ&Ϋ[7–7>rcεπωώpάΰ !²Ί³@ XΒ‰aλ u7kοΕω‹Οœω-qo篌oδ\W)ζ1?]B1Ÿ‘~ΊL PπlΤ;6Ί3Ψjw0Ÿ;ΐωΩ>l”Βp€™}¦τƒBΔ π b Ήΐ^JΡΠ^:j0¨5AΐΦμ¦Ή"}·; ―cΆ‘ ©λŒcΡϋι>OƒeΕ£κΗ"‹“mDΌŸ=;³bΙΚ&Rr˜jςΕh‘= KαžψΝγCDƒ–KΗƒE ιZXΖ89nύβ¦rVΌMε…φ›w76¨ΘΓιω₯2CUJΌ‘z³G@–l‚qβu¨ΛΔ}‘aHžlKΒ›B˜ϊ{Τ‘ΒΚΒϊσYΘŠΘζ‹°l‰Z½|6ƒαΨo­v~.ώgϋΏnΧœ?5#s»b|ίΐR1EIΫο4μ±νλο©Y`°³Δθ1τΎœρωu-UοαVύgΧφŸΉtrϊwΖ€σ'‹9οd­Υ§Φ8$ °¨·«Wμ|θΞZν…ψ>hΘ !r†ΐAD+•ώά, %4ΥΓFοϋ+₯μǟ9=ύ›Ξ,”m³Σ+΅Ί’uΜO Ψά¬Χ΅“pΝΔώ ˆ΅.Ξt€9₯_q θΌ7gSN 6"S”ΠŠ =wƒUϋ =%μ E °BΘέ΅1‘;δ©Y‹Bͺ@6ηyšκ²@yψΚ€G¦C]§hΒοf9l›mt<€i)ˆ"Ν#†π(]ψ23ΦΪΊ~c3„εkΑΥΧtv'weΠ6υ¨|‰•½DJ:ΔΚΆΟΙbΎšA.+)yοRώUxΕ½Ί$)M8¨Bτ;-ψΓ‚`l¨ώ¦όΑ@p\ΞΡSZsOηni;νQ%ZM±„|)ρ8]Μ΄1υλK{?χ‰Wξπό|εv|œυoM:S’αΘ^ΏΎjƒ=1 €^ήMΜ~¦j°,a χΈέߍέϋΤλ+ςήFύ—#ΏήιυqmŸ”7ŽΫ€¬— pΝ`a:‹jΉ‚Ζ`΅nΗ½nοTΠK β…IμdzΊRΪΕlu΄«€S‚*yΔλΔKζ§Ρ4ζΛ\-Ί˜δΤ¨:f›rB™ΓPΡ€Ή˜b†f6'ΰ#²Gν‘PΜ`˜ύΥ°~ς‹ί ΧΕ :@ή`Θς¦Β+„1ΣMάΎ’Έ³!pεΎΐήQGM‹Σ•ά DŸ(f3t|R­Ύ·Ί—δΑτπέƒvΝ£= 
ΊM’H—NηP¦ƒ¦uk‘•YΙτBΟs?©€ν8(–+X8u₯Κ ιΐs,„AxψΚ͍ς_ώ₯_ωƒ~γŸgr™εΫwrίράIωύ?ϋΌϊO?όgu«–܏Cξ›:‘ομ­€M°=QΦγεΟL ΅ω1-!{k΄νθη$€L T½[+?ΉΎΧόά|5tήωaOηΫςΚ!jΰρbξ`ο,*™Αnu‡υΛχv~1}ξ[/ΞΆo½0χΫ½!:›CrmΜL¨9=Ÿqθ?| »A¨°Q―`Ωχ0_8Ζι!fς}Φϋν;AΘ |ΛfρCΑωv€νLaθ$Ό+¬–ψ™―ψ>bϋGzaΔ1β½I£Ρ …pΟ(=žPΉ=g(EXiAΐνXμ₯±τ5"…‘(Γ/œνx(NŸƒϋˆΒ!Βa‘Μ@Iϊͺ ΄…@ζΨR©ιt6 θά,pcCbm?ƒφΠ¦Κσ©9–’mYTά9<Ζq»‡ΡΘ§T€n³‰aΏ h>—”RW‘•()εGθφ"ξS](Ά”(NU‘Ι`ΫR· τγƒΧοnύΔ―¬ώΔ^½σPίσΕ₯υΐπΗήχŠ—¦XjυΞ€δ˜yΏΫl2Yh‚Ϊ“—πyτ1VzO‚ΧΎ­"€±~lk@Λ€ΏΨ@'Aν8OΆہ~/o‰L@3φ+½Ÿζe@―75Φv―ό–ο>χ³€ό™Lρ‘»;ΖξQ 3₯Ζ΅η9©ͺ,5uνYμ΄ͺ¨dκ8W©a±”:š "…‡ˆAΜβœ,@‡‘ΰ‰AfžW· q˜Κϊς)p‰TϊG'ϋuZ7ΐ›αkͺHkJςΠσšΒΜOp.`ξXˆΙΎΛ4€…‹ςΩ_WτλPA ©‘‘)C^›ώΜBŽ:Oε ˜fΟΒέ-‰Ϋλ|• ΛΝ`¦,)tΣ$RŒ|φΝv7 η¨rΉ0[ΑιΕ9άΈ~ύn›Ž…°™‡¦7Rγ>_‘V' Πm "PœŠ­D4 †p;ΙεξΖUΗώΥ΅Σ[:}RΤχuOίϋŽΎχ%8 ξ²b{Θή ΤΤ#Τ2Δ€b˜ηβ$¦'2ϋΖη`―Μ‚ιΑ9§FΫ‘ήvιΨGχ€ Σ#ΰβ° h–~œ¬œψι’•‹ ƒQ„³Τš²]kγ°ΩC΅”Ηάt‘*©Ž΄αΉ ΩPΑχσρq\ٝF₯ΡΔ…Κ!NLυ!0‚bt0x p0½(aTY[h±‡eH7GJsΗΐC@θIs AH²Bf ™ω*ς!$<ηhςJΣqJΥAzˆ‚> (OFΓΠμ€>WΪΛR(i6ΓK›„pΤ±HFηξ¦ e¨u¨˜wι\RsYβd9­Z>½ΦΒβ|BvΑt₯DδTϊšΣΩ¦i2,:Ώ–f§2 ™CΗyGΗu‘ΙΙ“R{`ΠΫ/ί\α/\]ϋΉZ£»–z`" bxρύϋθ ε»5Gφψ Ζ€βk ονΙxk›QZVdόx¬ŸwΑ_z[ΣΥǍυγΨ#kπί@ΟMœš™Jc@£\΅JΙ€$‰;žš-αΜ\•‚G7t¬ΉNέΛ›5tϋDAΦuIŸjΊμaaΊ_œΔΥύgρόςyl·§aYIXΚά*>±ΣŸΜFq₯Rγ›‚Εa ’%λ£™κ€άs©5οYΠ£ΞR«₯=ΕΙiαόœΐό|&ΞΎDc@ω@Hkΐ‚ŸΖϋ#’ώG@Pϋ2‰ƒ–O]sγ6’<–χ¦βk:‹ 'ΚΤB”υ(ΔG―?DBt^Ϋ:$iΛ²0W-γ›/Α3ηNΨaΓΫνv0Ω|žςιB}]© N7OR'¬ν T™Euξ$ςΕ)β4Ί2‘ ΧγN•ρο~ςω_ύ ŸΏυοb;Σ($ψ·pJ%[dΪ;k‘_ϋ*£l+W˜€φτύ9Ό4AΣT έο x;₯mdΑUG―CώάτηΛ¬μ©qxc)Ey*ž eY‚δ·³3Ε$fw˜Μι€Q7­ZΔTΌv€ ’¬λX(΄{Yά8˜Ζr½‰³S8[νBb€›Ο‘¬‘Τ ˜Lk„˜Vh PLƒη©δ¦Ž1©Ξ}MŠšRΨζ·eτ>Љ‘· !ε₯H 1Aϋωuζ-B`c³&pcΝΑVΝE>›§ρ€.ρζ$yZ‘ I5ΈΦθ Ϋ”ό—8΅8‹“σΣ(d3i›j#VR€n³ωέȁρό n&Kΐεx@P’HΞpυ»›ί·Ϊ½?¦lpuΔ0Τχ˜όMλ!§XȞxε{@6Ή¨G€šEφ•d' ΞlVδp5 “5`Oͺ²’Mͺ` τZΛβ9ζ"X"(ν2€ a““―=ΐv!$υΙ"6ξn άXuqΤJ<°l>&0’Ώ!KZˆjU }?$`τG#œ>1‡σ§i¬šboT«‚(j!KΏΏQ4B€ψ{tΤk/›CX¦D>U€‘(‰ίθοΌpmυb*Ε/Άϋ£G FΆNά #'Š^΄Όd__OmLf€™υ•Lil°g&ΐlgŸo:Ζ?—W==^Σk„ήΆγsR2W+h΄•Ψ…C’Ϋs$εlŠR>Z ˜ Ζhuk4€uav*΅"υΦ…ŠTPΜΙψ+"ΆΪ=,䒐σεΜؘ© ›ι!AΥHCγΛ²ˆ >B 6f5Ρtηx ·%…Ίχt(‹4j¬Ή˜ήœIhU€ δ@$ΐ₯/½%³πr%²ŒΫ»\–h²˜*Δv*I:a’Φύ‘έFƒ<]ͺv"‚?‘Υn“Ψ³N‘˜Ο"ˆ ›†Ω€Χ#D†\JAœΝPΫ²mdryδ %x™yί"Ѝi―ΦΌφ͍xνΞΦ‡¨ν 
πT\…”:t΄Α€š©ΎzοΩ7*¨ρ—ιή‘z*~^7›³fυS€¨C€ƒ\‡7υΪθ3:ίΟH}η%%„$]L7>€δ±gδφy.ys•nέΉ£½]δd€Σ39β`96ε΅HT°ήκΡ Θ΄ˆ0 qxX£$‘V p$qΗ臔’<.A9FξΓΤ±Έnk’χ§N}NΪI@–ΚIΗΖφa덯―ύ―XμσC‡ΝnR±Ξε<7 ^WFc?ˆ.žšέ_?@·?vΎωβB0ωCD’α# ‰@τ]Ϟ’5ήε‹€ ΑΛΫΤήί6A F~ŒΧΦ[PD{œ‡ΰη]½ξλsY΄Ν•Μ‘^GzΏ ^Œ ­*5›ΚcΊœ£›?₯%Ψ«ϋμ,€d‘–1Ž,EIfΟː'‘p*Z1]D½=Δήq—;βό%­γό…œεbR+ηϊC‰ζΈˆW6g1­αts₯!\'€’nβ†₯ή­y$šΡh 0(ι”™Yzδ‡Œfšί£Η«qkyCnŒ ΄pweW―ήE}oGαd ςΜ“¦TDΘ£f‡h,)ρ΄έj’Υj“†rΉl›ιƒαa@Λ¬4hοUΑJΕαl€΄τuQb—…©χΆήy>–|ϊί―άZlΦs{Ώω{Ÿ’Χ—vΛ'fJύΝƒfxv‘bΖοΉ4Π@ΗΆνοxϊ„ΏΆ{¬Ήφσ‹Σx*’§NΝ€―Ψ{‹όΥίvoχ%ώΨS΅σΘψΏ»Wo»q萆 ίςdΊξ68ζ)ή瀏 8—‘+M †ΩhIΦ±uΨJΚμ(x‚ͺBšϊ€”“ό<Ο%¨…΄ΧU2ϊΐFИΊD΄ή`η¨Cο›Hp·zδ™ΕΤ j²4Q=£ΰη*ρ1ΩΠf0WhαB•ΪŸΰ Τα\  ς˜³0$,₯Œπ!ΓŽŒ‘™ί":ͺq$7oΓμ20^Zˆ&lΤϊ6vpχζ4φΦ‘•!.ΜηA˜΄(―νΔΗ‹=HM­ϋ8>£VoBJ›Μq¨ nω’YtURψɟ‹ΥY•%`EŽβsllοΑΙ–Q-9τΟF“£€ΎτΉ7—ώΧΝεύ/&mo1 ΪΞΈM8…±ΪJ'!ڞ[¬XΕ\&Š΅ΒŸ Ÿυ’oj18w’%ΰ»Όu„ο~φδdιο-2Qš|.ΑωMίθg\GΖ7sυΑΆƒZ‚‚κ-BN³ΆωyI6I,ξκs8ΐΪ0»ϊΜό(ŠΖŽms!Ρ;λτ# Π4iΗ‚„’΄χ`ι„rDΙfδ) qσ8”ΡΎRΖFιtύQ2`·‡ύz7‘!Š­‡B. ’nLε©xΰ9Ι92ϋN|| —wgPtκΈΪΙJŠ’σZTJ±ΠΫ‚‡|°Π"‡”œμaΆ-(InΆ:fn-‚­šΐ΅[υ\χc(ΊNΟδιšP!φZΔΏ‹·ιΪ‘:Δ°‡A―`<‚DDήp`(«ρhLΧ;ΉΖ”ΌΧ`²"$‘νΊΊ’~άΔΞA¨1Ά-!]"Ρ€ŠϋvΏπό΅ΥΗή5Ω±uέ}Ϊ·―Η3IFqžmΏίΆνΒoΫΆm›Ÿž­ΨΞ “IFwtΩϊχΩ}ͺkWΏJκ™½«NυM§Α¬»±φZΏσ‘ιg1λZ+ΈŽσeχμ0Γ’ͺώισ‹!Ύα·_3ξ«ΐΙ©y}rΆ&Ώ―πσΈ6ψΘσΗ¨u˜w­θαΫΆΒΕ₯5xuda|ώb|4;•§)Σφ'φœ±^<(­4i¬¬΄YοnOŽezŽΧ§qJυ{»Ζϊ?Ώœ·7b&Iϋ{«λ-²’“fm€F˜‚~ΰbp θ$ΛΙzœ (ΕP RHpsή£εζs3σ$³ŒodTvζ‹ΦŸ+ΟΥ1“³ΰ•™ œΐgΆt/ΐ`q ?―G+5q₯qIRκ1N‚,Σ K©ΐ*Γ[™‹ΌΪ—ž# „8{Iΐξ“:IωπτUL(δπΈDN•Ίχ€ΎZ[m¨i"αxΝuZβ>Ÿ*-‰ΪBž ^@%ηz½IΓ•’]PξτeeqF,Tsί€ƒω…EμG.QYh:έ7u!§“ΑΔlν£Ό|ϊ0{A°‰Ο&* TΖ7t―#0ι――›ΆP)8p†Ί£έG§ΝηφŸƒ―όΤ›:·\5 τΘ~8tzέ2˜e`―!ΔΟ}Ϋ§Α‡1†Q―ω:Φ“{ΟΨSk6¦ϋζψjt?=Υδ}/VrR€&–6Θ°Ω.h`™ΊΉyCΧ'oξώ"\uτ}i«³–]Ν¦ζ»$jζσωxiΉέ&… δ%F dΠJ|(Έf—–›žόύ„hya~»ΧΞa?νΧ6Λ^¬qESNΪι΄-ˆβ’—>n₯ξΡt/§-cΙ9Κ«`! 
Ιο1f’I°0dfB™Šηϋ”•ΎO»„&Ίέƒ[aΛζmΰ5.‚‘%ώ‘ςš”s”Ν)4Σ4ε=j›$Α=1―Γξ!ΜΦ,¨–Š₯Š2/׎y`Vϋ_”u&ٞοwhy»U_ΕŸ²EKΚ[Α₯{"©ζΪ€WΓ€%ebŸχιΑπΠeh2VVΧazvj`žOZω4Hΐg=λόΏΗχœω#TQyΖΒDmKC03Μ|2νΆ«†=Η2CΛ5|σŠp(γ뚠&Ύά]“ԐΧ`Yrςa όsGnΞjϊ];Ϋ7nί(°§f>³¬sbrΞf{˜­ˆa°'σ2 s0ΊL‰#DπάΊ‘ϊΙjŸŽ%SEfb€ζjκΨ?s ‚ &hQΉ”3$` Πά ‚^£σ–”^Φ…ωε©#ηζ=5½°³)lωΕϊ1>6”«[6vίΌucΧ=Η쏬Zv‘ΊQΒ―‘PŠ ­ŽΰB‡%VaΈ²€;K˜5ΠΖίšA†šFΜ ²ΤŸ2L›2²m[1#kbF&hϊͺ=(3SYJT?: Έ)0³t¨—‡nΪA5Mΐv ΐΦπ)“άΐλ@«ΉνF=.δ»–J‰?αͺ%YΫ}LT(hhoRάsη­τgΊ8Ώyΐlκ‘EMμiύ;–rbjώf`‚υOΫ|·ψϊm΄[vmςžΊΫ†{}$2kύ]…Θu¬·€–…ψξ/Ώ.Y”±~{lχ)ϋ9άs.³ΰ΁'Σ¦>Ζb`lƒ€zilq=ΐ/=²s΄ο3Ζ+ŸšsΜ‚‚Zlξ―P)£LμτΩ%ΉVΣ‚ ’μ“2šΩΕ•³NΞ>ŠΝδ}~Ά@υλDJJ"ξΡGžm™Eό:Χoξ½·Z΄7Εέ!eΌ€ξX΄=Ÿ€³Ρ0ƒk‚WΙ0EZ5ο“D8€‘JMΈi:Ω6ΨΎ}ψυ‹ ‰€΄€LΗΠ5’Q¬4ξ@›@lr*,Κ*‹ψuΔύ’”’z#¦9rj¨ΎRΩΎ&ίι ―­ξ'™,‹ΘLΛ†\‘†ε$%iΞ‘%gΈvΰΤ…~bοΩ?ΓΏίWԜ#§©Nžq½Θ† œN†χέΈ9ΔFτμsδ¨υ%Ÿ|ύ[‰e!~υ;> ²xmαX†ώΒΑIυ ά‰Ω₯\Zύ‚ƒ7ΎXΞ]lm#d=53]ΆbOeγΞ‘ήΟͺ<„ί‚KΛί”‘Ω€HκAΌ–δ#ΰāӳœ9Ώt0Γ[™βƒ½ΦΒX!Ρ7M#‡2BW£ΓΤ}=hœ‘ΜͺΘΚn §Lœ4]Χ¨GΦμx(>fƒ±€τPqΖ»— ’‹²ADΠ %ΜΘΆοΨ„τ½Z–AYΨΊ{OprΖ$!C©V)Ϊ`Υ€H₯BAΝόΨ¨C@§Ωε#•’qU‰kώ3ӎ(yΝ@+4$€‚€«P*Λ5’,ς£ אσυoOο;χ{h^sDήN ψ0ΧΊ Άlμή²5,䬳c,dφΦ…xΰ¦-Ελ/”48ΨWΛΟ.¬εfj+Πh‰t)θκίΫvμ@ ›CN[/χΡ‘ΘΓp]θŠE ©Μl΅}Εu€PΤ©T%Î(‚Vs&^'aζ'e"­,ΊŸ)ž­‘r«ΊG=4ZΚ)SΟE¦ΈϋXC;΅ΏFβί‘΅ίQΎ½ΑΤ„Cuψ*4xϋ—Ή³Ε[ρΛ_'^>2e?ΎηtW§ ž¨4‰΄—™Z2SοξŽ³€ =ΪB@Tr!”Ε^GρΥsV€^»ghΊδ€ΠΤ?eίπ™7ώ@Ή^\mPο+Œ,“μϋΙxXφ"ˆˆΪ~HԍFczσΛpη΅Ψ<> /μ›4Bpr`S_‘JVω9t!Hβ{nq…ΰCˆˆJψtZMΚΐπ΅"' `%D f£¦i¬Τ@ΰQMAzΞΙεc-|;ιΡ@3°Ή}'/όνγ{Ξώ ξDž΅Ιρ™^ΫΩwtΊ3s±¦3•+eh£Συƒ$)Y?πΥXOξ9S89=_\«·]‹eLQϊ5½€υΜδ OM0ΐγJΆE6<€Χdι2”ƒš³ΰ[>ηζοΩΤ[%Χ§¦-/ζ»x4hEμ’EuMκέ­5<štr‚ΛσΩΊ9“8`2Φ-@ϊ>Χ&‘ICΧ ΧVj”‘ς+“Ώ&0DΙ’vμλI™O“ΧJ?Ÿΐ,ηJ+…‰Ά‘igχΊψט] 3NKx(ρέ}ΣqzrΎ=uaΡβΊu\κ`eρΫίσyζ“{Oη?ςά±2R ©>VΔJHƒ/²«η&λb±2Τf―5uLΦs― ΘΨ3μπΎρ3oψ^d€zͺ/ε‡!‚Grά2 ‹ ’>Φ –›»ΖπZ¦΄ Α‰>]Ϋχ‰87άØBA2:Mε–ξC©X@ςi/™Ÿ<σάσIΆF1q6qˆJ"νΌ*+‰?ζΥΑ‹΄ΐL‹^ -ːίϋyt$ϊλ§φϋKΜΐΞsp7Mκj/πχ/ελJWΘ@,²]°…ς2žΛ¬4(,»Jϋ䘴q˜n΄Α~αŠ@vεΑ€­ŸsσχŽoθCK֟4‘QoLfiΈrEDR? 
f>QDvŒ ΐΘ@Ψρέ$}ƒ\ˆζ—ΧΙ‘("Ν5ι9Cόθνͺ@ΞΝΑ"<ςؐ ‚d™;œ–L&ÐΆΌrςΈy/@Hε˜|απΤ_>hκŸqΪ+ΜKυΐ£P˜μ~Δ{_οΎ.~:Ό+‘³οZΑ³ Δ=–Ii—‘J»DiŒ|Λ]Χ©τLΉkt^;ρkxΛ wβ ‘!„Ζ€aσM%cΞ1U_ $PλφΜό ‹\šž]‚₯΅fAΠpΑψ¦¨ аΒz½“SS ˜”Γ’jσ Χυητ AVν·X]3hη1 I9σ܁Ι_ϊϋGόθžγώM^:)Cg‡νњ¬ŒΌ‚KwYF–…PLŠ’:.0 4ύΐΙ0R‚’|MΚaΪl!/ISSΝ+{dί7_σό ™ψDɎeΰ+,^qβG{€²0ΊΏ ³3]±ιΛE†ϊΊho2‚d’(νκΈjΛΛπΒK»ε³ΜΔ’i%£Q€κ™¦A^iαkšAΏGΓ $iψ.ι faŒλXΩOiέϋ\x”qΔ’Χ™e‘Y¬™_e%(oψSΑ–•›.7DQχrά}ϊ2„^Έ Vǚ­6—²ώOΐ†J;*5I:›!š-¨‘ΠaλΖnrOG;5°„œΚΉ!ΦΔ_Z\‚=ςXbΘ’@+Ιΐ }M7Ό P(UhiHδj€I1Γγ`φ\lθ±Κˆ€–RV€„šμΝ!xk§Yd@–ύ[Ω*SλΒSetΞe2-K]mΎ Κ²3#ŠWΞΘ>ϋ¦οCξΪξU*‘£@(!œΖ$Xε$D{š~\κέvΓθλ©Xyž—,k+U\&šh`FVƒ?ςxΊχ•&³š’j⫬MyBΐΦ?{`⏟?4ωάm\`™νƒoa0σΣφ€ˆešύYΌEΑŒO.ͺc+@++p#ΠJk£1wtCeiuumπeu–‘]I@Q 5ό[­ΨΆ AΠQΩzΒΆ'`Q=«Bή…ξJIQ%€±ρ)H,ψ-h4šXά’\-ˎ'vŽ$wπkH£3W«οlχι?B_ΘΔw•άc{―Ν!9m˜XΦμΟβ­–9¬«AΑœ΄ΨΔR°Χa*γΠΨkž₯πoή>xW΅«zAHΦΈ$$·σ$c’~­ %Z‚›{ˆžQ*Ν‰Hν–dv”ϋζδΤtςy5εψa €JU(UΊΙ‘ˆHs$_-`~ΉΎϋ#/œόαzτΐOα>γΟ φχ° ―ΑlόZ¬ΉΌγ,ό,²2‹W3ψ™‘kž~<ΉTΩ©±©A@.Υ?²ΨηχΏπώ?€ςB#εb²±€“Mιž†W²‚Šd![*Τή{롉3[™ƒ9+ι™Ε₯%ψΔ£O$ž‰Ϋv ΰΊΕ„½Ÿ³mϊΨιK΅—BρΨ{ΌΩφ|η›ψYd₯e‘:Ό―ΥΑΣRΪ4τ¨RΤe>ιμ@&+7λΌ—VoΡδά”κT\ƒΦyLεφ„‡¬μSΛ’¬Š©MΠkAΩZ‚Ώ|’ }"’+ν?bŸ21‘ιΚ‘(ž˜NΟ՞Aλοο>>σφν Ψ›)…Η²1>|Y³ΘJΛ Ψ8§ΜW(3xζΐω|€™‘šΪ ΞeΫΎ©ϋΧ±*’͏ΪlD6ul4‰™°η‘Μ½Ζ p")-iE(ΒdP€ΟΣ‰ι±κjΗχ`fvZ@©Š£‚–A".p?ώŸOωα~όπ/‘Εq]žΒΖm’¦‘kΊ†Ώ–,ŽΎ²Œ šύ'²--ΔηDwΑόuθX:aηφ ₯-dηύ{Δ5#Θ⃇&—Σe'— ρδGmXΡ:\uW«e`έ{έπ·Žuέ„d*BšdΊ€\!ŸlΛL@Jc²9ΚΑξΊi—δ“%>|I²=^k«0;_£ύKΓDϋ#ΐύΗǟ90ω»ΨΔΏN³˜3s’\Ι΅κm?{KN8ά›œ«ιa$—κNΩ΅Δ΅£?^œ6έΒ-Ε@ϊ uε>@Xβξ]½Ε3ž=:Ο7ψ‘†?c«ΥuO#δ6Υ³$Π¨λš…<²[Qη3€λ“όΔjψΒΤ‚™ ε‚D$†θ8Ήn#βέwΫuh’f˜¦EޘŽ ΅eΈpi‰4ψ•Β9ω~ΰŸ›]ώΘSϋΟύΑΎφ†Q΄lš6ά—· ŽJƒ•zG„h2Ι,„sΛ-ί—ήFδμΚΫ†JώΡ©gο`$Ι0Ž—νjklΫΆ΅Άm#8ΫΆm#Φ™±­»ΰΜθέW“ΪΛ:ΦvMς+gώύͺω νθό•―ΰm°&Θy*nΰΟUΰΑbPέ0+`-¬‚εˆΡ֜π“3]e?¬«#«GjɊΑj²֌T“ύKšΙ‰•-δšM½δ¦#δjΜoή9LΎ|ζ8ωξΕΣδΫηO’ο1ϊΉγδ½;·’‡ŽΝβΈ!rϋΎqrϋή1rα)ςΐΡΩχ-m―2?:ŒΡš7r”]@'Ή°&σj{iH«sΕ Ν[ΦZŠ]ΚB}#τTFψ‰Ζ;Χ–f€¨ŽΡ@]ΉYφ Ί™‡ή'€ Y7† θρ£ΆΖΪœΐ±»šgg{Κ~\=TEV U/mΩ@%Y?VK/o%ΧnιGΘzυ釳£δ³'‘χοΪF=ΉπΌξ94EξCΐξ=8υχξ…ΦWΚa?¬¬,°‰Έ-›QK’+3¦Š 9Άά]–κςlE9»©ΐβZlEΖδ5v5Ō7Δι‚˜ζ‘l,ό¦gΞ|,g‚vVΘDόΉ ¨XζGm…$ς;ͺ 
c/,τVόΌv΄–¬<΄ώJ²n€ŠZΦBnΫ3JΎxϊ(ωψޝδ±SKΙ­»GΙϋ&°iςΰ±YDlϊΟν3MΟ₯άA𦽀Ϊ!]173!Ε1^Ϋ’Ž+ηωΦbWι« σ miΆ«<ΒηETΎ§"Μv–…Ής΄ΙښΐT€ ¨@vπΩΜs"θgέ0 λoKA΅΅ X&‹όώΪ’Ψ[Kϊ*~] Ϊ`ΥbΜ–Βž…VŒΐ–;φ“;χO»LβτqΖ‰ύΉ{‘νιΒ”ΫŠ€yα s,Ξu4A˜’‘ +biΚ” –Ržγς%)KΞ «’₯‰RmΎΓΧζ»lM’ͺ(iΠ@²Λ™…@ΰμ‘{ΡΗ‚:˜,ؒΐΩ<ΗH†*: Cb{'Œ¨2¨©,ω>‚φηΊΕ a„†ΣΝ›w’{1{ψψFcγΏn›iz(?αtυ0«¨cȊΐ³N2l*…©Š6pŒ6ΦV,VδGŚ’Έ΄€·œ[3ZΗν]ΪA§£ΫV•‘ d―sV‹άϋŸ=kYπ)ŠΔ+ΉqKβ9ΦQeAΑά ‘γ+ƒ:]74—'ίGxώέ2έDξ?:Kξ9<υλφΩ–ϋσv-ŽQiz1ŒnΘRΜ’΄«-¨5E°ρκMr[eF‰Ψš‚„φͺΎ‘4)4”$EάΐΐLw•z(Đ*έ.Ή#ΈΘυh’έbΨ‚€#Έ¦’„LEŽ9š! œƒu]ΰX²όΣш₯Iγo¬­»?5½€Ι4M«8ήMEL³»6W25Ι©/NΘӝ₯`cXU5†›‹x/v}υωbΖN΄—p@adΖτ7Π@žΛξ .rΓ€¬GyΡsuΡ1dA‘xu²‘w2Ηxt`ύeΑ'2 ±tΩ’Dή± YΕ6;ξκH’ΐIΙ°!DmΓ¨ŽΑΗηpzΙ…W^§:[ @{“Ϊ»k„ˆ  Y·άj΄Τ€—_|w{oe΄O^βΰπhb’i₯g ϋ 뽡ϋήΗ1e§);N8$/>ψ¬=B«pQΣΞeOAΣ`mκ•έیΞΦ‰‰ojSaZαdψ4χ{š%\Τη:εc½j{•0OTύΎ κ[FiωuγTΏyΚΊα[T^~?―Qλ¬σg° Ό.•Β,ψΗjφIENDB`‚cargo-0.66.0/src/doc/src/images/auth-level-acl.png000066400000000000000000002602741432416201200216050ustar00rootroot00000000000000‰PNG  IHDR\Β’Όββ`ƒIDATxμά·nΓFΐαΌΣ½‚^€ΐY³F½yσδMKϊδ%½χή{'-9œ!  ‘ γτ„€“τ}³'ύ}<ρ‡;=`Ljbˆb Š€(’ˆb Š€(’ˆb Š€(’ˆb Š€(€(’ˆb Š€(’ˆb Š€(’ˆb Š€(’ˆbˆb Š€(’ˆb Š€(’ˆb‹Σ£IY”ΓŠ’œ}·φωCΣ’ψ§X«φ£ΑΎά›&Έ3Λ§W”_w[Ίθ<Εh?Ÿ…λνw‰Ϋϊθ¨ CŠΓwkΥΎ[†AΕ1Α½X>εqKη!’ΟMGαεMΊ5Ql†”λŽb΄οŽΓ ς]/k€εcΡέ‰η!ΐΕΕEŒq±XΜησΊ+6\?Δ~”ύ@cŒύp°QΜχ擝ΉϊΧum7 ί(†—@ΕμDFδŸΓš¦Ωξ&υ#–Ζ@Ϋ0?OΓmŒNΣφ«ŸΫΏϊζ£(&ŠAώD1;‘ΡΩY­V'''Ώu“³³³εrΉMιDξμڏυ·βΩΊwD± Ρα–fŸ·iΫ½{X†!₯(&ŠAώD1;‘Ρω9??-—τέd»[˜:֏ψ·τΩ=’XώΊ―·6š>ηηΐD1Q ς!ŠΩ‰ŒȌXέΫ±J’φ6ΰΌ нqP„;ΏΫzΕD1ΈQ Q ptθ·[“ŠΨu±ίξQζ}0ΕΊΟ'αn¦Η?€l‰bΔwΛυΏ‚(–Ν’³e=€¦i~»5™v Ώέ£lš&e ΕNŸΫƒΚιΑ^GMΪ8’˜Ξ[ΌΣύ(–’³ ί1±ϊR†Η…0}@‹³q49ώ!~tπψΧmΚ•(FΧλ€(–Α’³ε9€£cb‹ΕSŽQ¬~rΟ5Χέ¬,ήHΕό$Ÿ([‹Ε’o"Λε2ν$ϊΡWU΅X, ŠmΠwξ0~¨MΏzyΏΓ&Ÿw)’X¦Q Q `>ŸχMΔνΉ]ΎAYUΥ|>OωD±¦aΨώsuΊΤ~> WώΝiΚ€(–'@C¨λΊo"i‡QUU]Χ)7€(ΦΌq†?j gϋα ε,¦ ˆbYD1D1€κRΪa«Υͺ]Ά)3ώQ¬{|¦O¦Ώyχ¨ ΓFOΦ]ZQ,g€(†( ˆόό {ηαζ8u5ξ#ُ홝₯₯‰P–’“Ρ›θΰτDtΣ•NUͺLυ—bŠΣœ‚Ώβ„5E)&όLqΐ,hωhΩΥΑjAαΣοΨgQfwΖΎcYZί™9ο£GΟ•-ΛWυήΡ»ηžϋ°xταGN=ω”£Ε£ξΌύŽπ#μCQ ‚€Xβ8 IX˜βŽ>ΎYϊ ›Aσ=Χu{;ŽγzΎ?oκ{X‹9q±"|ΰ».TΟ²,pύ•$.ΊwΖ™{g–ξTrώŽ'EΧΕ?ώšΉ;\f¨^e.+Ά ₯ϋ_F› Oν–O)FύEX»XzU‚ 
HˆlΩ²ε'₯Ÿ|ο;ί}β‰'(-Θ.ΠώdSS°¬™žω―όΟwΖWlΫΎϋΞ»Ώ=ύ™$²’՚¦Ο»MΎ'bέ.ϋ v·ΞώGΥn74EN W]ζ TTκ1`<£inΨcυšΥ™ιΥ™,„€A,Ψγ<a‘ηzΈρΠϋφάkz*‘bP˜žως­_žΏ'±sΟ–a8ζL&;3•ω»KύΠπ㡁ˆARΜk•„>T:^0». }P*cC"άm…Έ£H1Ο2TYˆŒ”/ŐΥΝν” 9aTΔBΩp‡ξ’ΜΊ˜ˆg–U™-›XOυ†Λ~ΠβHŠωVUW„ςΕΆγcΗ5±Ε‚Zj,Ί₯αoœΡƜV5' £+TΒθ,ώ*–¬‹π£cb5«Š4ΊΚΧZ6ίν6ωžˆqkxλ/Ψέ:σUίni91–^• "σ­Zͺo}γ›«ΣYώ‚ez*2λˆC{κΙ'{1­>μσΦ[omqΆœ";tXoOτb|ίš{,[8Δς£—ζ`‡™tW±αrϊ)§βΧΝwvΤ‚ )6:~M…‘τ>Οn9'τAj8$Ε’DŠujšωfΔλuΠE!>Δ|έτ‚!pK2ϋbϊfχ]Š5]vSαDŠΩΝ²Χ7ΦΝ`|΄±Ε‹¬[#Ί'©θ -ε…x*¨`Έ¬XBR,LdvE•γl)…Šνσήn“κ‰Ψ·†Οώ"ΊsZe1ω^• –7$Dώίγπϋ?(μη―n|6Aos@αuά‘ΗL§Ά{.τb°ω‘c{φΩgaŸ­[·ΒϊΫίϊφn3³ΩTχIOβώiX_tώoΌώΖ›oΎ »½ΆeΛU—_™™LΓwQнw½žzκ©y£Θ}¬ΓψΓτοαχΏ‡2΅6AcŽs QΚν VMϊ+΅ƒ!!)fΔ)b”HΑ;UU@«ub”bΎYΓZ()†B$F”R3^]“…„Θ•άΡά“†σ­W+ναΏbJ1·₯ŠBδ ‡λvΛ‡γΏΏ`H1·UJΎW%ˆe ‘ Lρ°Γ§SipX_pα_ό”PŸ ±±ϋμ΄]YT]οy|βIΟ<ΣΝ‘ίόλ_Ϊmv˜³ π‚=χΪmχΫo»˜zγMπuπ"‘Αα2“Ξ|ξ&ΚB‡^ίkΝπ©=fΧ€£604A3«ύž·₯ΊE₯ bΑ ؐ ’mhBΌˆš …Ε2˜Ό‚aK1·₯ΐζΚ’b^­ Ϋε^¬™π…•‹Νθξ)WnΕd+@ψ―WRΜ6’l.JΣεέr Εxμ/"tλΕ–8u)ω^• –=$Dξ»χ^R³Ω™ιtwPδ‘SLβ[ΰ€0ΆλςΟ(Ω^š°τDwΙl·]ιι©,N<αC7^wύQ‡ž™θΎˆοβžP@Mo}πύΒΥW\uαyΜf§αu0\ψn(ΕΐΚ-εΊί#Βƒa•½έRPIΨνK.₯60AcL%™+{Αόj^ϊPlΉ”Sl±R¬ +’;ωͺ9„“‰+†sπ]Ό„ΛKŠ•Ϊή #¦&xoԚμ*C’§bϊέ“ ŠBRδ+αΏbœH1·© #i6Oν–‡œbM—Ηώ"B·^j6u)™^u…A$Dnωβ—Ί:lj*›NCΞ{ΠO»­ž½BΉόΕ_„w1Άλƒ†ΧaŒΒ2*-Ti˜e,³Γ»Έ„/β Kœ•2…/†R ?ήmΟέv―ύααΐΙολί_»ο~°'dβΗJΒ>Η} ΌEm`‚ )ζw*BΤΊύi!_σ— σbm!κ»μ™ΉτrΥh΅-Ϋvz3Ο»cΆŒ²–BFΧsJΉΕΦ¦P,7šmΣκVΖqΛμυͺ^`;=Φh\ΆoΤ©δ…aЌˆR,ΒγTBΟ~ M^L#)–kΝvۈγΨV§έ¨U yφgΓ™4Ηkε6²’–*υπ\πt ™΅šjYΟΛβ"Z}#Š{bW¬P5ΪhϊV jΥl ΡψuΓζΏbΌH1Ώ³ΘQ“rOί΄πH»Σ2*Ε‚΄(-fπΩnν‰Ψ·†ώ‚—^uωC$D`Θdz2*έ ο‚Qι‰TvbκΈ£}ΰ΄ΫΈαΊλ»!Z½“ι9 Ό‚k0b`Σ@Z…―,°ΫdΧgαD“(ΒvΪ3΄c'Ÿ(ΑΞΏύυo»4•»ͺχιUς={μΉeΛjC@I±†& ‘ggχۊ€°η±β^Š!Ύc[€ύo,Ϋ±ϋ%”‘ 5ہ=ζΡ=„;ʟοy½ΪqήΟͺ,΅άXF‰ŠΦhΫ[‚Ϋͺ/κY©Τv£H1ΉμΊ†Μ¬§$+jA/–*U Φ²½hžΫ»£buE…5°ΪωAS΅Ϊ ρ(–ΪΨΜϊαZυRA` vό i Mbδ―4,—]Χj²β*σ?Nχ€k¦λ³9'²Ν‰αr_1€Ζ ³uV₯a{,‘U+2ΥXΉνqΩnμ‰Ψ·†λώ‚ƒ^uA$D 5&[0?ύΆmێ8μ0Μ†Q`Ο…‹πήχΌφΠXΩΙT˜b?\π\γ²ΰ>s^LυΫ_ w[»ΟΎϋο³/Ύ‰ΖΒ}ΒΒ“O>Ι&^»$τCν€I1Ζ ύlμ†u¬‡WfΤH1μ‘m‰YΧ†Hːb ­jΉAoR¬5("7xώ;C—<ΰ ¬ΰΞΦ_ξxγψwcε"ΰ–F.ΉQέSdCΣ©Ψ©£8RΜ)ΚƒlψΓKYtΣμΖΟq»ε@Šρ_pΠ«.‚„Θ‹^Όθό Ϊ€_’{τΡGƒyόύ±Ώg&Rθ›2½e ²žKΑ: ›ψ.¬Σ½}Β=ΓMάaϋ&*6|ΗEβ[xœpΟ^!άά)†»υR…›αžXeίψΪΧƒyΌπό 
0―%Μ€yΜ‘G=ςπΓΤ‚ )Ζͺ,« 2F¦,’bRΏέ#Ί-ΩpB3‰θ­†. B5cbR₯…υγSŠ1?– rύ VΎΡ_ωυΒ€£Vύ £α”(η‚X51ΉH1…~:upL·γ@Š1riI…Θ!fAdgγ·έr Ε8κ/8θU—ABΔχύΒ-…^²°,«§gN=ιδx'”Dξ-ί“ιΎΫR°ŽΎ όšJΓαWξ«@žώN§s{ρΆχ[Ϋ} ͺ‘JuχΔ/ΒŽ›ƒή‚u˜°?•ϊψG?ΜαΙ'žT>σΩχμΎ'Ό g λwΒƍ© ARlΠ_η’nΔβzΔB=X,$Ε€ΊDΓk—‡O€βU”A•©Y#HVRόBΓMŠε’ΐ«σΝͺ8œ†.'eχŸB@j8IιAMκσΤNbΪά|₯3šS’g bO€Χ™ΟŠWŠ‘‡F =Žζt &ίν–)ΖgΑ[―JK "λ~<χQbέάφέ«…₯'&?ϊΨ{~VvgƒΉα<ω\ ζΚΒ4\s7Yoα`°$.½ψ’M›^ ζπόϊυπ˜ͺ FXΠ‹₯Χξ³ί£<κmυλΦ]xήωγΞ–w’eφΪ} 6P ‚€XΠ,Κ£NHΗΞ3’k-ξH$Εδbs$ο }`£!\½%—άθRLΖG$~₯˜ΫR₯"jDžπN‰Ν6‹Οδ ΰ6εΎγΕάγWΙtOμQllL•=§§cHŠ5+`Îύa«ώΪ-RŒΣώ‚³^• –$DΆnέzΥW‚«ZΞ„ΩΑzικS°΄φ€½ΦμŽ9Ώ0uΧ(ΛLWEMνύ~f«\ _h¬›IgW§³#~ΦGVΞ¦³GrXfbΟΦ½4dݚΐn췟υKπ՘qΕΆ‚ HŠuς£ώ‡<γΏΎ•ύ(CRlτnQΞt΄Λ {ΈΚhx²Π±jωΡ€˜ŽΉΈψ“bŒh,$Wb~‘Ω7ZFfŸ:Ώ–ΩOž1‚Ž/Σi—rρK1Ό‰¦Q5›ΫŠMŠ1Β…D΅žpM—ίvˁγ¬Ώΰ W]I±όUWOO¦Wg¦C©”™ΐ|aaβ°ΉIΑ’/«ΣΣπEW^vω믿̝Τ+―ΌrΚI§ΐW€Λ„_„…Ή›ύίΒu6,€ Όc—TΥΨAkΧΎ Rl…·‚ HŠ9†Ξˆ†Ξ€‘rΡ Γ„>#ΰγ`ԐΝšΨxΤΠ Ζ$–Λ•½ ΰXŠω΅A)½ΥŽ=bB֍„ΗMγΣ~Μx |Έ3½$Dn"©ΚF‘bZΓNϊ0«=·“Γ–ŸπDNCμϋοJ‡ƒvΛ…γΆΏΰΎW%b‰ ‘·ήzλ ŸϋW_yΥ}χήρ_¨B+·mΫ6(lάΈρβ .„{Iτ»Ÿνϊ/ˆ›žΑΌcL~ίΛ²εάΕ—ΌόςΛΛ†g²aΓ†ίξχ·ήrΛρΗ 5Α£anώή&ζc€δΟΒΫσŒΣNϋϊΧΎvΩg”SN:yaoˆAƒkΥ«O |ίΗ.ύ(N”Ή₯A$Ε0F` ˆ5‹€XŸƒ[»μμβI*ΊγM,Ν¨‡RΜοF:cΈγγ鏓"Φxβφ’iJzAΙ1N>Y)&Ηίψέ&cD$Ÿ‡Γ–ŸπιsΠnω—b<χ\υͺAπ/ΕΎφε―ΞdΧL―†1ƒ3½œ\ιν1Y˜¨ Σ~±3Φ‡ Ό0σ=&ό:ρΓ)ώθG5›`―ζ¦-››¨ /ΏτΉςΉπuP<ΰΘPΐœϊPθfβδ'A’…υΗX­ω Ώ Θ«rίύ φΠ©Αg»΅‚3Βͺ¦gό`x)PbΪ2Έ\³Ω™‹.ΈpΛ+°μ€A$Ε\CΖƒ\l‘γγμΜ~^TAμxΝ€BδOŠa"$FΠMδ‘ΝcHςψNΫ¨ yv(‡ή‹$Ε<£\ˆΝ ŠZN]Š%ϊ„ογ:sP1>€X3yσΒQ»εYŠqί_cC$ΕPT½φΪkηΙηΞτ'fS½dσ«ΒΔσ½rΈ9/›>n’Dƒ λΦΟώγ§›6m  +ιΜΜŽ=γΕ¦'Q΄u~Συ7n}c+ξ6Τ9ώ[½ρΖƒ|πTι8fΡΆpώ‰>)ωQMe °ίήϋ<Τh`ΜΪr“bAσ;eaΌ(UŸ€X€ƒP€X‘as$ΕόŽ*τ'_ρ’ώDσΛUŠymdα'Ι9U+Vκ†ιt/p»”‹3RL){ό ŸLΎbόK±V<·[Š#)F$Ε’§»αΊλ§{©ρwΘ¦ŸΒΒόΝ9 ΉήΙΠ?΅jςϊk―³,kŠΌΆΑάπ‘γ?„GΖΜύ0%f%‹vΨΉt]χξ;οz^οΝl―:Βsι›w?,caΊ;”2sϊ©§a²˜ Ž *6α όo¬ό΅–w άDΈ•pCαΆβ„ V’k$aΜH ‡€ΨΨΟΞΦ€₯—SLoΊάH1·”ϊ"Μ 2xϊΛOŠ™qhW z©\«­vΗv\ίg·yή†Oz­γ:σ^1†Oζ«Ρ §ν–k)Ζ}ARŒ AC‘λ;οΈ€ζ˜ΧΈ€w*„©Έ0WύD ΚGr(δνŠ7΅Μκψ«_ώκ‹Ÿϋό·΄o>ΡjŞΙώ…^€ω.‘ώιΙ&ΡOΉ¬ξMˆyΥW€1ΩΌy3[„‘[Κ‚ΜqRcΔΚ“b^KΖcVu’b4ϋδ"€˜Γ‰k Ι# ·(ΊV~ςŒmΖ[Q.+F«η’·yΎfŸμTς χΔ]Ε8H΄ŸΌδΏέμ“$ΕbDhψ$ΞΨhϊρ<ϊi<Σ0ΕnΞ-c–ύ=fwϋρ 
wΌΙ2vabΘD/±Ό₯ϋ‘¦ΕŠkhBŠ-7ΑΏAΩW€€šφψΦX0k}-’f8KKŠΉ­’Π΅j&h+$έYš6Ύ B―›ΈRL¬Y~+†& Œ†ΗcΕΖ!Ŝ’$τAi{ΤnΗ Εψο/HŠI1dγƍ?τπΊΧύΕψΛγ?ώτΣO―ž[ΏΑ4―»&ί‹Jg§z#%'ζ$ΥΒσ`‚π­I(€Αˆvπ!p΄er­½θό ΡτuSŒΑSŒ-”S vƒ·R>|"ŒΑ Vθ0«Ε‹+BŠωVUϊ‘šρ_[ϊ"ͺ΅deΜΌIR,h—φƒGLΤϋf²“j–Ώ”€˜Σΐͺ%š^Η1tβeΟ]…GΛ_֊/Ρ>R¨[Aœ˜*3`–Š%/Ն&νσb½X,νH±X¬΅lΪ-oRŒώ‚€A 6ΪO8φ8 zΒD`™‰,ΣSΩέffΣ]Τ{k‘ίB lžvς)Ο>σ,;ν=χΰ) υΈρϊλg§WwcΑ<ε –§&R“ο^ $; ˆ•δΕp%Λ_Š5u™1$!ω€ώμG©Ε9­HбΏ₯˜gΩ0bΎν\I±θ)·s%76]ϋ|GΛ[VςδJ푇š ρJ1œ7.όNe@nEΫŠAŠ1&+εN\’Ά’λ{…9h·I1ώϋ ’bAR ₯$Ο§3›žΝ€ψΙN§³3X ™FΥ5‰hαDϋYXwie@¨]rαΕ8λb˜Yx1ΌJίΦΎ΅Χn{@ΌœiΆOκύ©‰‰Ιw­Ίΰάσ‚γ(Η b9K1ΐšbΥτƒπΪ%‘?ωͺ9Τ#΄¬ΑhΰŸυ$Ř³μ‡ς%ΑΈ'$>~σ/Εά²"τGmϋA|tϊΉ­a1α»Vk!,wδ3ag#BrMwτƒG—b fοb‘Xiρ[±1I1―S„€ΣŠ™*cΪ-GRŒώ‚€A ξ»χ^0bΩή(HXOΟ[Β$ϊ;,ψ"μξ₯›˜ϊτ'> Γ0α˜0m%¬yfΨ«„Ž―ψ£ƒΛN¦f™μΌΛ±u©w―‚W:N@¬ΌόbV #–³s0ΓמΨΛΒȚ‘$Y΅BBυ¬«I±98E™­,GΖ-εfτRΜΠsšr=ζ|O~-/φ©lΙ‹-€KZΨ‘kΖ.Je(—άΔg©‹ Ε΅fρ`i";―"K^бέcV΅DΣ”Ϊ.ν–#)ΖARŒ HŠ!Žγ°ίΪ™TεNzNϊ|XΟί Λ°ΐώ`Δΐ]φ™ΟnΩβKίχΑάΑmΧbΌΚŽΫ‹·ο9»;œςtοrΝM΄?΅jΒΔ.ΊπΒ€XΑΑbλ±L₯˜WQόs6B<Rξx =/GŽ`όG7I1€ƒΝ"‘4sμhA|ΚεXб§mEtΓΩ•ΣήUβ‰λ΄5i—δςΪ γG—HPId)†^ΠF†ρ-ωŽΟuΕ’—bŒ*‰`a ]RαN΄[~€ύI1‚ )†h_FSΘ‡y²0‹|ŸΝ°ŒFL>ϋΘΛοlή ιɜΝŸ 8 \ƒ ΫI ½ιΕτοι³Σ3έ`±t7‡Z˜trΥΔΔ»Vύόg?ˆΙΦ­[α'ΉiΣ¦€ –§³λ’ΠΩHTcv€ar‘ΈM½οd#Ψ€f1'Fβ`’¨°ƒ p, RΜλT„d―!3Jŏa8u9ωŒζ¬D~ΕVR©βG“b“•hγ—΄Η£CœXoε΅FN@Ϊ-ORŒώ‚€ACέcΏbΏo―χ„)δΣ;¦ο· e\#ΩΔΞ:ύLω¬sψ]Ξ<ϋά³Ξ9χμsr—\šΏϊšο~ϋ;΅xε•WΒ‹€²ŒιΕ`·[Ώt dƒenύΙww₯Ψ Ο?¬Th%ό$1§A,C)Φ.εNή!y ’΄Ό<š4jB%Ά$)†ΤUQH,Ω\»¬0¬ψ—b‘8‰ͺ$…‘I‰MDθ•Ζ`κδ₯ώγ3ΝΝvO8Ο]‚ΏρΈ­Ψx₯¦ΏdO Cοί+Ϊ-wRŒώ‚€A ωόΝjςε£εYτ‚Ιζ!^l¦—•Ÿο%σοu ’‚₯>ψAϊπGςW]SύMuΫΆmsΣ‡υγν0Ϋΰ§?ω)8q8λπ:ΐ€“0|ςΝ7ί ˆό«΄,+ ˆε(Ε¬‚˜θΘ/f5Ο|`δ[yτ“&%Ε$­±t₯˜oV„AδΆD’S-0‚q/Ε,M”ΟΖeYŠp)šγ­Kρ',·Ί€$$ΕPΩ»‘Η'–Vθr\±ρI1ΖΏcH،:4―, ŒInΫ-»'JZŠqΨ_#’ba ΤsΞξ³k0|vΦXX`3,O§pέ•b«ΣY˜Ή²»—LŸ2c3ρ=»σl¦§g¦έ„h“έυφzοι'ŸzχwA’1 ωμΕ` S œτ‘³=†mκ]«RοZ,pg`Φ^{ν΅-Cΐ ‚Ϋ7+Ω_%$Řιc'sΑNZΜ.SJ­`(ΌNA„EpΖ€%(ΕΏšg\›ZΗΊ&eUH‘nΌK1―šˆ όT’ψύ*€θ +§Ub«8q‡¬Η UΑΦ•¨CςZœΧ –πšαπ[±qJ1v—δ+­αg€>ΰ˜b‘Ξm»M 'bίήϋ ’bAR¬λΉϊΚ«²)H!ΏCf}(ΟέψVΈ™…υφΝ°Œo16aΝΪ3άΔBψ{3›κY0T{=—7c© †zνΆzφδOϊ―ό/΄„ΜY;Ÿ~Ί½ήϋdίIΊab°,Ζ?Ί»…XRΐ-Γ›Ύr€ARŒ="iΖxό3Ζ\0σηΛMό·Sλ(A)†”ZΞ’”bˆ]… λš‡xf9ΟА8Fk)Ζh„6ΪΔ±·&§Χ†Ή"~«ͺ μ1Σ¨₯Υj{ˆpZΊΒl­aβ§₯"•›CΘΗN]gOw8R 
hκ²ΐjω‹W/f£ˆ†‘ͺŒσv½'Š~kψο/HŠB9Ε(άuη]]Ε“Ξ.>Ρ~ΏΝLΏ¬d)ΖζΌ·X›©°Μޜ[ΞLυ˜^ •ξu&έ-O½wΟ½nΊαFfpΖ‹ύςΏXΞΒpψδ*Ά##Άt½ΨJ•bI1א™ιc’Ηi |Χ Ζ¬ωσΙi†ιϊF³YRsc<―ζ Εj­n4›­VϋZmΣαΟwF2—)_i΄]ΐƒŸY/…UY”W)ΖΘφ¨UΣμ΄γ₯ΥκXξΠvεvΉn:#H¨Qa=•+m/HΖΕd΅ΤaΤί·ΝfYS„E#ζ«~ξ s…zkp«υΝVM•ΩΝ§άε΄bάH±ΐoη&ΉR­ε ' Iβω‘b>#>+)Δ…#7mΖΕD’­TΝvΗμbYπ“0ΰQ`έΪπΪ&f dœΥbΉΪθ6,¨<ώ ›υjY/δ₯Hχ#_¬u΅@³Q‹MΫgk6RΎ Wλ^aUΫhԊšΚhύ,ΒIΕ8’baϊ36’’j•ž»§¨QctŒ~§v›@OΔΎ5œχ$Ε‚€$•Ι²}ψ‘]wίxύ ςYgΈοώ{¬Ζ„b½ΰ)X―°₯wξ°Ζ%kρπ#žλ<ΗJι8Ξ‡ ϋ§Ί‰φί Tm!–,pϋVœ#HŠaͺ`Ζd[ ΑŠΒžV1t!v$έvΫΉ¨ΰ2ž‘‰«ωσέ-+‰» ΅Š­Žk)ζ6‹Β.„}›|S“ώκb3H -+˜-‹νžΔœ"%w€γCŠ!V]Kϊ7Χ°yo· τDμ[ΓgARŒ HŠ‘܁™ΕΘΝLwSk…iυa ΣoMξ˜αkbήf·ΒΝμœΝμo16Ω{φΛ)6h?ΕΨ3›Βλ…4^™c<Ϊά`2½XύΏ‚Λ ‘x±` K=³>εέ_qRŒ )ζu“mνJΌvI@μ12‘Ετόγ5‡Œc$hc q&ΕΈ5Aϋ’ΥΜα\Š΅x’bΙ{1Q­zAXš˜DΝk# u‡–ΓΎΕrΩq 9‰›«Υύ€γHŠ%οΕΔJΫεΏέ&Π±o ‡ύI1‚ )†9°ž_ΏώΔ>4έK‘Š!,ΰ(Βτηdψš˜»Ιx+άΜΜΩΜμπcΦ¬=ϋδ›`o2χΜβθeC/vΞ™gy[·2mγΚε Ε¦&&‚l!–8+HŠ$ΕΨΑYJΕ v1Φΐτ!Rέ^ΰ#Ί,Δ„XΖη·-R ρ;•ε+Εί()BόHεEg€&)Φ»œ—’p"^°K°κρΦ>§7|Ό­ρI1όΑzjΌUΝ—?@*Ζ£CœV9υ’«›ή’h· τDμ[ΓmARŒ HŠ=ΡzβπήX?b°ΞΜY0Α–N1VбΉε/}ώ  ½|ίϊσΘCO­š„,+)F#HŠεΨΓ“‡υΗeHnαY·,όΠhΘΥp’x)ΕΕά2–bˆέͺδ„Ψs: I6$ΕXΰτ‘1‘«4ν]»Z“cjSΕ†ΙΆ‘€ΨφύάΆ–‹ιρžu‘9¨ΧR Ř& q!*EΣ[2ν6žˆ}kΈν/HŠε{λ­·nΊρ¦l*=›Ιφβ€pJ ,―π/K7€n*³°χΊuλP,H·Ÿ‚H±UΛ+RŒ )F4͜μjΨsJͺfŸG’F)]4¨%bΘ(Γ'C:υ’υQ€©ΛIώωγΑF™ύΠΖ@R*ήU€˜Ϋ”ωΞ)&-2\Ρi³gVe ià ƁoU #U^Ρ*; ».Ζ)β·λΨ#’Χ«–°αΏbμŸΟθŸb€gG5.ε†Ή΄Ϊm=ϋΦπή_πΩ«&AC§ΣιtΊ#(SS«ΣY!HΛ‚Λto4%x1XςcŸ€™υ― ρβ† {ΩŠΑBR !)FΛ\ŠIZ#NQŽ8σΊg·ΛΪPiT$U―΄mo~Šb%žDγnΫ¨—ŠššWdYEq‘鍭Z!ΉLην²χΑݎQ-δε‘ηΒΣJFǐψ€X©εŽ c:κΒw)·σμ“&ΨΫ1P¨™CœcΦ+Ί" WSQΞ—j-ΧΖ‹k5KeθšW «O՝V%l2Œ–3„{BΌv£’*ςU•”b₯1΄uβ»bμŸO\Ÿbΰ›­Ί>΄sͺ^o™ώl· τDμ[ΓΑ―JDί¨uώτΗ?Ύ―χd'zΖ'ΜC?/5~˜x>ά w “ργzώfvUDϋύχΔγ°χ 7a™ ίκΏ9ΞΩsf*3Jο·χ>7 /ΰN|­π•μΞ>9’s§·ήμ8›βZπ°xδhP­HŠΛLŠžέiΥ*%MΝηήysyT’$ηςͺ^ͺ6Ϋ–±β»f»Y- s|ˆΨ½τŠZΠΛΥzΫ΄w₯r!<Ηj΅’εΈ3(ΙvΎ7ω‚^©¦νςΦ¨,ό9ςΉήύΥ–d~ΞΕr½ΩΆ]/`βۍ²žλ]€œͺΚ½ƒΘŠZ¬ΤMǏμžB|Οi7•’>ί>`ϋ/hΕj£i:Ρ/2/γl<νf­RΦ *6|q‡V½VͺԺ݁ΏLΪ-υŸ½³pŽΫκϊπΡzuIZc˜™αs˜ΛfΞ[ζ6 erΰΕΠ eΖ0ΈL†PΉMβ8Ζr{€γ\kΆ±Ζ»φτσNϜΩ9w-]]ΙƒΟ\ύ)f΅Ξƒχ?νΖ=mŒδpύΔD-#UάΈ™7ž§Œ=&ΉJ$mb½Ν³½ΡCn’L)hŸ+ΉHώ „p•ΦB\Ύ‚ž[B²{n»!’bM’b,eΞ­¬ό±²ς‡&׏ίє婃UAŠH1©»', hYRŒ2γ)Y¬’’bςΥΧ‘ό g̍oΑ/ζEίϋϊL¨p蘫4)3Ώ —ίσaMΜΉηOZƒ_ϊ\%Λ~ΙK9Β Ύ”‹ο”δ 
M;εΪΙ΅5΅ττžηζM›ULΠ‘± 2œ ›ΈS¬Œ,ΟŸ}ς’β%Ε;šRΕE[yΆ9φdaUbR €ώRΜ†‹•ξ?Πψς«>Z^ ϊš¬Uvή‚ΉσΧZ½βξ{ƌUwUΠψg±5Κ•\~οvΆd+|Ν–lΩ5d7ΏWΛΣ¦Z^Ύ6|h~Ρ'E qϋ΅΅΅C αK™bM Ϊ/+;E»–ΘςΌφκθ]o^±λΝ˚P—Ώρϊ„ƒ——Ÿ- (O¬ R @Š€^ŠΩΝb555Τ,^Έˆ…ŽKŸ7Κ8bΘΐΑ»vνͺͺͺζγ?Άόξ{rΌL{€'Υ‚9σώΆeΛ–M›Έ6μΨΆmρΒ…Ω^άXqc}f{nκΊƒEvfζ—^vνUW'U΄χνςK.νή₯+-’Iΐ†¦Ή ˈ\LΈw})¦=:wέ»{•bΏ<ϋτΣώU„b)FŸMΫ)vΊ’β{ΪχD–gίήYϋφΞhBΝά³ϋΪΒΓ·4mOV) Ε@Š₯? ΕXθΨT¬κͺͺΡ#FΊŽδ7(­λΤΎγ}μρόΒ ύφβόΩsυ9oεJ5ΞάίΟΒsΟ΅ΙΝ ¬™Ά‘ω&ŽΟŠ:5Φ£k·#₯₯'OžόφΫoO6šS§N}συ7τρ3O==kΪ αhGΠR]‡ζη™CΏ*9 χP4ύ4ΑΛ/ΎdCΩψ±\ΪΎȚiM¨ι{v_]xψ&Ϊύ”ͺ~Βͺ Ε€H±τ€˜Υ[DIqΙΪΥkƌι)ΓωTζ\3mςΆ`φxφh…‡·kέΖΎGΩ6―υ|πsη”QσδO΄ΚΝ fSαX.eKD i Τtnο=kͺ«mgΛ˟zβ©Ξ:j!ψύPp9=΄}p/tΏ­ZΏτΒ‹ό4ψΙ8p UN–ΚH_Š΅oΣnϞ=b\b@Š€΄”b•••Ϋώ΅•^Kμ‘“«4iτ6?kÚuV„YHΡιύϋφ3Ύπ|πΧ]owNρρO<ρDλœ<ΆTγφuΦ»{OJR§‹²qKаώ{νΥΧΪΆjSw›©­'Έ_εˆn»μή½›ο—oyφŒYΊξ*#Tώΰ‘φ?))@ϊK1b@ϊK±Ο>ϋtι’Ε=»vΛ‰gjGΤεˆIeθύa]/–ί³ώ„Ά‘™ϊŸωƒ‡@ŠqAАώR €ώRŒ’Ά&Ž`|Ώ#Xρ§λ(j\j8?‹5™#'ŽŸ`£Ψ.QPWλμ\W()©‘θύόνo|@½ 2Εθ―)νs¨½>Y^Φ$)f—½}λV›ύŸlΠΎœ’©'υμ3 ΰ;]³juψQΈŽμάΎγ[……b\b€Ή)€Ώ+)*QŽγρϋ’"ΐA…6C©ιWΆ—YπΨγαx›ΨψΡcόSψΔΰHšsνͺΥΌM,,Ε8SŒ¦΅φυ½ˆςN±ζ”b;·ο )Ζ“ΫΛ5fhυAΜ™RτΝγ>ΖwJιiΤΜ™5˞ΕΣΉ ƒ~|RŒ R €τ—b Ε€tί)vςΤθα#9«Ξϋˆς³κ²ΐDN<λήuΎωζ›@?”ΏUψΦψ±γθ¬p)³γ™Oόο ΦOΝ™)ΦL―OςΉK/ G§%W1a€’’Ό°£GŽς΄|§wάz»vχΗ3 »ΰB#5€€ι/Ε@ŠιŸ)φώ»ουοΣO9ZΫ|}§Ύ”sžΘ­VΩΉ£‡οΧ«Ž *#•lγ’††dΚjjjš?S,Ϊ/+;MΌ~N†‰φςK/·kέ–7s%Ή [ŠjΓΊυφω‡κΤƒ μJΝ;Ε¨ ΕΈ ΕHs)R H)ΖR4βς#=₯9π‹³Βε *n‚Γψξ₯’O*IΎ,ηβρ?όΰC+‰μN±p¦X²εJιIy}zφβ S£ΊΊϊψργτΒc«¬lγˆd—ao< -ζ’ρOŸ.ϋ% όbfΑγ];v ½‹*dF RŒ R €τ—b Ε€τ—b,qŽ=zρΔ‹l˜½Ξ†#΄2‚ŠΕ_fΨ0~ΘΈ4žΤΌ‡«S»φW]qεφ­Ϋ8Z‹IΨ)ΖRŒg£O“ΆΧ4mFΓΓsYψ]:tzρΕχνΩ·wχ^ϊlTνέΏgχžW_~eλ?uσ7χκΦΓs2({_αΛ50δgΒO‰z^ϋφξ³?:ΙΤο;tˆ^ΟΠ§]Bψ;Ε.„γ‚K€H±τ€˜u:Ÿ}ώωδkε |ccΏb~ٞώͺƒ^KιiΧHm₯Ρ―wŸ% ΏόK ‘]Μοƒφν΄:| 1d)F0RΊZ»ΪΈΚ/n†Άμ7ž4›+΅+ύΗ΅kƒπ΅ΎχηΑnΞΏ:ζΐΏwμL0bϋε'Όzεͺ!ΛvŠqAŠύ b)vyA%Φς)f%Ξ7_=mΚTWj*•ΈOΔ™9|Ċ{–ΏσΦΫ<ΥΣ,Aϋ6 Ÿ΅Z εi“i;qβ€€X‹@Š€νc₯E’‹^„μΩ΅›Ž γΤν+΄4~²ΎΠ«–―ΰΝen(*h?£>hίφΪ‰gcŽδ!7φO6τ'ώωHΧ‘Σ&OyύχyX*H1H±–€H1+wώχߎ1B ©…0RΩψωΔβ¬z‘Φ―]K6mWCDνΫΩtxr=lτ‘v(l߈aβ²Ξ;=pίύ§OŸΆ R «‚Kc Ε;Ŏ=:gζμVAΪ>cB°}BρP>όΰCt.₯eEΜ΄Ÿ.εiC ^0wžΝγR «‚KK ΕRŒΆzm*ΨΨ­Sgν*Oi›CO:ΏiγFk»"ˆΪO—β@±vy­/\XqΆβWR «‚KS 
ΕRμΥW^Ιr=γ[*aΓο]'1lήφΆq₯ά²eS*;ΕlΠ~C©ω±θαΫ‘Ϊτ9qμψ/ΏόR «‚Kc ΕRμΑ0RyΪΤ Ξ³?ΠΎ¦ΓHŠmNZŠq¦ο³³%Μ9LςHnΈςY iΈΤjFδϋн€V)–Ž@Š€ΨαC‡³άΈr•‘Š£ΎŒ°ibQ™b[6mJφυΙ'ώπΧ'Uγ†ΡGj!yΩ܏5ΊΊͺ:ΩΘηŸή€‰žΓ‡nLΠΎ‘Š>{wοyκΤIΚψ'‹χCγΰE–Ÿ)ΏρΊ²]Ο“šΦ ώ7‘C>%xSΉRš_UUΕ“GsϝwΙ 3 ΕZVAŠH1€γt°β’’Iγ'h·ΎMRž2Tϋxχwy‹Ω΄―…"Uο=ΛΛΞ4ώεM>’χΑΥTW/˜7_sΎXψΙU°©:·οπΖλoFg«ρ#Ίυζ[T†)Φ‚ R @Š€;χί{x~Ύ+”_޲αϊ*CΦχ1ι ­Ω*'χ…ηžηs›1h?bh„€UGΡN±ς3ΙI1» ~8ωC†<₯ƒ9“ Ϊη•pTΏ'Νϊ΅λμΜ Q[[;uςΡb‚φQbR πΣwΥeuTTWχΣ―ΰO'Εx«Χ‘C‡)b?ΘΡΧZ„‚φ+SΎNR>ΞR,• ύπ;‰ΆDԐwŠιTvŠ%δα‡’ά΄ίΨ{'U²2οο­…ΊχφκΎχξΎονάq§]™εFπN£bΝ³Pnι³ΐHˏ!CΜ Άi›.i(4ˆζΫ“­© Π ACD*Μr’Ό<•Τ‡:uΊŠΊεωFZAΒΙ“ΟΩ{>ύώ ¨αψνΫVqyŠΩ\υ?όό;Ÿ·mgξ?š%V_m“ΛίψΊ7G°…1€`α7￟Έ‰‰DϋΌ‡€bBŠ 53»6ΏΩM5Χqas·­h₯α°N3 ]9œv₯T½5bͺ_ΜC‘3©ZόκšχDk\ώZόn[ξxŸΟόύxF ‘1φ {ΰ[<γ©OuY―•tm-ςΫDŸ/mοg1χΫίρΫίώ–pŸΙdς_T‘|^€ΨΏ}ύλ>ɜS νopA1pmσΌοiYωςIcYνΓ睩§Ψ3žς”Ύυ-R‘έF'³IΩ£>ϊό;ο:GO‰φΧ~(&$ ˜PONHD{RJΏ6΄άž #B™υ@±΄χn[ρZbΥ\δ€OX,U(Vλšn΄Z†‘kυ²šKΕψIΉͺ)f>Η<:j$}φ’χΖω X#y·-orΘcε{όŒ: “MΨΓ―nV ωQMxB (†Τn·οΌύΧ9‹ˆΩήaΆη$"œŸν„ˆtΛ3ŸUωяoΥύχβM7[—on;nn·ψνoή~ #/£$?ύ°cΥnq¬>kφ±E>?Ž»Ύσ+έ>Vύ‘ϊk_ωjδ?:·±f(&Ε„ΥgžB‘ϊθŠ (f©ƒ†I©ͺή™Pθ‘QΟH»W@{9TZΜ|Žy($ ˜G@1!ΕNVΓΚlYx+hω (φσζΟΙ»&/nΉ\ην"ΘΞ‚Ώ1=]œhŸό΅Šm’Hƍ}ψ#ρ‰‘]_ώ—"_ϊωKψ Η—Ύπ…ΨΏΔΌωθu;,« «H΄ŸΙO”S’ΖλΒ4|ςΆηήςŸ=^O±ϋΏqSŸτdFx₯άΞώL9…€ϋ.¨ml>醛ήϋΗοΞ€ŸωLŽ-HoΏ0Χ>$γ·’ξon­<μbVmδrgL’3=ν”f{f]ΪΉπβΌπ₯/zρΛ^ψ’—½θ%/{ρτ ΰτ%Φ‡—ΌΰEύ—…Ω™}ϊˆώȝ·ήNОVŒ!Ρ>§νšΎ£ΐuύΕK7\ΊξΊ ΙgϋK»ΥΔML$Ϊη=PLH@1sΠiuz“Υeλ$φΓ!#ΝγΥ-β,V?U PLH@1³ί6ˆΪέρ5 ξ…θc*μPLH蘑p±―όλW^σΚW γšΊ‰m#V5ŸSΜώu Ξ ΫdΰBŸ]rhΙulΓŠr&Ψ.ΏγπzM|σΪW½ϊjδQrŠΩpΠζ†φGKαοΖUͺHάΔΞ O18PL@±γ–€bu›τΗtsdt,υF ͺy©Ά&λo’€bBŠa (&$$ ˜χφ`}κ“Ÿ|ήνwήxιΊν ?mάƒ“βΫ,Ι*0ύ‡σtΩOφεGI΄Ώ~Ϊ ΐχw^Χ0œVμΓχ|Πn 9˜ŒA]±Eo/ωΌ±ώπIq(&$ ˜©ΝΠΐ^Σ<Λ PlR!j²άΓ?χͺrκ`Zύ”jl5«εj΅\ˆ³Y16ϋY1ΕΜPμš“€bBBBfcΕ|5“VPHH@1Fω|ώ}οzχEΧΞUί«sSΜρχόβSό>uνŸ’ΚΡO”Σιg;“ΧΞΉ«ΆαӝΝ͝©εοΊ|όΒpZ±/J_°^/@<γμjΞ–OΩK"& Še (fZU΅ˆH‘ЭО”ΘΘu½·^Œz β'£YUΉP(¨(”Ω3Κj!‹μΝξ"χˆ%r²ͺuFΨ’ŽUTzd­ΥλY?Μ₯h₯£ΧεLBΪƒΖ„€XJ)7“•Tf Χ‹ΉΉΦΎΨ22KΙΕzgΔ>IΤ˜L£°·IεΘ(t&?j%.…}^―g*―/ Ε³­½(δdάmS΅‡π3Uγ~Χ.o²τ“"Ν ¬ψ­Χ„|aH{Š9Ρgό(bL¬β³4nώbη(Sw:b0ΝhB…9ζοΜg†b|‹gu©Σ°Ι U–sioF₯lτkzΟhΚ9θΩγΘθΝχ’υ?σπΛ`6‰»τI<ZεΪ֟ώxΕ,ΏΊ*ϊζ’0.£$§₯`€,2[>_0š„UΆXΣΨ―ιέ‘VΆΒ 
LͺoΤςΙhΠη³Νσz}Αp4―Φϊγ㠟DΖΕφ¨»ΈΖm­’KŸΧ3kXΪΕ,˜]Κƒ―%9Β}πS‹ΣTl ΜF½V’σΩt:-WΪ¨˜έq 2άp/―Χ²Άf 7»9 Νr»I»Βi eLΡJΡ*J2JšiΥ 3-«TΪxVq_‹να_žψέμEO5› |ϋƒ”yUο’¬2νιmISΘ²°•mέΆ%Έ/W‹8dvuUNG­ωΙKΙ¬ά  'έTX;έc¬}έ­a>sH@1ΐ―κo{»ν΅ŸVμœγο†ύ’ŸπιΆγt„Oρ-¨%·gωΏˆ§%|ς;ίVHύi· ¨vΚ^’ΔNžoŸ„C@1!‘³ ΕΜV!ΊBQ(V6F‘g’ ›Ά O9szπμ¦κ · ε#…Ή@·Q0 ΊRΔ%i %Ϊ•ώ”’M–tD]&Ι¬hJΘΝ _’}?t λ(μJͺ6x< Ψœ·+YςΕRyŠΦŸ7―ζ™ύ” ¦;ϊ²››€-‡:εԁήHeR1ηHϋgΏΜςˆ³XDm±OέQΖοœftαΒμσŠgζs@1ŽE 2 ³λRVη@ΝH”JφREDΧΠφ―²ΑlξΞΎ8τ{9‡IχlK%JΉnI‚₯ΰMΣf―ž Μ † τϊΤtΨ{7u•ɍώB;γ3;£\€V˜ ŠΫΙ =‚εŽ+VΫΝcK΄οŽVŽΕL=κμΧ°2ž½―)₯ίέΡ|γH€ΐΜΜvšIϊ―–zž’ >S±%D㚠»7VζŠι€š~~ω„tβ³Xu³”/7Ι›oq )Θ¬d%bERΆΝδΏ–nΗς™0W£΅!ιX9 Τδ‹*CΖ7NR&,vŽqέΛLcΎJΌaUC λnσY@1v™¦IώώιΗΌqFχpB1¬G»ήϊμηZ‰φIΗθα)‡€bBBgŠMZεζF»»~΄)©†“δf—e4“lγ%Όw…]χ Ύ·κpMoΝΥhε`o•SψVDοSMΔ5”6h‘‰β„FPFˆ¬qB±΄64δƒ{7ώgχ܈Ψsy’«eέUg₯έJ{ΜΪK{}nl4Ω9…zΛΛ2Wζ%C²^y"(™§ξ¨ΕV§ϊΗ…9ζϋΜηš‡‹wiA3‰ΊώlIς2e¨ lΘA;ό ½Χ”Ψ³­iiΨ»JνεΕT'^𦇔eε›Νς°:Η› vlt³ ž@BJa:λ7²`ε1ΰMΦϊZ–ŠQΎgku·pβ†xi|pώDΣ.OX6ٟ’Ιύ§h·υ0?EΉMΕ–Δ&a(Ιxβ΅9ƒkZvΎΠcyRυvΉ=DKƁȬžlΓ|Π½ ™Ψ` ΰ&η΅t{ψ–'ž =8Δ<d‡ ­ΆŠΥϊά-bWΏbw끲hHβbSι•„eΏώu·ΞωΜ.Ε ΄°T*=χYΟqΝ2y-L΄O9Ε?m )s­‚bΠΜΧΏζ΅6ςΫΩ`N΄WžυDϋŠ (&$ Ψ€cQ/QΦZζBω 55·Α-θ#L MΝφ‚Yaa±XhΨ»ν\Θιί“©λ 0G-­œpΨ+h`\QΆ₯(…„vΓY±Ώ.Θu¨ΙΎS3η4CΚ(zg4Ήz£V‘ο%ΤeΙιWΤZ£YG˜£^³˜s61Qνρΐ–Q˜Œ:Ίš‰ΜσŠΉƒΡ°γΏθΖKZΫCp™.Ηƒσ›Ÿ#½UΎΟγνΝV3……lg›θΞ±)£@z&δœΞώ…Fδj}9§.'γ˜Wμ3Ÿ{²/RΤ|ΐWPJ.k½4ΆSWR~gcΛl^§1τeΥFwΆ+ω¨Ϋi‘Ϊ>|@­’΅•Ο§%7tz:›·ΏNg+m“³Eμκ'½N·³xEΫ7bl†*9 ψθ.TS‰ΜΎ.GηcZςΧΐΉξΦ<ŸΩ%Β'‰Ϊ­φk_ωj‹ρΈP1ψ”=ή%7ΕH θξ'?΅³΅MJΪψΣJIα)‡€bBBgŠuœ1]1e1jιΚ#τ°‡·ΑώύŸ3jσ@Ά£žc—›(˘‚,ωΑώ}²Ÿn\›PήΣΪUτΡΒ&…rϋό’ƒ6D”Ρb5@S`$Œ`…I§Ί‡ι$ϋvΞ©Έj,.ͺε=Ph.Ήo7κvΔζPδ(VΪΜoZ„i] Χ5|P…) ±“s³t7g_Ζ2uΉ ΗΌbŸωάσw‘BσAR‘·8δEΩοςPa΄`A{εΦβ‹‚LP ΟLί’ΈΘ±ž?θ”±dBœ…`o₯νΫv†Η„σ%Ϋή’νάN Š’σ9PB+tζJζvψ1ΈΧ ΕΪ%ηn֝t½§δΙk m§…’p#槨έ1>Υ0ٞ’ό¦‚%`„»Κ΅ΎI{μ{Α±Žξώγ–tRIγ•“ ρew0?\άS cΉΌΧνα[žΈcAtwq€oލ|NΩ4ήO΄#ϊΈZΔ¨*Αε’’/[#Ω€ΥTϊΪqG»ό5π­»uΟgΕD Ι²??qοΗ/nn_ΨvνlΔφΛνŸΓ§ΤŸœως'Ϊ§ŸΆ€kγκι»ά‹‘Ό’RyΰΫ7^ΊήeyЉDϋβPLH@±#©₯ΖΞGϊ!JB&Ψϋχ³9ωc‹^Mʐ¬)’NhTμ‡ΑKh²—ΰ 0:‘όλh[>j†f_Qς™Ί|Ι#θ£€δt( εF|PLR ZιZrαοΥσAp)€]+ψζ‘mΥ―"¬Dy‘›˜?V¦Ύ’b'A{¬Š1L]N(Ζ;―f>χ<δ\€Šν¦:”$\KC‹ζηF5ά―Ά(†t  τ ©ΘΐαŠΛ‘‘‹Šƒ 
¬glr€žM\ΐ˜bmΗζ6L4ΐ9t͞blΠΙvÀ܎(έ Xڍ{8²δQŒ1ϋS”ίT°ΔΑ+έαF•Α¬Ή4¬μ±ήW3euL‘ ΌΨέΙΤ²Π₯jwΜ{-ΕžεIιXO:ŒΪ·^ΆΧGǁΛq?pΆˆAcuq'Sη}ΙƒF„ΑTT2­™ό5π―»5ΝgΕπθλ_ϊoΊιΒΦΆkΣJ(6Λ²Žε§τ’8~’Ÿ¦$5Ρ>βb£ΡπΞΫn'-έΆέβ6 B¨ŸΒg†’"Ρ>Š1 (ΦK9βœLZI9…}C$mqΛΠwΰP¦>`‡b­˜3K:U“–{οbg2_žf:-2sΐ–Q@Αe²aέΗ£©$yπ±„x1Šϋ Q%ξE4a΅FΝ̌ψiΈΙΘJ< ~;ikˆ>IΊόPŒ^?γ_€¨Ks΅’^BΓΔ-…ŒrTCv(†‰­§Dγž¬šχβ%ΰ¨(ˆΦP;šŠ6ζˆOeuσ˜‘€Cb°°χŠ™*δΥΗτ;b`?V€±s€!γ!XˆχδLOQ~Sž 4ΜΥηυ1ύ.yPΗ4Ω Œ):τ:•pώxžk‘=ηςD›lP—EWψΗΌ³~(Ζέ"ΆΨI,Ο!ύ FAεΜ¦Ž΅ΚΌ5p―»΅ΝgΕˆΨƒ>xη­·ό‘­‹Ϋ;™Λω»y ‰φ5§=Ρ>FQwέvϋΞΤYΜφŒϋCH΄υ}6ρ9Σ4S@1~ (FίCώ&Š ΝυT΄ †xdGƒbΝ\ϋ[QρKΚ‡8ŸΚݟ)κ”Jψaλ( θ3π~b‡b8G2υŸΆπ:ΜπΏ,!hˆρF@r³”ςUDa(y¦"αΨIK†Α‰αρΤ=.(Ζ1―NŠρ/Rh>TΛάW,axU(\¦žΑήOyŒ„9akνΛjΛ+ΙC%P₯Β‘GWΑK]ŠΑς€D@γ$AΎ5A1SG+"―$Sξ^KzWΐz4ΠLqd`!άex”§(Ώ©ΘΚLƒb@V“·Ό2&¦+}LΑ[Rž)Ϊκφr_K±‡yβŽ…ϊΩW(?γo§(€Π^vS!uTcώψΧέ:η³€b oœ$nbwέ~ΗΕ­ν‹Ϋ.Χy;i&H•e Ÿ²”„S†[0ΤΉЁΎQψA„‹ΩMΆ>nΗiτ{Ÿ Œξ9ΓΗΤχ@cPŒυ썷ΑΜ6θuZΊVW DΘ ΕΠejΉΙΉ ›¬€ό‘”¬j­ž99žg8ΆΗΒXώΏ:Ε5 DKπ+ƒzε]πFO+œΔ _…c'¨A"<ζ©ΛΕψηΥκ™oφZZ³©-ω₯ΩMθσg‘βζGγH}5)'vΑ0Ά%ΜΕμB‚|δΝ‘J]( ™eπ*€—Κα%ΓΞι« ŠΑ— ~"ΰt²P,©Iφχ©έ²ˆα!(ΖπΨ1I(Œ‰΅ψMexžC1HF!/Θν‘ξ‘“>Φz~~έήhV†—ubρ_K±‡y⎠ζυυC1Žρ ΰSΏΫ6΄FENΓsΪΛn* όλn=σ™AΒSŒόm·ΪoyΓ ΪΩάή9ΏE8‘k–'ž|˜~ήΪ>?—φ~λ OI1@i.ϋƒ§;δομ>£Ÿπ)ω{¨’;›ΣSΕ.―€b ψ»Ώί>Ώ±¦n8κ<OŸi§€NΫͺΣΕΐSμΒzΞθqo₯όρRρO~ ΎWxŠρHH@1€%)Ζ A1s`4«r.‹H‘Π ;Γ±TώΓhΧ) )ΈYLX΅`νξE2₯ΪΤ{ΐx‘Η(˜tEύ˜‘ό##©s g?έΎΤΖa8T?2: ―}άqvv;ijδΆ(Ζ>―g>Μe,μa‡η!"…ζCB1φΎ‚% Σΐ”Šα€NΙΪ}Ή!qʎάVάψ{Η* Π·ΚxΙ@UΗΕX±NΏ?a(ŒΗI Ν΅ 'JsFΗΦeIηgL΄§(Ώ©μP Π-yα‡bΰΙkεργι|©¦u‡x\y―Εφp.OΌ!*φδ‘Ψ1΄ˆEf_―•²Ιx8πz=@Α°H{ΉLρΧΐΏξΦ9ŸcЏόγ|πΓwάrۍ―#@g>iύ%Η‘νo[‘‰Žƒ8d9N]θ'Κι!KΊf§ΔS ]δ΅›χώŸΏ°Zj7vcj61nmΧ‰n‡"Χ’Sθ ΧΖ)L΄οοςτ δ!!>gθ τͺχΨccWίύΞΫψϋJ@1!šΛ1lbWνlWnbz!Άw…ͺέΠ.cΨ„ΠR§©FB~šRͺή19‘Γ(πΖΣ±—m)iSp¦Ό%θͺŽP`ωCĘLTί4WταχNڊ;τ©{ŒPŒ^Ρ‡šI§WτyΘ·Hωϋ }9:ΐRΤ—±ψ…η#dΣ‡Χ°βyϋ.Xμ›ή“τs’nvr

(Ζ/Βξ~έλ!λ%Υ=αAφΛ+_xΧσ_ώ—ΎμΕ/YΧρ½ψΣα0φ£«ώΠCώΏΈχ΅―z5Η­­VΏκ―|7[xŽPΒMŸPL@±3%!ΕLEΊΊ ήΛiΗΕΥ]η6{/Qls²ͺBvO1 PγΣhΠΡͺΕLμjd}ΔΕ8FΑlς†O&ΠΎgeςV†ψEuxι8“ΝΜό»&;)θχέLon`΄˜cHό±2όb¨ ψ^§xΜPlΓ…ωηΥρ'ΪηY€/@άΟ)–;dN±Βžžkœ7Ζ°Fΰgτ Έΰ)Ήψ„f>0K¨—l0μQΉΒ'ϋI6(ζe}΄i PΜΥL“π`AqΩp­ƒ;œάS”ίΤkŠ‘zϊνFI‰‡―.PVς]K±‡{yž(v -bx -‘Ϋ')5½oŽWΆ—ΛT όλn-σY@±ΗU_»ο>+]ΧΖ6$³?8έ™zŠ]Ϊήωόg>ϋ‡<?ϊᏞφ”§8J;»Ωϊ‘τœ-­‘―‰φύ±βδP›ψN³Ω΄2wλΙ‘·ΑυΤΎWj…wU/³Λ Ε‡σk2(η"ΠΆ—?" ΩGXδ—t“y…„QT‰#(†ƒ³Œ#¬nΑ¬-yΚ³ ΕΩηΒkz£,ΟXό2}aεdΤR{πe¦Šθ κδŸWΗΕψ)?Γk!r¨΅Π)ΖπυŒφΜω… +^”)Α2sΊ=Œ:£,βwX3ψ¬AΦΌ«"Co80žαe'Ε<α<΄ΔVW:°XΌKΏΏŽσ)J3υμC1€q_MΪ€όιΈ₯ΨΓ½σK@±ΙdςžΛοrΩωΆΞ9‡;xJΚL‹mά|ύχγ~;σΏηEβI…’Ιιbό:r{qUΔ`goaƒ!T“ŸΎΰηO}ΔΆ\VAN1Ε;[PL/Hϋler˜Β45XΉ F!Q{Ήζκ$λόoŸ$Ϊ+ΰί±LCH‘X,I©€|«ͺδˆ2Ήκͺ|aΊ,9’‘qΐφQ€Ός@OFGL¨ΠΖlΙΘ!š §Ϋ‡\9Ξ(§4ϋΫΖμ<ԁc%r˜6ώΉ D‘ͺλΝzΉ\Φ:#6(†SΏW{t›'-ea²-ώyuόPŒ}‘ϊI[Y8™_Οτkq'Τε‘JP‡U:c‡qΎg-`\eέ (Dϊ@"3šΖFώn(FΆΔŒοžλ‚'χφΙ†‰ͺJ`Ή°‚² ²‘n$€# ‚αp0•‡'ϋε7υZbF)Ÿ$Š'K΄I°Δΐ}‰ηZŠ=όΛσ΄A1ώRύδμ"_²Ά`/~"±›ΪUaιDϋό5p»uΟg~ (φ`΅J^Iι²sΟ£πItj§ΫΊυΩΟ}¨^·)’ Β0¨"ωΛ}τQΣΌ62ΑατdψlΨg³­·ΏεmvΒ~θ α)Pμ¬HH@1΄O”{+ΛCpΣnͺΎzΜώNΊ‰!_α€b#½4€ˆZƒU†Π»=Ω$l‘ί|gjΨvΑ< θϊ‘³ΜF+ύΥ·j€IIM†ΣνΫH• ―jκrΝΝ^ꦙ΄r{~”ςJRuk°κˆ™νJ}4Ρfΐџͺ ŠAώ,2κΰβ:ωηΥqC1ώEΚΕΰλ:·½‚Ά*ŸψKrτŒΩΐnΣόŸŠδA™ΛxςfΗΣ¬tiˆJR»,o‚s‚žaΦ_ΦV‚'&(FvŠj~.J‹N ŠAC0`%ηΛ χΊέx\°€l²Ό„)Κoκ5Εϊq7­―ΠBx₯ΨΓ½ζ}Χε˟ψψ'Το«ΰou υΰƒF#‘Ώ»Ώύσ?ϋ~ς“Ÿόζ7Ώ! 
3Ύ?υz·¬8Σ-G}H΄/ ˜€bgKBŠAΞl;x―΅ΚOŒ€πšΑ;[Ξ4HΙΔΕ@„₯—ΒBΫοˆJš>Q#~8Ÿ¬NοΝο)Ζ4 8<ΡnΉΗnƒγ°ΠΆΑhjτ&+΅l`φHνq$ΆRαjw ZZQ‘ D²RΥZ¦s―\Ws™L&—SΤͺΡ³~ΡεΘ¬WΜ‰φ1κ’κΆΩ°o†κδ˜Wk€b<‹”ŠAwΩςΛΊIˆΠα|=3V‚ξY@ρŸΐώSΗΤRΦM‹m1χŽ;l¬N8ε›K•…QW O{:!t,PŒh,έψ%τ<>k‚bΘ` J\Ψ– YaZl#„ΚBτχ‰?EyM=ύP f˜>^Ρ̚ΧαΓq-Εήεyκ ‹V‹5 RB{™Mvή²ό5p»5Ξg~ (F(‘Z7\ΌΞ΅i#žΥžb.»ŒΛύΆwόΥ§vΏφΥδχΎϋ½ζO› δ ߐ‘χ ’Π["§H@±³$!Ε°·…?BΩφ =δ±:$ƒ]4+ԁ> „φš&% ήβGαK“blχΚ|m†Χϊ• _΄Τ\«™ ƒΛpl#† £κU3W@‘β„ο}䞨JΫΝ‚?՝ΔΤ²P ύS›K­bΒ‘/L°8š)d|v&t(CλŒŒΪš¬t’ΔuςΟ+ϊΜη‡bό‹”Ё&†βtψS΄EU μ,ψ―v«Ÿ vIΘ$ˆκ7TrΔξΡαο°–„2ξ`ή€έ=~7(8—%ǐƒP‡lŒiΜP ΈΖκάXZΜX3ιy‡I¬‰ΒŽlIŠAƒ‘aθ7_Ν\ΓS”ΫΤkŠA–I"·ΣέːΓΠΜ†Ιu-ΕώεΉV(†η*‹˜_έHΗτ€nΨ­ŒΑΤ>$Ε‡Δg|5p»5ΟgΕψEήΖ蚦'η2λ/<΅KNOIω ›zήνw—1ς*FŒμcϋ —Άvs] )ω/nΉΫΉHΎΌύ–Ϋώγα‡!Jq½3z°φμ§?ƒ˜wΓ…KΔΪK ·]Ϋη7vΞmνlZΗΕ흗Όΰ…ϊQο‡οωΰu.’oHavΧyθ4ΕhPLH@±3πΚ+"S蘘 u φϋρΐ―ŠeάΜμΞΉό`™άEΡ—ϋΡψ—Ύφq’KΞlSŠΆ¨LG‰…φM’uT?ўͺuοNJ)'–ZJξˆBt(Ζ6 pk¨½JΊœŠΉ­?^I^τόΧ²aGΙhϋω†0SΰWkΙ{J³΅:H™ΜJP¨Ψ2™  νέ©ώXs0Ασ©žƒΑΓuςΟ+κΜη‡bό‹”ŠΝΛz…bΉΊή˜ζdb:z±π3“"!?Εg“aΓ ‚4aXύJ|Ύ [n—{W9ΚΣ sΘhŠ“ΗyKύεοzs‡k}|―q% ·a€bΰΑςΕΥα’VT€ŸΦΕ°Μ¬£έAηx5gHωΖ’Η[;Άoƒ²%2C1ާ(―©§ŠΟ Θ'7μn … Β{-ΕώεyrP χ‘½Ž«ELͺΕ=ϋS1][dE;†2°<5fSΗν4x§‚—_ œλn½σ™_Šύ¦ύβΥe§ΨwΩΩτνΓώ ϊi{ζ$εΪΪ²>o~΄}iη‚uΈ¬γΒ– ΚLέ"DιŸρŸμάό§‘ϋί“Ώ Ο―-+₯ΪΥvmn^ΨvY­ΨΉHš³=υ ›ζ\³Κ_·]a‰φiPLH@±3Zl>#“?–Sšz«7tZzQ†!‡=Ϋ6β.a«V΅N―Χλ΄΄fΉŠ8x[‘8ΟB9 e\# r.—ˆ%ŠΖθ@bΤω°ͺˆ\·:½ΑhΠΓΝ εΘUδίK(Υ¦ΥδRC«ͺ…Θ|_@’(ΕΨ<Εθ£Πλυ’lί$kŽ·•Ή₯€dy€΄άЍnΏίm%΄ƒ:wΗ”JΑ₯$‘Όΰ<hJh~T2JYοτΜ‰³χΜŽΡ”3σΨf7:ι(§<…0 XN%Γ2Yσ©YVφΗ%”R2“^Ρg>?γ_€ΌP © \Œ’έ\o€ΝΚIοu€-B]t|ζ‘&Ξσ°ΨNζkš½ΚtXe l£ΏΘ Μν,NΚ€‚αpΨο55Ώ_ƒ7š˜ ˜=―‚sϊ’rIoϋmCSσqŸγζq9X;Γ9w°'φ]υ³jΕ ν²ΊMS²σΓβMφϊη³>Ύ§(‡©§Ša·˜hRΎT³zŠ΄Ro”δτ|_ω*}ώk±=όΛs]P f;˜šΞ&“RXRτ!W‹XΥUη'dX.5Ϊέ.™πššŽ*­$σ“·qSΫhΪ£Œψ5p¬»5Ξg (aŒ_‰+qγ²]’μ\ϋ] Η¦Ε‰Ά!HES’ΟBh]²ν²?g+λΓ†’.ΉvH΅³bΦ½~ ΕΦλ)φα{>΄³a5yκω΅Az€ΰ0b6±™ΈŒ71ς ω•| Mƒ™οα)F‘€bBŠ M:9ΫŒ*ΏTθ‰,tΚ)J΅ΐ:S‚π@μ·."α[4CΣ^¦5‘{ύ`a$Zψδ` ŒΨΝ4ΩGR±»k’\ΝμͺΡ»WΛ§&ΓΗ¦ΌŠGfGIμ- "‘ύΚIaΧoxΝλ $1ž–©V'ΖZ\Œ8ΠΩ Ώ0ύiΛξ„Y«αοδžbHŠ (vΖ41ͺςς]h¨P6ho–\΅ θΕΘΚύ‰Bu43£)'vύΙ₯¨¦œ‹Ω?‚R o=0δ„΄ονΚϊdΩ~½ΥΜDB6Λ{ψbμΤ#Ιζ’Έ°DΉs€QπG2*ЬλΩΫ/³‰ΏΥΛ^τbνηΪiH+6™LΘί¨κmΟykϋi7δ)O|RδΛ‘R©τP­φEωSvMS§M;ΒSŒ"Ε„;“uŒzΉ(Λ…BΞRAVλ:$*ηΥ`VΉUs‘ ¨ε¦ή±Φ>1{’^―7 
2'TŽ’ΥUE.L[“Ιεd₯¨@hšŒzZ΅¬š*“+Λu½Υ3O~¨Ε―ρ°Ϋ¨¨ωl:™L¦Σιl^©hΖpΜT‡ZΟjζ±wΞ £5«EU!γbŽ’λšήσ­Νή¬ZΣΙ^ŠZΧ <(|σŠcζ―y‘A#-7sΜτώηTkΨΦ+ͺ’Ν¦νe–—ΥΣ*3ϋZ­$Ϋ«4I©\iθθr™m½‘ΚyΫΎl–Δτh]σΪί–™}½Q±ϊmΪ°x2Ižo cΏ‰ Οημέ[ηž{οΊ»σ|ήχ=υ‚bΟ?χœ¨:CΫ ―€ΉΣφ;|ψΑ£» άwίήatΠ£zύκΎ1 R¬€ Ε@η1ιΤ3€&βUΏLΔβρX,‘¬+TK±ώ @λ©‘bΠώRΜ„ΜΚΜ‡fž·Wƒ_TΧtΗpα­;}>•–(Q`j[1xΰΎϋ₯ΠΎ„’Ir’t{ψΑ‡Ϊ‘¬˜²r·xoΆQlC=υδ“Ίά˜ ϊˆ(™;{ΞpŠ₯έ.Ο.mΜBϋ–͐b€R  ΆΠ’^€  AϊΣ ,ύΝΨ―¬ξ|σσΛΖλ€@Šύ_ H%ΫΕ‹ο±Λn!%J¨ΆΠΎluϊδ4ΫΤωΡθ˜!`ŸφΉέΘ―”ΊυPvΥWΆCϊ€ ΥΝΞ;λœα"ϊΖ‚²Πδ?³ӝ}L"έdE{Mβ€.΄oωSRμΧ Ε)εŒ^ΘΙΥ]¨4@)ή5Rc-ϋK]•Ρ„xŸξ €@ŠΥ›]ψθ#ˆΖ²EΑΖ“b’;)[©>φώ{|ωrU–Kp5λτ™kΧm‡*ϋίτ~sΤαGH^€LLΤήΞ;μ(Ndt7ύ8‘B‘b²*ε8 q’>9R )P.λz±έή‘Ϊ°‰Js¬YϊτθΕ.3c/š •ώΥm~ύEφbH1ΙuυUW ;#»έ.2¨ΓhΊΔ~΅–\[$ΞλΩΰ3cJ±7#o•ψmi›O9`Ÿύ>λι-υί*+¦'ωΏc‡mΆSΒNΪ΄)φt:=ζ{xΰήϋε=lQ³λWQέ7"ŐbR )`¬άοtΉέ.gηDΎ4₯ΔΖΛlv=ΦRYΡ ›Νdϋϊ²ι₯ρ·6^žυΊab€@Šύώ@ͺ|>?σ΄Σ‡M–ΝΎ‘ŽX‡±ΥϋΥCÚYgžzZφ‡*#λxά RwΜ>eŠmΚ°A΅yγ }#‘²iΡυΞxό ±Z«Υf±[£,Z ™Lζ€γO”Βj6]SΜxv½/5Ε:b€R bΏΒΛT&€ώψΣΪ{™ss―«ςoh–]vΨιΆ[nνξξ–x΄_έzB™a2ΰθъΕbjyκ駞:ζΘ#mɝ=7yRΩΉμχ[oΎΉpAόΩ`πΨ£ŽΆ¦ϊŒ¨@γ‘Τͺšς–¦lΦ±9RL@Š Ε6₯ώ5ι/—%β±·60?žXϊu¦oŒI€XsΕξΈέ7ΥfίrΪt‰Ÿr¨8©)Γ†KΆΣμS7I©π±jR‘ξ&ϊLRΙ‰‰±Rι–/&—ŒžΞ?Ο >ύ΄¬P™_³ζWJK*ύ"T5™`βΏι&HυCuI»°―ΎϊJ6―Έμ²ΆήVT€LŠέ3&oΥs“ωHD˜œqώK弜QΣV}ΤVn9}†lΥ #&^ )& ŐbμH±ΰSO«ΰ)ΥΆš6γΰύ˜}ϊ,ΟΥΧ<υΔSwϊξΨiϋUΐ”Tά·WKΞ‹$šnuΜ°M•­όJ4Ά†5“}£Ώ\šnsHm{Ηζ2ψ”­gly–snΧƒΚ–_₯ΎϊωηŸΗ³uΏΤ ^-’ΕΉrΕΚΔ’%ΟƒWΈ/Ϋ{=δFr;™€άZŒ˜μΫΤ|Œ&"LN³Ϊ₯ƒΜ_uS—μΥ«da \ηΏπά μvφά3>μ™ϋ¬>)MDΟ§Ÿ\ϋΣOR-/Mo²Ι ›τkκ·2₯ζ'³nέκώώυ½_ΏdΌ«yH±‰bΪ1χγ­7έ,εσ]ηΉ|·ί~%œ\φ…ͺ¦ΈαΊλmF•}]ƒ_ΧžΧ‡Ά±.©3Uw_g8¦Jd™ψ5•­yȁο:;ξ =Βϋο½χωgŸύπύχ’ΙΜW«”X°ΎU}’ΉdΡ’Χ^}υα‡ΊβΚγ=NβΉddUMΊΓ1ΝζC[΅κΏ—―ζ&Mκp0Ω—žϊ)d9s䑇εΧ­TYρνŠηž}φfοMν)Άdρά₯Ÿ^?0Pξο—4Ή‰&ƒ”d‚…¦ετσΟƒ2  ΫL#64TIσΖ’…³ˆ€R¬%¬Y³ZJζWF!‡R#_Uΰ‘GBυSΕ5ΤTN₯UΥ“ψ,‰ΐRΉŠr¨"ΉΆΫrλχΩο”O:χ¬s€<Χ^η»υΆ{ξΌλή»οΉηΞ»ύwέ}χwέxέυW_qεηΉf6σˆCΫy»ν₯R˜ό\›*&Ê sy‘:±±ζ°Ϊεη³gΞ’,I”χ qjϊ‹΄O€Ψ‰³?ώΘ-£υφΎ,Mvn½_ΏΈβΫw'"Rl±f—~ύΥσίτ†›šRοKιτŸζŸ%‹›H)€]ΖKμ*Τ₯ψ ΊοΏ$$ŠoAaj«wFφM•S‘Yͺ†Χ4»cΊcͺT4³—T™QζίζfJS;rnJ7ι¬j‰ͺ“&#¨fΦκΰzžj§ζΠμ’lε.’>yςρ'κυΤ«ψξ»ο2™LΫH±s /vŽ Υt›½0ήΩύρβŒΦE²8εň-xψΕ‹ζ4=±YU#v.R bH±‰gΌ΅ 
Ε‘ωnΉM$”,U/LmυŽ>¬ΉdzΨQ­goΘ2)=6μΘ¬vQor£aΥe3J}IΩ/»C’“rIvδΠΣ…σΥv"›άB&vταGJ]Eӎ‘bJΉšnη'–œ΅τΣ&"Rl­$rJŒ˜±?Έ`"ζ6O=&R bH±M‡$QήtƒW2ΕI‰Ι²nf”Ϋ―V¬Ÿj±Ι%U}_š\RζKνwh3λ©ΞTeY՚IS—T7ύΓΪCσKτ8ϊŒΗ±¨Ϋ©žͺξώα²ζΗΆ–bΊmJg4Ι)&R )644tΝ•WΫ%’ΛξPΙ‰ΖΦHu΄ΪδPšμ‹BRWG νλVshriμC‹ή―λΠΌ§½:£ΏΥxγYŒ§ΠO$WχΩcΟΎΎRΜ€ΐδ”bH±υCλ―ΊόŠαŠψV›ΞUtˆ―Τa9sξ‡|¨ ³’3Ά‰nVΩΆ ι©ξ²ύŽΜsm»εΦκ€<—ͺz&Ο+Ϋ=vέmUί*Uv )VR `ςJ1€Ψΐΐΐ…σ\’Š€Ό—x"½Ξ£ϋ’K>ύδ“ΑΑA± WŠ5S•ο먻―χλοi•VΟa}=e’jͺν`&σXΏ•+VϊοΊgΧw»g΅X”υ“νφ[o“Νf‘b& Ε&§@Š >ό°*„/^iϋmΆ½αΪλ{{{₯Ώξπφ[o‹cοuM}}Ψ±θβϊ²SIώFΑ,j«v,vΓpι«Λ†­ϋχθ%8WΈϊΙǟΨoο}δ'*nΞ¬?¬ R¬€ΐd–bH1 σί}Ο©'όΧϋο1—«ν°όΛε†{%ΐͺF¬£Z’Kφu›btλΠ=₯›Ω‘μ¨AΤ%σC{΅ΐ™jϊ’ž‰4%ι€[Χ_¬Τ048ψϊk―Ν™5ϋΒσ/ψφΫo+R¬€R  εR¬νΕ°Ν–[UU”ΡRΚ:z«cΈZQSLχ·o(–―Œ˜μ¨ϋͺCέ,Ÿ·fΎR )0ω₯RLΧΥͺER)8δP¦$§N±Ω:,rfί=χ>τΐƒ·š6CgSšd;ŽJx¬©fQ[π₯Oj¦Z›O™nsΈοώ‡t°šƒLFέ]‡­I‚dϊ›oLV@Š R )@€˜ΰ:ožα€¬J<©ͺόΣlφ3η8SΛS"˜>ΆΥτ*hktΔ–v[Ή$>K*—©A6nΪ‘I7έ¬#—ͺW-†:ομsϋϋςω΅·άtσv[m#w/fdVͺIZvΪn‡ώώώΖΏR )€@Šέ}η]b$όjxk„eν²ΓŽάw_©T’«ΏόςK±8x؁«z^1SΥ`κŒUΆΓ­Γj—Ψ.ΩͺΘ―ΝT7#ΆKš₯ΪMϊWεΘ8iΦν·έξƒΔ:ΊνΝHδ°ƒ6jœY¦Yν2ŽLςΨ#–Y5ώEbH1€RμcJ£v˜E,ΥρΗϋ^,¦΄Τώσ΅TεΧ^o‚m(΄/;V‹Šό²IθΦ™ΞΉ·ίvϋΙ'ž$Λ\Κ8φ)r΅šYΝ—AΆο^{ίδυ^sΕUΫl±₯Œ#Υ8vΥΑhΗ}μΏύoΉ―ά]vδΦιtϊ’ /’Ι°#^Μ}ρ₯M}€R )€Λησ§ŸršΓfΫy‡υόεϋοΏ—“JHι*]Ι/ΎzόΊv˜*μ5Ν搝σηΉςk֏œ=τΰC”Σ™•vU lŠu»­·ύΫόω•)žKΧΡWM|ΰήϋTxšlυ4DΜ=ώXΰΐύφ·[­²ύθΓ‘bH1€ ŐbΝ’λλ{υ•p|Α­’jλτŸtό •Ai “c²/1b}«ϊδκ`qPΆ/ΎΪzΖ–βΛ6θ°jgΙ|ΌΒ}Ή¨υλΧ‹α’@°ϋο½o†}ͺ‹9 /&MzξΈν?όπƒΊF:«Ο?ϋ<ός+ŸφY³_)†@Š Ε΄„)“cvxϊΙ§¦ΫjqΙiF1ώΣO9U–μΫ%Ξλ¬Ήg†Λͺ η+;&‰“+W¬Pƒ«­,»ύ–[•/S^LΩ™Ξ3dœ1' n‘‘bH1€ ŐbΝ’j‡ &}ΦζσΫnΉ΅8¬ιΆ©βΉŽ9βΘο2mΔ΄«ZώεrYrXŠeυExmα˜φΤOκžZ{ ίΥι#!cΔ²½ϋφ;2ˆP©Aύ\f(["ŐbH1@Š Ε6’σ¨JςsΔQ+W¬κR‡χωο›fΛewX‡«†Ν<υ΄υCC’’YΫS4Η•—]n³Ξž9KΝZώEbH±M Ε ις o ]ͺό€˜H«Δ’%οΎσN6›­5b:žkhpθΈ£1*‹Y₯ΔΨ_,«ΝyΤgϊϋϋ₯–ΩίηΟ_³z΅N.)†CŠR Š=ξN…»§X€ΨσΒ^+ΎύφϊΏ\λΎψ’ Τ?Θ€“bH1€ Ε ”τT₯X)€›ΤFL/aYZΏΎ‰1‘bH1€ Őb€ƒ6—bH1€ Ε)«V­2ωσb$ΞUΖ§ιζJ,9SœΡO?­[·./MζΩX[·nuϊή―_ZΌhφ‡œίόάτcώK1@Š@)U•bžd©“ž\.'~KΥω±.Rμ\%ŒDχ4έΞ\Όh槟\;0πKΏ€Ι$kbΔ††*ιo^_οL,9«ωΉ}8[?,R¬%R,ŸΛδŠεΊϊ— yΥL5Q*d»cα.ŸΧνv)<_ ιIηL†/ Οaψ_=θοο\.ζyεΛO7“μŽE#αpψ=ΉFlL>“ˆ…~Ÿ§ϊHn·Ηλο Eb©\±¦{]οDθ Κ;ΙΛΥŒΠVίEFŸ—3ΩD$δσzͺγφϊΊΒΡD&―zhJƐιΈ»s‘T6Ÿτ½`²vνZωσ{HŒ 
‘bη‹λ™ˆφΩφφΎάΫϋŠle†5ωm:ύϊ²Οοώ€ϋκ₯Ÿ^ίά”νYz}χΗ—}8‡H±H1Wη|±lέ‹αͺœπDŒlΨ/ΧΖΗε₯ cŽtv*=΅κμ\Šx:αtΙP/ωDΘ/}5σ=•Ί1"ΛΞ–›ΑνOdΔ¬΅`„Άω.awυ|RζYˆ½γγ Δ‹#CφΈLΊκ{ΐdd``@ώό–|Ά±.Rlή’Εs ΡΣ/’GšŸΖšdMJŒXοΧ//x„Ε‹ζΘܚh³%Fμ“ξk …‰˜UYήΥ’…³δ]!ΕZ ΕβZΉBΏ-ς‰jog|TŒQ9›pwΦ…/’2mΑ€ΙΜ;Cϊ|ͺ$>₯V½Μ{:Y©ŸB§f§ΛεtΦJ₯`ΎRƒ~'uŒΰe*-‘έΎK8•‰zυ0BΝTΌ‘β†Ÿ&[ŠΙ½`2"‰“« ΖΜ "ΕDτHΞ£žu?5Š“”IcVs>όΰ™Xν|ɚ”±‰š•¨:γ]ΝCŠ΅ @Š•αΞ*Q‰z2%©Z wΈ¨ΝK.α-:<]‰TΆT&5ζ± ocι‘j…|ΡηΓΙTPOΘΐνρωύώβ™ϊ•˜V9‚Λ”4Γ’~ήR1›Jtκΰ λ'2':™1 lδ‡Βιb Fh―οβt9υ ‹$RωjΎn©˜λ‰τ5‘+‘S(’Ρp—Ά±]αHTGzδ³ΐ$Eώzί4Αb€Σϊi2Ξ )H13ςZHΈ=uφτΗsΊ¦Φ‚?šͺŒE9Χν©18­’/#σqcΙΚNε]#‚f\•VŒxυόي¦ήwRLG]#3 δ[0B}7œSΞf’zΎZΉŽP)΄Ÿ*Wΰ$X¬――Oώ/‹@Š5*z˜R bfd’:bΘ—ί8”3ΡN-&ͺ¦)σ 5JWLΈδ["_”W1ώΝιOΙυF('tJ©/V6›Kwυ‰\‰ΌξX;Ρ=u)΄ζGhΛο’qr&qˆažGςΧ5χ“}IO&;₯Ri•^ )†CŠR¬5{΄ΈtηΗλΥT½D7%jcΗ$%N<†9ΉQX‘L©•I^ΡvRΉτ-Цφ,£ΝS°§ΠΘ;)§½΅9˜MŒΠζί%”2}ŸωΈΞ‘ŒΣ@Š€θ0‘b*’ϊbH1€R bNY•Ϋ7u(Nͺb*ΤYsΣ§ƒ°Z$_ΊωJγδƒξ±ΥL … kd2½“ξκq‡R͏ΠΦίΕ,Τ1ΪAbΔ‹©‰~BŠR¬ύ(&έγDΟ'ΊΖͺ8V Υη;LμF δ‹yί…tΨ/35ΓεvΥH±†ή‰¦™Ϊώ»Θy€R ύ„€X;’¨ζH:}ρrESŠxtϋܘς%˜l3ωΕt΄6(ΜιςψΊ‘H4Ρ“ΜdσΕRYΏ}ΣΖή‰f“€€³BŠRL—ΫΧ9tŠB·«SαM•6κυV …RυΛ—ζτ ‘VJ1ύΌ§§+žΜJε:$N£οDΣΜν]bH1τR bν_nίΛͺS™¨·ΊμdbΌ‚ξNΏD–ΥE1κά€7]ͺU!Ε:J_΅VŠυܝUρŒωλ ΊFnΪΰ;)ζ’Ιd*•L¦sεζGhλο‚@Š‘Ÿb€krϋ`‘"΄χ We‰&φŽ˜”ϊμKw :œ+T¬Iχ tη+¦”³ΡΞΦJ±B¨:AO(ω›ΡUξQ“iμδ~­ΰ ͏Πζί)€C?!Ε)Φώεφ£+–‹u*œΌ© ιJθλuΕΉƒ=ϊ–O½ GJ T+€Xc«%–3‘Ρ“iμ$Ί\ͺ³+ΠΣόνώ]bH1τR bν_nίκIFΌΥe'ΣMm ~Αι‡fN2€{»ω±”Š7Zͺ˜ρIŸΦK±zru-}ΣFήI>!BKGc5?B›~—φ—b€CŠ!Ε)VJG:55€&°,€;9}fκ$ίμΤ«[j²1Ÿ#–-›…D΅^ŠiΥen‚z΄0aeΏϋΒz ]Ι«©Ϊ»4/Ε<ΙRbH1€ ΕZA^˜Α)™­uΨ©qϊΒΉ±Ί¦γΞάέ…ρ|tϊ“…ZRξ ιΫ΄ΆΠ~282o89Φη"~Υg_4ύϋήI9©—5hρν]j;Χfυ:#ιbbH1€ ΕZ t™ΧY/g7–hN(šLgσ…B>—ι‰G|ϊͺA$U+ΪΘ9Ί?“ŠΕb!ŸM&’##ΈΡ ·…RL'$jάώXw*—ΟηsΩT2ψF‰¨p|c%δ₯~σδ²ιxdγ‹žpζiΑmύ]Μ;λͺgϊAΒ‘P¨ΛίΟ ΘbH1€ ΕZSnΏ ΛΉŽZ2ΓKΝ͚)^IΛ„=-•bB.¨γQ9q5ΙΰFη‚ΙίϋNœή°ŒSK FhοR ίΉ»vΊ ŐbH1@ŠMεx΅άΎ««»ΞŸdΊ#^ΧΈκΖŒIŸ)ωXΐ;ήΟύ‘xΎ¬«Ώ«4ΊΨ«FN„()€γΎρŸ₯+ά]¬>u2εrκtΛΤοy'ξp"S©‘‰Ϊϊ»„;ν±σ9kΡ!ΏΊ¬ L¨€R )H±¬ΦZOΤIQRσρH$…Bαp4–”½bΉR/₯B:Ω‹FΒ!!‰ΖzRύσML‘ϊ,Ζ£ΤόW Q¬ί+ΆŸΎώκ‘—^Όΰ΅W―υ₯Ο5―Ό|ι;oίΫOύ±αax+Q Qμ4Ά»cvπ±oσΐ¦ΨΦχί=ύφ[7Ώχξ½±»TγΉχwnψ£‡κξdy+Q Qμ8»?>vώTηνa 
&Q,̊ΜθθžΑ°&o%Š!Šνξζ‰ςΧ·X|ˆϊk€ΪD±Iε©Ϊ;YήJC+?Ή!Xη†βk:―ύ•@›Ε`£Ψq|φΗ\’Ψ’’ΨΦ·k+―o‰] c¬=ϋIΚυ Š!Š!Š€(†(†(+ΕF£Ρ°±ˆρ‰bˆbš(V–ε°±ˆρ‰bˆbš(Ά½½=l,b|’’œK)ŠbΨ@ΔΰΞρ Š€(vpp ‹5±ˆΕΰD1D1¨Dβ"^Y–>w.c:ε­IQ Q Ίέ "ŒΖ1Θ ŠΠ)₯h"{{{™VŠΡΗH)eΕh^―M€ͺͺL+ΕθγΔ1Θ ŠΠEQDιχϋ™VŠΡΗˆcAk·ηΊcnPΆˆι#Šΐ`0°,Φζ5±8Ή…Ε°.΄ΎΎq€,ΛάΔΈcθ1zkb-%ŠΐΦΦVwLkOλŽΕθsΛ!Š ’LξQφκΕΐΙ­I t Q ϋb“{”‘K’›TU΅JuL ‹ΖX'ι3mG Q €ιd0Œ» «)†#–;OQ i¬(Š^―—RZ@&„Ε(c 1V9¬6Q D1Ε@Q D1Ε@Q D1Ε@Q D1D1Ε@Q D1Ε@Q D1Ε@Q D1Ε@@Q D1Ε@Q D1Ε@Q D1ΕΎϋאJ@€Ε@Q D1ΕΕ@Q D1Ε@Q D1Ε@Q D1Ε@Q Q D1Ε@Q D1Ε@Q D1Ε@Q D1ΕΕ@Q D1Ε@Q D1Ε@Q D1Ε@Q Q D1Ε@Q D1Εh,Q 8wZΨ> tΈ:&Š,·°½%»{μ-Yc’ΨιΜsΨΞΞNJι‹/ΎxύχίmβGwόO)ΕσyΕNWΔbΉ ͺͺΟ>ϋμέ† ~˜WU?Ψ›ήΕD1ώfοΞ™$U( |ΜVl€=ΡBCB ‰½bvg˜ƒ9°g†ΐtsΠ3Ψ ένέγCUde†§“IU―Šύ)ύή‹άI³¨¨;όθElYVp-bω—ωψΛQ?)€ε­{y_ήΖ―]lyc_οbD1±η(ΆvΕ¦Σ₯ΣξΏvΜ†@;vΗψˆb@;’X „.mgυI›ϊβ.A'Υb(NBγ7R½˜J½$Έυ¬V£Ψή,ϊ“₯ {iΝΩ-ΫΝM"4~+Ώκ·ΈΙ€Τή©WΤ§iF±ύτjΉΩ€ΪΔχρ’fη!Ql:»βN§bΨΕA§vMω;D1QŒ(f…ω¬δ|kμ‹$―ΨΡ¨> [θ,7N/e™Η#tN\­>ŒΧ7 ŠŠπU―’j=Šν€ωLœπ2©¦Δ†°SOκ³ϋκŽνϋŠOοtΞ‹<aˆͺqΤξΚpVωFsƒS| l±ΞρΒ8Ž<γeΛΟδF“šbύrN˜vγ,ε<φMςj.v<ν9d#ŠυyhN(J²"ΏΔ+4Zλ\bΆ'Iz4σ)S€( ŠΕv°/7»X•'[ο#Ι¨^τE΄ΦδΕ·΄3žJueΌt*GυNΫ9sΑ—ΧHu vžΠhۊiΝE XŸs“νμfμsGθΌLvE₯UΣυ][Wυ(Ν(ζΌόZΗ1»ΰΛƒSβYF$݈b―.—vκFθ§+§}‡Ό3€>΄^±^½«³ΠωiϋVs£BΎ}`TM{§ @’ΨqŜ’—ζόZZΘT(Q9~¬.zΠ±Β~}Η.―žΥ;Qlο] 4ηfΦc™Ή­˜ωh§Χ̟}“½΄UΊ!ΗχNmύΧj%υ€4ΣΩ1'ψ’<½·”icσΥν•b'ΞΚη/|œ‡ΎλΊφI7ν9δ˜ϋΔΩω ^©bטΤj zuk>»Ζ³ϋΗ@’ΨQ±R,Κ;}/ŸγθE|~]μ¨β—ΎIΩTΕϊΪ¨‹TοD±ύX'Ϋό>Ν&ΡΖ₯m+fΆ©…SNΫ·bύφ€(D±οƒoŸ,₯’σ)ε‚7„ΖΓ‰QlΜυYn=4ώΝΥυ΅σΎ‹μWΑeΗMφešb3ŠΥ_+ŠΉΥv†s>3ŠUŸΕS›ϋΆΨδΖΥΎCή@—β–wΉγ7εT»£ΨώρΕ€(φ}Ε*΅b:»BΉj}‘–ϋΗ±ƒ€VW²υυ‹gEbiO,ζJmD±έΈΊϊ£“ύζΒ1αG'G»P2οΈΙVΠ©;VŠ•γώ©­ΗοΕ4cίδix%ήtn潇¬ IΌoΌRlοψˆb@;’˜κ/0;‹ω5…v<«ΟP„Άx[TQμ‹0–'ρΜζγœmκι―Ύπ³nΧMΆ.Γζžbv>ΘύS;rΣΘ‘«³σΙ±VΎΦ`ο!χν)ζΔζΈd›Š[φiPϋ£ΨήρΕ€(v$D1Υ_,½μ€ν| ›ΫWΙ.ύ ?ό k¦›ΞuγΛ7’Ψ—@?\s§Η¦b]Œ{or\)έPœΜζΈjΗ‹b²Οέ3ψθ5CdΏκQεŽCή‹bm"Άnfq²…1©έQlοψˆb@;’˜h΄e\ελ”6£z&ΗZίOίΚu£ Δ:+Μ•ΪŒb_>™yb[Οκ£Φkœσ΄χ&/’’y½»8Bη₯σώ©/Š™[ΞΫα«$•κ·ΐOΫ‡Ό33B +Έ­£]~Ί οΥD±½γ ŠQμ؈bq=©«!·Εk^%ir ܍Τ₯uM\M›Qμ« `ΘCarŸ›—Όψ–0xI»ϋ&_Ω^ahŒpU;ονqŸ”­'t–Ÿ•uΧχ]SžGθ’vήqΘϋσΛΘL―~'ηΨw-ρZΨ+΅?Šν›2Q 
ˆbGC3w‚χnΆ4©/ξ┣1„ζ,Vψ­TΫQlο4Sι¬4±ϊυ~jλΛέ7Ωd.Ϋ?΅cF±ν‰θErΗ!Ϋσ’—ΐχIšI}aΫ9~’Ŏ†(¦¨ε_€zQ§Ψr΅’3΄ΓBi6’Ψ— `N\γ'›I=.–ΠΩΡ°λ&os’^ivLm_+Ώr3O+σ“#ΆωυξCΆη5]ΒΝΪI­₯Σ*vΜ[±ρ;Ζπ=£€(F‹+υ†ζμ ΧΜzei‹“ηˆ–eέ€ήREΉΙύV3w.ί?€ξWkSNΆ0.½ϋ&{ΥΠ§‘k2ˆs}Œϋ¦f^Ρ݈bζ”Ν(ΏŜxΊο΄}•-ΦΩα9ΥbΧ!›Ψ>‘εŸΞΈW]˜·bϋvŽΰ§ΕD1Θy욺,‹ΚͺiϋY>ώL›λΆδ<Άuueέφ“<ώΤφ3'·MuIYΦuΣ“άΘrΪO7³¬κΆζcL€(‡GφF1ΰΡ’€(φών+ΒΏώι«(φ§ό{1o’€(φ D12Q @{€φ»ώεO~&nύμιwΏ‡HcD1Qμ@9μ·Ώύν/ωΛ_όβυWΥώπΓςΰό§«εŸόώCϋ{`ω¨Ή|ΰ\>v.>?ΕD±Γz^Άό~ώσŸ·ί°|ψ\>‚>―SxΌ( ŠOΩε¦[>”¨oX>‚.DυΏΰρ£€(v ebγ8Άm«Ύ! mΫqY,φQ SWηy^|”ηε Ε~ΛΔ–mˆbψφQlω Κb1’Ψ䬑κp¦Ψ7μrR@;τ2±k»ξ¬―Ύ!ΰΊοώs[(ΕΦLU$nΩΡxΌ1žqΓ©&D±£/“RΆ©oh?’R²XŒ(φΎ©ŽΕ-'žŽ7ΖXb¬@#ŠD1’ΨWbφ£XβΩ–ύΔ²ƒfV@;zϋΝo~σ½’ˆbΛΗQ’Qμ(+Ε€(FˆbD±yμλ"O“δΌH’μR4έ ΧΆΑŸ^Μκ‰μκ2ϋxl^J#ΗΎ­>9Iμ’eΥφγλ8ŸμΛHά²Βn~zI¦‘+σΛuΘη$ΝΛz˜€™‡Τe‘₯Ιωι ¬(λn˜Υ½Μ›πζ%ε4Τež>ο’Χm?+ˆ(vό?_ΏoQμ€ ŠΙ‘Š|G¬sβK-Υ‹*~2i₯μrΟΟ¬S©žΘζ;–x“εž‹N]M•-ήηΦ³z6΅yπΖ©έ0ιfe›‹―’%,»yΧFϋv9­έRΟλμSZΞ ΐC ŠW’ŽΕ ˆbC‹MήyTOκΨΥΦqΕ‘#4NT]ΛQζ[β~ΦͺΕ\ΉbΉŸ}›#F½ˆΣ\•4ΣoŸόό[κœZ©Εˆ(<~#ŠΝM"ξc‡…ΕnY+Q¬l»vΝλ+ΕΜYmκ‹»|Y²ζ‰;yΝόEQlͺΟβNv4(D±£!ŠňbΓΙ2U]Šβ’œΜD•΄³ΕtΪγ“] 4–Uέ8K9O}qφ….ϊΈ¬«+/Ω%½ΧG&—K–ei–χR©±°…ΞrγτR–y8BηΔΥϊζύΒI«nžε<M‘8―3}AλΓ΅[šεΕ%\γ%/m’Ψ‘ŀǏbD±©Š„.Όtκ™l=βΈηζΝ(fϋi^u}ί6uݎj*τC­B Msμ˜Qμ‰lΞzR:ΟκE©Η:ΛOFυ’/"‘ρ[ΉΕΌLKƒy ―ίͺvG±>….*zυb<{–9BD±Γ Šňbsκ =0eRiΊΤ3k‘Ε,/™”Nφ—δFVΝΪ«]h½ΕΜ€eΗΣːkύΪ^#•N¦zuŠΚΡ<ηΒργ²ιη§ΓηΎ[΄Wέ0οbSβšΟœκ¦jca’ΨΕ€ŽbD±ιμlΥ™yl_t£ΤφΣvΫ Η‘o›ΊΘ/η(°ΕbW›JGhόψG/–s„0Cή¬Sc»ή)N?2₯ΩΕΜ[znΜ[#/ϊχ„y―Ε‚(φψ@#ŠΙΦfΩΩbD1ΛΏΘ7KX›Ε'Ο±…iW›ͺXάOϋ*Μ9σΕ&7LΊω ’Ψ\{ϊωͺY™κ³kd»c@#Š=>ňbSε|(ζΔ΅™Ϋ{}΅(VοŽb‹)}±Ν­ΖέQ¬ρ„&ljbˆbD±Η’Qlέν(Ά³ιΘώb “εx~xŠ/Eζ~­•bξ{Η’ZŸυΨTy| Ϋoq“ωλD1·šŽΎR QŒ(ΕΨSlT―τGλ‰°άσt_«­½Ξe'Υ³ωμμΪSl,ύ₯Yν'§‘.²(pvv_iO±Έš6ΏίΰT κ(ňbD1’؜Έί>Ω&ZΏ±Oε+ŊΠ~w/ωαdίŜσΫί>ieT:Ωε‘αdΝ‡£σ“+žκžΒ«g}4E$4vωα Y_4Ν–Κ›a+Š™·τ’^ ›oŸ<.€(F{| ŠΕΤPœ„.ΘΪ—8Υ^‘ ‹αΎ(6Ŏ0£˜~έ»£˜πΚ~VJI©嫜f‡Ν¨žΙ±,­JεƒZΌώΒΗK§ίˆ\?©ίΚ‰τjΚ­(¦Ζ2Ί mΤ³Ή[†―±Β^’Qμ±€(FΣ ‘-^±άΰœ$ΪC…Z*Ίk₯X9BcGYΩυΓΠ·yŠΧ΄ΥRS >€kΐya”€ΙΙ³ζ/Kήt^”ΥmΧχ]y9;B禳VΎ΄™ΎΕγΚ-uόψœΔ'ί―—^ ’Qμρ€(FS²Λ,q—¨οίhΘCq's_­1·Ε*§œΤ’I}qgς“ΉφΔ½Ξυ€vG1ύ–nπ’I’Qμρ€(FΣMMf‹ αΝc•U옩Θ0%ΈŸ•·Η¦žΠν©N±ε\ ZώλsGlσΣΖxt;Š•“>ω6ΫΌ–€£ϊ.ňbQŒ(v5wiδ[b…Δυ 
Ս.Σ‚”Wjέx‰<±Β>eu_ŸυΛ$“~lΆeF±ΫπTœͺκvœΏό”cΧΤOYVuΣ £TŸAΚyš¦q§iš₯TωrώWhΪ~–j‹‡Ύ©―½«ŸΆΫG}[WΧ+•uΣΣ¬ŽQŒ(ňb’Q ŠΕD1’@#ŠˆbD1€(FňbQŒ( ŠΕ’Q @#Šντομέ kΫXήΗρΌ"Ώ?\ŸK ·½Zφ¦μΝ…ΑyΌΣ°­“₯8»4υΞxΖx/PQš,nCΪu&k“’e©˜Ζ4qΣκiΔ$ŽAιŠV2#ƒžΏmωX>‘k›zœΜ©s|tbΨύ"½~ύϊ³Ο>»~*•Ίή@A€7ιG Š!Š 6@Cσ‡ώPώώχΏπΑ‘.θG4€†Yƒ ΕΕQ QΜ‡'Ožόφ·Ώτ€†Ρ` ΕΕ`π Š!ŠωP*•FGG#GΠ›έή§X€(†(ˆb^ΕΨξξ.wΛδ‡~ΈΊΊΊ³³σuύύ“ήδn₯€Z€(†(ˆbΕv[}ΪJ?F—––hρΦτ&ύˆ°ΑτAl½€(†(ˆbΕΤΒΒ‚³ˆνννYžh€³‹ΡΗ­gκJIΚΝ ™L&•Κd„Ω±€κ¦uš™†ήb ψΐΠρE’’ Š!Š Za,‹΄Π…`VhX€…>N“XJΝ₯’!wΓαTQ3OeSζΪΏT ‘½λΏΓdόŒRf¨η/Κdθ;·ΕΕQ Qμ­U*Ί€λΟώ35¬kΧ%“Ι‡ŸώΉs1vΧ€7ζά_Œ&‘©ώφ·Ώ}όρΗ49‚D‡£ƒZί"M +‘Tλ΄Ρ₯”£ν₯τwz¬’γXΑ”ζs  {’τTpˆIH(€(†(ˆbˆbo‘V«εσyη  ½ιΌLŒΆ·zFƒ‹u›ŸM ψVΊQh¨W™ΣVt)Α]|Τί¦γc ΐUEο(–FD1D1@C;‘"φΙ'ŸDzC˜΄zFƒ#½‘τ½‹³#CœΰHl6WX)δR1φ3f€dZ§ˆ©sssΉΊΉ\‘hφ=ŠωXC!Š Š!Š’’]¨iωΝo~σ駟nmmmllά½{—ξsŒ8ŒŽŽ_‰Υ3L‰8Π„4-MN‡ Ρα؏hύέΕIΰξ‘Έ{$5)ΜU1‘Δvub¬&]-I’(UΓbtU)›τnIΡΝ.dLC-ΛΕ"—ŠΕ’,+ͺξ˜ΛύΈμ}B3ήΝ₯©r©~‰R’•²jpkͺUMGm.΅·v§•εϊμ_[Q5χALϋ©oJ*Κe­ΛΊυrY)•μ/TfΛφζσŒψ_?Œ&­/Ά·(Φ8Œ–rF1±Ϋ—m”εƟ$·αλ{α5&—ΛΞΑτ~렊f˜>N Š!Š’’Xχcw5n_΄ν—επκΥ«7nœT£©hBˁGeχQφq1sndΘ)V([G•sΑ!§°l»Ί£J„І!¦Ϊυ,.jυΩU16ΰ’[’ fΪυvΎ˜Ϋ3pΓΡ‚ΒŠσΈ#’,ΕΩ!Ψ»©ƒίδήqυ–!Ρή†Rϋ·—fΓΑ€λΊ’ϋ@ΕTΘcͺ£kp0₯ΉDΠeξP¦ w»Š*”‹Ήψ‘Œ΄Ώ%bΘ™hΘuΩ1{Ω|œ? 
c 1εGΡi+ΕΌOϋs"ϊJΖεΑ‘Ψl™E*§˜θ°’ ΉΑ)Q₯ΙΨϋΜH’@λςqjϊΕΕQ QŒΆΊg׈±"ζtpp@?zϋΫ'i’—/_ZGΠAΩό΄«?ŒbG ΔUΛ]!td£σώ΅@(Τ1 !ifΉΰ|‹°+†€ΫW¬E‡Όζθ „ΧS(StΩδ>hG±PΟQLž³¬θœαΕ‚ ιΘM xLŒε >Šy)6?`ΚΡγ—νΑΗρ±0›‘‹½G{G±Π±QΜ”γA―_ΎΔΣσ)ζπ|NIρqjϊΕΕQ QŒi MZ]άΉsην7Ϊ§I¬.θΠ‘ZLί’XΘY!βbχ§S&Έζεέββ‹DέƒE±B4Πy!H, ωΊdωhQΕtί¦ΚΗ•bj.ΐ]̍ΕΒ‘£Ώ‚G‹ε”n[e‰¬άtJ|σžShpI訝ΑP8‹rWΤ₯ŠΊεΞΗρ»0R.Δ<ΎΕ•bΑxγ€λBθψ―•Β“―SόQŒ]Miω85ύˆbˆb€(†(ƞ,ωψρc« Q#-~ψ!­Ωκ £Α‘šΔκ‚͞Piυ»΅ΠγήIF[ :G¨ξ ŠΕγΡhκΡg©Žέ¬PΜ\rƒ 8ƒ”ΘϊΗ\8ΐ‡Œ#Η %rͺnθj1ήρώ°¨uέ»]UJΕ–’,βG Ξ¬\Ά;:!1δYηo‘)ιόžbμ²»naލW Η›;±ιŠΨΩ£B’ξΕBΉRΩ4RŽVΟ—Ν’γNΏ`l…¨KZξόœ #jg- b=d™t«f”ύΐϞbμΖ―'6W4ΜFKλΌ"L(>O1‡7›U4]• #G.π“Κš‘+μ7bGτqjϊΕΕQμˆb΄/~ΔaiiΙκ ‹8Π$Ε’ΥΟ5e|# ^t‹@L±Ϊ”Ήθ‘(¦Ν%bΡXC4N}ƒYq†`J;ΕB‚iΩΈ«~’ζ€ψ/!δ›F‘9§Vœ‹F£φ²2’Ε¨+Γ܁<ž>ιΎvqww!λe|ρ‘Iψ;X[ Ρ 7ηδΑψμJs'{S-Hύ1˜s…b·ΣνηŒψ\˜)ΟvξaΗ¦ζ{™Ο§O²±†β ΑίΜ‚”―S̎ȦfΏ§,ŒtάUZfk‘ψI|œš~D1D1@γαφIοΫَψ{{{–'ΐφοο}~ZΜ·Ε’ζ=λJ±αŒαž‹ψ[]撘ƒ©«%IΜΝ ‰8»Y«$μΈόΥm¬άωˆb¬}8b‡ξφ¨JI,δ„L‚έ[ΗΘυXοKΞ+†β+γvέ?I ^ΆΪJ™γμ(Ι Gb‰{6¨?gΔΧΒt)Ε=±ΑIž »QLγήgGFΓΆ(KtφΧͺω;Εlfώ―Ϊθψ’ŠΕh)n‘>NMΏ’’ ŠyΓFϋΊ_»vp]Œ.s½’ή€±"ΖΠ$4Υΐl΄/υΌ§Xά«°£X :/[αξΔ ΈD1­b!ηΕR’Ξ…QΜ(…ωC$Κά/^Κ± Ÿ˜“ˆb”½ΪΣŽ|σΕLˆλ/.—‘yΜo*‰α!‘xN·<ψ8#>ζ9˜θbό­’{v„7vt?§˜;"»|ΞγΤΈ,Η©ι @CD1D±J₯ΒΦΗΜu±7oή€R©HΓ6Dˆc1ΪJŸ1ωuύύΣΉχ)šŠ&δŠ”…6ZŒΥfi„ΫœΎΛΈ\Έ£ΜrOŸδ·7„Ž+„ŠόFι|Σg]KF0ΰΕΈβ;Š•ls.ά…!ΐ·ΕΨδ^ΗΥ₯\& v,ζŠεΞίρ^˜Ώ(&%ή>Š…†zHhώN1wDέγΧτX€Sˆbˆb€(†(Φω|>BZΧ‹}ϊι§[[[₯Riqq‘]#6::ϊωηŸonn²‚ζ4Ϊ9‚ΣGθƒμ§4a.—{ά@wM²kΔ-ΓκV»ψ=˜<ΫPTρŒSμ1ξ>5ξA–,Š9ο>#‘„XR4έ`κ{QLg³•¬¨]² ‹(s’’j†ΙbboŸτΕC/Λ\&Ξ" ^xώΈΏ(&₯ψ22lς“»}2š“ ]kΣuΓΑη)v?’Ώ(ζγΤτ Š!Š’’X­Vϋδ“O"žΦΧΧ­Y–ΗΗΗ#= a4Ψj G<ΡhV™ΚμP§Ψ\©σώΑ9g7`wJz~%νhmΙlF€ςJˆϊ$Ώ)•³M‰ρΰ;ˆbf!ΖGˆ9Ε΄xzͺγv9ηqQΜπ±Ρ~Θk£ύ‚κ/Š5o! 
ϊg΄dZLi.ΚοξοΒίρWλΚΉ”OFesϋŽbE½ύ~h¨km,’#ᆑψlΙη)>‰(ζΤ’’ Š!Šυ©‹Ρ…Zέ£›"-‡―Ύϊ*›Ν:n₯δяh ³h’nW“Ρ‘YλχΕbœ`(–HeR ·;Ό’JχΕοVζ7ίρQŒa3ι₯Ωΐ;Έ}R™ qb9Y.I*Κ*+&¬λ±£Μ²oΜ#ŠQLš])J’XRω58³ 3/ͺτSSV’Σ‡θ+ςΕ:χt % Fλ\K™^.ΕςwFό,Œ?G$‘+¦eκŠe‹φΕH(ΎR,Š+’jΪ—"2αΜJσ―rqn€γY«eŸ§ψ$’˜SpΊΠ–c{PψυςεKD1’ Š!Šυ1Ϊκž‹ΕθΖΖD"1??―išεζΛ/ΏόΗ?ώρ§?ύιwΏϋέΥϊϊ'½I?²άΠT4!MK“Σ!θ@t8:¨υm1e― Υω+ͺ Ε?IπxΓ"ΕH`$  {"QŒέ·θ-(κ|Ύ D£anOvΆ½·%–sΏ°nΡDLΏ’AΆˆΟφ$ΖƒόΓ|α Ds¦εΚίρŜъη#бΊΔŸ5φ#OXΩ"ΎNρ‰D1§N1ϊŸ@ρx\ΧuΛ§•••_ϊΧOŸ>EλD1@C³wΈΏsηΞΔΔDdΐΠ’ha΄ΌΎέE™ 'TP ξξB(FŠBΤe–XΚqIΠH©>₯Κ*†—`Bελ?бϋ»c—°i,ryŠ‹*Ϋ?=tt’„Τ=šhάIœαx‘Ϋ=˜šw{ryŒ€ΗS8ώΞˆΟ…sεψΘOΕγΟ#;kφ»―μIΔΗ)φψΓΣ₯QŒˆS§ ŠX€ajjŠ»RήΫ?ωOvα|?Ί’’ Š!ŠQrΊ~ύzd€Ρςt]·ϊΔ,„‘ [K Η2Υ΄ψη' %Γre¨Ε•\&U—™ΝV qχCNvn.ΰ{—{ &Εμ+›Jμ>΅ξΨΒδΒΡ:ˆfζh΅ŽY₯φ δ»ΠˆE1~ άמK…G“Πˆ°’X†<;ČΜ:ν.σk…LΜ5ΏΠ~Zt<ψ:#ώFΜβά‘44+¬ΓμXα9Σς¦Rα€k#¦šKΈ”±P4STΩ}b3 r]μΤάMCiΟΙJœS§Š“L&ιR[=(‹lg‰>u1D1D1@C£K±"iυ—‘•ε’$Τ‰RQ)k¦εŸ&ECΓΓ‘†‘”Κ‡&\2‡VεR}o―’¬¨ϊΰ΄C-ˍe•”2 *žLΣ0tbFίž©Š}YՌlZ}υφΜt6{]P?ΈAΏs±±2šέϊwΩί΅Nί΅Ωυά5~Ν8ΉSάS0ΐ¦§§#GP&3MΣς΄½½}t/WzGQD±wQ ΕΕΨ]“ƒŒiBfyσ6ΐΉr#©hradΘ!|κχNΊΆ‹΅-§Ϋ·oΣΖ³Ί ςυΑΈΦ4ϊί„ˆbο’ Š!ŠE|Œ9•Ην?ΫΞήck§³ ]μ―ύ«εfŸž8δ]ΔΕNEϋΏ3ΕΕΕN”Qς|¨e #©œυ.φΰΑnδΛ—/c±X‹’’ Š!Š!ŠυQΜeΒ‘α!‡ΰp(žΙ)Ίί‘.V(œO:ΊqγΖ@1D1ά> ˆbˆbˆbοŽI Ά:|ΗΊΨgŸ}F?ύςΛ/ϋίϋ/bˆbˆb€(†(†(p »Ψθθ¨(ŠψΓό1D1D1@CC8ΝΧ‹ω/bˆbˆb€(†(†(pFΊX‹’’ Š!Š!ŠΡŸuν+λΝϊZΕλ^τMχIί*xw±+bˆbˆb€(ζ’X?{B_wΝϊζ_'†π’o•Ύ[πξbύ/bˆbˆb€(†(†(Fίυ»-bθbτ €WλCCD1D1D±ΪWοΆ αEί0xt±ώ1D1D1@CCλΗ>bΨ_¬;t±(bˆbˆb€(†(611qσmE±ˆZ€uBϊQ…πκEˆ"†(†(ˆbˆbwξάό(F‹DCD1D1@C;1―_ΏΎ~ύϊ G1Zžλˆbˆb€(†(ˆbˆb'άΕθR,ΊEqΠ’-‰FΛ³NŠ’ Š!Š’’˜―PεΑ{NoΗαG±ͺΌϋμюΫλωΦ£ŠΊϋm€₯ύƒ΄qw§Ο‡6΅§;[βΞ‹§ζŽb€(†(ˆbˆbˆbo€[σ_αρϊή• ρEŸγΤΞ_Ξ7ύQΎΟQ¬˜ΎP­Οίzu–£ Š!Š’’’ΨF£ˆ±Χβ‹έ~F±»—λύYzΉΟQlkϊ½ϊο{9ωζ»ΕQ Q ΕΕΕΖІ&ΏRΫ―έ­\βr«‹]™6Ύ;Qμβw#Š’’ Š!Š ΪFϋύb·₯—ϊκΥf;?±ΫΗ(vQ ΕΕQ Q¬Εώ"Ιnδ”}ε%Is|κΕςν›α?G?ϊήω ο_»%>“ω™ŸεoίύΩΕsί;_ύθβ₯«7’»ά°Š”M\{ο{υ©Ξ}οβ₯ΙτLE—ο]ρΕh=υcύˆŽUοwη~veτξβ²3l=˜œΌ6z{υ±±Ώό:\kεγ7ΣΟ5χ(Ά³˜œΌ1A―Υ#·ŽΎΪ˜™Ό16ωΡt₯zL«ξmΞ ιψψXΣψx<9=ΏypΘ Σ);€Qtκi ΚKJTΣ€|6Is5ΡTΣ3yEγGU+ΫσB{ͺ©tvcOηηY˜INΥΥ:š1g’’ Š!Š!ŠΉ]΄u±Ε.²½η©MΈξ;–X|Μ>ΈΫe •―ΟΨ%iϋbϊ½.Γ|D±ηχΗάgΈ8±£;‹n―π–ξΕήHφ7σ£4·ίώbΤ…EΝ+ŠUΦ…ˆ»±ε½*Ά½ŒΈšZΠΩ 
}›Δ,l·Gνu9’°^i539q7³qΰcΜ€(†(ˆbˆbΈ}rcΧ₯7=l΅­¨`ΨγΣ,o%„IΚέK²ΐτ yQ•šYλρτ΄(.‹KΒδεsvcj₯Κ"‹Y4L€ε₯ϋ·~ζ3нo±ροίHŠωUη$—Σo›”±ηi.ζ‹«ΒΥσφ;WgE·Ϋ'Ÿ€μc»ΞƒjΛο7‘›9―Ϋ'υΝxΔ–Ξζ76·7€ειψX€a<»m5h36l&Ώ±½½±6og1Ή9jf,bOηΧλ£ζ§Ϋ£δΓfΛZ`ΡMXX£ζ³νΊ΅\©’³­OΕ§—₯υlΊ½Μ½šΥ˘³Q Q ΕΕ&&&"žhΐΫD1ΏΗκ{£’tλnΎR7Η7'^l$€zΊ°ψ¬™ΜδΫv]Ί$ξο;&™Ά“Σ΅¬I”&ν«ΟDΩq¬§©‹lΟ~gr:wok§=L'ΟχΕδΏ\΄§VŸ΄ί―>JΠϋvμΫqD1š3η–·ζ]χΫ™ ³IΨ§–μPxοΩG«nΫ΅+»©[mΪΜx3dε-’gΗ팡 λ–γΓSφΫ3Z=―e#MΙ|Υj“ηνQΒF}T«₯Ε7΄Z{&9ίΚYλ5«Ί·£™f΅mΫσŒmμ=Œ9;ΕΕQ QμΝ;O4ΰm’˜ίcυ?ŠyΌnoΩυΗ|1σ=ϋQ•OΈyŠιKΑαη:Ε&ρφΝ‰Ι›ι]ηύQ’Y».&ιNLγu4χλ­ ZROQŒΦ#ΨέΘς?bσ§σŽ(6ϊΌκΆKο;nε’»δζŸgy΅?rΰΊ*¦ͺΜ Β΄°P±χ„±HέψΌnY΅ΚZ€a\Ψ°:Ιy!>5•žY«Z΅΅΄ύI·:Tει)’^Sͺ4UsPzνΐκ΄m_ω•TkλΣcv^›_?¨ΦZ«:Ψά KΩ*;ζ @CD1D±Χ―__Ώ~=ύHΧυ·ŠbώΥ(φώ•K?»Μ^›sMVšƒ%6ψΒx4|Υρϊ‘ύώ%QuάΫψBgξ¦'Η―\rΜ–|γ½—YυΡΈ#ŠνάνΆέΨΙγΡ™²½Ψe:E1Ηέ”μΕή§1ξOŸά_Ίvcί1ΝΞdWο?κρι“zEΩX_[ΘΞ€§βf|ΎZΏ l>έφ8γΥω8λh]±©h\:Ωa<ίΠ­ƒυιˆΓX|JΘ.HΫ{U‹ρsΆ Š!Š’’kUt‰wo#ύ“ήδ+•(ζχX±§˜φ(E͈₯«ή.+K‰4rλώΔχάπQ,±*YΥσι“Ετ…£“|ο£eΛ9‰ΘMΒ/ϊovh·1έ£˜σ©Υ§Ž}Π.Iͺz\«ΙΛφea ΕδyΗύέTŽbl*/39•uϋ–MΞt~»fΩ<ǜˆbˆb€(†(ζŸο(ζΣ€<}ܚv½2λ{Χn=Έ/ά›f/ϊηβΓ™³‚τΕξξΓ±φΥgΧ&ο=ΜJ’x >J]t½Rl‡?nUΌκ8θ«gωΥ₯œΈΪ~Ρ?wΤύΞ’ηv₯»5Wγ‡=±οϊŒΞPμkΦΊΉ6c~}L«¬MGZθ6ΘόΪϊζΆ’ι{3γ.Q,»]=>ŠΕι#Η_)6–Ξ.//ηθŸkkkΛωεΝƒCμφΆΧ—η§ΣSΞl—^«XŒΗ˜³Q Q ΕΕxˆb–noE³ιUϊ§ρLh]v$Bi‹b~λM²sΫήδ>,νοsx±δdl€νj&ˆόTϋΩουΆ§[όΥΩ#“¨Ή΅Ά3ί*бνφί“žεήοφ]±»ηq<άΤjVΫαBάεφΙδςžΥI^˜j–°Ν*έ>Ωεɏ‡²=(»}Έ·άΊ¬jqͺ•νΝMyO§ΤφΆιq’rΕ1¦Vέ\μ 6½^λiΜ™€(†(ˆbˆbD1ξz«¨`Τωx²c—1φΪ΅w²ίmοj?ιLK΄I?U*ΆΫ8ŒbSQλ˜jιΖ…7ΪwNΒέΟΈzӞdrρ±υVQŒυ5φΫυXwyWdΖΩ¨h;όρˆγ²―CzΚdSZ©YΒXϋι“μ)“ιΞv¦I‚}Œ U™Ž4ŒΟXN΅ε€ύιJm/i˜–¬ϊLσpS Υ^ΖXg’’ Š!ŠqΕψlτͺρΞs²Q³mMˆΟžΥύ7κγΕtΨ‘±δΦ•bηRWί軆&o-₯δΨΆ·κ(Pτ:^Ϊx\ŸjΡ½›—Έέτ½_l=ΤΕˆ^黝“ŒξTΥ·ŒbΤ鯝k―jΪsU6½u₯ΨΨ̚\=<<¬κ²΄ΠxΟNW•šE6ΆΎ°Ή§Υj5½²₯ν₯4&“Ψ…ό¦vX«κΫk©š’ς‘E”…)6ΥΖލͺκ•΅l~/»ΝS#Ιμڞ¦ΧΧ₯UΦ[cΖ{sF Š!Š’˜ˆbάωήhπιb·/s•|jΏγςΊ$i΄ΔB•η^ϋ4Υ_ά§ςΕh’»WΊΝpξήΖΏάžPΙ’‹b―Ί|K4ηϋ^λαξτ0ήΌXloz,βn¬}Νמc‡2Ž ±QέhΩEΌyWε‘’t—j=9#ΕΕQΜ?D1zLd€g4xΰ£ΨΉΟv][Χs?|ΔZκτχpΙχo€Ÿ«¬ν¦Γ\ŸJάΟ?_½ΥΈ.Œ^·ή΄¦ZJvŽΌΊŸ½=W?β}vDο׎(Lό¨s=?Ί2A[ώ}%w Xσ.ΛοέȚ,б{E/ΆέώετοΕ΄JΩ$WΉ„εMe#ΫψOze«­ν—{$“œYΣj–ΣΑf>9Ξe΅δš¬YtižlSΣ ŠήžKWΦΣq6„Ν4³}ΰo ’Ψι‡(ˆbώ!н~ύϊϊυλ‘Π0]Χ/нύkχΥOv_<ή}ρδ•ξ6 
Ίϋj_~£Ι•/d“5Έύ§τ΁ΊΛΖœ9ˆbˆb€(†(ζ»‹Ρ%`χQh+b§7Šα%ΪΫφ‡·τγ’’ D1D1πY‚π2Ά„Ι›Ι»…ν[2?Κ)@C€ΣQ QŒƒ(†Χι–cŸ²χΆ4Q Qμ΄AD1D1ψFσ•„π2^d―^Ύτ³Λ—ήΈUάοaŸ2ϊ†Q Q  ’’ΤΎz· /ϊ†Q Q  ’’Πτ7zW=―gο|΅+υ½>|Qά7ΐ ΡH$‡CαP ƒΑ`0&""οDDD~λεΣΊ‡ζ4m†ιτϋ~7ΐμ“μΊVž•„ΉύΌ?€ƒSΫ2 m]WeYVUέvΣΒ€ήV”|›mΫδΧw]ί›¨vπ…5ΨΆ”hωŒ;€ƒϋΎόί>Ν‹Αˆύg€€Ψg x“Ηήcό¬Υώχ!Λΐ{?Θͺ~Ϋ?­΅R\ΒT<5Ρ/ΆύK,σ=*Q¬)Ÿ9ލ Ÿφ―α‹ ))(γ·]~Ών|1θ°½εσsώXR RLσ!ςή#,ψώ—!«ΠϋΙ,ˆž rΎŸ˜Λ§Κ„₯άΏ §ϋ ΚΝR¬ŸΣ5Eχ?ΣΧτj.½'ςžCŠ}H1 Ε ΕδyDR43Rύ`csŸΗώOχZύ—J±8―»φuύάχN‰–„£|$ΕH±/V?yQ­μΕϊ7avα―’b{ Ε Ε€ϋ†RL΅±wvLν¬Ο½ƒj‘₯+g΅?b*£—δLςOHŠ RμŸyN–Άλί…Ω…ΏJŠ…bξ@Š)φύ€o½ƒzQϋcTyoΝ6ε&Φu™ηyYWΖ…»ΦtQo|§™KΊ€ΥΖ–§ Ϋξ_?³›h%ΨΣοΝΛΚ„ϊMR¬˜Άύ1,½"Ε¨ΧGW&δ©ΥO½άΚc“ή°ιΫ%«³Δt@I~OδΒψfζΪ)η§z©m™§£ 'ΏΔΦ{‹Ε¦~cwNΓK ΎΜΟέ£y~ΪZ¬‹8ΤΕώBQ–ΔKš£D9uAK~ ωKRL½4ƒ[‹₯6±.TVΗχˆΊ΅MΗJ±|Έ]‚sRμΫ Ε ΕtŸή~ΪYχU>¬tGLMx~Z j?Pst_b¦τΪ½μΕΜΗ§0ŠΥi䝉Λ/γG{»ΆLύ?ΧΛσΜ<τHu]–bͺŽηΡλ¨y»μΟόϋΘ—ok©’{³a7`mJg]]¬‹ύ…Ό=E ±10υΞx#do=LϊU~PŠEε0wΕλ΄FωbΨ–. ιΙΤό-˘? jM RΜ€Ψ·@ŠAŠΙ—‰f5K§Υ5bΘ½·‰ͺωμ#ό¬:ΝiσiΫεœxΞ‹³τš֏3σ‹RŒ5ΖΎGŒnΗ­₯޽7 zιΘͺ3~>’O‘O~,¦΅€ή#7ͺχ 9w%¬ΆίΑ^’bVςAΌ>h?,₯₯X’χƒ·θ·†™ί2}©.φjηE`{)'ΉΏ;ήnA(.aϋ+ρρaΰηb'X—Yž-ΎxΜΟeψπΦ(!Ε Ε Ε€€ΨŒz‰½;AR B°e,“ΰΈZΌ%;ό0ΞςbδUӍ μFώ‘ˆύGRL·1]MΚvaœ³Ή-θͺŸφ‡‚«s8SL‹:ρ=:gέ*_Μ[”^/Κκi½5zκͺθιgΓyί4][6¨ΫΆiΊ‰STΉIy4ο#1m¨†rζηMΏr!ψςs&σQ8εά¬r”7·Α¦βΦ,"H«‰‰M°ΎL^ Ϋυnu1€XXu#‚―C5*χ1H₯­XͺŽΞ‹ΆΘ—…•ΌZΛ ₯V –νΐnΨΨ•EΨήoΊ§%bQέO\ˆuξ)-^<«J±0­¦…‰ Ι―κΨ£­Yσs]ΫqΉuxhŸzΦqύΡ1―XίvEt\‰σΆk›Ζˆ!Ε Ε€ϋΦRl Ιa9 Χζ˜#7ϊtg+‚cYΝƒyrΨ.›1%qjS›ψ―€˜Ziώ\=—hzίίeΞ6φύ0 }? 
ύΡ3Ε’΄¨ˆ²(ς,½ϋ9ί^γ£vg)6α3yJRτ0?:£₯Ν–6;ΕχœΏ«΄ΒRšώ‚{=‚³#ΏsYŠ’{u|ΨRένo½¨Λu1ϊb ζ.‹IM6)f4Γύ­°Ά­œνΝVK}Ψ·qΔT„§£ύνcR R R @ŠAŠΉ³•ΑγΣΑίήψ΄¦L½ϊp€„Q’ζuSϊRŒ,ΐΧK±7\Ԙ‡4ΝΆτˆφ6š·Τ:uE–½”ΎDq’—u•ϊv)fώ5πί†NashΫ%)φρ1)ζ€ίUŠAŠΡ JΖ‘ω&ΌK_SΗ”υ†Ÿτ+gOxΫ€mŸ€―ѝ`™O3vzψ3·O’/0ΝΛ3QmνΡ 5Wφ9ΉΪΔ2vYtλΙ8')ζΣDž•ΡόΎmšfX„[ΞρRŒκςιRŒN΅Ά‹Ξ<8οR]ŒnβοFRυBRΥτR}LŠι> ξΥnε²sΨ>ι0ζ!Łΰ»J1H1ϊμέA>ˆύ1,₯/΄°εΑ^δχi΅UŠΡΙβtݐ4%^ͺθΒAϋ—€Ή† άΞ= Š‘£ο R§΄’0 ƒΠά:7V1ͺ–SKμϋXb>f.ί~fŽ₯ŠΝ-η_/Ŝκr]ŠY‹΅&χ—]›«νΥΕH”‘ ?νwOWΞr·va©Œ3ΡQFaQΦ© RΜω }ε0ζ!ŁΰK1H1ΪπuPŽ&ΓUμ{wβEν)6Σ”YζΙ'ηΧ‹€r>B˜Π¬υͺ³½βCα=CQsίέh»Yθί!ΕHσΡσ{bxκ)uVNΕ±œ₯vœšΚρjέlm²CΜǝ„cΗO¦’4₯cΞΏ^ŠΉΤεΊ³‹"1Uκr]ŒD‚lΨ‰Σ •΄l·vAσΦ?=nϋ9‹ωK}MŠΡ‘ώ‡Π:§u¨Yο0ζ) ~R R €ο(Šňm ="ˆσv˜ηœ-CS†‘υόΎιςeBλΗΝΈpΞ—©/βΐ#‚j\•~QƒπB]¬–™ŠΥήzΟ–Ύ9½P~R―›²tα\τ oϊη8mIWΛY^ή>I¦›>ο:̌³©―cŸΪΫ ·1―Ω!υό¨¨κ¦›€˜ H1 Ε Εφ]σ>ςή!­gΣGΨΙ§Ν6OΦkx6ό”išτΫŽJaLΏmͺ‹Ž.²=y^SC:@ ™gƒ:»6‰ε± λυiηαηγnΰΣ‚#K•Ϋuߝsnœdod/(7‹ΊΑ­.΄]1]€Ψf/ΦγΥ—)Ϋ‰ku±‹§­ = ΄Σή΅fΎ%γnb–f³>·4Έ…σ˜ΧKtΎ3JH1 b@ŠAŠb¬+Σ‡$ˆσ‘©ύ λΛΧϋQ501W§Σ―ΥΡad&r¨σΠ§~˜΄ ŸΚˆg²―²ΐœΧη™/I1ΏYΥnN§γδ t›ϊ‡£λφƒ₯+όΧY*>%Ζ‘l¬c3­~X΄³>5›•ItΔޞKΝpBˣ͌S Τ{τμέ‹kλώ¨ρ?*ΐΉ…;αŽ ΒM \δ”‹@!ˆΑcA Έ ”QˆA!±œ‘4nWͺ€β¦Ί¦5Υ3©Ά“2'iϊm'o'γtXνZ³Ξ󑀫f2Χ½ΧήοϋQŒ(ňb—DY0>#Ci–τΞU/Yy΅’ŒnˆbD1€(Fσ±[™ΣΥΚ΄γ1-£7ζSκŠ`‰8FGοφŒŠL=,6{@#ŠD1’˜BιtΚ/_³½ΔενιRΧˆbD1€(F ΰvκσ™ΤYιb­νx@ςf‹\³λzQŒ(ňbΏZΪ_Σ΄f³Ρl΅ Λυ’pmSο΄G:†νz ’Q ŠΕD1’@#Šˆb Dϋ§ Šy—ƒ( ŠΕ†ΓαΑΑΑϊϊΊa£τ.pxxhΖϊϊϊΑΑΑp8$Šύ Q @#Šmnn†±³³γ] `ggΗ0ŒΝΝΝΏ'ŠΕD1’˜λΊƒΑΐ0 Σ4Gθ]0`8š¦iΖ`0p]χο‰bD1QŒ(6Ίϊ“.Ά³³sqσ(ΐ¬ΙIΫΨΨΨίί'Š]vňbώ”»»»γ.v€qΫέέυϝ$Š%2ŠˆbI,vpp°··7 677Ηλξ_`}}}sss0μνν0Lμο‰b’QLιb£Ϋ°wΔΉΐή‘ύύ}₯ˆΕ’ΕD±ώ[ΦίΕFφΕή_Ψ#Ύ"Ζ01’ΨίˆbD1qΐ=2<"ElΜQ ’(–ΐ.¦€1α‘‘8ΝaΒQ ’(–ό4\0rQ @ϋ‡¦1ΰBynD1QŒLx Šˆb&ˆb’ΕD1 Šˆb@Ε€(ΙF’€(D1Q ˆb’Šوb@Ε€( ŠQ @’$Q ˆb’ΕD1 Šˆb@€d#ŠQ @’€(D1Q ˆbPD1 Šˆb@Ε€( Šύ“ ŠΉϋφΟяλ…rƟ8ϋς‡aΌΙ† |CΧήή“ xι.oο£Ι£’‡φO9Ϊ ½ϋQ ΕΆ·ώοζhKΫήL?Ώ|쿞hGhήΧχΌσX©έπΠέwΣ‚ΟbϋξΝ£+sσƒε] wxθμ»§Έψ½‹΅—Λ£ΙΎώρ~φ―^ΐΡͺw_½5Q @b0Rlχώ­IνjήΠfV±­ε•γ rηγ}Ύ4ωσΓσF±3ΪΖΗγΔπvΗKˆΥηΛSgρ³ΫδΚάϊhyηΰρ­Ι^:[žΈΌ½Λ‰Λ3`{xϋύkp΄κ3¬ή€(Ι™>ΉφκΝqμΈg8^ χΩ½ΣpfjLώότ³λ‡C%Š]­υ“ΕͺWο",ά™ά‚₯ότΔ₯ν]I%1ŠέŠr΄ρŸaυΦD1HΜBϋƒ+ΗS#—–ΆƒηNώ&ψΟψ‡ρ–jςo¨D±ί’Ε|kW 
=q±QlEΚΛ%ν]Ψύν΅Uγͺ:*-ωQ,ώ3~k’\r‹\š·—·ΓζN>ψδxcγ$0<τT‡Φ¨›|ώΎj Φ6·ϋj5S6:*{ί›ρaωόΠέϊϊcΥŸ…98ψE£mƒνlmVΥμο…œ ½½»~τΙ΅ΝfΧ‘/”/ί{|ηdι·CΕΩ»3ϊΞΟGίωω‡\%ΕΡ‰ŸΓ`g͘\¨ΐΚΆ΅ό^nVΘMΏ,ÃѝZ“;2} Ή­qŸαˆ·&ΰ zQ ώ!QL&…©³πΤΉ“wίξΞͺBkΛe4ΩιΟ•;+‹_s’όYωY^Ωο ;/žΌωοA;ZϊκϊNgRρ–žΎέΈ}gQωπυ'ŸϊΓ©zr|δwή?}ωώŠςε·ή<ΣwC£ΨΞέ›A#’†;ΟU»Ί―\αΎώιϊ­³—bρφΛ-ΗwxΚΟ΅ηίCφn}8ρ+χV7Ξεfg­ύώ=εσ‹χ΅η³­?οv#ήτπ»πΩ±ΓΕߏoΠλžO]ηxαΉρ~'Ÿ―¨wκΞrvΕ’έΦΨΟpΨ­φζ—³7βΖσ/Ά‡?’$ ŠΙ;(e]€ΰΉ“Λ«ϋUHV% ώΡϋ‡AQl)πΓ²χ@;%΅ŸΚjθ³nΎ7‡ΣυδfΨηο.o‡/΄―΄΅?Έ=ϋΫόΎαͺΕ'ψηΪΛο²jΨ™/yςmΦήΝεχ!ίωπݎz.A?j‡’·O^«}q’ίτ 믏7όνyίήΎΩτH‡―$–=Ϋtƒ;3xΑ8ˆq[γ=Γα·fdλέ‡™ίωΐ°½δQ ’Ε„ϋμΑΙ ΚΟgλνϋ“”\…φΏ_;‰8 ΖΚζφV{eyνΪMi [ξo/jοΙgt^,ωμυ—­‘S€ά=ΦΆΜώnΏΏύŸεWO Ξκ^`λΉύrc}όαΑ«η+§Ώ—2%CŠδηΞΚ+ύG°»¦oά>mp‹―³HΠΪUr1eΌΥλ/“½Ώ¨_uΧΛo>ϊΈτy{΄_σσ·Η4ΠτΑξ,ΎώτJϋpϊ…―ώ|1ϊΎΈwχλ'ίx¨•«ƒώ`gνέ§7gŸ‹\Υ…εoζ`wksλ«±ζλS½?ή0ϊMΦίΈ"Ηl©Ώœτ―7kC_,;-Pv’AŸΎνo v׍/rΐγmWχΟ[γ=Γα·FNsœΖίΉΣοX|uΪ+―½xH>’$6ЉΎΔ―~ο“Λλ³ͺ %{o{>Ϋ_$ t¬  GάMC }žνžmz>ΦjGΎ6 Š=žόRXϊΪιπŸΑ™zroΝςόφN²έΒ— ΣΟRΪ‹―‡Α/ϊ|τ§;>}ωδƒOξτ~ήRΖ>½'γυ†!ΛΙ»/Nzάε\d€•r.’¦‰ώΈ‘4&Eδ›.”³S^μ λΦΙΟΣMΧσ_LyƒϋυOωώ?¦μΰΩ£ιΩ—Ρokμg8μΦΎγΉ1ΏœΟŸNNa}θ!ωˆbπ(&AfPͺs'εΐΞ”τψνΐυN­―~yυvcρέΐ=³aŒ·O/ΌώΰΝυGk[jΞ›ΕόώΕ;cqAσ•8‰/Œέ|Ήκ_ο,B[}Ή<³τύμ_ΏΉxυΦβυη[ξψ<~τftReJ£ΨΉKΎ3δ‡Κή%ξΘ’οΑC΄dͺιΤΉάx­|Z&-ΚΓ ŠvΣƒΙLLY₯nμxόΧΝΕΙ^—;±.ριgχθ€—δβxŠ‘ŒφΊ³fŸχΆΖ{†CnΝΎ:­UL=!ΙG€δF1ι 2ΆεφΫeξδUY&)¨ μޟZΉiρϊοž-o­χ₯ˆψQLεZύνU}λΥλ΅»&•DbΚ„J!;U£˜ΜUτΞ:x,Ι#ZseˆΠ’ ‹ΔωΉgn~_z»±P{›\ΟσF1gσ“Ίl™r.2Dk:Š-Ύθί,ω€*ό¦‡š ψšͺ–ςΊ€λ―Ά&Ύδ… ΒZ1εΪΚ―O_-œό<_­¦ο[]ξΌ·5ώ3¬ήΰ&ύQ[x΅φΨ¨/ϋχšκͺjH>’$0Š kυƒo₯s™±(ΣΩΒΊΐ #xEsνξΛ/Φ_ΕϊΖΖέKꛃ’˜ΜΞ y«€”©'ώ‚ΗυΘΫΓ£˜Τ‚τkÝWΟίvkQ=xQμ³!Ν(ψ\VjK² ½œ‹T€θQ,ϊMW©MMjΧΰx–βΒ¦k―χfuθΏ5[ώ’ς#£ΫΞ}[#žNΔ(fλ#§<φH>’$;ŠΙΊοςοeqά„»o<\ψγ·³eαΞΗώ_ΕΦ^ύ1υŽΏ›Ϊ΅άn,όΎ8#Ši‹ƒCOε;―{kΆ§”—+rΨ Fτ(&Χ0άώχSΧjρκεΏwžΎώxεR£ΨRœ(妋«ϊ’οΩοή˚\§ΛΜ-lΊςxΘ4Γ½Η·Nξώ⬟Ιϊhη½­O'r;]}摎ΦΪ— ΆH>’$4Љ%Yoλξκžuœ*šΧ^~ŸA¦pΆwΧV7ξή[τα:ˆΕΤE¬š7^~ι<τ„ϋyν|Σ'υΣ'e8U„(φτž!…»ϊvτ’ΝOKŸw%QM²γ‹›»'dνφ˚>7ŠEΈι³8ΏnhƒΤ–d-°Σ2{ύυ@s¬gw(λΑη<†ˆq[c?ΓΑ·¦Ώq•±` Š@‚’˜c¦«Ώ―-<‚œR»€;ΨΊqgι·[K“₯ΠύF±C²Ϊ ~“OΞZ;ίzΧ9O€,΄h# 
ό=ΡNWt’z"g<ΧR’G΄…φŸϋΟZύΆ+Ημƒ%]&(ŸνήWή>ι+/ωKΪߏΕΞqΣCΘ:bWξ­άΈ#νU&νΚο原Y‘φςާΪ}|oικνΪΒ†sΞΫσΉ5ϋ²κΝχ[žͺΏϊρκ­₯«·–_lΊ’(Ɏbbϋφ™e­¬Π΅Ζmύƒ¨?]υ%ƒoŽΗ½ΫU7 ΄LωδΥ…-Οοηι ΔΪΆr„γ}MΏΨΡωlH’Ψ'υδ(B}˜ώφƒgOϊšλEŠbΎυ€–^v‘NΩ“8;Š­žN]1Ο, Ώ:τČeώer¨εων<ΌΣ”Ι•γΜ?ŠEΏιΑΤή€Lc”7NΚτ χsϋΚν{σŸmΟ―/mτΏO)ϊm ήυ΅מSβέC™ϊL^Ε€δ#Š@£˜Nίρ,Ά‘/ΰσΊi^}πaQΎώu{ύsYMΚNΰυςB9΅όπ₯ρlω»γ©δ“ŸN©φηΪם­―ƒΕΧΩυ€hλΫoGςω'ŸV7··ΎώX|υ~Ό;y™€«t₯γ#yσμέχ­ώΞΊώΕ?{ξΩ¦λE‰b²τ•l¨=ΤΎ™ύ]σ󷇓‘w²Ψ™tΩικ`΄Σ5ύΛΓš•χ…ΥΞp:²όώαι«O«Wέ»\R_Πόct.f{ε­qύt¬Eipρ£Xτ›Β‘y―Η ŠνΛ½–I “ψ5ω½8|ρθτβάύeς$Όx.ϋ•ƒη»­ρŸαΰ[£<±Wtυ£ϋ»ϊηΙ³!ΣE‘|D1H|S“„ΜΒ ¨fJX-ν θηκΒwΖ†ήppmκÚon bo!τΝƒΎi‰R.B~n¬νOΆΊφωΫڏΐΣW²”ε‰Α·k‘ί¦Μ Ή―οΙΜAίω>ω6kοζΫNΘ·=\έυ•―%uή_Δ(ω¦‡ϊ!£ό&oCω½ ϋš²γξ͐χ9~σ/BύΆΖ†ƒoΝΨΊφ>δ™K/ω@K>D1uA+NεxͺώqsY|ρΥυ„Ήj\ΏuΆ=-=ΤΎ»αΎϋψΫ­EΩD]υ\©!-)»ΈφϋšωswαήιdOΫ?œην·OΖVŽmΞ@ΌχqeυΣ5%£άzσLί ;}Ω\y·γι‚oΧ2tλ…±λω˜o?^U/ΪςΒ»νΎ±¦XΏΏύψΡςΈύΙ‚h!{·7Gγ‘4΅μ<θ,}=PΓ–:K‰bΑw$ϊM·τ$p²Γ“I χƒη`ξΎͺύq%ΰΊύπN ·'νμ·'_Φτ_ίΦψΟpΰ­–±qγΞ’ϊΠ.¬­τ’D±„@»ΞΟ½­ΡΌ³Νν­ώύΣυ.€»?ΩΕs΄‹‘'ά­Ν£ύZϋ‡Ύ #9ixΠο«›ˆ EΑΆwΝ―γoλoKBŠmθNv=ϊΩΜϊΆCk°cŽ>σujΞφΞΡ1οΉ^LξOΉV_wFW&™7=D”k+ΤΫz©§γόνϊθPΏξΨϋ@p!ˆbP~ £D±ˆ€(D1’Ε’QŒ(&+²~ξΕ’ ŠΕΕ ―”χ6†‘ΩεΝ•ρQ,ω\«£ΥK…Ήl&“NM€3™μ\qΎΡmΧϋƒέњMνH³Ω­β!š¦ο{’Fά}Χώy0ϊq†^$ΓΙη]/ Š%Ÿ­U ©_H«-ηߟΔζ§ΞΉΤςώ3ΕUޱœIωdZΆ’ΰςΕ߁ ™TTΩyΣύwG±ςτιΆ½Kg·ηS~™yΛσ³+Ω©clΫˆb€ΛDK>»OSΊhΈD±K8‘-ΫΚH±ι(ΖH1’ΰRΕ’Ο*gRΑ™\.›I§‚εkŽχοdλΥτθδ'鹚ώχG±ŒΕͺωŒβθέρ@\’Xςu…ΤΩbUοٞ°Ίς\@9›oY.o€X€( Š%B·˜N)ς•ŽΐΥζ³)…―ΤΈŽ}Βqε—–ΩjΦ«•J₯ΦτΏΡ΅{m­Q­VFͺ΅zK7oΜ±OΉ^ Χκm­Y;ΪΈ:ΪΈΡΤZm£kToŸrΌ §Χ‘]6ΧΪΊεmjϋ8ϊλp²…Βu,£ΣnΦkrήυfSkw Λ9³kΗνΆζ•ωͺ¦sόWκΩ…^žΡi6Ζ{œœo³ΥιΚώb_xΒO?”Σ5:²αxΛΡaτδŒC8V·£5krτυ†¦›½_nfχΜVs|ΚcGWLφΆI§₯ a«cφœ8›$ ŠˆbΙŽaR€UΞ¦5ΣυF\=ŸςΙΥ\ΟmΞηƒ^h5δχΣršΩ­fύΏ¨:žŸ«7ΚΩtj¦t’™žO{ΊβΥ «S-€‚”κzΘeΙ”ΪΚβχΜΏδuΚ…\jΆ\±j:ΗϋΘ€Βε:ŽΊΠΎ\^?·SŸŸυU™|©Υu<r…‰uϊ*·]+Ν:‹μ\ΉΣsg΄ΐφό\vΦvεF'p3Ϋh²ιs‘₯7ζ‚6‘G²Ψ28›$ Šˆb ₯tQ5œˆ―D”r₯Oώ";ζ§_f)―Gμ•s©¨2eΛ·ηϊ\:R‹ͺžθΘΞ’(4ΜΠ…φεχ(+ί;F-νΖΧήiη"}sψΫ'»QN½$§r…‰sϊΣz•|κ—*mKέ¬α6ε+ΚfZα—Ϋ(»²dGαͺΊg“D1Q,‘$¬ψ₯‹]/”kΜŽ,σ§œtJ%ΥF+eR‘¨KΛ·ΛΉ5J*OD3nK§Ση;f*r);r~9R,σ«(fW"ο²€υ<λr…ˆsϊΣoˆϊ ψ§ζ:z5κfEΝFm.κYΛdtdyέ‰³I’€(–@…+3ί:wGΛ”-‰b‘­OΝtJ57_mjΝκό\h3 ΚwεηΫ¦εΈcw΅ŠΊν|Ϋ 
«<™|ΉZ-σAΓsœ_D1½šIg²>™L6—KΟNWžY/œ™{ΨΆΗuœήœΛ¨7ΐmjΤΝr^=ηj£Q―ΧkυfΧ ‹bέf1 7UλZ³‘Ξa”ižρ/W˜˜§?ΡΣJgQkhZ£”Ο€Ήš#Ϋ•g6+̏7«*EΦ7.ΤΏJηΚ΅F«Υ,²κ·•gl­m™₯kΥ¬ϊLΪq6I@ΕHσ(υ'ϊŒKIW3£Ψά|­­›]Σθ΄;‹GΝi5Γρ„ΫmfgE1[KO Νφ|œr6jΛΝklf΅+©€ Ε‚}όζͺΊw€5bΕ–ηγΜ~Ε€«O^ΆβKP!Q¬[L«7΅λ‰ ³ž«ρ/WΈψ§o•Υ^V2ݐkžο8ΑΣ{‹ Σ_ ιι¬θ9W΅ΌS]m>0#ͺ…+_χ|zΝΒtξkΗΩ$Q @K$³>g“t!Š₯«ΫqΚΩ€αgΦζ‚£˜ΫmT}κmΗσqΝb:b+t=?G™f˜―κηb]­4 PσD―έ¨TE₯ΪξΊS§ά(œ²ƒϊW’˜£WΞL-τν€;ϋr…ˆϊŽQUž₯Ί$1aδ§·<:}§–ŸήlξͺΟX^ ONG9-]Άn-ŸžzΐZ–ά uα–ήu\yc¦9bL˜='Ξ& ˆb’X9|πΤ°n³˜QΆ‘(ήΧΊΕtX¬Qf±I" δZ½‘w΄f£"λωG‰b™R+<πΙ1GbvgΊέHˆ ‰c›Σ΅JA¦ώ•QLΩ*Wq<•kΦΣSG\κΖΏ\!βŸ~ΰ2…Υ5N˜]ΛMΒΖ²9–qΚ΄άΡF­γΔ\ΉRž?Uψr€49Θ`™\ΎTk—ηu’Ε. ν§ Χ 0―m^Φ Ϊ£LΥ”ˆ£pδ3AΘ΅ŒzΉ”ΟfRͺ¨QL8±£˜ _ 8”lΉΈx[«^šΛg)Ε_ΕτΚτ•;14§Ύš φε τΪ\œΉΖ\π3Ζn—SQ)ΗγΤe!rΕͺιx"Ζ& ˆb’X¨ΣΝdm¦–– |…ŸΕrm'dύ2ω€ΒnSΧuϊGD1»O‘.n@ Κ?ϊHϊX*«ΨgR¬γΕηΪ½ŽVŸ/δRŠtь΅Iς’€(–€”"]μXžΒν΅‚>Ψ ™‘§ΠŠeλŠo7V§šžL§nXTΗ²υJ™K‹bN}.:£2«#φš™ιλΥUg£–’F±l%R yo£ΠJu§Ε⟾£W~qTΝoWΝ…Ώ}Rκ˜)΅d|YΨsΝfqP(Žζ u}|˜Νx»τXΐΌγž:α7Σ²γl’€( Š%”kΦRAζJΥVG7MSok•’Δ”7HFˆb2@I‘+–λυš “ Œbκ3%Šυ$¬\|s[Aλc¦7’DWβNXκΝgΒΧσΛ·ΊŽ|sp ώΞtaάp„Ω,L˜½€(χτ%t¦Υώͺ;ž°%ε ₯ތ‡‘P7η[–'τΪ\*’μιFN'ŸŠͺ±γl’€( Š%Ÿ^/₯"I—›¦'"G1αv«ΕΐΙ’s­YΙN¦³«Ήs₯•–Œ€Λ†Gω@ά(BΆ²;•TDΌ„]Λ¦-QLΩΠΗn³ΏΌNUYΣ-ώε ϋτ…]εYdKΊγ)l½žIύB±n¨Ι―Vψu¨jχ¦Ÿεf6˜J=Ξ&‰ˆb’XςΉ–Q›/Μ ιl±0mO„D±œΕm΄΅z­ΡΡ[†¦w-oΔνdgΎsΠjΜ―kVͺwΊJyΙU'šυΒτˆ’Άης%Šε*8QLΎΝωLΠ‘ηK^·’ d5Λ…L:Bσ―Ϊ.Ίνz!x3Σs₯šzγ_0ρO_t[΅Ήl:θ$rεFΗυfpΜΪό\: W(wzΑΫΩ†VΚgƒ―Ψ|}Ζ“ίm”gώG&“+6u+Ξ&Iˆb’XςΉ½Ρ֚υ‘Z­^oh­ΆΡ΅\OΔb6+ΕbιD₯i†Δο_aJ8–©wZ-MPG7{rHI`υΜNϋθΠ[mέ0-Ηυ"s]ΗΆmΛ²lΫvάσ΄kχŒγ‹6Ύj†Ωsuϊrη{Ζxϋ±ΙΣq»ži΄[ΗۍΞފpς `ϊ$ΕD1 Šˆb@€d#ŠQ @’€(D1Q ˆblD1 Šˆb@Ε€( ŠQ ’(D1Q ˆb’ΕD1 Š@²Ε€( ŠQ @’€(D1H6’ΕD1 Šˆb@Ε€(ΙF’€(D1Q ˆb’Šوb@Ε€( ŠQ @ϋ§r,½­Υͺ•ςX₯Zk΄uΣρ’ΖulΛ²mηςw;αΈ^rΉΆmYQN§βΈκ#u)°›όK‘|κŸΔz†ωO’Ϋπhπ/AΕ‰@KΗ¬{Χϊδ΄’έ(ώ€Ό”wB•ςηIβΌφα<¨υ> Ty ‰Ωάΰ,Α T\σΑ܊*ΤuA’Μ–˜]έΉ˜‡VÒِοŒζŽ4C5ιΉ},·ά#fVΓφ¬ΦιsNŸn©ξ>]:₯Ϊ2{ϋΖ―ͺΔZΧΓ]«ƒE£οΜV]ύU³Βύ:ƒρΪL§δ?*φzVRjΝΪΓV;?δήΚα]ρ^O²Ζpvσ΅KHdοΆΩ»FΎ‚0΄JHHRLBBBBB’bϋ‹ 1 ή΅ζFϋƒGθΫ’6Β]«£Αήτύ-€ΥθΧ\ί·€XΟjΌ΅ =»oΝπσϋ‘UάΡΊP·φ €νFΏ6kοH1D ρx͝1K gχ?_»„Δήw[ώQ›—‡ ?΄Ύ€hΣ6 IнΜaF¬\Χl/θE‘瘍2\„χΌό/‘(Ά’]«£ΩχJaxMΧ.ΗίΆœhNl²“bjέΚuTHRŒ 
ΰ|’bH’ΛΓύΟΧ.!±χέ–{Τζκ!Θ­ϋœ5ξzŸϋŸϋ_ψ¦υΫ_δ§Ώr•~θτΏτKzIrd’Ϋ§€w8†‚‘–@Μ7λP’ήΩ7ILΘξ:Ρ|ΥΰΓMR ζ?Ή IŠY9&ΕΐjIŠeαlώΧ.!±·έ΅Ή%Ε`•ΨΟ€ ύο­gΏUύδΗΏd>” £Ε$5&!!!!I±}ύ"K‘hγ·Gzν -ΑύώIΏCί΅-Ϋο }…žγΨΆν8^Εί“)š„λξv]ΟϊΕ‰ΫzO‹ξ† “₯3½ožλνz^FœδνήP͐ WFΖΧQΎT&“· tπwT ₯}Ύ0ψάc"½=CΰΘuϋ ₯Ν@%――­Œχwh5Ψ‚Bsϋ«t'0³\oLΓ%βg»°γ“ηD‘c[,zΨΓΞ›°Η€XΔ"s2%‘ €XΔ"Φuψ^† Μ@€]pEΖ‘€˜ψΊ’tG@yQΏΩΜoRL8²EΐžB)ΜH‹π?\Ζnψ Έω9„ϋQ–fΠuαZ*ƒ™ˆ Η>/πΕrmαQ‹d²?3Ό`$Hδ ’σ‚tϊϊυ«Η?ώς…ούcδγΖ“ξΛ- IŠν+½¬°َ°dXWΩώJΝέ’ˆμψe·ιDΔΥKJj­φΣφWβλ€RC7%~gA`i%υ₯4£‘ω‘Rs|»VTFJΦΪΞΐ―ΖΧΥFξ@Υ}Βόα·k₯U Σ‹y«&’Ψ°ΉI, tτJA•Y¬.xa Ήͺ;v»¦Œ9ΙY³£7 iz^7‡sac,C±ΦΆΝΤω@ΟhTF (”™ώγ<\΄’α ή ε†1¬Σά‘ShE[‘>h₯œcΈQ«ΊΉ}/ο΄RݍΖFήα(+1› ‰Δ`-inr7s­ί•Μ`0₯,6L[―gWΐ%Σ΅κEUΨΛPρ†‚ƒ(ή₯+Dΐ ά―…–‘ΗK5- œ_¨ΆšΥΡjI³)ΖΕ0¨ͺ₯¨ͺΦ4θˆ‘Vμvδΰ†ΐqπ;)&¨₯šεGxKρC.ΈΊπ-…7δˆηΎXΊ-Χ5ƒL†‡`Φw¬ ‰œ‘bgιξΫ1Ϋυ“e£φί ίύτΣΟΦ‡Λ,­nΠ/ι%Z .ωK•οάψώς–„„„„„$ΕςnΎJ‘yD\‹=xTͺMxξΨΤ³Λ„€@€’Ψ΄α%XΚ @BhΠ“T%Ό:ϋΒ‚E«ΗλpPjq€˜§WB릟\Δκ˜L§Up|εψΔ;‚Ί8όzal±¦έΫ1<΅„Ϊιρ;p9”Z¬s`±Φ¬273RŒS‡B3δ£br‡γMΰαj%V³΅π€L*iΡ0OΝ.hnτ•D0²—αcK­ΐλθ u˜;#Ύ‘iz»oΌ-*PΣ-?•#žZ«•vΗ‘BS++P½ξ~†V¨4-κΠοhU œh„Κ,V[–λωΎgιΝ’20Θ7d£:ΥΫ;žτlXΒ&°M¬Ά–aQ―ΈΆQ %;’*yF[―3΅”R­­S―°i”―DΆφiΛ-°«ΨrΈ™3“T©Φ[(ιœbMێΟͺ3›™ƒ›υYΰςΰ;3L ΌΓρ&€ξ…cϋΰΛψJΙ!Cd‹,”D0—!C4P0Ia νQ{a‡‚Τˆ­4uw»°£7*π-γ;ΐfƒnΉtΔs,½’r?žπ1Ό¦΄)5#½]‡ok;’ό΅γ7th‰Ή-[ͺΆ†ΣΡJ fήR|η_Ί₯°fβƒωΌ@?VrΠmΉd2<³Ύ`%HHδ†σ‚ρ±Ÿ(gΪ?Ψx…š„Σb΄0½…ήψΛ•ο<ωτ₯Έ|ΟΉΫίwyμξώξ/Ί‡ςΡ”dσu΄AΘ€?ο9žž,οΰι;³Ο^½IΐΪ‰Α-'zο‚"$ΪLwς‘‹έ­άAB’bϋ‘«Αβ/$ƒ<Θπkh‘νπ4PΑ χ·ΛΚ)F ”4’Ί[³ΐo,Ρ{Δ-ƒVιτžΥ€ͺ( db’Ή$‘J§ΖύnLρW΄‰`»SΤ. 
΄fK–ΈU6JΥ΅c‘M+9E/³ονδ6jΒΐΠ`ώ­…‚ftZ°oΤ# ŒΪΘ|€xm&³lG jεΡέΔm±h²·dΠξε–“ΤDwΙθΜY­ΆΑŠ!η”΅!!Δ.Ž9B―(bBοπILo=.˜Μ$_O¬iΉtHΑ³J"ΡΛπ‘>PΑήΆ~(HDlcdΣ΄Υ„U6ΗΒG„°Y„EŠ%[΅μΛ°κ§ά&Θ‘α¨=ΈMZγmͺ2’‘g¦¨°–β#j<Πuα[ k&:ΘΡΟ L±\t[k`™ ΑμοX ω€„$Εh45X<Γ§$_ΐrΓ?ωητCΰ―[β{iρΤηήc<Β½ύ½VRb£ϋΉ\4ε«σ1·uά^DΣh3γŽ’Ο©k+ˆHŽ?3—Ÿ‘HdŒΐnρN>¬ε“€Ψ~Frf[τήX:4 Χ~r[DΓκ §…b{\N‚©)6HκQ’h˜~²¨_SΣσ‚•5ΠΆ›1­Δ€X ›Όΰ‰ξκEΉ9’J`ΐk+ζπ>nO¬WβΑΦ πgΩM+ DIΔ6 ‡l܎‘Σ¦Cω_`κ Z ζΓϋbκ`'GHš=αιcά+€Ν’₯aχ˜n “›YΑ^!Tnΐ­²Α;oB: STΎϊύeVŠWTb“@—͐[N΄{JŠΡΛπΑ ™±ΫύφΒΰ1₯bπrŒͺšθk`‚’Cƒt’1‘b˜Ϊς9βΏ€(ͺͺΠ±• GZ„‘δƒ>΄„?Ι€sψ6-oΫͺ–Μ€`-ΕG”ΘΊπ-…6δΨη¦X>Ί-Χ5ƒL†‡`φw¬„|@B’bΊυ,žξ©Nκ±?ϋ—›qϊGκz1zc\`zξ9ŠJ8ΎI1IŠε’)_MŒ™©Ω/P7Μ^œ>λψ­K·—Ί«―’ .―Κw.?ΕγυτΉ‚'­]γˆΪRLB’b’3-);dOi%XΑ‘ΧZyΤ =Χ6 ½Y―²4»<)¦΄ΉΜ&pd†‹μ¬9rǟ|εΩ–©kΝ*$❌ƒ…x/†σ† B˜ΪP˜χ<›l5eda]ΣMΧΗΌUGŒuOς)r˜ °ΈO©6ZΝ!4š­f5™%}RD<)«{Νa΄jΤ0Θ·„Cβsύ‘!*nšΠͺ£H1„Γρ&:P'CΣK-ΆΩ¬ΨŠβΦιK«x»©€8€½ ά@‘™ΫƒφΒ±P`b°«N8΄Δ*θ)Y-ΕGTφΊπ-…7δΘη’XŽΊ-ί5ΔƒL†‡`φw΄„|@B’b‡N~²YΛΖχΌτψ›«gc’νΕsΰΞϊŸ[χ7ί )vX’bο $)–ΐ«dΥ„… S x $ν•ώΎH|½Q-¨|φiŽcΣμ €XΠ(0ΑU#ΕD[―– Ό*oIŠ9-ζέΞV¬z²’ο¨ σg1%Xεt-ΥjΣ Ε >7± °…ΞxR¬g5P"κlͺ‘€Δ$g‚C8|ό„ΨΐˆΧt#―Ο"—lυͺ5sk7•0’—αCΠvψΩυήΆ~(·ΐΖX~m,pΠΎά€Λaΰ R!)­«Š@΄XRl’Α Z<`ΦΟn)>’²Χ…o)Ό™ψ G>/ΕrΤmω!n― Αμοh 9€„$Ŝ'«ρlό—Ύ?Μ]ο³Ώϊ·9ϊ‘Œ+Coε|iο“b›―Ÿ­,ΈKσξ²·όJΤΥW_ή²<ο.-ξg’Zž7탣™Χν…[bΜtm'λΌ}uεμι™„ΗΟ,om­vOΑχ}υΞr‰«Β‡OœdΕ@Ή“μTwΆΘτΕλp|Ϊo œ €Ψ―ά­λψυKOα¬cm‘BHA3­φζ>Ÿϋhiμ ψΩ#VŒ2h[„σwΗ/{yφ8[GΆ±E!}ڍΈH]n—ŽΫήfoξsmncΰdjΤβό΅»ˆ°Μ$$)–˜Œ'RΚΊ¨ΫπEQΡ}S£•,›WUΰ}&N;Υ.L+![mFR,}’[ζUƒŒ*τrΣzTqš»Ώ}2Σ - §£Wα(F>Σάfη1ͺ*μηz;.ΓΧUάt%·€ΦάΩyεΆΣ©€\fΔtIsμf’Iο’I1|`μ=)–Ή½°Cbϋ€5ιφIΨvŠ!ΕZEΑ^υΘ6ښ¦™N€iwwϋdR,ršΉŸCδ[;η ›Α[šΓΧ…m)Ό™ψ G>/Εrmρ€Xζ‡`φw΄„@B’bΣΈΟ±—V7§LX½=–CΎ#Rl 6²qŸ)h”ξm›/,Ι•₯$₯2sκΚ½Dκ(g(žqŸcvCŠMώ9οΓ”ιΪAΙ³·ΧF«;mOM™P†’bxΏρ@;H±cβ\υΐΆ°6:σˆl‰±zbΠ@NΒcΡ— y@%Ν™e.$Έ}_FžMsˆI/αΒ2€Xώηg1Τ;‘pKŒ’CD/»lbίσ/Νά&,~ͺΤψΣ'3b!lΝPt^θω„Ω‘Yƒυ;ά‹oη$ڏ&˜<ΐ, ΅ΠβΈ½N}°0Ε'}ηφp§ &°Βΐ„&4ŠT ΅XΥ#Ρ€ΘlΡ : •ŠP‹m7Κ/)†6C++ΕJΉΐΞ\ŸΓχ»£€ €ρ½ {OŠel/μPΐά"ψQΑ¬©°π‡ιΙfγ[ΒsHp‰φ|i,λ[=D΄»–h?ΚLŠ%ͺ+ΪηηͺΒ¨Ι'xK3“bx―’[ kfˆrδσY,'έOŠef7ΐJΘ$$)ηΘ΅§^½χx΅ψA?Λ>ύƒώW Š ‰³υΏ 
RμυΜ…™Αͺ™ov—_ήwŸžΊηϊ|Lβm.aΑ‘sχfv—_zŸŸ?g²ΊΨϊ)Žb8xςϊ©‹wotΙΘ₯Γ樜աWΗ‘λΛΠ€ΨυKσΛTΉΡΥ=³S7»έΥuΟ}|τ˜Ϋεϊλ˜@Ιγs—·%¬-ά~ΐJΟΜtχu²:Έt䝳ΣΟ#¬ίxdsζΙΉig…^pŸ8Ξ+Œ΅AбK›k3WL›w·0ύθ2ύΖY·r«γœ6ΛXΛ)6;e‚( ‹Τ€\ΪX1Ÿ=ΝbςŒ}ωΪ£KWŸ.nnaΓ2§€Xώ'λS(͎Οσf¬±‚cςR_C“gΟ+-§7όΚL₯ I1j¬xΩI1ίPZ=qŠ΅j&/€ΪεΆΗ§N³I:)ΏH³ωD˜œ5Β"ƒͺρvΫL”r›ŒΎλχΫ²j‚\ΞΠ@vO|=ρΫΜ€R'δ~Ng~!’„8DgN(6:#3:kΝ#ω%Επ&ˆsKΨV–Ψ˜ΜqΣ»£€ €±½ π™•φΆ½CΈ…‘nϊ ₯œ–=sΰ'₯k%eΐ―Τ ͺ *§ϋ„ƒω9"ό΅γ· € NIW:EΘa‡·4;)†― ίRH3'rδσY,'έOŠef7ΐJ€•ƒϊ6ΪΊμυτFB’b?τΫtjύ• ίM½ϊ…oZ?ύ•«΄ϊ$*„–‘3b°ΣΡI·’$Ξ«KμΰΏ#WWΆK>yXΉXοos;IλβyœΩΛΧl ,†Jp1χ―ή‚ER(RμΦΒΖπ>ΗλŠκΖ0£ΨeΏξX|ωŒΑ YΗ¬˜;8υ”'ΕM= 'χ·wζι…Χ§N'Ζ[‡'ΕϊxύaΏkσ›[BKgfΈΕ_β…v‡ΈŒ[\Ψδœφ₯9orRŒ qϋΜ0kθ°Μ/$$)–DNω@-ΥΪ¦νωοΉ½YVJ­žπe~he(Χ΅Žeι­ˆα_λ)”’Φq|ίw,£^.«6;nD²b^e¨ήΆήΦ’h΅4ۏˆ§•υΆγωΎηZBm₯άrΓhτυ½Rί–p?_ΓΛ7ƒZΦΆλYF«€€T#˜pςš*4Vέ°lΧσ\ΗκЌM'α&AUšϊΆ±­Υ†ΎH€ͺ5͈=Σnΐ· »7ztšR¬7[šnGΐ“ΪΉfX; έi³Πb σKŠ!M·€ΑJX­ (±οwKID#z>0&'ΕRiΫ =€Η |M³]Οχ\£UUΰ[$υ„AΟ΄©be oRΣάh EŠAr·ψΫz»γωηtκΜN–μ ?Šύ΅γ· €ŸE Ζ½ŽγΧiο°%@ΐα-ΝNŠαλΒ·L|cŸΘb9ιΆxR,ϋC0ϋ»Αdzϋ6eο$$)φ ΗΆI±/Ώ#(3Ό(IPŒ ‘~ρXvR φ”εω‹MFLœ\ΨςbεόΉλGΞ\?Οφί1¬}pκŠ!•K·ξξ'||1εΤΝ—ΙL lEΨs.=ΠLπ_.έӁ₯šzՁήΒo<ή™3Σ˜œε§‡ 7Φ„ΦαI±ΙNŸ|=ΝΦΠ}ξ*X7ӌΕ;4€Ζβ΅Ήδϊ>` ™ΫΧ'$Ε§O"Β2ǐ€XώΡsͺκ1Ԋ"§ Δ­‰₯)0ζšڍ°ihR,„t"(Ϋ{(Β¦°μpύ b&™Δ3j±υNiGΠ“W+§‹ƒ%BD.LΩ9€ ˆΒ…zΔœ’<ΫΛmW牙άΜZ™s·XΪIŠ…“;c.4[±ί†Ύ‡e»«djγ{>0πσUa νe{‘‡p‹h¬s’€žΒWi»|Κͺ΄†}eE„4όH;Φ|νψΑ ZBtEŸέ Ÿ)“F|]ψ–Bš‰rΤσY,/έΊžΛπΜώn€–Ά€{ IŠύώ?noŸTήΜNŠQ!΄˜™#—ΟΝ°ΥCχ>Ό²05ψ|΄@ΣΊx‚!D/Φ½'K³7Oiw³}yΓgb‚K/<&£—–9TVu0ω¨9²>kύόΝΡΥF W΄0Π(άNΐŒ~γu&l¨δpUY·ϋ€·#Rœ€μθΥ.ΊΪn‰nτΊk‹έΥΕε΅Λη˜Ξ="™I± a™SHHR,θu΄zκαJ‘¬uΌqo΄eΝM•fΆj%!€νψl«,‡ρŒ†:Z_±izέΞοΛήΧαF†αΓώA1ΨΩ₯B@Ζ·Q}u-Vš^/hωγό½FΉ¨$%ύ)’φ’δSΠ¨VŠ<Σλ Ο(ϊ64W³ZΌN«€¦4U½mο΄kΰX ΠΗF5―γ^/+)Ν4ΪτžΩ(¨Ϋ‘EB[η4+V›.ŸΣŠešG8‡θ5uαOΖΒEΕDG˜€„YKΝΞCΪΜ»5`kvYΙ΄Fφ2|`  €0φΆ½ΠC¬X1:νΪΰ,σ ―'•Υ°:xŸu?Νς‘07žp1 ΪΆͺ’–Ϋ6Δ~€λΎvτΰ†-L`΄ 6XάΘΔ–β#Jl]ψ–B™‰rΔσY,Oέ–οˆA&ΓC0σ»VB2σ [4Ίw€Ψίώ{?Ρ~°²ž…£·Η¨ΐl€P*β0P›kΣέ9||&₯ Τ…c[Έ³ Ew‰h0™#b€aRμ‘ΛΘ¬ηβW4S~’ƒΛΰ·Tΰ +ΰD€ΤΓW“Y—™C¬ζ{σJ±Ω‹&,*d[S1Ύ]ΝJŠeΛ|CB’bωGxΆΥ1·Ρ±l''|-#€·ώ]Q΄σŸˆΔOΙͺ2”~@Βΐχ\ΧυΌ „+Qθoη‡dΝ'΄VΟΫ©wψ¨Θw)Ό0"o/ΉΗ${>ΘΙ†¨/Σσύ€ω|2kƒJ`ν8Dq+QΠΪήVaZαŽΟ}s „ RI>0φΏ+0C:Œψ 
½ΐηoaΰ“‚…qcγ^q+ΰFΪ nD±ίβ6%{jiφΊψ–Β›‰rΜσYμ=ΑΠΑ¬οΩ%μ>$$)φŸίzΟΖΏ3ίΝBŠ}{Ύh~λQFR VQQεΨΜΈΝλ΄³°hιh2εό‘“ΧŽ^°?Όzο ’Γ_Κ@ŠΑΖΊμ€_έδ~KΓδΞ„„œΒTC¦π;#Ε^tg Ε¦―³$h‚ίΰΐP)©€ψsdz%;)–=,χ’“€”(jέβO:ηχŽIHHHμCπΉxH±|ABBBBB’bξb―Ώ|濜,€½=.pΩZfRμυΜ”9Ψ|'PΗnΝ€σNŠMŠ~CTΪoi˜Τ™‚iu‘O α$ΦΑ4mtYΩύιλRL΄' d‘|r|Ύ:σ‘"ޢٍ ΰ=.ΙΫ1Jiζ`š$Εή/HRLΘ(JβΏ%νGmB(!!!I18τ£GRLBBBBB’b3vΠ?Sο럼άΨδ όeύΦ0/F;|•ήBo„=˜bRLΔ,Hχ#JίH(έΫ6$5ΟγΜOtžσrOŠAŽ|ϊ9½&J=i°ν‡OΕΥ‘ύ–†ΙΙZσξbRΞ₯3,ίΉΗdλ`$δβςβ Fh Πyv~±{ώΙˆδ3 ‹ά-dΉ{τΨ Τ―/τ x‘°ΒDώ/<έΩ@:;”ͺpC£mΗ‘b#Ε>Z’€ΨΎ†$Ε$ˆ GΘC©jVτ>(!!!§ΰΥ0€ρͺJβ Ίgο^Όš8χQύτά/Ε}ίΟ½ώ<7εά”s3ν:Λμ›9=Λό.¦νJo±{Σvh7­’lD’FŒJͺQƒHΘ„Aǚέ9ίdθγLƒiΈή―5‹&ί<σΝ$+„ΟzffGЉxb-υ8ώ‡΄½Ξ­™ςzsβο9wχyΉ!ΏΪ.ς΅i5oLΨZ*!’8ΰυOnώu£ευί_{/^’ˆA]ΡO2šΏ>?›ž7ηζ‹_~QλSΊ^’•ΆΌvΉo²8·h¦oΟΎubT•ΙΓ?YzTΩᑘχ<ξΏΌΪ—,dΛΧχΊŸf4Γ’Uι7η{Ώ5πl;SΥKΓRŸΉ=ϋzxX­ο›yboωΩYO/Ί>vΏ<·XMuͺ‡«ΧΒΫσΑχSŸž›š,Κuη―ΊŸΞλ_=˜œ)/ΝΜ½ό§Ÿ\v‡ΏŸͺ%Η]zgκΥSΑ–*[u:τϊΉΉlusoEΥΞτΌ|²^^Ϋ“ρ·ΎΊΧ/wσ―΅ϋΗ“iVн \YυύΆ‘‘B±fάΈ_’‰`Ξδ?νψΔ½e[K €Μ©—Id“ {3Og;}]Ώ―¨b·sŸ]}Ήΰ΅ψο“uiΛ­Ξ5εΕ{ι§™ΛmuYCΟαl*’xΥΩτWΕ Σ‡/tιCfmώΡεΙΗΆ’fyζg©™baΟYΊ+32Σj΄>%σvaΞ΅oYVΉ\vr1@ϋr1ωH·,k»Š}+ΕxqΙίχ»wοNΆ o0y›}°!S“Εδ"e2­`vvVJηΌϋ€6"ές.γςa.ιjš‘‘ίώ…v±t:½ΌΌ,_Άœ§…s£²νԦՍ–<–ώ[ί‰|u”7U:vr1y³ŠmΫWwg;ΉΨΓ‡εΏ)³fΠ&Μω—q'sŸC€PŒP 4«P(8‰˜eY>γUζ¦V>ίϊo¦¦EΘ—x'“7‘Ψ6Osr1Λ²œhL¬ΪΔCQ‹Γ,Λr±6š&F(Ϊ@&“‘΄’X,>iš>pρS―[ίzτίΊNδ &o3y³ms(F.V9νxΛαΓΪ(#mΐ9³ΎsŠ  Eδ ζœw›C1r1‘βΞo\ο`€o\žΤ8qX%b„b  LΦ¨˜OΌΤJMM“υ͌Σ|B­λd²†PμΉ|—'; ’Έβ0a·7B1‰?MΣ4 £Τ2¬ .›°7‘˜Š6šh6[―%Ντ@ΝχίκNŞ‹o]ώ hCίΊΨνŽPLΉ²²RΪ²‘Mή:Š=oϊ§εθΏυmŠ=_ίΪ–ύb SΆ-SΉ˜½1Εάg―Pœ•kΤ―MЍι[šϊζϋ§–tB(ΕΤQ“₯m§?ŽB1aŠΪ@§!ύλ΅΄B1@(f›¦YΪv²Q{#Šiψ‰<š  ŒΆͺωώιΏUŠB1‘Ξ¬Ώd£Ά„b;žeYΞO‘~UθG#„b’τœΨ€P¬.¬Q74λ]Π£κ›«Ρ‡MΝχO­κ„PLB1B±εεeηgCξ»όTn)€PΜh""ΡŒΣ‚`¨ΩώιΏ₯ŠB1»΄'Λ*7XΌ9WνW£A™JΔvZ(¬Τ΄’ZŠ5G—΄ϊo‘˜Š1S¬[*Μz—ωΒϊ™bΕ|A¨eΉΈCgЁDμw5r£ωbhE(φθΡ#«ΖΉρ¨FύΊeζέ—ϊ?ωέ»οΎωζ»οώξ“?Œ^ύzΙ΄Ύ/š>υύ·Vϋχί²NΕτ #3Jl]ω#ρ9tιߜ―.―œλΌΌR2Kx9δ†a•SΗ―ύόά₯}ηΚψΏ’Ϋ wσ₯©lϋP¬’X•ΚιΘͺ(/θ[›ˆEjτQ—ΎZŠωhΤϊ†¦ψζ«»_j¬ΰ9έcυ=θ=K+ΚςΚJύ΄ΌŒΉύŠΠ‘‘X±X,•KK3‹’ˆ]όαΩ±Ÿœ«.?=7ςOΟ\ϋ_qcΥ(ŠΉQ~Όrσ7‰‘|zμgηœΚ‹?>+·σιyΓ¬VΆy(fφvοWzRΖσo(έ»_ιξ5^Δ-ΆΞηŸQ΄Q—JΔy¬ `[Š}?ΑΝόhχK›Ωέ_τ5¦¦Fi.$ZzsΟKΚ_ΖηZ3ή9θTJ"&·ξΌ3ΕΜΎΐΕ½ζ‹ΈΕΦΟΣηbϊ2hύL±gυπαΓκO±0ώ/ω³ϋN=Ϊ ίέλ 
ΕΖsκ鸟—PΏξ(š>5λŸc'„b€PŒP¬Όa(f¬ξPLfŠmЦA(F(ΦnΉ‰ΨσŠΥG#βΗ_ͺ³§λ—ρ̟Ξόα/»%|ςΨϋfάO€’©Ρχγ§}(ζΏ·ηΥ‹zk]'„b€PΜV'Λ―ζ\…΅₯ΈT ½–¦εμ`*“EB±kGβ’—‹R#‹ά0–oώ6Q Ε~ρ4“ΫωΫσ%£Z©†U§ήv*f!›I§’Ιd*•ΙζΝΚϊЧl‡YΘ€δι‚ε.³ ωlΊ:R*Ξd³Ή‚iy‡‘F―;Kœαm/ΛΘΧQ:›Χgf!Ÿ«vŸ’RΩfΑ°lTGFΚs0£·!ΕΚgkΟ8Y݊n#–™Οη2g/HσΛο·¦bΩ§ϋ(§ZΪϊ 'λrιΪsKgrf₯ώΝδb;'@(¦JτŠΙκ&ƒ}0žuG$ΉΔΗ»=χ‡²Χ”kδs―ΌΦΓJ6ŒŒ$&³žM䲩λ‰ρρ ρxβϊυκߚεůβτ_^Έ#Ήpa|<1™Ι=t)—εGξ w(v‘Ϊ°tβgή“43™¬6##'““ͺ™FϋMΊ¨K#ρxr2³Έ’eqZž¦<}U¬ΫνΉLmΰ΅'¨‰¨²™;ΥqγΥλ©lΈQΌ₯*₯©œž[ή;™b€PLGM “l«όΝJωqu1¬rωΙΚr‘(3Εδaj¦ΨHΗ™δk—VΎ5Ge©‘En˜φΓΙΧ'Fώιi5SμβOΟΚmΙΤVώ\­tΖ”ΑεΆl¨™™b•B*κήΏN(ڟ³T•ιŠ±‚iΛJφ†Υο±€αDF1΅£+‰;cY)5Μ:€i;Μl<θZ7Jh •·½r‰Ύ`W£-£ Υ½VΊ7ΈaC=)υτ}‘υΫ FςΫΓΚφEΨνKX>ΆθUλ 5Ψ—ΑH_Φ΄…RHFλχvWO<ΩςΎ4Ϛ‹ΝΟΟοD ‘Xʟy§‰OgΦ]νq§bƒ ΕBβ ΅jο;Ιrv°[UνώK'Ηʌά½Η©©Šΐ;ƒ*έq΅ο ‰/BλΚžNΧΚ‹γ* [gοi¬1ΏΝ(εμxθ <ŸzƒTi’xaςt`οξυOπγρϊ=Ÿ|gίξFνξ ΚΈώ+›E(¦@(ΖL±#—˜ypϊ^φά”,Ξf²ƒSχx7ώ/ώ$ηΧW3Εδ€ϋ‰<:=4υ` #5²Θι ύΟρ‹?¨›)–ω,=ύ§jεژg2³ρι’ρμ3ΕΜtΧ~`XΐcW0(!šχψG+Ω―Χ5”΅$ nŠεFbšQ"YΥ|n(²ΙsV3‘X·QU²±nΝF"΅‘Jv“†"CΦ¦[τΑHυνΧιξjαγ…«δγέΊŽΧ^š”ωμΗQ ± []]u~ υ«Έ¦pε wΨοcUθσΑ;œ8yj¬P»7卧Ϋ»Ο“ uΎ‘_]ό8π’ΦξΐgEo(¦u0Q”†ΗφmŠ η7|ξ>›Y]{j'wλjχ]ΘΉBΔώ‹o«Ό―‘ΐG“ͺτg}Ώ§nœ1ύW Ν»E³žP ŠΩκ ’7Ž^ώ;ύ£{ΌΎ,Λ@υgΗ€Δ[*Yυ§MιΈ–3c?>§ T±kικΜπίϋr’{L]Ήή*3Ψ―₯.Œ¨²/5α(ιςN E£Ρ`w}β£›)ΦΛU$[λίΏ™ΎŒi‹J&δ}|8ΤMž τ™M„bΡ‘œ<ρ~Ή_/ΨοDP™ώ§‘`8­ŸρΦ›67ΫβfŒ„³_΅BNT·Ω 7Σγc,M(¦ΙΕHΔΪΐL1•h(γβΈ;cΪΊΰ?\Σ%Y»Γ³ΎΨ]7* ήWc{'ι>u[ξάθξŽPΊΌA¬“mΤLwƒfͺΕ œMθtΌα$]RΌΧGρμjUαΪΙMkίI.T{('½ψφt‚ƒ{½ύΎ“—RγšΟJύ;AŠB1ϋι$CW%ί+‡IΦ–ΞAgζ—ΚΕΤν΅»ΤΩΈΜΉKՌώp ωκΣ+WΪ[”κ x™‘΄%k­BΌΗξ d­ΖΩJw0‹E"½³νr˜΄ΧXCα.OxΤθœbΡDΑ3­I Δ³:J>ν^B™ξ|­;a¨γϋέ-&Ν­ŸS¬+V¨φž_ν’κz+5q―ο―EPiΧ³κŽ&ΤNy’Aέ}HΈφ…ˆ9―ZΕLφ‡=9VoΚΪn2ιyv]αώΪιΔ*Ήd_W‘˜ΚΕHΔv€Pl΅ ΖνSξ΄(πΕ=ם…Α“αPx½wΜr·{¦˜²/ αSΧ =sΈfΧβ•ΒδGξ-Ύs-ίh¨}_\»g…k_„Ό©Χp΅ηκΖgίpWΑΑ«Zήf:₯±Q3caO&ό,Q”Ξ N₯'}ϊh²Π ψΤ•κZ)―/ΞΏγν ]y―Φή vΈŸϊΙκω1WmΗΰΣ†έ±ZΗp~ •M s€PŒPΜ{Ιϊ„«ώφΦΚΤ•+Ÿ1³Α Οf•x²3Χg+α~w½1ԍDk"1•q‰D,ΰt¦Β―ΊρEέ4±P¦βήBΌΫ5PΌPn’ξU‘Xbν ρ•L"ˆ‘‘D‘6ˆ‘Ο5”7,ν΅ -χ4±PΖv‰«Ν;Xύ€³ξΨ@"W υ*…t\ΤЧ Ί- ΛΘ5–—^λŽ? 
φ₯=yY¬»>hΣ½p*Εsκ£9[ρšJ(€P,?~Ιε+ΥμΔ4ΝΪΟωHΗFS²Ζ%Y‡Ηja™cφΚ©@ ¬ „ήvTξh(<6»~¨““U~:Παžrεd;ή€I‘ΥŠκίT7ΆΠLαΚ>wr–gͺά xŠσ[*φN;xΝpΥΞv?Ι―ξ›’Vu―:Όv;oΤ&† ž>}ϊ qκΤΰ£šk}VnΪ„b¬!#“Γ'‡bΝ/ήPΜX5ž)K½1“›‘Λ~'_i0‡«ΟΪψ*–™Trd¨Ώ'Vψ€„bfγPΜpηDJ  ―‰D\3ΞDOΨθHΜξ`Έ§$s?³Ησhwp”T›ΦφιD\u{ 5TΖT§«;νq]κQ³Ea¦zφ7Φ•4ϏS*Ωο9Ϊτ/\e$΅A*ͺ6Δα“/€PΜ4Mo„QOέ₯¨ϊΊ™bααYw(φΆ;1ς€?MΉϋrdγ@Ν½]γΑνkΓ§Ώ8ωNΨ}ψ€Κ‘€ΖŠν>vΧ|jβmΧC:7Ε.Ξ¨MoΠƒZoάΏ51όΥη'ίmΨΜΕ}uω”ΛμνIqσζΝΙΙ{%ΣlX¬:ŸΉuS¨βΊΰ―σpΰπš#Gοφ΄1<³ZPσΏ<:φŸόός­ϋ¦²tQ_©'hφ‘Xϋ€PŒPΜJχΥ(:žl%<”³λρ^±([ Ε2κz‡Zκ!ωD¨+4’5Υζκ¨$HQYi_ ©©^•œͺŒ˜ϊPΜ»^QΙ”ώΰP5ϋOΥk_8£Χ=±,š¨;s™z59Ρώ‹ Σ’bλ8:Ž ›Š+Ϋ½.“ΗzC±ΐ-³ήμ΅/ŽtΊͺθC±ΞΘΌ©x£·Ή«Ύ75ˆώΉΟL|iF…kK—ίφh9―Ϋ· ‹Νz*έσP©oCκοž λŠvόόζόjŸJS§q¦Š΅ ΕΕVSŸjβ}(Φ›«6B S°…b1uQΜ\ͺ/VΫY'(T43ΕΊ{υ‘˜dLώtυj†ΥH_8Ψ½ρυ%ώgŠ5Ši_8«ί]έ›φ₯›)ζ?“_ηηηε'ΉΨσŠ5£4αjŽά2Ί{T;SL%YΚΔΙ† ΝξFP}ςε?SƒhωofώbΔJΞ₯‘+n";6<³6ώ­‹o=ά±a”Χyξ~i«•>Š΅'Ε8|r5ΨψπΙΜ@,Gͺ‡ †bC™†1–b₯{½3΄z’™œaZκ‚ŒΟtψd4kš†‹iΉTμ:f!ŸJ Ε"ΑFρY%©#kͺ‰™Γ'##YΛέ‘ιιΘc™ωlj¨/ΆξCŠU ι‘‘uΝ %ς 5yψdύ §ŽχT)F²Η›Η=c"&+υwΨ~„bςωγόT7ό€bŸ{έ;xrB=\™»tBψ€ΔUsžνͺΠΚ±+|jψλ©™₯’Šα4‘Ψ‰95Ξ3…bλϊŸ‹zšΩ-ΝάΊί ™γ2Ξόp§χˆHχώœψπΨΑκαŽG>xμΤD]ρ—Sžύ_WμXwτζόόŒΛόS‹Λλφԝ‹g{ΩΧ0>[qοϋw/ž;uμΘ+ *·NL(Φ& #σž“+Ψ›ήθDϋΡDAŸ­˜ιϊ )*ΙXχζ‘XZhίsD§7ρ©$z#‘°uαP(–±μtmZYWW—όθIΊΆie"]ή³γλ4Ε,ΥgΠ΅:–°έςqi¨&Θ¬kΩ%€4’©ΨJΖsϊPΪlq3VZw’ύhύ‰φυ/\n€Ϊ˜κK9][ωDp‹WŸΤΗ^δb;@(¦Ώ\ΡFcΛ_ψ’ΧΡSξ1gn~^ΖtΏ¨’2•dΝ{Ά5w’Σ•Θœ›yΪΓΜΩo(&λ|Ο;1Χ(η:~imυΰ|ΔέΜΩUΩ ™EΟ)Ί^9qΙ΅οqο₯³SuΕϋ"—M₯ψάύΕ‰¨{ΊΊΛgO9pθΠ‘#‡86±d^ЍΝdΫ]π†ο?νaiβΘ—”± §rΧ]Ryόό”©HεnOΦιzuόR!kŠŠ© E‰d, ŸΜ\D­WΣ‘|†b"€ξ43]šΓ'•`,‘N'ΙBΕθρl9’ΘΚXR~€'θŽιςυ‡γ9λ»™VΙPέ₯3ύ‡b*OH€SΙd¦ΰL˜RΒ} ΓͺΘFςι!χV"ρ|έ)Ί‚=qΛvTR}ήιrΪ-ΪZκ“Jl(mVμŠe$ϋΓϋλRN}(ζΎΨ‚ΥΥέέΥπΘΝf1r± ΣGΛ}‡$Rρκxεθρ±¨D5NjΤT(v *υŽΉe[ΪPL%_BmEЉWŽ½tιόΩσSΛ+ l±™sG=OωXίΔ’μ£Ή―cGάλ;$ΥjPόΉ¦xζΈgέ‘³7d­ίι;ώŠ;@»#]<ξ~z_~½ΈφRM?ΰ™Ϊ6WWωΥ­%ηU(έ«\i‘X›€PŒPL‰ξύzjz”PLt…ͺΊΟF/άω‘χ Y*κˆΖσ ›ο†Β‘`]'#ωŠνCΓνvΕ’ͺUhή=-Nι ΘL²`έΊΘHEΏΕMκΈWp¦’;RUΙ„χλωΈƒ\Ζ^uιs1y¬ `{Š5R.—Ÿκ†nύς­!ŠV#₯Έ7r©B+eWη#Guͺ­ΈΒš†Ι—jέVœυΣ'„uCsξί͌V›)OΘΨzΏπΞ5Σ;λτ£,£ύio*₯ν(Φ½YVeωΨβfLέ«&#9ΛφυΒ9;<²`΄Χ5_0€ΪΦG]*k’ΈεŠω9E)Σ•tvν‡b3ήmMκ 
εΨω©υ‘Ψ΄κsγΌμ‰υνvn4jζ’ίfœΑ§γοwάƒ_Šι‹Σύ)vHΧΑ‘WΦά9{Lί푞«[­ΤΏ˜)ΕtŠ’P”™b7›ω§§Η~qNB.έβD`?«F]κ†ήY~~n€γΜ΅#qcΥΝ ϋ™T ιžp Α±€e+Φ@θiΣΏ>#±²½‘ϊ@¨+r_ψ1θ: όχ†»…b’ ΦΕ>]Αή‘tέ†L<ΪπRέ‘dΞή 3W­¨*…‘žΙX0—.Τwο‹6LΖB±œεo‹>TŒto$Ψ`+Ρ~΅ν ηe‰‘ΎήͺΎ‘tuˆL°Ρ §Ίτ!—ΈυŠ•½τ‘‡Ί«‘βΥώχt4HxόφS ͺΎ<²vW牋R½xγCWEΟBΉήυώυΣΉvύοθ§'¨Υ2Τ¨T.\ο©ͺq(Φ©fЉΉώu‡}vŽΚݎ&šq§βΗu6˜"Φ3δ~¦ϊβcž_\·§.υ6˜₯Άλ•―yϊjΡW\]+z†R2šŸΚΟνσ A(ζ‘3ΕBWGχœgƒγΪεfŠύb³Gνύα@ςU3Ε΄,#ŸΝ€k²Ή‚υŒc²™κ Βτ5FΕ²Μ*ΛͺTκG3ΡD&W0΄ƒωάw[Ξ:Ύ|V•΅Ž]β*|·2²K›YI©Σ”ζ «βs‹[T1 ωœΪε[~Ό‘Ё`M¨·°α!±κ`L•[O_άzΕ4 Γp~ υ«ΖΒtϊz|th@ Ε―¦¦ΚMX˜J_Ώ'©τTΎQMφ_\X˜«ZX(}6ΏzUšYπΥώ΄σˆρρρλ)]ξb!Ε Ϊώζ¦_uŠ―¦¦¦5-eΨ”S{υϊυΤτάΒΦ+uŠB1_–kJFi)»Έž_Έ›_ΈΣxΙߞ_š^Κ|–ν8γ9.ςε3wb©₯™%)Ψθ±ΥaΣωΕϋ ₯RiΉΖή  ’Ϊοˆ εky’‘‡φ»„G*6€‘˜>`RΤJ}tΜc›§οώ[Ϊ ‘ ³K5Υ\lΕ0V Γάp‘ΰlε[3;t_޲”,l-λω'§3H―Ψ¦H™f‘MT7Tco `ΈΟ8¦;iZϋ@(fx5)Ν7ώ5ί?ύ·¬B1@(¦fŠ9ΈŠΊ₯ΈX4•œΝŒΎμ)φOΟάλ½mXe)Џ ž}¦`e"Ί“φwυ₯ φ‹‘XλΓ—η ΡΏαOλ:!„bvΙ·εΒ²œ?{nJB1χL1 Ε2§εoV€ δ›ύ,+=φ»t‚±Ύ‘œiΐ‹=S¬₯RΙω©nμΔ1ιΏυT„b€PΜ6 Γ(fXεj(ΦqΖ9γ~uΩ[=§˜„bΖγ²PL6jM©K]σΰΕ€P¬΄*ιP7ΆZ―YίΔΆšνŸώ[Ψ ‘ ¦i–ό).e¦Ψƒ3™αΏϋεθFχΤ– /ο}|KfŠIAΙΩ¨ τ‘XA‰ώ±m0>γ·΄B1@(&?~Ό…™bΛ³ρι‰ξ±δ«—’ΪςΪ₯‰ΓcΩΑ)γΡfŠΙFm°A(¦K³Τ©?ՍV?Vƒώ[Ϋ ‘ s¬¬¬lαλšQ’hΜX5Τ"Ώ–Κ΅»ό‘ΝΩ@Š5β΄ΎF­χR5τίtM ;!„bjL?Ή˜ζ"•κ.?‰_˜Π‡bšG³^C_ί,MŸMτO-ν„P ŠyŽ£4MSsŠ ύ7-}(&ΓΚΰ5 €ήgη.tœ£8ŽΎβpρ‰uΏΕ©…αŸ2ΈŽ@ΟΑuΫξMzσΉzυjRΕΕΕΕεd –1Λ°‰bpΞD1`Eͺ*΅’λΊΛŸΦZΎπ“ούώΟΟ¬ŸΈ€$–1«ͺ³ˆb€(Ά~ΐp ΟυλΧσ“qΞ›*M•<ώί~όΣ=’ŒVl8!ρœ£ Šλz£Ώ}ϋφΠΕΊΫl6-~:…¬—ΗΏ‚G’qΚP E,c–aΕΞˆbΐΊδύ}θb XΖ¬Q쬀(¬ν>—ΆUΥpί}ψ+2NU•Ρκ•(v^@Qμ?’ˆb€(’ Š€(ˆb Š€(φ/X[@λ€yˆbΗγ1[HUeΙ/{˜E–Ο¬ U•u4KιQ Ε=z”δΕ‹=Μ"ΛgVΠ,’ΛD1@ΛΙκ­΅l$<Θ/{˜XΦΞ,ŸYA[kYG—‰b€(ΆΫν>|8t±²s%Σ]5™…s(bYA³ˆΞΕQl|ε«W―†.3Θς™t|νdτ€(6σΙbYGΆΫmk-·u¨ͺ;0ͺΚΒΩZΛς™Τib° Qμ³.ΆΫνΆ'―`Ϋ“¬Ÿ±θQlζ“ΕΖ],vοmΰ/Ω½·Q[ζ41@w±8œμ`‡“cŒŠXτ€(ΆD₯±‘ό%Η‘q‹ΕV’Ζ` rˆbkτ¦'‡ˆb2 Š€(’ˆb Š€(’ˆb Š€(’’ˆb Š€(’ˆb Š€(’ˆb Š€(’ˆb РЀ(’ˆb Š€(’ˆb Š€(’ˆb Š€(’’ˆb Š€(’ˆb Š€(’ˆb Š€(φ–½σώŠ"ωήΏφMŸΌfr0+² ƒ$sBQ76ΉkDQ%3€‰( EΕ "Š2K8ύ­šk Υ5 Νβl༟׹ηžΪšΫΥΟ­ώ‘ŸΓΫ7Ί¦˜gϊύ7M7_ϋο¨+ρ ‚½˜μυd/©ΐ ύ―Ί7ζ'¬βŸμ%e―ͺΐ ΏOOl»ΉœrόΗnΧΊΜξΨχγ²ϊv°`Κ ZP]ίΌ&πλ/•ΕυC?{%Ω‹Ι^Oφ’²W•½°ΪΗΐ;έz‚±ΜXϊ§P?ψϋ(³ΰ“”ςšΎ tλ§Ι₯ί#τ+κg―'ωbμ…Υ>¦Ψφβ•μK;Γ}ΌwϋGFVοΚΎΩ%;(K“uK³Gθ‡ώtG4{UΩ 
«}<L±΅y—}iνάfπAή#²Πό±žO)+A“j,ςΪ>P5k3ΧΏt{„~θ?Ϊ΅½ͺμ…Υ>¦ύήό«»Ϋ=”• I₯F5|9O˜Χ›ά75K΄Gθ‡~z[5 PΐΛμŽ₯―n6 Μέ| |δ+54XθΌq(χ’²Ž$ΐΈΖ\Rοϊ‘?π¦SΜ Ί(K£²ΐ_˜5Υϊ₯ή#τC€M1¦Ψ‘ΞmGύβetQφ‹NΚ`1σβM•% κΝτ/υ‘ϊaŠ80Εb”―}%Xe%h Ύ#†2NΚJ½²& T‚jζ½–ζϊ—zΠύ0Ε˜b‡ω—Ή‡)Σ “²_ΠδΒ―₯Α|54σ"‹ MjL4,ε‘ϊaŠ80ΕΆŠπ­”½C™”• bεZΓy“k)+χRjΪ·RζΡAYͺ1ΣΏΤ{„~θ‡) ΐΐ;ΤΎυΠ½-,˜ @Ω7 y*ΰ™”•h§Μƒ_ΨnΈe5h Ζ8H‰’ΗDRοϊ‘¦€@SL|΄Y ŒLρŸΖ‘C‘Iar_eΎ²‘sύK½Gθ‡~˜b L±ƒm›Ε7ωfΚΎΝSΟ Œ{”y°οyΚ|ΠΖσΦK)nj’&Ντ/υ‘ϊaŠ00Εˆο­›(σΰžιWΚ‹qΉ˜Ν/jMsύK½Gθ‡~˜b L1ώaOΡFY(4 l‹Ίv1χ2ΧΏΤ{„~θ‡) ΐΐΫίΊiΛΖΗ&ΚΎΝσE(Σ`±kξγžω€²²Ύ »ΧRοϊ‘¦€SŒϋw7π bHA•”•₯―f£ΘtΉXGd₯ž7ͺ‘¬¬CΕζϊ—zΠύ0Ε˜bτe>Oμ₯, ”εÞ4ΏΗ΅n·{Νꍍs "ξ¬ίγώ{οHϊœΥEάλ2μk)v»ΦΎΗ?YΏ‹ΦίΌ~·s]†χ‰οi6ޜΏG?Ιpp%,οvoXBϋS @€)Fζ‘4Ι2»”i°ž²¬˜²oΐη]QΫ Χ|Z°še€8Χ/τZq#Υh mw6σΕό²](vΧ…ˆ{­Ωi]+ιSzTζ3κB¨Y–γk£?²GΚ4ψkτ+σ‹ΦΏ›οΓ‡§_»–DfΠ£αΑ͟­{ˆωžΡΪ˜bL1ζ+ρ ΛJΠ€QΝΚΎraFέ†ošΎύUΔ…¦ΟφΦDΝSΏϋR΅ΑγϊκΦh_MzSΖ7ΦLŠ“M[•uή£ǚΉΥžWԚWΤ^x΅%υ#{€ψ+υ«υ‹ΥΐqθWΗ‡‡žνάI“™ξ_nέσnΞ=ڜ?SσΪ„‚5όxπ’ξžσ-‘ύ‡) ΐΐΫγ^Gσ»ω€gο€2ϋΥ€†ϊω΅–†MO‡Λ’Ζkδ0]G &8½>φXY*ςΤ½Φ(ŸΆ‚&˜~uΕ§OλQ7G&€»_ϋΈEvuϊ•šΕκWžΒ5S?ωηκwEν-MΝ,σFilͺ“~2xFKhaŠ00Εf?ΌEφΦRφ‹u”₯ρ|†=όΚΐkMΟτH‰₯q­r-ϋΰ§μ˜ήλϊΛqMΰΉ"τ³ωΩ;ΎΎ’—ψ•ω‚ιξ£ΉΩ£,ϋ―Ρ―Ζbυ«OOͺ›σηκΏ{εζcΌ¨uVα+YΖ•%΄0Ε˜b΅»έ”}u^„I RͺŽ=žΡό>_αWΦ™@•.τpg͝ΪεΘT›·ήf‹Ί6$Y//§Ϊ’μΡͺ3r…­Ό«6,±"dgYHBE¨₯.2έ¨GφHKeH|IPœˆΔšπ4‡ΪcΊέ«Δ«–λτ­_‘α^[<*ύ1ΤΨu~!ΧOυ”Υ`­±»Θ&HZcΈ₯2” Ύ’Pj±FŒ>£t[$—TΒTρ¨Ki€-•.±Gωτ€±κΒ{IBeXJSTΖΌυN^ŸΐŠΛC«Βw5FΝoŠΡj7•ΝQυG§Τ…'њLveΨϊΘt§AΤ`«‘bjPΪώŒ—ΗdSΜΕΞL†ΫPΖ::i,¨Σδ*~ vΕysBuxͺ]έ G”ο™zŸζUΞ"4™)7{G1S @ΐ)ύα#œ²4 y^@Y΅s\½Ϋ΅―Ρρ %=ΥA5bMGδŽk+Ά][鍠Ημ}ΣΓΆ±ŸθΧ’Pζ}€V±6%ύ½ύtέΧΩ –4gΤ΅’)6πCΜΝ½[Ÿ ΌνoνΝ=Y°<&?$uΆΗHΛ­ε1W—mΘνŸœπLŽ{ΌωΩ ύ|εζ˜όεqUΎ³άgΈHΉ_­ˆΉΞΦοbλΏͺ=ιŒ>μό6ΏεB~σΕόζ ]Ι|\³ϊ)KšΟ=νΨΒ·…νΘ_“Ώι\[ύƒΡ&xδύπΠXλύβ첦0Ύ&rήg•XΘZ‹Θvζڟv>γρx°΅¦ν·£ΌλεI’‹;g„†ά“Χ—Ηά:Κv‰.y6άQΣ|ŠΗάIλg5Ÿ‚Ωˆ)=’«wŸόP/δΡSπ™bt$Ό›Γv†οί‘?­>„uS’ίε~<682ξU>ΪΫܝϋuι Άr‚5»ldβ φŸαjƒΟ½ ².ςWπν›Ύ¨;uΊΩ>­ω˜r»Ωρ8•ΩΘ7ΛΈλ}Fξ Ι³Ηΰ6_A:o=ήόμΉύ;WιPƒ|‘-Ήl’–άμ’ε17χU<`nj$Ρ1[±-?8Υ1ί;BY ΜίA˜b L±4ώΙΝƒ,σei 
ΞΣU~ΧΐRω͐6―σ«CΣΔ%<ξδ{4ž;³λœθνΤ|Μt~鈼6"jfΊΎtψL1šy;nPχΆ’&<±!’ΊH±Η\«‘«Œxφπ3D,Ί+ž½ϋƒήκg3>γ₯ν+ύ―Σ―ς…~ϊϋφ²JζΝΕW牆˜ŒόUΙΆΉŸQcXLΡΧθώF<ξύa{EΈ#/zύwiκ}kφυε;­t;ΣϊiV_°<Ύ–֏TM1>©nŽΠ‘ΐIgυψάϋΏσj71›ΒΆš5Έ£"<­ως\%ϋφ₯ϋΙ m©uΑ1ωfΗ ŸeC8“ΓcOmΒξŒK¬œη‘, ”wΝ ¦€ΐS,ΝΙ‚,Σΐd^ eDr·Κ.XEOΡΩbhΟLΆΡ‚Ό>ΥuIώ n4σIŠέmšίΫN8"oΎΧ ™Ύχ•lŠΖS/υΤEDBaœϋ­6?γΟ―o½JέΡϊLή;aΜΧ›A¬ώdί=͌Ύϋ{ӚBγ«‹=σ–<ό!*|g‘RsΰΪ¬J[»ΕΈ Γ gzΏ* JqDύΑϊHSŒt*“€œ™’Gδ£bΔPΟΙ€ϊΘ”jσ[οYθŸΨ7βώƒ½©κ3Ίδ=fμμXΰ1 ΩΎ^¦>τ>₯π<§1Βθ‘ˆ’Μ‚©’μΠΌTΖ‚) Πΐγ_γσEšƒ²o ζEV"₯vmΝθ”dέΩgί>©ΝςήΎ§:\Τ™bbq)6ΕM±³Oϋ<Σͺ}τnσnάzDoyL͐a1ώκw΅Ύ³ko‘!τΛφ~u©‰ΑW“ͺΗαnΪΚ]<ᆝšbΈΌΌΔκbŠ=μί»«*Ζύ^ρψ^ϋɞͺiXŸ2Η3ΪΧή¨/φΌ|3­ŸΙg’δ.Μά¨ΟLΊVκ{?³ΨSŒοesHΆ₯β³§šΒ”gFu—Ξ6D,΄AgφΓIgZΩΜqΟΜΈ»+.ΥHFΪ<Κ"†Ο‹θιΨmόŽH―σΤΓ``Š₯Ψ"Δ7y„Θ4σΌ€g)”₯|bε―dr/³Ωε₯Ή‰’ͺΰ»XG5ΕhM~Σ¬.)–%ξ’?τzφ‚—η|ϊ·βΝθ­#^ε?=ξ“η{{φ°β”Ίƒϊ?>zέΠ΅‹Φω‘ήΜ«Ωέ€ή—xσζNν€Υ>pzŸŸαβέΨΝ‡›weΉβŽΈγ²μΫβςv”+VΛλ[w#v;kδ©‘§y‰7V~zce¦«|LVρτlŠΡ3R·wͺχΗβ›s—±(y5%mΤ%uχHκ›ΖŸοlN΅Η½x’wϊϊœ§ήfX™ͺ>n<±E”Ν‘²»ϋυΟ«θΏcά­‘ayΎ₯;Nm°„7Έ%oYΙθ”²,ίΗΉwς‰rς{y‡*ƒk³:Ο?ωΎˆjŠyήTρΟyά%Ο?ξ˚λ!=Κs”³Ύz˜b L±¦pϊπήΕ<σe ›Θ4 LWQ–£1τ”ξοn&ͺlkβo―‰·•OΛΎOίΧ‰ όrN½)ζŽπέW5Ε„$ΕvϊΕ<1σπ§Yύ–Ξί%S¬{7+ήΣ Iτ=Ψ#χxm˜–"Ζ μαώφPί£’ήHmmf}ΘΆ+Ÿό3 ιπΨ;v°_Ο?–dχΎςΙfgJεΝ™s<£/Ί+ϊGz»Ÿ·Ά?i­w%oΞύdσΥ°=e?t{4e£Τ.¦ϋΎ“žiΝΊ€x[μΰ“FυsΤ‡>I―ς°β‘·šί;Ώ˜έΟε}ΎΏη‹ξJ΅ΑόpƒI˜[w’J\³gΥO›Α1 Φ(Τc` £E”c&doo“Ž™GθαY >iςNΡ@ΜΓ``Š‘Γ’Fce)|ΐ3 (ϋ ”zKεVΔ{[άgcΛ;i~²υXyΘ‡k: £Π%ξΥ€šbΗιΎF …:οΣίdiσ3ŎwΣβΔΰ›ΎΗ;₯Ιξ)pπyςalNͺ’mi‰½ς?‡οthzΊϊφˆ Ηεω)Ώ>δα[IxZ3Ÿ‘₯rυζΛWt’€Ηφhμ₯grJS!Ij/ž~/›k{Ϊe‡θu‘]μΆZφ‘Ύ£QW/vI½©ρζθœΚρ‘³ςώοqX²\–,wr–k;Ϋη$_ƒέσ6HΟΜ(.Ψ+Γ@[Vz tgϋr Τ^ΰΓβ«{4™‰ϋO:ξυv?ιΥΫSυ5ΡΊΦ‘Z> ™ν­:Slnσ‹ΜS(¦XW+>ή!Ή!wf½Τ#_-‡T]½υΎCg•ΝQοN]4„lΏό?ΙuυŠ‹3π8+Y‘MΜh ŠecτŒBγ 6”jF(’”.:Ϊ“δ,gδΫ:-R_ToIφ.Β2}ύ=Voτtό'½Κ₯‡Bξ’΄‰AρeAρεkXNͺ_hƒΆE1Ŝβ|i;¦ƒ;œά€vΙUγc&:RšΟDVΒπ€b˜b L1ώήΚƒσ™bΌ€²4Y”έύ\[cNΗ[½ί6Ωw/°‘‘{ά:SμXCX2 ?WBhPη%―ΑζQM±¬ξ6έ_•Λ=Ζί>-©šjjϋtžϋݝ šΠΈ³©δϊΈ¦γΥΰ9κ‘BόU¦xJ]FΟ¨!δBίsƒjΟΰ‹ M¬θl·Ε'IΟ}g™άυx‰3lώϊxϊQ―άΤΤΌhρνCpΜεή$bGUIƒŠιgŠω“ΏŒγϊcy;˜ΞΟjƒSMχΆK‹˜™b/rΤw„2 Μή;ψκaŠ00Ε,υ!–†Pυ”)B(ΛA•†υΙήAbYJ»G[(3=_•ρtŒ3šwmωΚ‹5xǍOΎθM±L!γκΰkΩzHͺIβ„[!ζ%ε‰:S¬3ƒλm“ΤLWlικΔΊ€ΊΰΈ›ŸXlnιΧρή£r_ΪίΆψ_ςΙΖλΩCšŽw#…qe«βΛW³ˆ+[͚½12>ΫεΛkΫ 
#χ5Δ΄Εξ«έwιΏ6^ώοήΌ₯t­ρ3ͺKμ˜Τf™zRήyό`ShBεͺ/ΊŸλ7$Tρt†ΊOlΌΆbgupbMοΊAΧu‰3Τ’ΦgmΊΎrgUPRmp\ρ2ƒz£}ΰ“κ£a“I]²…τδτ–’U Φ`Ά;Š6τ΅΄?q·~"•ύFυ–9ž‚2IΚKF₯έ™ψΞ·ΧΘΫΦ?OipKας­s7˜€³Yίζεσe·WΘ`ΕΗ@μΏΑ1ΰ²nŠ©οˆό™Ν'ΥQζS @ΰ)ζύκV£>„²ˆ*κ”iT½κβ£Qiω)WΗΑŽŒSτS Ÿ9bΏξrLKž‘+Ϋ+ƒ“lgΗ4™ΡκϊCΛOwΏ“62Ε„ωώ¦^Υ/ŒMΊ±2±.ΨΘ΄"Α‰­’[ΡΣ™ξm*Φυ^“yvϋσΒu₯Ω-―'δωwΓ—¨GΕ Qφ$OύuuT5 Ÿ>¨³=°ΉD΄j;΄!η?7δŸιθΡt»›`UM.Ξδ@™γϋokO+]OΏ-₯ κΩ†υ$,opTήΓΝ!ε§ϊ:5‰©·-gJcΩώwΏ•ϋΞwύ‘ΉΝ*ΥOΤΥg$][ΟOZ½‘Œ:Γc°>£τ{υΌΌD ͺ‹ˆ!3ϊIyG(‹cϊNIυ0Ε˜b‰βƒ<ΡLΩ; ¬~΄ΦΠ αφήϋ3²ΓΡyŠΝΣ―V–φΛ5Ϊ@φ­5Μ•Έ7©™3՚)V+Χ<ΝG­ΑyCΖ¦Ub]’ό=υt€“ς΄Ά͜ρBΗ‡ ,©Gεξμ¦ž΄5œΗ.―›2­t΅Ÿc»τθy7κ™ΡT&[φέZ#Ί0Ηy/ΦϋΘZοΊK’μμ7)τΠLπŒ\]xƒ{KΧπ“fΏτNSιhOτΧF{˜~oΗ€ΞΏ‘³FοH3e}–}υΩ©οei `Š40Ε¬ΚG8 ‚(ϋE°Θb ζχ·;4‰WΟRκ)ΎΤ9,½=ιμF™]͚)Sm§Δ½²?Ρ&ΫNΥι7rI’Ν]ε¦ΤγΉΗηΏλέt±Θ\λώJ75Wζπκ?x`ΰ»yžΡι'š9Å¦˜x|g}­Ρ_~-ΌώκΛqύ>lŽΠΏΏ½az>K¬ωΏ|α K΄zmτΠ9βΩ-πΠΆ«‹¨ΗL=!Α¦οe5δ˜bL±΅kόb§ΘήAeΏXC™gŸJkOΤΉΦϊ δϊ}ϊ?Ξ:C‹ίζΡtLΏo;ۜσBώ[0Ύ‰ΩVφRο’Μtž¬]sιω°²²Πγ|?₯ :»δ΄—ΏšΡό™φτΈΦΚ=^­ο›§¬»ϋKφkβfUZƒX°¦ŠŸφ8DSƒΦΆ³g΄ΆψΉjyΖg[ Ητ»Νtζ>Υ=}…bgχ?“ϋ2«χt>ψά«ŸΊ`ϋ0(wG"ΥΝ‘τ§Έr&ŒΆ—%ϋ„Œ…7HυϋZ _θΧμοNgχΚQdXy1νQ“c vήθ˜ωŽ%3Ÿ/,m£ϊ.,|ž‡) Πΐ«Qά5Xe%hr1υJM-ekΏjώμlϋOgΫΎύΚC5σ]kMψΚuΰKׁ“Ž΄£Ά˜yτ/ Η΅Ων9Φ§ MχσΚΥ8Jςξ¦ύ)=Oκ:΅u Ώ΄η”υ—Tυ—[•δ΅He¦Ο(₯ιX^OaAΗ™βžœ_\ sιΧ™\m[YΑ‰ζo zςŠ{ς :Ώ=bU΅)υl’Χχζχζt|{΄Ξ€χ„…ιͺ-Η:ΠΠΤΟφΏ‘ιQα/φkSmΗφtŠΝφ¨ƒi' {­ζϋOΗ N ¨ΰoG`Š00Εβ«W‹oςΥ”YΔSζΑ')«!ΥΣ€ζiA–i`Ό~5e΅Ζw;Ÿ€E―θ_ϊ=JΏjrέK5ΥEoO]Ό~μβυΓπ§SLύ7*V―gžbΡ5«({ƒ&λ(±τ{ ˜~Υδ2Σ¦˜h‹Χύύ0Ε˜bqU«(βEφVRζΑ”i Ξ+_M™+)ΟSoΎ¦y˜κ_ϊ=NτΐΨΫ™jͺ_οιHYΌ~μGθ‡)ΰΟ¦ΨŽΚ•”½Κή¨Ω[@Ωθγίdή`ύEΥ‡‰ώ₯ίcΰτ_žώωωSmϊϊΤΕλΗώ„~˜b L1ϊW"޲40*[!²Π|Ε ΚΎρš¦λ,>Τ{-ύ©ψέ‹ΦώβΚϋΕ•ύΕΏ5F˜θWκ›"―ϋϊ;φ[Χ…xkΚ9ΦΚ9mΒnέͺφ”s^€zχΌηLΎ_ ƒGp.τ}88]Ξž[ΤόB1ŠέZ^Θk‘ή©lθ_μSν™΅Hoφ7{jΡ9§3OwώΉΏ£ωΝ/`<‘Ψΰzw,}Zl}ΟψςŽζŸσόB1Šέ|wμζΫ£UYTo*;{ΖWηϋŽί3ƒw4Ώω…b &»±ΌΐgQ}}‘ή^ς›=Η³H_nhχwjω½¦E{~U͞ΎΒόsGσ›|(B±Τ›#ι‹Κ’z})}Z4Ο›ΟNl*›ΗžΩ™ήοh~σΕ@(vύΝ‘Τλτi‘ηu?OΟ"½S7§Εψο΅ό3Gσ›t(B±Χ[Έό_{}8½ͺ¦7•‡Ω“Νmp0ΩT³ηpϊ΄θ†ωgώŽζ7θP „bW_ΊφκpU-›ͺ{zS_Κ7Ÿ“ΕŠηχΟLmγόyΏ£ωΝ?:‘ΨΛΊ‡―ΧΥτMU§Ε΄ΉσΩή9uσ―ήνžτ,ۚφtηŸϋ;šίόγC1Š]yy°ͺιYτŸ7u5½*;ϋηL¦vφ4g¦Vœ3‹ΉΏ£ωΝ?8‘ΨϊmόE―.64αB―^€·•‡½ύ͞,ϊ3L{zσΟόΝoώΡ‘Ε.?ίωŁυzž>-6?―ΝιY€wκΚ‹τ,WUf¨‡+Μ?χw4Ώω‡b 
»τμΓ₯=‹κλυ|ΩWϊτEσΌS›χoύy4Τ`ιx•ωηώŽζ7ψP „bj.Ytχw>»/}QY¬ώ½²ΉχΩΞ sGσ›p(B±‹O?\Ό³¨ΎX€/ι‹ͺ ι!šύσσΩvOΥbOsN§ΊσοŸυ;~Κσ›?‹a‘Ε.?SΎzLs§ΊQM#=Ӛο;5;χ27ϊϋφ­ΘΞ ί€|Θχ ίς=Θχ€|ς= ߐο@Ύδ{οωδ{@Ύωο?ωδ“gϞ]^^^œ™0€0°0Ό Ύ:ωώΣO?M’δβμ…A†‘fπUΘχgίŸΐ[ŸοΓ‘4wŠStxϋσ}8VώβN ΞΰνΞχωg՞Ή0ΰ ήξ|qe ίΏuωδ{ωω^Ύω^Ύ@Ύ—ο@ΎίνvΟ\ΐΛŸt―|€|_Π‹ώΡ±+ξο'ά+ί€έχOΧOž~xγ΅ΫΎ|Ω)χΚχ… ί_^„ΘώΏσ·χq<ύ•ΏϊΒu—Ηαzτήƒ‹g!ğ|―|Ÿσ—«½‰δωžω~»έ†σmž|π8τχ{?ϊ΅oύΔΧΏpέϋρ―…λα;λˏ/·O·§ή+ίοg•η*ΣΔD|ΟΐΝμΎ›εCp»ŸώΖoύΤΧΓυθέxύ χΚχ΅ηͺ³ΔDε{δϋ %ψ°ƒ>$ψΣο•οε{Γδ{ωώΌΘχ†Θχς}š|fŸ¦Ω9HYΉ6Ύ *Mχιs_₯‰ΌΑα₯ϋ«U΅ΟŠδ{ω~·œvš΅r)RΦ;ύΙzŸε%Λι`0|n0Z&i€λQ·U-Ώxϊ,ΙΔ¬ίiΥͺ•ΓO¨Tk­NΊXg9«ιh0 {́V?ό«π‘ΓΕζΪΐœ.§ΓV£FρBΉ\©Υ›½αdψV³^»Q‰Ύ…rb£έ_lŠwησŸΘIΓ+nΏ™‡a”K±r₯ΩιΟγaœΎΊφλωΰωκͺVž«VkυvoΈxωe₯›Υβ…εz—w.ί_Eφπηl·ΫέεξɏοςψH‚ψΞϊβΩΕφιφτ{s^ουεϋέ°]+SnηYlΦΉͺπω>[ͺΡ-Υir˜V§­ZΉt\₯>\μ²+Iηθνι.p'ΝJιˆzgtc»MΧύf΅tTk0Ο ;σ‰œ0Ό‚φ“n£›Ε «+HƝzιΥΦ(ΩΟ―n/wτ{ΈΛ»ο_΄ρΉBmύιƒη;θούψΧς ώήO~-\ήpΉ/ݝ|oξ:hχAv’ύ²])QλEρzή­]ΕύA?ΚραιιzT.Υ_$ΩKIχh…ξΜ’ »žtJETΪ«4ŸΌW­bC¬v¦Yawd"Ε‡Wά:¬ŽBΚΝy’ε_]aʝκ+?επΦn’έq`χύΣυ“§~ρ {ηCί|ϋαρτλό`»yϊδ;O½7wνΆ―qχύ¦SΙeαZ³Σνu;ΝjξΠ“α:k\H#•irΣGTνn0θχΊ­FΎς6?«ϋA=Όi9—£ΛAψKož$οv>o7ZA³–›FΈ»,2iΗC,W[ή0θχš΅ΚΙ9ϋœ'R|xΕνς?¨5Zέ~?όoΦ󭽡Κb_fuuσοWΤjυj₯|ύΝ•;ο@ΎΏ|~Bύ?όΞίήΕqΨ)ΝυΛυ’Άί”ΰ§Ώ”ΏρΤ{CτΧ£χ„-ό»νkΘχΛA=ΞΑνψDςύ΄ίŒ·Jς5Rm&³εr1›-χ/ήbΡ‹2ngœδΞGΆle²Ι"Ι¬š;5%’L«₯H{8?œF²ž·γΑΦϊ‹ƒ„χΏθΟ,²[ŒκΡφν^ρώ{ξ)<ΌβVΓF΄fκ½UόVi²μΦΛ7bGρΥ•_ΐΝΙrs5‘Υτσ•U.ίύ|ςύv» gΤ„ς‘έίϋΡ―…mςΧ^Ήώ]ωן~o8o'\αdόπ`ΫνΣνΙω~wΦΦ*»Ζ’XH›ΛτΖΐΪΌξhψέ΄}υŠϊ Ν΅n•oήήΎŸΥς§¦˜v’ϊ?Xξ_ύ{εΦ:{)™w'±Ι³—OOΫη8‘‚Γ+nΥ,¨t6…vθΧηρ\‹­e#χ»9Ι aχ=Όu»οΓ†χψˆ›θ:ΎƒώΕߟ~oώΏ>zχυμΎίΔη΄tηI=γ•ρζϊΐZΏΪ™η 7ηDοv‹|Ÿ\ύΧ 1Xή<αqεπ  Uz]υnέ”›;•ψδ–βΞ"§ηϋψ§5ρΨn?‘―"«k3n~Φh]/]6ޞ|ςύGQΎ?ƒ+ΚχaχύΙω~ί―E»€w…^yUωγΐΪ\έtσf>za8Zl›O‡ΏεξϋΥ°yl‘€W½¦'³N)>sζϊΌΏ’fEέ₯‰œœο£A­Ώ/Ό+νiv₯ΘκŠ?«>L³Xόnw%ί€|/ίG ΈΪeG€ϋ$IφίϋΣ΅I΄Ϊ9rϋρχέmΦ«q7„Ψ[ζϋωΑ!,•Φ$;jΡ‹ΗoΏRiτGΣΥζΥ™w·˜τϋƒτ‡ΣΥ™O€ΨπŠΟ4ώΉΒp•΅4«ϊ. 
Š―δπψζxέ ϊfδϋ³€|/ί―šωγζ‹‹k­;/’‡W‹ΩhΠοvΪΝF½Z‰Rσmσ}:nE'Α7[­ζMZΝjιJυjΜIΏ^Ί^ΉRoΆϋ£Ιr}}ςu*₯#ʝݹO€ΐπŠΟ4=\TεΡ:͎ΪΟ{‡ΣHŠ―ά3ΛύρOͺΙχηω^ΎΟhzΎοΖ­7'n¨Ώ§ζϋh vaΉ£]Uϋ•οS©wG³]šςβ$}Φ)0Όβ3UνΘ’ˆλͺzαΥ•L£ΟΪψΕωώ| ίΛχ't^όa³‘tΥyUP.—oŸο{·ήεΞ>f1ξΥ*₯W©τ练=ιΥβq–)0Όβ3ία„|_dumΊΥθ³δϋ·ς½|˜cϋΛύ›Ιχϋa½”SΥ›νN·?Mf‹υ.‰žbzRΎ―΄;v!­ξh‘]g·šέf½Z.έ€:ύl€λiΏΥΊρ#:ƒyzξ)0Όβ3 ‹κ„|Ώ+ΎΊ’ώςύ[ ωήξϋPH“7‘οΣe?ξφυαl΅Ώα1€§ηϋZo‘½Fι~³šέ|―TοΩO€ΐπ"Ŏ˜*“έ«–PοΰευΑώφωΎ2ήdΗlΖωώό ίΛχω^άŽΤίa£όy€ο/φkόš 6έLρ|ΜΒΉ9J³£d—Ό΄ΏŠΫIτ&υr)ߚ‹;‰δ‡wβ’jŽΧΕ_\ν̊p(θΪφδXΏ_κ%ωώ@Ύ—¬άζ|‹|_iOoŽΡ‹Fι–ω~5l–Š>Β4=<Ι§υ2ψξ:ε«7?r{2λΔ'½wώyω>¨ ŽMb7©”4G«’ω>S₯ qδ›vΉ$ߟ3δ{ω>²™΄Kz‹λ[λjpΦΛ­uvΛ|_νΞ³,ϊυmσ}ΊF»ΙϋΛμ&«ΓW–«τκܞΟΨΤ^νOΐη?‘"Γ+h=j–ςcΛΙ= ·ΎHΏτΡL«aγπ³Fλ¬ΐΛδϋs€|/ί«fιP=ΈΝ~9ͺDT¬ωDΫX¦YήrΤ.ΕΊ³δΘCJ§IKQό/χη»,o3^Uι|ώ’I«rus½ΏΛ•ŽΫΥSΟΎ?γ‰^1ι"žDs™^ϋMτβIL‹?Y!pΌ€7iξۊ>HΎ?[Θχς}d5j–"•ώtΉμq§“~«©L6_.°ξ¦ψ ZΣΥg―L“εlΤͺ•K9Να2‹μΊ•ΓΗίv¦‹`ΉΫ_ν[/ŚύΙ.Ν^Jw³a'ϊ˜ψW ’y/ds²Œ¦“¬η½fŠ;ΣMVάωO€Ψπ ZτλΉη/χW³Ψ/ΖέxΥιψƒ‘s ˆ”ΫƒΙj½^­V‹ι¨]Ώš|φοεϋΘώκρ+εrΉ”W,Ύ|`]·―yϋJ₯R.Ά³οΧJyνƒθ»4J9•jPΉξgYd?¨ηΗXk6[ΝF½τ„η֞D ―˜MΥΥu «=αΆπκŠ?«ωώό ίΛχ‘dΤzu­w§ι­λ>:¨ύzΥφx·~ρ_œλ²·J±ψCγCxŽͺu&i–“Μλ₯bΚ­εmγύωO€ΘπŠΩτκεRΝψgBΕWW΄€Ϋ΅γλk8Χγ§4άY ί_ΖωώΆ΅ύ¦bΎΏxvqrΎ,ΗέΚM™·ΦzyKlΡ«ίxZ},]O›Υυ*αl}Ο΄[A™ΎΗ¬ί|eVή―¦Η~QΒgέdΏμ6*―hΝ½ΙΙεχΜ'RhxΕ€σaώW/βυσM–SxuEvΛI»^½ζ3Ϊƒeς|,Υƒέχ» Έ›ω~»έ†έχO>x|—Ηχ~όk‘˜ίξ:βoqέϋΙ―…λα;λ°ϋ~ϋt{zΎ%‹ιΈίνtΊύα Χιφ£Ιr“d―Ιz>ιuΪΝηZ­v§?/V»,ΆYΞF£Ρx<žΞ›$Νbi²žΟf‹`Ή\ovρŽήdΠm7/„ Ÿ5š/Χi‘―`³œŒv«Q―Χ­f£V­ΦΝv4Ϋμ³ΣDŠ―ύf6„9ΤkΟΗ4Γ,†γΕϊΝ$τ}²^―^Ψ, ύ’wπ[ΣμΘχς=°5γC™ξ.pxNψσvχθύ‡α€š/u…Άώθύλόΰώ/£ƒwβpρoώύαΓω:Ί½Ψ΅}Ό}9Hωώ«ŠhO}Pλ%Şά™ξ2ΰŽδϋηvAΨδώe―p*ύεώr»yΞΝ;χσωώΕϊΠξ?ϊξGΙm>β°έΘχ₯ςh]k=n—TgIάq€έχΑφK_Ϋ§ΫέΕξΙwΒΑ;Ηvί‡}χ‘έoŸloρέχ€σZόDίΡbΏ ™ ZΡKZγ Έ“ωώW飋§> »οδϋpNΨGΏ ©ΆμΎ‡Y·VŠ•«υV'h7κΥΥηϋ οεϋ7 v½Z©˜ςp)ή€|/ίΓχΙ~:h—KΗTκνιzŸ§“ο‹Λ€4YLGέv³^«ΎT«5šνώp²άχ/€|Y(ί_<»οψ~ο·ΫmΨ}δƒΗχy|οΗΏφ<ΦΗΧ½ŸόZΈΎ³»ο·O·ςύ+ ί_^^ή­|œ½έοŸ={vϊα9ΟΌέ=za8!η¦kϋxϋςΕ§ ΞήnΘχŸ|ςΙΕιvAΨ\δ:‘έGΒ€3x»σ}$ΙkΨ}l]Αι? 
5€―BΎτΣOウ ΓP3xΛσ}|ŠN8V>~’νYC sfΞ ίς=Θχ€|Θχ ίς=Θχ€|ς= ί€|Θχ€|ς= ί€|Θχ ίς= ίŸώοwŸŒβ?ώδGή©ΠΏώΦήβrΉΒβ K(,€°œ2δ{ΰDιΕϊΫψΓ―«αΊ\a9…E• ί§xνή₯ΰg·€|<{φμMΤ[—+,­μvο€Ηψα‡―·ΫΊ\aQ…₯•ές=πΰΑƒ7‘ο]ς}XZΩν ί~OΦ€|Θ¬XWΘχ€ΜŠu ί2+EYWΘχ€ΜŠu ί2+Φς= ³b]Θχ€Μ Φ€|Θ¬XWς= ³‚u ί2+Φς= ³b]Θχ€ΜŠu€|Θ¬XWς= ³‚u ί2+Φ€|Θ¬`]Θχ€ΜŠu€|2+XWς= ³b] ί2+Φ€|_Τn·»ώθαφπ&œ½?ώψwήΉwοή±wΏ‰[iάΐ(KΣ©?υV\½ΏΚmr‡”Fy%.˜Eͺ"ζnŠvQΛD’ΚEJZΥέ¬·K΄ λeΔzkBΆή7ΦΒ3v·η}αΰc'GNBŸ¦Q°m~<υΕ7‡ηόα€Ÿπ;l!θfόςΛ/?ύτΌΓ†aΐ-βΜΒFΨ`ω pGτθv»ΔήΡ$hY‹cV„u…B!„BΑ0Ύ§Ωύœ >ρgφ:z†ι;Μ±ρλ°oχ΅ΠΏύοoΏύVdΑΨN(.Ά\Ω·±°•ϊωηŸ_Ώ~MΣ`0 /}ˆoΫΆθρη?ω‡~ ¦ijšV©TDΛ²χΑφ:ΧΦ3lrk`.kΜͺ7ςιΔjδΒκκΪFv»Φκ’_›Υ’kc΅ζγϋ»!„B!„0Ύ]ζ9ρc+ρ•+ESΩ’sΘ€c+#EΕΌ΅ΧB0ΝYyώό9€₯νΑOψ™ΝI…#X4^ͺΆΙm’NKΘ 7ζ2d¦ω)H–τξ[εώώΎθΎBFΖm‚B½ΞιK±Ρ’In ,ΒΫ³ώΟω?…‘h4Jζfι΅DDπΩ¨ιαDYƒώ…{’‘՟°†djP TΈ6ΐψž…B!„BγϋδΚ5Ε$έ&wZŠ_ΞCm55Ϊ’,©7x-Ψ@c<―ω_ϊaΑΨ{a Ηq­Že+φ-*Χ$ qΌΟЊδlpψ²NΓ‡Hqόm2½›D:%ŸŒH’αmΉ‰ψή ·α-ˆY5M&ξέ»Η”Ώ‘.†„+…φ4‹π#–¨ˆ< T3?έΞΚξCV™C0ΎG!„B!t—γ{Ίε&βϋμ(±Δr)›tΕN±’Aξ˜v9aό+·νK_8ˆ•½ ˆπ —ϊτιψGY–[­”(<$³ΐvΨ c`$Œ‡‡p,œ#Ύe₯Š»¬ΕR!λNS cιγ{xΗƒΑ™ŸΣ“γγ#Η'§gΰ$K™ΰŸžžB™Α€{2l6EϋΫίΘδψπzݐ«9j₯\ڝ*—w3΄£™σΗS₯rEuΘ .Bp’Mͺπε G.{Μϊ駟 .Ÿώ9aρΌ}š’_!”Φ ·€,Ύ™Ÿξ‰Πψgί#„B!„Ί6Œο“~‰‘&ei~_ν‘»ŽΎδxI!θM€8γ&$„~χέw΄mNΐ`#a<ε…ΗK*ρrτν•,ΫΛίϋOyΦφ‹ŸΔο»Ε=ωN œώL–Ώ?ώu"#ωΟDτ“‘Ώόε/πΝΐ«½Kλ³M–_P‚cω«ΜΆ n΄^αRΖ¬’$}τΡG_ύ΅γ8t‘€H„Iάααp8$#–e …wί}$όSο#‰­¦ήžο°τ–΄άςΝαe5WjUΆΥΪrΕχω&™Θ’8Ά'ΦΊCŒοgC!„B!„Νsβώ Ϋ θΘνŸ*Ϊz[©VΞUλŠξ]tΣ±M`;8†Ϊ¨J"ͺΆIό8ΊR-οŽ•₯ͺ³ύνFU,—αŒ•j΅‘j¦γ½:Έ8ZeόjbΉŠa†i³ηΣͺΕ•Λ’€h=βa=`Œ^‘έkW+’T©χζZΌοetΪUiτ‚$ΩΠ:&Y.ƒΑ>,X₯πΏύο8œ:<<Ό2Ύ‡10ΒQpl`Οq& ψš…­–Ψ―•p}^΅j›ΰ’0 ]…‚η/IΥvΗ ˆοωK(¨ΥΈίtηo>Ή?[|³ε? 
ωϊΰkš6nΣD·ΐpΡγο;ι,Λ2l9>>&ό‚λ“λ§#+ψΞ*ΥjΟt ….x8π: ώ@Ξ S έ³{M˜ZΫn«ΫΪΙ§λk#λι-IΦ†„v5νHή Th[>Ž΄~|ΟΞzhΊc}ϋ|‡>Ÿψ¨)O΄`ΣΕFΉv‘©ΑγnKΚo$ΦGΰ΅£ξμ¨Ι;[ٍΔωΘΔFV”uxb­ΪΑχ–.νδΧΧVΗΦΦΩ­YλίΉΊB!„B!„ρ½ΉŸίλuΊg*–*ͺ†χΨdYςŽK0”eλΥLtΕ#ZTΒͺlCizΞΆινG_RMb«ρ U"νz)3c²ΐ XDt™ZθΠ°ώ―ύ+J{φμ4ΙωӟώΙ"DόL©*?ίs|βτ6θ•Μ•;ŽweŽd½έΘ^: šm›ŽZΞzWΠώ_‚›ξΊ{π€©ΒO΄ΰ"\–˜^ L’ζΣφΙUΝΌ@…64r‰΅—ˆ„#ηΒαπ†Hχk[λΒL‘uΉ?>T ³ύΏψπ96ΫχkžγΪVΧ"‚G8±3 yΦ“\M¬³WΌ δCΒl‘΅-ύφΥB!„B!lž‡ΪG'7Ι(w]Αt[Μ¬ψJNΪ‰›£ΤŽχUr%•¦ZΝH6Ν|λ…€‘εΆ9#΅•˜ο*‘F9΅β/VѝΐΕ~!ΎgΕω.^1ζ?0%šΛίCχpψ/] `ΜΒγ{£1)‘XΡΰϋΌ8jΥTKWόά₯g«„|Ό~ύϊΜΗaργϋOžΎ€-/•ύι¦ψγγ3_pZ²DθW@Έ78δϋοΏ_X|Ο‰χΕ•P„μεΈe*Χ ŠΏO[ίΠΌώΡWΟΖ›ž<€Ϋ>~ϊ2°—(f…68Β ε_|ΒkΫ’¬χ-d(m„… λ- N-G„Ωβώ·0S8ίχk—O7NΎ^03j_Ϋi‘‰£½uaφπH~|ϊAk[JˆΓ»QW!„B!„°χ½Σ“rthV§›uqe"ž-·{¦CœŽ"Ζ§[wM:ϋžJzΟvlhΐ@g€NOλh™ιαuΤ,Β6;ΥέιζβEve§9Q1œΡ¦ŽZLN¦Sηκ3σP£ΣΡU16YΠTοθzΟ„νΊ+/ΛIͺ ›FO2e—νίΗ‹»’(Υ φZ<ο yGβ9Eξ˜j₯8Ή~¬n,gσhβ ¨8ω) €ΉΊπ~§Ϋa ŒδmžΠ[άι)τÍf+ΗηΕS«τ“₯σΕ†aΫ£‘YΊ5[ι0G LιυŸυός κ?ωΖ“ιΣδtω'ΰCXS·‘=υν|ž ‰οω>qφξΛ–uΣχΘiˆ9:΄a²—£q»’™ΆQ§ε:Ήtt2zz₯˜bΞΐqƒβ+Bπς°HgΪžLŠπρŽ"\–˜Υ²,Ϊ ‡4Ϋ‘νςƒX>!{(²Ύ‘έ;΅nς­-]fž°› 0eŽp$Šδ?€;¨H$IμYΑρ}πuAβx:ήWθςxk+ΒΌφτΦφΞv6Β>ΑΪΰ6ΥB!„B!lžβ™\!ΗΘf’>Σδm1EΣλqλΥγ“€²q˜ψ>šλ°­ΗιΩsΥσ=J–ŽΤ C‘ΩSFrάO;V4‰[{2`ψδ‘τΊMυ,}&€Ί™Σχg·mΣwŒΎψŽγΣω„ο]š-3O@+_lΟ5Œε[ΊV–εύύ}ψύ‡~θv»°p¨Θ‚-°φŽΗΧλuξ₯kA4U*οN•ŠΩT|ΕEΤΟ‹«ViξIczͺ½›’©~gΎβk8Nί?ϊ8ώΰΌYIfGŽο—»>t΄5—‚P―F£6›Ν…ΔχܟΈ£MΖζz„Ρ(ΔhK4O|ŸQνιε¦χηX‘ždς­)zŽ_‚SνΩα‘,ΚΟθj /ΏΜΠΩχεώ"Ό³1+΄³<~χ»ίΑΒέ§#π << Q>Ή9Ώ* ­ζχθ’³Cq=δΪ΅>Y[Ά/¦έηYmZγα͈;‹·Υt]7’mκZρ}DΤ,†ZvΥύ|ϊΈχΜθτ^Χ‚η9¨m'fΔύΓ#χΦDmς,Ω/€εών©+„B!„Bί‹€¬βι{²:‘h(™€a₯;ΎΟBFΟ’h¬¨Πΐκ†σL©6σ΄³’¨žή>§υŸψžKS3°&*A)Ϋτ±j/`έQwit,άΪ± eχΪ#Σ!Λ&ΜBλhjι+Ž+·MrΗAsΪ’Δ³eξcƒγΡ`œUΝU«ΣΣf*δ2§’G֍ωJΘΗυfέ·ž@«q·›ΚΙ΅]Žψ޻嚿S‹‹ο9>qϊg$*‹%S™RY,ΔίΟqƒ:»ΎΣνψΕ³§ίΈλqσ»Αέυ˜>{χξ Πζž‚ΕζψΔ‡Υ=’eΉ dΉ₯<»υνD˜iMsνψ>œ½Ήψ~ΰ^ŠvC‰Ϋ ρΔχ $%"!Α_{ίί!„B!„ρ=mΗL›°S|Χ&Αh”™Tνΰ‰ν½bμ"zo;ΔCΟ1]ΒYŽέΣ!Ιgζ‘JΊsΝψήh']‘Ϋ5θηοι»ΔΓ6zj£ZΚΡtn²fοΐψž―ͺΉj•ž6YRƒώE–GrzͺΨΛ§ΧΓ~Έ=u…B!„B›η(ζ5ΗΠYσ γ/υJ!‹Γ?Q³i@ vΫ6aΡiοΙ]F–b+~³ΰMœΜ/Νι„ΨšˆEγεΛ#ν29OŸdθΖOΎR–{ιZθu#Ps7ρ><Ѝ‡ψ Ω7΅–ή‘4M;’kR~cUp e»l2{rίZέV~IΜ₯7vο΅Ί–eyγϋPb§;XΓ…Δχ>―k#Ώ§i<ίδ,Ϋζžώ•a°½zgί#„B!„Βζ9Δ,Ε/7ιΥ 
Σ>σ₯jΟ΄ΗΤbςrχyίGΟKZΗΆM­ΎK³{š]Ngž‚xA혎m=MΜΑf6<2Ί)Y¬ΐ@Ψ昽F9ΗδVžpŠ=6)Φ•ΆnΈΏ ’j:Žmπr¦.Τ αŒο9ή%ƒTEν88=­‘‹Σ,X]βήχ|{ίsT5o­ψ>=QpZ³§•ivOΏ˜2O ωxυκUΐΤϋ/ήτΰi`89YŒοω‹P-Mο‚•vΗΆΟ{ˆ• Ӂ ΤНΉγ{Žg‚SεKζ 6?ywαΖQ„w4f…υfsΉδψσ,]‰DŠΕβUέ„¬5ᚲrŸ€²Ά7$#W³v-Y9λΩΩ,&Ύύ|DBγ{Ί˜-#‡VBΤnA]!„B!„ΒψήdϋΪΖχ ΅K3»’‹­ψJ)¦»»wΤd¬ΪqΨy£Ύ UL¨»LJE―@»ρψ‡Sz–9°xΎΓΡ ±€ηXΠΟ;+ρ=Η»l5ΰυΐHΥ&7γ{ŽΟ‹§Vi|τ?@:g ψ駟Ξ|άτqp|'' ƒρ=φͺγ¨XQ™3ΎηΈAq!8Ω§Λ/xdΎ”ιΈΰ"\‚˜Υqœχί_pωώϋο‰Ψ%ΈΌχή{Cڌ>˜u”ŽWZίn’ MJ―Z{d‘ }ΓηΟΊ˜πΔχω>Mό§νzρnτΖχή]Vk#|­ψθA―ˆ>7ί#„B!„Ζχσ"~œvζΚό…viπ¬@«ŠΊƒ‚)φmsΖβœΕ]ΟΰxΡ± ΛPaž©WRR ΒpεμΚ Ρœ¨8ΣΥSγ­»η€UhπκžDίs3œΜIΫμ9u±ƒΆΊΦ•οεtΩψ¬Χ“,(=‡ά}Π©Yœ ‘<_|OŸhέ.§&sjŸ­Ωχ±l±Ί|βlΉαΜzΞ»*G ψε—_|;Ÿ*Ÿάφππ$¨η8œœpΐψήSŸτoόEH+0ξύc£Τvl5E*(»S΄άΆgή΅ΚτΤτο£t;Η Š«©—ϋ3ή/<ωN‘#‚‹pibΦ|>/ΈΌύφΫ3|Ψ»—ΝΝMΒaΨ’ΆVC>χκΖΑΡ€°,½–žuΐz^Ί4ΤΦ#‚[Zξχδ—šστG±ϋš@­Nfί7]ך‘¬£maφ.08Ψrgψ‘΅τ^³ΆςΔχ ΫΧιPF(‘—ΰ‰έŽΊB!„B!„ρύόμžΪ¨VͺF½Zm(zΟ$ί«Ξx0ŒuUοœTSκ9ΤΝφΏΊR―”wΕj£*–α§jΠ‘Αl£Σιτz=ΣvΨσiυΚδΒ•ΊΦ³oό]bυ4₯―DͺΦ+"όTuƒ, ڞ?ΎΏΏΏψΞgƒsώzUΝ_«4Ύ§έEzΊzρ?@CνΩ‹J^―_Ώ>»pZΒΒψώ ‘ΩV.jJiλΣAΆ‘΅5MΧ Ϋ!όΈoPσα‰Φz&>=w(+/OΉ‹p bΦ/ΎψBπ€&9Πζώt~‡‚G±X$όϊϊQν@άΫkςΑήΞt k}‹ψt5ωΰ`|³“jM}0τΩοŽΐ_X–{Τp0έ1°ΘβX`t₯αΘψΆςξΩχύΛο@λ@:ό¨5ΰ5qΓψ!„B!„0Ύ³gΰζί+&AΘΔ3Όρ=τίVl|_TΘ›πσΟ?ӹϜ–zκ}@Ο ήΜ½Ρ"\‚ζ9~ψ‘ΐ‰ΞΣ·m›όV΅ΆV…©ΘAwΪj'¦V·-rK`|B!„Bίc|0ΎΗψž6_(Ϊp|Ιβ{oΥΑΓ€νί/Aޞ˜υλ―Ώζ°΅΅E~«ΊiZ[O$—»γ¬οiγ{„B!„Ίiί‹w:ΎGίσ7ΟΑψžΫ«W―ΞNE–…8²ψσ ›/Βε‹Y?ϊθ#a°z-ύ:ΒožΎzέΪ΄N<0ΎG!„B!Œο1Ύ/FWFb “ τ›‹ο•βdΡΖ›]x`0œΝ N²!ΖχX„·0f•$ι­·ήFB‘ΠgŸ}φθΡ£wήyGπψΰƒ`­Zh”O›νΐp8ω-³ŽςλΎ~h-―Ydρ0ΎG!„B!Œο«Υͺ878 yszνF4Ϊ&!Ακ‹" ΆΗχGέv§Q?§vμ7žΞ9ύ_‚μγ{,Β[³Bξ»ο Λ²ΘΘp8ŒD"‚ <„΅h»|hΉΣφΗΩ=²ΊΪΈ“Ο¦Η²ω­=©¦u-rC0ΎG!„B!ŒοauΎ9|8NBn „ωέσηΟχχχ‘8α'ό[‚γϋ€£?lAΞ»ˆ(€C– ί=ΖχX„·?fυήΩ>όsΑεΣO?%·Βψ!„B!„0Ύχ=Θέ‡PP|ΏP¦ξΎ~ύϊšω) ƒΑwτ%ίc.AΜ οΐ½{χ„ Mϋμέ‡©#1€aχtΕΈ 7xΈ—#™…y9έ°"šμ“Ρ²ϋ}€—γ0†!ίδ{€‰“ο/Fšς={ςΝ³ΜyΙΓΓΓNEΝWσωό€©‡{ωή. 
³n6›?£υz]ο˜xΎΟp_F΅ΰ˜ίψ‡ΏγΆΫνίFςKdV0W Οχ™μw_…9έ‰_ώŽ{ΘσέM ~~‘όR™Μΐός= ³bο™s ί2+˜+ωY1Wς= ³‚Ήο™s€|2+˜+ωY1WΘχ€ΜŠΉο™s€|Θ¬˜+ωYΑ\Θχ€ΜŠΉο™Μ€|Θ¬˜+δ{@fΕ\Θχ€ΜŠΉ@ΎdVΜ€|Θ¬`δ{@fΕ\Θχ€Μ ζ @ΎdVΜ«X@c=?hdqω(@#«²@ΰξ{@Ύωο@Ύδ{οωο@Ύδ{οωδ{@Ύδ{οωδ{@Ύωο@Ύδ{@Ύωο@Ύδ{οωο@Ύδ{οωδ{@Ύωο#βϊϊϊδδδx/δ ΝEΉTK_v; ΙΜ ί~~^]]ΠH.Υ\­₯+»ΠdZu9›YϋΞΕΕΕγγγΟΟOΩ ΉBs‘ζ:­]Ξΰομφσσσažžž^f ΣGΜ m›L|΅έώ[ΣZπ#’t΅έ χΛ"~-ψ1“|ŸWεο“ΗEK#δRΝ՚ ΆtRwϋ0 /‹‘1€6™™δϋϊ|†ξΜi p‹N}Φ”IένK;z8€_›ΜLςύρ¨0£νzϋΟrr½ΩŒϊ|Oςœ&AxΏΧιŸF— jνMσΙ³d―ύVϋ,«οŸ?†ΎGΤχΥϋξ{v}O¦;}O†ύ·uδ ,ˆϊ― fώ…Š ΎΗ%„ ‚ ‚ κ{:§―ŸƒύιgŽίοuŸ>ƒ6©GΐJσJwόwΕVZW«β­›Ώ*_ί#]λZΊθΊ.σKDYΣ3M¬πaΥ!’ΐΌθoμΑC«ΎπFψχvήΜ²’_ϋζ_¨κ{\B‚ ‚ ςh Ύ'9r ψΙΛν“» Ε€Χ«‰©w{'έώyϊγ°mϋ“ΓKαέ1Ά{ψκαθ"ΉrΚ°τϋΦφήϊǝΛCP[{dx3ΪώI2βΡYΪO|bΫώfko³ε›ξΓuGΓ£ψό(^˜άHΉŽFγ)ωfΣ[KβI&e‡έ8σ >?άίγ„p+{‡ριαt¬ΑY2ξω`4&φκMwe£\«=kdΜ8A:0λ³4ΤΡir{šOO·{G·Œu˜6τzzΛKήέ?ΰ‹ΑθάX™œ1πOύαΩp0"Γ ι{xξ“iZpH2‚ Ύ_ aΆœo†om\CKBzT#Η εš€y!aοmUQΤK’1όλžTEV.‘eΥIn ]₯Vψ€£rfθδχυjIHώ%/‹uÍB»Ξͺοǐeωj<ΝΊŒΒΡe±,π)BYT χΆrι†*U+IS.…JεZC±\bTڌZ»iYz£΄4CM3-ΣςΒΘ'·’³‰pΚQ@y”… Xj£œ„S,$,- Ε’P%Ν rΒIΊRS’žœδ7‘$YšΗ—j’Ζ$w}ǐκΥ²0ι!ΝtMΡ­ό΅pύ(Ή”΄}₯ΪΠm/^HϋΠUΥRšλˏ}ϊ©Τ,7&€e–ζ-IX Ÿd­&™·¬γΐ5κb™+N›s|©,J*$yρsψ Y ± 8‰€.Cuƒ{κ{r 1/ι~ ‚ ‚ κ{Τχ›oτk)O7§»Mk*ϊ[ΦΩl}ω–yώγωυα₯ΑDΛξϋλYΆzώ³­MΗ_7KΗ?ίΎ1 ΄o­:Η9Εj”ŽωΡΊΡΎ΅y0ΌYϋώ­{?Y΄}ΒχOίΈΌ6ŠΫM^fπ9Ρ~=š ς‰ΨžQΥCCΩΚ9K ϊΕξΩ&Hγ|Eœ΄\mΆoDh¬»§ycνΦηi›¦ί7ΰ#¨z!LaX1UߟυWή9‡$7Γ1A}Ο^ ŸζΔΑ[EΈ”―ΉΡ5I}φNQ–Ky›ϊ}SΚ€/²κ{£ ·p’Τ(ηt\κiΰ@`Ι₯œP ΪΣgτ?"1 κmβa%kMΝj&„†Ζ$σΜIJ=/ΔB]wηL#Kρ˜†xcΠVΆΨ§ ‘η=?‘γέƒ “]Λu‘Ο MύN!+΄ωΥ4G[²xkξx‘“΅½«ΧαΉπUbm†Ž*PΣ¨ΘAΆΉZ₯5‡t-πcI¨ΠpM\<‡aIϊ*@AAΤχ¨οΟBαzλ½}:¦3ϊωtΪψ­s…Sΰ}θRΞΒ%τύΙλη΄–kϋΓΜ&nΪυΉ“)κYšτxΈ4μ„ύ•Φς7H#αςτύ!9{tνικ+ΚΠΝΝƒ – R8OόΥ)=ΌuNσΗ}ί€œ―yOŸzt-θϋτ€Ϋγδ•`Œ Ώ Τχ…Βδ—r);tΥόΩ&΄₯% Œϊή¬Α€4Š’Oˆ\΅xgϋ²ΕwΟhΌIΥπKΥ”d'όŒv.Vύ8‡ΕL jξΊ60ψ»CςX§lΦh /N˜€°t'Όξη~L–¨”¬(¦©•;ηWθΕ¨σw5Α 3Άw”Κ]Ο½κ±^_1Θ™"Θγƒϊ>5ψRR’J”YίΓξ{―Jσ–…Eλ{@μιN^Ž΅X,δG{nj€w`εjfϋZf|#Œ'$4¬8ELP²£‡§‘QίG2E₯e‡qΚΆD{”"?J©°D&’€ΆρŸB ic“Ω‹i‰HaφφAδήβe‚ΡγM,fZr‚X­–yΈŠtΑ:‡ζBY¬ŠειΠΝ…Μ#WΞΡϋE‡άνΞ οΩ—τCΎ 
AAυ=κϋ^gͺΤ·ΎΖw1\£ί¦ο_oη”Ωyώ₯;ž‘λσ7τύχΏML1QyΏ?‰ΟΣχΒ‡`<ƒώ9•Θ?sυ=½θΪ―Β`ϊβα3DžbΎo‚’Ξών―„·ΣΪq­Β[ΗγYb˜Ξʏ!λοΠχΟ`Ÿ>„:)RLκ{ρλ€(Sΐ)φF‹°νχΡχƒύλŠ@F<‹ρ~bπ—ίωcy|Pί…šζD©« R)²κϋ"/Šb©\Kz ΝϊR–Šlψaΰ˜`SηΤχε†ζϊηθε"ω/©­2Ϋ΅‹krΌjd4J˜Q‚Ÿ­­‘Ή ₯Œΰγ―gδ©•ΩlC1 ’EΜU4Λ Oo”σ.{™υ½«²¬λv˜΄v4‘@ΊTζ)ΣA~œΔs½ΰν8ΕΡΘ22‘w3³ Lς ŒZφ#`Mk ₯œN"%«»‹ε†ν‡qh5|ΣΐήήΘ>zNTό4ΩΡ(ζΌμρgλΐ**lu—J9οN²ΟEυΰ¬αLλb-XπǜLBA¨ΩΑΥΒuΰaAΨτ=ϋ’f*@AAΤχ¨ο{Χ…Ϊ”v °\}Ο­tΘ.ιοWSΙ V}C°Ίαlz»£llϋής­ϊ~«=ΚuρИο‡iηι΅όΖ›‰rh6έΥΝφΊ}œχκ”4Δƒζ¨j(<-­“— θe‚t}"qτY:-kς°Θ±šzš±LζωήϊŽ"xΡ:“τς•‚<¨ο’dΓΏ…:7‡Ύη«ZxϋFς²βΜ d 9ϊ>ςέ\ΌΌή-U)" X]ΏΞ– Εδj3ιpΥ λŒh©7GnΚ&έ"Gq—σ\Άc€Ψ«^΅ψidχδΈeΕ…ΗnΥ3.[󘧜 ퟈ωVT/“qM€Ό²‚YΜf)48}_O§—ΒΟ>}·RΈΡIdgrW¨Έ·L-z¬ν‰²μ\=œS―f4΄uγΝJ―˚γ]Νΐ“jΥ+D±fψΡ͌ρ•Ίf:Αδ₯—­e3Š)°―ΟΠδoV <±Hώ“Ύg_ω*@AAΤχΈϋޟ}6¦^τ=Ήσš΄·Π'Ψα[¬ϊEgΏgnϋλ›νΧο??}5Ρ»€Ύ'O……nτύY΅}žνŒsΖΗm'Π[ފςυοΆ(>Γ¦οd`Τ°&H@fތο?V'ΐπυύpmZ9gωklϋ›­½ιεŸΪΛdA'yb’yΒ5#`ΡχΜKzΎ―AAA}΅οA΅S8ώΗ3Πχ·*ςΡΟiυλϋ(wΫ;yK»eρα’θϋηίζΠχ'―‘@όoV<&‰{o_QΞΉeΣχνΝίΘ0(a³LοσζπPΉώ–±(/Xίλ΄zQίjΤχΠz`ΦχPš’ο9Š3„fθ“萔Θΰυ@³° –ΤχΆZη ΄Κϋ”1f/Rn˜βΘ–ˆΝΒζ Έf)iπ€˜fO#«Ύ‡Š+Π§Μ¬οƒO―ά³€μ]/C@μ 8₯‘ΑΦώh€ΔcK½_‘άˆψ“ ΌdΈ1φυ5ςU_–Π$ψŒϊž}I³ ‚ ‚ κ{ΤχPύ₯}2¦2Ψw―Ϙ5ϊ€Ί%ϊ„-δ€mΓ-³Ύ.[OWΎ­m~ώτύΕζ»ζυ@kϋCΪΖότZ~ή^m½έάy)>Dί[ ΚA¬ƒίKίg³w«Ύηή”±žCί―Ο<…εgωWR<Η>#κϋ?΅Ύ‡Ζ@€‰©Βιa€^dΠχ€«Ι[ H7ʌ³ϋΩ $’9 <Δ’ {ΐq©•"!¦ΩΣΘ ο)6|aSfΦχ‘IH(Ήͺο‘ς;Πχy’™S{b΅Π)N&ε+"ΝΙCϊ”ΐVψ;[ΛvL}}Id—ž%F}ΟΎ€Ύ AAυ=κ{ΰb=)cΆ†~­\Εo‡Eφφ7ϋ,οΐXΈ%σώΰ}πvϋη³œ_΄Ύoon=ɜΰJήhM΄ΦœŸƒι΅Yυύ‘ύu2;κτzτ Ρχ-Θ<ΫN†€ηΠχηo§ΕˆVα Κίυ=κ{Ψ}ΊŠ”ξ!©›έλεK:_+’e?„j½‘Ά₯T¨οΘ–€RC›UveՍιΜpΝ ¨αVύ8fHγcθ{φ)3λ{Ώš-ψ~ηξ{‘ϊ^Ιω •jCV-Η‹«ο#ϋυPΥjΥ|Δ†κΐ’φ,©Z!«e>άVζ3 I²Ρ_°’˜λϊ„έχπ)ψ΅ϊžύ«AAAPί£ΎŸNm’Αw¦Ω u‡ϋ)ΘYςθΪ‹Ν7zζ(ΎΉKίθ=·Ύ?΄a¦‚’/ΑχΝΙ·OΖη|}―ά~t­συφ·#i x8Αu»οW σδΆza#€ŒΕπυ=σΏ>ώ“τ_ŠM>Ήήyƒρ_υ=κϋ:Gsvz•£‡Jχz°η7€ά‚μDDi‘ŠbΟΈ΅ϊBυ= Α^oώΞ9Ξ0œB z&#e%zPΩτ=τ'…ΎΖeM+σ”ُΥŌΏΧό˜’₯₯’-PίuWŠ’ΒfψœN¨+PgnΰλkϋΘ.gœ»Σ‰|ΛΌΔ²,Ϋv“ΐ£ΐ³t₯V™-ά§Dψ4wόΛζžc)šβΞΕ]ΰǜxTυ8‹ž½E²C}ΟΌ€οωU€ ‚ ‚ κϋ›œΎ~΅b^κη(ώύ=ώΊžΜ³/ρ˜^_ޜΚΩ'Υoέ1ΠέNΥ0ά₯π“n ‡;ŠWΒωυύ°»ΗM‡ζVφΖω ΧίθΉϊΎσιΛ4ς–uFšϊεw>4%‚Œ»Βυ”νLŸΓύθ3/NίΓ_HιτΑμSΖb ψ‘ϊΎϋισ΄«-βΜd{cZLι•7…@Pί£Ύ —‰†―κ`.mΜ«Ύ‡΅Ϋ;δ͐›7#θΝ‘οa”˜Vӟ¬ώOηΑƒ›φ Ή )ϊEμidΦχυl’~ΫΉ£ ;dž2υδΖI 
Z(Λ3w„J6K‚dΗ‹ΣχdΛΊEj_Rί›δυΙƒˆΑ˜3·jΩΉΞNΚUΕΛδ€\9nβΝJ5³Φ§"ϋΥΒl–k!ύ<δΕ}Μγ()P”mŸa·“ΩJΏšλP©YίƒDHG 3gΐ & {Ωυ}©εΒ]πu“}Κ΄G@YΫ°EšŠ5ά—έχ4ΈϊdzRΈG€°ž­½[YΊ›‚¨‘•¨TT—8¬•ή»»Θy Τ’C}ΟΌ€οχU€ ‚ ‚ κ{ £“Ν[·ιΤυήݍΗνΝΙ†ϊχ~ ~6xω*£ζ…•kϋΫ Ϋ{ΊΎb~ΌϊŎ/&ύƒΎ'ο^ν Θόm(μžή˜Ζφγn€\Uχ(½ρΠέαoXο—Νήp¦“εχΉό½5yK[ΛΣfΠα„ΨWžIˆw8˜&Hέ}oΌήψ²L '~6»Γ™±z9q2 kς:ίaš‹τ½=J₯=™…}p2¦σψΓ#υ֞ι“lΖ²hPί‡’P ·ά}"Ω&¨σ9›IΓά["«Dμ\ΞΗk”A¬AטΉgή}ΟΧt΅V έ/›>QΑ§^*’­„ͺΈυLPΌήwFή jˆ†OLY,Πό> †€I}_S΅š@¦„eoξ4†Ήn]-€―T…|ίΫΠ£O™ςς*`ΙΥβmο7$#šοcBΑΥκ7Ζ-TUΫ3jδαS|K yΉγ*Ί ‘<°}δ)ΥRnͺŠu³λ:|~ ΈͺlFΩ-΅Ξni]ͺš^΄π9ΰ›U2½C4`hΚ’¦,!–%M*@AAυύ£±ΫϊΌό¬ΉότΧ6QTηbm²/[_?Έ# †,: κϋ?8kkͺͺ†ͺj–γΗμ^―˜žω–‘λ¦ihša9α£λͺ¦Ϋ^/€Θχά/ΑχΓ(¦Ν(ΩΠ€cΐ οαpQί΅tΝΈJ‰γ‡σ₯qώ49Ί"7κ I’eYΡM;ˆ˜§Μώ(„žmjͺ’Ds’’™6θνί‘(° -™š©”$d‰S— iξTzZΨΫ‡Ύeh—‰HS‘΄¨’ΐ5uUj4MW/›+†EΙ[δΪf¬!)zr—$)šαϊα?ζΒΐ3ΣyIWX7aθΗX ώ*ψμέΙoΫf£θανLλ|ϋ¬―n½;άlœ3 wP|Ύ(΄hu€ψ Ρ)`τϊŒJΰά@hUXA-΄’&jγD0WŽœ06^―lΡtTΖm’ Ο~ƒiš|)*~’_Ύ@ΎŸ¦‡;£ωdξ¬oΏ8nχλ7ΦΒϊΛw&ω!ίƒ|OήΧΫy[υΧώφτƒ=_‡LΎΏύΰ ½ŒΰωωώRΘχoΤN£ρ ΣΛkWώκβؚO֟& ίƒ|ώΠυ·Ήψ·{zώŽύ~πώη{οσ₯οߘ'χξg–qλqΒk±ϋιε“'ύς½χvθύŸμΜΫ—ώΉΏΘχο?δϋΙ)δϋiΨm7οzuνγς7ϋΛί]ύφΪ­‡έν ―Νζ½ήνζΓΫ돟$παοu½‹γ]οΑΫΧμΒ£VίΨύΆ½Ϋ3ωώ/#ψcώ†RΘχΘχο•υ˜λίύξβί]œϋώΫέμ~wiώOk―Ή=Φη/Ž^‘ΏΎψ―λύσΏŒΰωόK ί ίO€|€|?}ς=ςύτΘχΘχο6ωω~ϊδ{δϋιοο§@Ύ@Ύx·ΙχΘχΣ ί ίO€|€|?}ς=ς=ΐ»MΎ@ΎŸ>ωω~ϊδ{δϋιοοήmς=ςύτΘχΘχΣ ί ίO€|€|π–‘οπ›’οή8ωω~ϊLž€Ιs¦@Ύ@ΎŸ>ω€χ)ί δ{δϋΙδ{ω@ΎχΧvωω~ϊδ{δϋιοο§@Ύ@Ύx·ΙχΘχΣ ί ίO€|€|?}ς=ς=ΐ»MΎ@ΎŸ>ωω~ϊδ{δϋιοοήmς=ςύτΘχΘχΣ ί ίO€|€|π“οο§@ΎΟ!ήΫά>|Ι²Ή½›dμ=ΏϋτY2’Y³υψθΗχ’ΰΕΦζρω^Ύο'zς}λ/ώsmrωφ§·6Η£ϋΪ·ŽΎυιχ!¬‰kώζΕπ³­'I’Y ίΛχςύDOξ~—MφΩεβΏ?ΨK‚υ/oŸΔϊ°¦~*ίtτƒΕοB¬ίΩΌt΄ζΚw[ @Nς=€&#ί—Z7›o6„₯~κkΗ ώΥ{»Ι‘φυoώβ?ίά_έΫϋ…|e”οŸ?ώψrνΒώRώαY| ΙΘχ—Ύμ'YρΟγJ-lπΕf’‘+ίœƒ| ΙΘχκ|χΦ7§7ˆχžμμ>ΩΩ{₯|μωώμ>‹“ ήέ|όtτhά›?υoΦΌ^ρf³Χ}φœ±Χ½·y»ωπv³·ώΣpοhUόbIδ{MζƒΚχλwœΜ}Ωέέά“ηŒζΎ/ΆŸ€vRΏyχαί]9=Υώ₯π,IyφΣƒΒετf—ΏΉώύƒΒρqδ{M惙½fkμG&,W&#[wΫΆ|oσ= ίh2ςύ…ςέφOΧοφΓς}οΧ[—ŽΎup«ϋnόΚ|δŒ|ϋj£ΏΉ½»υ¨χρεΡΚbϋYr(~\8ή²xηζέν­ν§λο.Ύχωο4ω~ςrs;I^WΎ―ί~œœΨόρBXM'>š|-lyεΫΝdΜζƒKς= ίh2hΎKύϊ£½Χ‘ο 
ΧNR’ϊα–ό―wφΏά}Y»vo/IɎδ=ΘχšŒΙs>‘»ΉέωιηΓe»ϋθηφχ―}±ξ…·ΓΖω><6eχκ(ί―ν€Ώ|~φΰε{@ΎψpšŒGΧ{csΤ|²ύΆω>όΘ„|όwαθk83ΆGχ/ΌΟωο4ωώ‹Ν$+€φΫΗΫΌι|Ό~°Ψ~rΖΰΫٝΘχšΜ’ολSΛχ'_~΅ž<η{“ης=€&σεϋLO―nόόΖσ}τΩ•—=ΊφΕυςMωο4™oξϋηΓkε[£GΧήΌΎωβηϋδλ/n‡”ΎΫLNl6[aύιΘχρ ί›`ΕΙ―p΄σώ :9\/½fϊNFΦνtG:ύ(yΗuZΝ#f{˜Όuβac€Ώ'Ώ9MFΎ‹Λ·?ώό›Λ£εσ΅Ώ+Χ – W$ϋήxΎOΆ^<Ζ•΅›wξήΫΌφο_…5ٝΘχQ«0σ ζͺΑ―Ϋωμβ0³f'y»¬–Nφ0™βhΌΚηGΓζΨ›ΉΠ&η Α0J~k­Ε“ΞVZΙkπ»Ÿο/ €&σαζϋ‰Λ…ς[I’7ίοl^ΕϊπSΟΣk&ζϋΟFωώλQκ6Ύ4ΌΜNδϋω™< ΅θό;Ÿ«†”:lΝ…ύUΙΫ$jΟgNΉήO¦‚vuξδ2,ΆΞυfžk ΟσΗa.Ξs^'œ[l½ŽvŸώίίμ/ο{Αο3žέkOͺφ—o>o]~;ΣώcΈσύκ½έ°ζϊhΝέΓ5ΟΒt7wGwίχΣkŽwr33£ύξ΅Οͺ}φ¨χΙοoν£«?¬5αΡ΅ςύΒJmΜΚRua~όnζΕΑΉw^εϋ¨Sš™έ_J+QςιΧΛ3Εεn2d2χ»ϋ>jΝΏsω>Σξίσ‚ ίΏφžοnmGϋΛ³8Ω·ΥΌ31ίΘχσν8ΙΪ¨WfF–:Γ_›οίRΡJq&#|bρζΡYΛάΥv’Χ°U_=R[mφγ_ωKσ­θ]Ιχ‘έΏ·ω@Ύtλw.\Ύuα?ίόt=JR^,ύ>­ΧWΛ₯ω X*/Τ[goυκ+ΥRq΄iqaqi₯΅1ΘΔΫΝΖ‘f»·uΏ]_,/쏭Φζvώ‘„‡»vWJ³'Χ ΄άέθnτ†9ΖuΫ­‘ΞπΥB|pΰΦςμΨΥ_ny§ή₯+ΥΚB©x¨T,Υ[/{‹DνϊΑkrt°Εεz?ύ±D6ίkχςύϋμaγΞθΉ΅wΦ·3ύ‹υkaύε;“W ίΗ« ³™|5ͺ₯3jχ|₯=˜˜οηŸ[ΪIŽ W+aΓq…R/>(΄ε°" H«£c/š₯°f~y˜δ²±Ξ%kΪJΞ2hΧΒ‰d,,·βsmχš‘Zg”–ΣÈ7ͺΕ°iΖl΅ΩK]£ϊM‹K½όΣΡΜ-.—f2Χ"ϰ󏀽˜οδρd&ΟΙ}&>ΓΉΞ4n.•^2ΆRΨδΨ°½pΖξζζΓΚlΎΧξδϋχήN£ρ)οΛkWώκβؚO֟&―@Ύ,NΫΑrqζε «ρ„|?ϊΡ΅ƒ₯°κ,Ε•(IZ‹!½Ξ.Τ“΄F% nΎΪα4ύpˆΥΉρ^ίhVΗ4[ξe U™¨TΫxΥ-γ^½03IqΉ“Γεω™‰fW{qΨ΄³ό ›.¬Ζyή™ξΎΨδvώ‘΄ΒΕ8-\Ν‰γΙΞ\Ÿ*„‡*g…3λεΒδ}t’$ˆ»₯™Ι²ω^»ο?OξέΏτ’‡λώ[“W`ξϋΈ΅\JGαΤ½κεZ;:Ψ*κuκ₯Ω±š#ίο$ΊcTj­AΗΡ ΉΌpΌrΉΕ+'‘45ΘγN:[;Φ­Ε±ϋ΅s譎§ΰj'Ž;K3c–B—=Ά±)Ο…Σ5ΉΤ_iΛθt‘/Μη gFω(=ΌΉReΉΆ²XšKύtHΓΡψκΩΉΚςΚςbXΜ5‡9ή•ζ °σdc΅R(N—ξΉΒάB-ϊΕρdσ}ώλυΌ[™ΛΉ0W˜[ιFc―y0_,2?Œ&ΈŸ›9ΎΧξδϋχάn»yΣ«k—ΏΩ_ώξκ·Χn=μ†Ήt²δϋ X©.Y¬,ΜΝ¦nQŽΣ1΄ΌΪ;υάΠωtςΞ‘οOφΆPΫ8σ!ŸσKν$ιύ@ΨS˜''{£}Το΄t6ΉŸ’šŽιι»§ •FͺφΧΛ©Œ»Ψ8:Υf΅8Ύ~Ήεί2ξΥΖΧ—šqr`ΠY=\x)’ΪΒΨΰͺ‡[fͺρμβππ6πρr½ΠŒŽ―Τ\vr˜œω~v~©ΆΊΊRλ βόΓ~•‘dρΪΞ9žLΎ΅λ•μ‹Ϋsγ{ÌWKγSβ—šύp’ιIŸζΫQφΦϋΒJ»p’Ν…Bώ|κόώ’έΘχΘχ“•ΪΓSSΣdξmΥ5€Ιω~βήz«……Ϋγ7ιΟ–λΙH³Rƒ[ι&ηΤ+‡0›Ϊy}aφTΣ?Φ\ΌγΓξ-€g\ΙΏe8»#³•ΑΛ>]˜[†ωDaΎ΄Tkt{ΓhΨk·;N»ΥjΆ7g\άΩB©²Τhw‡QΤλnΪn5[νAœϋν1ΏΨKNΌΒ°_e$™|ίΚ9žLΎ΅λ>‚Κώ^Bά/ύεΖΨ>βφψxͺναθ-TΖ7ξΧ 9ς}ΊΞ‡(―έΘχYΘχ³₯ΕΥ~ζQ±Ωψβ~ώ|?Ά·Βb3™,€ΟΣΣ“’ƒσž}w9} vΨu˜,eΌνΓτDωνdL G’8–™ιVfηζ GζΒϊγ½ O^δ¬ΩωRΉΦμ&ΑιΡ¦1_ΦΊƒW{{,‡—>x•aηIή|Ζ39ίηΏ /ΫCψτ 
Α\a|}6ί/ldŸ=―Ιu>€yν@ΎΰΓΝχ…z/Š£qΓ}Q’Y.fŠφΉσ}ΨΫδ”™~^kˆ€©Β>Ώ%ηΤ¬€§Œ_¨,Vφ-VJ³§§5†aχ Ÿ:δά2S«ƒΩ™¬τΤ½Jaf’BΉ=LŽτ•™‰Κ+άω~ΎΨωG’Νχ9Η“‰οωΒδ=„•ΉWΊƒζbϊs‹”μyεŸG»οψ`σ}ˆ•“΅ηNzFτŠωώxo³εFŽGΜ.Œκtc|ζœr½—œS·4“Σ|;:3Κ·rηϋVή|?Iaτ˜ΩA}iaR0Ÿ[$Α [O=Δ £ΪδΟΩηvώ‘d2wΞρδΘχ­Χšο •fo΅œžμ(₯½”9―W)ψΪ=€|€|RΗ7—;“ {Ύ|?hVΒ±«™”Ϊ͝~φ1§ ½ΈWN‡υsΆͺ3Ή•λύ_š<'jT«O=ψοφ`;χ–ρθcŒπIFœΔΡHœŒK ΊνVmΉZš/dϋ}ύ#ƒ^·Υ¨U+₯ΒYΡω|ω>°σδ7Οχω―BΎΙs υώΨ‰Ζ©S‹γπ ΦH±“ϊ~΄RœΝžWώ‚―έΘχΘχ’χbκΞβ”x₯8κλ«½\m½τΡ΅Ηeφ$.Η΅lXW­Μ¦¦΅9‡0y^Ε•(υΌάpψ±<ή£+­Aώ-7V23δ½Ζr₯²x R©Φ:£I„‚j; '3μ·VΚι;Ω‡©όηͺΡθΔϋ­r!=7ύΉς}ώaΏΒH²ωΎΪ>Ύ•λuΖΒ@73δ$Ηβpž‡'Ϊ$Q;œiφ‘ΚQwe&σθΪσ|ν@Ύ@ΎΟžτΝΕf?ŸΤΙωύ]εΙχIΤ.χΣΖψήΥΉΡϊf?sοpφ>QΏέj,“Μ΅ηSΟη]DΓΑ˜(κ―€fΐŸkΞΓl±Ϊι‡ύΞbq|γb;z…-γήκΡͺ`ΎΪΔqΤ©WΗΧΟ–λ―L!5Ν}«7 W¦³”νπψ³– ΌŠ2ψ5wίηφ+Œ$“οgVϊαΩ²ηΙχω―Bf…Z§EQφ“žJ­5ˆβ¨ί©¦v2{ψ½rX,,7ϋΓA§±[&ίη/ψΪ=€|€|ŸcϊϋΠ1ΫΓ8ކƒnseξ$Β6’$GΎΟΜΎΈΪ ‡ύVe~φΤ=έA|jΆϊ… Ώ0»8‘ίχε™1‹Ν3Ά€ŸΆZͺ-ΜΫ“•yDjώ-“zˆΎ“¬lΔιW,˜- §ΊΤΓGΩmgV7Ξ—οσ;H2ο±Τ<ςηΘχ9―Bj‚¦μGDύϊ/Ÿg±'6j₯™BΎΟ_π΅{ω€+ί‡^ξΧΞ%ή¨&=Ύs#ΞμΌP u<Κ¬Iϊ€:vƒΠΗΞΘ ΫΥ™Σ‡ΘŠ–‹3™9ΚΣ²1w.μ0κ¬LŠΉσα±±ω· ―κάΜεZ7 zaζ Ο€νΥ+3YΩ“ΚJ_―μ;$°Ο1’π…μw'g˜ύnώ«πΦΏxψbnL‘NΖ±RšΝŸοσ|νΰƒΚχΈϋΎ]LΟ’O₯<F-Χ©wŠαΛÓÅ5;Ι±ακb1ΣCK΅vͺέg&Ÿ­υβ$#L>>~Π¬ΈSJΝ@SOR2ΟΝΌDqΏU>+[«υΡ&y·L?Lυ¬2_(­v‡§vΊΊXš9ΛμάΒjgΊTνΥRαμmͺυAž·GζτσϋΌ#‰Υ…τ&‹ƒIγIΏίΒ NηΉ ΡF£”ή²„oυšαχBJΥz8Tjό₯Σ½ΎΌR_^ϋΤ y Ύv ίπNεϋι‰ϊέΖjmίΚΎΥF·%ΏB<衚GZέήΰ₯›υj³™gηNKΏΫZ­­ΤκΝΖκJm΅±1ˆΞ΅eZ<άθ΄υ}«««υv·'/ :­ƒ+°ΊoΏλΝno˜ΌΔ ΧiΤGΫ6ΫέaœœΓ9†}ώ‘ΔΓώ‘Α`0Foςz 'GŽβτ·ϊ­F#œg³έ{ωλΒ–fk£?L˜DΎ@ΎΗ5Κ…Μ£nMFΎ@ΎŸ†x8Dρ }<‰yΉ—šŒ|€|?MΓjafάΒͺzšŒ|€|?eΓΕΩ™σKƒΠdδ{δϋ)‹»ΪR΅Z­5:qς~δ{δϋιοοίiς=ςύτΘχΘχΣ ί ίO€|ψ7€|ΖΘχΘχΣ`ςLž3}ς=ςύτΘχΘχο6ωω~ϊδ{δϋιοο§@Ύ@Ύ·ΘχΘχΣ ί ίO€|€|?}ς=ςύ» @Ύ@ΎŸ>ωω~ϊδ{δϋιοοίmς=ςύτΘχΘχΣ ί ίO€|€|ސοο§@Ύ@ΎxΛΙχ9KŽΌΨ:Ζφ^ςfν₯F2u€| 
ΙΘχΫ/όηΪ_-Εφ›μΧλ_ά::ξ§ίG‡5σβΡ0.·ž$oΤΪΙHv“©δ{MFΎοάψ*΄ϋƒεΦΝΗo0ίYOεϋΝKGΓΈςέVςΪ<οςΡ—ύ±‘άNdͺω@“‘οŸ~z%΄ϋ£₯pύρΤςύσΗ_]Ψ_Κ?<{ω>|Hpι‹Νd€}ύ›ΏψΟ7χ—kχφ’ιδ{MFΎίϋιξαd5cJks*ωώyΎωΡQΎr3ο§@ΎŸδλsΎόΗφΗ£ˆΏτΣ^rJΌχdgχΙσΙΎ8Z_π‡?,έΈsύρ“δ΄g;»ϋνbλQfύώώpλA{sχσύ³η?ϋ,N2φΪλ?^ϋcϋκώrύ‡ΤqSφΊχ6―ί:8β­ϋ·ΧϋαaΌccΫϊιΗ£φ/^ύqk'ΪΪΩ;9ΑύŸ±·ύσΝ[w―ϊڍϋk?=M2žl?έ|ότΙσΓνwžξΏ>Χλ?^―?ψϊξφ³δ,qΤ^°tύξώ8―7φ7ϋωI8e@ΎΠdδϋψq!άt{νω‹›Ÿίό‹S³Κ»W‹G›Υ―έj‡ιιO–ΫΈτρ»ί­θίο^ύόvzΛΪ₯«χ·&Mž³v^l?γ?ύψΡΡϊΤrλκϊ0Σi|w1½MΙΥ»γ0λ}fπα1ΉΗsίvww<έίώς›μ/”Ύ]ί>k6ž/~}c-³}ύϊ£½γΌύ‡{» ίh2ςύVσΫЎΓήώ—λ­QJ^λ¦σύgω>Δύ3—ΟΎBj?Μχ^Ύε_”Ϊ[/Οχe]»υ}{Βq―~Q`3 ώ%Kωώ³Γύ_<+ίoI]ϋ}νεϋΌuύΡ‹τl<–;“`³ων€ΣQπAΎΏΘχ9μ-ςτ'Ν§'7γ‡Ώ›½ϋ>,₯oΏ~ττYΌχπήƒΏ»RKοGΛΝOnτΆžο=ΫήόγΪρϊΏ«1ί?Iύ~@8ξΪ£h/~±υ¨wrάΟ#ΌψωέξΞ‹£IlΎΎqηψήφ―w’}[›Γξέ.†-οw7·»w³#ΩΧ½qrίύΈΥ?8Κα)‡)†Ž?cΘδϋKŸί]΄?‘NτυΓΖnκίύμΚhŸ76·βΓWl³iιfόΥ‡Ιoο‡½N}e©R^(ZX(W—Vš(ω΅ύήΎώ J¦h°ΡιξλΗoύ@{ϊQΔ™5oJ<轎7-D½n·Ϋιφ†q2}oτOY<μuώJ΄1Œσ‰)όEι]ΌΓ~οHϋΫVΒΫχWυaγπ―ύ8y—Α΄ώ7D™>δϋσΫ~8Ίύ«vœ9ž?ηΒη?žο/ίIζηύŽwcςύιL΄ψΥh'ί>Μ—ο»·Φ²Ώ.ζ_­?O’ψηQεΏ³yφδώ·ΧvNω—ύ—ΟΒΏύwΗη•>‹dη$Φ_½·w*ίτΕgοΛzδ6–5‘€ŸϊΝ|δ¬5kέ΅f#`˜€ΑΩψΰšΕœ…0xB-Pˆ+ˆnβ˜€i›Ά) Κ¦EIι# 5ΕnV}͊.*θTάηδ7š{―Κ©ΜȈΜΘ̏‘‘Jφϊά=-πιγ¦}'»>ϋ~#Eί‹26ΪΩQ袟v?ΏφTΏͺF[·›·…t}e=ωe δ<[§ $~‘ύ—3hΪDΏYvΊyې˜e’ 5³ER8½SΊΕζhbμ`Ψ¨eλ—εΥ[G?ΓNμOhvέΫv–m3 ·³ο·g―ώ'„τVτKAΡχ|δwώŽF|_#ί£ž'τύ_Ζ‡kΤ£θαPΟΎ@ —ύωbπ—±π ϊΎOΪ3ά8?πΧ―ΆΜ8θϋΝΛχΎψψNφλσρ^ώΪΊ;’οΡ RόSIΠ‹ΏHΑ§ £ΠχΏΟΖ^ώιη %M~zρb\<~’}υ$ϋφωK ›*(((ϊΎIέ34Γoξ‘r9ͺu€|+Ȝ+ ,φVΘ‰­sζ,NΎ™Ωκ¬ΗΙπc› λŽ|Γ Τ.Φιf>rw­oj.ΛΟ2Ύœt‚σER Nιτώπ֘£πWg€ΥjΩϊ₯AyυΦ]œέ‚˜Τ:2@_§oΧΪΎ₯}Ώ{uΥ/…·Eί3πόύ]˜vοΙχOΪGΫ?Ο/ΎύζQ4=θ{Πξΐψƒ‘&Ζυ<δΡχhχG^> gmφyu7ϊϊ£Oϋ_rϋΜ£ο_Lτ‚τb―²§%Ρ#$­Ο>ŽeΕFAAAΡχθͺP;΄•ηe+DΧ‰Ά.#ΧαΗRτ½>ΎWPτ}ξΫQΉύγεβ„'=z𹑝iΫ?†ͺ†Ρ#1—ίPό&–ΙΛI&8_$κ”Nοo9κΛ I`εεjΩRPτύ[ΨΏAoέΠφνμ»’ΉUΏ}/ί~ωΨγ‹GhχžUΏB.ψiϊ~σπ“ϋ`Ι_KίτD'δϋQ<{ς_>Έw€/³ιϋμΞία―ϊbO”μρςI₯ί(α?Ώα§ώΉ‚‚‚’οg£Ή~θ]G-ΡΥ©‘‘ Ž£§€ο}ήΞ­9t)(ϊ>moŸΟ9ΕWPώ°+Ό^bν ΠΧ΅ZΆ~©τύŸdςœά½Fa;Ω[5GQΞ–©Έ…}W4·κ—‚’ο%qΘ?ύηήύ“ηάyr”ΎχV£΄3Φ7€θ«;CΖ›ϋ}ΝΣτ=j†$GρS}ΐ‰ΏσχήϋΰχεΞ—ο―~}πίΗ“³/$θϋ+}žΗ_ώεχoΠωW/ρ*(((ϊ~6ΪΤ9ΫΑŽλιkΪV~·{β§ή’i’ΏΘ’°G'e-&θ{Ρ4u¦ΫPtΓce«ež†Aΰoq’ΥmGκν’»ͺ;K’«Q’ΣΣΣeΡ‘Š¦άv'Έ¬8Κ«Γ²ms)δe΅’Ξ·%ƒ0»QΓeyξΌ JkAjx­Aϋ»Sr 
%ˆΊˆ£(ή"Šε3)?Ρ”iΌνmΧ²’ξ$ΤΕ-IΆΞ°`+hϋyzƒ‚}”;H [S„ΓΛVΨ΄Θ”*†±CΝSDΑΎε -θΔμۊa’dΙ`·$+Η=A₯yωž·3ŒΆΕ ˆΙ€¨σΠDς‘X†Yε%—™ž|S2‹‘›ƒΨi±ω:gγ£-a:ΒtΒf™Β\h"k­aΡ4u#ζλœ‚Ξ2¦{Xrb‚;ΩφQdρ₯°A%e#¦D’7cς2ά;τχΨΆ_uW5_bώ,˜₯jβ”:QO­ΙωCbΖJΗο <{C0\wmΰb#±lρ7¬eKήWίώe‹;x0lz4C΄©―Πχ‘ώވω}αo±X^Aί/³­πm΅κrψ…ΫZšnœHτώGjej 4t-ΓX­Œ¦ν…ΙΑˆk«,T’Ÿ ‘·ξ…΄¨¦ŒCm™ΖLΛρΒ¬lΫͺ, Z€fxEY”hlόΎc›Ζκ†e»QZήΨ‰*‹ΛXΨ]ϋQ M‘λEI=ν ²xi/KΫq$!Uδ;Ζj'ΚΚ\»~Κμ; Mβ;«εBΧυEεΦL~D·κm–μμ”υμYE[‹¬–« GΩΆL}wϋ³yi₯u˜nKvy@ε6oΔι œh,OdqΗςΔl”}r”σNό- Kςχ}ΟG·jΏϋα½GΟ«ψS=όό»ΏάεœΡοόϊ ρYΙsήϋ€–NžσόΓA’ˆFί?}\χώΓ‹ώQάhŸ²ίέώ‚)5ρsιϋοΟΣAτ{IχΟ¦ο Ί—λ»ŸώAG’Ÿΰ׏_m}?]hh{R^L-sΙ‚TΛ(‹Mύμ ;7Σχ΅΅+C£DζμφΏλf3…"rH›=––Wu7–Όΐ]ƒ¬‘M/mΟ5‡E­ ‘—֝$χ½Α[d’Œ°“4;ΘφνpsqC α»ήλ?¦ςŸι«5z0  ν%±…Ωτ}[8š΄₯—VσΥΕ(ΙK"μd½-BtΠέ€βΐ£ȝΊp[T)bΗΈI76 1!qβ›΄ύ° 98•,N nΥρΝ:_rΙιΙ7%£ JŒ1@ΗΤγκœQŒ6§b/ύ(@œ&ί@˜ f _ηt–ρέ;Ε”œ˜ΰ&hV`ΉŽ»£"Ι›ƒ3yV’kF˜F+tsή,૚:%¨šY˜εύ!5g₯γwP]ΎΪU–Š.25šΒ‚Ώρ7ΜeλΝϊκΫΎlρOμd6£ PY{Όβ¦ίWy7»/ό-Η«3θ{Ν²Ν‰ε¬Ž­ύnŽVrf'Ν+œΗΌƒι₯y«s ;v³Ο'‘M*ΐμΝ2½ΨΙw‰k)cŒŽ]ι –Bs’Šϋz36\I0*œ£’¬άjΓμ;έΨP¬’J·‘‡Azέ€θ°4 žHόe‹:½rΧ†Υ(SGζΎ/EGΗf4Κ?9Jy'ώ…+ωIŽ{ ŠΎgΰ"ω ο²NΌj‹ˆΎθ«—γ²Tο75kδή!1ςψ ΐxΊφ/‚‘ΐαΣ΅b—»ξƒgΈLπo“η(‚ŽΠχŸΊφσΎG:{ΰ•ϋΑξωΩ¨EίΏψφλ_ύΕ½wωύƒϊ_~l@AAAΡχΧKΙ1€ +Œ]a'm±Λ93¬nŒΎOΧ» Neb[?£―±ΤΙzͺa#CΑφς­M”uf(š9―)Ίtt‡bΰwΪΜ›¨CdΓ‡}ŒϊύΰR|ξb%0]뭎}ΟΟ ž‘―.~Ι)Ώp^Zφ Α$8ι‘C¬Œ7›Iυκaٍό¬j―ρ؞κN>m°Ζh ίδΈfεK.9=™¦dCή牑ο$ΎΞεM3ν @τπ DΉZΈn¦ΞωΖΩξ`BNF΅VTQ‘δΜ!?yαή›lb(BZΎE~φJΪ€z›‚n'oΐspW:~%ΠΔφυ« zj–€Ώρ7όeλMϊκ[Ώl±OWڎ―α»#(­GjσϊΒέb1$ηΡχ»nέH1ΗΥW»Ψ)#¬‡ϋ`ύΙΧΛ›Τ™–ΣΚΧLΟ…ΣRCS,½~WO}ώhv‘₯O‹“‹έ&φ5–ΣΒͺγΌο}Δ9σ%Ω΄ΉχQΜ°›ξϋΜY@°ˆk₯έΌθΓ(­Α¦ς›7κβp’± ρͺΖ>­GdκπfμFω'GοΔί’°%?ΑqOAΡχ €)Φ?ύαζ"Oπ€­ϋψΥθΑΥUŸΰΈψζέf„υ˜sGζ0ϋϊ~“αCB ½§ΧpνΓΛ_[wo€οΕŸν„‰ώtHίΏσ_Ώ;Fίož]Ό‡^Œκ|ων—¨σΩf}―ο~τh3Ζ]KΡχ ŠΎ—OΦΙLΔ‰ΔQ 7Κκ¦mΚΔΠπ›Έ‰ΎοJεΡ>ΌΨGpUΗ‘D7½« ±]['Ύu†ξ $°΄“²¨ςΛέ#Nψ8Ψ#ΜO!D[E9fΠœΧ\ίβfψȁΈ€¬ξpY ~^' έΐ‡–ζH PΧ ΣΛλm‡»*υw=Cœ]飕Υ:(λΆ© ΟZπι{%θ^RvCƒd¨±œ|uρK2x!ΠMΛ¦λΊ¦L`Π₯/ζ ’¦ͺΚΜΧ‡’AY•eέA{ ^+ΘϊV.Η’q­† «eΫJE“Έ¨αγμΩ@ι»:ύ΄ι†Λμλεn»oΕSqR”ai#ΊN4‰gŽŽLl³2$G<©δτd›’oρ.@Ή…ηνΆ\[Φ‚HΞΤΉŒiΖ@[iΩ§»HΌ%zψύh †žλ~֏֢γλœOίσέ;Α„œ¨fjZΡ”COQ-IΒR“ξύ`(.¬(+›¦Ξ|iω‘XI{ˆ*‹Βθ:’$2Λ“ΤiHΜΑ]ιδΗΒ_!  
B ώbΔέ<0–­Sψκ·΅lΝp‘ΰžΚΪΥΥ{φήΊ4έΥ0φ!½UhξΜWΞμ ‹Επκ ϊςλf”W’λκ":Υ; ­pΙ`t)A3Β ‡gW틆¦ψJmέ`r Π—†±Bω½λ(C[ΧυCξy‘/̞ΑΉ{HCV:‘Β7€δΒ°½ΐ_ r£hjtΩ "‹ΎΠ΅…_$=ΔΘh Ϋυ|o½˜Ωw r_Ml•«‘―#‚LOϊiœόWBαΓ-Θnή¨‹ΓΔΝύΚΠϊΚ/Θΰ4K~£ό“£œwβoQΈ’ŸΰΈ§ θ{~όξέΧ'”e€w}·g½?H|ΫΎψιy’ U‘aύΞ?οΛΏ―Ίxφςι“>Έ{ύ ϊ~ϋKύšHΟ/ΔΣgΟήϋμˆ/—υάίSκwΎΪΚΦ'₯ωψ•YμΟZ}vρ«]a7ΉΘ " n\ύy^ύ΄{υβ™ΘΟΏμνλ ΰΉτύ¨ιΰ½ύΝ£g―ϊοϞ?Έσχδ3Γ((((ϊΎŽυΡ>~bwœ τύr”6Ώ 5TKI‡Q|Ω:mΡ@沂VΊb5”³κ›ΰ.RJίkVEo ˜{ ˜»«ύyΏΊƒ³>4ΰ Ω7WnFHχκ2‚n΄oCX N/Ψ₯αψ΄X'ζ[ŒΒ‘p0ƒ‚6Ν‘ο …π…£)_]ό’ IœHd%Œ²Κw`0ΗΘ …½,τβνPwc~Yެ†°£ΰ=ne=ΆLΎ«ΑnGt3(o<,݌oV¦δŽaΣ“kJn1QψpGΝAd‚ΆΈ:—0 qζΘ»6Ι’=<‘'aΕlσι{{'8*'~„ΥdθΥEE’3{ςNΊw EέGƎcϊžo‰•τυοΩ¬άΑ‘IϊCjώJΗο tŽ kΚΰ¬40qΐ¬Εˆ»y`,[§πΥoaΩ‚σ—χΌΑ“Ψ |˜Ησbο„ψVύόΎπ·X<―Ξ οqΟƒš_ šΔFόυ–$…AY? œ8«q²Έ‘Ύ_ZnA”wψ\q5ΪΛ]qβΠ•AΝΩC3’Ί»ͺeœKj‰ε~7†I.5δ‰šž"ΐ4S’MW˜g€™DχΣ„<΄ο¬;CΐΒ‡N‰ι‰fΐ/1=GΠ–n†~7έ 7ΤΤ’'²ρCίΟ^Ά0α‡5»ΖEzΑ·j4+šΡ(δ(ηψ[Άδ'8ξ)(ϊώ΅Θ~σw4Ή EώιΨ?|˜wWτύέ‰Gnο^ΌΊNXΏs¬dx~<ζύ§»ύˆρΏ8Γ±ͺ’dψ&1ρ©κ7?ώευίyϊ=‘€Gχγϋoϊ3,φDμ‰άχηHΣΥϊπ Ρ|((((ϊ^”α‚ϋ†£θ{Π8 „#%"Ρ† κηρƒ‘žηΉARΡ Ηθ{3ͺθ±<8Ψcχ9b4τέAPΫ_Α3ˆ„mߚ\§δ("ύχΤcI—]wΉ/Œ M–Ϊε+fτ}™xξφe7ώ½΄±¨|u±Krθ{h˜€{κ"S‡zΩƒ¦AοΘ E©z!$j€­Ό£δ-ΩΓτS±κ2ο ǘ© uπw|³ς%—ŸžLS2‹αxVΘ—` Ω:ŸoVzbx?h’g B‹·|Ο§οyξ}DNόˆΤ @³Φ'\τLαO^†{χr1­@ΎE€VR‚ 4pS"εiω+ΔXb³ 1•°8˜Έψ‹σΐXΆNλ«oΧ²5gπ ]έΎ2nekgBzKsάM~_ψ[,ŽWηΣχ`ρΠΦ ƒ ΫΆ±#jX17Ιϊ:w}έτ•yδ=όW  Ϋ^―A³{•9G6ό ϊ>%ϋΟΦu£wΩυΆ¬έΥ€`7ˆ‹ͺm•eyžgišdesυp«λQDu7ϊ‚=S’CΊ\Σ Ϋ³’’Κ/EΙ$͚n’οBŠqφdz’i:ιπMvBΖ!ΤFnX8δΟVΤιΑ™λqƒ‹2τΆG˜˜DόFω'G)οΔί’π%σΗ½‚’ο_Ÿ9g—·ύίΖΟ§ ‚Ώϋλ ±rφΓί|vHΝ―ώώΑΕKJXΏϋ_Σχ?8dόςN…’ȍ|t™ Ιρ/žn€§ί~£47ώθϋ›‹ πύ_ώŠ|Txή“—HΈΌσ?‡‘GŸ‘X2―}υΗ πάύl;HφΎώ~@μΎ<ψ"ς⫌πς―άyHκμυόo?…gBAAAEίG[“%qθ’›~’LG”EJI4-`»Ν][εi…λ¬-ΣXκτ8I’–r:8ΕΡΏNτέY8νD±£•ΰ*( ξ+Œρϋΐοi^G!Ε±gjϋσ^ΫΗεώ ‚1ΠUEžΔQΰΉkΫ2V z,α«‹[’GίC‡ςƒd}/¦zAϊˆβΆΗKB€Ά2l/ŒσςuέƒHΘ§)oVΆδςšgš’Y QKšιΕ%r5γω:盆’Άq£!n>4Ι0ƒΎgκœOίσέ;ŸΎηWKE’Ÿ)όΙΛpοβH%–m‰•”φδ”r‡-ŸΎη―tRc‰zΟηAήΤΥ%κΊ5šͺxΞ²Εί<0–­ΣϊκΫ΅lΝ<Έθ ―λkϋFΛL ™ξ»2ΨMΩΩ ‹Εχκ“}?JΎ‹ά;¨!ΆτρW PŸΊ0φκδrΖ%ωΎƒ_ށ> Ί2‹½΅΅Zθ8g.}O’Ρk }ΐbθHσu(΄₯aΙ ‡γΟ~θψI$œ% ΤH -––μΗϊ>!dƒœkτ»&n™ £šχήhj[Ί9y•IίΟYΆ¨Σs—Έ+³½Ίx>Y!+Τ2mg5Κ?9Κx'ώjΕ—όΝχ6 ŠΎ? 
<ηώy·e«ŸŸŸ?Ίw7~tώψωζXΌω'ύ„Ώx\ίοK~wΌΎJ σκΡWΞ«νŸ‡_όπύO7Λ™}>φπ«Ÿ‚^ωW?δόώ§Wό(]΄Ož?ŏ7γΕ“ξG_ϊήΧξ§_ΊΡ£ό‰<ΗήλπaςΝΗΏωςξωwξν¬Ρ#9¨άχ„ @RΠχψ+Ÿt@΄6ΚΈEˆT“hsg₯O>ΠΚ8(ΆΙ7._ΓN¦Ξ%έ{‚ §O­ˆζΐΎ2 Y>πΛ™χΑvRo2wΙdIςΠΡΟ²ρΥΕ/ωzDkrƒ„Οƒδή b°yzX’Ϊθκdu䌸φ'bR!Δ–6+WryΝ³LΙ(ϊ^γL”€eι\Β4  ΐ:šγa }ΟνώQžΎŸS-))?Sδ'/r) o {Gl‹H­€@½^€‚Ξεύ!rΞJΗοΰ\€ο˜vsVyώζ1}Nκ«oΧ²5oπ໬žˆM[WmDM»τχ›M΅Λοοζνά…ŒΏΕšνΥ'z·€T€h†δΦΐΆ™^’­0ΎήΩɈg‹πŸΩŒ T†―•­O­ ^ε¨0„ΎŸ/ γΩ^ΛΟ'ιϋ !ρ‚.˜bj|H†π |.€΅Ρq5MίK,[πΖψΆΤΨΪΥY&U°ΣjΉΩ”qΛ¬Fω'G οΔ_­ζKώ&{›SCq2ŠΎk%˜HsJ(((((ϊ»:pΎŒs2b€I\?·bάD¬γψ‡[­ϊre8žo£έΓcLXίxGvNFHŠv‘9€2ŒΪΒΩ-πϋτζ―π ΔΧ β¦ί˜ύœv ύΡ"ΫSIΓ€&x.¨v±4,Ηχ,BίsΥΕ.Ι “aJ>’0 ΰA†Ÿ#ίDˆ%MJ‘4d5+WryΝ3MΙ΅ΈL1QŒLπt.gΈJQΡρΖ6™ΛLί~ϊ^Ζ²“εHβ:ψ‹σΐ™>'υΥ·kٚ9xπR«“Υ»Xf³Ίφ$oP5 ς»Νά…ŒΏΕbxu9ϊΎΩͺ˜ΝθQE)ΦΙc ϊ>eΣχDx‘gΘ˜Ζ›§οq₯ ‰\S›β/έf~τύ|Iz4Ed.¦dqΘ”ηEίΓFΤ1’6€A£=:fqœ‘ଘτ½μ²U»Vκ&Ρ‘ι₯4v^·•_Μo”rdx'yϊž/ω›>ξmώŒ θ{Eί+((((ϊθ‚χ9ϋΘΤw%FŽt 7jΝͺ«,š© ‡Β~ZˆŽΠ4„ΎG΅ŒSάΙΈΙsΠ<¬„7τQZH­K2™jλ¦+ œΝΖB2ή'(|c"yŽ·„όœΟ9«uP6(:δdDε|uρK2hDhxI;–Y²EZφ ασ M²4γε›H*dΌu™Eώ( YΗh‡tM–τΘkΑ7+_rωιΙ6%³XλŒ#›ΈΰλœQ ΐ‹γ‡˜Jh’o B‹³u~ϋι{sΘO^Hˆ”ξ4‘6€eyϊ>χ Κέ²ώΙ_ιNJί#“΅Άβ4!M±gYyΕάe‹ΏyΰLŸ“ϊκΫ΅lΝ<΄θG£οέ\АLœ…†Νήƒ μiw ΐΡΛ°]Τ‡%~Ζέπ΅kA8ƒΨ€2΄u}±ύγς“Π0ΠΜτ[V=¦7G]μ’&— [τ›΄ΜABθ*ηψ€ιϊψ‡n0"^ βr’ϊμ«ΪΒΛZϊΕŸœΌδYB°EšπΝΚ—\~z²MΙ,†ιŒ¨. ΄ϊbϋΗ [η¦Αqb“LVΠ$Ϋ@„gλόφΣχ搞Ό$Ώ0Ι\Ρ…†fZDr%m,βKx°$ύ!’ΏΎ―cpκU[ΰ/FόΝgϊœΤWί‚eKf›„η -aΙ–έώ‹αΥωτ=ϊNŸ]β[ ΎRh+Ϋ^i D'€dς ¦Hτ3ΐN›γτ=Ή ² πκP{ }Ο°] »³νυ%lΫɚΡ¬N&†Rmφt0ΰLϋ Jv‹Ή’ΰ{ΜUgΔ\]¦HTƒ‹΄οΣ€Ή@\ψφ6MίwαθΊ‚ζεν~ΈKƒΎ—\Άΰ“«Β› 8k~£μ“£œwβ―VlΙOpάΫ((ϊώ4ψγ‡ΓΛ±χΌ–Ύ<έ½[mNEί“||=ΦQIK΄Eˆm¦fWiE’€δMΊgͺ"{œ’ΰJ‡ϊη8Ταl6ΎS3?ν:Λ}sυΑQΩΠnŒΏkβ^μyu6 Ί?»šA1V3nnϊ₯Ψ4Έ!kG£*’υ‚‘ι›·VnΨO}|,α«k–b™I„IώYΜ$0¨fFGΟmΊD/jΒ8ΰ 6/τ/]γ"π‘1yί„·Βΐ3.‹oVΎδςΣ“mJ^±Qˆ™‘‹be΅' :ΆΞ%L3žtΊ]'όbΜ•Μ3f„Υ³tώθ{Θ)MίK˜C~ς›.G{Ω ί"+iWa!Σ­cΏ₯ύ!’ΏΝK]S¦—(κΙ! 
.4@HFΏμf-[όΝkϊœWί²e‹?xθΌy Κ’€HšΧώ‹αΥΉτ=]Ξ0‘ΡlΡ±C_Lp|k­­œΌnΫ:_γ…ά τ=ύ&1tΉfhζω8}―™ώΆMΡQKΩAڈNΤΉ3’G‹j|"½^i5Tέζ.yΓ†OίλA^ !ζHr5ΘΛO―OGί£οdΒHaV Ρζ± `zNχΥΐΚr<τ½ά²€kͺΟ₯Νm”}r”σNόՊ/ω Ž{ ŠΎ?.Ύͺξ'ξŸ?yϊϊGYΫΙ£mασ‹—›“CAAAAΡχ@W†ϊθiΓρΓ4Λ‹"Oγΐ6g€Ž¨iϊ—‘³dœαšθ2Μ+!D]fž½mζ’²ƒ$»jύώΈΐώΔ1ήΊ,kI݊Ά.<eέ\νέΌ.μ¬j;!šΊπ­9dŽ2KDΙΡx–•m₯κΊΆHό%~υ Ή²ό€?–΄U0h†“<'…fΧaέ ΡΦYδ-΅λβ9eΫρΥ5«$“YB˜uV5m[§­AΕ¬A2Ύ»τγ4/jeD\§Ύ¬ν:Ρ6[Cΐ΄vAΖ7Πx.l?ΝςF°uώ—Ύ§rΚΠχ’ζœΌΈό›šQV”yκΩπί;iω‘[I»ΔϊΆν‚γ:λ1lΗO;yH…δ―tό–>&’t'rsmΖbΔί’Ε#ΐ%rς$Αώ„ΘrΐS}Ÿ ΛWgΣΐw¬iϊΎŽ‚OίΛ/[αšv©œ¨"―8ΰŸΜn”qr”χNόՊ+ω Ž{ †τ½‚‚‚‚‚’ΫH=(s—γ,#3υ#°tNŽ—ιΓIΊΫoi₯’ͺΓAeΊ¨qή₯DδΨΈίΔ:„ύ. B Xaί•QK$™‚ž΄|uqKςiΔ:q&¬^Ο$τΎΆξ[ΔΚ]‰ΡJ‘ΫΨ§“{”; ηUhŠ•εΆΖΡ@Έε\³ς%—Ÿž\S²‹‰2œhzιBlžΞΕ&Q†“ 4Ι7H+Έ”€αλ\’Ύ§ξ}TN~΅€€„9ψ“—αήΛIχiωακ„8ˆJ@Ό™Ό?€BςW:vQ˜ρΖƊ¦Α‘(όΥ~i-Ί‹sσΐž>'πΥ·xΩβά3£ρΌ …i‹όΎΜάbρ½:<ΊG5α«!b±Ή›ΠWΓyΊΆ+LΖp\ΉiG}>>«0<§nb€Μx-–“‘) c”’wS}Ÿ„(¦§»Άς2€α0‰Ι-]ŠΎ—[ΆhΒ%~γ₯V’§žί(γδ(οx«•„δΗ=…?_ϊ^AAAAAΡχ@W‡kγθ–ΑφΚφΨωYC"`ά-?Ο%ςΥξ€}Pΐα-#šΜ_ΠMww׏yvz=\bν’CΑΒJ*AuΊΉΆW‡eM/p΄;DΌεΕ2Țɧ 1ŠΜ'ς_†ζt•‹η΅{κŸBX‡ͺΥ—vΦvΨ”ƒΎg©‹S’nύέ¬= i\t΅Oθ„šπΦ_XœA£νΓN_ŒZΉξ£Ή –VΠPΊ‘2€Θ]r„θοΖ3²fω)cJ΄αzE:jY=ί¬s%—šž|S2Š‘m΄ϊ:ΜQ†§s ΣUβκDκ0M šœc Αi—‡”_ηt–ρέϋ¨œόj©H2ζΰO^†{eb―τq ~;θζΜYΐΥ uJ¨ PRήRsπW:f±Χe)ΔP ρLRΨ`π#ώ恷lΐWίκe‹ΏMBΆkšz¨ή‘xςAΟο ‹5Σ«S₯]VλƁMΪrΚnβ_αy^&Ί:%Dυ :=©N†J“d`q“"靐±c’Ο‡D•τ«3αD-ο0’-Μ0o6<ˆ26Π}δϊgHΤYhθGΎ΄8D™κϋ4ψ„‰y„!=½ψΆ±kιΧΗ”d‰«1ι{φ2AœΠ•ΎΆϋwυMqε™’άF'GyοΔ[­$$—<ξ)(ϊ^AAAAAΡχ€¨³8τ\ΧυΆπύ Jσα’φ‰ΠUFξ™ςΠζiυˆ{!Ρ…¦Θ‹’,Ρ]?gέeο’αŸdešΐ2#·l]fCΡ$«Εf>D‘ΖA‹8-Δ±N…Qw"IΛϊ¨Βš2ΚΕiΩΜnΉ­ς‘ο}+Υ–ΕeΥt|u±KΞGWεir‰4+h/Ωƒ?VUUΧ5IYJνPΔαΞ΄a\Hu•¦ύΤσ£$ς=?J²Yvλš*MUσsΝ* ΝσLΙ+†ρ&i2΄ήΝΧ9£ί6Y/Θε¬ΝK!5Ί­ :ΗΧωι1-ηιgΚι'o'Ϊ¦Η Τ&Ά@ίr-"οεWΊ· ώbΔΪ<oΟWߚeλ ΊΕ’πκΌζ$ τaR€©α£.0πƒ(‰C?cή†¬Ό[;%ιVZψ tm}‰¦iΪ–Μ«ΊLγΎΣaFIV[D“§ύ€ ·‚mΡ’BΛ|΄ dKЦŸ;YΒ0Ι ”eτ‘°jΫWί“4φ}2x½{εU\έ8όΞ—μΉWΦαp(…AΥ 0QΡsAT M=I“ŠŠ&οΦΐκ 3μιή™ΩΟ3χζΟu.m0/]‘{σφ‹‹ξ˜Fο’΄ώ»‰'ο›Ÿ9ΧιYχό·[„|€|tΩ<ιίTυ³ _¦ωρΪ Ί†O/†r7γ$Žβ맞φΊΫ-ρΆμΒΛ³ΫΊ?xxθ?ζ!^TΎγΈG½Q:Υ³$κυK9s| ΘχΘχΟΧ5uέvuρΎ:ηό‚|/ί|Nηύό}‰τ’ξΒUWlfΡWρOΩ‘!ίC[WuΧU›ΫΒA³έ9@ο›₯izρd:›Νξώ™nΛΠsζψrοοŸ I“¨ΧŸ–Θχς=ΐ§Τγ¨7žΞfΣIυyοωYόYνΝʏœIτΙβzΞ_ς=ςύσ5Λ8κWυο~CIώΓ|XFoβEθ 
xΊώO,@s܎’ο›oΧήm=>x€'όYν₯‡:ΐ/hOι4‰ˆ'iΩ†ž3G^ƒ|€||έ)ΫV«4M·Ω± Ώ£κ˜g_δΗ&όH{Ξ³‹βά~βΣu? πΊšcΆ]ΞgΣ7³ωb½ΝΚΊ ΌώnkψΑόΊϊΈ]Ώ&―ΆΗͺ 0@[•Ωn“._-ΣΥvŸ—Uξ9sδυΙχΘχΟ ί ί?€|€|πδ{δϋηοοŸ@Ύ@Ύ>ωωΰ΅ΙχΘχΟ ί ί?€|€||ς=ς=ΐK‘οοŸ@Ύ@Ύ>ωώ?ςύΟ ί ί?€|―Ώώιύkΐβ9Ÿ€|€|ίΦε!Ϋm7Ϋ]VœͺξΡduάmΦ«7›νώxn~ΛX―-‡ύξbŸΚͺ Τηγ~»Y―/χoŸε§s3| x5ς=ς}sJ§£θ[ρx}8ߍξγθΙtuυΚl5ϊvl²,κ»ΒJ'IτΡ|Ϋ ^‡|€|_-“θlNmΈͺWγθ‘Ι¦:vuάL£‡Ζy=μN6Νπ1ΰ%ΘχΘχm‘FWΙ:/»πEWςωθΆuyoNλIt³ΨκΆλΪ:_Ο’›υ©>Φ•›θf4[«¦ έω°υ[WMΈhήοαh~(λπEΧ»e|»ƒY=| xς=ςύis­νσ¬ ΥY]ŒωΎœEW³m>(qτfΌ*΅·›FΛ<|Te·‚οΞέΗ Ζλsψΰ΄ΎnŸηυπ1ΰΘχΘχm™―WλΥz[vα£φ΄ϋ|α½νΙςΎ‚ŸwIt1YΗB{EoβYξo‹οΜvη[—Ώ½­ώά†wmu|snΊ‘cΐ οο{έωtΜ³ύv½Z.ζΣΙ(Ίθσ}[€·,Ÿ‡G†ΦΡUΌήgϋ]oŸeλYόqݞφΈŠ>ˆG“EΊΞΗΊ ½Αcΐkοο»4‰ΈεϋγzςαcM‘Fά€ΛΣΙχ¦ΛΌlΒՐ1ΰΘχΘχ·ΰή‹“Ρx:O7λωΗ†~XήV»™gαacχω>~$Šf›SΈiΞΕf9%Ρ·ΦΗfθπδ{δϋΆGW“εΆ¬›π=Œ>δϋ:_DoFι!άικ"Ώ8Vνΐ±>ߏVmψuΪΊ*ς}:ŸDο¦ΫnψπΩΙχΘχΥ>‰ΎšWίYΕΎΟχΝαΡgφe?^δΗB“nMύΨ…;εn‘$£$mNmνv>NFΙhΊnΒύbχύm ^|ΐ ίwε6Ύ5υ:|T.n]”_sώ$ΊZdUθΥιθΆ=―†Ž…σ"ŽΎšmOᣦΏ†MΩ^.˜Do’μώύ­Λ§ΝΠ±‹.oNU€ΟGΎΐβ9‡Qt5^ξͺ¦m›ͺΨ―ΗqΤ§eΣ…ŠΥ8ΊYiͺς°θG§Η. «²Et3IχUΣv]sΚ7γ~λ¦ ‡΄_ΰgWœ»πEWςω(ξ_`:vqX&ύ>ωω>tΩ<‰~,Ι›KrOΗΡ#ΛΌ WΗΪέ/έτδΠά/ΠίMνπ±‹"χλŸŽ|€|Qoη£ϋZ?^Mw\O>ζϋ7Νn9‰ξ$ΣmQ…ήπ±Plqt/ž,Mψ¨;η³Qτ­xΌ8Tέ―{\γ΄ΐ§#ί ίί4ηcΆΏΘςCY΅α¦.O_”ηΊ ½>ς―§sξ λ΅U‘οwϋ<ΟφϋΛ­7ακtΨmΦ›ν>Ϋm6Ϋ}QΦΓΗ€—#ί ίΌωωώωδ{δ{€ ί ί?€|€|πδϋ{gοN½eU’;Ž/υ_ΰ3α2βΡX2‡Μ8T ‡B‘ …Aa0e02`L D‰JΊOχzΕ‘η’7Ÿ|?φ-vΑSΏ΅Qσ4ŒίhηΥ½.σΣͺΝ§ΆAvoΪWζϋj™ž₯ ί Δχ6•:β']7ι ρδvΚ|BuηυWΙΤON"Ν/–ήΦ§Mβ{β{βϋs*χΔ^¦6ι[—Κ|βΌώΊ>σΕ“—JσKΥgžxr} Ύ'Ύ Ύ?‘2χςιϋcύτύ—Žο―χ €ψžψ€ψή¦ϋ*Ο²,(’ΐΆ )ς‡[YΥλ}| qΌ’Mύ¦ͺ»E‰Ε‡άκ3λ ρ=ρ=ρύ5zΘ…%ΆΣa8~Ώ™₯o²8 nΒ8-Ίa1;ΫΨΛ§A™7ͺοΪ7]?s[$£ΐͺgΙέΦ7eEαM”ΝrΕ―ΔχͺoΊώΦΨά7Yςέ‚“2GzΪΊLγ(|ΕYQχΣj,'[ψWω_Σ4–‘#žœ°§qš•^ΗΆiŸδͺmeΣ>nγ΅&Υ²Ξγ0 ‚·β€¨»Y™—ΦIζišΜkך4S$ήq\η°w}/­ΒOKƒrΌu0δbΟχ΅+½0)ͺ2 =aqqΛν/Ι‹²H½γΎβ{2™3Δχώc~ΌžRoόR|οψyUΧe5¬ϊpτή-ϋεΦΫΤEξχŒο½ς>œ}[dδΰο―7“y³6ΞΕ-^±Ϋp‡jζ‰œγ©ό£KMΞM,,~ΪnχΦ»,–b،YRWX‚φώXυάνJύb3Ί­K9αs¨ΞZ'Ύ]Ϊofͺ"ϋ mv_Ο8©ΊοΛ*Q·™7JZ₯"‘«ˆοΘd>E|7‹yΪμλfNίϋιόqIkυ°4ξ»ψ^/Mq”ΧύrΌP.φRΞϋ3ΰk%Išf7Υjnτ¦†:·pˆο₯Ή;žˆŠΑϊΠΞΣ_»Τd—zφΝ²ξγ½{Κkλ~πXηzΑ+=v₯ύttοοηάn‚ΕυΓΌjΗYmjξϋaz)»~Z_άOΗ “ΌνG΅mσp/νe'ϋU€ψ€Lζ ρύq˜‰ϊΏΖχΕ¨?ξ!šŒMΧ‘cΗβφ+dmN.ξ―³•΅TΦ›=υm‘Ɓη:ް\ΨΒi|odκ½›2€§ς0Qη΅+MͺΜ» 
Zτ¦ž6mŒ’ιΗ3j΄zΪ6}?os<χΑs…-λ•ύ(χ?Œ«n4V·ŽηΗY5 Ύ “ωVΔχςˆο}ΉΫΪ₯vκ¬Ncq{)›—ΚΗ…N–Κν.Š{ŒήψβΤωNϊ΄θ±Ψ-¨ΝT»œύΔ…&•=MήM;σ±m¬œγ/ΏNΰ8(N\qƍ{υ­―δΛΑΔχ? β{βϋcX<Χ±}8ύ$sΏvϊήΛ>YjBqΰΈaŠK[8‰ο–Δ±nr7–Α~ώ‰σ&Οβ{yί—?l|oMπ_›°Ύ«_θΖυp‘h4Ά­π…½Τ»½GE;«ν0˜ώϋΖχΗoΟw­”}5g4y2oυk£~η¨_λ“νG€ρ= Δχ«%Ώ8@άOύΤ_ «% Δχβ{ύΤ4~κΔC5υkΥ}ͺ5―άηζk ~κwίˆοΔχŠ8R•S«=―σ1©Ÿϊ©ˆοΔχβϋJŠT³ψώέ»wΖOxxsͺ;/υSί€Δχβ{u$zυvAύΪ*Μ[Ή1©ŸϊŽI|Δχ7Ώΐg|χ½ps±f΅ζn"2–Q?υ;Ήϋ@| ΎΏVάιœ4fζ­ά˜ΤOύA|jΔχβ{•·oί?uβaEU^κ§ώ* Ύβ{ρ}bSΡXy«ιR?υί€Δχβϋ·£ρϊ‘e6ΆΆΆ·ΊΊgόΤ/ρΠΈΒ‘t² zWj^gύΥγ\‘6κΟΎy>š<½V'_d+ZnwquΜςϋCcc[―Ÿλώ_ίˆοΔχΞ#QΉ=αούβ‹ΛξΪΫ ‹ΡJΞ«lWss^υηΡT΄t%2W­39i:ξ/CIGυη3ϊ€Ζ•ΛŸΏρ_žΒάσϋO|*ΔχβϋŠΙΜτ[“zΕυt&ϊΥ&Ξs~4΅α„ƒγV½Ja«£VόJτΆΝ˜ΪΏνΡώΩΉ0ΎίˆοONNήJŒΖ“ρ°Bρ½ΈκwςŽηuΐι\κώŸAύrΏΪ˜ Ώεξϋ„“ϊ7;άecϊp›εξϋΟsρ=€ψ@|_©ΘRŽο‡»ϊ^<|8ΫΧ7χπαάΓoǚow[ό?=”f―^tξ|œΟ ώΜΌιΰΎg8f"όηG N―όρœ“ϊwΎ1έeί1/^˜ ΝΜ{OGz½«‹ΡΟuUρ=€ψΐέχj—ǝBΪί»7?εΉ’qυ‘Hπ»­OLŽί,Μvά{άPο­Χ―†α{ίΌš ίZd·ΧύώπŒέ?͝œΔCΑΆΗ Ύ†Ζα{]λ+‡'%ΉέΕωΆαΖFίνΖ‘ŽžπλDΩu%WόO›λ=_~Ω«Υφ–~>nλ‰ΌI›ϋDώΥ±1ύZ ΝoŸX†GGWΞ.4mYΛ\i-ͺΧ―ΫϊZ–f^ε}“dΆϊh₯ιόGzŒžί}ΥΣζ»]οmφfLνilτκƒ]ο»έ<Ω7|͟Xˆϊ‡Ÿ6ίξϊλ~·»_οάά6;<“Μ‰.Ήx(΄;Σc:ΧτΜΚώJ(›Χ‹ŒoωnjελkΧG‹Οy¬ΉQŸt~!—±˜ ΟΧBFϊI½žyΦΦμ­Χ+ΌνkhΠb}aKτ)&vχC+s·]ηwΩ7τμ†Vφ·β₯ \ ϋΟίΩNΨφM?Έ ύΰ4ντΰ4M>8£³>HΘοΧΗΡ$N7sιa‡ώΫRͺ§y{ϊ‘Cι-½›―ή­oš~yλΗΎy*ήβαuϋ¨β{ρ=€ψ^ŽJKίΑbŸ¬΅ΔχŸζΝ―w5ˆvΛεj›‚Σp›&žςή»η‘ϊ?~΄²―ωr»wΑvηxπžΫήMtξ™ω΄'ρ§_šŸjŽšbΩάβ7ζWM.€@\½λGΌήΗO―Υ:’₯y [~Ÿh΄^ΎžΛσ―'κ]¬Ρ5αί2ώ`’χ‚M™9<‰ϋϟ­m φœ―kΜφμΪΊεΈη›kΛO­5†vσϊΜ›b₯φ]jΣWjϋςσ―ϊΰ€ΝσΦχ—Ω7m6`©{΄£ϋ‚Α»λ»Ž*Ω;ϊςρ=€»ο+ί‹E½Ή¨;ϋΗΑ˜fŠο;Œ°5χͺΩ’δJWC$}bΨμ°F·R «ΈfCiOΧ_ΩΣ6jt>j4§ήσ[ΉσtΈ£ΦTΙ½ψ•Χ’`_ Έj;2''…­α^eΩޞ…σ5ΊΥktυ΅Δg<ςS" κR=+}Zs8yΙԍ»9±{β²|PaϋN|gήΜξ³UΈΤ'’YθQήέψ¨pRIR| Ύίίx|_³vp–έύq|ΛvίτΨLιkRΊl!i}}―›–O·E&+_šfv;g‰όp³ύ©ϊo­}œiγnzkΐνλ[99σfL3Υά³p΅HΔFmήw_|OzڞΘkn―[“CyΉOΓ½ioίXƒ%1οΥcράΚH­Φ«Ω†­νuΥ΀ŁΚ׈ίίΏŽχή@ƒ=υΦμ{λν[Ωt―W>£ZΝΣθ=–β{''‘wLϊ(’Tyj²ΦΌα5#]=³=mKηΪυψ_$ΎίˆοΤXAάAo&-}RΣΦ΄WyΉ;“₯X?Ψΰ2½Μϊ›œqwπ9{uo¦Š:kΊνšxΊ©wM/Ψ"ςΗ}§azakΤkΙΝ‡σ§uΎq™;»ƒ+q£ώΓΡϋ– Ό₯ώΉys…u}Yc±;Cέ睡WoΚ―₯xκ@^‹moΕ?δΔΉΏΉλΥ°w~x!·ε΅$ΰ]ώRy'Ι…Gύζώ]}œ}o£)³Ύ―χ4lή―1$9)Ι<ζϋθӟj“γ{—{ϋ(Ψ79ΜYŸ­1ώΨΒϊ©†~Υ―„β₯³π{-§ίs,­ΧΣ( ·OOνΘwλvΨPn3{›~2Ξno¨Υά>4΄y:onε±ωWΘ_Ϊισ›‘™Γ •ν‚hTΕχβ{ρ½"jT/€΄Wq5ŌΧζΌζ uτ 
(Μ£MbΆψΎΧτ•θ›ζob©ιLΟlwš\­5Q,ζC}–;―ύ‡b!Η5―cE]βi«ΎώtηΌ±Ά+}ZpπκkΉβέχc£›bŒ7=u¦ύn΄b™bωΆqzέ “£o6cΩΨζf YΌž~Ίϊ΄φΈυΟ&bEƒύσwλ‘ιάΛί}0ΪkIΖwDύ–ΣξgΚΗτQΏh—ξ遼oΆƒ³4ΊšŒάέr£½ψ+ωΓ'—ζ»ΫΉXN§R‡Αΐv0Έ˜^›$Ž‹κ˜^F|ίˆοΥS^ηξϋšωεTQ›φXΪ]Mλ-]Νς’‰@άΘ*ήϋεΫ»jξλΉvzΪ|‹}ΓφqΡbg¨Χ–KΉόΔtLoš―3έ©ύΣζ5Χ+Κδ‚λϊς–§jΜƒtΧΤƒχ֘ΪΕ2¦eZ«κohšΊl?8iΥr’=½||ι«Σ­]φO„γt,f\ΩΤ±Ό^u|Νƒ³gϊ½m~SοΨdΝΌ± S£u«έCχή+ξςψ@| Ύ/ ΖOx¨¦Žο½MwΗξή9½ZGšξˆtUܝΎNβον[V§½Κv1£Θ΅ΕκΦ΅'§ϊmI}Ιr“&{OΗ_φZΓbρΪ«―E&}Έ0Ωw)ΖoΨΚτξ#š² ­i3vΆφ²AΌt ξΧ©β₯ρ}vy [jT°ά”8ω@Λœ8»²'’ˆοu±Ρ»κƒκ eκχρ=8F| ΎwΩΛ‘₯-νέΘΪΖά™t[nΐ]#ςξ½ϋTN{Εψκv9Ύ-qΫΪSS=ωΠYb vš_΅3χΰόaΓ@ξμ΅ΣW_‹mΟΕ?6ΪεΔYNΊΥ—φιžχƒWwΊ=k:γ"ϋVΔχΦvω£qšz{fΙsΕψ^½ήKβ{­=aέ“NDΜσ[γ{QO|iθqK±½ΣŽ#{ω½sa| ΎίΫβγ‚’θ¦ŒοΕ§ΨΗLM΅ZΏ άφ…3Ϊb$[Θ&3Ι•ΝšηΝe WŽο§ΕΌF:,2ί΄žHO΅›“ξε}ΛΊ²Nλ ’ώ!—ιϋgܚθφdb¬ΟΡΤEkΙ›χ-lέ7εέθB¨΅F<Υέ4š+d³©”1~ή2Ξ±%&NμΜM΄(j–ΞHœX΅Nτκψ^ύε9‘dAΘlLL΄ήχwvNάΏ?ωS §Œοε}Ξς{˜™³œΞ:ψ”2Ύ·ˆo,½oΉυ­ίΫ2qΕψ^4*ήGͺψ@| ΎWGσW'ߎm“šk—2ΦυΗζo•ρ,Ο›]ŸjimmΥ―'-νϋeY1²Ί]Δχ)½igs&Ϋί>eY@9«­Ϋ>>ί1”υλkn™ϊ„lk)ςZ¬ φ|wLάΪΚ„ƒŸάΡϊԐfΌΖ¨\νͺο)u| ΎίΛE“NβϋμΛ;–ο*y20Of3‘ΰxΉέ΅)“κŠ2ίλ’Ολ¬ίDίκ;8J¦"A-Νάξ)ΔBnc [Ύ»ιη‚p΅H‘"Ύψ,Έ[BλϋΉlrk¨έάήέ4€‡θγšεkξCg‡‘ ZξR_=’ƒx-Œd’Ι‹zQφχμοjζ-rSΙ£­‘Vs‘ύ‚ςz»οxŽŽŽ2e“w''"κ?š΄ΖχβOCΔ5δ™2|ρYˆΈϋΎP9—Ηχβ{ρ}>Ÿ7~κΔC5ϋkΟΎ—σ\SsB*:δ#ƒR,.]uΉr¬(5άώΥγ{½snΙ£œNίL‹ΛLZφ-2tε΅”ίsΫm}›\—Vώ|={:N§ΫΦή­iίΊloνdKγ‹―ύ1]γsG—ή}o;n£Τƒ‘¦ΛŠΤ#7™Ύ‘?)οCΙuξͺwίϋ³ΣΚ“sίΫ‡r}«iΆρωΦ ω‰ο€ψΎΚ‹Ι§ ΡΔι•(ώφρύέΟnοd£Ÿfωεύ»Ό'o &Ώˆο•q€py4™4§ΐ5«Qkι›UΌŸnΑήjQ²ZΛNςlυ]φ"-D[|oΨρ©#fΧΤάΎΛFΎ»eιζj:φν*kQμΉό9„EvύI*ΌX[ςΔuI=>΅/:Iώ>ΫΣ?ρΘ·κ Ge[}~Kυ1ƒ*"Ζ8πέ‘βϋφΈt β rpωOΔfƒ˜K—ξΎΧϋ(ΟNΤ¦¦Žυρ=€ψΎrrΡ?όύΣΏ2Φπ ¦αΕΨ_—fωƒ7a4,ϊ¦yΏyύαΪYΥωkίι€ψ>―€ˆοεˆίzχύWλQ©6ψ£%ΛnεΟ€ΓγOΎ*“ωώΨ>˜4Ν»ΡY'žϊώfΕΜ[έ¦φφYΡλA9{Υ‡’/}ήrw·»e=’)ΏΖ½X‹$Š΅ ₯δ1­ΆώηΌΰώΞΉrϋœXύ₯[N–΅[/^E-g”Όψχ[ecθξš?ΎxΉg©ymδΗKŸ'§ρύ¬χΌ₯n#iέ“ΔK―υΈ}ξυξ*3΅«na-‘·H,΅ΡΪ§5¦οΓˁζ―rp_œe3»-Ώ-¦3M>7}‘·ϋ₯xItΦχ£VώƒGνƒ1ηο‘ςˆο@ Ύ―€m|)7ιΙτΗ›ς>q»4ΛίψgyΣΰs[~b3·XΌ,«œ³ΎˆοΥ±£σX_έ§l{2²ϊωη…ΑΑ>ίΒψμ~4SΩy₯>™ύ—Ο^|χ]`d|ΙχΓσΓk{y‰³τ΅¬Œ¨ΧβΌώLtχε³₯ΑΑ…Aί ί`ψUδ8ΔήΦ³‘?ό —qϊsp<Ž^Έ{‘£H$Ω‹νEΣ™ϋΗ9‘υίsou|δΉg`yn=mλ#d’ΡΣy#±½½d4‘WnΩ7 άΖϊ~ocα—KƒΎΐ>}Σ>ίκ²w';mδϋΗ_-O`Ν6Ϋ¨WaΕ™§HΡ™ηMZ΅”l,u,Y΄ˆ 
ŠQ°ΪL6‘Ϋm&Η6]·Ϊ¦L…T(AκϊžΟG%„ΛεͺίΌωRωΧW+­±cRH—οοS8ωγμ άGΫύ\γ“εϋJξŝΟςα6ΏΥς‹Kς=p›ά}ΘχΩ’οτv«?Dπ™ v­ώ©ς}ά(ίίΰέχς=νv{ψsτ‹ωo‚ωΣη{δϋKΟWπ΄ς Šψ »½XyοΦ'υF§\vΪξ„o5γοt7ΦkωεΪβJ­΄Υ> wτ¦/εϋ~οπΈ{xά‹~οδη†σr»f£sš*ίχΪ­|‘:χ΄nσΟΆK»'Α•z;[υpΒΕ•7εέ£ήpW,ά`ΙχρΌώ’Έ?.έy²Έn 7?ΏωΝε{δϋ›oά?Ώι~±τφ,όΨέρ—Ÿ1[νŽgύσ"gfν0xηpλυ½pηΨV\XήΎjνϋΏW»Ροc[<Ν'εϋήβ/βηΉϋπeΉ\rΊ»wfόΘ™Ήυ½ϋ“yS? ί·άJ euέHϊ€{#ΉΩόζOΘχΘχ)4W^žgλίnφΒ—ε΅(d—v’[ωΨz8w~ώκpΤξ«Υθ<ρ-Κχ±ΣΎ7ί?*_+ίwζϋλrϋgA€Y­\}δδη{@ΎO/}~ΝπΊ0Ώως=ς}z½…(v?Z9ΏΛΏ―w―—οg£|ίo=xw3ϋσ'λνΓγΞFΉ:”οƒ·Z½ύδη‹η,TΫ΅ύφa‹ημ<{wίύ/ ƒεzz΅­½Ρ’@αΝ±v0Ψ~Ύš―Ά›ν“ςς«©ωοΏϋξ»αΟPτςφ%_7{ζ7Ώ|=ω>Y»Υκ畨’ΦΟΉϋΧις}­XŠΪ}i£ρroξ}8ί_ztνΫλ>ΊΆύ£Q»/ΧG—žΫκ…;v ѐ³/λΑυ½ι‰Οχ€|ŸœD“χIψlΧ½Ήsš?έόζOΜχΘχιmδž;υΤ\-ˆV_–ͺ/ΏM‘ο»s?­{»βGεϋγkδϋ±ΙgךW]ΊϋχhΘω­ή‡N;ρwίΗέ|ΦΜκΊ‘ΫKΖqζ7Š»οοS8y-)σ Πh6Žjυp;©οΎΎ;Ί‡}ωθϊωώδΡμπγ‹K±ώήΫέΌ½|Z}υξδ1‡cοFcˆξρ9‰ωο―•;Σ‹3ƒλήά9ΝoώΛχΘχiξƟ.ΫVO―›ολΣΡ-πυ ζνΑ½[Λχ•άσΡΛ„|ίoEΛμŒ/ο3ΠΫίΎ;©ωο΄Z­αΟPτςFe]σ›?ρ|€|ŸήβγBrΎ¬°ΨNο£‡ΔΖΎ|_ϊ|qψςοΥn3vo~τW„;?―Ύ'τW&φξ{@ΎΟ ›†;3ΌnφIΧόζ—ογδϋτϊϋη>ΗB­T~³ta+­ούhφΌΰίΟ΅—οϋ­αοWτχΣ­Κν-žΣ,―E37‚˜Ρu§ΖΟy^Ž/ž³nρ˜`ς}+fΈ3E²ΌΖρΩ]7ω<Χ™ΝόζO=Ώ| ί§R_y{Κkμ³cD­φ±GΧ–ŸoρΡ΅λkwΌ§ώlα·ηSύ¨xΏΟ^υθڳܟσς=L(ω>EM±?.ƒλ&KŸ‰Νoώ³Ιχ1ς}½ω‡ΡΝυΟZΑ{5ή=ΐvaμβ4ΖξmorζΛρ|,}ΎνyY .hצÝ›ο ωΖυς}pύ;€pOω$ώΘάΑV,ωπU}μkαNω&—|dBΎοShοMΕ–Άιόύηc©}tΓ~Έύ(WΫΩo•ΛΫfΓ—γω>Τ~3υnηκ~§ΧοΥͺΫQ[OΘχ£ΏL?ή,UΝ·A\βgΓνQαΰ°vzάΩ(oNG;§ΧFKΈ0d)_mνlΥη?_Κχ0Ήδϋf³ΩŠξlD/oXΧMq­„γ͟žωεϋω>•ΚΣ(R?¬žWΪx6jΩ₯~Χ£εςc[μY΅;ΕΥ+KΞχΡ"υQ…/w>>ίύφ£ΩLψr§Œμ,Ώόπς=L>wίg‘P³Ώω͟Αός=@DΎO―7­Λε“ΰ£›τσOκg?|²ΎwζRγ.>©Άžό£0ψcΐ؊σ΅reκrž[―ύhp†{O.ύ-an« ½mύραG5τJ.ϊμΨrό' sΕx‹Ώ7·έ .;έσθ·…ρΓ6K+] Λέχ sgςgŽΟθΊρcΜoώO<Ώ|‘ο³Σ¬·Κλυz½²{t|PΏS.οεWή,.ο-m΅O―w•£Zύ€ήξφ‚4N­Εβφ“ΒφΒ³Ν…bm£ρ‘Στήv›νNΈφ—^Y•οaBΙχ©σhzι³lz™gε8σ›_ΎοSa§Έzw¦pχ³ό˝`ΜΩΒo£s°“EΎΏMFcψ3½Œd݌™ίός} €|Ÿ ΅εΥΡΓuΛνQ¦?+?+οŸY­ΐĐοΧΦΦΒtxpp Ωzζ7ψΥ Ώ€αΧ0 "ί§ΕρΑ½‹KήΉτ£??Ÿ{dξILω~cc#¬‡ί|σΝGe͘θ­4ι³1ΑuγΗ'μOžΑόζO7ψΥ Ώ€αΧ0 "ί§ΗαΦφτ ΤΗ·_0QδϋαΝΏαIΒ_b©1ItpB6M:C&Χ?OŸ5ΏωΏύφΫπ«7όη/A ς}:t++Ϋœ+=ψσ‹pϋΡάΛωBm§}L@ΎώϋοΏώϊλaΑo{β dΚT2Ίω͟π‹~έ†ν>ό†_Γ @Ύ@ΎΧνvΣν>όCΘχΘχρ{π666O²½]@ψE Ώnα—.vί=ς=ςύĐοο²'ί ίdOΎ@ΎȜ|€|?αδ{δ{€μΙχΘχΩ“οο2'ί ίO8ωω 
{ς=ς=@φδ{δ{€ΜΙχΘχ@Ύ@ΎȞ|€|=ωω sς=ςύ„οο²&ί ίd@Ύop£δϋO@Ύ@ΎȞΕs°x@φδ{δ{€ΜΙχΘχ@Ύ@ΎȞ|€|=ω€‰ΟχU€2ς=ς=@δ{,ž3aδ{δ{€μΙχΘχΩ“οο2'ί ίO8ωω {ς=ς=@φδ{δ{€ΜΙχΘχ@Ύ@ΎȞ|€|=ωω sς=ςύ„οο²$ί';=>©ΥΒ­ήξqύnψΦ`;‰Ώ:l ί=:μΧΠοξμΆ6ΆZυγ³ ΑY-߈WψΖ½σwΏ|Tξνp}mψ©©ΟλΑ‡Χ§†—˜Y; ²QϊΌ0˜Ά°tL>@ΎȞ|Ÿl§πβ<ΠΟΕRzoώaψΦωφ£βΡε·χ7£w cq?ΙaυΥπƒΣ‰ωώmύό/?•UΎ/QL»Xš€|Θχ“ @Ύ%ψν(Α/.΅―HηΓνΟ―{—Σ)J/λΑ-ηϋYωο'€|Ÿ^ϋGQ γz'ΎΔΝ…νΕF?Έΰ,χόπ­©Ηo‚ΫΟχMωο'€|Ÿ,Vα§Ώ‹ιK‡ϋGΫόn/xηθ—3Qχ/w‚1½­zΎψ:W|_©ο4zWζϋ/Β—΅­7Ήg› ΟΆsΛ΅Κ~ηŠΕs*§ApΪh-·žm>)Ό^ͺΆOƒχλ΅[ωBuξi%άζŸm—vO‚+ν¬οΝ‡Gώ°UŸ,Χλo?6ίχŽ;υΖIΈ5{ΑHΏS)ο-δͺᐹε½₯j+ώP_@Ύ—οδϋd΅βκ(‘#'Ξλό‹\qmxΐ½§ΑHcoj΄κΞq0²³R™wŽoSΏ]+·ί“οοώvυGcΉRλΗσύκάηΟΓ_Ζ·βbύ,Σ[όβŝΨwΎ 0ζtοΑΜ—±ƒσ ‰ωώpkσnlY‘εWS±K‡|²Υ ώ―δϋΞi§ψΥWΑΐΒ,Μύη?Αΐ‹ενv;ώλύ/{wβΤΦ•¦ Ό‘^ΪnΛK₯”vεΛR.2ͺT–"Ž»c›Ϋτκ1½8Γ,‘ιž,ΔYΰkΫdAΩ>²aw¨xL;c†AΦ #L„±0FΖ’A`$FΧpτ™‰ϋr΄\„’ ΫΝσ«SΊχάsnUeyξα=…‚ψ>]dΔH’»ϊfψΰ₯…ƒ‡bfbWZΎ?~ΖKGR6•8ΙΡyzλ<KŠοΖOΠΉ˜ώ(Ηχf}.§žμκ‘Cι}ΈΉZΗζ”W~Ӟ4·ϊ€nίΣ΅-φ”b֎ά` > ΎwΧ;ώρ«ϊΤ՞“½|πcΞAεεΑσ ΝŠΡ‹ΌΔ^) ?;ωΔβ¬Ό}ρς>ϊΤηG<WβψžΎ{jξΠ›M&\{γIγΦ'Η£³ρžγ―ΤΫΌ3*ΰF‚ψώξ;ξΌΟzoΗσpΙφ[Ύτ•΅kŠθύB펭·―_»Ž~Ήg[qŸ·:lή°qnnNζρ}ή:ήq)‘9™;~(‘;;Η«Σ |Τ™θpΰΜ΄œwυ#­~ΓMΞG!{Σe©ŠŒ Ύ³•Fγψ>qyrέόžDVN­²7˜ίοkH.ΝΏ0η'\“){π>ίyEͺ¦h%λη—|—Œ¨ˆδψΎo–f>°XδηΝKz†-vΟiR1nνΚλ ίΣ:ϊϊW^ΎrεJΡ—Χ}ω+‹νKσ?οΊύΚξω£z– _Ϋλν¨¨+ρ½*­Ξ‘xf=Ωg€Ϋ£rž>μOZE>5n$ς υvfŒ#•=A™ŠΣ§(XWγϋCCB&›Ψ§”ΤWβϋΤa£}猷γκ˜ G₯Š#xξ|νύγmΔU™„οΘ₯„ψΪγ-ώΐ%=σUާNŽ«§ΖΗ"#ΓΣB^g€ψž"ψ[Ύό•―­-yΰΑ[βύ-ρFΏΠΗ―ίj™žž¦tžJθ)g©ΡΗϋo–δΣοg{ΟΚ| ΎΟAlt‡’’‹‹jXŸ\m¦—ιœ•Χϊυ€|ŸšσHχ¨³σR«ΡœgFΌiԐIŽΪŸκŒΙTβH­ƒkΝs,Ξφ ’ΖχΊϊ‘eξ|νύΚ-R]S+©ΡΏΪ:f2^ΕΝΉ―ή{€st`μͺΌβ{Z8/„˜˜˜ ˜>½QΩͺ“C©}( Φ­/Ύ{΅Œ©h>‘Bω2ˆο³αb8NgL^ΰR9‹•mζZ띉­y‘΅Σ(hQσρ¬M])Ο…wΜVΚΏβ»ͺ₯Ιί΄vρUYβϋ™‰}J)ό4jΈoί?τζ%!“ˆΘε}™7ΧνxεδeM^7€ψžφλ7P:ΰ}χs=ZGOGžϊ·'χŠΕbΫ,‘…ωT›ΆGώgςθ νaϋόώη,[nU ιPΕόψ‘5ωWΡ@|Ÿ]OS‡QOfβeύ;νξ5–̏]ˆςG†Ez|P₯3s£kONͺτM›ΟΔεŒΘεΖχ|•Ι+]νL•xEŽρύŽ'_άμ=Α΅Ύξ‘§udρ ιςΊ¬ΎdϋΓκ ϊ―~yMεΏ>F₯‚>>ϊέούlwUΖ— ϊψμΣΟP²Ο—S-Z›/W7qˆοWόΛhΎs umηŽ…t{@“ŠΨHβψCϋ;wU&Rι/Μ*aΊq•.M˜$ο*.LΟ΅ο—ίs)|ώƒfμUΛΥ{¦?5θ˜•ibΟWf.žσD|ο\§±Ν/ύ fΕψΨ„σδ'ϋ7Š·γcs ρ='οvΧιΣ;w<Κ»ΡΪy:(sΤγρΠΆΌ“--Υ§ai~°ͺΐ™3g^£_ί―δ—αέhΙu¦άH½ 
KμyŸΫ„©Λ»ŒP;=vzw<ΩAνθ°P“χ‡κyuή96‡ψž_?(oΨάΡCeοάko<ι0[ͺ/†‡Œ’7½Α€b>ΌIοοR»γπ¨ŒΣ‡»žtν¨μ8’Θ₯όy΅>βϋ–'ΤΝiιη]·ί144$?ͺŒo-Ύ‡FS7Άm|ο}ΉΚ@WWΧΛ ϊˆψ~Ώ αFQ{Ζ²6ܞpΕΤ%ž*ˆά5)USϋxΜ15ΎO―Ÿ3ΗkΫwΎ,eρ½œ’žFPž4&'ςΤ:υpzΈΞώOΖ₯jŽλν:6Ξ΅ψΥZ:κZ~jο\κΛήμ—9λ―[|ˆο…”Χ«esΆZn£βυΛlχί{ߏψ£wή~{zzZƍ ί~ΫVνΦM[ΈN¦¦¦.^ΌθΏNθΦ4 ω™μιΘΏ3άμΩ}ώ >βϋμϊ>Rχhν š½1šσψψœTŒŸρςΩ}ΗFΖ§„˜½v‘oθWΙυίynΟ·Ž#W΅Θδ‘7]Cλβψ›“w“¦¬ίŸίeΔχΡ¬τgcž4φIopVέ‰Χ»ΔtΗ$αψžKαΔx€Κρ:ύΤ—\†ˆ3ύ&$βϋOΗΗΧ―]ΗΛδ3Ά"σι§Άlάdܞ_όR=»vMΡΏ_.EhΊ,‘G Ί…WΠg’OΝb°ΦΈeAψoςΘξ©q(Ÿog¦u;©ηmΫN_[Ώ, ݘ̢Ϊωy ,Žβ1νog>ΥΑΟk'[Δχlιtώΐ™i™8zΘiT‰‘™ττrΝν‘Cη¦ΤχώΔϋ7ΊG_©MξLΟΔ§Δ+χΥ— ™’ρι£ο$Ηθ‰>ο’2UΤΟ ΆΞγώ«Ι”ΠΕ΅ψWΰQN͝nε’:js>r\H(4¬Ύέ―£ §>Ϗε°7Q‡γ«g©ΐΞάܜ\έSja₯M4―}ρ†Ά:,ΌΒ>“<TΩβ$–ρ ί f‡ιΚgΝξΉΡεK'ζάΩΌ΅νy}Z<­»=qΗ’—tωyϊ΄~gό‹lk7SdOԏˆο eς63έΧ7Ϊ}ωτ™gίx0rM.i|l’γΜ¨³{€£o":+σ§G&;:Η]£'‡ŽvŽ^ˆiJ\πwQη!κάα›Τσώξ=g.ωhΘΩ7r΄•~NDgδuˆο£ΡθΊ5E΄:ώO΅·α5|­hνc–>χμ³Φ{ξαlo‰7Λ–[χ›ίPάO?©ύfο―‹οήΖgy―Ϊ;ΆήNΓώεX3 e―·Wώλ·ΔΗαUωΩ鞲BΗχ…O₯YΑŸI°ϊ X}Ο ~nέ²Ηχ;}Η>2ZΰΔρσΰTGΫQ»β Ύήί΅ν mΤ?,Vw|Οa½y‚β9ΩΎ Ύ§νdŸyκiJΫ)[ίΌacbi<νCϋ½o‡~ωΕOFΗΥφpΙv™lffζ‘ν§tΫvΧέtκ›ΆG¨%–αSŽOΗΏ~«₯ͺςχΡhTf₯{•¨ΊΜ£Λ•ζkPβ{»W^!ŸI΄ §ΉΉe^s³ΛE|7$ψΩ;dοwΌ.dφWσ€oυΚΒ[}ρ=Ητζ >4‡ψί‡B!ZPΟΕmh5ύ_EϋΝN³‰ψ>e―Z›ίσΆ·ίΣ)΅=ϋτ3tŠζΣ’ώƒ:πϋηSτ’`ΐ7 —"ΒΑ ίΣh΅0k£Ητ5‘΄j“½¦ͺ’<’Ί¦Αα ™‰r4Ω+ΚΛΚΛ«κš<Α¨4mώ†M‹7΄V4Ξί/€Ιl’AOC]υό βjμn_H&ΡΌnW‚Ϋ;*μuΤUW•—•7ϋx|τΊ›μuUU Υuφ—7* L²ΣΓ~Ϋ“ΰK{DQΏΫ^]^j#τƒ”VΥ5ϊΒ:β{sHπMOεί—Ό4#3ΓOs‚ΏwRd"φIχπ‰γ΄`Έ½;ΦΣˆM‡ΓΣ‘XβΓ΄ΏΧθ|*½³Πc3ZLˆΜΫΟ†Ϋ[σΧΆ†?Ή(€) ΅΅†Ϊ!χΒ|"7O|Ού >‘ˁψP‚ΪαΦM[²qœξ±Y2«ρ$ςeαn¨°dd­ .ͺ Λn΅˜τ-oHνή:›IΊ¨\B΄ΉΊΤ’‘­Κ2AsΫ”’<-‹3―KΜUχΧ€ Γͺ›}…~&Ÿ½xŽΦRSf1QΥθYιψ~` Αΐ@>Ηί Α7Λξσο™vͺέΉ§mD*bν―σ)nΞ=΅ŸFΤ•οΫΕy†Ο:?.IνάώB«P&Σnό)ΐLςΜΎΚΔ j+ξ=Φ+SΕ/μL»ΕαΐAŽοoΎμώ³/·G|ˆοϋϝKΤ―§FλθΥ­e)ΎΟuλڟν.‹Εb)ΛσύξχgΟm[?ξώX.Mγ Z™šw G΅Ν²” ŸnŒδk΄,ΙZΥ"€τΨΝRx{Tš‰r­U~‘\°ήšρλ„kψΈ‰―Vψg’ϋ&ZcΉΕwv­`|ߟ{ΏΕφώ‡7Ηγύˆο―@‚o’έηί31τψζ…hώ`·4D>Ψk>c‘δΒ5ζνγ#ι“ΡΥ‚ψζעןR³ϋσ{L{r|sdχω'ψˆορ}γ{ο―]SΔ›Σξϊα$γβ9ΚΦ΅‹ωHςGͺ½Γ/mu˝Ώϊ•5―Ότ²\‚πΧΜ—]IIKmΦ&Ώϋ,Ι¨ς‹--€–στ†RυhiMCcScrŒ”Ί5l©αϋ±R*ϋ¬ΛΜ‚Ν©©·-5†/oςs|ŸQ;*όM)ί³’ͺ’Μ–|¨Ζ]ψg’k|rT§>ΐςŠςΤgθΣW,ΎŸ‹ύΌΩ›ϋqΔχ7@‚―fχωΗχ,ςώξ…Έάˆοη‡3τΚ·#α؜W<‡y1>― 
OŽο·ϋΪz©Ž8Υ΅m±§0ŒΪ³xνωφώ!ζ΄‘‘§·σ˜ύgc2.¦ΌNθ<ψa,ΣΓƒηΉ'Ηχ7[vΟNŸ>-—ρ= Ύ?Ϋ{φ>λ½\άζ‡₯IΖ[ΧζΨκ’†h4ΪεvS±΅8Oη©S2+αU2εRc›VΡRaUΛΒ5j’Ιe[ΚΌΤ_ψ«”CUn=}u?ŸIK₯½r)αδr;ε FGEΘ]ž’7κγ{kYmύΪΤμ›Oο+Ԝž Υ»ͺ¬jD^ψg’S|Ÿφ7e-~MΖ…\v‹ͺ’Y¬P|ο5‰γM#ΎΏΑ|ΞξW ΎŸ9ϋ’΅'Ίι~#Uo?xJͺΒN#Αί‰δψ~g(,…Ž·)%υ3NFψ/^‘ͺΠ Ή|[e«œ7ΓΛω JE˜'ΐρ=²{Δχ°JŠηΜΜΜ|ϋ‘o&ŠΫίζ>q<`΄αvηωJ΅P>Ηχ\ε†Ε|;8UΟ8^γOχ};ΤήJγ­5ΤφzΫΖΕτΊ½Z-ο“ŒΗΙί‡:\[WWŸ»ΊΊΪ?·o μρ= ΎΈ0Γ^wX¦ωτ­ΛŒοΟ½»oχg΅η­37Vvψ°ϊž‚ψϊΗ JΥ“7§Νώ±ώ•WιrΪ¨–2ϊԝlM>ξύ՞ŽίA—¬`|ΟΫ½ΖEύŽ*Ϊ@՜έΝ=ΎoΚ9Ύ7ιμSjί3[YEEYr|_ψgς™γ{›=υb]­αcmκˆο²{:’ρΰηίGϊJŒ²ςm#)ρ}ΫΆΝ™uώ`0Οψ~¨²˜—ω›έˆŠη|ͺ/–Ωιsσ]]n|lwmm]ξjkχΏΫΌQ²{Δχ€ψž ψΈ~Ξς["Ύ'=ZΊΜKΤuχyΟ±9ΒB }R‘~"ѐίγjΆΧTΨΜBν<Šη¨ κυΫ^gohh Zφ -^‘%ΎΧμ6u*U._H’„š+ΤΙώ™δQ<Η抦€χ Ο“μ>λ©όγϋΉο[<NΩΙφu!³Θ#Ύυ― ΞϋuΉ€Ιφ.Q<ηό.ΎΡ*«wψ°uνύχήΗυmx―Ϊ¬9Ύ§κω|vιΆν»c±Xξρ=GδΑͺ”j0LiΨΈš»7*u_ƒ2€]7ϊ…ƒžj›E=₯₯§v―4θaΏΗΰ FΣ·­jJƒ§Ξ–Ύu­i|―'ν:Ϋਲ©ωuz&šίΓΜχψM{ ev:ΘτΖr‹’ά'ί0dχ9tΘίλ2£‹=%Ƙ»ή2N;Υnl2EμD΅³d;5Ώ_Ο/Ύ_,‰ΣωΑ`ϊΔzw·S«όPπ…Œ_Dο₯aωFΘξίΐ*Šο)OɏvQ°NEπΣΧΛρΟ΄ί !h΅7ώHƒί}ǝ?ϊώΖΖΖrοmΝΎ°λRŠ–*«EQΣμ‰κBϋμεκq«#ΜkδΉ€;―’ΧΛ²¬Ύ·V5…5Mi‰Ό΅.‘ί+LjΥξπiΊζsΤ©“(kτΙ,ρ½·L‘#$η _KM†΅π…&šΫ¦»T•!½©ά’*―k Ρ ϋ’χ—r²ψΩ}ξέLγ{“uτ"άΝΩ=΅D7»Ν(§slPͺ΄ήNξΘ7ΎŸnϋC+Ώ'ΠSΚΩο^8΅ηΓ9ϊα7 _θ₯ ?Š1γίΩ=β{X-ρ=ϋωOΖΜή³­ΈΛν¦²ψf·%‰ ψ}ήΎ”΄σ-o{ϋ½oGζB­ Γμ^M†VK6εΝ‚Γρ$V›ΝfIVΥ4Ξσ “vaε΄Z]ͺrT[Lq)³Ο)M©z_f³¦dΧ ώLxζ¬Ξ<Ύ'ΒίdɎΏ>β{€Ud²§#{(ožΰΣεˊο7nξ||oΧbΫέ^’΄mlG}·TL·Wσ©ΞϊΦiMŸ±IχαΎdΟa‘oρζχ­Ϋφžb&2θz'ίύό'ϊBLB1μωΰ” ‘ ½ΐ=ωFΘξίΐ*‹ο'&&6¬[―nN[ώ{ŸίΥ΅Ώχο­ΝΛe8Ϋ{φα’νΏόΩ/μυφͺΚί«[ΧRmύ@ sRr5A:j–ή‘•—S„Μ{rεϋ΅Te<›ίΫΈΏΦT±Tfήΰ σ’ωReD·&UΑζ*Kv垰(ό3Qgž5Ύ'ώž]FΦF/€ψ`΅―Ύ§ίσο¬ΖχνΤ3[λ¬wΞ₯]xΊΨό’έa-{νϋ~#ΎΣx2©pDΰΈΣ|bouK&ΈΞIγ]'/ΗΙΈΒeχˆορύ΅ώΧΖυ,[nUχ°έjΉ­lΧOθΘ3O=-—"{κΌsΗ£›7lΌE)›συ[-›Φohϊσ2zΠUΑωqrύ=δ)ːžWΨšLφΆ$—oaΦ*»#*UΊΛ^Ε§ΉTŽ·‘\ uΙ„§±:Σΐ厠KχͺΧ{u™LΈS|[E£?θ*SŽ”5x όLkϋ4ϊ†ρ)ΕμΛμ^©Π|-εΦLγ–Υo ί¬κŸ~~žΉ.ΌIk+ΩΩsπp$¬KŸ~Pέ–~՞ڈ&Yd‘ΚΝf^&oˆ;ΚξώT_˜L§QdF*"½½»6§M―ΈοDΏL!F|§Fό‡ήίΛ7ΊAβϋΒeχˆορ=Ϋσ‹_ςζ΄vωΧώ—e–ΎydϋΓό}ύς—cΝΌ]-₯ω2Z4F5]$Ÿ =.—ΓαhiiqΈ½!MHѐΟεhinn!τΓνυ›φΪβύ4]f₯‡=–¦¦·ΫΥΤΤμςrrŸ‘…Όn—Ηλu»=Α°vύŸI^DΘηi™nWKsS³#±ί/A|€*:9w.=ό©»5Πvj¬½uΨέ ΗVκ>ήαΗΗ<§†ΫœαO.Έ₯{ΆŽΉΓνέ±ˆ.oί³‚fχˆορ}ρέΫxαόώκgG†‡)”§…ω—‚—δ’hOΪυkΧύ τ±X,FUwxλZZΡO•ρ%@.^ΌθΏh`uKο 
—έ#ΎΔχ”³η›ί¦ͺχάσςωDύzͺŠσθwΏGEνOwŸ‰£Šω=Ο›―Ώρ£οΰkEkΞkJόίύϊ7”ώ_ΉrEδajjκ:&ψtkš€Xέg½{Δχ€ψž…/‡©„ŽΊ“mΚΗ’―¬‘6D9›ςq``@ώM€"»G|ˆοC‘Ετ΄pž–αΣ/95ZtO΅ςι—>oŸΘβ{@|ρύBPE{ZPχw[STd¬ΑOi_ύς*šŸX›Fως,ίϋ|>ϊ2Ψ<ΰσ­Ohυω|Ϋ/¨δύΏύαTιώ—?ϋΥΖΉcλνT?±ΠΎlΧOξΩVLΏP!Dν‹5%<(ί/νΊF_&‹IψœΔb1ϊG+ύv•¬ύoοήjΉmπό EωχYο½λφ;hOΪ‘αaZ˜τΓ£ρύg‰DθΛψύώΩΩY y£œϊύ~ϊGk$Y=₯{„2ξ‚ίίξ\ςΑά Ύλ_:44”Hπc±ͺθδS3'‹%²{ϊG+ύvΥΔχˆοWΐ΅kΧ >δ/‘έΣ?Z%A|€ψ>Ο5ψTδΚ4'v²€ΟΐησΡ?H—»FͺŒβ8Ίkάιή|e:l Ω@6•€LυtαN\/Ξ9MΌώΟόιtš½{ω@ΎΐPεΘχ ίΘχ*ωδ{ωC&ί€| ί`¨δ{οδ{ @‚|ς=€|€‘ο@Ύο0Tiς=Θχς=† @Ύω@ΎΐPεΘχ ίΘχ*ωδ{ωC• ί€| ί`¨ςδ{ ΥΥ? ίΏω@Ύ<@Ύ@Ύο@Ύο0T?€|€‘ο@Ύο0Tyς=Θχς=† @Ύω@ΎΐPεΘχ ίΘχ*ωδ{ωC• ί€| ί`¨δ{οδ{ U€|ς=€|€‘ο@Ύο0Tyς=Θχς=ς=€|ς=€|€‘H“ο@Ύοο»kšζππ°”²9ˆ“ΈŒ›ρ¨οΰ+‡κμ쬔²τ¬x!^λδ{ψ쑚L&UU-έ΅ΏΏ2ˆ“₯»βεψ€οΰ“†ͺmΫέέέ₯Αςςr]Χ———‹Ε’Ώ%.γf<Š–ρI|ΨΘχπp¨ςν~uuui0]³w&²*YΊ―}£=ΩSΦδ›ς I Ι`y¨› X ƒ€PByxr#…”pο­ |·Ή0χzi―άΑφάδ~Ώξκν‰ŒΘˆ+bΕ—aďʲVŒΙ¬‰ρƒ† ~ί΅ΝD«ϊ£»f¦ΥΓΐΉ~ΊιτηΤM5RΦm7όπ «Κ±κNoT βζې^Υe5Bέγ©θηΖ£ΆΣμΙ6¬— @ΧNŽV·ϊ)Ζ«;ύΉ)0 £(JΛv0GžνσΓό'(ϊ„hŠκΪnέΙΫfώηG2€O„Ύ¬¨/bΐώε’y;π"δ{˜¨ΜΞΜ‘}χ_}υΥwί}7“‰iΎΙ):ΡޚρQ5c'-ŒJϊg?SC—;sΪS7ό@ΙOsœr5l@eۜκλdOhς‚kaΎκ#(€ξμXδhۏWƞ5ΫΊbϋΩ`†AΆΟ πŸ \ΈκσQœς§ΈΡΓͺpo]Ή”ύπόωώ½ΊΜ™§€ώσ/—δν°a„ ίΐD5aOΪύpΛο{Οσώξοώξ―ώκ―ώΧϊ_ΏωΝoΎύφΫa)ψc&Γ[©£ƒυ‚sjYΔΈqŒ›α]‡ΦŒ“΄cς|NΎ;wΓ€ΎxYΏΊηb±τu7 »|7[ଆν‘ϊξžQvζΖ*Ξ;z²M·”») ΌΜφω>uιξ²³ŒMΰοšm_œ­Ξ) d»=Ο?ΰ3— ˜«>%Ί:XWά Ώ£–ξHγ?Ζzx~€|Hώ ΄ϊ؝rσο9όηοTςvΨ>ο`’ϊψρ#™³άw§?ύιΧΏώ΅Εψ›Ώω›’(–{πι1«αMθ:²fv™βρ‚C¬_KΗ«jβ·ΛτΞDΎΟwlΑ°u°Ψ—Ϋ²Η=„ύ` δϋ28X–=ώoPφΫtaΉ`ϋ.AΎ'Œ³­‚YtύΈQͺΧƒ9ςlŸŸ­|ξεBΎRTv²ψiΛΓΖψh“ΐŸχψΑΙχ|@xfωžxO8ΰ?§’·Γφ€|ΥO~ς“—»j_wOΪ=η/ώβ/ώπ‡?,ΟΑΉΙvΜjx#υ¬Α³ψ¦ΛOΦ ‡J³%ά„γg@€6θΡ‚αœ?°ωώKv€.-Τ³η”οιΙΉθΒ–-ΐ¨ώϊθθ@ύ±―”΄‹uΕKšα–Μδ{ŒxθTFEς=LTŸ>}²&>|ψπκΜk•ύΧŒ·&Ζ 9\…gzbζο¬[‚j)‘4ž}Υύ3u#ίο.cΊΎ­8šnŒ³’¦O2ϊΊΜ“ψ3IšΧm?0:Վtύ|―Z‘ΝΙ)[ω=«ωτF’8-ͺV³Φx)Kυό_ϊω_^²κ:UΕσΉο^¬:₯ΊžΙI}U€Q4•–dυόQŽσ$ΈΌDIΞΪ`ͺφˆκτTx™%tOcίw#½¦”}»Κ”o–ΉIσr₯I'›&I:’Ν–*Ν–=Τβh2ΛX©¬h;ήψ‹JjUdΙKκ$+y±S9 UΙά “²ιV#{*h€ηΉ–Y<Ϋ(YΉ‚Ί¬v^.‘U•DTr”W­‰ƒP¦e–„A0~Ν8“Uτ5%τm‡σW ©aεξΆ­kΛLIΦW‰7Ϋ'”’]”fn#±&θzό«?ύ2΄¦y©XUΕ-―䝜{–ξu—zΞUο«uίχΪ΄Uί#[b9 τm™ΔŸ[ΊΥΛLκμ,Žνy³,ΛαηπΣΙϋ‚ε5_©DΑβρr9Ψ컞Rύza‘P²Ά¬τ7« Ο‘D5ΩnJΙ`ΗCSρ—#OοPι ΊΨŸ«Fμό0>ΙχUrkΓq½ 
ΡD‚ˆw˜]T(~ΆλΌj;/R²³οΟE·θ;±kqœKΦ€³HΪ\mκQͺξ~sΛΟΤZ K^„&νΎ—»όϊ€oξ›ζogωX*©‚qdXΥMΪεκ*»―τΐŸ»A%?X;ˆΓQYδ{˜¨~ώσŸ[ί|σΝ°`Ό«ΦΊ2, ‡χ?,?nMŒo£Mμk€Uττ0Άηg‘ξ 9΅©O’I·|}ΥΆξbϋΛO¬»ΈΐΟΩα5Γ:νΙ±V«žΚβηπΥΛΌ e:¬Z$γπ—Ν»"°ξc’ž•~ۜΒωJUι€#:MβUΗdDμ­Uή"ί·ΩΙZͺ?tηέzΗ²ΞΉZŠzkIέ‹Z‘οΩ•_¬ϋU/―Λz·ab±Zms'υΓBn{‡ \·’Ί¬”΄{Ή»mοΪS²•όš”π6Z‘» άιή~JΘ][>^Ι–ΙχωΙbΨ~nΨͺβl͍Eςύj&44Χθήx6Cu™c­³ΛΊ<ε ,Λ5›ΏΦ+Θa‘cG΅–Ηζemκͺ¬[v(ψκΌ»½ΓΆΛιΑ>(χ,Q|΅^ίmζy2Ή…–23¨άvσ=;­uΧDގ·j½XνOΡXSΥV·3“ο» ΅1¨;ύςZpzLxšRŸΥm―ϋ¦€΅ΐΘ±&ω~Ή€ 3ΥχS«ΩςF ίλrΏ(7―•Φz΄ζΡ‘‡a/―Λz·‘rY›{Ρτ³ΪTeΪ@D˘G€£νU^˜+==jŠ“;gκxιpŸj!ψQz­ΗR‚γr&u·ν][bΚώ³}’ω‘£}&crήΟFλΞbΫ/]')›^5MφόnΉkΛΗ+y'η~4–έΤα΅Qa>ώΩvΪ°UΩ‹‘KF©] άΣΣγ%©[ΥΦΕω@}ˆΖα75΅xR΄ͺ­@ߍ¬ΐόΎH“, φΛοή>δ)w\€•+ŸΏδ”ΚχUνψρ”>Ni”ͺ‹xo/+%Ž7ŒΛΪΐUy Π‘ew^^ΏL[]Ÿθ{₯jx;MLυ>Mκ;4΅Άž΅_­Χw£Ή@žLlD‘₯Μ * – \˜ΛχΌUw*μƒjXPGs‘φ!8ό Ϋϋ(―»NUYp‰>κςw'}mξ›ζogωX*―‚IdΨ&K3―(νYH―ΨEkΗZ~πv‡£ς"δ{˜¨Φ±§Υέ―~υ+aqΦ•ίύξwΓ-c&ΣjΡήΚυaŠ΄t4’v4½…X^χ9ψσ ur˜κ΄UχAq{ŒώœϊU/IΓ=EΫΩ°€Mi{NάθWΩΊ·‰iEr ‘Cš maϋ}vΚχ΄^t—?`P2ώ*m“€‹ΰθ’Ϋ«‡rΚδiζžhοήκ&ΊόLŸέ_ŠωsUHϊj5˜ϊ³μc2Ηβ΄Μˆ*ώ=ί(ίλκΪ<² υ–9O.ίΫUžL@›h–K_Ύ£³ΌμIzhDςύςτ€cΝε$²²¬.‚nCί€¦‚½ΈyeNχՏar‘”ΞιΦ\ε5_ ξΈ>F5w :\Kθn»ΆΤ”Λ[μϊa•χ°‘@”‘ΧGXYΗΈ~Μ΅εγ•άaοyΦΕ]Ξζ­*ΘΦΘX4?5šeBσ kX‡n.2YΛΔS|EΎ:»6Ω΄θφξΌ\ƒωk½‚Z”L±%‹Pςσς0%yΌaXΦΚczn·‡Ήσs/Sƒδbl㹁gΙδ{^ί ηi2Ή%–26¨¨Ψι%d…{ρ‹θι†ώqOα.ΐΛ}|ώ’UΓC‹Š=g£=;(Yo˜—υ~Κc2mΦnϊ›+'θ2LΓ·3 ΗOM=K&ίσϊn8“ɍ(²”±AεΑ’ά…%ς½΄Ή(h€Ρ•$`j(B~8~μίΎΔΰ#ž‘oζƒ ogωX*―‚QdΈ(ΧIνΗ'¨•θ2Ÿ8ό`ν G₯EΘχζΐα9KνώΗ?ώρoϋΫ?ωΟΓππŠžIyΧε…‚QφΦͺΧ,7ΈAΟCέΉcκ ΕΦτcΐ„$iΗD’¦ΑΡ¦5ΖmΆ—΅l έTe–&Qp9ωήa?‡œο$ߟ‹Ž­=Hιf,qΦρlΕς}_μWΦRΏάšdƒτ•’‹mQιτΣΞ’š,^ 9ΊkΚyΗƒλ°yXkάλόΟDΖEΉΚ .Λχ=«ΕJ•ΕB-@ΨϋƒΔiY³κ1θ+9΄—ΝάέΆsmΉ)Eςύ;ΨH ίίQ7ϊ2Xδ wmωx%νδζς½ΌUί)[6‘I2!υŠΖays‰­ ο‹`ΟX—{Š\Ύ7šΏΦ+Θα‘?wΞκ‘xΓΌ¬Ν]uώ}°w{€y©ϊΑVΥKξY†ς=―ο&s<™άˆRK™T,Ι]X$ίσζ’Ύμ»vΥ» όϋz«Β·/1ψˆgθ›„ι`ΒΫY>–Κ«`ŽΛ—³ΙΎŽ‡Ε™tn’k>ρΙΓQqrδ{ΈΊΦΓ«kΩN;QCEΡX¦Ψ EvάkΌτB( °λ”Xžν*e|v,Ζ;Ιχ”l%X$aΒtΉΠ,•οΫӎͺΈΟ»WkNΫZ…"ϋόš‹Ÿ΅wχ·Fΐ]yή―™%οqv—ν-Ήή%ςσŽšρί{φnY]δέ¦ φτ§‰&Β[C·Ω”5ΓޝΒl₯–τ•(+swΫΜ΅™)ε{Θχl΅O§" <βΪςρJήΙες½y«Κ³57iL|m{§˜5Χγς=»)Χ½δrOy\Ύ7šΏŒδ{ͺ΅όΉyΌ!/λΛΈͺΞΞ|PŸΟΰΞκn0ρε%ν{ ƒςc‹Ή@žμ#Κ-e`Py°dΰΒΜ”¬ΉdΏΡ +»Έ|λτΚ―sjο{ΦcKŒ5ωήά7ΝώD>–Κ«`ΛΩτλ‹ς§bνcΪ7ΡυwšzκΓrΛ—ΩΔ'G₯E¬ 
ίΐDsΔ“eΩ`ΐψqkbΜΠ ›…J›N’, kšΔ»Ύ‰™Σ…AΥΚχφ=Ζ‚ΒJž­@»!lgηΌsx_JΎ§₯[δ›Λχ$'9Tίp6ΡGGG`ΓςέπS¦Z§˜Ύ`Vqάύα„ώξž|oΕνέ©γg7]kqk“]ͺ’Αœ3½lZa·!gΌ·ρ°7•†»" ΄-‘RΊ>>ΨτFŽΉ»mεΪΜ”¦ς½ΐFΚχŠ\ }Θ΅εγ•Ό“ΛΕ8σV•gkn,vΞ>7}δαζ2—οu/Iλ ΌR ί³ωλiδ{ƒxCϊ|SW%Ί¦Oǝcq‚²Œ‘ΟygβYζςύs<Ω#F”[Κΐ ς`Ιΐ…™)₯ΝŎcr’1q—:μ°ώUωώ¬ξ o\bπΟά7ΏΌ|/―‚Ad8S_²U™C'φdˆ;’ςη67˜ψX8ϊξς=€|Υ§OŸ¬‰> w° w?nMŒtωiŽŽ\ΧYn!TςςάήΉ;{ρ΅|/Έτί@ωbvνOQ­:~Πδο‡φ4·ςΎΤ£φl£έ=ep°Χ”qkŸv>οͺπ°rxNΰΚ#`vόθŒζU―™"ΐδ{ΊhA΅άsa|xΩΘε΅λκ"Ι«^\y·QΩ‰½έrο[Ρ}[Ix³E˜NfΊ“O—ͺ±|T‘}¦l{Ή»mιΪrSŠεϋ lΔ».DΌrzΎά΅εγ•ΤaΝε{y«ςlίΫX=-—rύπœΗšΛ\ΎοΛΓβ`ςΖΘSdς½ωό΅½|ooHŸoκͺœ^΅E–œ=²ιφQ#Xιže.ίo0H“ɍ(·”AεΑ’ 3SŠ[•™ϋ7mr\ή5Κaxξx}yyϋƒxζΎωεε{q "C"9ΪσΦϊΠ£k:0Η>†—ΓΛσ¨Ρ¦ς½<• ίΐD%<ώώίύ»W–εΫδϋρƒγΗ Ύg{[l‹XΗhΩC)lΊηφ :{—νΨEŽDϋ޳wή3n¨|΅‰sΥ±[Ύ5Lr7՝Λχ,‚|ClΝ·­Ρ9ž–νՏ/Tv&#Ί,Ί% ‰ν…!jημΖ=F=52ΫτDkζ·Eΐ©οΜVI[φΣΒέέχ—²ηYύΊε­ wχΘ»Ε·Ι€Ύ σ@ωW?Θ:i]δέ†~K£~Έ@‡{‹έ΅(rΎŠFΏrμ]Ptόg˜u­€ΛΩ]aLΩ·ύLξn[ΊΆά”BωήΐFςύα"σ―.”ΈΆ|Ό’:¬Ή|/oUyΆ¦Ζb™π±EΧ‘E‘oζς=έ8γfΚΠSdς½Αό΅½|ooȟoξͺ}δΉΞ˜μάΖξœ7€•nΰY²ψj₯Δ­ζa2±₯–23¨υς=LT?~΄&~τ£}χέwΚχγGΖZcVƒΝau7ϊgG?*Ζ5Χ€ή:FΥ«€{ϊ΅‘|OλϋΝ©Wρ«s]xž ZŒRΠΏ\ϋ4±G΅ζς½}Lޏζg—Šo‡%šΞ&"…WΎ<ΠMμX3Žί_νR¨¦΄š,@?Ο¨Œ¬ξίnδΟN;~ŽsίVωDΥφk―œOuδςV“ψ$Eε=λ™»σMύ©²σb•ΊΔrΗNrƒoY«ή=ƒž·Κ΄lq]δέ¦ΣP-ZΎ^ns“;I-t%5‘XχΎ@OΥΌUύ¬•»Ϋ–-6εNBι62‘οωχ'§S δ-―δk*ίΛ[•gϋξΖZŒ„ηL±[Fθ#ςζ2—οuβ948Ζ΅Ή§pΰεšΝ_ΫΛχζρ†όωΦͺΞΞ2Ο*±^ΕͺfS³©|/χ,y|΅^βFs0™άˆBKI“q–Œ\˜Ήƒ¬Ήυz3Πξ^₯XΐΙί1R)Uϊ-K >β™ϋζ——οεU0Š ΩaS·ΑmsuL:΅Ι\Ύ‡£ς"DΘχ0Qύρ?¬‰―ΎϊκQω~όˆ51f2˜BXˆc}"2ώ€\’β›5φη€νz­»* ]vΨ’ρ 9…•ξ) κ»ΆHwTΊηΊ{©KλΣsη˜–uUΡi`) dZΏ¦yY+q°Θβݝ_4ξ{ΥV‘Giύ&^θϊ8gzζŸ/ηΣ-ώ9Μυ«šΪ‡€l΅ΦKŸσ[^#–ω=τΒLυΊοšΘŸS²]Km~Y•¨šn\6}ί·uLyη¬ΦΧ¦£ZΩϋSVι»*½ΠΧ’ύ°Τ%μ)­emΧwmv?­]%"#½^@½§hTΧ΅yD-7ok’ΦEΦmψOe~TtZχ„ž:~ϊ°ίu7ΎΠL}^wmx\R\NqρΉ-κܟ‰ΆΉΙέmKΧ–›rΩς;?Μ‹RυΗΐFς=±σ’’V]WeCN|Œ‡ΉkΛΗ+y'—ΛχΖ­*ΟΦΨX|$<^’ͺς$ΨΣ3ϊˆΌΉΜTŒ:>.»Ε%ΈœΨxŸ·Zξ)χ\€·‰Αό΅½|ooȟoνͺω™ογβ%&Σm•y;R` >5›Θχ+―ΦJά`.&“Qn)ΉA9ς`ΙΤ…™;ˆš‹ΚHωo•9τΗΛj₯uί•ϋ¦%ƝΟά7Ώ΄|/‚AdΘJ·§IΓξ9 οl,ίΛΓQq"δ{˜¨ώψΗ?ώυ_5)ψ΄1iχγΗΗLsθε\Ϊ=±~E'Χχ{v΅ΤšDΥǞcέeŸwoΛ–£S*h 'λ–ΫR8<΄rΒ>©Υ`ρΌ[œ%J;1ογ'υZ˜Λ‡’pϋž#}―΄‹{ΙBΧkMH•’―ʍΒi‘UNω5ΞΆWR%^t‰Υ€^ςΠy»mv^ωrν£u‘w]ϋΞJ^~­uώέ²₯ψ᳜–„m½”»Ϋ–-5%ΧLψ™οg#‚kΨΗ²ΉkΛΗ+y'ΏγY;^I«ΎCΆ‚;u}Z5‰ 
ςζ[u$R¬?ζ)άx6ŸΏx9λξ#noˆŸoξͺ}±2¦Σόlj6ο V_­•ΈΕ\ J&7’άRrƒrδΑ’© 3w6‘RΗ"\‘„ZǞ΅Κ£KŒ•Iίά7Ν.ώD>–Κ«`²»£ΈˆdΆ»Α`⇣"€Θχ0Q}ύυΧα?ό:E‡ŸƒΟΟ»§3sΖŽΜœAταήζ2θςγ΄«…0ΐ£ΐ"τmΎΌέŸΚΞ$[ŽŠΌu‹γϊE§—η5†λΣΣόΨyQUF/Aθeq:dϋ―„›r^Ϊ׍9DΈ|υ5¨"t-ŽŠEΖ‹yγΈAGή‡RRρΎΛ-ΰœbΦ us98―s:ΕipΈ©΅ΐͺF@uίρ΅~TκE-lŸδϋ)Λ‹o3ΫdMΟΕPηxςχ―ΣƒL―Ά*}ωΛ’ vUL9ϋSά>^—ΥnCενrŸαz‘z»ίι,8ZΛφζΧ2ΦιbςΒ9DEϋΈ»mιΪS}ur™œΔx?­ Όφώ’F>0Ο5³œΠ΅εγ•Ό“sΟ’'¬‡›΅κ;eΫ—‹A•ΣeΑ­\i»A–_φ6ΉσCΝ%Άπιƒk ρξ·ε><Ι+H¬Ί{ΌzξsyΌa^Φ»Ί*σ)B7ΩqgqlΧΟ[ΝspΟΕ›_Ν<ΓJγ«υϊn2ˆ’ɍ(·”<G,™† |@7[MΨ‡XRšμβ°β<=Ϊ.1$“Ύ‘o\Όεc©Ό †‘!]c_gΉ–‹?bί6‚ΑΔ' G/@Ύ€‰JΰΣόρ*Ϊ>dYφΝ7ί Ζ?Η‡γ? hί=iχOLίY'Y–&I–Χm7Ό]S¦ΙgΟ₯ττ\ΥΥHέ(=,θUUωHQ6ͺ_ύώͺišΆm»^ΏΉ ͺ<>Ž€yΥ›’κ"‰γ,ΟΖΆΙΛΊΣk)gS₯y­ϊυ‹"e[όΊ21ΛTτMϋV ΡΛi‘_:Μό‘’nωωNΛΜΫΊΈ½hšU7ežMδE₯ϊ·ΦερnΣ·U_;GœVfΥ Lσ4.a’%a&YA5’ U“g/δU£LέmΧ›’PΝdEή°‰δcfž₯³ (sΧζ˜9¬9†­ΊAΆΊS]OΗΠωΞ4ΧžΒ]@>!ήΨήUΫ*Γ Œ’4Η€βΆBξY,Ύϊ^Ο<™%–20¨ XΪΐ…₯­mφ› ­ŠΟm<9qYχζ.ΟG<ΉYŸ^³Θp{ ΒQ€|•ω):t>α8Ξ―&Η±n ΞΜΩΊπ`ӝζH©EθΚΫΩΆcΫξόώ>Ώ₯Σ>DzΨxΦ½UBW1΄ΰΩ€|ΥǏς“ŸX«Œ ΖdίGθ4κCΩ O/ί»…οτT·ωρϊά›―μγδϋ^΅Jλ6<ΨΧKk›π\@Ύ€‰ŠψτιS?ωΟG₯ޞŸρΟραψOίcκΨw\ŸΞM}~ωΨύ~Ž{<χ;gεzF€|Ο"4βX€ης=LT@w²_ί;·~μώˆνg θζλ19ŽWφ»λwtΡ^žΗZkζΐίγ5ΎkqόPoζΛάϋEί6MΣ0m/˜νH@’θϊ˜?7Ξ­e?ϋΨξΓ ό;0°ΜλCγΪςC$>·yζόnώ»2πn£ΘšξΝ―wϊΗ+’–BΣν»#§™ΏΗi‚i6Έχ­αΪΌλ‡σ…O@”οAεϋΕΣέΜeFlκn8B‡7^ψ/d^ύ"ˆ§=εVΆ’‡Ώ ΜvΔΧI³›ζΟ½ύςΏ7Γ/žVζσΟ=g‘ΊΉw±4Μ‹u;D€‡›§I»Ά{ήι)ӊΊΛt]zΚΙε7Β=Š; A0Ν¨UόΞž=APΎGA”οO·ZωžΌΙΓ›9αwW3C ΥnH˜cΏΚ¬OYžίΤΕsUœHΆ#žN’˜«BΨέςή~ΕΦξα‘ΆΟ ηΛ/v koŸ’Σή[œDΨ©}Aώυξ/3£8ΎϊzŽΡΎΚΌς½7ώΆιΝΟ«05TόN[Ό3‘Ε!žfΒdΫ°Šΰά=φAωAA*ί£|oΤ‹™L6zκ†??95OΙ©Š~”οΕsUœFΆ#ΎQΜf²ΡSlBΌέ’–οWjGΫρτ³ΥψΉς^?AAωε{)w=ή_{@SΙχχΞλεζ‹θωtΤ‡κͺ΄3”οώtί£ίiEΟ†1Εαή=8χAω~ρ ‚ (ί£|Ώ9΅ͺ‡‹AωswKοe-H69”φ&π[ΚχSω~ΧϋΛ”tΏ&Λχ¨«cCPΎGθέAR€ς=‚ ‚ |ο»–ΚRsˆ$«]Σ ¨Λ ½τ:p»š22–ΝπθΦΌ!O-ΈVW5,IŠa{lω:ς|ΊUC“λ΅υ&Υ/˜Y†.KO£“T­λxΑΜΨ\SΞe†δΚ²λΉη3ϋ¦k*€gI7ΪΖs’ηDώ‘‡(Z׊·š¬Χ£aΚJdfΒ09πCn’!5Α±aΉh ρ‡’ί Ί&ΓΦ 6ΏΟωΝ8πΜh4ŠAtαό²xς΄ΐ†ΚvϋχΙǁcv‰CUΝrό_Άζ:OŸΠb―-]!Α­Υ%EOθΔχž>$D|Ψζ΄7<ϊπC‰œ"«t ς/mΎ–! 
κ(Yμΰ©εiw§΄πaW&Ύv˜xΑΌwKvXy'( ˆ°5&Ώψ¦ωΌΐ:81μ–|ڐO[ͺ}β 9ξ©§Ϋ;G[;GΫ²₯±nt|8Τ­–b΅UkΟpϋΓ_ΒΉK&7ξνά]έ?GζΊ»ΚiC:n(Φ!\lψh™“_>wξΐ½nI½΅Z'z6vŽχΜ;v_}ΗiΙΗΡΌμ€νNΰγΡψ·"§IΗmν†Ρ‚|ίΎx·Ρ|‡ξ•NΫΊsυμB~<ΡΟ6’© ŸήΆbΫχtb<ΨN4;G8ko0ΚΔΑη·ΦHο†ι_9~ΉπΜΣ§BΥKk§Wž?jδϋRoMω°gmKQ’DQ»8!Qϊχ7ήC?ΰBτςΊ­œEΉΤΦH:œΊ*™ΰΓΝ¨ρ0π5νl”ΐΗ-ΝΉI5 Ή—ά =θDΝF–½›gƒ+g4Ah$v_θS˜¦"ΊΙNI†4Ž¨Ι¦1ƒΟΓ…pFVt"Ήb«Ρ‹FΨPΞv{Χ7Az3Q?@Rέ܏&ιέiόnΌ@šρ―JFΪw3Ή7ƒz‘™·ƒ§WΑcτΜΥ°RΌΑδΟθƒŒη&ΰ]5μέƒήψχpξ δήβAPΎGA”ο=³ZΜgh²…ΊnOŒjΔ€P—j”u^κΊ t«Δ΄R―•ς³¦eΙε«}_νzSc”#;Š\M‹) ¦RΝe(”λv@\όλπΥj1Ι7•›T_’ͺjΝέΏlωγΜK†Η0Ή’ΰŒ\±fόaM?rΡβ%ȏc@"uΑ†Οη‘<ƒv‘©Hpϊ^΅'π,Λϊν¬žbœRεψε-ε˜ητ=TWψjννμΝtχzΗx—+of‡ρNΎž‘}Ϊ_ΏΣ£}ρώ@sϋΊθΘϋc3iΧγu›CεηP«’žWk=+ CΣz·ΎGKkΪ¬{ϋηg―—ιf[€Λδ‹({3ε§_ΒE‘Pπžx’Π„‘Ηε›:τΨ^}±τMΛo;7\Qxh¬Λ3 Ύ\?j|αΣUΑ½ς†DG³½έσJΤΐoςξωε»qBώ§σιΆΗa}σγκω…ωA›‰ΐ4aΉΔiά»kŸΫ΄δΫu‘“‡”f~ Ά‘έzYΙ ΈD(ΝψW%#m^¬_FΜ³ΜL#ΛίϊYVεόό[A©χ>,}±9W {χ χώ=œs‚Ή·x”οAε{'’Θ4M?β$—,Γ²:ͺΌnυ¦…šΛο©7^c4V'# m3Γ (ω#Α+—$HΉΙe|άϊ*K “­ V_‚EΩ­³ΒšΞ/db·Ζθi΅ισ‡5ύΘ…Rˆ?”fPϋ;Ω„θ­!ΏΟCC2Ώžaς½`ςΆ@Γ^wLJ²ύόΜ΅όx|²3“°κxέϋ΅—²0ς„ABŽcμΑ€-ί /mhΩν²FΞ/Ύ@ΐΨΰκZψΒƒA’Νi·δ+ηδ{Fυ•ΫwcUhmJΜνμPϊ<ς.dί]ρ KΗΩ2  φz KΆ|ΣIύ΄ΆΟ'λdγ#ΓRjKΊ/Ž9ΓΐΦ~°όιΈp[DœJ2ώ ήrΊχ4K΄‘=όφ'-ί³€δ{{)I€»‚Ύž}ή)·Π{όf˜‘δ’οKΰήδ§€ϋ©£FM™!ίψnŽ…ΛVΓ cΈΦΨ₯2c"©ΰ•ήaXn˜N3¨KΞXΛ_/ΐuκ«AγίLΔάΫΘΎΝ ¦YϊU ΘχW=VD`!ΟΓԎί1δϋυ ήUΓά=θ=Ap§'(ž{‚ς=‚ ‚ςύ€“ς„\]³RΝ@+ηα΄ζδ-P¨h–γΎm(%xJΦDΎ²•¦ζϊΎοΩJ­D 4ς}`¬Nυ«[n₯AΏ…¦aT·Όΰ©HƒΦ,O΄S’ΈΩΆΥmζˆ₯dΩ–εx‰#±šEΠ°₯ξ°—α”Ι SPΝ(/_ͺw£F}W«A γcͺ^57n³©»Αθ•έέ,dI“e•/³Ξ¨HΊλAΤK½—ΆωΓΚ?rρβ%oΔ μςuΥπ";Ο–HΗι}.š00ΑeΩUI·<Ο5΅zž’οs•Ώω>_SΊŽλEn/fαόδ₯±έjέ §°€βδHϊ¬—ς•3lr΄9ΐλMΝε•οs³ς=xΊΦlJ*-- .mhy&Qσe₯kΉΣ…#ν0ςTρβνίξ*²2¦)0Υ‘op»%WXΕ2MΌφ}p·½6‘hχNBΒΰόdΈΟ½Žσ0-ύh,5ΒYΘ~οηD7ΩОŠ‘<ΪζΕ»·Τ1ΜΰΔΎεoΫΊ{γω‡Zοε_•ο_­:WηΦΜΙΦ•Ϊ™εϊ'½ΣΜςμN¦dχwε¨nΟΐ2ΘΩX¦pά.֎Ά$λŠΧ9 ΰTυηή‰χψTΨawgΧ·\nΖΆΧ2nϋAδή³ μϋa@Ήχνζ †Ρρn[5˜rQσ9δ{β‡ΡΡWί²ένΟ€ήΛVΟ΅ΞέηͺI\Ω·'½£%2―γΫ=q(Q²½‘:Wήΰκ2$rWŸξ…x(3_Ž£Ύœλ HfήΣχ`ώ`χό?Κψ , ή¨Ρ:μΛO?’Zν#7Ι™ΫŸZdλpŠιϋXα= I½ΜΤdχΞύAπ&ϋώ¨Οo>nŒ…5šΛΐ»…ύ„œΤŽx(?ϋnΗ~*ϊtc_ί·ΘΧ,~3Q?@RΑΦΞ£ϊ*ώnν;5rα4γ\•Œ΄1ύXDήξ·zξ•{§ΑQqXΘσς΅cτνΛ–|ΦRΰΩΥΞ†3…Iέq―ΦξAο ό{8ηΕsAPΎGAPΎ_f“PeΥ‘ΟΜ‚γlَ˷…‰¦Ψ ίƒL΅Υ‰ͺoσΙχ–Tšωz€W‡ς[`Ž›.;a ­B¦’{³cΞW»Ο 
šVi2/˜Ε½šαΟh|…MmΪvςύF±iΖ·M/Φ¨1n‘β†Οa/$+Q-ΤΊόaεΉx q‡’ΧΜ7›“ΟΘ΄je\[₯€πϋ\,4‘-—2γ^Ν \­“οEs•·…τς=ψΣ‰Ml,«a^VΙΐ‰ψ ^­BvάKe+Œ‘OP”ω>ΏΙΔΏ΄Ω-C’ζ*±(ΨJ>&ί§‹ h?0!؍Wk$…~ϋnΙVρLcιέ νXΫ7υ¬|’Ÿ9h<Ψϊ8ΡΣ/b :Ά‡ZΜαΞ7’z¨wIg‡A‘·δ½±vΏwΫ .^₯—ο3 '¦άΑiτ;x―ήΓ]ˆξ TΤΉη΄Όf f₯Ÿ·?¬ Lλ†’7γφgf»». 8£/Ώο\=3ΰٍ»w$Œ±7‘?ψε{ϊŠΞϋ_ζΫεψςΙΛ0αLρ7mΊοb¬?’| zgDαφέdϊ5=Y~ω~β4‹žό8€7j΄[T!ӝy₯Œ'8œ5Œν=£ZΐΏO€=ϊ‹:βd―Yί<›Ιm7œ¦½NΤνŸΟβa•CœqƒWόf"~ ’κU| ·>KγJ\Χβi–jU2"ςζ –{φΩKJΎŸƒΈ. ΎΡaΞΤ.ΣοuτξAν {8k‚ΒΉ‡ (ί#‚ ‹Oί[Z½ύK²‚ψ{SΚ>#ί—;IΈ„sΦ ίg+-œeαt'‡|οUg gA€/ƒ|ϊšT―Χk’fΟLQ*f)AJΧ"§ϋ…YPεΌΑZ―h΄|F[)5u?¦‡Cž»ρ•=$[΁BΗVώ‘‹§g(9Ν ϊvR© £: ·ΟBCWx§~3ΟUv "ς=ŸάjžγD?EΟ)]9' D<TπRςrιε{θ fΆ s©>Ϋ½όρβ|XRvͺ| dοoή-ωΓ*ši‚ΥfHρΟ0k(7Ό<Ρο€NM«Ζ·£²Ξ΅ήZγψ$ΎτΝγ1΅ˆUΘϋ°ρ-₯|Ώw˜ψ~δ-Ί–ϊ Qω‘Ξ8­«Β /©IžΖ9Ο2Ψ•zΞΆ6c3 ±“δ{¨X}}›:NϋPšΈWχ& ―ς½Η—o±8B_―%—Ί3Nΰ½3’Π7:t(αŒyzω~EΉ₯…Wp/Τ`Κτ‘]Ϊ™0`˜#ˆ­πλ ό °τμ|ͺ™$=8Ν °RRͺ;―`π±]heΗ}ξt §3ΌU•ΫLΠ±ŽΎwZƒcα4γ_•Œ΄F6 p­ͺΟΛςύΰL-ίϋ¬YPs„bά5e €2+ΕJ]V Λε―t”ΫΤ~SXωG.Ϊw(9Νόζκδ°{]UPT₯–… ό>g›±q*ΉgH`6ΑΒΉΚΡ‚|_νzŒ5Λ@-“ξW›&œO‡’λμ¦θM&₯|gY,mhyv.~r#0rώxρ‚φ5ΟΥ©+kν9ξ–όa埠Έ|ΟU‚Όoφ&Κώ†b΅δΣΖψi©ΦΖηȚΐ£e:»κΩv£S\ίύ±MΥΓΉϋZ-₯ώ˜Gός=œSζyεΰ~b/7ρJ·)ΰœdήmGΏhΙΗk_Όϋόύρ-ί'kΈ}£KJL„vΐω οI €3Υ;ͺ–:Jψy4«ΥΰP3ΔΞPnaδgΤSf9³½&‘cΛ;Χpΰjˆ3ΰ_€0YΦέ fΰdrΔΎΧVΟ°ΞΪJη”F™΄ ΞΟ|:ؐ­ΞΉόfb~ˆ%Uη†τTiF“rURiC7rŸ₯°ηβωώώjϋΌωa₯ίλψε{ώ=œ‚"Ή‡ ‹—οAε{CζΨ—m2„ΈΈ F³e_½šέη—ς½o¬fΈΟ¨²¦’JΎ7κ«π'ΏΖGιh΄7G6M“Νo6§NaH0lΡ°ς\΄/ΎPr˜|ŸΝ0ωžΫη‘ =}Ό6$!ςM8Wω[`ΐ^wιDU]-ϋ*Θ«ΰ|:ΚPY ;J/ίΓ6Ώgif‰Lj%QΗή‘ ώxρ‚9kg3Ustož»₯XXη ί'ˆqƒ``υN‰h mp^3Ϋ‘.ύΜ³_Žυ›Qt…CΎy7εϋNγ(Pά΄™Φ9 <§τQJό %ίƒxM; Ύœ Τ}fȈ‹’ο‘/ωžΩ½/νρ ρ»0θυ0ΩvςUŸluΥ»x .εcΚΙΞ„Sή°6ήSα_€χ<§Ε9Μ@Όnq,"Lœ‹L’Αr»ΈsA"Λk&ΰ*©Ψͺ.š%σ[Vep½5¦’޽ΓGζεfJίAψώF»‚ΥπΘχβ{8m){²PωAAPΎ% Θζς…bΉΪ¬—Ÿ;3.;‰ς=œ±…ΪE™2 δ©­¬Ί2"i9"Ω³π»ΤLr…Υb΅ή¬δŠ|―oζ·ϊς=y­4K«ω ΝjӁ!eΛͺ@Xδ{Ύ8CΙq_‚«l³Ο₯`ΧηφΉ@h@Ύ§L:ίDs•έΒ"δ{‚]Ξ/(QžšƒΫ2ΐKp’}Aς½Ψ»„Ÿ­@Υ`δόρ—οa›…ΥΡ4ύΉξ–TX,ί34¦Ύ/GW¨_,·’Ÿa‘λp’§ΐΣZz+Ώ^ϋΉΥΨ§ε{ZΙ‚qΞ[Ύ1.vΈόΐ-ίσ;‡-κA;o€WΏ—Gή¦‘ο]’//­[ΰ^°ό§Κχ ΘΓ΅Ÿ ‰œ€h,@Γ›cΝΑτ‘Ϋ†“,ίΓωwώ¨₯‘ο‘”ςΈ~Žum-AE~ά ¦u₯κiΨfΐ€άΑ;|¨ό?Χξ!ܚr΄2u žGύίLΜόͺnΚ4£[•°]$!‡wΰ#σς#₯Si­)½Žή=ΔχpΎ δ‚,HΎGA”ο©[gW7%Λυ¦²|²|_θϊΌΕs Υpρg3G£ΟκjϊP2jpb3zΠj]:ωήΥ6‰gκF$”ϋΑί±"Ή8vV²φν 
ω*ΥTΰv΅!†γσ‡•δβ)ΔJN3orΰΏκ$~ŸσšYL<φΗΑβΉΚnaqς=4R”mG)M_§ρU#Α‘V.Ƙ£|/Ό΄Α-PΊ0Fž2^βς½Q&#\'Nρ›wKώ°.VΎ‡bξpT귝>ίU₯Ρ“ωr|β>ΠΝ}$Έ~M„:}mν{ά›$πqDMPΎ‡Ό–om…€β«Ι β 2D¦έrs~Ή«]μFσβ5{(½‘neΰ$Ψη—­Ÿ™7 nŸ?ς›‰ω_ΥO3ΑUΙ΅ ΐGζγFJwίιΪkό{Ώ|/Ύ‡³'({²@ωAAPΎw”άXδqGPγ§οk†Ÿtšt:8}_¨ϋϞϊ,t}TΪΝΩRθn…¨BYΝ Υ ±+«ρ©@ I‚Tυω«kυΝX%Α€πzIΆS‰ΰΎ)εsΉ\6_οΞϊuΈ‚©+7)e?[ΡψΓΚ?rρβ%§d©΄Γͺδsωθ)I>·ΟEB·wΒ°n-vͺx²[X |X‰Πj₯Bͺee; †'ίι2Ω²υLxΘvωώ7-mw<ϊκΪ@.fΑ™iβ%.ί»Z53¦9γwο–όa]°|OΫΐ \Έρ8‘–ή΄£gxΦ9["–ϋ6ϋ $\ΐHe…κδs•οoτ±%1£JΦp'£M~η27?$ŽνJƒί!½ϊj οuτξ!Ύ‡σNP χd!ς=‚ ‚ |š‰θλΞ¨ŸΉ±ΐUν‚°2~3Άε‘-(ϋ ίƒ” %@Ωη“‘6 T§΅l_*f)[©L΄,ݟ€²%εYAΣΣ 0 ‡€ΰ„x €ΓUG)Ζ5bf JlšξD;hXωG.žBά‘δ5s”r†P4όΉ|άΒͺpϋ\$4Ϊ€Φxb‡SΈj>v €xς·0oωžΖΔ'UPE'½ ί;3Ί9™ΘΔϊΨ₯ΕΊ1sš[DΎ]ΪtΔ ±Ή8κ0‘ΐ™άρ—ο؍£4–ΓDζ°[ς‡5m¦₯0.ς}x;Φιš+u|8P aλ|@*/GΟςΑՌ~7–œ^~½$rσ—φX ;°bΆΔη,ί‡\δ[Τξ() 3ΪδwΞ³UG>΅%-K9€1άΗυeκΞ€}Ί/r²7ΤΧH₯{FΥzκt1Π^_7ϊ8π3ς½ΤrxεϋŸΟζ)ίCα θωΗ~!Νψεϋ‰Σb ξ*ϋόQcN™νLtR›τς*°ΰ_€PV…Κ₯›xω,N3[ΩOό{η>>ΗΪΈ…Ηƒ)ΙuiΝ γ΄>6Κi&ξ~UW<ΝψW%#m ―χ±Fl5ςxf^~ Ζϋ‰Χ©ψ^Gοβ{8Σδήΰ€gο Ÿλ~ˆ ‹’οAε{(oB(lʎηϋžΣUκ…l(T-/ + ΘeW75ӎ¬M΅–ΛΊ—}³#ۊ€E-{ŽY'Ϊ=ύ\2"”g޲k»žηθΣ9l8|έ­ΒTd#Ϋ°ςI½}¨jV5Έ‰qSΥ Λ₯ϋυpDEκzAΰ{©5'os5΅ξΕ|n{Γž£ΥΛ<Βk·3Ϊ”»C_XzV4‚TaεΉh ρ‡’Χ,tΰUΆ¨NQ|šeψtέπω}.šΠΥ ΏseΝrƒΐ·» xR8Wω[X€|šΝbfŠ’lΡwς•nδeίw(j°ͺTw’ƒW ΊjYfW“ŠΔΛς½Ψ\u*β%₯kZ†^―¬’7ΰLξx ΚχU‚M:šB΅V݌S©6υ`»%Xω'H"•ƒί Κχp.j8DΨκD^ifΎžΩή`<jG―fjCαζ«/§‘Yί»Σ”ή+’ΙιΔ{|ΒΰBΕ7ϋ»ηώθξάchsΎς=ΘΦD“.o‚ΗΎηGσšΜbiέbΆΙοœdβ7©Κ γΆ?,Τ°±.O—Z.iξτε‰ΔΌίκΉW^ΤWoβΙŸOαg`y ΩƒϋΑ•s½΅uB?=…•†ur~­iΗ―‰ΐ73kpΪΛυ£½žsZ-…7ΙyK΅;ηώδ{ΒξΊ…;v.ξ£PήnƒSΙχ­Ρ±χ»fއC΅³ΩΫ맍Zς”ΩΞ„Š@Ω Ω€Y€p`œΌό‘Ωώw·'D‚/œ£η7 ο>LzYώή2ξΑγε™;œχbƒ΄Fƒxw»ύ˜†Λi&ξ~ΩZ<ΝψW%#m\kjηάkυO {cύuσœό@-pWHܟφ̟k_ΏL?₯{rΥΠ»‡ψΞ;Aώάσμ₯ΏAA”οA”οK –s™_“ΣΌ'a%Λ°Qμd_ΆiYI%#:Pb8 ν(\3!JτPη²›n’¨X•£­ θ£~Ν₯”<ž‚D,2L8JΜVώ‘‹§w(ΉΝ|Kfτ]¨Α°9}.šΠ’Λ €'…s5U βς}5΅Λ™P’6}ρN¦’X`κι« SHZκκ`όK›£e‹5p&ΌΈAm;0T ώόΨ-ωΓΚ?A0†]ˆCNiž!ί73Pk{ΠX݊zΎ«Η6˜1ι©χyŸaΖ!ίSχί¦}ΐ)Τ„ηΝΑ $yB›άΞα(xΒx |3„&ωYήλάΣgZ“ŸŠ;F³0k¨( νh¬/lWbΓϋqΕ%©ο¨x1£Έο‘$ΝςΘχ-FήΆμΗΤQcL™εLPiα1›τ Π†‚'πΠgν9Νϊη§ ŸΌͺΩ³Υ± 1f,/ωΝΔύ@§CΆO3ώUΙH›F#π‘ωψ¬ΚδΦ>ͺIή=χpώ ςηύ©‚ς=‚ ‚ςύBq%r@Θ*]/€& ߏT”9τ δ˚νΣ²o΄9 ”κZ“^j]Ό©ί΄iΚΠά•*;!ΰv›yZ‡“Œ`ͺζLΆB4 
SΔΟπΖϋœ©Σά@‘,Ή‰emV›>um)%ώZ½”(­•‡gc‰'oR-J]'}XωG.žBό‘δ1ƒ¨W ΩαQ6ΐ†Χη‘- –£F-λj) žΟΥ”-ΠPΩo²T%w―^€;Z9π›γΘe‹2ε42xXT@AκΊaœΐV ΐΜ”JΉ'‡zγd%σ)Τ½ ζkΩ·΄Κj.ήBSWακΪTOΏ cƒ?Λ9μ–|aMŸiΡ‘³ œΧ=d—8 οπΤvΰΘ-yF‡”;^,ΪΫkνY9ζύζ=’* ±΄Ξ₯w7tke™‘ΠA½ι oO|ί›y*αύžhΦŽγΏ“ŸΏΎ Nη°ΉκQϊ—τArd"£¦Φν(4[[£f;mι€κληIΠ2™rμ‘·{ώlΤν³Μ2mv½ύYΝΊsΈ.Ύ§…Βd僙#όRJ₯EUξ~ΨxΙIβ>ηˆBp»φ ΔΚΙαΩvγ;;ηιγΕΝ·ϋ»φΰ/E κΟ¬A)mgΓMΪωt:9I·oΜSˆ;\sz Z;‡ΰ^~xί’Υα’μΜΘ뻍=0€§υnΌΜe&ξ‡ϋΫ}‡6Θ΅βi–nU&₯ Π?ΏψπQŠoYG{*\]›ή—Ό~€Uά\3ΘY,XώU“Έ{Π{Ξ?Aώάƒ6α 6AωAAω~ΡxΆ‘*CTM·κ€”aΩn‚|ί–gqΊωDΧr !ο‘θΉcuǍw£ΆlCΧFθ]ΣMnΚ3t›nX^8ΖwM#š‰εϊΨϊmێγxπ2ί1UYŠhFΘͺ)4 hTWεz­©hJ³ήT΄.̈ƒΐ΅u8Γ΄έΏΦa ρ‡’ί zι*²¬ι‰{ ΰsn3FlΊΓ WˆfXΤGEs5] KYκ"SίΤUiΌͺTέdψΨ΅Ν>Δ0mρ΅7₯ψž;„ΈάUΛ ί/>^σΛ@‘°ώνΈΏΣ΄³†r±«ž΅4ϋΔyH΄Ί±ΆrΦRΞΪC›θVηΧ‡ζυ‰νβ™΅ΩR/’μn?\}ηΊ-oKΗ[;G[²ucw'έj fνξ ΄ιΧ‡ηnT¦#‘;ύbδgK;g$ΡΰP·Ά₯hvGΡμΪ:«Fσ•}­ιφžnwΜΫd3°Ό΅μ;Ϋ}`zŠLΑ²omηξ¦0GΞ/[ςΩf΅TϋΔMGο΅ΰ)šV‹ΈχN
foo
foo
bar
bar
winapi

features:
β€’ fileapi
β€’ handleapi
β€’ std
β€’ winnt
winapi...
fileapi, handleapi
fileapi, handleapi
std, winnt
std, winnt
my-package
my-package
Viewer does not support full SVG 1.1
cargo-0.66.0/src/doc/src/index.md000066400000000000000000000034711432416201200164520ustar00rootroot00000000000000# The Cargo Book ![Cargo Logo](images/Cargo-Logo-Small.png) Cargo is the [Rust] [*package manager*][def-package-manager]. Cargo downloads your Rust [package][def-package]'s dependencies, compiles your packages, makes distributable packages, and uploads them to [crates.io], the Rust community’s [*package registry*][def-package-registry]. You can contribute to this book on [GitHub]. ### Sections **[Getting Started](getting-started/index.md)** To get started with Cargo, install Cargo (and Rust) and set up your first [*crate*][def-crate]. **[Cargo Guide](guide/index.md)** The guide will give you all you need to know about how to use Cargo to develop Rust packages. **[Cargo Reference](reference/index.md)** The reference covers the details of various areas of Cargo. **[Cargo Commands](commands/index.md)** The commands will let you interact with Cargo using its command-line interface. **[Frequently Asked Questions](faq.md)** **Appendices:** * [Glossary](appendix/glossary.md) * [Git Authentication](appendix/git-authentication.md) **Other Documentation:** * [Changelog](https://github.com/rust-lang/cargo/blob/master/CHANGELOG.md) β€” Detailed notes about changes in Cargo in each release. * [Rust documentation website](https://doc.rust-lang.org/) β€” Links to official Rust documentation and tools. 
[def-crate]: ./appendix/glossary.md#crate '"crate" (glossary entry)' [def-package]: ./appendix/glossary.md#package '"package" (glossary entry)' [def-package-manager]: ./appendix/glossary.md#package-manager '"package manager" (glossary entry)' [def-package-registry]: ./appendix/glossary.md#package-registry '"package registry" (glossary entry)' [rust]: https://www.rust-lang.org/ [crates.io]: https://crates.io/ [GitHub]: https://github.com/rust-lang/cargo/tree/master/src/doc cargo-0.66.0/src/doc/src/reference/000077500000000000000000000000001432416201200167525ustar00rootroot00000000000000cargo-0.66.0/src/doc/src/reference/build-script-examples.md000066400000000000000000000406351432416201200235210ustar00rootroot00000000000000## Build Script Examples The following sections illustrate some examples of writing build scripts. Some common build script functionality can be found via crates on [crates.io]. Check out the [`build-dependencies` keyword](https://crates.io/keywords/build-dependencies) to see what is available. The following is a sample of some popular crates[^†]: * [`bindgen`](https://crates.io/crates/bindgen) β€” Automatically generate Rust FFI bindings to C libraries. * [`cc`](https://crates.io/crates/cc) β€” Compiles C/C++/assembly. * [`pkg-config`](https://crates.io/crates/pkg-config) β€” Detect system libraries using the `pkg-config` utility. * [`cmake`](https://crates.io/crates/cmake) β€” Runs the `cmake` build tool to build a native library. * [`autocfg`](https://crates.io/crates/autocfg), [`rustc_version`](https://crates.io/crates/rustc_version), [`version_check`](https://crates.io/crates/version_check) β€” These crates provide ways to implement conditional compilation based on the current `rustc` such as the version of the compiler. [^†]: This list is not an endorsement. Evaluate your dependencies to see which is right for your project. ### Code generation Some Cargo packages need to have code generated just before they are compiled for various reasons. 
Here we’ll walk through a simple example which generates a library call as part of the build script. First, let’s take a look at the directory structure of this package: ```text . β”œβ”€β”€ Cargo.toml β”œβ”€β”€ build.rs └── src └── main.rs 1 directory, 3 files ``` Here we can see that we have a `build.rs` build script and our binary in `main.rs`. This package has a basic manifest: ```toml # Cargo.toml [package] name = "hello-from-generated-code" version = "0.1.0" ``` Let’s see what’s inside the build script: ```rust,no_run // build.rs use std::env; use std::fs; use std::path::Path; fn main() { let out_dir = env::var_os("OUT_DIR").unwrap(); let dest_path = Path::new(&out_dir).join("hello.rs"); fs::write( &dest_path, "pub fn message() -> &'static str { \"Hello, World!\" } " ).unwrap(); println!("cargo:rerun-if-changed=build.rs"); } ``` There’s a couple of points of note here: * The script uses the `OUT_DIR` environment variable to discover where the output files should be located. It can use the process’ current working directory to find where the input files should be located, but in this case we don’t have any input files. * In general, build scripts should not modify any files outside of `OUT_DIR`. It may seem fine on the first blush, but it does cause problems when you use such crate as a dependency, because there's an *implicit* invariant that sources in `.cargo/registry` should be immutable. `cargo` won't allow such scripts when packaging. * This script is relatively simple as it just writes out a small generated file. One could imagine that other more fanciful operations could take place such as generating a Rust module from a C header file or another language definition, for example. * The [`rerun-if-changed` instruction](build-scripts.md#rerun-if-changed) tells Cargo that the build script only needs to re-run if the build script itself changes. Without this line, Cargo will automatically run the build script if any file in the package changes. 
If your code generation uses some input files, this is where you would print a list of each of those files. Next, let’s peek at the library itself: ```rust,ignore // src/main.rs include!(concat!(env!("OUT_DIR"), "/hello.rs")); fn main() { println!("{}", message()); } ``` This is where the real magic happens. The library is using the rustc-defined [`include!` macro][include-macro] in combination with the [`concat!`][concat-macro] and [`env!`][env-macro] macros to include the generated file (`hello.rs`) into the crate’s compilation. Using the structure shown here, crates can include any number of generated files from the build script itself. [include-macro]: ../../std/macro.include.html [concat-macro]: ../../std/macro.concat.html [env-macro]: ../../std/macro.env.html ### Building a native library Sometimes it’s necessary to build some native C or C++ code as part of a package. This is another excellent use case of leveraging the build script to build a native library before the Rust crate itself. As an example, we’ll create a Rust library which calls into C to print β€œHello, World!”. Like above, let’s first take a look at the package layout: ```text . β”œβ”€β”€ Cargo.toml β”œβ”€β”€ build.rs └── src β”œβ”€β”€ hello.c └── main.rs 1 directory, 4 files ``` Pretty similar to before! Next, the manifest: ```toml # Cargo.toml [package] name = "hello-world-from-c" version = "0.1.0" edition = "2021" ``` For now we’re not going to use any build dependencies, so let’s take a look at the build script now: ```rust,no_run // build.rs use std::process::Command; use std::env; use std::path::Path; fn main() { let out_dir = env::var("OUT_DIR").unwrap(); // Note that there are a number of downsides to this approach, the comments // below detail how to improve the portability of these commands. 
Command::new("gcc").args(&["src/hello.c", "-c", "-fPIC", "-o"]) .arg(&format!("{}/hello.o", out_dir)) .status().unwrap(); Command::new("ar").args(&["crus", "libhello.a", "hello.o"]) .current_dir(&Path::new(&out_dir)) .status().unwrap(); println!("cargo:rustc-link-search=native={}", out_dir); println!("cargo:rustc-link-lib=static=hello"); println!("cargo:rerun-if-changed=src/hello.c"); } ``` This build script starts out by compiling our C file into an object file (by invoking `gcc`) and then converting this object file into a static library (by invoking `ar`). The final step is feedback to Cargo itself to say that our output was in `out_dir` and the compiler should link the crate to `libhello.a` statically via the `-l static=hello` flag. Note that there are a number of drawbacks to this hard-coded approach: * The `gcc` command itself is not portable across platforms. For example it’s unlikely that Windows platforms have `gcc`, and not even all Unix platforms may have `gcc`. The `ar` command is also in a similar situation. * These commands do not take cross-compilation into account. If we’re cross compiling for a platform such as Android it’s unlikely that `gcc` will produce an ARM executable. Not to fear, though, this is where a `build-dependencies` entry would help! The Cargo ecosystem has a number of packages to make this sort of task much easier, portable, and standardized. Let's try the [`cc` crate](https://crates.io/crates/cc) from [crates.io]. First, add it to the `build-dependencies` in `Cargo.toml`: ```toml [build-dependencies] cc = "1.0" ``` And rewrite the build script to use this crate: ```rust,ignore // build.rs fn main() { cc::Build::new() .file("src/hello.c") .compile("hello"); println!("cargo:rerun-if-changed=src/hello.c"); } ``` The [`cc` crate] abstracts a range of build script requirements for C code: * It invokes the appropriate compiler (MSVC for windows, `gcc` for MinGW, `cc` for Unix platforms, etc.). 
* It takes the `TARGET` variable into account by passing appropriate flags to the compiler being used. * Other environment variables, such as `OPT_LEVEL`, `DEBUG`, etc., are all handled automatically. * The stdout output and `OUT_DIR` locations are also handled by the `cc` library. Here we can start to see some of the major benefits of farming as much functionality as possible out to common build dependencies rather than duplicating logic across all build scripts! Back to the case study though, let’s take a quick look at the contents of the `src` directory: ```c // src/hello.c #include <stdio.h> void hello() { printf("Hello, World!\n"); } ``` ```rust,ignore // src/main.rs // Note the lack of the `#[link]` attribute. We’re delegating the responsibility // of selecting what to link over to the build script rather than hard-coding // it in the source file. extern { fn hello(); } fn main() { unsafe { hello(); } } ``` And there we go! This should complete our example of building some C code from a Cargo package using the build script itself. This also shows why using a build dependency can be crucial in many situations and even much more concise! We’ve also seen a brief example of how a build script can use a crate as a dependency purely for the build process and not for the crate itself at runtime. [`cc` crate]: https://crates.io/crates/cc ### Linking to system libraries This example demonstrates how to link a system library and how the build script is used to support this use case. Quite frequently a Rust crate wants to link to a native library provided on the system to bind its functionality or just use it as part of an implementation detail. This is quite a nuanced problem when it comes to performing this in a platform-agnostic fashion. It is best, if possible, to farm out as much of this as possible to make this as easy as possible for consumers. For this example, we will be creating a binding to the system's zlib library. 
This is a library that is commonly found on most Unix-like systems that provides data compression. This is already wrapped up in the [`libz-sys` crate], but for this example, we'll do an extremely simplified version. Check out [the source code][libz-source] for the full example. To make it easy to find the location of the library, we will use the [`pkg-config` crate]. This crate uses the system's `pkg-config` utility to discover information about a library. It will automatically tell Cargo what is needed to link the library. This will likely only work on Unix-like systems with `pkg-config` installed. Let's start by setting up the manifest: ```toml # Cargo.toml [package] name = "libz-sys" version = "0.1.0" edition = "2021" links = "z" [build-dependencies] pkg-config = "0.3.16" ``` Take note that we included the `links` key in the `package` table. This tells Cargo that we are linking to the `libz` library. See ["Using another sys crate"](#using-another-sys-crate) for an example that will leverage this. The build script is fairly simple: ```rust,ignore // build.rs fn main() { pkg_config::Config::new().probe("zlib").unwrap(); println!("cargo:rerun-if-changed=build.rs"); } ``` Let's round out the example with a basic FFI binding: ```rust,ignore // src/lib.rs use std::os::raw::{c_uint, c_ulong}; extern "C" { pub fn crc32(crc: c_ulong, buf: *const u8, len: c_uint) -> c_ulong; } #[test] fn test_crc32() { let s = "hello"; unsafe { assert_eq!(crc32(0, s.as_ptr(), s.len() as c_uint), 0x3610a686); } } ``` Run `cargo build -vv` to see the output from the build script. On a system with `libz` already installed, it may look something like this: ```text [libz-sys 0.1.0] cargo:rustc-link-search=native=/usr/lib [libz-sys 0.1.0] cargo:rustc-link-lib=z [libz-sys 0.1.0] cargo:rerun-if-changed=build.rs ``` Nice! `pkg-config` did all the work of finding the library and telling Cargo where it is. 
It is not unusual for packages to include the source for the library, and build it statically if it is not found on the system, or if a feature or environment variable is set. For example, the real [`libz-sys` crate] checks the environment variable `LIBZ_SYS_STATIC` or the `static` feature to build it from source instead of using the system library. Check out [the source][libz-source] for a more complete example. [`libz-sys` crate]: https://crates.io/crates/libz-sys [`pkg-config` crate]: https://crates.io/crates/pkg-config [libz-source]: https://github.com/rust-lang/libz-sys ### Using another `sys` crate When using the `links` key, crates may set metadata that can be read by other crates that depend on it. This provides a mechanism to communicate information between crates. In this example, we'll be creating a C library that makes use of zlib from the real [`libz-sys` crate]. If you have a C library that depends on zlib, you can leverage the [`libz-sys` crate] to automatically find it or build it. This is great for cross-platform support, such as Windows where zlib is not usually installed. `libz-sys` [sets the `include` metadata](https://github.com/rust-lang/libz-sys/blob/3c594e677c79584500da673f918c4d2101ac97a1/build.rs#L156) to tell other packages where to find the header files for zlib. Our build script can read that metadata with the `DEP_Z_INCLUDE` environment variable. 
Here's an example: ```toml # Cargo.toml [package] name = "zuser" version = "0.1.0" edition = "2021" [dependencies] libz-sys = "1.0.25" [build-dependencies] cc = "1.0.46" ``` Here we have included `libz-sys` which will ensure that there is only one `libz` used in the final library, and give us access to it from our build script: ```rust,ignore // build.rs fn main() { let mut cfg = cc::Build::new(); cfg.file("src/zuser.c"); if let Some(include) = std::env::var_os("DEP_Z_INCLUDE") { cfg.include(include); } cfg.compile("zuser"); println!("cargo:rerun-if-changed=src/zuser.c"); } ``` With `libz-sys` doing all the heavy lifting, the C source code may now include the zlib header, and it should find the header, even on systems where it isn't already installed. ```c // src/zuser.c #include "zlib.h" // … rest of code that makes use of zlib. ``` ### Conditional compilation A build script may emit [`rustc-cfg` instructions] which can enable conditions that can be checked at compile time. In this example, we'll take a look at how the [`openssl` crate] uses this to support multiple versions of the OpenSSL library. The [`openssl-sys` crate] implements building and linking the OpenSSL library. It supports multiple different implementations (like LibreSSL) and multiple versions. It makes use of the `links` key so that it may pass information to other build scripts. One of the things it passes is the `version_number` key, which is the version of OpenSSL that was detected. The code in the build script looks something [like this](https://github.com/sfackler/rust-openssl/blob/dc72a8e2c429e46c275e528b61a733a66e7877fc/openssl-sys/build/main.rs#L216): ```rust,ignore println!("cargo:version_number={:x}", openssl_version); ``` This instruction causes the `DEP_OPENSSL_VERSION_NUMBER` environment variable to be set in any crates that directly depend on `openssl-sys`. The `openssl` crate, which provides the higher-level interface, specifies `openssl-sys` as a dependency. 
The `openssl` build script can read the version information generated by the `openssl-sys` build script with the `DEP_OPENSSL_VERSION_NUMBER` environment variable. It uses this to generate some [`cfg` values](https://github.com/sfackler/rust-openssl/blob/dc72a8e2c429e46c275e528b61a733a66e7877fc/openssl/build.rs#L18-L36): ```rust,ignore // (portion of build.rs) if let Ok(version) = env::var("DEP_OPENSSL_VERSION_NUMBER") { let version = u64::from_str_radix(&version, 16).unwrap(); if version >= 0x1_00_01_00_0 { println!("cargo:rustc-cfg=ossl101"); } if version >= 0x1_00_02_00_0 { println!("cargo:rustc-cfg=ossl102"); } if version >= 0x1_01_00_00_0 { println!("cargo:rustc-cfg=ossl110"); } if version >= 0x1_01_00_07_0 { println!("cargo:rustc-cfg=ossl110g"); } if version >= 0x1_01_01_00_0 { println!("cargo:rustc-cfg=ossl111"); } } ``` These `cfg` values can then be used with the [`cfg` attribute] or the [`cfg` macro] to conditionally include code. For example, SHA3 support was added in OpenSSL 1.1.1, so it is [conditionally excluded](https://github.com/sfackler/rust-openssl/blob/dc72a8e2c429e46c275e528b61a733a66e7877fc/openssl/src/hash.rs#L67-L85) for older versions: ```rust,ignore // (portion of openssl crate) #[cfg(ossl111)] pub fn sha3_224() -> MessageDigest { unsafe { MessageDigest(ffi::EVP_sha3_224()) } } ``` Of course, one should be careful when using this, since it makes the resulting binary even more dependent on the build environment. In this example, if the binary is distributed to another system, it may not have the exact same shared libraries, which could cause problems. 
[`cfg` attribute]: ../../reference/conditional-compilation.md#the-cfg-attribute [`cfg` macro]: ../../std/macro.cfg.html [`rustc-cfg` instructions]: build-scripts.md#rustc-cfg [`openssl` crate]: https://crates.io/crates/openssl [`openssl-sys` crate]: https://crates.io/crates/openssl-sys [crates.io]: https://crates.io/ cargo-0.66.0/src/doc/src/reference/build-scripts.md000066400000000000000000000516561432416201200220750ustar00rootroot00000000000000## Build Scripts Some packages need to compile third-party non-Rust code, for example C libraries. Other packages need to link to C libraries which can either be located on the system or possibly need to be built from source. Others still need facilities for functionality such as code generation before building (think parser generators). Cargo does not aim to replace other tools that are well-optimized for these tasks, but it does integrate with them with custom build scripts. Placing a file named `build.rs` in the root of a package will cause Cargo to compile that script and execute it just before building the package. ```rust,ignore // Example custom build script. fn main() { // Tell Cargo that if the given file changes, to rerun this build script. println!("cargo:rerun-if-changed=src/hello.c"); // Use the `cc` crate to build a C file and statically link it. cc::Build::new() .file("src/hello.c") .compile("hello"); } ``` Some example use cases of build scripts are: * Building a bundled C library. * Finding a C library on the host system. * Generating a Rust module from a specification. * Performing any platform-specific configuration needed for the crate. The sections below describe how build scripts work, and the [examples chapter](build-script-examples.md) shows a variety of examples on how to write scripts. > Note: The [`package.build` manifest key](manifest.md#package-build) can be > used to change the name of the build script, or disable it entirely. 
### Life Cycle of a Build Script Just before a package is built, Cargo will compile a build script into an executable (if it has not already been built). It will then run the script, which may perform any number of tasks. The script may communicate with Cargo by printing specially formatted commands prefixed with `cargo:` to stdout. The build script will be rebuilt if any of its source files or dependencies change. By default, Cargo will re-run the build script if any of the files in the package changes. Typically it is best to use the `rerun-if` commands, described in the [change detection](#change-detection) section below, to narrow the focus of what triggers a build script to run again. Once the build script successfully finishes executing, the rest of the package will be compiled. Scripts should exit with a non-zero exit code to halt the build if there is an error, in which case the build script's output will be displayed on the terminal. ### Inputs to the Build Script When the build script is run, there are a number of inputs to the build script, all passed in the form of [environment variables][build-env]. In addition to environment variables, the build script’s current directory is the source directory of the build script’s package. [build-env]: environment-variables.md#environment-variables-cargo-sets-for-build-scripts ### Outputs of the Build Script Build scripts may save any output files or intermediate artifacts in the directory specified in the [`OUT_DIR` environment variable][build-env]. Scripts should not modify any files outside of that directory. Build scripts communicate with Cargo by printing to stdout. Cargo will interpret each line that starts with `cargo:` as an instruction that will influence compilation of the package. All other lines are ignored. > Note: The order of `cargo:` instructions printed by the build script *may* > affect the order of arguments that `cargo` passes to `rustc`. 
In turn, the > order of arguments passed to `rustc` may affect the order of arguments passed > to the linker. Therefore, you will want to pay attention to the order of the > build script's instructions. For example, if object `foo` needs to link against > library `bar`, you may want to make sure that library `bar`'s > [`cargo:rustc-link-lib`](#rustc-link-lib) instruction appears *after* > instructions to link object `foo`. The output of the script is hidden from the terminal during normal compilation. If you would like to see the output directly in your terminal, invoke Cargo as "very verbose" with the `-vv` flag. This only happens when the build script is run. If Cargo determines nothing has changed, it will not re-run the script, see [change detection](#change-detection) below for more. All the lines printed to stdout by a build script are written to a file like `target/debug/build//output` (the precise location may depend on your configuration). The stderr output is also saved in that same directory. The following is a summary of the instructions that Cargo recognizes, with each one detailed below. * [`cargo:rerun-if-changed=PATH`](#rerun-if-changed) β€” Tells Cargo when to re-run the script. * [`cargo:rerun-if-env-changed=VAR`](#rerun-if-env-changed) β€” Tells Cargo when to re-run the script. * [`cargo:rustc-link-arg=FLAG`](#rustc-link-arg) – Passes custom flags to a linker for benchmarks, binaries, `cdylib` crates, examples, and tests. * [`cargo:rustc-link-arg-bin=BIN=FLAG`](#rustc-link-arg-bin) – Passes custom flags to a linker for the binary `BIN`. * [`cargo:rustc-link-arg-bins=FLAG`](#rustc-link-arg-bins) – Passes custom flags to a linker for binaries. * [`cargo:rustc-link-arg-tests=FLAG`](#rustc-link-arg-tests) – Passes custom flags to a linker for tests. * [`cargo:rustc-link-arg-examples=FLAG`](#rustc-link-arg-examples) – Passes custom flags to a linker for examples. 
* [`cargo:rustc-link-arg-benches=FLAG`](#rustc-link-arg-benches) – Passes custom flags to a linker for benchmarks. * [`cargo:rustc-link-lib=LIB`](#rustc-link-lib) β€” Adds a library to link. * [`cargo:rustc-link-search=[KIND=]PATH`](#rustc-link-search) β€” Adds to the library search path. * [`cargo:rustc-flags=FLAGS`](#rustc-flags) β€” Passes certain flags to the compiler. * [`cargo:rustc-cfg=KEY[="VALUE"]`](#rustc-cfg) β€” Enables compile-time `cfg` settings. * [`cargo:rustc-env=VAR=VALUE`](#rustc-env) β€” Sets an environment variable. * [`cargo:rustc-cdylib-link-arg=FLAG`](#rustc-cdylib-link-arg) β€” Passes custom flags to a linker for cdylib crates. * [`cargo:warning=MESSAGE`](#cargo-warning) β€” Displays a warning on the terminal. * [`cargo:KEY=VALUE`](#the-links-manifest-key) β€” Metadata, used by `links` scripts. #### `cargo:rustc-link-arg=FLAG` The `rustc-link-arg` instruction tells Cargo to pass the [`-C link-arg=FLAG` option][link-arg] to the compiler, but only when building supported targets (benchmarks, binaries, `cdylib` crates, examples, and tests). Its usage is highly platform specific. It is useful to set the shared library version or linker script. [link-arg]: ../../rustc/codegen-options/index.md#link-arg #### `cargo:rustc-link-arg-bin=BIN=FLAG` The `rustc-link-arg-bin` instruction tells Cargo to pass the [`-C link-arg=FLAG` option][link-arg] to the compiler, but only when building the binary target with name `BIN`. Its usage is highly platform specific. It is useful to set a linker script or other linker options. #### `cargo:rustc-link-arg-bins=FLAG` The `rustc-link-arg-bins` instruction tells Cargo to pass the [`-C link-arg=FLAG` option][link-arg] to the compiler, but only when building a binary target. Its usage is highly platform specific. It is useful to set a linker script or other linker options. 
#### `cargo:rustc-link-lib=LIB` The `rustc-link-lib` instruction tells Cargo to link the given library using the compiler's [`-l` flag][option-link]. This is typically used to link a native library using [FFI]. The `LIB` string is passed directly to rustc, so it supports any syntax that `-l` does. \ Currently the full supported syntax for `LIB` is `[KIND[:MODIFIERS]=]NAME[:RENAME]`. The `-l` flag is only passed to the library target of the package, unless there is no library target, in which case it is passed to all targets. This is done because all other targets have an implicit dependency on the library target, and the given library to link should only be included once. This means that if a package has both a library and a binary target, the *library* has access to the symbols from the given lib, and the binary should access them through the library target's public API. The optional `KIND` may be one of `dylib`, `static`, or `framework`. See the [rustc book][option-link] for more detail. [option-link]: ../../rustc/command-line-arguments.md#option-l-link-lib [FFI]: ../../nomicon/ffi.md #### `cargo:rustc-link-arg-tests=FLAG` The `rustc-link-arg-tests` instruction tells Cargo to pass the [`-C link-arg=FLAG` option][link-arg] to the compiler, but only when building a tests target. #### `cargo:rustc-link-arg-examples=FLAG` The `rustc-link-arg-examples` instruction tells Cargo to pass the [`-C link-arg=FLAG` option][link-arg] to the compiler, but only when building an examples target. #### `cargo:rustc-link-arg-benches=FLAG` The `rustc-link-arg-benches` instruction tells Cargo to pass the [`-C link-arg=FLAG` option][link-arg] to the compiler, but only when building a benchmark target. #### `cargo:rustc-link-search=[KIND=]PATH` The `rustc-link-search` instruction tells Cargo to pass the [`-L` flag][option-search] to the compiler to add a directory to the library search path. The optional `KIND` may be one of `dependency`, `crate`, `native`, `framework`, or `all`. 
See the [rustc book][option-search] for more detail. These paths are also added to the [dynamic library search path environment variable](environment-variables.md#dynamic-library-paths) if they are within the `OUT_DIR`. Depending on this behavior is discouraged since this makes it difficult to use the resulting binary. In general, it is best to avoid creating dynamic libraries in a build script (using existing system libraries is fine). [option-search]: ../../rustc/command-line-arguments.md#option-l-search-path #### `cargo:rustc-flags=FLAGS` The `rustc-flags` instruction tells Cargo to pass the given space-separated flags to the compiler. This only allows the `-l` and `-L` flags, and is equivalent to using [`rustc-link-lib`](#rustc-link-lib) and [`rustc-link-search`](#rustc-link-search). #### `cargo:rustc-cfg=KEY[="VALUE"]` The `rustc-cfg` instruction tells Cargo to pass the given value to the [`--cfg` flag][option-cfg] to the compiler. This may be used for compile-time detection of features to enable [conditional compilation]. Note that this does *not* affect Cargo's dependency resolution. This cannot be used to enable an optional dependency, or enable other Cargo features. Be aware that [Cargo features] use the form `feature="foo"`. `cfg` values passed with this flag are not restricted to that form, and may provide just a single identifier, or any arbitrary key/value pair. For example, emitting `cargo:rustc-cfg=abc` will then allow code to use `#[cfg(abc)]` (note the lack of `feature=`). Or an arbitrary key/value pair may be used with an `=` symbol like `cargo:rustc-cfg=my_component="foo"`. The key should be a Rust identifier, the value should be a string. [cargo features]: features.md [conditional compilation]: ../../reference/conditional-compilation.md [option-cfg]: ../../rustc/command-line-arguments.md#option-cfg #### `cargo:rustc-env=VAR=VALUE` The `rustc-env` instruction tells Cargo to set the given environment variable when compiling the package. 
The value can then be retrieved by the [`env!` macro][env-macro] in the compiled crate. This is useful for embedding additional metadata in the crate's code, such as the hash of git HEAD or the unique identifier of a continuous integration server. See also the [environment variables automatically included by Cargo][env-cargo]. > **Note**: These environment variables are also set when running an > executable with `cargo run` or `cargo test`. However, this usage is > discouraged since it ties the executable to Cargo's execution environment. > Normally, these environment variables should only be checked at compile-time > with the `env!` macro. [env-macro]: ../../std/macro.env.html [env-cargo]: environment-variables.md#environment-variables-cargo-sets-for-crates #### `cargo:rustc-cdylib-link-arg=FLAG` The `rustc-cdylib-link-arg` instruction tells Cargo to pass the [`-C link-arg=FLAG` option][link-arg] to the compiler, but only when building a `cdylib` library target. Its usage is highly platform specific. It is useful to set the shared library version or the runtime-path. #### `cargo:warning=MESSAGE` The `warning` instruction tells Cargo to display a warning after the build script has finished running. Warnings are only shown for `path` dependencies (that is, those you're working on locally), so for example warnings printed out in [crates.io] crates are not emitted by default. The `-vv` "very verbose" flag may be used to have Cargo display warnings for all crates. ### Build Dependencies Build scripts are also allowed to have dependencies on other Cargo-based crates. Dependencies are declared through the `build-dependencies` section of the manifest. ```toml [build-dependencies] cc = "1.0.46" ``` The build script **does not** have access to the dependencies listed in the `dependencies` or `dev-dependencies` section (they’re not built yet!). Also, build dependencies are not available to the package itself unless also explicitly added in the `[dependencies]` table. 
It is recommended to carefully consider each dependency you add, weighing against the impact on compile time, licensing, maintenance, etc. Cargo will attempt to reuse a dependency if it is shared between build dependencies and normal dependencies. However, this is not always possible, for example when cross-compiling, so keep that in consideration of the impact on compile time. ### Change Detection When rebuilding a package, Cargo does not necessarily know if the build script needs to be run again. By default, it takes a conservative approach of always re-running the build script if any file within the package is changed (or the list of files controlled by the [`exclude` and `include` fields]). For most cases, this is not a good choice, so it is recommended that every build script emit at least one of the `rerun-if` instructions (described below). If these are emitted, then Cargo will only re-run the script if the given value has changed. If Cargo is re-running the build scripts of your own crate or a dependency and you don't know why, see ["Why is Cargo rebuilding my code?" in the FAQ](../faq.md#why-is-cargo-rebuilding-my-code). [`exclude` and `include` fields]: manifest.md#the-exclude-and-include-fields #### `cargo:rerun-if-changed=PATH` The `rerun-if-changed` instruction tells Cargo to re-run the build script if the file at the given path has changed. Currently, Cargo only uses the filesystem last-modified "mtime" timestamp to determine if the file has changed. It compares against an internal cached timestamp of when the build script last ran. If the path points to a directory, it will scan the entire directory for any modifications. If the build script inherently does not need to re-run under any circumstance, then emitting `cargo:rerun-if-changed=build.rs` is a simple way to prevent it from being re-run (otherwise, the default if no `rerun-if` instructions are emitted is to scan the entire package directory for changes). 
Cargo automatically handles whether or not the script itself needs to be recompiled, and of course the script will be re-run after it has been recompiled. Otherwise, specifying `build.rs` is redundant and unnecessary. #### `cargo:rerun-if-env-changed=NAME` The `rerun-if-env-changed` instruction tells Cargo to re-run the build script if the value of an environment variable of the given name has changed. Note that the environment variables here are intended for global environment variables like `CC` and such, it is not necessary to use this for environment variables like `TARGET` that Cargo sets. ### The `links` Manifest Key The `package.links` key may be set in the `Cargo.toml` manifest to declare that the package links with the given native library. The purpose of this manifest key is to give Cargo an understanding about the set of native dependencies that a package has, as well as providing a principled system of passing metadata between package build scripts. ```toml [package] # ... links = "foo" ``` This manifest states that the package links to the `libfoo` native library. When using the `links` key, the package must have a build script, and the build script should use the [`rustc-link-lib` instruction](#rustc-link-lib) to link the library. Primarily, Cargo requires that there is at most one package per `links` value. In other words, it is forbidden to have two packages link to the same native library. This helps prevent duplicate symbols between crates. Note, however, that there are [conventions in place](#-sys-packages) to alleviate this. As mentioned above in the output format, each build script can generate an arbitrary set of metadata in the form of key-value pairs. This metadata is passed to the build scripts of **dependent** packages. For example, if the package `bar` depends on `foo`, then if `foo` generates `key=value` as part of its build script metadata, then the build script of `bar` will have the environment variables `DEP_FOO_KEY=value`. 
See the ["Using another `sys` crate"][using-another-sys] for an example of how this can be used. Note that metadata is only passed to immediate dependents, not transitive dependents. [using-another-sys]: build-script-examples.md#using-another-sys-crate ### `*-sys` Packages Some Cargo packages that link to system libraries have a naming convention of having a `-sys` suffix. Any package named `foo-sys` should provide two major pieces of functionality: * The library crate should link to the native library `libfoo`. This will often probe the current system for `libfoo` before resorting to building from source. * The library crate should provide **declarations** for types and functions in `libfoo`, but **not** higher-level abstractions. The set of `*-sys` packages provides a common set of dependencies for linking to native libraries. There are a number of benefits earned from having this convention of native-library-related packages: * Common dependencies on `foo-sys` alleviates the rule about one package per value of `links`. * Other `-sys` packages can take advantage of the `DEP_NAME_KEY=value` environment variables to better integrate with other packages. See the ["Using another `sys` crate"][using-another-sys] example. * A common dependency allows centralizing logic on discovering `libfoo` itself (or building it from source). * These dependencies are easily [overridable](#overriding-build-scripts). It is common to have a companion package without the `-sys` suffix that provides safe, high-level abstractions on top of the sys package. For example, the [`git2` crate] provides a high-level interface to the [`libgit2-sys` crate]. [`git2` crate]: https://crates.io/crates/git2 [`libgit2-sys` crate]: https://crates.io/crates/libgit2-sys ### Overriding Build Scripts If a manifest contains a `links` key, then Cargo supports overriding the build script specified with a custom library. 
The purpose of this functionality is to prevent running the build script in question altogether and instead supply the metadata ahead of time. To override a build script, place the following configuration in any acceptable Cargo [configuration location](config.md). ```toml [target.x86_64-unknown-linux-gnu.foo] rustc-link-lib = ["foo"] rustc-link-search = ["/path/to/foo"] rustc-flags = "-L /some/path" rustc-cfg = ['key="value"'] rustc-env = {key = "value"} rustc-cdylib-link-arg = ["…"] metadata_key1 = "value" metadata_key2 = "value" ``` With this configuration, if a package declares that it links to `foo` then the build script will **not** be compiled or run, and the metadata specified will be used instead. The `warning`, `rerun-if-changed`, and `rerun-if-env-changed` keys should not be used and will be ignored. ### Jobserver Cargo and `rustc` use the [jobserver protocol], developed for GNU make, to coordinate concurrency across processes. It is essentially a semaphore that controls the number of jobs running concurrently. The concurrency may be set with the `--jobs` flag, which defaults to the number of logical CPUs. Each build script inherits one job slot from Cargo, and should endeavor to only use one CPU while it runs. If the script wants to use more CPUs in parallel, it should use the [`jobserver` crate] to coordinate with Cargo. As an example, the [`cc` crate] may enable the optional `parallel` feature which will use the jobserver protocol to attempt to build multiple C files at the same time. [`cc` crate]: https://crates.io/crates/cc [`jobserver` crate]: https://crates.io/crates/jobserver [jobserver protocol]: http://make.mad-scientist.net/papers/jobserver-implementation/ [crates.io]: https://crates.io/ cargo-0.66.0/src/doc/src/reference/cargo-targets.md000066400000000000000000000360531432416201200220450ustar00rootroot00000000000000## Cargo Targets Cargo packages consist of *targets* which correspond to source files which can be compiled into a crate. 
Packages can have [library](#library), [binary](#binaries), [example](#examples), [test](#tests), and [benchmark](#benchmarks) targets. The list of targets can be configured in the `Cargo.toml` manifest, often [inferred automatically](#target-auto-discovery) by the [directory layout][package layout] of the source files. See [Configuring a target](#configuring-a-target) below for details on configuring the settings for a target. ### Library The library target defines a "library" that can be used and linked by other libraries and executables. The filename defaults to `src/lib.rs`, and the name of the library defaults to the name of the package. A package can have only one library. The settings for the library can be [customized] in the `[lib]` table in `Cargo.toml`. ```toml # Example of customizing the library in Cargo.toml. [lib] crate-type = ["cdylib"] bench = false ``` ### Binaries Binary targets are executable programs that can be run after being compiled. The default binary filename is `src/main.rs`, which defaults to the name of the package. Additional binaries are stored in the [`src/bin/` directory][package layout]. The settings for each binary can be [customized] in the `[[bin]]` tables in `Cargo.toml`. Binaries can use the public API of the package's library. They are also linked with the [`[dependencies]`][dependencies] defined in `Cargo.toml`. You can run individual binaries with the [`cargo run`] command with the `--bin ` option. [`cargo install`] can be used to copy the executable to a common location. ```toml # Example of customizing binaries in Cargo.toml. [[bin]] name = "cool-tool" test = false bench = false [[bin]] name = "frobnicator" required-features = ["frobnicate"] ``` ### Examples Files located under the [`examples` directory][package layout] are example uses of the functionality provided by the library. When compiled, they are placed in the [`target/debug/examples` directory][build cache]. 
Examples can use the public API of the package's library. They are also linked with the [`[dependencies]`][dependencies] and [`[dev-dependencies]`][dev-dependencies] defined in `Cargo.toml`. By default, examples are executable binaries (with a `main()` function). You can specify the [`crate-type` field](#the-crate-type-field) to make an example be compiled as a library: ```toml [[example]] name = "foo" crate-type = ["staticlib"] ``` You can run individual executable examples with the [`cargo run`] command with the `--example ` option. Library examples can be built with [`cargo build`] with the `--example ` option. [`cargo install`] with the `--example ` option can be used to copy executable binaries to a common location. Examples are compiled by [`cargo test`] by default to protect them from bit-rotting. Set [the `test` field](#the-test-field) to `true` if you have `#[test]` functions in the example that you want to run with [`cargo test`]. ### Tests There are two styles of tests within a Cargo project: * *Unit tests* which are functions marked with the [`#[test]` attribute][test-attribute] located within your library or binaries (or any target enabled with [the `test` field](#the-test-field)). These tests have access to private APIs located within the target they are defined in. * *Integration tests* which is a separate executable binary, also containing `#[test]` functions, which is linked with the project's library and has access to its *public* API. Tests are run with the [`cargo test`] command. By default, Cargo and `rustc` use the [libtest harness] which is responsible for collecting functions annotated with the [`#[test]` attribute][test-attribute] and executing them in parallel, reporting the success and failure of each test. See [the `harness` field](#the-harness-field) if you want to use a different harness or test strategy. > **Note**: There is another special style of test in Cargo: > [documentation tests][documentation examples]. 
> They are handled by `rustdoc` and have a slightly different execution model. > For more information, please see [`cargo test`][cargo-test-documentation-tests]. [libtest harness]: ../../rustc/tests/index.html [cargo-test-documentation-tests]: ../commands/cargo-test.md#documentation-tests #### Integration tests Files located under the [`tests` directory][package layout] are integration tests. When you run [`cargo test`], Cargo will compile each of these files as a separate crate, and execute them. Integration tests can use the public API of the package's library. They are also linked with the [`[dependencies]`][dependencies] and [`[dev-dependencies]`][dev-dependencies] defined in `Cargo.toml`. If you want to share code among multiple integration tests, you can place it in a separate module such as `tests/common/mod.rs` and then put `mod common;` in each test to import it. Each integration test results in a separate executable binary, and [`cargo test`] will run them serially. In some cases this can be inefficient, as it can take longer to compile, and may not make full use of multiple CPUs when running the tests. If you have a lot of integration tests, you may want to consider creating a single integration test, and split the tests into multiple modules. The libtest harness will automatically find all of the `#[test]` annotated functions and run them in parallel. You can pass module names to [`cargo test`] to only run the tests within that module. Binary targets are automatically built if there is an integration test. This allows an integration test to execute the binary to exercise and test its behavior. The `CARGO_BIN_EXE_` [environment variable] is set when the integration test is built so that it can use the [`env` macro] to locate the executable. 
[environment variable]: environment-variables.md#environment-variables-cargo-sets-for-crates [`env` macro]: ../../std/macro.env.html ### Benchmarks Benchmarks provide a way to test the performance of your code using the [`cargo bench`] command. They follow the same structure as [tests](#tests), with each benchmark function annotated with the `#[bench]` attribute. Similarly to tests: * Benchmarks are placed in the [`benches` directory][package layout]. * Benchmark functions defined in libraries and binaries have access to the *private* API within the target they are defined in. Benchmarks in the `benches` directory may use the *public* API. * [The `bench` field](#the-bench-field) can be used to define which targets are benchmarked by default. * [The `harness` field](#the-harness-field) can be used to disable the built-in harness. > **Note**: The [`#[bench]` > attribute](../../unstable-book/library-features/test.html) is currently > unstable and only available on the [nightly channel]. There are some > packages available on [crates.io](https://crates.io/keywords/benchmark) that > may help with running benchmarks on the stable channel, such as > [Criterion](https://crates.io/crates/criterion). ### Configuring a target All of the `[lib]`, `[[bin]]`, `[[example]]`, `[[test]]`, and `[[bench]]` sections in `Cargo.toml` support similar configuration for specifying how a target should be built. The double-bracket sections like `[[bin]]` are [array-of-table of TOML](https://toml.io/en/v1.0.0-rc.3#array-of-tables), which means you can write more than one `[[bin]]` section to make several executables in your crate. You can only specify one library, so `[lib]` is a normal TOML table. The following is an overview of the TOML settings for each target, with each field described in detail below. ```toml [lib] name = "foo" # The name of the target. path = "src/lib.rs" # The source file of the target. test = true # Is tested by default. 
doctest = true # Documentation examples are tested by default. bench = true # Is benchmarked by default. doc = true # Is documented by default. plugin = false # Used as a compiler plugin (deprecated). proc-macro = false # Set to `true` for a proc-macro library. harness = true # Use libtest harness. edition = "2015" # The edition of the target. crate-type = ["lib"] # The crate types to generate. required-features = [] # Features required to build this target (N/A for lib). ``` #### The `name` field The `name` field specifies the name of the target, which corresponds to the filename of the artifact that will be generated. For a library, this is the crate name that dependencies will use to reference it. For the `[lib]` and the default binary (`src/main.rs`), this defaults to the name of the package, with any dashes replaced with underscores. For other [auto discovered](#target-auto-discovery) targets, it defaults to the directory or file name. This is required for all targets except `[lib]`. #### The `path` field The `path` field specifies where the source for the crate is located, relative to the `Cargo.toml` file. If not specified, the [inferred path](#target-auto-discovery) is used based on the target name. #### The `test` field The `test` field indicates whether or not the target is tested by default by [`cargo test`]. The default is `true` for lib, bins, and tests. > **Note**: Examples are built by [`cargo test`] by default to ensure they > continue to compile, but they are not *tested* by default. Setting `test = > true` for an example will also build it as a test and run any > [`#[test]`][test-attribute] functions defined in the example. #### The `doctest` field The `doctest` field indicates whether or not [documentation examples] are tested by default by [`cargo test`]. This is only relevant for libraries, it has no effect on other sections. The default is `true` for the library. 
#### The `bench` field The `bench` field indicates whether or not the target is benchmarked by default by [`cargo bench`]. The default is `true` for lib, bins, and benchmarks. #### The `doc` field The `doc` field indicates whether or not the target is included in the documentation generated by [`cargo doc`] by default. The default is `true` for libraries and binaries. > **Note**: The binary will be skipped if its name is the same as the lib > target. #### The `plugin` field This field is used for `rustc` plugins, which are being deprecated. #### The `proc-macro` field The `proc-macro` field indicates that the library is a [procedural macro] ([reference][proc-macro-reference]). This is only valid for the `[lib]` target. #### The `harness` field The `harness` field indicates that the [`--test` flag] will be passed to `rustc` which will automatically include the libtest library which is the driver for collecting and running tests marked with the [`#[test]` attribute][test-attribute] or benchmarks with the `#[bench]` attribute. The default is `true` for all targets. If set to `false`, then you are responsible for defining a `main()` function to run tests and benchmarks. Tests have the [`cfg(test)` conditional expression][cfg-test] enabled whether or not the harness is enabled. #### The `edition` field The `edition` field defines the [Rust edition] the target will use. If not specified, it defaults to the [`edition` field][package-edition] for the `[package]`. This field should usually not be set, and is only intended for advanced scenarios such as incrementally transitioning a large package to a new edition. #### The `crate-type` field The `crate-type` field defines the [crate types] that will be generated by the target. It is an array of strings, allowing you to specify multiple crate types for a single target. This can only be specified for libraries and examples. Binaries, tests, and benchmarks are always the "bin" crate type. 
The defaults are: Target | Crate Type -------|----------- Normal library | `"lib"` Proc-macro library | `"proc-macro"` Example | `"bin"` The available options are `bin`, `lib`, `rlib`, `dylib`, `cdylib`, `staticlib`, and `proc-macro`. You can read more about the different crate types in the [Rust Reference Manual][crate types]. #### The `required-features` field The `required-features` field specifies which [features] the target needs in order to be built. If any of the required features are not enabled, the target will be skipped. This is only relevant for the `[[bin]]`, `[[bench]]`, `[[test]]`, and `[[example]]` sections, it has no effect on `[lib]`. ```toml [features] # ... postgres = [] sqlite = [] tools = [] [[bin]] name = "my-pg-tool" required-features = ["postgres", "tools"] ``` ### Target auto-discovery By default, Cargo automatically determines the targets to build based on the [layout of the files][package layout] on the filesystem. The target configuration tables, such as `[lib]`, `[[bin]]`, `[[test]]`, `[[bench]]`, or `[[example]]`, can be used to add additional targets that don't follow the standard directory layout. The automatic target discovery can be disabled so that only manually configured targets will be built. Setting the keys `autobins`, `autoexamples`, `autotests`, or `autobenches` to `false` in the `[package]` section will disable auto-discovery of the corresponding target type. ```toml [package] # ... autobins = false autoexamples = false autotests = false autobenches = false ``` Disabling automatic discovery should only be needed for specialized situations. For example, if you have a library where you want a *module* named `bin`, this would present a problem because Cargo would usually attempt to compile anything in the `bin` directory as an executable. 
Here is a sample layout of this scenario: ```text β”œβ”€β”€ Cargo.toml └── src Β Β  β”œβ”€β”€ lib.rs Β Β  └── bin Β Β  Β Β  └── mod.rs ``` To prevent Cargo from inferring `src/bin/mod.rs` as an executable, set `autobins = false` in `Cargo.toml` to disable auto-discovery: ```toml [package] # … autobins = false ``` > **Note**: For packages with the 2015 edition, the default for auto-discovery > is `false` if at least one target is manually defined in `Cargo.toml`. > Beginning with the 2018 edition, the default is always `true`. [Build cache]: ../guide/build-cache.md [Rust Edition]: ../../edition-guide/index.html [`--test` flag]: ../../rustc/command-line-arguments.html#option-test [`cargo bench`]: ../commands/cargo-bench.md [`cargo build`]: ../commands/cargo-build.md [`cargo doc`]: ../commands/cargo-doc.md [`cargo install`]: ../commands/cargo-install.md [`cargo run`]: ../commands/cargo-run.md [`cargo test`]: ../commands/cargo-test.md [cfg-test]: ../../reference/conditional-compilation.html#test [crate types]: ../../reference/linkage.html [crates.io]: https://crates.io/ [customized]: #configuring-a-target [dependencies]: specifying-dependencies.md [dev-dependencies]: specifying-dependencies.md#development-dependencies [documentation examples]: ../../rustdoc/documentation-tests.html [features]: features.md [nightly channel]: ../../book/appendix-07-nightly-rust.html [package layout]: ../guide/project-layout.md [package-edition]: manifest.md#the-edition-field [proc-macro-reference]: ../../reference/procedural-macros.html [procedural macro]: ../../book/ch19-06-macros.html [test-attribute]: ../../reference/attributes/testing.html#the-test-attribute cargo-0.66.0/src/doc/src/reference/config.md000066400000000000000000001176421432416201200205540ustar00rootroot00000000000000## Configuration This document explains how Cargo’s configuration system works, as well as available keys or configuration. 
For configuration of a package through its manifest, see the [manifest format](manifest.md). ### Hierarchical structure Cargo allows local configuration for a particular package as well as global configuration. It looks for configuration files in the current directory and all parent directories. If, for example, Cargo were invoked in `/projects/foo/bar/baz`, then the following configuration files would be probed for and unified in this order: * `/projects/foo/bar/baz/.cargo/config.toml` * `/projects/foo/bar/.cargo/config.toml` * `/projects/foo/.cargo/config.toml` * `/projects/.cargo/config.toml` * `/.cargo/config.toml` * `$CARGO_HOME/config.toml` which defaults to: * Windows: `%USERPROFILE%\.cargo\config.toml` * Unix: `$HOME/.cargo/config.toml` With this structure, you can specify configuration per-package, and even possibly check it into version control. You can also specify personal defaults with a configuration file in your home directory. If a key is specified in multiple config files, the values will get merged together. Numbers, strings, and booleans will use the value in the deeper config directory taking precedence over ancestor directories, where the home directory is the lowest priority. Arrays will be joined together. At present, when being invoked from a workspace, Cargo does not read config files from crates within the workspace. i.e. if a workspace has two crates in it, named `/projects/foo/bar/baz/mylib` and `/projects/foo/bar/baz/mybin`, and there are Cargo configs at `/projects/foo/bar/baz/mylib/.cargo/config.toml` and `/projects/foo/bar/baz/mybin/.cargo/config.toml`, Cargo does not read those configuration files if it is invoked from the workspace root (`/projects/foo/bar/baz/`). > **Note:** Cargo also reads config files without the `.toml` extension, such as > `.cargo/config`. Support for the `.toml` extension was added in version 1.39 > and is the preferred form. If both files exist, Cargo will use the file > without the extension. 
### Configuration format Configuration files are written in the [TOML format][toml] (like the manifest), with simple key-value pairs inside of sections (tables). The following is a quick overview of all settings, with detailed descriptions found below. ```toml paths = ["/path/to/override"] # path dependency overrides [alias] # command aliases b = "build" c = "check" t = "test" r = "run" rr = "run --release" recursive_example = "rr --example recursions" space_example = ["run", "--release", "--", "\"command list\""] [build] jobs = 1 # number of parallel jobs, defaults to # of CPUs rustc = "rustc" # the rust compiler tool rustc-wrapper = "…" # run this wrapper instead of `rustc` rustc-workspace-wrapper = "…" # run this wrapper instead of `rustc` for workspace members rustdoc = "rustdoc" # the doc generator tool target = "triple" # build for the target triple (ignored by `cargo install`) target-dir = "target" # path of where to place all generated artifacts rustflags = ["…", "…"] # custom flags to pass to all compiler invocations rustdocflags = ["…", "…"] # custom flags to pass to rustdoc incremental = true # whether or not to enable incremental compilation dep-info-basedir = "…" # path for the base directory for targets in depfiles [doc] browser = "chromium" # browser to use with `cargo doc --open`, # overrides the `BROWSER` environment variable [env] # Set ENV_VAR_NAME=value for any process run by Cargo ENV_VAR_NAME = "value" # Set even if already present in environment ENV_VAR_NAME_2 = { value = "value", force = true } # Value is relative to .cargo directory containing `config.toml`, make absolute ENV_VAR_NAME_3 = { value = "relative/path", relative = true } [future-incompat-report] frequency = 'always' # when to display a notification about a future incompat report [cargo-new] vcs = "none" # VCS to use ('git', 'hg', 'pijul', 'fossil', 'none') [http] debug = false # HTTP debugging proxy = "host:port" # HTTP proxy in libcurl format ssl-version = "tlsv1.3" # TLS 
version to use ssl-version.max = "tlsv1.3" # maximum TLS version ssl-version.min = "tlsv1.1" # minimum TLS version timeout = 30 # timeout for each HTTP request, in seconds low-speed-limit = 10 # network timeout threshold (bytes/sec) cainfo = "cert.pem" # path to Certificate Authority (CA) bundle check-revoke = true # check for SSL certificate revocation multiplexing = true # HTTP/2 multiplexing user-agent = "…" # the user-agent header [install] root = "/some/path" # `cargo install` destination directory [net] retry = 2 # network retries git-fetch-with-cli = true # use the `git` executable for git operations offline = true # do not access the network [patch.] # Same keys as for [patch] in Cargo.toml [profile.] # Modify profile settings via config. opt-level = 0 # Optimization level. debug = true # Include debug info. split-debuginfo = '...' # Debug info splitting behavior. debug-assertions = true # Enables debug assertions. overflow-checks = true # Enables runtime integer overflow checks. lto = false # Sets link-time optimization. panic = 'unwind' # The panic strategy. incremental = true # Incremental compilation. codegen-units = 16 # Number of code generation units. rpath = false # Sets the rpath linking option. [profile..build-override] # Overrides build-script settings. # Same keys for a normal profile. [profile..package.] # Override profile for a package. # Same keys for a normal profile (minus `panic`, `lto`, and `rpath`). [registries.] # registries other than crates.io index = "…" # URL of the registry index token = "…" # authentication token for the registry [registry] default = "…" # name of the default registry token = "…" # authentication token for crates.io [source.] 
# source definition and replacement replace-with = "…" # replace this source with the given named source directory = "…" # path to a directory source registry = "…" # URL to a registry source local-registry = "…" # path to a local registry source git = "…" # URL of a git repository source branch = "…" # branch name for the git repository tag = "…" # tag name for the git repository rev = "…" # revision for the git repository [target.] linker = "…" # linker to use runner = "…" # wrapper to run executables rustflags = ["…", "…"] # custom flags for `rustc` [target.] runner = "…" # wrapper to run executables rustflags = ["…", "…"] # custom flags for `rustc` [target..] # `links` build script override rustc-link-lib = ["foo"] rustc-link-search = ["/path/to/foo"] rustc-flags = ["-L", "/some/path"] rustc-cfg = ['key="value"'] rustc-env = {key = "value"} rustc-cdylib-link-arg = ["…"] metadata_key1 = "value" metadata_key2 = "value" [term] quiet = false # whether cargo output is quiet verbose = false # whether cargo provides verbose output color = 'auto' # whether cargo colorizes output progress.when = 'auto' # whether cargo shows progress bar progress.width = 80 # width of progress bar ``` ### Environment variables Cargo can also be configured through environment variables in addition to the TOML configuration files. For each configuration key of the form `foo.bar` the environment variable `CARGO_FOO_BAR` can also be used to define the value. Keys are converted to uppercase, dots and dashes are converted to underscores. For example the `target.x86_64-unknown-linux-gnu.runner` key can also be defined by the `CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUNNER` environment variable. Environment variables will take precedence over TOML configuration files. Currently only integer, boolean, string and some array values are supported to be defined by environment variables. Descriptions below indicate which keys support environment variables. 
In addition to the system above, Cargo recognizes a few other specific [environment variables][env]. ### Command-line overrides Cargo also accepts arbitrary configuration overrides through the `--config` command-line option. The argument should be in TOML syntax of `KEY=VALUE`: ```console cargo --config net.git-fetch-with-cli=true fetch ``` The `--config` option may be specified multiple times, in which case the values are merged in left-to-right order, using the same merging logic that is used when multiple configuration files apply. Configuration values specified this way take precedence over environment variables, which take precedence over configuration files. Some examples of what it looks like using Bourne shell syntax: ```console # Most shells will require escaping. cargo --config http.proxy=\"http://example.com\" … # Spaces may be used. cargo --config "net.git-fetch-with-cli = true" … # TOML array example. Single quotes make it easier to read and write. cargo --config 'build.rustdocflags = ["--html-in-header", "header.html"]' … # Example of a complex TOML key. cargo --config "target.'cfg(all(target_arch = \"arm\", target_os = \"none\"))'.runner = 'my-runner'" … # Example of overriding a profile setting. cargo --config profile.dev.package.image.opt-level=3 … ``` The `--config` option can also be used to pass paths to extra configuration files that Cargo should use for a specific invocation. Options from configuration files loaded this way follow the same precedence rules as other options specified directly with `--config`. ### Config-relative paths Paths in config files may be absolute, relative, or a bare name without any path separators. Paths for executables without a path separator will use the `PATH` environment variable to search for the executable. Paths for non-executables will be relative to where the config value is defined. In particular, rules are: * For environment variables, paths are relative to the current working directory. 
* For config values loaded directly from the [`--config KEY=VALUE`](#command-line-overrides) option, paths are relative to the current working directory. * For config files, paths are relative to the parent directory of the directory where the config files were defined, no matter those files are from either the [hierarchical probing](#hierarchical-structure) or the [`--config `](#command-line-overrides) option. > **Note:** To maintain consistency with existing `.cargo/config.toml` probing behavior, > it is by design that a path in a config file passed via `--config ` > is also relative to two levels up from the config file itself. > > To avoid unexpected results, the rule of thumb is putting your extra config files > at the same level of discovered `.cargo/config.toml` in your porject. > For instance, given a project `/my/project`, > it is recommended to put config files under `/my/project/.cargo` > or a new directory at the same level, such as `/my/project/.config`. ```toml # Relative path examples. [target.x86_64-unknown-linux-gnu] runner = "foo" # Searches `PATH` for `foo`. [source.vendored-sources] # Directory is relative to the parent where `.cargo/config.toml` is located. # For example, `/my/project/.cargo/config.toml` would result in `/my/project/vendor`. directory = "vendor" ``` ### Executable paths with arguments Some Cargo commands invoke external programs, which can be configured as a path and some number of arguments. The value may be an array of strings like `['/path/to/program', 'somearg']` or a space-separated string like `'/path/to/program somearg'`. If the path to the executable contains a space, the list form must be used. If Cargo is passing other arguments to the program such as a path to open or run, they will be passed after the last specified argument in the value of an option of this format. If the specified program does not have path separators, Cargo will search `PATH` for its executable. 
### Credentials Configuration values with sensitive information are stored in the `$CARGO_HOME/credentials.toml` file. This file is automatically created and updated by [`cargo login`]. It follows the same format as Cargo config files. ```toml [registry] token = "…" # Access token for crates.io [registries.] token = "…" # Access token for the named registry ``` Tokens are used by some Cargo commands such as [`cargo publish`] for authenticating with remote registries. Care should be taken to protect the tokens and to keep them secret. As with most other config values, tokens may be specified with environment variables. The token for [crates.io] may be specified with the `CARGO_REGISTRY_TOKEN` environment variable. Tokens for other registries may be specified with environment variables of the form `CARGO_REGISTRIES__TOKEN` where `` is the name of the registry in all capital letters. ### Configuration keys This section documents all configuration keys. The description for keys with variable parts are annotated with angled brackets like `target.` where the `` part can be any target triple like `target.x86_64-pc-windows-msvc`. #### `paths` * Type: array of strings (paths) * Default: none * Environment: not supported An array of paths to local packages which are to be used as overrides for dependencies. For more information see the [Overriding Dependencies guide](overriding-dependencies.md#paths-overrides). #### `[alias]` * Type: string or array of strings * Default: see below * Environment: `CARGO_ALIAS_` The `[alias]` table defines CLI command aliases. For example, running `cargo b` is an alias for running `cargo build`. Each key in the table is the subcommand, and the value is the actual command to run. The value may be an array of strings, where the first element is the command and the following are arguments. It may also be a string, which will be split on spaces into subcommand and arguments. 
The following aliases are built-in to Cargo: ```toml [alias] b = "build" c = "check" d = "doc" t = "test" r = "run" ``` Aliases are not allowed to redefine existing built-in commands. Aliases are recursive: ```toml [alias] rr = "run --release" recursive_example = "rr --example recursions" ``` #### `[build]` The `[build]` table controls build-time operations and compiler settings. ##### `build.jobs` * Type: integer * Default: number of logical CPUs * Environment: `CARGO_BUILD_JOBS` Sets the maximum number of compiler processes to run in parallel. If negative, it sets the maximum number of compiler processes to the number of logical CPUs plus provided value. Should not be 0. Can be overridden with the `--jobs` CLI option. ##### `build.rustc` * Type: string (program path) * Default: "rustc" * Environment: `CARGO_BUILD_RUSTC` or `RUSTC` Sets the executable to use for `rustc`. ##### `build.rustc-wrapper` * Type: string (program path) * Default: none * Environment: `CARGO_BUILD_RUSTC_WRAPPER` or `RUSTC_WRAPPER` Sets a wrapper to execute instead of `rustc`. The first argument passed to the wrapper is the path to the actual executable to use (i.e., `build.rustc`, if that is set, or `"rustc"` otherwise). ##### `build.rustc-workspace-wrapper` * Type: string (program path) * Default: none * Environment: `CARGO_BUILD_RUSTC_WORKSPACE_WRAPPER` or `RUSTC_WORKSPACE_WRAPPER` Sets a wrapper to execute instead of `rustc`, for workspace members only. The first argument passed to the wrapper is the path to the actual executable to use (i.e., `build.rustc`, if that is set, or `"rustc"` otherwise). It affects the filename hash so that artifacts produced by the wrapper are cached separately. ##### `build.rustdoc` * Type: string (program path) * Default: "rustdoc" * Environment: `CARGO_BUILD_RUSTDOC` or `RUSTDOC` Sets the executable to use for `rustdoc`. 
##### `build.target` * Type: string or array of strings * Default: host platform * Environment: `CARGO_BUILD_TARGET` The default target platform triples to compile to. This allows passing either a string or an array of strings. Each string value is a target platform triple. The selected build targets will be built for each of the selected architectures. The string value may also be a relative path to a `.json` target spec file. Can be overridden with the `--target` CLI option. ```toml [build] target = ["x86_64-unknown-linux-gnu", "i686-unknown-linux-gnu"] ``` ##### `build.target-dir` * Type: string (path) * Default: "target" * Environment: `CARGO_BUILD_TARGET_DIR` or `CARGO_TARGET_DIR` The path to where all compiler output is placed. The default if not specified is a directory named `target` located at the root of the workspace. Can be overridden with the `--target-dir` CLI option. ##### `build.rustflags` * Type: string or array of strings * Default: none * Environment: `CARGO_BUILD_RUSTFLAGS` or `CARGO_ENCODED_RUSTFLAGS` or `RUSTFLAGS` Extra command-line flags to pass to `rustc`. The value may be an array of strings or a space-separated string. There are four mutually exclusive sources of extra flags. They are checked in order, with the first one being used: 1. `CARGO_ENCODED_RUSTFLAGS` environment variable. 2. `RUSTFLAGS` environment variable. 3. All matching `target..rustflags` and `target..rustflags` config entries joined together. 4. `build.rustflags` config value. Additional flags may also be passed with the [`cargo rustc`] command. If the `--target` flag (or [`build.target`](#buildtarget)) is used, then the flags will only be passed to the compiler for the target. Things being built for the host, such as build scripts or proc macros, will not receive the args. Without `--target`, the flags will be passed to all compiler invocations (including build scripts and proc macros) because dependencies are shared. 
If you have args that you do not want to pass to build scripts or proc macros and are building for the host, pass `--target` with the host triple. It is not recommended to pass in flags that Cargo itself usually manages. For example, the flags driven by [profiles](profiles.md) are best handled by setting the appropriate profile setting. > **Caution**: Due to the low-level nature of passing flags directly to the > compiler, this may cause a conflict with future versions of Cargo which may > issue the same or similar flags on its own which may interfere with the > flags you specify. This is an area where Cargo may not always be backwards > compatible. ##### `build.rustdocflags` * Type: string or array of strings * Default: none * Environment: `CARGO_BUILD_RUSTDOCFLAGS` or `CARGO_ENCODED_RUSTDOCFLAGS` or `RUSTDOCFLAGS` Extra command-line flags to pass to `rustdoc`. The value may be an array of strings or a space-separated string. There are three mutually exclusive sources of extra flags. They are checked in order, with the first one being used: 1. `CARGO_ENCODED_RUSTDOCFLAGS` environment variable. 2. `RUSTDOCFLAGS` environment variable. 3. `build.rustdocflags` config value. Additional flags may also be passed with the [`cargo rustdoc`] command. ##### `build.incremental` * Type: bool * Default: from profile * Environment: `CARGO_BUILD_INCREMENTAL` or `CARGO_INCREMENTAL` Whether or not to perform [incremental compilation]. The default if not set is to use the value from the [profile](profiles.md#incremental). Otherwise this overrides the setting of all profiles. The `CARGO_INCREMENTAL` environment variable can be set to `1` to force enable incremental compilation for all profiles, or `0` to disable it. This env var overrides the config setting. ##### `build.dep-info-basedir` * Type: string (path) * Default: none * Environment: `CARGO_BUILD_DEP_INFO_BASEDIR` Strips the given path prefix from [dep info](../guide/build-cache.md#dep-info-files) file paths. 
This config setting is intended to convert absolute paths to relative paths for tools that require relative paths. The setting itself is a config-relative path. So, for example, a value of `"."` would strip all paths starting with the parent directory of the `.cargo` directory. ##### `build.pipelining` This option is deprecated and unused. Cargo always has pipelining enabled. #### `[doc]` The `[doc]` table defines options for the [`cargo doc`] command. ##### `doc.browser` * Type: string or array of strings ([program path with args]) * Default: `BROWSER` environment variable, or, if that is missing, opening the link in a system specific way This option sets the browser to be used by [`cargo doc`], overriding the `BROWSER` environment variable when opening documentation with the `--open` option. #### `[cargo-new]` The `[cargo-new]` table defines defaults for the [`cargo new`] command. ##### `cargo-new.name` This option is deprecated and unused. ##### `cargo-new.email` This option is deprecated and unused. ##### `cargo-new.vcs` * Type: string * Default: "git" or "none" * Environment: `CARGO_CARGO_NEW_VCS` Specifies the source control system to use for initializing a new repository. Valid values are `git`, `hg` (for Mercurial), `pijul`, `fossil` or `none` to disable this behavior. Defaults to `git`, or `none` if already inside a VCS repository. Can be overridden with the `--vcs` CLI option. ### `[env]` The `[env]` section allows you to set additional environment variables for build scripts, rustc invocations, `cargo run` and `cargo build`. ```toml [env] OPENSSL_DIR = "/opt/openssl" ``` By default, the variables specified will not override values that already exist in the environment. This behavior can be changed by setting the `force` flag. Setting the `relative` flag evaluates the value as a config-relative path that is relative to the parent directory of the `.cargo` directory that contains the `config.toml` file. 
The value of the environment variable will be the full absolute path. ```toml [env] TMPDIR = { value = "/home/tmp", force = true } OPENSSL_DIR = { value = "vendor/openssl", relative = true } ``` ### `[future-incompat-report]` The `[future-incompat-report]` table controls settings for [future incompat reporting](future-incompat-report.md). #### `future-incompat-report.frequency` * Type: string * Default: "always" * Environment: `CARGO_FUTURE_INCOMPAT_REPORT_FREQUENCY` Controls how often we display a notification to the terminal when a future incompat report is available. Possible values: * `always` (default): Always display a notification when a command (e.g. `cargo build`) produces a future incompat report * `never`: Never display a notification #### `[http]` The `[http]` table defines settings for HTTP behavior. This includes fetching crate dependencies and accessing remote git repositories. ##### `http.debug` * Type: boolean * Default: false * Environment: `CARGO_HTTP_DEBUG` If `true`, enables debugging of HTTP requests. The debug information can be seen by setting the `CARGO_LOG=cargo::ops::registry=debug` environment variable (or use `trace` for even more information). Be wary when posting logs from this output in a public location. The output may include headers with authentication tokens which you don't want to leak! Be sure to review logs before posting them. ##### `http.proxy` * Type: string * Default: none * Environment: `CARGO_HTTP_PROXY` or `HTTPS_PROXY` or `https_proxy` or `http_proxy` Sets an HTTP and HTTPS proxy to use. The format is in [libcurl format] as in `[protocol://]host[:port]`. If not set, Cargo will also check the `http.proxy` setting in your global git configuration. If none of those are set, the `HTTPS_PROXY` or `https_proxy` environment variables set the proxy for HTTPS requests, and `http_proxy` sets it for HTTP requests. 
##### `http.timeout` * Type: integer * Default: 30 * Environment: `CARGO_HTTP_TIMEOUT` or `HTTP_TIMEOUT` Sets the timeout for each HTTP request, in seconds. ##### `http.cainfo` * Type: string (path) * Default: none * Environment: `CARGO_HTTP_CAINFO` Path to a Certificate Authority (CA) bundle file, used to verify TLS certificates. If not specified, Cargo attempts to use the system certificates. ##### `http.check-revoke` * Type: boolean * Default: true (Windows) false (all others) * Environment: `CARGO_HTTP_CHECK_REVOKE` This determines whether or not TLS certificate revocation checks should be performed. This only works on Windows. ##### `http.ssl-version` * Type: string or min/max table * Default: none * Environment: `CARGO_HTTP_SSL_VERSION` This sets the minimum TLS version to use. It takes a string, with one of the possible values of "default", "tlsv1", "tlsv1.0", "tlsv1.1", "tlsv1.2", or "tlsv1.3". This may alternatively take a table with two keys, `min` and `max`, which each take a string value of the same kind that specifies the minimum and maximum range of TLS versions to use. The default is a minimum version of "tlsv1.0" and a max of the newest version supported on your platform, typically "tlsv1.3". ##### `http.low-speed-limit` * Type: integer * Default: 10 * Environment: `CARGO_HTTP_LOW_SPEED_LIMIT` This setting controls timeout behavior for slow connections. If the average transfer speed in bytes per second is below the given value for [`http.timeout`](#httptimeout) seconds (default 30 seconds), then the connection is considered too slow and Cargo will abort and retry. ##### `http.multiplexing` * Type: boolean * Default: true * Environment: `CARGO_HTTP_MULTIPLEXING` When `true`, Cargo will attempt to use the HTTP2 protocol with multiplexing. This allows multiple requests to use the same connection, usually improving performance when fetching multiple files. If `false`, Cargo will use HTTP 1.1 without pipelining. 
##### `http.user-agent` * Type: string * Default: Cargo's version * Environment: `CARGO_HTTP_USER_AGENT` Specifies a custom user-agent header to use. The default if not specified is a string that includes Cargo's version. #### `[install]` The `[install]` table defines defaults for the [`cargo install`] command. ##### `install.root` * Type: string (path) * Default: Cargo's home directory * Environment: `CARGO_INSTALL_ROOT` Sets the path to the root directory for installing executables for [`cargo install`]. Executables go into a `bin` directory underneath the root. To track information of installed executables, some extra files, such as `.crates.toml` and `.crates2.json`, are also created under this root. The default if not specified is Cargo's home directory (default `.cargo` in your home directory). Can be overridden with the `--root` command-line option. #### `[net]` The `[net]` table controls networking configuration. ##### `net.retry` * Type: integer * Default: 2 * Environment: `CARGO_NET_RETRY` Number of times to retry possibly spurious network errors. ##### `net.git-fetch-with-cli` * Type: boolean * Default: false * Environment: `CARGO_NET_GIT_FETCH_WITH_CLI` If this is `true`, then Cargo will use the `git` executable to fetch registry indexes and git dependencies. If `false`, then it uses a built-in `git` library. Setting this to `true` can be helpful if you have special authentication requirements that Cargo does not support. See [Git Authentication](../appendix/git-authentication.md) for more information about setting up git authentication. ##### `net.offline` * Type: boolean * Default: false * Environment: `CARGO_NET_OFFLINE` If this is `true`, then Cargo will avoid accessing the network, and attempt to proceed with locally cached data. If `false`, Cargo will access the network as needed, and generate an error if it encounters a network error. Can be overridden with the `--offline` command-line option. 
#### `[patch]` Just as you can override dependencies using [`[patch]` in `Cargo.toml`](overriding-dependencies.md#the-patch-section), you can override them in the cargo configuration file to apply those patches to any affected build. The format is identical to the one used in `Cargo.toml`. Since `.cargo/config.toml` files are not usually checked into source control, you should prefer patching using `Cargo.toml` where possible to ensure that other developers can compile your crate in their own environments. Patching through cargo configuration files is generally only appropriate when the patch section is automatically generated by an external build tool. If a given dependency is patched both in a cargo configuration file and a `Cargo.toml` file, the patch in the configuration file is used. If multiple configuration files patch the same dependency, standard cargo configuration merging is used, which prefers the value defined closest to the current directory, with `$HOME/.cargo/config.toml` taking the lowest precedence. Relative `path` dependencies in such a `[patch]` section are resolved relative to the configuration file they appear in. #### `[profile]` The `[profile]` table can be used to globally change profile settings, and override settings specified in `Cargo.toml`. It has the same syntax and options as profiles specified in `Cargo.toml`. See the [Profiles chapter] for details about the options. [Profiles chapter]: profiles.md ##### `[profile..build-override]` * Environment: `CARGO_PROFILE__BUILD_OVERRIDE_` The build-override table overrides settings for build scripts, proc macros, and their dependencies. It has the same keys as a normal profile. See the [overrides section](profiles.md#overrides) for more details. ##### `[profile..package.]` * Environment: not supported The package table overrides settings for specific packages. It has the same keys as a normal profile, minus the `panic`, `lto`, and `rpath` settings. 
See the [overrides section](profiles.md#overrides) for more details. ##### `profile..codegen-units` * Type: integer * Default: See profile docs. * Environment: `CARGO_PROFILE__CODEGEN_UNITS` See [codegen-units](profiles.md#codegen-units). ##### `profile..debug` * Type: integer or boolean * Default: See profile docs. * Environment: `CARGO_PROFILE__DEBUG` See [debug](profiles.md#debug). ##### `profile..split-debuginfo` * Type: string * Default: See profile docs. * Environment: `CARGO_PROFILE__SPLIT_DEBUGINFO` See [split-debuginfo](profiles.md#split-debuginfo). ##### `profile..debug-assertions` * Type: boolean * Default: See profile docs. * Environment: `CARGO_PROFILE__DEBUG_ASSERTIONS` See [debug-assertions](profiles.md#debug-assertions). ##### `profile..incremental` * Type: boolean * Default: See profile docs. * Environment: `CARGO_PROFILE__INCREMENTAL` See [incremental](profiles.md#incremental). ##### `profile..lto` * Type: string or boolean * Default: See profile docs. * Environment: `CARGO_PROFILE__LTO` See [lto](profiles.md#lto). ##### `profile..overflow-checks` * Type: boolean * Default: See profile docs. * Environment: `CARGO_PROFILE__OVERFLOW_CHECKS` See [overflow-checks](profiles.md#overflow-checks). ##### `profile..opt-level` * Type: integer or string * Default: See profile docs. * Environment: `CARGO_PROFILE__OPT_LEVEL` See [opt-level](profiles.md#opt-level). ##### `profile..panic` * Type: string * default: See profile docs. * Environment: `CARGO_PROFILE__PANIC` See [panic](profiles.md#panic). ##### `profile..rpath` * Type: boolean * default: See profile docs. * Environment: `CARGO_PROFILE__RPATH` See [rpath](profiles.md#rpath). #### `[registries]` The `[registries]` table is used for specifying additional [registries]. It consists of a sub-table for each named registry. ##### `registries..index` * Type: string (url) * Default: none * Environment: `CARGO_REGISTRIES__INDEX` Specifies the URL of the git index for the registry. 
##### `registries..token` * Type: string * Default: none * Environment: `CARGO_REGISTRIES__TOKEN` Specifies the authentication token for the given registry. This value should only appear in the [credentials](#credentials) file. This is used for registry commands like [`cargo publish`] that require authentication. Can be overridden with the `--token` command-line option. #### `[registry]` The `[registry]` table controls the default registry used when one is not specified. ##### `registry.index` This value is no longer accepted and should not be used. ##### `registry.default` * Type: string * Default: `"crates-io"` * Environment: `CARGO_REGISTRY_DEFAULT` The name of the registry (from the [`registries` table](#registries)) to use by default for registry commands like [`cargo publish`]. Can be overridden with the `--registry` command-line option. ##### `registry.token` * Type: string * Default: none * Environment: `CARGO_REGISTRY_TOKEN` Specifies the authentication token for [crates.io]. This value should only appear in the [credentials](#credentials) file. This is used for registry commands like [`cargo publish`] that require authentication. Can be overridden with the `--token` command-line option. #### `[source]` The `[source]` table defines the registry sources available. See [Source Replacement] for more information. It consists of a sub-table for each named source. A source should only define one kind (directory, registry, local-registry, or git). ##### `source..replace-with` * Type: string * Default: none * Environment: not supported If set, replace this source with the given named source. ##### `source..directory` * Type: string (path) * Default: none * Environment: not supported Sets the path to a directory to use as a directory source. ##### `source..registry` * Type: string (url) * Default: none * Environment: not supported Sets the URL to use for a registry source. 
##### `source..local-registry` * Type: string (path) * Default: none * Environment: not supported Sets the path to a directory to use as a local registry source. ##### `source..git` * Type: string (url) * Default: none * Environment: not supported Sets the URL to use for a git repository source. ##### `source..branch` * Type: string * Default: none * Environment: not supported Sets the branch name to use for a git repository. If none of `branch`, `tag`, or `rev` is set, defaults to the `master` branch. ##### `source..tag` * Type: string * Default: none * Environment: not supported Sets the tag name to use for a git repository. If none of `branch`, `tag`, or `rev` is set, defaults to the `master` branch. ##### `source..rev` * Type: string * Default: none * Environment: not supported Sets the [revision] to use for a git repository. If none of `branch`, `tag`, or `rev` is set, defaults to the `master` branch. #### `[target]` The `[target]` table is used for specifying settings for specific platform targets. It consists of a sub-table which is either a platform triple or a [`cfg()` expression]. The given values will be used if the target platform matches either the `` value or the `` expression. ```toml [target.thumbv7m-none-eabi] linker = "arm-none-eabi-gcc" runner = "my-emulator" rustflags = ["…", "…"] [target.'cfg(all(target_arch = "arm", target_os = "none"))'] runner = "my-arm-wrapper" rustflags = ["…", "…"] ``` `cfg` values come from those built-in to the compiler (run `rustc --print=cfg` to view), values set by [build scripts], and extra `--cfg` flags passed to `rustc` (such as those defined in `RUSTFLAGS`). Do not try to match on `debug_assertions` or Cargo features like `feature="foo"`. If using a target spec JSON file, the `` value is the filename stem. For example `--target foo/bar.json` would match `[target.bar]`. ##### `target..ar` This option is deprecated and unused. 
##### `target.<triple>.linker` * Type: string (program path) * Default: none * Environment: `CARGO_TARGET_<triple>_LINKER` Specifies the linker which is passed to `rustc` (via [`-C linker`]) when the `<triple>` is being compiled for. By default, the linker is not overridden. ##### `target.<triple>.runner` * Type: string or array of strings ([program path with args]) * Default: none * Environment: `CARGO_TARGET_<triple>_RUNNER` If a runner is provided, executables for the target `<triple>` will be executed by invoking the specified runner with the actual executable passed as an argument. This applies to [`cargo run`], [`cargo test`] and [`cargo bench`] commands. By default, compiled executables are executed directly. ##### `target.<cfg>.runner` This is similar to the [target runner](#targettriplerunner), but using a [`cfg()` expression]. If both a `<triple>` and `<cfg>` runner match, the `<triple>` will take precedence. It is an error if more than one `<cfg>` runner matches the current target. ##### `target.<triple>.rustflags` * Type: string or array of strings * Default: none * Environment: `CARGO_TARGET_<triple>_RUSTFLAGS` Passes a set of custom flags to the compiler for this `<triple>`. The value may be an array of strings or a space-separated string. See [`build.rustflags`](#buildrustflags) for more details on the different ways to specify extra flags. ##### `target.<cfg>.rustflags` This is similar to the [target rustflags](#targettriplerustflags), but using a [`cfg()` expression]. If several `<triple>` and `<cfg>` entries match the current target, the flags are joined together. ##### `target.<triple>.<links>` The links sub-table provides a way to [override a build script]. When specified, the build script for the given `links` library will not be run, and the given values will be used instead. 
```toml [target.x86_64-unknown-linux-gnu.foo] rustc-link-lib = ["foo"] rustc-link-search = ["/path/to/foo"] rustc-flags = "-L /some/path" rustc-cfg = ['key="value"'] rustc-env = {key = "value"} rustc-cdylib-link-arg = ["…"] metadata_key1 = "value" metadata_key2 = "value" ``` #### `[term]` The `[term]` table controls terminal output and interaction. ##### `term.quiet` * Type: boolean * Default: false * Environment: `CARGO_TERM_QUIET` Controls whether or not log messages are displayed by Cargo. Specifying the `--quiet` flag will override and force quiet output. Specifying the `--verbose` flag will override and disable quiet output. ##### `term.verbose` * Type: boolean * Default: false * Environment: `CARGO_TERM_VERBOSE` Controls whether or not extra detailed messages are displayed by Cargo. Specifying the `--quiet` flag will override and disable verbose output. Specifying the `--verbose` flag will override and force verbose output. ##### `term.color` * Type: string * Default: "auto" * Environment: `CARGO_TERM_COLOR` Controls whether or not colored output is used in the terminal. Possible values: * `auto` (default): Automatically detect if color support is available on the terminal. * `always`: Always display colors. * `never`: Never display colors. Can be overridden with the `--color` command-line option. ##### `term.progress.when` * Type: string * Default: "auto" * Environment: `CARGO_TERM_PROGRESS_WHEN` Controls whether or not progress bar is shown in the terminal. Possible values: * `auto` (default): Intelligently guess whether to show progress bar. * `always`: Always show progress bar. * `never`: Never show progress bar. ##### `term.progress.width` * Type: integer * Default: none * Environment: `CARGO_TERM_PROGRESS_WIDTH` Sets the width for progress bar. 
[`cargo bench`]: ../commands/cargo-bench.md [`cargo login`]: ../commands/cargo-login.md [`cargo doc`]: ../commands/cargo-doc.md [`cargo new`]: ../commands/cargo-new.md [`cargo publish`]: ../commands/cargo-publish.md [`cargo run`]: ../commands/cargo-run.md [`cargo rustc`]: ../commands/cargo-rustc.md [`cargo test`]: ../commands/cargo-test.md [`cargo rustdoc`]: ../commands/cargo-rustdoc.md [`cargo install`]: ../commands/cargo-install.md [env]: environment-variables.md [`cfg()` expression]: ../../reference/conditional-compilation.html [build scripts]: build-scripts.md [`-C linker`]: ../../rustc/codegen-options/index.md#linker [override a build script]: build-scripts.md#overriding-build-scripts [toml]: https://toml.io/ [incremental compilation]: profiles.md#incremental [program path with args]: #executable-paths-with-arguments [libcurl format]: https://everything.curl.dev/libcurl/proxies#proxy-types [source replacement]: source-replacement.md [revision]: https://git-scm.com/docs/gitrevisions [registries]: registries.md [crates.io]: https://crates.io/ cargo-0.66.0/src/doc/src/reference/environment-variables.md000066400000000000000000000612351432416201200236150ustar00rootroot00000000000000## Environment Variables Cargo sets and reads a number of environment variables which your code can detect or override. Here is a list of the variables Cargo sets, organized by when it interacts with them: ### Environment variables Cargo reads You can override these environment variables to change Cargo's behavior on your system: * `CARGO_LOG` - Cargo uses the [`env_logger`] crate to display debug log messages. The `CARGO_LOG` environment variable can be set to enable debug logging, with a value such as `trace`, `debug`, or `warn`. Usually it is only used during debugging. For more details refer to the [Debug logging]. * `CARGO_HOME` β€” Cargo maintains a local cache of the registry index and of git checkouts of crates. 
By default these are stored under `$HOME/.cargo` (`%USERPROFILE%\.cargo` on Windows), but this variable overrides the location of this directory. Once a crate is cached it is not removed by the clean command. For more details refer to the [guide](../guide/cargo-home.md). * `CARGO_TARGET_DIR` β€” Location of where to place all generated artifacts, relative to the current working directory. See [`build.target-dir`] to set via config. * `RUSTC` β€” Instead of running `rustc`, Cargo will execute this specified compiler instead. See [`build.rustc`] to set via config. * `RUSTC_WRAPPER` β€” Instead of simply running `rustc`, Cargo will execute this specified wrapper, passing as its command-line arguments the rustc invocation, with the first argument being the path to the actual rustc. Useful to set up a build cache tool such as `sccache`. See [`build.rustc-wrapper`] to set via config. Setting this to the empty string overwrites the config and resets cargo to not use a wrapper. * `RUSTC_WORKSPACE_WRAPPER` β€” Instead of simply running `rustc`, for workspace members Cargo will execute this specified wrapper, passing as its command-line arguments the rustc invocation, with the first argument being the path to the actual rustc. It affects the filename hash so that artifacts produced by the wrapper are cached separately. See [`build.rustc-workspace-wrapper`] to set via config. Setting this to the empty string overwrites the config and resets cargo to not use a wrapper for workspace members. * `RUSTDOC` β€” Instead of running `rustdoc`, Cargo will execute this specified `rustdoc` instance instead. See [`build.rustdoc`] to set via config. * `RUSTDOCFLAGS` β€” A space-separated list of custom flags to pass to all `rustdoc` invocations that Cargo performs. In contrast with [`cargo rustdoc`], this is useful for passing a flag to *all* `rustdoc` instances. See [`build.rustdocflags`] for some more ways to set flags. 
This string is split by whitespace; for a more robust encoding of multiple arguments, see `CARGO_ENCODED_RUSTDOCFLAGS`. * `CARGO_ENCODED_RUSTDOCFLAGS` - A list of custom flags separated by `0x1f` (ASCII Unit Separator) to pass to all `rustdoc` invocations that Cargo performs. * `RUSTFLAGS` — A space-separated list of custom flags to pass to all compiler invocations that Cargo performs. In contrast with [`cargo rustc`], this is useful for passing a flag to *all* compiler instances. See [`build.rustflags`] for some more ways to set flags. This string is split by whitespace; for a more robust encoding of multiple arguments, see `CARGO_ENCODED_RUSTFLAGS`. * `CARGO_ENCODED_RUSTFLAGS` - A list of custom flags separated by `0x1f` (ASCII Unit Separator) to pass to all compiler invocations that Cargo performs. * `CARGO_INCREMENTAL` — If this is set to 1 then Cargo will force [incremental compilation] to be enabled for the current compilation, and when set to 0 it will force disabling it. If this env var isn't present then cargo's defaults will otherwise be used. See also [`build.incremental`] config value. * `CARGO_CACHE_RUSTC_INFO` — If this is set to 0 then Cargo will not try to cache compiler version information. * `HTTPS_PROXY` or `https_proxy` or `http_proxy` — The HTTP proxy to use, see [`http.proxy`] for more detail. * `HTTP_TIMEOUT` — The HTTP timeout in seconds, see [`http.timeout`] for more detail. * `TERM` — If this is set to `dumb`, it disables the progress bar. * `BROWSER` — The web browser to execute to open documentation with [`cargo doc`]'s `--open` flag, see [`doc.browser`] for more details. * `RUSTFMT` — Instead of running `rustfmt`, [`cargo fmt`](https://github.com/rust-lang/rustfmt) will execute this specified `rustfmt` instance instead. #### Configuration environment variables Cargo reads environment variables for configuration values. See the [configuration chapter][config-env] for more details. 
In summary, the supported environment variables are: * `CARGO_ALIAS_` β€” Command aliases, see [`alias`]. * `CARGO_BUILD_JOBS` β€” Number of parallel jobs, see [`build.jobs`]. * `CARGO_BUILD_RUSTC` β€” The `rustc` executable, see [`build.rustc`]. * `CARGO_BUILD_RUSTC_WRAPPER` β€” The `rustc` wrapper, see [`build.rustc-wrapper`]. * `CARGO_BUILD_RUSTC_WORKSPACE_WRAPPER` β€” The `rustc` wrapper for workspace members only, see [`build.rustc-workspace-wrapper`]. * `CARGO_BUILD_RUSTDOC` β€” The `rustdoc` executable, see [`build.rustdoc`]. * `CARGO_BUILD_TARGET` β€” The default target platform, see [`build.target`]. * `CARGO_BUILD_TARGET_DIR` β€” The default output directory, see [`build.target-dir`]. * `CARGO_BUILD_RUSTFLAGS` β€” Extra `rustc` flags, see [`build.rustflags`]. * `CARGO_BUILD_RUSTDOCFLAGS` β€” Extra `rustdoc` flags, see [`build.rustdocflags`]. * `CARGO_BUILD_INCREMENTAL` β€” Incremental compilation, see [`build.incremental`]. * `CARGO_BUILD_DEP_INFO_BASEDIR` β€” Dep-info relative directory, see [`build.dep-info-basedir`]. * `CARGO_CARGO_NEW_VCS` β€” The default source control system with [`cargo new`], see [`cargo-new.vcs`]. * `CARGO_FUTURE_INCOMPAT_REPORT_FREQUENCY` - How often we should generate a future incompat report notification, see [`future-incompat-report.frequency`]. * `CARGO_HTTP_DEBUG` β€” Enables HTTP debugging, see [`http.debug`]. * `CARGO_HTTP_PROXY` β€” Enables HTTP proxy, see [`http.proxy`]. * `CARGO_HTTP_TIMEOUT` β€” The HTTP timeout, see [`http.timeout`]. * `CARGO_HTTP_CAINFO` β€” The TLS certificate Certificate Authority file, see [`http.cainfo`]. * `CARGO_HTTP_CHECK_REVOKE` β€” Disables TLS certificate revocation checks, see [`http.check-revoke`]. * `CARGO_HTTP_SSL_VERSION` β€” The TLS version to use, see [`http.ssl-version`]. * `CARGO_HTTP_LOW_SPEED_LIMIT` β€” The HTTP low-speed limit, see [`http.low-speed-limit`]. * `CARGO_HTTP_MULTIPLEXING` β€” Whether HTTP/2 multiplexing is used, see [`http.multiplexing`]. 
* `CARGO_HTTP_USER_AGENT` β€” The HTTP user-agent header, see [`http.user-agent`]. * `CARGO_INSTALL_ROOT` β€” The default directory for [`cargo install`], see [`install.root`]. * `CARGO_NET_RETRY` β€” Number of times to retry network errors, see [`net.retry`]. * `CARGO_NET_GIT_FETCH_WITH_CLI` β€” Enables the use of the `git` executable to fetch, see [`net.git-fetch-with-cli`]. * `CARGO_NET_OFFLINE` β€” Offline mode, see [`net.offline`]. * `CARGO_PROFILE__BUILD_OVERRIDE_` β€” Override build script profile, see [`profile..build-override`]. * `CARGO_PROFILE__CODEGEN_UNITS` β€” Set code generation units, see [`profile..codegen-units`]. * `CARGO_PROFILE__DEBUG` β€” What kind of debug info to include, see [`profile..debug`]. * `CARGO_PROFILE__DEBUG_ASSERTIONS` β€” Enable/disable debug assertions, see [`profile..debug-assertions`]. * `CARGO_PROFILE__INCREMENTAL` β€” Enable/disable incremental compilation, see [`profile..incremental`]. * `CARGO_PROFILE__LTO` β€” Link-time optimization, see [`profile..lto`]. * `CARGO_PROFILE__OVERFLOW_CHECKS` β€” Enable/disable overflow checks, see [`profile..overflow-checks`]. * `CARGO_PROFILE__OPT_LEVEL` β€” Set the optimization level, see [`profile..opt-level`]. * `CARGO_PROFILE__PANIC` β€” The panic strategy to use, see [`profile..panic`]. * `CARGO_PROFILE__RPATH` β€” The rpath linking option, see [`profile..rpath`]. * `CARGO_PROFILE__SPLIT_DEBUGINFO` β€” Controls debug file output behavior, see [`profile..split-debuginfo`]. * `CARGO_REGISTRIES__INDEX` β€” URL of a registry index, see [`registries..index`]. * `CARGO_REGISTRIES__TOKEN` β€” Authentication token of a registry, see [`registries..token`]. * `CARGO_REGISTRY_DEFAULT` β€” Default registry for the `--registry` flag, see [`registry.default`]. * `CARGO_REGISTRY_TOKEN` β€” Authentication token for [crates.io], see [`registry.token`]. * `CARGO_TARGET__LINKER` β€” The linker to use, see [`target..linker`]. 
The triple must be [converted to uppercase and underscores](config.md#environment-variables). * `CARGO_TARGET__RUNNER` β€” The executable runner, see [`target..runner`]. * `CARGO_TARGET__RUSTFLAGS` β€” Extra `rustc` flags for a target, see [`target..rustflags`]. * `CARGO_TERM_QUIET` β€” Quiet mode, see [`term.quiet`]. * `CARGO_TERM_VERBOSE` β€” The default terminal verbosity, see [`term.verbose`]. * `CARGO_TERM_COLOR` β€” The default color mode, see [`term.color`]. * `CARGO_TERM_PROGRESS_WHEN` β€” The default progress bar showing mode, see [`term.progress.when`]. * `CARGO_TERM_PROGRESS_WIDTH` β€” The default progress bar width, see [`term.progress.width`]. [`cargo doc`]: ../commands/cargo-doc.md [`cargo install`]: ../commands/cargo-install.md [`cargo new`]: ../commands/cargo-new.md [`cargo rustc`]: ../commands/cargo-rustc.md [`cargo rustdoc`]: ../commands/cargo-rustdoc.md [config-env]: config.md#environment-variables [crates.io]: https://crates.io/ [incremental compilation]: profiles.md#incremental [`alias`]: config.md#alias [`build.jobs`]: config.md#buildjobs [`build.rustc`]: config.md#buildrustc [`build.rustc-wrapper`]: config.md#buildrustc-wrapper [`build.rustc-workspace-wrapper`]: config.md#buildrustc-workspace-wrapper [`build.rustdoc`]: config.md#buildrustdoc [`build.target`]: config.md#buildtarget [`build.target-dir`]: config.md#buildtarget-dir [`build.rustflags`]: config.md#buildrustflags [`build.rustdocflags`]: config.md#buildrustdocflags [`build.incremental`]: config.md#buildincremental [`build.dep-info-basedir`]: config.md#builddep-info-basedir [`doc.browser`]: config.md#docbrowser [`cargo-new.name`]: config.md#cargo-newname [`cargo-new.email`]: config.md#cargo-newemail [`cargo-new.vcs`]: config.md#cargo-newvcs [`future-incompat-report.frequency`]: config.md#future-incompat-reportfrequency [`http.debug`]: config.md#httpdebug [`http.proxy`]: config.md#httpproxy [`http.timeout`]: config.md#httptimeout [`http.cainfo`]: config.md#httpcainfo 
[`http.check-revoke`]: config.md#httpcheck-revoke [`http.ssl-version`]: config.md#httpssl-version [`http.low-speed-limit`]: config.md#httplow-speed-limit [`http.multiplexing`]: config.md#httpmultiplexing [`http.user-agent`]: config.md#httpuser-agent [`install.root`]: config.md#installroot [`net.retry`]: config.md#netretry [`net.git-fetch-with-cli`]: config.md#netgit-fetch-with-cli [`net.offline`]: config.md#netoffline [`profile..build-override`]: config.md#profilenamebuild-override [`profile..codegen-units`]: config.md#profilenamecodegen-units [`profile..debug`]: config.md#profilenamedebug [`profile..debug-assertions`]: config.md#profilenamedebug-assertions [`profile..incremental`]: config.md#profilenameincremental [`profile..lto`]: config.md#profilenamelto [`profile..overflow-checks`]: config.md#profilenameoverflow-checks [`profile..opt-level`]: config.md#profilenameopt-level [`profile..panic`]: config.md#profilenamepanic [`profile..rpath`]: config.md#profilenamerpath [`profile..split-debuginfo`]: config.md#profilenamesplit-debuginfo [`registries..index`]: config.md#registriesnameindex [`registries..token`]: config.md#registriesnametoken [`registry.default`]: config.md#registrydefault [`registry.token`]: config.md#registrytoken [`target..linker`]: config.md#targettriplelinker [`target..runner`]: config.md#targettriplerunner [`target..rustflags`]: config.md#targettriplerustflags [`term.quiet`]: config.md#termquiet [`term.verbose`]: config.md#termverbose [`term.color`]: config.md#termcolor [`term.progress.when`]: config.md#termprogresswhen [`term.progress.width`]: config.md#termprogresswidth ### Environment variables Cargo sets for crates Cargo exposes these environment variables to your crate when it is compiled. Note that this applies for running binaries with `cargo run` and `cargo test` as well. 
To get the value of any of these variables in a Rust program, do this: ```rust,ignore let version = env!("CARGO_PKG_VERSION"); ``` `version` will now contain the value of `CARGO_PKG_VERSION`. Note that if one of these values is not provided in the manifest, the corresponding environment variable is set to the empty string, `""`. * `CARGO` β€” Path to the `cargo` binary performing the build. * `CARGO_MANIFEST_DIR` β€” The directory containing the manifest of your package. * `CARGO_PKG_VERSION` β€” The full version of your package. * `CARGO_PKG_VERSION_MAJOR` β€” The major version of your package. * `CARGO_PKG_VERSION_MINOR` β€” The minor version of your package. * `CARGO_PKG_VERSION_PATCH` β€” The patch version of your package. * `CARGO_PKG_VERSION_PRE` β€” The pre-release version of your package. * `CARGO_PKG_AUTHORS` β€” Colon separated list of authors from the manifest of your package. * `CARGO_PKG_NAME` β€” The name of your package. * `CARGO_PKG_DESCRIPTION` β€” The description from the manifest of your package. * `CARGO_PKG_HOMEPAGE` β€” The home page from the manifest of your package. * `CARGO_PKG_REPOSITORY` β€” The repository from the manifest of your package. * `CARGO_PKG_LICENSE` β€” The license from the manifest of your package. * `CARGO_PKG_LICENSE_FILE` β€” The license file from the manifest of your package. * `CARGO_PKG_RUST_VERSION` β€” The Rust version from the manifest of your package. Note that this is the minimum Rust version supported by the package, not the current Rust version. * `CARGO_CRATE_NAME` β€” The name of the crate that is currently being compiled. * `CARGO_BIN_NAME` β€” The name of the binary that is currently being compiled (if it is a binary). This name does not include any file extension, such as `.exe`. * `OUT_DIR` β€” If the package has a build script, this is set to the folder where the build script should place its output. See below for more information. (Only set during compilation.) 
* `CARGO_BIN_EXE_<name>` β€” The absolute path to a binary target's executable. This is only set when building an [integration test] or benchmark. This may be used with the [`env` macro] to find the executable to run for testing purposes. The `<name>` is the name of the binary target, exactly as-is. For example, `CARGO_BIN_EXE_my-program` for a binary named `my-program`. Binaries are automatically built when the test is built, unless the binary has required features that are not enabled. * `CARGO_PRIMARY_PACKAGE` β€” This environment variable will be set if the package being built is primary. Primary packages are the ones the user selected on the command-line, either with `-p` flags or the defaults based on the current directory and the default workspace members. This environment variable will not be set when building dependencies. This is only set when compiling the package (not when running binaries or tests). * `CARGO_TARGET_TMPDIR` β€” Only set when building [integration test] or benchmark code. This is a path to a directory inside the target directory where integration tests or benchmarks are free to put any data needed by the tests/benches. Cargo initially creates this directory but doesn't manage its content in any way, this is the responsibility of the test code. [integration test]: cargo-targets.md#integration-tests [`env` macro]: ../../std/macro.env.html #### Dynamic library paths Cargo also sets the dynamic library path when compiling and running binaries with commands like `cargo run` and `cargo test`. This helps with locating shared libraries that are part of the build process. The variable name depends on the platform: * Windows: `PATH` * macOS: `DYLD_FALLBACK_LIBRARY_PATH` * Unix: `LD_LIBRARY_PATH` The value is extended from the existing value when Cargo starts. macOS has special consideration where if `DYLD_FALLBACK_LIBRARY_PATH` is not already set, it will add the default `$HOME/lib:/usr/local/lib:/usr/lib`. 
Cargo includes the following paths: * Search paths included from any build script with the [`rustc-link-search` instruction](build-scripts.md#rustc-link-search). Paths outside of the `target` directory are removed. It is the responsibility of the user running Cargo to properly set the environment if additional libraries on the system are needed in the search path. * The base output directory, such as `target/debug`, and the "deps" directory. This is mostly for legacy support of `rustc` compiler plugins. * The rustc sysroot library path. This generally is not important to most users. ### Environment variables Cargo sets for build scripts Cargo sets several environment variables when build scripts are run. Because these variables are not yet set when the build script is compiled, the above example using `env!` won't work and instead you'll need to retrieve the values when the build script is run: ```rust,ignore use std::env; let out_dir = env::var("OUT_DIR").unwrap(); ``` `out_dir` will now contain the value of `OUT_DIR`. * `CARGO` β€” Path to the `cargo` binary performing the build. * `CARGO_MANIFEST_DIR` β€” The directory containing the manifest for the package being built (the package containing the build script). Also note that this is the value of the current working directory of the build script when it starts. * `CARGO_MANIFEST_LINKS` β€” the manifest `links` value. * `CARGO_MAKEFLAGS` β€” Contains parameters needed for Cargo's [jobserver] implementation to parallelize subprocesses. Rustc or cargo invocations from build.rs can already read `CARGO_MAKEFLAGS`, but GNU Make requires the flags to be specified either directly as arguments, or through the `MAKEFLAGS` environment variable. Currently Cargo doesn't set the `MAKEFLAGS` variable, but it's free for build scripts invoking GNU Make to set it to the contents of `CARGO_MAKEFLAGS`. 
* `CARGO_FEATURE_<name>` β€” For each activated feature of the package being built, this environment variable will be present where `<name>` is the name of the feature uppercased and having `-` translated to `_`. * `CARGO_CFG_<cfg>` β€” For each [configuration option][configuration] of the package being built, this environment variable will contain the value of the configuration, where `<cfg>` is the name of the configuration uppercased and having `-` translated to `_`. Boolean configurations are present if they are set, and not present otherwise. Configurations with multiple values are joined to a single variable with the values delimited by `,`. This includes values built-in to the compiler (which can be seen with `rustc --print=cfg`) and values set by build scripts and extra flags passed to `rustc` (such as those defined in `RUSTFLAGS`). Some examples of what these variables are: * `CARGO_CFG_UNIX` β€” Set on [unix-like platforms]. * `CARGO_CFG_WINDOWS` β€” Set on [windows-like platforms]. * `CARGO_CFG_TARGET_FAMILY=unix` β€” The [target family]. * `CARGO_CFG_TARGET_OS=macos` β€” The [target operating system]. * `CARGO_CFG_TARGET_ARCH=x86_64` β€” The CPU [target architecture]. * `CARGO_CFG_TARGET_VENDOR=apple` β€” The [target vendor]. * `CARGO_CFG_TARGET_ENV=gnu` β€” The [target environment] ABI. * `CARGO_CFG_TARGET_POINTER_WIDTH=64` β€” The CPU [pointer width]. * `CARGO_CFG_TARGET_ENDIAN=little` β€” The CPU [target endianness]. * `CARGO_CFG_TARGET_FEATURE=mmx,sse` β€” List of CPU [target features] enabled. * `OUT_DIR` β€” the folder in which all output and intermediate artifacts should be placed. This folder is inside the build directory for the package being built, and it is unique for the package in question. * `TARGET` β€” the target triple that is being compiled for. Native code should be compiled for this triple. See the [Target Triple] description for more information. * `HOST` β€” the host triple of the Rust compiler. 
* `NUM_JOBS` β€” the parallelism specified as the top-level parallelism. This can be useful to pass a `-j` parameter to a system like `make`. Note that care should be taken when interpreting this environment variable. For historical purposes this is still provided but recent versions of Cargo, for example, do not need to run `make -j`, and instead can set the `MAKEFLAGS` env var to the content of `CARGO_MAKEFLAGS` to activate the use of Cargo's GNU Make compatible [jobserver] for sub-make invocations. * `OPT_LEVEL`, `DEBUG` β€” values of the corresponding variables for the profile currently being built. * `PROFILE` β€” `release` for release builds, `debug` for other builds. This is determined based on if the [profile] inherits from the [`dev`] or [`release`] profile. Using this environment variable is not recommended. Using other environment variables like `OPT_LEVEL` provide a more correct view of the actual settings being used. * `DEP_<name>_<key>` β€” For more information about this set of environment variables, see build script documentation about [`links`][links]. * `RUSTC`, `RUSTDOC` β€” the compiler and documentation generator that Cargo has resolved to use, passed to the build script so it might use it as well. * `RUSTC_WRAPPER` β€” the `rustc` wrapper, if any, that Cargo is using. See [`build.rustc-wrapper`]. * `RUSTC_WORKSPACE_WRAPPER` β€” the `rustc` wrapper, if any, that Cargo is using for workspace members. See [`build.rustc-workspace-wrapper`]. * `RUSTC_LINKER` β€” The path to the linker binary that Cargo has resolved to use for the current target, if specified. The linker can be changed by editing `.cargo/config.toml`; see the documentation about [cargo configuration][cargo-config] for more information. * `CARGO_ENCODED_RUSTFLAGS` β€” extra flags that Cargo invokes `rustc` with, separated by a `0x1f` character (ASCII Unit Separator). See [`build.rustflags`]. 
Note that since Rust 1.55, `RUSTFLAGS` is removed from the environment; scripts should use `CARGO_ENCODED_RUSTFLAGS` instead. * `CARGO_PKG_<var>` - The package information variables, with the same names and values as are [provided during crate building][variables set for crates]. [`env_logger`]: https://docs.rs/env_logger [debug logging]: https://doc.crates.io/contrib/architecture/console.html#debug-logging [unix-like platforms]: ../../reference/conditional-compilation.html#unix-and-windows [windows-like platforms]: ../../reference/conditional-compilation.html#unix-and-windows [target family]: ../../reference/conditional-compilation.html#target_family [target operating system]: ../../reference/conditional-compilation.html#target_os [target architecture]: ../../reference/conditional-compilation.html#target_arch [target vendor]: ../../reference/conditional-compilation.html#target_vendor [target environment]: ../../reference/conditional-compilation.html#target_env [pointer width]: ../../reference/conditional-compilation.html#target_pointer_width [target endianness]: ../../reference/conditional-compilation.html#target_endian [target features]: ../../reference/conditional-compilation.html#target_feature [links]: build-scripts.md#the-links-manifest-key [configuration]: ../../reference/conditional-compilation.html [jobserver]: https://www.gnu.org/software/make/manual/html_node/Job-Slots.html [cargo-config]: config.md [Target Triple]: ../appendix/glossary.md#target [variables set for crates]: #environment-variables-cargo-sets-for-crates [profile]: profiles.md [`dev`]: profiles.md#dev [`release`]: profiles.md#release ### Environment variables Cargo sets for 3rd party subcommands Cargo exposes this environment variable to 3rd party subcommands (ie. programs named `cargo-foobar` placed in `$PATH`): * `CARGO` β€” Path to the `cargo` binary performing the build. For extended information about your environment you may run `cargo metadata`. 
cargo-0.66.0/src/doc/src/reference/external-tools.md000066400000000000000000000244121432416201200222570ustar00rootroot00000000000000## External tools One of the goals of Cargo is simple integration with third-party tools, like IDEs and other build systems. To make integration easier, Cargo has several facilities: * a [`cargo metadata`] command, which outputs package structure and dependencies information in JSON, * a `--message-format` flag, which outputs information about a particular build, and * support for custom subcommands. ### Information about package structure You can use [`cargo metadata`] command to get information about package structure and dependencies. See the [`cargo metadata`] documentation for details on the format of the output. The format is stable and versioned. When calling `cargo metadata`, you should pass `--format-version` flag explicitly to avoid forward incompatibility hazard. If you are using Rust, the [cargo_metadata] crate can be used to parse the output. [cargo_metadata]: https://crates.io/crates/cargo_metadata [`cargo metadata`]: ../commands/cargo-metadata.md ### JSON messages When passing `--message-format=json`, Cargo will output the following information during the build: * compiler errors and warnings, * produced artifacts, * results of the build scripts (for example, native dependencies). The output goes to stdout in the JSON object per line format. The `reason` field distinguishes different kinds of messages. The `--message-format` option can also take additional formatting values which alter the way the JSON messages are computed and rendered. See the description of the `--message-format` option in the [build command documentation] for more details. If you are using Rust, the [cargo_metadata] crate can be used to parse these messages. 
[build command documentation]: ../commands/cargo-build.md [cargo_metadata]: https://crates.io/crates/cargo_metadata #### Compiler messages The "compiler-message" message includes output from the compiler, such as warnings and errors. See the [rustc JSON chapter](../../rustc/json.md) for details on `rustc`'s message format, which is embedded in the following structure: ```javascript { /* The "reason" indicates the kind of message. */ "reason": "compiler-message", /* The Package ID, a unique identifier for referring to the package. */ "package_id": "my-package 0.1.0 (path+file:///path/to/my-package)", /* Absolute path to the package manifest. */ "manifest_path": "/path/to/my-package/Cargo.toml", /* The Cargo target (lib, bin, example, etc.) that generated the message. */ "target": { /* Array of target kinds. - lib targets list the `crate-type` values from the manifest such as "lib", "rlib", "dylib", "proc-macro", etc. (default ["lib"]) - binary is ["bin"] - example is ["example"] - integration test is ["test"] - benchmark is ["bench"] - build script is ["custom-build"] */ "kind": [ "lib" ], /* Array of crate types. - lib and example libraries list the `crate-type` values from the manifest such as "lib", "rlib", "dylib", "proc-macro", etc. (default ["lib"]) - all other target kinds are ["bin"] */ "crate_types": [ "lib" ], /* The name of the target. */ "name": "my-package", /* Absolute path to the root source file of the target. */ "src_path": "/path/to/my-package/src/lib.rs", /* The Rust edition of the target. Defaults to the package edition. */ "edition": "2018", /* Array of required features. This property is not included if no required features are set. */ "required-features": ["feat1"], /* Whether or not this target has doc tests enabled, and the target is compatible with doc testing. */ "doctest": true }, /* The message emitted by the compiler. See https://doc.rust-lang.org/rustc/json.html for details. */ "message": { /* ... 
*/ } } ``` #### Artifact messages For every compilation step, a "compiler-artifact" message is emitted with the following structure: ```javascript { /* The "reason" indicates the kind of message. */ "reason": "compiler-artifact", /* The Package ID, a unique identifier for referring to the package. */ "package_id": "my-package 0.1.0 (path+file:///path/to/my-package)", /* Absolute path to the package manifest. */ "manifest_path": "/path/to/my-package/Cargo.toml", /* The Cargo target (lib, bin, example, etc.) that generated the artifacts. See the definition above for `compiler-message` for details. */ "target": { "kind": [ "lib" ], "crate_types": [ "lib" ], "name": "my-package", "src_path": "/path/to/my-package/src/lib.rs", "edition": "2018", "doctest": true, "test": true }, /* The profile indicates which compiler settings were used. */ "profile": { /* The optimization level. */ "opt_level": "0", /* The debug level, an integer of 0, 1, or 2. If `null`, it implies rustc's default of 0. */ "debuginfo": 2, /* Whether or not debug assertions are enabled. */ "debug_assertions": true, /* Whether or not overflow checks are enabled. */ "overflow_checks": true, /* Whether or not the `--test` flag is used. */ "test": false }, /* Array of features enabled. */ "features": ["feat1", "feat2"], /* Array of files generated by this step. */ "filenames": [ "/path/to/my-package/target/debug/libmy_package.rlib", "/path/to/my-package/target/debug/deps/libmy_package-be9f3faac0a26ef0.rmeta" ], /* A string of the path to the executable that was created, or null if this step did not generate an executable. */ "executable": null, /* Whether or not this step was actually executed. When `true`, this means that the pre-existing artifacts were up-to-date, and `rustc` was not executed. When `false`, this means that `rustc` was run to generate the artifacts. */ "fresh": true } ``` #### Build script output The "build-script-executed" message includes the parsed output of a build script. 
Note that this is emitted even if the build script is not run; it will display the previously cached value. More details about build script output may be found in [the chapter on build scripts](build-scripts.md). ```javascript { /* The "reason" indicates the kind of message. */ "reason": "build-script-executed", /* The Package ID, a unique identifier for referring to the package. */ "package_id": "my-package 0.1.0 (path+file:///path/to/my-package)", /* Array of libraries to link, as indicated by the `cargo:rustc-link-lib` instruction. Note that this may include a "KIND=" prefix in the string where KIND is the library kind. */ "linked_libs": ["foo", "static=bar"], /* Array of paths to include in the library search path, as indicated by the `cargo:rustc-link-search` instruction. Note that this may include a "KIND=" prefix in the string where KIND is the library kind. */ "linked_paths": ["/some/path", "native=/another/path"], /* Array of cfg values to enable, as indicated by the `cargo:rustc-cfg` instruction. */ "cfgs": ["cfg1", "cfg2=\"string\""], /* Array of [KEY, VALUE] arrays of environment variables to set, as indicated by the `cargo:rustc-env` instruction. */ "env": [ ["SOME_KEY", "some value"], ["ANOTHER_KEY", "another value"] ], /* An absolute path which is used as a value of `OUT_DIR` environmental variable when compiling current package. */ "out_dir": "/some/path/in/target/dir" } ``` #### Build finished The "build-finished" message is emitted at the end of the build. ```javascript { /* The "reason" indicates the kind of message. */ "reason": "build-finished", /* Whether or not the build finished successfully. */ "success": true, } ```` This message can be helpful for tools to know when to stop reading JSON messages. Commands such as `cargo test` or `cargo run` can produce additional output after the build has finished. 
This message lets a tool know that Cargo will not produce additional JSON messages, but there may be additional output that may be generated afterwards (such as the output generated by the program executed by `cargo run`). > Note: There is experimental nightly-only support for JSON output for tests, > so additional test-specific JSON messages may begin arriving after the > "build-finished" message if that is enabled. ### Custom subcommands Cargo is designed to be extensible with new subcommands without having to modify Cargo itself. This is achieved by translating a cargo invocation of the form cargo `(?<command>[^ ]+)` into an invocation of an external tool `cargo-${command}`. The external tool must be present in one of the user's `$PATH` directories. When Cargo invokes a custom subcommand, the first argument to the subcommand will be the filename of the custom subcommand, as usual. The second argument will be the subcommand name itself. For example, the second argument would be `${command}` when invoking `cargo-${command}`. Any additional arguments on the command line will be forwarded unchanged. Cargo can also display the help output of a custom subcommand with `cargo help ${command}`. Cargo assumes that the subcommand will print a help message if its third argument is `--help`. So, `cargo help ${command}` would invoke `cargo-${command} ${command} --help`. Custom subcommands may use the `CARGO` environment variable to call back to Cargo. Alternatively, it can link to `cargo` crate as a library, but this approach has drawbacks: * Cargo as a library is unstable: the API may change without deprecation * versions of the linked Cargo library may be different from the Cargo binary Instead, it is encouraged to use the CLI interface to drive Cargo. The [`cargo metadata`] command can be used to obtain information about the current project (the [`cargo_metadata`] crate provides a Rust interface to this command). 
[`cargo metadata`]: ../commands/cargo-metadata.md [`cargo_metadata`]: https://crates.io/crates/cargo_metadata cargo-0.66.0/src/doc/src/reference/features-examples.md000066400000000000000000000237441432416201200227400ustar00rootroot00000000000000## Features Examples The following illustrates some real-world examples of features in action. ### Minimizing build times and file sizes Some packages use features so that if the features are not enabled, it reduces the size of the crate and reduces compile time. Some examples are: * [`syn`] is a popular crate for parsing Rust code. Since it is so popular, it is helpful to reduce compile times since it affects so many projects. It has a [clearly documented list][syn-features] of features which can be used to minimize the amount of code it contains. * [`regex`] has a [several features][regex-features] that are [well documented][regex-docs]. Cutting out Unicode support can reduce the resulting file size as it can remove some large tables. * [`winapi`] has [a large number][winapi-features] of features that limit which Windows API bindings it supports. * [`web-sys`] is another example similar to `winapi` that provides a [huge surface area][web-sys-features] of API bindings that are limited by using features. 
[`winapi`]: https://crates.io/crates/winapi [winapi-features]: https://github.com/retep998/winapi-rs/blob/0.3.9/Cargo.toml#L25-L431 [`regex`]: https://crates.io/crates/regex [`syn`]: https://crates.io/crates/syn [syn-features]: https://docs.rs/syn/1.0.54/syn/#optional-features [regex-features]: https://github.com/rust-lang/regex/blob/1.4.2/Cargo.toml#L33-L101 [regex-docs]: https://docs.rs/regex/1.4.2/regex/#crate-features [`web-sys`]: https://crates.io/crates/web-sys [web-sys-features]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/crates/web-sys/Cargo.toml#L32-L1395 ### Extending behavior The [`serde_json`] package has a [`preserve_order` feature][serde_json-preserve_order] which [changes the behavior][serde_json-code] of JSON maps to preserve the order that keys are inserted. Notice that it enables an optional dependency [`indexmap`] to implement the new behavior. When changing behavior like this, be careful to make sure the changes are [SemVer compatible]. That is, enabling the feature should not break code that usually builds with the feature off. [`serde_json`]: https://crates.io/crates/serde_json [serde_json-preserve_order]: https://github.com/serde-rs/json/blob/v1.0.60/Cargo.toml#L53-L56 [SemVer compatible]: features.md#semver-compatibility [serde_json-code]: https://github.com/serde-rs/json/blob/v1.0.60/src/map.rs#L23-L26 [`indexmap`]: https://crates.io/crates/indexmap ### `no_std` support Some packages want to support both [`no_std`] and `std` environments. This is useful for supporting embedded and resource-constrained platforms, but still allowing extended capabilities for platforms that support the full standard library. The [`wasm-bindgen`] package defines a [`std` feature][wasm-bindgen-std] that is [enabled by default][wasm-bindgen-default]. At the top of the library, it [unconditionally enables the `no_std` attribute][wasm-bindgen-no_std]. This ensures that `std` and the [`std` prelude] are not automatically in scope. 
Then, in various places in the code ([example1][wasm-bindgen-cfg1], [example2][wasm-bindgen-cfg2]), it uses `#[cfg(feature = "std")]` attributes to conditionally enable extra functionality that requires `std`. [`no_std`]: ../../reference/names/preludes.html#the-no_std-attribute [`wasm-bindgen`]: https://crates.io/crates/wasm-bindgen [`std` prelude]: ../../std/prelude/index.html [wasm-bindgen-std]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/Cargo.toml#L25 [wasm-bindgen-default]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/Cargo.toml#L23 [wasm-bindgen-no_std]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/src/lib.rs#L8 [wasm-bindgen-cfg1]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/src/lib.rs#L270-L273 [wasm-bindgen-cfg2]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/src/lib.rs#L67-L75 ### Re-exporting dependency features It can be convenient to re-export the features from a dependency. This allows the user depending on the crate to control those features without needing to specify those dependencies directly. For example, [`regex`] [re-exports the features][regex-re-export] from the [`regex_syntax`][regex_syntax-features] package. Users of `regex` don't need to know about the `regex_syntax` package, but they can still access the features it contains. [regex-re-export]: https://github.com/rust-lang/regex/blob/1.4.2/Cargo.toml#L65-L89 [regex_syntax-features]: https://github.com/rust-lang/regex/blob/1.4.2/regex-syntax/Cargo.toml#L17-L32 ### Vendoring of C libraries Some packages provide bindings to common C libraries (sometimes referred to as ["sys" crates][sys]). Sometimes these packages give you the choice to use the C library installed on the system, or to build it from source. For example, the [`openssl`] package has a [`vendored` feature][openssl-vendored] which enables the corresponding `vendored` feature of [`openssl-sys`]. 
The `openssl-sys` build script has some [conditional logic][openssl-sys-cfg] which causes it to build from a local copy of the OpenSSL source code instead of using the version from the system. The [`curl-sys`] package is another example where the [`static-curl` feature][curl-sys-static] causes it to build libcurl from source. Notice that it also has a [`force-system-lib-on-osx`][curl-sys-macos] feature which forces it [to use the system libcurl][curl-sys-macos-code], overriding the static-curl setting. [`openssl`]: https://crates.io/crates/openssl [`openssl-sys`]: https://crates.io/crates/openssl-sys [sys]: build-scripts.md#-sys-packages [openssl-vendored]: https://github.com/sfackler/rust-openssl/blob/openssl-v0.10.31/openssl/Cargo.toml#L19 [build script]: build-scripts.md [openssl-sys-cfg]: https://github.com/sfackler/rust-openssl/blob/openssl-v0.10.31/openssl-sys/build/main.rs#L47-L54 [`curl-sys`]: https://crates.io/crates/curl-sys [curl-sys-static]: https://github.com/alexcrichton/curl-rust/blob/0.4.34/curl-sys/Cargo.toml#L49 [curl-sys-macos]: https://github.com/alexcrichton/curl-rust/blob/0.4.34/curl-sys/Cargo.toml#L52 [curl-sys-macos-code]: https://github.com/alexcrichton/curl-rust/blob/0.4.34/curl-sys/build.rs#L15-L20 ### Feature precedence Some packages may have mutually-exclusive features. One option to handle this is to prefer one feature over another. The [`log`] package is an example. It has [several features][log-features] for choosing the maximum logging level at compile-time described [here][log-docs]. It uses [`cfg-if`] to [choose a precedence][log-cfg-if]. If multiple features are enabled, the higher "max" levels will be preferred over the lower levels. 
[`log`]: https://crates.io/crates/log [log-features]: https://github.com/rust-lang/log/blob/0.4.11/Cargo.toml#L29-L42 [log-docs]: https://docs.rs/log/0.4.11/log/#compile-time-filters [log-cfg-if]: https://github.com/rust-lang/log/blob/0.4.11/src/lib.rs#L1422-L1448 [`cfg-if`]: https://crates.io/crates/cfg-if ### Proc-macro companion package Some packages have a proc-macro that is intimately tied with it. However, not all users will need to use the proc-macro. By making the proc-macro an optional-dependency, this allows you to conveniently choose whether or not it is included. This is helpful, because sometimes the proc-macro version must stay in sync with the parent package, and you don't want to force the users to have to specify both dependencies and keep them in sync. An example is [`serde`] which has a [`derive`][serde-derive] feature which enables the [`serde_derive`] proc-macro. The `serde_derive` crate is very tightly tied to `serde`, so it uses an [equals version requirement][serde-equals] to ensure they stay in sync. [`serde`]: https://crates.io/crates/serde [`serde_derive`]: https://crates.io/crates/serde_derive [serde-derive]: https://github.com/serde-rs/serde/blob/v1.0.118/serde/Cargo.toml#L34-L35 [serde-equals]: https://github.com/serde-rs/serde/blob/v1.0.118/serde/Cargo.toml#L17 ### Nightly-only features Some packages want to experiment with APIs or language features that are only available on the Rust [nightly channel]. However, they may not want to require their users to also use the nightly channel. An example is [`wasm-bindgen`] which has a [`nightly` feature][wasm-bindgen-nightly] which enables an [extended API][wasm-bindgen-unsize] that uses the [`Unsize`] marker trait that is only available on the nightly channel at the time of this writing. Note that at the root of the crate it uses [`cfg_attr` to enable the nightly feature][wasm-bindgen-cfg_attr]. 
Keep in mind that the [`feature` attribute] is unrelated to Cargo features, and is used to opt-in to experimental language features. The [`simd_support` feature][rand-simd_support] of the [`rand`] package is another example, which relies on a dependency that only builds on the nightly channel. [`wasm-bindgen`]: https://crates.io/crates/wasm-bindgen [nightly channel]: ../../book/appendix-07-nightly-rust.html [wasm-bindgen-nightly]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/Cargo.toml#L27 [wasm-bindgen-unsize]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/src/closure.rs#L257-L269 [`Unsize`]: ../../std/marker/trait.Unsize.html [wasm-bindgen-cfg_attr]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/src/lib.rs#L11 [`feature` attribute]: ../../unstable-book/index.html [`rand`]: https://crates.io/crates/rand [rand-simd_support]: https://github.com/rust-random/rand/blob/0.7.3/Cargo.toml#L40 ### Experimental features Some packages have new functionality that they may want to experiment with, without having to commit to the stability of those APIs. The features are usually documented that they are experimental, and thus may change or break in the future, even during a minor release. An example is the [`async-std`] package, which has an [`unstable` feature][async-std-unstable], which [gates new APIs][async-std-gate] that people can opt-in to using, but may not be completely ready to be relied upon. [`async-std`]: https://crates.io/crates/async-std [async-std-unstable]: https://github.com/async-rs/async-std/blob/v1.8.0/Cargo.toml#L38-L42 [async-std-gate]: https://github.com/async-rs/async-std/blob/v1.8.0/src/macros.rs#L46 cargo-0.66.0/src/doc/src/reference/features.md000066400000000000000000000527611432416201200211250ustar00rootroot00000000000000## Features Cargo "features" provide a mechanism to express [conditional compilation] and [optional dependencies](#optional-dependencies). 
A package defines a set of named features in the `[features]` table of `Cargo.toml`, and each feature can either be enabled or disabled. Features for the package being built can be enabled on the command-line with flags such as `--features`. Features for dependencies can be enabled in the dependency declaration in `Cargo.toml`. See also the [Features Examples] chapter for some examples of how features can be used. [conditional compilation]: ../../reference/conditional-compilation.md [Features Examples]: features-examples.md ### The `[features]` section Features are defined in the `[features]` table in `Cargo.toml`. Each feature specifies an array of other features or optional dependencies that it enables. The following examples illustrate how features could be used for a 2D image processing library where support for different image formats can be optionally included: ```toml [features] # Defines a feature named `webp` that does not enable any other features. webp = [] ``` With this feature defined, [`cfg` expressions] can be used to conditionally include code to support the requested feature at compile time. For example, inside `lib.rs` of the package could include this: ```rust // This conditionally includes a module which implements WEBP support. #[cfg(feature = "webp")] pub mod webp; ``` Cargo sets features in the package using the `rustc` [`--cfg` flag], and code can test for their presence with the [`cfg` attribute] or the [`cfg` macro]. Features can list other features to enable. For example, the ICO image format can contain BMP and PNG images, so when it is enabled, it should make sure those other features are enabled, too: ```toml [features] bmp = [] png = [] ico = ["bmp", "png"] webp = [] ``` Feature names may include characters from the [Unicode XID standard] (which includes most letters), and additionally allows starting with `_` or digits `0` through `9`, and after the first character may also contain `-`, `+`, or `.`. 
> **Note**: [crates.io] imposes additional constraints on feature name syntax > that they must only be [ASCII alphanumeric] characters or `_`, `-`, or `+`. [crates.io]: https://crates.io/ [Unicode XID standard]: https://unicode.org/reports/tr31/ [ASCII alphanumeric]: ../../std/primitive.char.html#method.is_ascii_alphanumeric [`--cfg` flag]: ../../rustc/command-line-arguments.md#option-cfg [`cfg` expressions]: ../../reference/conditional-compilation.md [`cfg` attribute]: ../../reference/conditional-compilation.md#the-cfg-attribute [`cfg` macro]: ../../std/macro.cfg.html ### The `default` feature By default, all features are disabled unless explicitly enabled. This can be changed by specifying the `default` feature: ```toml [features] default = ["ico", "webp"] bmp = [] png = [] ico = ["bmp", "png"] webp = [] ``` When the package is built, the `default` feature is enabled which in turn enables the listed features. This behavior can be changed by: * The `--no-default-features` [command-line flag](#command-line-feature-options) disables the default features of the package. * The `default-features = false` option can be specified in a [dependency declaration](#dependency-features). > **Note**: Be careful about choosing the default feature set. The default > features are a convenience that make it easier to use a package without > forcing the user to carefully select which features to enable for common > use, but there are some drawbacks. Dependencies automatically enable default > features unless `default-features = false` is specified. This can make it > difficult to ensure that the default features are not enabled, especially > for a dependency that appears multiple times in the dependency graph. Every > package must ensure that `default-features = false` is specified to avoid > enabling them. 
> > Another issue is that it can be a [SemVer incompatible > change](#semver-compatibility) to remove a feature from the default set, so > you should be confident that you will keep those features. ### Optional dependencies Dependencies can be marked "optional", which means they will not be compiled by default. For example, let's say that our 2D image processing library uses an external package to handle GIF images. This can be expressed like this: ```toml [dependencies] gif = { version = "0.11.1", optional = true } ``` By default, this optional dependency implicitly defines a feature that looks like this: ```toml [features] gif = ["dep:gif"] ``` This means that this dependency will only be included if the `gif` feature is enabled. The same `cfg(feature = "gif")` syntax can be used in the code, and the dependency can be enabled just like any feature such as `--features gif` (see [Command-line feature options](#command-line-feature-options) below). In some cases, you may not want to expose a feature that has the same name as the optional dependency. For example, perhaps the optional dependency is an internal detail, or you want to group multiple optional dependencies together, or you just want to use a better name. If you specify the optional dependency with the `dep:` prefix anywhere in the `[features]` table, that disables the implicit feature. > **Note**: The `dep:` syntax is only available starting with Rust 1.60. > Previous versions can only use the implicit feature name. For example, let's say in order to support the AVIF image format, our library needs two other dependencies to be enabled: ```toml [dependencies] ravif = { version = "0.6.3", optional = true } rgb = { version = "0.8.25", optional = true } [features] avif = ["dep:ravif", "dep:rgb"] ``` In this example, the `avif` feature will enable the two listed dependencies. 
This also avoids creating the implicit `ravif` and `rgb` features, since we don't want users to enable those individually as they are internal details to our crate. > **Note**: Another way to optionally include a dependency is to use > [platform-specific dependencies]. Instead of using features, these are > conditional based on the target platform. [platform-specific dependencies]: specifying-dependencies.md#platform-specific-dependencies ### Dependency features Features of dependencies can be enabled within the dependency declaration. The `features` key indicates which features to enable: ```toml [dependencies] # Enables the `derive` feature of serde. serde = { version = "1.0.118", features = ["derive"] } ``` The [`default` features](#the-default-feature) can be disabled using `default-features = false`: ```toml [dependencies] flate2 = { version = "1.0.3", default-features = false, features = ["zlib"] } ``` > **Note**: This may not ensure the default features are disabled. If another > dependency includes `flate2` without specifying `default-features = false`, > then the default features will be enabled. See [feature > unification](#feature-unification) below for more details. Features of dependencies can also be enabled in the `[features]` table. The syntax is `"package-name/feature-name"`. For example: ```toml [dependencies] jpeg-decoder = { version = "0.1.20", default-features = false } [features] # Enables parallel processing support by enabling the "rayon" feature of jpeg-decoder. parallel = ["jpeg-decoder/rayon"] ``` The `"package-name/feature-name"` syntax will also enable `package-name` if it is an optional dependency. Often this is not what you want. You can add a `?` as in `"package-name?/feature-name"` which will only enable the given feature if something else enables the optional dependency. > **Note**: The `?` syntax is only available starting with Rust 1.60. 
For example, let's say we have added some serialization support to our library, and it requires enabling a corresponding feature in some optional dependencies. That can be done like this: ```toml [dependencies] serde = { version = "1.0.133", optional = true } rgb = { version = "0.8.25", optional = true } [features] serde = ["dep:serde", "rgb?/serde"] ``` In this example, enabling the `serde` feature will enable the serde dependency. It will also enable the `serde` feature for the `rgb` dependency, but only if something else has enabled the `rgb` dependency. ### Command-line feature options The following command-line flags can be used to control which features are enabled: * `--features` _FEATURES_: Enables the listed features. Multiple features may be separated with commas or spaces. If using spaces, be sure to use quotes around all the features if running Cargo from a shell (such as `--features "foo bar"`). If building multiple packages in a [workspace], the `package-name/feature-name` syntax can be used to specify features for specific workspace members. * `--all-features`: Activates all features of all packages selected on the command-line. * `--no-default-features`: Does not activate the [`default` feature](#the-default-feature) of the selected packages. [workspace]: workspaces.md ### Feature unification Features are unique to the package that defines them. Enabling a feature on a package does not enable a feature of the same name on other packages. When a dependency is used by multiple packages, Cargo will use the union of all features enabled on that dependency when building it. This helps ensure that only a single copy of the dependency is used. See the [features section] of the resolver documentation for more details. For example, let's look at the [`winapi`] package which uses a [large number][winapi-features] of features. 
If your package depends on a package `foo` which enables the "fileapi" and "handleapi" features of `winapi`, and another dependency `bar` which enables the "std" and "winnt" features of `winapi`, then `winapi` will be built with all four of those features enabled. ![winapi features example](../images/winapi-features.svg) [`winapi`]: https://crates.io/crates/winapi [winapi-features]: https://github.com/retep998/winapi-rs/blob/0.3.9/Cargo.toml#L25-L431 A consequence of this is that features should be *additive*. That is, enabling a feature should not disable functionality, and it should usually be safe to enable any combination of features. A feature should not introduce a [SemVer-incompatible change](#semver-compatibility). For example, if you want to optionally support [`no_std`] environments, **do not** use a `no_std` feature. Instead, use a `std` feature that *enables* `std`. For example: ```rust #![no_std] #[cfg(feature = "std")] extern crate std; #[cfg(feature = "std")] pub fn function_that_requires_std() { // ... } ``` [`no_std`]: ../../reference/names/preludes.html#the-no_std-attribute [features section]: resolver.md#features #### Mutually exclusive features There are rare cases where features may be mutually incompatible with one another. This should be avoided if at all possible, because it requires coordinating all uses of the package in the dependency graph to cooperate to avoid enabling them together. If it is not possible, consider adding a compile error to detect this scenario. For example: ```rust,ignore #[cfg(all(feature = "foo", feature = "bar"))] compile_error!("feature \"foo\" and feature \"bar\" cannot be enabled at the same time"); ``` Instead of using mutually exclusive features, consider some other options: * Split the functionality into separate packages. * When there is a conflict, [choose one feature over another][feature-precedence]. The [`cfg-if`] package can help with writing more complex `cfg` expressions. 
* Architect the code to allow the features to be enabled concurrently, and use runtime options to control which is used. For example, use a config file, command-line argument, or environment variable to choose which behavior to enable. [`cfg-if`]: https://crates.io/crates/cfg-if [feature-precedence]: features-examples.md#feature-precedence #### Inspecting resolved features In complex dependency graphs, it can sometimes be difficult to understand how different features get enabled on various packages. The [`cargo tree`] command offers several options to help inspect and visualize which features are enabled. Some options to try: * `cargo tree -e features`: This will show features in the dependency graph. Each feature will appear showing which package enabled it. * `cargo tree -f "{p} {f}"`: This is a more compact view that shows a comma-separated list of features enabled on each package. * `cargo tree -e features -i foo`: This will invert the tree, showing how features flow into the given package "foo". This can be useful because viewing the entire graph can be quite large and overwhelming. Use this when you are trying to figure out which features are enabled on a specific package and why. See the example at the bottom of the [`cargo tree`] page on how to read this. [`cargo tree`]: ../commands/cargo-tree.md ### Feature resolver version 2 A different feature resolver can be specified with the `resolver` field in `Cargo.toml`, like this: ```toml [package] name = "my-package" version = "1.0.0" resolver = "2" ``` See the [resolver versions] section for more detail on specifying resolver versions. The version `"2"` resolver avoids unifying features in a few situations where that unification can be unwanted. The exact situations are described in the [resolver chapter][resolver-v2], but in short, it avoids unifying in these situations: * Features enabled on [platform-specific dependencies] for targets not currently being built are ignored. 
* [Build-dependencies] and proc-macros do not share features with normal dependencies. * [Dev-dependencies] do not activate features unless building a target that needs them (like tests or examples). Avoiding the unification is necessary for some situations. For example, if a build-dependency enables a `std` feature, and the same dependency is used as a normal dependency for a `no_std` environment, enabling `std` would break the build. However, one drawback is that this can increase build times because the dependency is built multiple times (each with different features). When using the version `"2"` resolver, it is recommended to check for dependencies that are built multiple times to reduce overall build time. If it is not *required* to build those duplicated packages with separate features, consider adding features to the `features` list in the [dependency declaration](#dependency-features) so that the duplicates end up with the same features (and thus Cargo will build it only once). You can detect these duplicate dependencies with the [`cargo tree --duplicates`][`cargo tree`] command. It will show which packages are built multiple times; look for any entries listed with the same version. See [Inspecting resolved features](#inspecting-resolved-features) for more on fetching information on the resolved features. For build dependencies, this is not necessary if you are cross-compiling with the `--target` flag because build dependencies are always built separately from normal dependencies in that scenario. #### Resolver version 2 command-line flags The `resolver = "2"` setting also changes the behavior of the `--features` and `--no-default-features` [command-line options](#command-line-feature-options). With version `"1"`, you can only enable features for the package in the current working directory. 
For example, in a workspace with packages `foo` and `bar`, and you are in the directory for package `foo`, and ran the command `cargo build -p bar --features bar-feat`, this would fail because the `--features` flag only allowed enabling features on `foo`. With `resolver = "2"`, the features flags allow enabling features for any of the packages selected on the command-line with `-p` and `--workspace` flags. For example: ```sh # This command is allowed with resolver = "2", regardless of which directory # you are in. cargo build -p foo -p bar --features foo-feat,bar-feat # This explicit equivalent works with any resolver version: cargo build -p foo -p bar --features foo/foo-feat,bar/bar-feat ``` Additionally, with `resolver = "1"`, the `--no-default-features` flag only disables the default feature for the package in the current directory. With version "2", it will disable the default features for all workspace members. [resolver versions]: resolver.md#resolver-versions [build-dependencies]: specifying-dependencies.md#build-dependencies [dev-dependencies]: specifying-dependencies.md#development-dependencies [resolver-v2]: resolver.md#feature-resolver-version-2 ### Build scripts [Build scripts] can detect which features are enabled on the package by inspecting the `CARGO_FEATURE_` environment variable, where `` is the feature name converted to uppercase and `-` converted to `_`. [build scripts]: build-scripts.md ### Required features The [`required-features` field] can be used to disable specific [Cargo targets] if a feature is not enabled. See the linked documentation for more details. [`required-features` field]: cargo-targets.md#the-required-features-field [Cargo targets]: cargo-targets.md ### SemVer compatibility Enabling a feature should not introduce a SemVer-incompatible change. For example, the feature shouldn't change an existing API in a way that could break existing uses. 
More details about what changes are compatible can be found in the [SemVer Compatibility chapter](semver.md). Care should be taken when adding and removing feature definitions and optional dependencies, as these can sometimes be backwards-incompatible changes. More details can be found in the [Cargo section](semver.md#cargo) of the SemVer Compatibility chapter. In short, follow these rules: * The following is usually safe to do in a minor release: * Add a [new feature][cargo-feature-add] or [optional dependency][cargo-dep-add]. * [Change the features used on a dependency][cargo-change-dep-feature]. * The following should usually **not** be done in a minor release: * [Remove a feature][cargo-feature-remove] or [optional dependency][cargo-remove-opt-dep]. * [Moving existing public code behind a feature][item-remove]. * [Remove a feature from a feature list][cargo-feature-remove-another]. See the links for caveats and examples. [cargo-change-dep-feature]: semver.md#cargo-change-dep-feature [cargo-dep-add]: semver.md#cargo-dep-add [cargo-feature-add]: semver.md#cargo-feature-add [item-remove]: semver.md#item-remove [cargo-feature-remove]: semver.md#cargo-feature-remove [cargo-remove-opt-dep]: semver.md#cargo-remove-opt-dep [cargo-feature-remove-another]: semver.md#cargo-feature-remove-another ### Feature documentation and discovery You are encouraged to document which features are available in your package. This can be done by adding [doc comments] at the top of `lib.rs`. As an example, see the [regex crate source], which when rendered can be viewed on [docs.rs][regex-docs-rs]. If you have other documentation, such as a user guide, consider adding the documentation there (for example, see [serde.rs]). If you have a binary project, consider documenting the features in the README or other documentation for the project (for example, see [sccache]). 
Clearly documenting the features can set expectations about features that are considered "unstable" or otherwise shouldn't be used. For example, if there is an optional dependency, but you don't want users to explicitly list that optional dependency as a feature, exclude it from the documented list. Documentation published on [docs.rs] can use metadata in `Cargo.toml` to control which features are enabled when the documentation is built. See [docs.rs metadata documentation] for more details. > **Note**: Rustdoc has experimental support for annotating the documentation > to indicate which features are required to use certain APIs. See the > [`doc_cfg`] documentation for more details. An example is the [`syn` > documentation], where you can see colored boxes which note which features > are required to use it. [docs.rs metadata documentation]: https://docs.rs/about/metadata [docs.rs]: https://docs.rs/ [serde.rs]: https://serde.rs/feature-flags.html [doc comments]: ../../rustdoc/how-to-write-documentation.html [regex crate source]: https://github.com/rust-lang/regex/blob/1.4.2/src/lib.rs#L488-L583 [regex-docs-rs]: https://docs.rs/regex/1.4.2/regex/#crate-features [sccache]: https://github.com/mozilla/sccache/blob/0.2.13/README.md#build-requirements [`doc_cfg`]: ../../unstable-book/language-features/doc-cfg.html [`syn` documentation]: https://docs.rs/syn/1.0.54/syn/#modules #### Discovering features When features are documented in the library API, this can make it easier for your users to discover which features are available and what they do. If the feature documentation for a package isn't readily available, you can look at the `Cargo.toml` file, but sometimes it can be hard to track it down. The crate page on [crates.io] has a link to the source repository if available. Tools like [`cargo vendor`] or [cargo-clone-crate] can be used to download the source and inspect it. 
[`cargo vendor`]: ../commands/cargo-vendor.md [cargo-clone-crate]: https://crates.io/crates/cargo-clone-crate ### Feature combinations Because features are a form of conditional compilation, they require an exponential number of configurations and test cases to be 100% covered. By default, tests, docs, and other tooling such as [Clippy](https://github.com/rust-lang/rust-clippy) will only run with the default set of features. We encourage you to consider your strategy and tooling in regards to different feature combinations - Every project will have different requirements in conjunction with time, resources, and the cost-benefit of covering specific scenarios. Common configurations may be with / without default features, specific combinations of features, or all combinations of features. cargo-0.66.0/src/doc/src/reference/future-incompat-report.md000066400000000000000000000027471432416201200237410ustar00rootroot00000000000000### Future incompat report Cargo checks for future-incompatible warnings in all dependencies. These are warnings for changes that may become hard errors in the future, causing the dependency to stop building in a future version of rustc. If any warnings are found, a small notice is displayed indicating that the warnings were found, and provides instructions on how to display a full report. For example, you may see something like this at the end of a build: ```text warning: the following packages contain code that will be rejected by a future version of Rust: rental v0.5.5 note: to see what the problems were, use the option `--future-incompat-report`, or run `cargo report future-incompatibilities --id 1` ``` A full report can be displayed with the `cargo report future-incompatibilities --id ID` command, or by running the build again with the `--future-incompat-report` flag. The developer should then update their dependencies to a version where the issue is fixed, or work with the developers of the dependencies to help resolve the issue. 
## Configuration This feature can be configured through a [`[future-incompat-report]`][config] section in `.cargo/config.toml`. Currently, the supported options are: ```toml [future-incompat-report] frequency = "always" ``` The supported values for the frequency are `"always"` and `"never"`, which control whether or not a message is printed out at the end of `cargo build` / `cargo check`. [config]: config.md#future-incompat-report cargo-0.66.0/src/doc/src/reference/index.md000066400000000000000000000017271432416201200204120ustar00rootroot00000000000000## Cargo Reference The reference covers the details of various areas of Cargo. * [Specifying Dependencies](specifying-dependencies.md) * [Overriding Dependencies](overriding-dependencies.md) * [The Manifest Format](manifest.md) * [Cargo Targets](cargo-targets.md) * [Workspaces](workspaces.md) * [Features](features.md) * [Features Examples](features-examples.md) * [Profiles](profiles.md) * [Configuration](config.md) * [Environment Variables](environment-variables.md) * [Build Scripts](build-scripts.md) * [Build Script Examples](build-script-examples.md) * [Publishing on crates.io](publishing.md) * [Package ID Specifications](pkgid-spec.md) * [Source Replacement](source-replacement.md) * [External Tools](external-tools.md) * [Registries](registries.md) * [Dependency Resolution](resolver.md) * [SemVer Compatibility](semver.md) * [Future incompat report](future-incompat-report.md) * [Reporting build timings](timings.md) * [Unstable Features](unstable.md) cargo-0.66.0/src/doc/src/reference/manifest.md000066400000000000000000000607701432416201200211140ustar00rootroot00000000000000## The Manifest Format The `Cargo.toml` file for each package is called its *manifest*. It is written in the [TOML] format. Every manifest file consists of the following sections: * [`cargo-features`](unstable.md) β€” Unstable, nightly-only features. * [`[package]`](#the-package-section) β€” Defines a package. 
* [`name`](#the-name-field) — The name of the package.
* [`version`](#the-version-field) — The version of the package.
* [`authors`](#the-authors-field) — The authors of the package.
* [`edition`](#the-edition-field) — The Rust edition.
* [`rust-version`](#the-rust-version-field) — The minimal supported Rust version.
* [`description`](#the-description-field) — A description of the package.
* [`documentation`](#the-documentation-field) — URL of the package documentation.
* [`readme`](#the-readme-field) — Path to the package's README file.
* [`homepage`](#the-homepage-field) — URL of the package homepage.
* [`repository`](#the-repository-field) — URL of the package source repository.
* [`license`](#the-license-and-license-file-fields) — The package license.
* [`license-file`](#the-license-and-license-file-fields) — Path to the text of the license.
* [`keywords`](#the-keywords-field) — Keywords for the package.
* [`categories`](#the-categories-field) — Categories of the package.
* [`workspace`](#the-workspace-field) — Path to the workspace for the package.
* [`build`](#the-build-field) — Path to the package build script.
* [`links`](#the-links-field) — Name of the native library the package links with.
* [`exclude`](#the-exclude-and-include-fields) — Files to exclude when publishing.
* [`include`](#the-exclude-and-include-fields) — Files to include when publishing.
* [`publish`](#the-publish-field) — Can be used to prevent publishing the package.
* [`metadata`](#the-metadata-table) — Extra settings for external tools.
* [`default-run`](#the-default-run-field) — The default binary to run by [`cargo run`].
* [`autobins`](cargo-targets.md#target-auto-discovery) — Disables binary auto discovery.
* [`autoexamples`](cargo-targets.md#target-auto-discovery) — Disables example auto discovery.
* [`autotests`](cargo-targets.md#target-auto-discovery) — Disables test auto discovery.
* [`autobenches`](cargo-targets.md#target-auto-discovery) — Disables bench auto discovery.
* [`resolver`](resolver.md#resolver-versions) — Sets the dependency resolver to use.
* Target tables: (see [configuration](cargo-targets.md#configuring-a-target) for settings)
* [`[lib]`](cargo-targets.md#library) — Library target settings.
* [`[[bin]]`](cargo-targets.md#binaries) — Binary target settings.
* [`[[example]]`](cargo-targets.md#examples) — Example target settings.
* [`[[test]]`](cargo-targets.md#tests) — Test target settings.
* [`[[bench]]`](cargo-targets.md#benchmarks) — Benchmark target settings.
* Dependency tables:
* [`[dependencies]`](specifying-dependencies.md) — Package library dependencies.
* [`[dev-dependencies]`](specifying-dependencies.md#development-dependencies) — Dependencies for examples, tests, and benchmarks.
* [`[build-dependencies]`](specifying-dependencies.md#build-dependencies) — Dependencies for build scripts.
* [`[target]`](specifying-dependencies.md#platform-specific-dependencies) — Platform-specific dependencies.
* [`[badges]`](#the-badges-section) — Badges to display on a registry.
* [`[features]`](features.md) — Conditional compilation features.
* [`[patch]`](overriding-dependencies.md#the-patch-section) — Override dependencies.
* [`[replace]`](overriding-dependencies.md#the-replace-section) — Override dependencies (deprecated).
* [`[profile]`](profiles.md) — Compiler settings and optimizations.
* [`[workspace]`](workspaces.md) — The workspace definition.

### The `[package]` section

The first section in a `Cargo.toml` is `[package]`.

```toml
[package]
name = "hello_world" # the name of the package
version = "0.1.0" # the current version, obeying semver
authors = ["Alice <a@example.com>", "Bob <b@example.com>"]
```

The only fields required by Cargo are [`name`](#the-name-field) and [`version`](#the-version-field). If publishing to a registry, the registry may require additional fields.
See the notes below and [the publishing chapter][publishing] for requirements for publishing to [crates.io].

#### The `name` field

The package name is an identifier used to refer to the package. It is used when listed as a dependency in another package, and as the default name of inferred lib and bin targets.

The name must use only [alphanumeric] characters or `-` or `_`, and cannot be empty.

Note that [`cargo new`] and [`cargo init`] impose some additional restrictions on the package name, such as enforcing that it is a valid Rust identifier and not a keyword. [crates.io] imposes even more restrictions, such as:

- Only ASCII characters are allowed.
- Do not use reserved names.
- Do not use special Windows names such as "nul".
- Use a maximum of 64 characters of length.

[alphanumeric]: ../../std/primitive.char.html#method.is_alphanumeric

#### The `version` field

Cargo bakes in the concept of [Semantic Versioning](https://semver.org/), so make sure you follow some basic rules:

* Before you reach 1.0.0, anything goes, but if you make breaking changes, increment the minor version. In Rust, breaking changes include adding fields to structs or variants to enums.
* After 1.0.0, only make breaking changes when you increment the major version. Don't break the build.
* After 1.0.0, don't add any new public API (no new `pub` anything) in patch-level versions. Always increment the minor version if you add any new `pub` structs, traits, fields, types, functions, methods or anything else.
* Use version numbers with three numeric parts such as 1.0.0 rather than 1.0.

See the [Resolver] chapter for more information on how Cargo uses versions to resolve dependencies, and for guidelines on setting your own version. See the [SemVer compatibility] chapter for more details on exactly what constitutes a breaking change.
[Resolver]: resolver.md
[SemVer compatibility]: semver.md

#### The `authors` field

The optional `authors` field lists people or organizations that are considered the "authors" of the package. The exact meaning is open to interpretation — it may list the original or primary authors, current maintainers, or owners of the package. An optional email address may be included within angled brackets at the end of each author entry. This field is only surfaced in package metadata and in the `CARGO_PKG_AUTHORS` environment variable within `build.rs`. It is not displayed in the [crates.io] user interface.

> **Warning**: Package manifests cannot be changed once published, so this
> field cannot be changed or removed in already-published versions of a
> package.

#### The `edition` field

The `edition` key is an optional key that affects which [Rust Edition] your package is compiled with. Setting the `edition` key in `[package]` will affect all targets/crates in the package, including test suites, benchmarks, binaries, examples, etc.

```toml
[package]
# ...
edition = '2021'
```

Most manifests have the `edition` field filled in automatically by [`cargo new`] with the latest stable edition. By default `cargo new` creates a manifest with the 2021 edition currently.

If the `edition` field is not present in `Cargo.toml`, then the 2015 edition is assumed for backwards compatibility. Note that all manifests created with [`cargo new`] will not use this historical fallback because they will have `edition` explicitly specified to a newer value.

#### The `rust-version` field

The `rust-version` field is an optional key that tells cargo what version of the Rust language and compiler your package can be compiled with. If the currently selected version of the Rust compiler is older than the stated version, cargo will exit with an error, telling the user what version is required.

The first version of Cargo that supports this field was released with Rust 1.56.0.
In older releases, the field will be ignored, and Cargo will display a warning. ```toml [package] # ... rust-version = "1.56" ``` The Rust version must be a bare version number with two or three components; it cannot include semver operators or pre-release identifiers. Compiler pre-release identifiers such as -nightly will be ignored while checking the Rust version. The `rust-version` must be equal to or newer than the version that first introduced the configured `edition`. The `rust-version` may be ignored using the `--ignore-rust-version` option. Setting the `rust-version` key in `[package]` will affect all targets/crates in the package, including test suites, benchmarks, binaries, examples, etc. #### The `description` field The description is a short blurb about the package. [crates.io] will display this with your package. This should be plain text (not Markdown). ```toml [package] # ... description = "A short description of my package" ``` > **Note**: [crates.io] requires the `description` to be set. #### The `documentation` field The `documentation` field specifies a URL to a website hosting the crate's documentation. If no URL is specified in the manifest file, [crates.io] will automatically link your crate to the corresponding [docs.rs] page. ```toml [package] # ... documentation = "https://docs.rs/bitflags" ``` #### The `readme` field The `readme` field should be the path to a file in the package root (relative to this `Cargo.toml`) that contains general information about the package. This file will be transferred to the registry when you publish. [crates.io] will interpret it as Markdown and render it on the crate's page. ```toml [package] # ... readme = "README.md" ``` If no value is specified for this field, and a file named `README.md`, `README.txt` or `README` exists in the package root, then the name of that file will be used. You can suppress this behavior by setting this field to `false`. 
If the field is set to `true`, a default value of `README.md` will be assumed. #### The `homepage` field The `homepage` field should be a URL to a site that is the home page for your package. ```toml [package] # ... homepage = "https://serde.rs/" ``` #### The `repository` field The `repository` field should be a URL to the source repository for your package. ```toml [package] # ... repository = "https://github.com/rust-lang/cargo/" ``` #### The `license` and `license-file` fields The `license` field contains the name of the software license that the package is released under. The `license-file` field contains the path to a file containing the text of the license (relative to this `Cargo.toml`). [crates.io] interprets the `license` field as an [SPDX 2.1 license expression][spdx-2.1-license-expressions]. The name must be a known license from the [SPDX license list 3.11][spdx-license-list-3.11]. Parentheses are not currently supported. See the [SPDX site] for more information. SPDX license expressions support AND and OR operators to combine multiple licenses.[^slash] ```toml [package] # ... license = "MIT OR Apache-2.0" ``` Using `OR` indicates the user may choose either license. Using `AND` indicates the user must comply with both licenses simultaneously. The `WITH` operator indicates a license with a special exception. Some examples: * `MIT OR Apache-2.0` * `LGPL-2.1-only AND MIT AND BSD-2-Clause` * `GPL-2.0-or-later WITH Bison-exception-2.2` If a package is using a nonstandard license, then the `license-file` field may be specified in lieu of the `license` field. ```toml [package] # ... license-file = "LICENSE.txt" ``` > **Note**: [crates.io] requires either `license` or `license-file` to be set. [^slash]: Previously multiple licenses could be separated with a `/`, but that usage is deprecated. #### The `keywords` field The `keywords` field is an array of strings that describe this package. 
This can help when searching for the package on a registry, and you may choose any words that would help someone find this crate. ```toml [package] # ... keywords = ["gamedev", "graphics"] ``` > **Note**: [crates.io] has a maximum of 5 keywords. Each keyword must be > ASCII text, start with a letter, and only contain letters, numbers, `_` or > `-`, and have at most 20 characters. #### The `categories` field The `categories` field is an array of strings of the categories this package belongs to. ```toml categories = ["command-line-utilities", "development-tools::cargo-plugins"] ``` > **Note**: [crates.io] has a maximum of 5 categories. Each category should > match one of the strings available at , and > must match exactly. #### The `workspace` field The `workspace` field can be used to configure the workspace that this package will be a member of. If not specified this will be inferred as the first Cargo.toml with `[workspace]` upwards in the filesystem. Setting this is useful if the member is not inside a subdirectory of the workspace root. ```toml [package] # ... workspace = "path/to/workspace/root" ``` This field cannot be specified if the manifest already has a `[workspace]` table defined. That is, a crate cannot both be a root crate in a workspace (contain `[workspace]`) and also be a member crate of another workspace (contain `package.workspace`). For more information, see the [workspaces chapter](workspaces.md). #### The `build` field The `build` field specifies a file in the package root which is a [build script] for building native code. More information can be found in the [build script guide][build script]. [build script]: build-scripts.md ```toml [package] # ... build = "build.rs" ``` The default is `"build.rs"`, which loads the script from a file named `build.rs` in the root of the package. Use `build = "custom_build_name.rs"` to specify a path to a different file or `build = false` to disable automatic detection of the build script. 
#### The `links` field The `links` field specifies the name of a native library that is being linked to. More information can be found in the [`links`][links] section of the build script guide. [links]: build-scripts.md#the-links-manifest-key For example, a crate that links a native library called "git2" (e.g. `libgit2.a` on Linux) may specify: ```toml [package] # ... links = "git2" ``` #### The `exclude` and `include` fields The `exclude` and `include` fields can be used to explicitly specify which files are included when packaging a project to be [published][publishing], and certain kinds of change tracking (described below). The patterns specified in the `exclude` field identify a set of files that are not included, and the patterns in `include` specify files that are explicitly included. You may run [`cargo package --list`][`cargo package`] to verify which files will be included in the package. ```toml [package] # ... exclude = ["/ci", "images/", ".*"] ``` ```toml [package] # ... include = ["/src", "COPYRIGHT", "/examples", "!/examples/big_example"] ``` The default if neither field is specified is to include all files from the root of the package, except for the exclusions listed below. If `include` is not specified, then the following files will be excluded: * If the package is not in a git repository, all "hidden" files starting with a dot will be skipped. * If the package is in a git repository, any files that are ignored by the [gitignore] rules of the repository and global git configuration will be skipped. Regardless of whether `exclude` or `include` is specified, the following files are always excluded: * Any sub-packages will be skipped (any subdirectory that contains a `Cargo.toml` file). * A directory named `target` in the root of the package will be skipped. The following files are always included: * The `Cargo.toml` file of the package itself is always included, it does not need to be listed in `include`. 
* A minimized `Cargo.lock` is automatically included if the package contains a binary or example target, see [`cargo package`] for more information. * If a [`license-file`](#the-license-and-license-file-fields) is specified, it is always included. The options are mutually exclusive; setting `include` will override an `exclude`. If you need to have exclusions to a set of `include` files, use the `!` operator described below. The patterns should be [gitignore]-style patterns. Briefly: - `foo` matches any file or directory with the name `foo` anywhere in the package. This is equivalent to the pattern `**/foo`. - `/foo` matches any file or directory with the name `foo` only in the root of the package. - `foo/` matches any *directory* with the name `foo` anywhere in the package. - Common glob patterns like `*`, `?`, and `[]` are supported: - `*` matches zero or more characters except `/`. For example, `*.html` matches any file or directory with the `.html` extension anywhere in the package. - `?` matches any character except `/`. For example, `foo?` matches `food`, but not `foo`. - `[]` allows for matching a range of characters. For example, `[ab]` matches either `a` or `b`. `[a-z]` matches letters a through z. - `**/` prefix matches in any directory. For example, `**/foo/bar` matches the file or directory `bar` anywhere that is directly under directory `foo`. - `/**` suffix matches everything inside. For example, `foo/**` matches all files inside directory `foo`, including all files in subdirectories below `foo`. - `/**/` matches zero or more directories. For example, `a/**/b` matches `a/b`, `a/x/b`, `a/x/y/b`, and so on. - `!` prefix negates a pattern. For example, a pattern of `src/*.rs` and `!foo.rs` would match all files with the `.rs` extension inside the `src` directory, except for any file named `foo.rs`. The include/exclude list is also used for change tracking in some situations. 
For targets built with `rustdoc`, it is used to determine the list of files to track to determine if the target should be rebuilt. If the package has a [build script] that does not emit any `rerun-if-*` directives, then the include/exclude list is used for tracking if the build script should be re-run if any of those files change. [gitignore]: https://git-scm.com/docs/gitignore #### The `publish` field The `publish` field can be used to prevent a package from being published to a package registry (like *crates.io*) by mistake, for instance to keep a package private in a company. ```toml [package] # ... publish = false ``` The value may also be an array of strings which are registry names that are allowed to be published to. ```toml [package] # ... publish = ["some-registry-name"] ``` If publish array contains a single registry, `cargo publish` command will use it when `--registry` flag is not specified. #### The `metadata` table Cargo by default will warn about unused keys in `Cargo.toml` to assist in detecting typos and such. The `package.metadata` table, however, is completely ignored by Cargo and will not be warned about. This section can be used for tools which would like to store package configuration in `Cargo.toml`. For example: ```toml [package] name = "..." # ... # Metadata used when generating an Android APK, for example. [package.metadata.android] package-name = "my-awesome-android-app" assets = "path/to/static" ``` There is a similar table at the workspace level at [`workspace.metadata`][workspace-metadata]. While cargo does not specify a format for the content of either of these tables, it is suggested that external tools may wish to use them in a consistent fashion, such as referring to the data in `workspace.metadata` if data is missing from `package.metadata`, if that makes sense for the tool in question. 
[workspace-metadata]: workspaces.md#the-workspacemetadata-table #### The `default-run` field The `default-run` field in the `[package]` section of the manifest can be used to specify a default binary picked by [`cargo run`]. For example, when there is both `src/bin/a.rs` and `src/bin/b.rs`: ```toml [package] default-run = "a" ``` ### The `[badges]` section The `[badges]` section is for specifying status badges that can be displayed on a registry website when the package is published. > Note: [crates.io] previously displayed badges next to a crate on its > website, but that functionality has been removed. Packages should place > badges in its README file which will be displayed on [crates.io] (see [the > `readme` field](#the-readme-field)). ```toml [badges] # The `maintenance` table indicates the status of the maintenance of # the crate. This may be used by a registry, but is currently not # used by crates.io. See https://github.com/rust-lang/crates.io/issues/2437 # and https://github.com/rust-lang/crates.io/issues/2438 for more details. # # The `status` field is required. Available options are: # - `actively-developed`: New features are being added and bugs are being fixed. # - `passively-maintained`: There are no plans for new features, but the maintainer intends to # respond to issues that get filed. # - `as-is`: The crate is feature complete, the maintainer does not intend to continue working on # it or providing support, but it works for the purposes it was designed for. # - `experimental`: The author wants to share it with the community but is not intending to meet # anyone's particular use case. # - `looking-for-maintainer`: The current maintainer would like to transfer the crate to someone # else. # - `deprecated`: The maintainer does not recommend using this crate (the description of the crate # can describe why, there could be a better solution available or there could be problems with # the crate that the author does not want to fix). 
# - `none`: Displays no badge on crates.io, since the maintainer has not chosen to specify # their intentions, potential crate users will need to investigate on their own. maintenance = { status = "..." } ``` ### Dependency sections See the [specifying dependencies page](specifying-dependencies.md) for information on the `[dependencies]`, `[dev-dependencies]`, `[build-dependencies]`, and target-specific `[target.*.dependencies]` sections. ### The `[profile.*]` sections The `[profile]` tables provide a way to customize compiler settings such as optimizations and debug settings. See [the Profiles chapter](profiles.md) for more detail. [`cargo init`]: ../commands/cargo-init.md [`cargo new`]: ../commands/cargo-new.md [`cargo package`]: ../commands/cargo-package.md [`cargo run`]: ../commands/cargo-run.md [crates.io]: https://crates.io/ [docs.rs]: https://docs.rs/ [publishing]: publishing.md [Rust Edition]: ../../edition-guide/index.html [spdx-2.1-license-expressions]: https://spdx.org/spdx-specification-21-web-version#h.jxpfx0ykyb60 [spdx-license-list-3.11]: https://github.com/spdx/license-list-data/tree/v3.11 [SPDX site]: https://spdx.org/license-list [TOML]: https://toml.io/ cargo-0.66.0/src/doc/src/reference/overriding-dependencies.md000066400000000000000000000333231432416201200240740ustar00rootroot00000000000000## Overriding Dependencies The desire to override a dependency can arise through a number of scenarios. Most of them, however, boil down to the ability to work with a crate before it's been published to [crates.io]. For example: * A crate you're working on is also used in a much larger application you're working on, and you'd like to test a bug fix to the library inside of the larger application. * An upstream crate you don't work on has a new feature or a bug fix on the master branch of its git repository which you'd like to test out. 
* You're about to publish a new major version of your crate, but you'd like to do integration testing across an entire package to ensure the new major version works. * You've submitted a fix to an upstream crate for a bug you found, but you'd like to immediately have your application start depending on the fixed version of the crate to avoid blocking on the bug fix getting merged. These scenarios can be solved with the [`[patch]` manifest section](#the-patch-section). This chapter walks through a few different use cases, and includes details on the different ways to override a dependency. * Example use cases * [Testing a bugfix](#testing-a-bugfix) * [Working with an unpublished minor version](#working-with-an-unpublished-minor-version) * [Overriding repository URL](#overriding-repository-url) * [Prepublishing a breaking change](#prepublishing-a-breaking-change) * [Using `[patch]` with multiple versions](#using-patch-with-multiple-versions) * Reference * [The `[patch]` section](#the-patch-section) * [The `[replace]` section](#the-replace-section) * [`paths` overrides](#paths-overrides) > **Note**: See also specifying a dependency with [multiple locations], which > can be used to override the source for a single dependency declaration in a > local package. ### Testing a bugfix Let's say you're working with the [`uuid` crate] but while you're working on it you discover a bug. You are, however, quite enterprising so you decide to also try to fix the bug! 
Originally your manifest will look like: [`uuid` crate]: https://crates.io/crates/uuid ```toml [package] name = "my-library" version = "0.1.0" [dependencies] uuid = "1.0" ``` First thing we'll do is to clone the [`uuid` repository][uuid-repository] locally via: ```console $ git clone https://github.com/uuid-rs/uuid ``` Next we'll edit the manifest of `my-library` to contain: ```toml [patch.crates-io] uuid = { path = "../path/to/uuid" } ``` Here we declare that we're *patching* the source `crates-io` with a new dependency. This will effectively add the local checked out version of `uuid` to the crates.io registry for our local package. Next up we need to ensure that our lock file is updated to use this new version of `uuid` so our package uses the locally checked out copy instead of one from crates.io. The way `[patch]` works is that it'll load the dependency at `../path/to/uuid` and then whenever crates.io is queried for versions of `uuid` it'll *also* return the local version. This means that the version number of the local checkout is significant and will affect whether the patch is used. Our manifest declared `uuid = "1.0"` which means we'll only resolve to `>= 1.0.0, < 2.0.0`, and Cargo's greedy resolution algorithm also means that we'll resolve to the maximum version within that range. Typically this doesn't matter as the version of the git repository will already be greater or match the maximum version published on crates.io, but it's important to keep this in mind! In any case, typically all you need to do now is: ```console $ cargo build Compiling uuid v1.0.0 (.../uuid) Compiling my-library v0.1.0 (.../my-library) Finished dev [unoptimized + debuginfo] target(s) in 0.32 secs ``` And that's it! You're now building with the local version of `uuid` (note the path in parentheses in the build output). 
If you don't see the local path version getting built then you may need to run `cargo update -p uuid --precise $version` where `$version` is the version of the locally checked out copy of `uuid`. Once you've fixed the bug you originally found the next thing you'll want to do is to likely submit that as a pull request to the `uuid` crate itself. Once you've done this then you can also update the `[patch]` section. The listing inside of `[patch]` is just like the `[dependencies]` section, so once your pull request is merged you could change your `path` dependency to: ```toml [patch.crates-io] uuid = { git = 'https://github.com/uuid-rs/uuid' } ``` [uuid-repository]: https://github.com/uuid-rs/uuid ### Working with an unpublished minor version Let's now shift gears a bit from bug fixes to adding features. While working on `my-library` you discover that a whole new feature is needed in the `uuid` crate. You've implemented this feature, tested it locally above with `[patch]`, and submitted a pull request. Let's go over how you continue to use and test it before it's actually published. Let's also say that the current version of `uuid` on crates.io is `1.0.0`, but since then the master branch of the git repository has updated to `1.0.1`. This branch includes your new feature you submitted previously. To use this repository we'll edit our `Cargo.toml` to look like ```toml [package] name = "my-library" version = "0.1.0" [dependencies] uuid = "1.0.1" [patch.crates-io] uuid = { git = 'https://github.com/uuid-rs/uuid' } ``` Note that our local dependency on `uuid` has been updated to `1.0.1` as it's what we'll actually require once the crate is published. This version doesn't exist on crates.io, though, so we provide it with the `[patch]` section of the manifest. Now when our library is built it'll fetch `uuid` from the git repository and resolve to 1.0.1 inside the repository instead of trying to download a version from crates.io. 
Once 1.0.1 is published on crates.io the `[patch]` section can be deleted. It's also worth noting that `[patch]` applies *transitively*. Let's say you use `my-library` in a larger package, such as: ```toml [package] name = "my-binary" version = "0.1.0" [dependencies] my-library = { git = 'https://example.com/git/my-library' } uuid = "1.0" [patch.crates-io] uuid = { git = 'https://github.com/uuid-rs/uuid' } ``` Remember that `[patch]` is applicable *transitively* but can only be defined at the *top level* so we consumers of `my-library` have to repeat the `[patch]` section if necessary. Here, though, the new `uuid` crate applies to *both* our dependency on `uuid` and the `my-library -> uuid` dependency. The `uuid` crate will be resolved to one version for this entire crate graph, 1.0.1, and it'll be pulled from the git repository. #### Overriding repository URL In case the dependency you want to override isn't loaded from `crates.io`, you'll have to change a bit how you use `[patch]`. For example, if the dependency is a git dependency, you can override it to a local path with: ```toml [patch."https://github.com/your/repository"] my-library = { path = "../my-library/path" } ``` And that's it! ### Prepublishing a breaking change Let's take a look at working with a new major version of a crate, typically accompanied with breaking changes. Sticking with our previous crates, this means that we're going to be creating version 2.0.0 of the `uuid` crate. After we've submitted all changes upstream we can update our manifest for `my-library` to look like: ```toml [dependencies] uuid = "2.0" [patch.crates-io] uuid = { git = "https://github.com/uuid-rs/uuid", branch = "2.0.0" } ``` And that's it! Like with the previous example the 2.0.0 version doesn't actually exist on crates.io but we can still put it in through a git dependency through the usage of the `[patch]` section. 
As a thought exercise let's take another look at the `my-binary` manifest from above again as well: ```toml [package] name = "my-binary" version = "0.1.0" [dependencies] my-library = { git = 'https://example.com/git/my-library' } uuid = "1.0" [patch.crates-io] uuid = { git = 'https://github.com/uuid-rs/uuid', branch = '2.0.0' } ``` Note that this will actually resolve to two versions of the `uuid` crate. The `my-binary` crate will continue to use the 1.x.y series of the `uuid` crate but the `my-library` crate will use the `2.0.0` version of `uuid`. This will allow you to gradually roll out breaking changes to a crate through a dependency graph without being forced to update everything all at once. ### Using `[patch]` with multiple versions You can patch in multiple versions of the same crate with the `package` key used to rename dependencies. For example let's say that the `serde` crate has a bugfix that we'd like to use to its `1.*` series but we'd also like to prototype using a `2.0.0` version of serde we have in our git repository. To configure this we'd do: ```toml [patch.crates-io] serde = { git = 'https://github.com/serde-rs/serde' } serde2 = { git = 'https://github.com/example/serde', package = 'serde', branch = 'v2' } ``` The first `serde = ...` directive indicates that serde `1.*` should be used from the git repository (pulling in the bugfix we need) and the second `serde2 = ...` directive indicates that the `serde` package should also be pulled from the `v2` branch of `https://github.com/example/serde`. We're assuming here that `Cargo.toml` on that branch mentions version `2.0.0`. Note that when using the `package` key the `serde2` identifier here is actually ignored. We simply need a unique name which doesn't conflict with other patched crates. ### The `[patch]` section The `[patch]` section of `Cargo.toml` can be used to override dependencies with other copies. 
The syntax is similar to the [`[dependencies]`][dependencies] section: ```toml [patch.crates-io] foo = { git = 'https://github.com/example/foo' } bar = { path = 'my/local/bar' } [dependencies.baz] git = 'https://github.com/example/baz' [patch.'https://github.com/example/baz'] baz = { git = 'https://github.com/example/patched-baz', branch = 'my-branch' } ``` > **Note**: The `[patch]` table can also be specified as a [configuration > option](config.md), such as in a `.cargo/config.toml` file or a CLI option > like `--config 'patch.crates-io.rand.path="rand"'`. This can be useful for > local-only changes that you don't want to commit, or temporarily testing a > patch. The `[patch]` table is made of dependency-like sub-tables. Each key after `[patch]` is a URL of the source that is being patched, or the name of a registry. The name `crates-io` may be used to override the default registry [crates.io]. The first `[patch]` in the example above demonstrates overriding [crates.io], and the second `[patch]` demonstrates overriding a git source. Each entry in these tables is a normal dependency specification, the same as found in the `[dependencies]` section of the manifest. The dependencies listed in the `[patch]` section are resolved and used to patch the source at the URL specified. The above manifest snippet patches the `crates-io` source (e.g. crates.io itself) with the `foo` crate and `bar` crate. It also patches the `https://github.com/example/baz` source with a `my-branch` that comes from elsewhere. Sources can be patched with versions of crates that do not exist, and they can also be patched with versions of crates that already exist. If a source is patched with a crate version that already exists in the source, then the source's original crate is replaced. ### The `[replace]` section > **Note**: `[replace]` is deprecated. You should use the > [`[patch]`](#the-patch-section) table instead. 
This section of Cargo.toml can be used to override dependencies with other copies. The syntax is similar to the `[dependencies]` section: ```toml [replace] "foo:0.1.0" = { git = 'https://github.com/example/foo' } "bar:1.0.2" = { path = 'my/local/bar' } ``` Each key in the `[replace]` table is a [package ID specification](pkgid-spec.md), which allows arbitrarily choosing a node in the dependency graph to override (the 3-part version number is required). The value of each key is the same as the `[dependencies]` syntax for specifying dependencies, except that you can't specify features. Note that when a crate is overridden the copy it's overridden with must have both the same name and version, but it can come from a different source (e.g., git or a local path). ### `paths` overrides Sometimes you're only temporarily working on a crate and you don't want to have to modify `Cargo.toml` like with the `[patch]` section above. For this use case Cargo offers a much more limited version of overrides called **path overrides**. Path overrides are specified through [`.cargo/config.toml`](config.md) instead of `Cargo.toml`. Inside of `.cargo/config.toml` you'll specify a key called `paths`: ```toml paths = ["/path/to/uuid"] ``` This array should be filled with directories that contain a `Cargo.toml`. In this instance, we’re just adding `uuid`, so it will be the only one that’s overridden. This path can be either absolute or relative to the directory that contains the `.cargo` folder. Path overrides are more restricted than the `[patch]` section, however, in that they cannot change the structure of the dependency graph. When a path replacement is used then the previous set of dependencies must all match exactly to the new `Cargo.toml` specification. For example this means that path overrides cannot be used to test out adding a dependency to a crate, instead `[patch]` must be used in that situation. 
As a result usage of a path override is typically isolated to quick bug fixes rather than larger changes. Note: using a local configuration to override paths will only work for crates that have been published to [crates.io]. You cannot use this feature to tell Cargo how to find local unpublished crates. [crates.io]: https://crates.io/ [multiple locations]: specifying-dependencies.md#multiple-locations [dependencies]: specifying-dependencies.md cargo-0.66.0/src/doc/src/reference/pkgid-spec.md000066400000000000000000000060731432416201200213300ustar00rootroot00000000000000## Package ID Specifications ### Package ID specifications Subcommands of Cargo frequently need to refer to a particular package within a dependency graph for various operations like updating, cleaning, building, etc. To solve this problem, Cargo supports *Package ID Specifications*. A specification is a string which is used to uniquely refer to one package within a graph of packages. The specification may be fully qualified, such as `https://github.com/rust-lang/crates.io-index#regex@1.4.3` or it may be abbreviated, such as `regex`. The abbreviated form may be used as long as it uniquely identifies a single package in the dependency graph. If there is ambiguity, additional qualifiers can be added to make it unique. For example, if there are two versions of the `regex` package in the graph, then it can be qualified with a version to make it unique, such as `regex@1.4.3`. #### Specification grammar The formal grammar for a Package Id Specification is: ```notrust spec := pkgname | proto "://" hostname-and-path [ "#" ( pkgname | semver ) ] pkgname := name [ ("@" | ":" ) semver ] proto := "http" | "git" | ... ``` Here, brackets indicate that the contents are optional. The URL form can be used for git dependencies, or to differentiate packages that come from different sources such as different registries. 
#### Example specifications The following are references to the `regex` package on `crates.io`: | Spec | Name | Version | |:------------------------------------------------------------|:-------:|:-------:| | `regex` | `regex` | `*` | | `regex@1.4.3` | `regex` | `1.4.3` | | `https://github.com/rust-lang/crates.io-index#regex` | `regex` | `*` | | `https://github.com/rust-lang/crates.io-index#regex@1.4.3` | `regex` | `1.4.3` | The following are some examples of specs for several different git dependencies: | Spec | Name | Version | |:----------------------------------------------------------|:----------------:|:--------:| | `https://github.com/rust-lang/cargo#0.52.0` | `cargo` | `0.52.0` | | `https://github.com/rust-lang/cargo#cargo-platform@0.1.2` | `cargo-platform` | `0.1.2` | | `ssh://git@github.com/rust-lang/regex.git#regex@1.4.3` | `regex` | `1.4.3` | Local packages on the filesystem can use `file://` URLs to reference them: | Spec | Name | Version | |:---------------------------------------|:-----:|:-------:| | `file:///path/to/my/project/foo` | `foo` | `*` | | `file:///path/to/my/project/foo#1.1.8` | `foo` | `1.1.8` | #### Brevity of specifications The goal of this is to enable both succinct and exhaustive syntaxes for referring to packages in a dependency graph. Ambiguous references may refer to one or more packages. Most commands generate an error if more than one package could be referred to with the same specification. cargo-0.66.0/src/doc/src/reference/profiles.md000066400000000000000000000401461432416201200211240ustar00rootroot00000000000000## Profiles Profiles provide a way to alter the compiler settings, influencing things like optimizations and debugging symbols. Cargo has 4 built-in profiles: `dev`, `release`, `test`, and `bench`. The profile is automatically chosen based on which command is being run if a profile is not specified on the command-line. In addition to the built-in profiles, custom user-defined profiles can also be specified. 
Profile settings can be changed in [`Cargo.toml`](manifest.md) with the `[profile]` table. Within each named profile, individual settings can be changed with key/value pairs like this: ```toml [profile.dev] opt-level = 1 # Use slightly better optimizations. overflow-checks = false # Disable integer overflow checks. ``` Cargo only looks at the profile settings in the `Cargo.toml` manifest at the root of the workspace. Profile settings defined in dependencies will be ignored. Additionally, profiles can be overridden from a [config] definition. Specifying a profile in a config file or environment variable will override the settings from `Cargo.toml`. [config]: config.md ### Profile settings The following is a list of settings that can be controlled in a profile. #### opt-level The `opt-level` setting controls the [`-C opt-level` flag] which controls the level of optimization. Higher optimization levels may produce faster runtime code at the expense of longer compiler times. Higher levels may also change and rearrange the compiled code which may make it harder to use with a debugger. The valid options are: * `0`: no optimizations * `1`: basic optimizations * `2`: some optimizations * `3`: all optimizations * `"s"`: optimize for binary size * `"z"`: optimize for binary size, but also turn off loop vectorization. It is recommended to experiment with different levels to find the right balance for your project. There may be surprising results, such as level `3` being slower than `2`, or the `"s"` and `"z"` levels not being necessarily smaller. You may also want to reevaluate your settings over time as newer versions of `rustc` changes optimization behavior. See also [Profile Guided Optimization] for more advanced optimization techniques. 
[`-C opt-level` flag]: ../../rustc/codegen-options/index.html#opt-level [Profile Guided Optimization]: ../../rustc/profile-guided-optimization.html #### debug The `debug` setting controls the [`-C debuginfo` flag] which controls the amount of debug information included in the compiled binary. The valid options are: * `0` or `false`: no debug info at all * `1`: line tables only * `2` or `true`: full debug info You may wish to also configure the [`split-debuginfo`](#split-debuginfo) option depending on your needs as well. [`-C debuginfo` flag]: ../../rustc/codegen-options/index.html#debuginfo #### split-debuginfo The `split-debuginfo` setting controls the [`-C split-debuginfo` flag] which controls whether debug information, if generated, is either placed in the executable itself or adjacent to it. This option is a string and acceptable values are the same as those the [compiler accepts][`-C split-debuginfo` flag]. The default value for this option is `unpacked` on macOS for profiles that have debug information otherwise enabled. Otherwise the default for this option is [documented with rustc][`-C split-debuginfo` flag] and is platform-specific. Some options are only available on the [nightly channel]. The Cargo default may change in the future once more testing has been performed, and support for DWARF is stabilized. [nightly channel]: ../../book/appendix-07-nightly-rust.html [`-C split-debuginfo` flag]: ../../rustc/codegen-options/index.html#split-debuginfo #### strip The `strip` option controls the [`-C strip` flag], which directs rustc to strip either symbols or debuginfo from a binary. This can be enabled like so: ```toml [package] # ... [profile.release] strip = "debuginfo" ``` Possible string values of `strip` are `"none"`, `"debuginfo"`, and `"symbols"`. The default is `"none"`. You can also configure this option with the boolean values `true` or `false`. `strip = true` is equivalent to `strip = "symbols"`. 
`strip = false` is equivalent to `strip = "none"` and disables `strip` completely. [`-C strip` flag]: ../../rustc/codegen-options/index.html#strip #### debug-assertions The `debug-assertions` setting controls the [`-C debug-assertions` flag] which turns `cfg(debug_assertions)` [conditional compilation] on or off. Debug assertions are intended to include runtime validation which is only available in debug/development builds. These may be things that are too expensive or otherwise undesirable in a release build. Debug assertions enables the [`debug_assert!` macro] in the standard library. The valid options are: * `true`: enabled * `false`: disabled [`-C debug-assertions` flag]: ../../rustc/codegen-options/index.html#debug-assertions [conditional compilation]: ../../reference/conditional-compilation.md#debug_assertions [`debug_assert!` macro]: ../../std/macro.debug_assert.html #### overflow-checks The `overflow-checks` setting controls the [`-C overflow-checks` flag] which controls the behavior of [runtime integer overflow]. When overflow-checks are enabled, a panic will occur on overflow. The valid options are: * `true`: enabled * `false`: disabled [`-C overflow-checks` flag]: ../../rustc/codegen-options/index.html#overflow-checks [runtime integer overflow]: ../../reference/expressions/operator-expr.md#overflow #### lto The `lto` setting controls the [`-C lto` flag] which controls LLVM's [link time optimizations]. LTO can produce better optimized code, using whole-program analysis, at the cost of longer linking time. The valid options are: * `false`: Performs "thin local LTO" which performs "thin" LTO on the local crate only across its [codegen units](#codegen-units). No LTO is performed if codegen units is 1 or [opt-level](#opt-level) is 0. * `true` or `"fat"`: Performs "fat" LTO which attempts to perform optimizations across all crates within the dependency graph. * `"thin"`: Performs ["thin" LTO]. 
This is similar to "fat", but takes substantially less time to run while still achieving performance gains similar to "fat". * `"off"`: Disables LTO. See also the [`-C linker-plugin-lto`] `rustc` flag for cross-language LTO. [`-C lto` flag]: ../../rustc/codegen-options/index.html#lto [link time optimizations]: https://llvm.org/docs/LinkTimeOptimization.html [`-C linker-plugin-lto`]: ../../rustc/codegen-options/index.html#linker-plugin-lto ["thin" LTO]: http://blog.llvm.org/2016/06/thinlto-scalable-and-incremental-lto.html #### panic The `panic` setting controls the [`-C panic` flag] which controls which panic strategy to use. The valid options are: * `"unwind"`: Unwind the stack upon panic. * `"abort"`: Terminate the process upon panic. When set to `"unwind"`, the actual value depends on the default of the target platform. For example, the NVPTX platform does not support unwinding, so it always uses `"abort"`. Tests, benchmarks, build scripts, and proc macros ignore the `panic` setting. The `rustc` test harness currently requires `unwind` behavior. See the [`panic-abort-tests`] unstable flag which enables `abort` behavior. Additionally, when using the `abort` strategy and building a test, all of the dependencies will also be forced to build with the `unwind` strategy. [`-C panic` flag]: ../../rustc/codegen-options/index.html#panic [`panic-abort-tests`]: unstable.md#panic-abort-tests #### incremental The `incremental` setting controls the [`-C incremental` flag] which controls whether or not incremental compilation is enabled. Incremental compilation causes `rustc` to save additional information to disk which will be reused when recompiling the crate, improving re-compile times. The additional information is stored in the `target` directory. The valid options are: * `true`: enabled * `false`: disabled Incremental compilation is only used for workspace members and "path" dependencies. 
The incremental value can be overridden globally with the `CARGO_INCREMENTAL` [environment variable] or the [`build.incremental`] config variable. [`-C incremental` flag]: ../../rustc/codegen-options/index.html#incremental [environment variable]: environment-variables.md [`build.incremental`]: config.md#buildincremental #### codegen-units The `codegen-units` setting controls the [`-C codegen-units` flag] which controls how many "code generation units" a crate will be split into. More code generation units allows more of a crate to be processed in parallel possibly reducing compile time, but may produce slower code. This option takes an integer greater than 0. The default is 256 for [incremental](#incremental) builds, and 16 for non-incremental builds. [`-C codegen-units` flag]: ../../rustc/codegen-options/index.html#codegen-units #### rpath The `rpath` setting controls the [`-C rpath` flag] which controls whether or not [`rpath`] is enabled. [`-C rpath` flag]: ../../rustc/codegen-options/index.html#rpath [`rpath`]: https://en.wikipedia.org/wiki/Rpath ### Default profiles #### dev The `dev` profile is used for normal development and debugging. It is the default for build commands like [`cargo build`], and is used for `cargo install --debug`. The default settings for the `dev` profile are: ```toml [profile.dev] opt-level = 0 debug = true split-debuginfo = '...' # Platform-specific. debug-assertions = true overflow-checks = true lto = false panic = 'unwind' incremental = true codegen-units = 256 rpath = false ``` #### release The `release` profile is intended for optimized artifacts used for releases and in production. This profile is used when the `--release` flag is used, and is the default for [`cargo install`]. The default settings for the `release` profile are: ```toml [profile.release] opt-level = 3 debug = false split-debuginfo = '...' # Platform-specific. 
debug-assertions = false overflow-checks = false lto = false panic = 'unwind' incremental = false codegen-units = 16 rpath = false ``` #### test The `test` profile is the default profile used by [`cargo test`]. The `test` profile inherits the settings from the [`dev`](#dev) profile. #### bench The `bench` profile is the default profile used by [`cargo bench`]. The `bench` profile inherits the settings from the [`release`](#release) profile. #### Build Dependencies All profiles, by default, do not optimize build dependencies (build scripts, proc macros, and their dependencies). The default settings for build overrides are: ```toml [profile.dev.build-override] opt-level = 0 codegen-units = 256 [profile.release.build-override] opt-level = 0 codegen-units = 256 ``` Build dependencies otherwise inherit settings from the active profile in use, as described in [Profile selection](#profile-selection). ### Custom profiles In addition to the built-in profiles, additional custom profiles can be defined. These may be useful for setting up multiple workflows and build modes. When defining a custom profile, you must specify the `inherits` key to specify which profile the custom profile inherits settings from when the setting is not specified. For example, let's say you want to compare a normal release build with a release build with [LTO](#lto) optimizations, you can specify something like the following in `Cargo.toml`: ```toml [profile.release-lto] inherits = "release" lto = true ``` The `--profile` flag can then be used to choose this custom profile: ```console cargo build --profile release-lto ``` The output for each profile will be placed in a directory of the same name as the profile in the [`target` directory]. As in the example above, the output would go into the `target/release-lto` directory. 
[`target` directory]: ../guide/build-cache.md ### Profile selection The profile used depends on the command, the command-line flags like `--release` or `--profile`, and the package (in the case of [overrides](#overrides)). The default profile if none is specified is: | Command | Default Profile | |---------|-----------------| | [`cargo run`], [`cargo build`],
[`cargo check`], [`cargo rustc`] | [`dev` profile](#dev) |
| [`cargo test`] | [`test` profile](#test)
| [`cargo bench`] | [`bench` profile](#bench)
| [`cargo install`] | [`release` profile](#release)

You can switch to a different profile using the `--profile=NAME` option
which will use the given profile. The `--release` flag is equivalent to
`--profile=release`.

The selected profile applies to all Cargo targets, including
[library](./cargo-targets.md#library), [binary](./cargo-targets.md#binaries),
[example](./cargo-targets.md#examples), [test](./cargo-targets.md#tests), and
[benchmark](./cargo-targets.md#benchmarks).

The profile for specific packages can be specified with
[overrides](#overrides), described below.

[`cargo bench`]: ../commands/cargo-bench.md
[`cargo build`]: ../commands/cargo-build.md
[`cargo check`]: ../commands/cargo-check.md
[`cargo install`]: ../commands/cargo-install.md
[`cargo run`]: ../commands/cargo-run.md
[`cargo rustc`]: ../commands/cargo-rustc.md
[`cargo test`]: ../commands/cargo-test.md

### Overrides

Profile settings can be overridden for specific packages and build-time
crates. To override the settings for a specific package, use the `package`
table to change the settings for the named package:

```toml
# The `foo` package will use the -Copt-level=3 flag.
[profile.dev.package.foo]
opt-level = 3
```

The package name is actually a [Package ID Spec](pkgid-spec.md), so you can
target individual versions of a package with syntax such as
`[profile.dev.package."foo:2.1.0"]`.

To override the settings for all dependencies (but not any workspace member),
use the `"*"` package name:

```toml
# Set the default for dependencies.
[profile.dev.package."*"]
opt-level = 2
```

To override the settings for build scripts, proc macros, and their
dependencies, use the `build-override` table:

```toml
# Set the settings for build scripts and proc-macros.
[profile.dev.build-override] opt-level = 3 ``` > Note: When a dependency is both a normal dependency and a build dependency, > Cargo will try to only build it once when `--target` is not specified. When > using `build-override`, the dependency may need to be built twice, once as a > normal dependency and once with the overridden build settings. This may > increase initial build times. The precedence for which value is used is done in the following order (first match wins): 1. `[profile.dev.package.name]` β€” A named package. 2. `[profile.dev.package."*"]` β€” For any non-workspace member. 3. `[profile.dev.build-override]` β€” Only for build scripts, proc macros, and their dependencies. 4. `[profile.dev]` β€” Settings in `Cargo.toml`. 5. Default values built-in to Cargo. Overrides cannot specify the `panic`, `lto`, or `rpath` settings. #### Overrides and generics The location where generic code is instantiated will influence the optimization settings used for that generic code. This can cause subtle interactions when using profile overrides to change the optimization level of a specific crate. If you attempt to raise the optimization level of a dependency which defines generic functions, those generic functions may not be optimized when used in your local crate. This is because the code may be generated in the crate where it is instantiated, and thus may use the optimization settings of that crate. For example, [nalgebra] is a library which defines vectors and matrices making heavy use of generic parameters. If your local code defines concrete nalgebra types like `Vector4` and uses their methods, the corresponding nalgebra code will be instantiated and built within your crate. Thus, if you attempt to increase the optimization level of `nalgebra` using a profile override, it may not result in faster performance. Further complicating the issue, `rustc` has some optimizations where it will attempt to share monomorphized generics between crates. 
If the opt-level is 2 or 3, then a crate will not use monomorphized generics from other crates, nor will it export locally defined monomorphized items to be shared with other crates. When experimenting with optimizing dependencies for development, consider trying opt-level 1, which will apply some optimizations while still allowing monomorphized items to be shared. [nalgebra]: https://crates.io/crates/nalgebra cargo-0.66.0/src/doc/src/reference/publishing.md000066400000000000000000000257161432416201200214530ustar00rootroot00000000000000## Publishing on crates.io Once you've got a library that you'd like to share with the world, it's time to publish it on [crates.io]! Publishing a crate is when a specific version is uploaded to be hosted on [crates.io]. Take care when publishing a crate, because a publish is **permanent**. The version can never be overwritten, and the code cannot be deleted. There is no limit to the number of versions which can be published, however. ### Before your first publish First things first, you’ll need an account on [crates.io] to acquire an API token. To do so, [visit the home page][crates.io] and log in via a GitHub account (required for now). After this, visit your [Account Settings](https://crates.io/me) page and run the [`cargo login`] command. ```console $ cargo login ``` Then at the prompt put in the token specified. ```console please paste the API Token found on https://crates.io/me below abcdefghijklmnopqrstuvwxyz012345 ``` This command will inform Cargo of your API token and store it locally in your `~/.cargo/credentials.toml`. Note that this token is a **secret** and should not be shared with anyone else. If it leaks for any reason, you should revoke it immediately. ### Before publishing a new crate Keep in mind that crate names on [crates.io] are allocated on a first-come-first- serve basis. Once a crate name is taken, it cannot be used for another crate. 
Check out the [metadata you can specify](manifest.md) in `Cargo.toml` to ensure your crate can be discovered more easily! Before publishing, make sure you have filled out the following fields: - [`license` or `license-file`] - [`description`] - [`homepage`] - [`documentation`] - [`repository`] - [`readme`] It would also be a good idea to include some [`keywords`] and [`categories`], though they are not required. If you are publishing a library, you may also want to consult the [Rust API Guidelines]. #### Packaging a crate The next step is to package up your crate and upload it to [crates.io]. For this we’ll use the [`cargo publish`] subcommand. This command performs the following steps: 1. Perform some verification checks on your package. 2. Compress your source code into a `.crate` file. 3. Extract the `.crate` file into a temporary directory and verify that it compiles. 4. Upload the `.crate` file to [crates.io]. 5. The registry will perform some additional checks on the uploaded package before adding it. It is recommended that you first run `cargo publish --dry-run` (or [`cargo package`] which is equivalent) to ensure there aren't any warnings or errors before publishing. This will perform the first three steps listed above. ```console $ cargo publish --dry-run ``` You can inspect the generated `.crate` file in the `target/package` directory. [crates.io] currently has a 10MB size limit on the `.crate` file. You may want to check the size of the `.crate` file to ensure you didn't accidentally package up large assets that are not required to build your package, such as test data, website documentation, or code generation. 
You can check which files are included with the following command: ```console $ cargo package --list ``` Cargo will automatically ignore files ignored by your version control system when packaging, but if you want to specify an extra set of files to ignore you can use the [`exclude` key](manifest.md#the-exclude-and-include-fields) in the manifest: ```toml [package] # ... exclude = [ "public/assets/*", "videos/*", ] ``` If you’d rather explicitly list the files to include, Cargo also supports an `include` key, which if set, overrides the `exclude` key: ```toml [package] # ... include = [ "**/*.rs", "Cargo.toml", ] ``` ### Uploading the crate When you are ready to publish, use the [`cargo publish`] command to upload to [crates.io]: ```console $ cargo publish ``` And that’s it, you’ve now published your first crate! ### Publishing a new version of an existing crate In order to release a new version, change [the `version` value](manifest.md#the-version-field) specified in your `Cargo.toml` manifest. Keep in mind [the SemVer rules](semver.md) which provide guidelines on what is a compatible change. Then run [`cargo publish`] as described above to upload the new version. ### Managing a crates.io-based crate Management of crates is primarily done through the command line `cargo` tool rather than the [crates.io] web interface. For this, there are a few subcommands to manage a crate. #### `cargo yank` Occasions may arise where you publish a version of a crate that actually ends up being broken for one reason or another (syntax error, forgot to include a file, etc.). For situations such as this, Cargo supports a β€œyank” of a version of a crate. ```console $ cargo yank --version 1.0.1 $ cargo yank --version 1.0.1 --undo ``` A yank **does not** delete any code. This feature is not intended for deleting accidentally uploaded secrets, for example. If that happens, you must reset those secrets immediately. 
The semantics of a yanked version are that no new dependencies can be created against that version, but all existing dependencies continue to work. One of the major goals of [crates.io] is to act as a permanent archive of crates that does not change over time, and allowing deletion of a version would go against this goal. Essentially a yank means that all packages with a `Cargo.lock` will not break, while any future `Cargo.lock` files generated will not list the yanked version. #### `cargo owner` A crate is often developed by more than one person, or the primary maintainer may change over time! The owner of a crate is the only person allowed to publish new versions of the crate, but an owner may designate additional owners. ```console $ cargo owner --add github-handle $ cargo owner --remove github-handle $ cargo owner --add github:rust-lang:owners $ cargo owner --remove github:rust-lang:owners ``` The owner IDs given to these commands must be GitHub user names or GitHub teams. If a user name is given to `--add`, that user is invited as a β€œnamed” owner, with full rights to the crate. In addition to being able to publish or yank versions of the crate, they have the ability to add or remove owners, *including* the owner that made *them* an owner. Needless to say, you shouldn’t make people you don’t fully trust into a named owner. In order to become a named owner, a user must have logged into [crates.io] previously. If a team name is given to `--add`, that team is invited as a β€œteam” owner, with restricted right to the crate. While they have permission to publish or yank versions of the crate, they *do not* have the ability to add or remove owners. In addition to being more convenient for managing groups of owners, teams are just a bit more secure against owners becoming malicious. The syntax for teams is currently `github:org:team` (see examples above). In order to invite a team as an owner one must be a member of that team. 
No such restriction applies to removing a team as an owner.

### GitHub permissions

Team membership is not something GitHub provides simple public access to, and
it is likely for you to encounter the following message when working with
them:

> It looks like you don’t have permission to query a necessary property from
GitHub to complete this request. You may need to re-authenticate on
[crates.io] to grant permission to read GitHub org memberships.

This is basically a catch-all for β€œyou tried to query a team, and one of the
five levels of membership access control denied this”. That is not an
exaggeration. GitHub’s support for team access control is Enterprise Grade.

The most likely cause of this is simply that you last logged in before this
feature was added. We originally requested *no* permissions from GitHub when
authenticating users, because we didn’t actually ever use the user’s token for
anything other than logging them in. However to query team membership on your
behalf, we now require [the `read:org` scope][oauth-scopes].

You are free to deny us this scope, and everything that worked before teams
were introduced will keep working. However you will never be able to add a
team as an owner, or publish a crate as a team owner. If you ever attempt to
do this, you will get the error above. You may also see this error if you
ever try to publish a crate that you don’t own at all, but otherwise happens
to have a team.

If you ever change your mind, or just aren’t sure if [crates.io] has
sufficient permission, you can always go to <https://crates.io/me> and
re-authenticate, which will prompt you for permission if [crates.io] doesn’t
have all the scopes it would like to.

An additional barrier to querying GitHub is that the organization may be
actively denying third party access. To check this, you can go to:

```text
https://github.com/organizations/:org/settings/oauth_application_policy
```

where `:org` is the name of the organization (e.g., `rust-lang`).
You may see something like:

![Organization Access Control](../images/org-level-acl.png)

Where you may choose to explicitly remove [crates.io] from your organization’s
blacklist, or simply press the β€œRemove Restrictions” button to allow all third
party applications to access this data.

Alternatively, when [crates.io] requested the `read:org` scope, you could have
explicitly whitelisted [crates.io] querying the org in question by pressing
the β€œGrant Access” button next to its name:

![Authentication Access Control](../images/auth-level-acl.png)

#### Troubleshooting GitHub team access errors

When trying to add a GitHub team as crate owner, you may see an error like:

```text
error: failed to invite owners to crate <crate_name>: api errors (status 200 OK): could not find the github team org/repo
```

In that case, you should go to [the GitHub Application settings page] and
check if crates.io is listed in the `Authorized OAuth Apps` tab.
If it isn't, you should go to <https://crates.io/> and authorize it.
Then go back to the Application Settings page on GitHub, click on the
crates.io application in the list, and make sure you or your organization is
listed in the "Organization access" list with a green check mark. If there's
a button labeled `Grant` or `Request`, you should grant the access or request
the org owner to do so.
[Rust API Guidelines]: https://rust-lang.github.io/api-guidelines/ [`cargo login`]: ../commands/cargo-login.md [`cargo package`]: ../commands/cargo-package.md [`cargo publish`]: ../commands/cargo-publish.md [`categories`]: manifest.md#the-categories-field [`description`]: manifest.md#the-description-field [`documentation`]: manifest.md#the-documentation-field [`homepage`]: manifest.md#the-homepage-field [`keywords`]: manifest.md#the-keywords-field [`license` or `license-file`]: manifest.md#the-license-and-license-file-fields [`readme`]: manifest.md#the-readme-field [`repository`]: manifest.md#the-repository-field [crates.io]: https://crates.io/ [oauth-scopes]: https://developer.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/ [the GitHub Application settings page]: https://github.com/settings/applications cargo-0.66.0/src/doc/src/reference/registries.md000066400000000000000000000603001432416201200214530ustar00rootroot00000000000000## Registries Cargo installs crates and fetches dependencies from a "registry". The default registry is [crates.io]. A registry contains an "index" which contains a searchable list of available crates. A registry may also provide a web API to support publishing new crates directly from Cargo. > Note: If you are interested in mirroring or vendoring an existing registry, > take a look at [Source Replacement]. ### Using an Alternate Registry To use a registry other than [crates.io], the name and index URL of the registry must be added to a [`.cargo/config.toml` file][config]. The `registries` table has a key for each registry, for example: ```toml [registries] my-registry = { index = "https://my-intranet:8080/git/index" } ``` The `index` key should be a URL to a git repository with the registry's index. 
A crate can then depend on a crate from another registry by specifying the `registry` key and a value of the registry's name in that dependency's entry in `Cargo.toml`: ```toml # Sample Cargo.toml [package] name = "my-project" version = "0.1.0" [dependencies] other-crate = { version = "1.0", registry = "my-registry" } ``` As with most config values, the index may be specified with an environment variable instead of a config file. For example, setting the following environment variable will accomplish the same thing as defining a config file: ```ignore CARGO_REGISTRIES_MY_REGISTRY_INDEX=https://my-intranet:8080/git/index ``` > Note: [crates.io] does not accept packages that depend on crates from other > registries. ### Publishing to an Alternate Registry If the registry supports web API access, then packages can be published directly to the registry from Cargo. Several of Cargo's commands such as [`cargo publish`] take a `--registry` command-line flag to indicate which registry to use. For example, to publish the package in the current directory: 1. `cargo login --registry=my-registry` This only needs to be done once. You must enter the secret API token retrieved from the registry's website. Alternatively the token may be passed directly to the `publish` command with the `--token` command-line flag or an environment variable with the name of the registry such as `CARGO_REGISTRIES_MY_REGISTRY_TOKEN`. 2. `cargo publish --registry=my-registry` Instead of always passing the `--registry` command-line option, the default registry may be set in [`.cargo/config.toml`][config] with the `registry.default` key. Setting the `package.publish` key in the `Cargo.toml` manifest restricts which registries the package is allowed to be published to. This is useful to prevent accidentally publishing a closed-source package to [crates.io]. The value may be a list of registry names, for example: ```toml [package] # ... 
publish = ["my-registry"]
```

The `publish` value may also be `false` to restrict all publishing, which is
the same as an empty list.

The authentication information saved by [`cargo login`] is stored in the
`credentials.toml` file in the Cargo home directory (default `$HOME/.cargo`).
It has a separate table for each registry, for example:

```toml
[registries.my-registry]
token = "854DvwSlUwEHtIo3kWy6x7UCPKHfzCmy"
```

### Running a Registry

A minimal registry can be implemented by having a git repository that contains
an index, and a server that contains the compressed `.crate` files created by
[`cargo package`]. Users won't be able to use Cargo to publish to it, but this
may be sufficient for closed environments.

A full-featured registry that supports publishing will additionally need to
have a web API service that conforms to the API used by Cargo. The web API is
documented below.

Commercial and community projects are available for building and running a
registry. See <https://github.com/rust-lang/cargo/wiki/Third-party-registries>
for a list of what is available.

### Index Format

The following defines the format of the index. New features are occasionally
added, which are only understood starting with the version of Cargo that
introduced them. Older versions of Cargo may not be able to use packages that
make use of new features. However, the format for older packages should not
change, so older versions of Cargo should be able to use them.

The index is stored in a git repository so that Cargo can efficiently fetch
incremental updates to the index. In the root of the repository is a file
named `config.json` which contains JSON information used by Cargo for
accessing the registry. This is an example of what the [crates.io] config file
looks like:

```javascript
{
    "dl": "https://crates.io/api/v1/crates",
    "api": "https://crates.io"
}
```

The keys are:
- `dl`: This is the URL for downloading crates listed in the index.
The value may have the following markers which will be replaced with their corresponding value: - `{crate}`: The name of crate. - `{version}`: The crate version. - `{prefix}`: A directory prefix computed from the crate name. For example, a crate named `cargo` has a prefix of `ca/rg`. See below for details. - `{lowerprefix}`: Lowercase variant of `{prefix}`. - `{sha256-checksum}`: The crate's sha256 checksum. If none of the markers are present, then the value `/{crate}/{version}/download` is appended to the end. - `api`: This is the base URL for the web API. This key is optional, but if it is not specified, commands such as [`cargo publish`] will not work. The web API is described below. The download endpoint should send the `.crate` file for the requested package. Cargo supports https, http, and file URLs, HTTP redirects, HTTP1 and HTTP2. The exact specifics of TLS support depend on the platform that Cargo is running on, the version of Cargo, and how it was compiled. The rest of the index repository contains one file for each package, where the filename is the name of the package in lowercase. Each version of the package has a separate line in the file. The files are organized in a tier of directories: - Packages with 1 character names are placed in a directory named `1`. - Packages with 2 character names are placed in a directory named `2`. - Packages with 3 character names are placed in the directory `3/{first-character}` where `{first-character}` is the first character of the package name. - All other packages are stored in directories named `{first-two}/{second-two}` where the top directory is the first two characters of the package name, and the next subdirectory is the third and fourth characters of the package name. For example, `cargo` would be stored in a file named `ca/rg/cargo`. 
> Note: Although the index filenames are in lowercase, the fields that contain > package names in `Cargo.toml` and the index JSON data are case-sensitive and > may contain upper and lower case characters. The directory name above is calculated based on the package name converted to lowercase; it is represented by the marker `{lowerprefix}`. When the original package name is used without case conversion, the resulting directory name is represented by the marker `{prefix}`. For example, the package `MyCrate` would have a `{prefix}` of `My/Cr` and a `{lowerprefix}` of `my/cr`. In general, using `{prefix}` is recommended over `{lowerprefix}`, but there are pros and cons to each choice. Using `{prefix}` on case-insensitive filesystems results in (harmless-but-inelegant) directory aliasing. For example, `crate` and `CrateTwo` have `{prefix}` values of `cr/at` and `Cr/at`; these are distinct on Unix machines but alias to the same directory on Windows. Using directories with normalized case avoids aliasing, but on case-sensitive filesystems it's harder to support older versions of Cargo that lack `{prefix}`/`{lowerprefix}`. For example, nginx rewrite rules can easily construct `{prefix}` but can't perform case-conversion to construct `{lowerprefix}`. Registries should consider enforcing limitations on package names added to their index. Cargo itself allows names with any [alphanumeric], `-`, or `_` characters. [crates.io] imposes its own limitations, including the following: - Only allows ASCII characters. - Only alphanumeric, `-`, and `_` characters. - First character must be alphabetic. - Case-insensitive collision detection. - Prevent differences of `-` vs `_`. - Under a specific length (max 64). - Rejects reserved names, such as Windows special filenames like "nul". 
Registries should consider incorporating similar restrictions, and consider the security implications, such as [IDN homograph attacks](https://en.wikipedia.org/wiki/IDN_homograph_attack) and other concerns in [UTR36](https://www.unicode.org/reports/tr36/) and [UTS39](https://www.unicode.org/reports/tr39/). Each line in a package file contains a JSON object that describes a published version of the package. The following is a pretty-printed example with comments explaining the format of the entry. ```javascript { // The name of the package. // This must only contain alphanumeric, `-`, or `_` characters. "name": "foo", // The version of the package this row is describing. // This must be a valid version number according to the Semantic // Versioning 2.0.0 spec at https://semver.org/. "vers": "0.1.0", // Array of direct dependencies of the package. "deps": [ { // Name of the dependency. // If the dependency is renamed from the original package name, // this is the new name. The original package name is stored in // the `package` field. "name": "rand", // The SemVer requirement for this dependency. // This must be a valid version requirement defined at // https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html. "req": "^0.6", // Array of features (as strings) enabled for this dependency. "features": ["i128_support"], // Boolean of whether or not this is an optional dependency. "optional": false, // Boolean of whether or not default features are enabled. "default_features": true, // The target platform for the dependency. // null if not a target dependency. // Otherwise, a string such as "cfg(windows)". "target": null, // The dependency kind. // "dev", "build", or "normal". // Note: this is a required field, but a small number of entries // exist in the crates.io index with either a missing or null // `kind` field due to implementation bugs. "kind": "normal", // The URL of the index of the registry where this dependency is // from as a string. 
If not specified or null, it is assumed the // dependency is in the current registry. "registry": null, // If the dependency is renamed, this is a string of the actual // package name. If not specified or null, this dependency is not // renamed. "package": null, } ], // A SHA256 checksum of the `.crate` file. "cksum": "d867001db0e2b6e0496f9fac96930e2d42233ecd3ca0413e0753d4c7695d289c", // Set of features defined for the package. // Each feature maps to an array of features or dependencies it enables. "features": { "extras": ["rand/simd_support"] }, // Boolean of whether or not this version has been yanked. "yanked": false, // The `links` string value from the package's manifest, or null if not // specified. This field is optional and defaults to null. "links": null, // An unsigned 32-bit integer value indicating the schema version of this // entry. // // If this not specified, it should be interpreted as the default of 1. // // Cargo (starting with version 1.51) will ignore versions it does not // recognize. This provides a method to safely introduce changes to index // entries and allow older versions of cargo to ignore newer entries it // doesn't understand. Versions older than 1.51 ignore this field, and // thus may misinterpret the meaning of the index entry. // // The current values are: // // * 1: The schema as documented here, not including newer additions. // This is honored in Rust version 1.51 and newer. // * 2: The addition of the `features2` field. // This is honored in Rust version 1.60 and newer. "v": 2, // This optional field contains features with new, extended syntax. // Specifically, namespaced features (`dep:`) and weak dependencies // (`pkg?/feat`). // // This is separated from `features` because versions older than 1.19 // will fail to load due to not being able to parse the new syntax, even // with a `Cargo.lock` file. // // Cargo will merge any values listed here with the "features" field. 
// // If this field is included, the "v" field should be set to at least 2. // // Registries are not required to use this field for extended feature // syntax, they are allowed to include those in the "features" field. // Using this is only necessary if the registry wants to support cargo // versions older than 1.19, which in practice is only crates.io since // those older versions do not support other registries. "features2": { "serde": ["dep:serde", "chrono?/serde"] } } ``` The JSON objects should not be modified after they are added except for the `yanked` field whose value may change at any time. ### Web API A registry may host a web API at the location defined in `config.json` to support any of the actions listed below. Cargo includes the `Authorization` header for requests that require authentication. The header value is the API token. The server should respond with a 403 response code if the token is not valid. Users are expected to visit the registry's website to obtain a token, and Cargo can store the token using the [`cargo login`] command, or by passing the token on the command-line. Responses use the 200 response code for success. Errors should use an appropriate response code, such as 404. Failure responses should have a JSON object with the following structure: ```javascript { // Array of errors to display to the user. "errors": [ { // The error message as a string. "detail": "error message text" } ] } ``` If the response has this structure Cargo will display the detailed message to the user, even if the response code is 200. If the response code indicates an error and the content does not have this structure, Cargo will display to the user a message intended to help debugging the server error. A server returning an `errors` object allows a registry to provide a more detailed or user-centric error message. For backwards compatibility, servers should ignore any unexpected query parameters or JSON fields. 
If a JSON field is missing, it should be assumed to be null. The endpoints are versioned with the `v1` component of the path, and Cargo is responsible for handling backwards compatibility fallbacks should any be required in the future. Cargo sets the following headers for all requests: - `Content-Type`: `application/json` - `Accept`: `application/json` - `User-Agent`: The Cargo version such as `cargo 1.32.0 (8610973aa 2019-01-02)`. This may be modified by the user in a configuration value. Added in 1.29. #### Publish - Endpoint: `/api/v1/crates/new` - Method: PUT - Authorization: Included The publish endpoint is used to publish a new version of a crate. The server should validate the crate, make it available for download, and add it to the index. The body of the data sent by Cargo is: - 32-bit unsigned little-endian integer of the length of JSON data. - Metadata of the package as a JSON object. - 32-bit unsigned little-endian integer of the length of the `.crate` file. - The `.crate` file. The following is a commented example of the JSON object. Some notes of some restrictions imposed by [crates.io] are included only to illustrate some suggestions on types of validation that may be done, and should not be considered as an exhaustive list of restrictions [crates.io] imposes. ```javascript { // The name of the package. "name": "foo", // The version of the package being published. "vers": "0.1.0", // Array of direct dependencies of the package. "deps": [ { // Name of the dependency. // If the dependency is renamed from the original package name, // this is the original name. The new package name is stored in // the `explicit_name_in_toml` field. "name": "rand", // The semver requirement for this dependency. "version_req": "^0.6", // Array of features (as strings) enabled for this dependency. "features": ["i128_support"], // Boolean of whether or not this is an optional dependency. "optional": false, // Boolean of whether or not default features are enabled. 
"default_features": true, // The target platform for the dependency. // null if not a target dependency. // Otherwise, a string such as "cfg(windows)". "target": null, // The dependency kind. // "dev", "build", or "normal". "kind": "normal", // The URL of the index of the registry where this dependency is // from as a string. If not specified or null, it is assumed the // dependency is in the current registry. "registry": null, // If the dependency is renamed, this is a string of the new // package name. If not specified or null, this dependency is not // renamed. "explicit_name_in_toml": null, } ], // Set of features defined for the package. // Each feature maps to an array of features or dependencies it enables. // Cargo does not impose limitations on feature names, but crates.io // requires alphanumeric ASCII, `_` or `-` characters. "features": { "extras": ["rand/simd_support"] }, // List of strings of the authors. // May be empty. "authors": ["Alice "], // Description field from the manifest. // May be null. crates.io requires at least some content. "description": null, // String of the URL to the website for this package's documentation. // May be null. "documentation": null, // String of the URL to the website for this package's home page. // May be null. "homepage": null, // String of the content of the README file. // May be null. "readme": null, // String of a relative path to a README file in the crate. // May be null. "readme_file": null, // Array of strings of keywords for the package. "keywords": [], // Array of strings of categories for the package. "categories": [], // String of the license for the package. // May be null. crates.io requires either `license` or `license_file` to be set. "license": null, // String of a relative path to a license file in the crate. // May be null. "license_file": null, // String of the URL to the website for the source repository of this package. // May be null. 
"repository": null, // Optional object of "status" badges. Each value is an object of // arbitrary string to string mappings. // crates.io has special interpretation of the format of the badges. "badges": { "travis-ci": { "branch": "master", "repository": "rust-lang/cargo" } }, // The `links` string value from the package's manifest, or null if not // specified. This field is optional and defaults to null. "links": null } ``` A successful response includes the JSON object: ```javascript { // Optional object of warnings to display to the user. "warnings": { // Array of strings of categories that are invalid and ignored. "invalid_categories": [], // Array of strings of badge names that are invalid and ignored. "invalid_badges": [], // Array of strings of arbitrary warnings to display to the user. "other": [] } } ``` #### Yank - Endpoint: `/api/v1/crates/{crate_name}/{version}/yank` - Method: DELETE - Authorization: Included The yank endpoint will set the `yank` field of the given version of a crate to `true` in the index. A successful response includes the JSON object: ```javascript { // Indicates the delete succeeded, always true. "ok": true, } ``` #### Unyank - Endpoint: `/api/v1/crates/{crate_name}/{version}/unyank` - Method: PUT - Authorization: Included The unyank endpoint will set the `yank` field of the given version of a crate to `false` in the index. A successful response includes the JSON object: ```javascript { // Indicates the delete succeeded, always true. "ok": true, } ``` #### Owners Cargo does not have an inherent notion of users and owners, but it does provide the `owner` command to assist managing who has authorization to control a crate. It is up to the registry to decide exactly how users and owners are handled. See the [publishing documentation] for a description of how [crates.io] handles owners via GitHub users and teams. 
##### Owners: List - Endpoint: `/api/v1/crates/{crate_name}/owners` - Method: GET - Authorization: Included The owners endpoint returns a list of owners of the crate. A successful response includes the JSON object: ```javascript { // Array of owners of the crate. "users": [ { // Unique unsigned 32-bit integer of the owner. "id": 70, // The unique username of the owner. "login": "github:rust-lang:core", // Name of the owner. // This is optional and may be null. "name": "Core", } ] } ``` ##### Owners: Add - Endpoint: `/api/v1/crates/{crate_name}/owners` - Method: PUT - Authorization: Included A PUT request will send a request to the registry to add a new owner to a crate. It is up to the registry how to handle the request. For example, [crates.io] sends an invite to the user that they must accept before being added. The request should include the following JSON object: ```javascript { // Array of `login` strings of owners to add. "users": ["login_name"] } ``` A successful response includes the JSON object: ```javascript { // Indicates the add succeeded, always true. "ok": true, // A string to be displayed to the user. "msg": "user ehuss has been invited to be an owner of crate cargo" } ``` ##### Owners: Remove - Endpoint: `/api/v1/crates/{crate_name}/owners` - Method: DELETE - Authorization: Included A DELETE request will remove an owner from a crate. The request should include the following JSON object: ```javascript { // Array of `login` strings of owners to remove. "users": ["login_name"] } ``` A successful response includes the JSON object: ```javascript { // Indicates the remove succeeded, always true. "ok": true } ``` #### Search - Endpoint: `/api/v1/crates` - Method: GET - Query Parameters: - `q`: The search query string. - `per_page`: Number of results, default 10, max 100. The search request will perform a search for crates, using criteria defined on the server. A successful response includes the JSON object: ```javascript { // Array of results. 
"crates": [ { // Name of the crate. "name": "rand", // The highest version available. "max_version": "0.6.1", // Textual description of the crate. "description": "Random number generators and other randomness functionality.\n", } ], "meta": { // Total number of results available on the server. "total": 119 } } ``` #### Login - Endpoint: `/me` The "login" endpoint is not an actual API request. It exists solely for the [`cargo login`] command to display a URL to instruct a user to visit in a web browser to log in and retrieve an API token. [Source Replacement]: source-replacement.md [`cargo login`]: ../commands/cargo-login.md [`cargo package`]: ../commands/cargo-package.md [`cargo publish`]: ../commands/cargo-publish.md [alphanumeric]: ../../std/primitive.char.html#method.is_alphanumeric [config]: config.md [crates.io]: https://crates.io/ [publishing documentation]: publishing.md#cargo-owner cargo-0.66.0/src/doc/src/reference/resolver.md000066400000000000000000000602121432416201200211360ustar00rootroot00000000000000# Dependency Resolution One of Cargo's primary tasks is to determine the versions of dependencies to use based on the version requirements specified in each package. This process is called "dependency resolution" and is performed by the "resolver". The result of the resolution is stored in the `Cargo.lock` file which "locks" the dependencies to specific versions, and keeps them fixed over time. The resolver attempts to unify common dependencies while considering possibly conflicting requirements. The sections below provide some details on how these constraints are handled, and how to work with the resolver. See the chapter [Specifying Dependencies] for more details about how dependency requirements are specified. The [`cargo tree`] command can be used to visualize the result of the resolver. 
Compound | >=1.2, <1.5 | >=1.2.0, <1.5.0 | Multiple requirements that must be simultaneously satisfied.
It also attempts to use the greatest version currently available within that compatibility range. For example, if there are two packages in the resolve graph with the following requirements: ```toml # Package A [dependencies] bitflags = "1.0" # Package B [dependencies] bitflags = "1.1" ``` If at the time the `Cargo.lock` file is generated, the greatest version of `bitflags` is `1.2.1`, then both packages will use `1.2.1` because it is the greatest within the compatibility range. If `2.0.0` is published, it will still use `1.2.1` because `2.0.0` is considered incompatible. If multiple packages have a common dependency with semver-incompatible versions, then Cargo will allow this, but will build two separate copies of the dependency. For example: ```toml # Package A [dependencies] rand = "0.7" # Package B [dependencies] rand = "0.6" ``` The above will result in Package A using the greatest `0.7` release (`0.7.3` at the time of this writing) and Package B will use the greatest `0.6` release (`0.6.5` for example). This can lead to potential problems, see the [Version-incompatibility hazards] section for more details. Multiple versions within the same compatibility range are not allowed and will result in a resolver error if it is constrained to two different versions within a compatibility range. For example, if there are two packages in the resolve graph with the following requirements: ```toml # Package A [dependencies] log = "=0.4.11" # Package B [dependencies] log = "=0.4.8" ``` The above will fail because it is not allowed to have two separate copies of the `0.4` release of the `log` package. [SemVer]: https://semver.org/ [SemVer Compatibility]: semver.md [Version-incompatibility hazards]: #version-incompatibility-hazards ### Version-incompatibility hazards When multiple versions of a crate appear in the resolve graph, this can cause problems when types from those crates are exposed by the crates using them. 
If [`downcast_ref`] is used on an object created by a library using version `1.0.0`, and the code calling `downcast_ref` is downcasting to a type from version `2.0.0`, the downcast will fail at runtime.
For example, if `1.0.0-alpha` of package `foo` is published, then a requirement of `foo = "1.0"` will *not* match, and will return an error. The pre-release must be specified, such as `foo = "1.0.0-alpha"`. Similarly [`cargo install`] will avoid pre-releases unless explicitly asked to install one. Cargo allows "newer" pre-releases to be used automatically. For example, if `1.0.0-beta` is published, then a requirement `foo = "1.0.0-alpha"` will allow updating to the `beta` version. Beware that pre-release versions can be unstable, and as such care should be taken when using them. Some projects may choose to publish breaking changes between pre-release versions. It is recommended to not use pre-release dependencies in a library if your library is not also a pre-release. Care should also be taken when updating your `Cargo.lock`, and be prepared if a pre-release update causes issues. The pre-release tag may be separated with periods to distinguish separate components. Numeric components will use numeric comparison. For example, `1.0.0-alpha.4` will use numeric comparison for the `4` component. That means that if `1.0.0-alpha.11` is published, that will be chosen as the greatest release. Non-numeric components are compared lexicographically. [`cargo install`]: ../commands/cargo-install.md ### Version metadata SemVer has the concept of "version metadata" with a plus in the version, such as `1.0.0+21AF26D3`. This metadata is usually ignored, and should not be used in a version requirement. You should never publish multiple versions that differ only in the metadata tag (note, this is a [known issue] with [crates.io] that currently permits this). [known issue]: https://github.com/rust-lang/crates.io/issues/1059 [crates.io]: https://crates.io/ ## Other constraints Version requirements aren't the only constraint that the resolver considers when selecting and unifying dependencies. The following sections cover some of the other constraints that can affect resolution. 
### Features For the purpose of generating `Cargo.lock`, the resolver builds the dependency graph as-if all [features] of all [workspace] members are enabled. This ensures that any optional dependencies are available and properly resolved with the rest of the graph when features are added or removed with the [`--features` command-line flag](features.md#command-line-feature-options). The resolver runs a second time to determine the actual features used when *compiling* a crate, based on the features selected on the command-line. Dependencies are resolved with the union of all features enabled on them. For example, if one package depends on the [`im`] package with the [`serde` dependency] enabled and another package depends on it with the [`rayon` dependency] enabled, then `im` will be built with both features enabled, and the `serde` and `rayon` crates will be included in the resolve graph. If no packages depend on `im` with those features, then those optional dependencies will be ignored, and they will not affect resolution. When building multiple packages in a workspace (such as with `--workspace` or multiple `-p` flags), the features of the dependencies of all of those packages are unified. If you have a circumstance where you want to avoid that unification for different workspace members, you will need to build them via separate `cargo` invocations. The resolver will skip over versions of packages that are missing required features. For example, if a package depends on version `^1` of [`regex`] with the [`perf` feature], then the oldest version it can select is `1.3.0`, because versions prior to that did not contain the `perf` feature. Similarly, if a feature is removed from a new release, then packages that require that feature will be stuck on the older releases that contain that feature. It is discouraged to remove features in a SemVer-compatible release. 
  ```toml
  [dependencies.common]
  version = "1.0"
  features = ["f1"]

  [target.'cfg(windows)'.dependencies.common]
  version = "1.0"
  features = ["f2"]
  ```
For example: ```toml [dependencies] serde = {version = "1.0", default-features = false} [dev-dependencies] serde = {version = "1.0", features = ["std"]} ``` In this example, the library will normally link against `serde` without the `std` feature. However, when built as a test or example, it will include the `std` feature. For example, `cargo test` or `cargo build --all-targets` will unify these features. Note that dev-dependencies in dependencies are always ignored, this is only relevant for the top-level package or workspace members. [build-dependencies]: specifying-dependencies.md#build-dependencies [dev-dependencies]: specifying-dependencies.md#development-dependencies [resolver-field]: features.md#resolver-versions ### `links` The [`links` field] is used to ensure only one copy of a native library is linked into a binary. The resolver will attempt to find a graph where there is only one instance of each `links` name. If it is unable to find a graph that satisfies that constraint, it will return an error. For example, it is an error if one package depends on [`libgit2-sys`] version `0.11` and another depends on `0.12`, because Cargo is unable to unify those, but they both link to the `git2` native library. Due to this requirement, it is encouraged to be very careful when making SemVer-incompatible releases with the `links` field if your library is in common use. [`links` field]: manifest.md#the-links-field [`libgit2-sys`]: https://crates.io/crates/libgit2-sys ### Yanked versions [Yanked releases][yank] are those that are marked that they should not be used. When the resolver is building the graph, it will ignore all yanked releases unless they already exist in the `Cargo.lock` file. [yank]: publishing.md#cargo-yank ## Dependency updates Dependency resolution is automatically performed by all Cargo commands that need to know about the dependency graph. For example, [`cargo build`] will run the resolver to discover all the dependencies to build. 
After the first time it runs, the result is stored in the `Cargo.lock` file. Subsequent commands will run the resolver, keeping dependencies locked to the versions in `Cargo.lock` *if it can*. If the dependency list in `Cargo.toml` has been modified, for example changing the version of a dependency from `1.0` to `2.0`, then the resolver will select a new version for that dependency that matches the new requirements. If that new dependency introduces new requirements, those new requirements may also trigger additional updates. The `Cargo.lock` file will be updated with the new result. The `--locked` or `--frozen` flags can be used to change this behavior to prevent automatic updates when requirements change, and return an error instead. [`cargo update`] can be used to update the entries in `Cargo.lock` when new versions are published. Without any options, it will attempt to update all packages in the lock file. The `-p` flag can be used to target the update for a specific package, and other flags such as `--aggressive` or `--precise` can be used to control how versions are selected. [`cargo build`]: ../commands/cargo-build.md [`cargo update`]: ../commands/cargo-update.md ## Overrides Cargo has several mechanisms to override dependencies within the graph. The [Overriding Dependencies] chapter goes into detail on how to use overrides. The overrides appear as an overlay to a registry, replacing the patched version with the new entry. Otherwise, resolution is performed like normal. [Overriding Dependencies]: overriding-dependencies.md ## Dependency kinds There are three kinds of dependencies in a package: normal, [build], and [dev][dev-dependencies]. For the most part these are all treated the same from the perspective of the resolver. One difference is that dev-dependencies for non-workspace members are always ignored, and do not influence resolution. [Platform-specific dependencies] with the `[target]` table are resolved as-if all platforms are enabled. 
In other words, the resolver ignores the platform or `cfg` expression. [build]: specifying-dependencies.md#build-dependencies [dev-dependencies]: specifying-dependencies.md#development-dependencies [Platform-specific dependencies]: specifying-dependencies.md#platform-specific-dependencies ### dev-dependency cycles Usually the resolver does not allow cycles in the graph, but it does allow them for [dev-dependencies]. For example, project "foo" has a dev-dependency on "bar", which has a normal dependency on "foo" (usually as a "path" dependency). This is allowed because there isn't really a cycle from the perspective of the build artifacts. In this example, the "foo" library is built (which does not need "bar" because "bar" is only used for tests), and then "bar" can be built depending on "foo", then the "foo" tests can be built linking to "bar". Beware that this can lead to confusing errors. In the case of building library unit tests, there are actually two copies of the library linked into the final test binary: the one that was linked with "bar", and the one built that contains the unit tests. Similar to the issues highlighted in the [Version-incompatibility hazards] section, the types between the two are not compatible. Be careful when exposing types of "foo" from "bar" in this situation, since the "foo" unit tests won't treat them the same as the local types. If possible, try to split your package into multiple packages and restructure it so that it remains strictly acyclic. ## Resolver versions A different feature resolver algorithm can be used by specifying the resolver version in `Cargo.toml` like this: ```toml [package] name = "my-package" version = "1.0.0" resolver = "2" ``` The version `"1"` resolver is the original resolver that shipped with Cargo up to version 1.50. The default is `"2"` if the root package specifies [`edition = "2021"`](manifest.md#the-edition-field) or a newer edition. Otherwise the default is `"1"`. 
The version `"2"` resolver introduces changes in [feature unification](#features). See the [features chapter][features-2] for more details. The resolver is a global option that affects the entire workspace. The `resolver` version in dependencies is ignored, only the value in the top-level package will be used. If using a [virtual workspace], the version should be specified in the `[workspace]` table, for example: ```toml [workspace] members = ["member1", "member2"] resolver = "2" ``` [virtual workspace]: workspaces.md#virtual-manifest [features-2]: features.md#feature-resolver-version-2 ## Recommendations The following are some recommendations for setting the version within your package, and for specifying dependency requirements. These are general guidelines that should apply to common situations, but of course some situations may require specifying unusual requirements. * Follow the [SemVer guidelines] when deciding how to update your version number, and whether or not you will need to make a SemVer-incompatible version change. * Use caret requirements for dependencies, such as `"1.2.3"`, for most situations. This ensures that the resolver can be maximally flexible in choosing a version while maintaining build compatibility. * Specify all three components with the version you are currently using. This helps set the minimum version that will be used, and ensures that other users won't end up with an older version of the dependency that might be missing something that your package requires. * Avoid `*` requirements, as they are not allowed on [crates.io], and they can pull in SemVer-breaking changes during a normal `cargo update`. * Avoid overly broad version requirements. For example, `>=2.0.0` can pull in any SemVer-incompatible version, like version `5.0.0`, which can result in broken builds in the future. * Avoid overly narrow version requirements if possible. 
For example, if you specify a tilde requirement like `bar="~1.3"`, and another package specifies a requirement of `bar="1.4"`, this will fail to resolve, even though minor releases should be compatible. * Try to keep the dependency versions up-to-date with the actual minimum versions that your library requires. For example, if you have a requirement of `bar="1.0.12"`, and then in a future release you start using new features added in the `1.1.0` release of "bar", update your dependency requirement to `bar="1.1.0"`. If you fail to do this, it may not be immediately obvious because Cargo can opportunistically choose the newest version when you run a blanket `cargo update`. However, if another user depends on your library, and runs `cargo update -p your-library`, it will *not* automatically update "bar" if it is locked in their `Cargo.lock`. It will only update "bar" in that situation if the dependency declaration is also updated. Failure to do so can cause confusing build errors for the user using `cargo update -p`. * If two packages are tightly coupled, then an `=` dependency requirement may help ensure that they stay in sync. For example, a library with a companion proc-macro library will sometimes make assumptions between the two libraries that won't work well if the two are out of sync (and it is never expected to use the two libraries independently). The parent library can use an `=` requirement on the proc-macro, and re-export the macros for easy access. * `0.0.x` versions can be used for packages that are permanently unstable. In general, the stricter you make the dependency requirements, the more likely it will be for the resolver to fail. Conversely, if you use requirements that are too loose, it may be possible for new versions to be published that will break the build. [SemVer guidelines]: semver.md ## Troubleshooting The following illustrates some problems you may experience, and some possible solutions. 
### SemVer-breaking patch release breaks the build Sometimes a project may inadvertently publish a point release with a SemVer-breaking change. When users update with `cargo update`, they will pick up this new release, and then their build may break. In this situation, it is recommended that the project should [yank] the release, and either remove the SemVer-breaking change, or publish it as a new SemVer-major version increase. If the change happened in a third-party project, if possible try to (politely!) work with the project to resolve the issue. While waiting for the release to be yanked, some workarounds depend on the circumstances: * If your project is the end product (such as a binary executable), just avoid updating the offending package in `Cargo.lock`. This can be done with the `--precise` flag in [`cargo update`]. * If you publish a binary on [crates.io], then you can temporarily add an `=` requirement to force the dependency to a specific good version. * Binary projects can alternatively recommend users to use the `--locked` flag with [`cargo install`] to use the original `Cargo.lock` that contains the known good version. * Libraries may also consider publishing a temporary new release with stricter requirements that avoid the troublesome dependency. You may want to consider using range requirements (instead of `=`) to avoid overly-strict requirements that may conflict with other packages using the same dependency. Once the problem has been resolved, you can publish another point release that relaxes the dependency back to a caret requirement. * If it looks like the third-party project is unable or unwilling to yank the release, then one option is to update your code to be compatible with the changes, and update the dependency requirement to set the minimum version to the new release. You will also need to consider if this is a SemVer-breaking change of your own library, for example if it exposes types from the dependency. 
cargo-0.66.0/src/doc/src/reference/semver.md000066400000000000000000001217261432416201200206060ustar00rootroot00000000000000# SemVer Compatibility This chapter provides details on what is conventionally considered a compatible or breaking SemVer change for new releases of a package. See the [SemVer compatibility] section for details on what SemVer is, and how Cargo uses it to ensure compatibility of libraries. These are only *guidelines*, and not necessarily hard-and-fast rules that all projects will obey. The [Change categories] section details how this guide classifies the level and severity of a change. Most of this guide focuses on changes that will cause `cargo` and `rustc` to fail to build something that previously worked. Almost every change carries some risk that it will negatively affect the runtime behavior, and for those cases it is usually a judgment call by the project maintainers whether or not it is a SemVer-incompatible change. See also [rust-semverver], which is an experimental tool that attempts to programmatically check compatibility rules. [Change categories]: #change-categories [rust-semverver]: https://github.com/rust-dev-tools/rust-semverver [SemVer compatibility]: resolver.md#semver-compatibility ## Change categories All of the policies listed below are categorized by the level of change: * **Major change**: a change that requires a major SemVer bump. * **Minor change**: a change that requires only a minor SemVer bump. * **Possibly-breaking change**: a change that some projects may consider major and others consider minor. The "Possibly-breaking" category covers changes that have the *potential* to break during an update, but may not necessarily cause a breakage. The impact of these changes should be considered carefully. The exact nature will depend on the change and the principles of the project maintainers. Some projects may choose to only bump the patch number on a minor change. 
It is encouraged to follow the SemVer spec, and only apply bug fixes in patch releases. However, a bug fix may require an API change that is marked as a "minor change", and shouldn't affect compatibility. This guide does not take a stance on how each individual "minor change" should be treated, as the difference between minor and patch changes are conventions that depend on the nature of the change. Some changes are marked as "minor", even though they carry the potential risk of breaking a build. This is for situations where the potential is extremely low, and the potentially breaking code is unlikely to be written in idiomatic Rust, or is specifically discouraged from use. This guide uses the terms "major" and "minor" assuming this relates to a "1.0.0" release or later. Initial development releases starting with "0.y.z" can treat changes in "y" as a major release, and "z" as a minor release. "0.0.z" releases are always major changes. This is because Cargo uses the convention that only changes in the left-most non-zero component are considered incompatible. 
* API compatibility * Items * [Major: renaming/moving/removing any public items](#item-remove) * [Minor: adding new public items](#item-new) * Structs * [Major: adding a private struct field when all current fields are public](#struct-add-private-field-when-public) * [Major: adding a public field when no private field exists](#struct-add-public-field-when-no-private) * [Minor: adding or removing private fields when at least one already exists](#struct-private-fields-with-private) * [Minor: going from a tuple struct with all private fields (with at least one field) to a normal struct, or vice versa](#struct-tuple-normal-with-private) * Enums * [Major: adding new enum variants (without `non_exhaustive`)](#enum-variant-new) * [Major: adding new fields to an enum variant](#enum-fields-new) * Traits * [Major: adding a non-defaulted trait item](#trait-new-item-no-default) * [Major: any change to trait item signatures](#trait-item-signature) * [Possibly-breaking: adding a defaulted trait item](#trait-new-default-item) * [Major: adding a trait item that makes the trait non-object safe](#trait-object-safety) * [Major: adding a type parameter without a default](#trait-new-parameter-no-default) * [Minor: adding a defaulted trait type parameter](#trait-new-parameter-default) * Implementations * [Possibly-breaking change: adding any inherent items](#impl-item-new) * Generics * [Major: tightening generic bounds](#generic-bounds-tighten) * [Minor: loosening generic bounds](#generic-bounds-loosen) * [Minor: adding defaulted type parameters](#generic-new-default) * [Minor: generalizing a type to use generics (with identical types)](#generic-generalize-identical) * [Major: generalizing a type to use generics (with possibly different types)](#generic-generalize-different) * [Minor: changing a generic type to a more generic type](#generic-more-generic) * Functions * [Major: adding/removing function parameters](#fn-change-arity) * [Possibly-breaking: introducing a new function type 
parameter](#fn-generic-new) * [Minor: generalizing a function to use generics (supporting original type)](#fn-generalize-compatible) * [Major: generalizing a function to use generics with type mismatch](#fn-generalize-mismatch) * Attributes * [Major: switching from `no_std` support to requiring `std`](#attr-no-std-to-std) * Tooling and environment compatibility * [Possibly-breaking: changing the minimum version of Rust required](#env-new-rust) * [Possibly-breaking: changing the platform and environment requirements](#env-change-requirements) * Cargo * [Minor: adding a new Cargo feature](#cargo-feature-add) * [Major: removing a Cargo feature](#cargo-feature-remove) * [Major: removing a feature from a feature list if that changes functionality or public items](#cargo-feature-remove-another) * [Possibly-breaking: removing an optional dependency](#cargo-remove-opt-dep) * [Minor: changing dependency features](#cargo-change-dep-feature) * [Minor: adding dependencies](#cargo-dep-add) * [Application compatibility](#application-compatibility) ## API compatibility All of the examples below contain three parts: the original code, the code after it has been modified, and an example usage of the code that could appear in another project. In a minor change, the example usage should successfully build with both the before and after versions. ### Major: renaming/moving/removing any public items The absence of a publicly exposed [item][items] will cause any uses of that item to fail to compile. ```rust,ignore // MAJOR CHANGE /////////////////////////////////////////////////////////// // Before pub fn foo() {} /////////////////////////////////////////////////////////// // After // ... item has been removed /////////////////////////////////////////////////////////// // Example usage that will break. 
fn main() { updated_crate::foo(); // Error: cannot find function `foo` } ``` This includes adding any sort of [`cfg` attribute] which can change which items or behavior is available based on [conditional compilation]. Mitigating strategies: * Mark items to be removed as [deprecated], and then remove them at a later date in a SemVer-breaking release. * Mark renamed items as [deprecated], and use a [`pub use`] item to re-export to the old name. ### Minor: adding new public items Adding new, public [items] is a minor change. ```rust,ignore // MINOR CHANGE /////////////////////////////////////////////////////////// // Before // ... absence of item /////////////////////////////////////////////////////////// // After pub fn foo() {} /////////////////////////////////////////////////////////// // Example use of the library that will safely work. // `foo` is not used since it didn't previously exist. ``` Note that in some rare cases this can be a **breaking change** due to glob imports. For example, if you add a new trait, and a project has used a glob import that brings that trait into scope, and the new trait introduces an associated item that conflicts with any types it is implemented on, this can cause a compile-time error due to the ambiguity. Example: ```rust,ignore // Breaking change example /////////////////////////////////////////////////////////// // Before // ... absence of trait /////////////////////////////////////////////////////////// // After pub trait NewTrait { fn foo(&self) {} } impl NewTrait for i32 {} /////////////////////////////////////////////////////////// // Example usage that will break. use updated_crate::*; pub trait LocalTrait { fn foo(&self) {} } impl LocalTrait for i32 {} fn main() { 123i32.foo(); // Error: multiple applicable items in scope } ``` This is not considered a major change because conventionally glob imports are a known forwards-compatibility hazard. Glob imports of items from external crates should be avoided. 
### Major: adding a private struct field when all current fields are public When a private field is added to a struct that previously had all public fields, this will break any code that attempts to construct it with a [struct literal]. ```rust,ignore // MAJOR CHANGE /////////////////////////////////////////////////////////// // Before pub struct Foo { pub f1: i32, } /////////////////////////////////////////////////////////// // After pub struct Foo { pub f1: i32, f2: i32, } /////////////////////////////////////////////////////////// // Example usage that will break. fn main() { let x = updated_crate::Foo { f1: 123 }; // Error: cannot construct `Foo` } ``` Mitigation strategies: * Do not add new fields to all-public field structs. * Mark structs as [`#[non_exhaustive]`][non_exhaustive] when first introducing a struct to prevent users from using struct literal syntax, and instead provide a constructor method and/or [Default] implementation. ### Major: adding a public field when no private field exists When a public field is added to a struct that has all public fields, this will break any code that attempts to construct it with a [struct literal]. ```rust,ignore // MAJOR CHANGE /////////////////////////////////////////////////////////// // Before pub struct Foo { pub f1: i32, } /////////////////////////////////////////////////////////// // After pub struct Foo { pub f1: i32, pub f2: i32, } /////////////////////////////////////////////////////////// // Example usage that will break. fn main() { let x = updated_crate::Foo { f1: 123 }; // Error: missing field `f2` } ``` Mitigation strategies: * Do not add new new fields to all-public field structs. * Mark structs as [`#[non_exhaustive]`][non_exhaustive] when first introducing a struct to prevent users from using struct literal syntax, and instead provide a constructor method and/or [Default] implementation. 
### Minor: adding or removing private fields when at least one already exists It is safe to add or remove private fields from a struct when the struct already has at least one private field. ```rust,ignore // MINOR CHANGE /////////////////////////////////////////////////////////// // Before #[derive(Default)] pub struct Foo { f1: i32, } /////////////////////////////////////////////////////////// // After #[derive(Default)] pub struct Foo { f2: f64, } /////////////////////////////////////////////////////////// // Example use of the library that will safely work. fn main() { // Cannot access private fields. let x = updated_crate::Foo::default(); } ``` This is safe because existing code cannot use a [struct literal] to construct it, nor exhaustively match its contents. Note that for tuple structs, this is a **major change** if the tuple contains public fields, and the addition or removal of a private field changes the index of any public field. ```rust,ignore // MAJOR CHANGE /////////////////////////////////////////////////////////// // Before #[derive(Default)] pub struct Foo(pub i32, i32); /////////////////////////////////////////////////////////// // After #[derive(Default)] pub struct Foo(f64, pub i32, i32); /////////////////////////////////////////////////////////// // Example usage that will break. fn main() { let x = updated_crate::Foo::default(); let y = x.0; // Error: is private } ``` ### Minor: going from a tuple struct with all private fields (with at least one field) to a normal struct, or vice versa Changing a tuple struct to a normal struct (or vice-versa) is safe if all fields are private. ```rust,ignore // MINOR CHANGE /////////////////////////////////////////////////////////// // Before #[derive(Default)] pub struct Foo(i32); /////////////////////////////////////////////////////////// // After #[derive(Default)] pub struct Foo { f1: i32, } /////////////////////////////////////////////////////////// // Example use of the library that will safely work. 
fn main() { // Cannot access private fields. let x = updated_crate::Foo::default(); } ``` This is safe because existing code cannot use a [struct literal] to construct it, nor match its contents. ### Major: adding new enum variants (without `non_exhaustive`) It is a breaking change to add a new enum variant if the enum does not use the [`#[non_exhaustive]`][non_exhaustive] attribute. ```rust,ignore // MAJOR CHANGE /////////////////////////////////////////////////////////// // Before pub enum E { Variant1, } /////////////////////////////////////////////////////////// // After pub enum E { Variant1, Variant2, } /////////////////////////////////////////////////////////// // Example usage that will break. fn main() { use updated_crate::E; let x = E::Variant1; match x { // Error: `Variant2` not covered E::Variant1 => {} } } ``` Mitigation strategies: * When introducing the enum, mark it as [`#[non_exhaustive]`][non_exhaustive] to force users to use [wildcard patterns] to catch new variants. ### Major: adding new fields to an enum variant It is a breaking change to add new fields to an enum variant because all fields are public, and constructors and matching will fail to compile. ```rust,ignore // MAJOR CHANGE /////////////////////////////////////////////////////////// // Before pub enum E { Variant1 { f1: i32 }, } /////////////////////////////////////////////////////////// // After pub enum E { Variant1 { f1: i32, f2: i32 }, } /////////////////////////////////////////////////////////// // Example usage that will break. fn main() { use updated_crate::E; let x = E::Variant1 { f1: 1 }; // Error: missing f2 match x { E::Variant1 { f1 } => {} // Error: missing f2 } } ``` Mitigation strategies: * When introducing the enum, mark the variant as [`non_exhaustive`][non_exhaustive] so that it cannot be constructed or matched without wildcards. 
```rust,ignore,skip pub enum E { #[non_exhaustive] Variant1{f1: i32} } ``` * When introducing the enum, use an explicit struct as a value, where you can have control over the field visibility. ```rust,ignore,skip pub struct Foo { f1: i32, f2: i32, } pub enum E { Variant1(Foo) } ``` ### Major: adding a non-defaulted trait item It is a breaking change to add a non-defaulted item to a trait. This will break any implementors of the trait. ```rust,ignore // MAJOR CHANGE /////////////////////////////////////////////////////////// // Before pub trait Trait {} /////////////////////////////////////////////////////////// // After pub trait Trait { fn foo(&self); } /////////////////////////////////////////////////////////// // Example usage that will break. use updated_crate::Trait; struct Foo; impl Trait for Foo {} // Error: not all trait items implemented ``` Mitigation strategies: * Always provide a default implementation or value for new associated trait items. * When introducing the trait, use the [sealed trait] technique to prevent users outside of the crate from implementing the trait. ### Major: any change to trait item signatures It is a breaking change to make any change to a trait item signature. This can break external implementors of the trait. ```rust,ignore // MAJOR CHANGE /////////////////////////////////////////////////////////// // Before pub trait Trait { fn f(&self, x: i32) {} } /////////////////////////////////////////////////////////// // After pub trait Trait { // For sealed traits or normal functions, this would be a minor change // because generalizing with generics strictly expands the possible uses. // But in this case, trait implementations must use the same signature. fn f(&self, x: V) {} } /////////////////////////////////////////////////////////// // Example usage that will break. 
use updated_crate::Trait; struct Foo; impl Trait for Foo { fn f(&self, x: i32) {} // Error: trait declaration has 1 type parameter } ``` Mitigation strategies: * Introduce new items with default implementations to cover the new functionality instead of modifying existing items. * When introducing the trait, use the [sealed trait] technique to prevent users outside of the crate from implementing the trait. ### Possibly-breaking: adding a defaulted trait item It is usually safe to add a defaulted trait item. However, this can sometimes cause a compile error. For example, this can introduce an ambiguity if a method of the same name exists in another trait. ```rust,ignore // Breaking change example /////////////////////////////////////////////////////////// // Before pub trait Trait {} /////////////////////////////////////////////////////////// // After pub trait Trait { fn foo(&self) {} } /////////////////////////////////////////////////////////// // Example usage that will break. use updated_crate::Trait; struct Foo; trait LocalTrait { fn foo(&self) {} } impl Trait for Foo {} impl LocalTrait for Foo {} fn main() { let x = Foo; x.foo(); // Error: multiple applicable items in scope } ``` Note that this ambiguity does *not* exist for name collisions on [inherent implementations], as they take priority over trait items. See [trait-object-safety](#trait-object-safety) for a special case to consider when adding trait items. Mitigation strategies: * Some projects may deem this acceptable breakage, particularly if the new item name is unlikely to collide with any existing code. Choose names carefully to help avoid these collisions. Additionally, it may be acceptable to require downstream users to add [disambiguation syntax] to select the correct function when updating the dependency. ### Major: adding a trait item that makes the trait non-object safe It is a breaking change to add a trait item that changes the trait to not be [object safe]. 
```rust,ignore // MAJOR CHANGE /////////////////////////////////////////////////////////// // Before pub trait Trait {} /////////////////////////////////////////////////////////// // After pub trait Trait { // An associated const makes the trait not object-safe. const CONST: i32 = 123; } /////////////////////////////////////////////////////////// // Example usage that will break. use updated_crate::Trait; struct Foo; impl Trait for Foo {} fn main() { let obj: Box = Box::new(Foo); // Error: cannot be made into an object } ``` It is safe to do the converse (making a non-object safe trait into a safe one). ### Major: adding a type parameter without a default It is a breaking change to add a type parameter without a default to a trait. ```rust,ignore // MAJOR CHANGE /////////////////////////////////////////////////////////// // Before pub trait Trait {} /////////////////////////////////////////////////////////// // After pub trait Trait {} /////////////////////////////////////////////////////////// // Example usage that will break. use updated_crate::Trait; struct Foo; impl Trait for Foo {} // Error: missing generics ``` Mitigating strategies: * See [adding a defaulted trait type parameter](#trait-new-parameter-default). ### Minor: adding a defaulted trait type parameter It is safe to add a type parameter to a trait as long as it has a default. External implementors will use the default without needing to specify the parameter. ```rust,ignore // MINOR CHANGE /////////////////////////////////////////////////////////// // Before pub trait Trait {} /////////////////////////////////////////////////////////// // After pub trait Trait {} /////////////////////////////////////////////////////////// // Example use of the library that will safely work. 
use updated_crate::Trait; struct Foo; impl Trait for Foo {} ``` ### Possibly-breaking change: adding any inherent items Usually adding inherent items to an implementation should be safe because inherent items take priority over trait items. However, in some cases the collision can cause problems if the name is the same as an implemented trait item with a different signature. ```rust,ignore // Breaking change example /////////////////////////////////////////////////////////// // Before pub struct Foo; /////////////////////////////////////////////////////////// // After pub struct Foo; impl Foo { pub fn foo(&self) {} } /////////////////////////////////////////////////////////// // Example usage that will break. use updated_crate::Foo; trait Trait { fn foo(&self, x: i32) {} } impl Trait for Foo {} fn main() { let x = Foo; x.foo(1); // Error: this function takes 0 arguments } ``` Note that if the signatures match, there would not be a compile-time error, but possibly a silent change in runtime behavior (because it is now executing a different function). Mitigation strategies: * Some projects may deem this acceptable breakage, particularly if the new item name is unlikely to collide with any existing code. Choose names carefully to help avoid these collisions. Additionally, it may be acceptable to require downstream users to add [disambiguation syntax] to select the correct function when updating the dependency. ### Major: tightening generic bounds It is a breaking change to tighten generic bounds on a type since this can break users expecting the looser bounds. ```rust,ignore // MAJOR CHANGE /////////////////////////////////////////////////////////// // Before pub struct Foo { pub f1: A, } /////////////////////////////////////////////////////////// // After pub struct Foo { pub f1: A, } /////////////////////////////////////////////////////////// // Example usage that will break. 
use updated_crate::Foo; fn main() { let s = Foo { f1: 1.23 }; // Error: the trait bound `{float}: Eq` is not satisfied } ``` ### Minor: loosening generic bounds It is safe to loosen the generic bounds on a type, as it only expands what is allowed. ```rust,ignore // MINOR CHANGE /////////////////////////////////////////////////////////// // Before pub struct Foo { pub f1: A, } /////////////////////////////////////////////////////////// // After pub struct Foo { pub f1: A, } /////////////////////////////////////////////////////////// // Example use of the library that will safely work. use updated_crate::Foo; fn main() { let s = Foo { f1: 123 }; } ``` ### Minor: adding defaulted type parameters It is safe to add a type parameter to a type as long as it has a default. All existing references will use the default without needing to specify the parameter. ```rust,ignore // MINOR CHANGE /////////////////////////////////////////////////////////// // Before #[derive(Default)] pub struct Foo {} /////////////////////////////////////////////////////////// // After #[derive(Default)] pub struct Foo { f1: A, } /////////////////////////////////////////////////////////// // Example use of the library that will safely work. use updated_crate::Foo; fn main() { let s: Foo = Default::default(); } ``` ### Minor: generalizing a type to use generics (with identical types) A struct or enum field can change from a concrete type to a generic type parameter, provided that the change results in an identical type for all existing use cases. For example, the following change is permitted: ```rust,ignore // MINOR CHANGE /////////////////////////////////////////////////////////// // Before pub struct Foo(pub u8); /////////////////////////////////////////////////////////// // After pub struct Foo(pub T); /////////////////////////////////////////////////////////// // Example use of the library that will safely work. 
use updated_crate::Foo; fn main() { let s: Foo = Foo(123); } ``` because existing uses of `Foo` are shorthand for `Foo` which yields the identical field type. ### Major: generalizing a type to use generics (with possibly different types) Changing a struct or enum field from a concrete type to a generic type parameter can break if the type can change. ```rust,ignore // MAJOR CHANGE /////////////////////////////////////////////////////////// // Before pub struct Foo(pub T, pub u8); /////////////////////////////////////////////////////////// // After pub struct Foo(pub T, pub T); /////////////////////////////////////////////////////////// // Example usage that will break. use updated_crate::Foo; fn main() { let s: Foo = Foo(3.14, 123); // Error: mismatched types } ``` ### Minor: changing a generic type to a more generic type It is safe to change a generic type to a more generic one. For example, the following adds a generic parameter that defaults to the original type, which is safe because all existing users will be using the same type for both fields, the the defaulted parameter does not need to be specified. ```rust,ignore // MINOR CHANGE /////////////////////////////////////////////////////////// // Before pub struct Foo(pub T, pub T); /////////////////////////////////////////////////////////// // After pub struct Foo(pub T, pub U); /////////////////////////////////////////////////////////// // Example use of the library that will safely work. use updated_crate::Foo; fn main() { let s: Foo = Foo(1.0, 2.0); } ``` ### Major: adding/removing function parameters Changing the arity of a function is a breaking change. ```rust,ignore // MAJOR CHANGE /////////////////////////////////////////////////////////// // Before pub fn foo() {} /////////////////////////////////////////////////////////// // After pub fn foo(x: i32) {} /////////////////////////////////////////////////////////// // Example usage that will break. 
fn main() { updated_crate::foo(); // Error: this function takes 1 argument } ``` Mitigating strategies: * Introduce a new function with the new signature and possibly [deprecate][deprecated] the old one. * Introduce functions that take a struct argument, where the struct is built with the builder pattern. This allows new fields to be added to the struct in the future. ### Possibly-breaking: introducing a new function type parameter Usually, adding a non-defaulted type parameter is safe, but in some cases it can be a breaking change: ```rust,ignore // Breaking change example /////////////////////////////////////////////////////////// // Before pub fn foo() {} /////////////////////////////////////////////////////////// // After pub fn foo() {} /////////////////////////////////////////////////////////// // Example usage that will break. use updated_crate::foo; fn main() { foo::(); // Error: this function takes 2 generic arguments but 1 generic argument was supplied } ``` However, such explicit calls are rare enough (and can usually be written in other ways) that this breakage is usually acceptable. One should take into account how likely it is that the function in question is being called with explicit type arguments. ### Minor: generalizing a function to use generics (supporting original type) The type of a parameter to a function, or its return value, can be *generalized* to use generics, including by introducing a new type parameter, as long as it can be instantiated to the original type. For example, the following changes are allowed: ```rust,ignore // MINOR CHANGE /////////////////////////////////////////////////////////// // Before pub fn foo(x: u8) -> u8 { x } pub fn bar>(t: T) {} /////////////////////////////////////////////////////////// // After use std::ops::Add; pub fn foo(x: T) -> T { x } pub fn bar>(t: T) {} /////////////////////////////////////////////////////////// // Example use of the library that will safely work. 
use updated_crate::{bar, foo}; fn main() { foo(1); bar(vec![1, 2, 3].into_iter()); } ``` because all existing uses are instantiations of the new signature. Perhaps somewhat surprisingly, generalization applies to trait objects as well, given that every trait implements itself: ```rust,ignore // MINOR CHANGE /////////////////////////////////////////////////////////// // Before pub trait Trait {} pub fn foo(t: &dyn Trait) {} /////////////////////////////////////////////////////////// // After pub trait Trait {} pub fn foo(t: &T) {} /////////////////////////////////////////////////////////// // Example use of the library that will safely work. use updated_crate::{foo, Trait}; struct Foo; impl Trait for Foo {} fn main() { let obj = Foo; foo(&obj); } ``` (The use of `?Sized` is essential; otherwise you couldn't recover the original signature.) Introducing generics in this way can potentially create type inference failures. These are usually rare, and may be acceptable breakage for some projects, as this can be fixed with additional type annotations. ```rust,ignore // Breaking change example /////////////////////////////////////////////////////////// // Before pub fn foo() -> i32 { 0 } /////////////////////////////////////////////////////////// // After pub fn foo() -> T { Default::default() } /////////////////////////////////////////////////////////// // Example usage that will break. use updated_crate::foo; fn main() { let x = foo(); // Error: type annotations needed } ``` ### Major: generalizing a function to use generics with type mismatch It is a breaking change to change a function parameter or return type if the generic type constrains or changes the types previously allowed. 
For example, the following adds a generic constraint that may not be satisfied by existing code: ```rust,ignore // MAJOR CHANGE /////////////////////////////////////////////////////////// // Before pub fn foo(x: Vec) {} /////////////////////////////////////////////////////////// // After pub fn foo>(x: T) {} /////////////////////////////////////////////////////////// // Example usage that will break. use updated_crate::foo; fn main() { foo(vec![1, 2, 3]); // Error: `Copy` is not implemented for `Vec` } ``` ### Major: switching from `no_std` support to requiring `std` If your library specifically supports a [`no_std`] environment, it is a breaking change to make a new release that requires `std`. ```rust,ignore,skip // MAJOR CHANGE /////////////////////////////////////////////////////////// // Before #![no_std] pub fn foo() {} /////////////////////////////////////////////////////////// // After pub fn foo() { std::time::SystemTime::now(); } /////////////////////////////////////////////////////////// // Example usage that will break. // This will fail to link for no_std targets because they don't have a `std` crate. #![no_std] use updated_crate::foo; fn example() { foo(); } ``` Mitigation strategies: * A common idiom to avoid this is to include a `std` [Cargo feature] that optionally enables `std` support, and when the feature is off, the library can be used in a `no_std` environment. ## Tooling and environment compatibility ### Possibly-breaking: changing the minimum version of Rust required Introducing the use of new features in a new release of Rust can break projects that are using older versions of Rust. This also includes using new features in a new release of Cargo, and requiring the use of a nightly-only feature in a crate that previously worked on stable. Some projects choose to allow this in a minor release for various reasons. It is usually relatively easy to update to a newer version of Rust. 
Rust also has a rapid 6-week release cycle, and some projects will provide compatibility within a window of releases (such as the current stable release plus N previous releases). Just keep in mind that some large projects may not be able to update their Rust toolchain rapidly. Mitigation strategies: * Use [Cargo features] to make the new features opt-in. * Provide a large window of support for older releases. * Copy the source of new standard library items if possible so that you can continue to use an older version but take advantage of the new feature. * Provide a separate branch of older minor releases that can receive backports of important bugfixes. * Keep an eye out for the [`[cfg(version(..))]`][cfg-version] and [`#[cfg(accessible(..))]`][cfg-accessible] features which provide an opt-in mechanism for new features. These are currently unstable and only available in the nightly channel. ### Possibly-breaking: changing the platform and environment requirements There is a very wide range of assumptions a library makes about the environment that it runs in, such as the host platform, operating system version, available services, filesystem support, etc. It can be a breaking change if you make a new release that restricts what was previously supported, for example requiring a newer version of an operating system. These changes can be difficult to track, since you may not always know if a change breaks in an environment that is not automatically tested. Some projects may deem this acceptable breakage, particularly if the breakage is unlikely for most users, or the project doesn't have the resources to support all environments. Another notable situation is when a vendor discontinues support for some hardware or OS, the project may deem it reasonable to also discontinue support. Mitigation strategies: * Document the platforms and environments you specifically support. * Test your code on a wide range of environments in CI. 
### Cargo #### Minor: adding a new Cargo feature It is usually safe to add new [Cargo features]. If the feature introduces new changes that cause a breaking change, this can cause difficulties for projects that have stricter backwards-compatibility needs. In that scenario, avoid adding the feature to the "default" list, and possibly document the consequences of enabling the feature. ```toml # MINOR CHANGE ########################################################### # Before [features] # ..empty ########################################################### # After [features] std = [] ``` #### Major: removing a Cargo feature It is usually a breaking change to remove [Cargo features]. This will cause an error for any project that enabled the feature. ```toml # MAJOR CHANGE ########################################################### # Before [features] logging = [] ########################################################### # After [dependencies] # ..logging removed ``` Mitigation strategies: * Clearly document your features. If there is an internal or experimental feature, mark it as such, so that users know the status of the feature. * Leave the old feature in `Cargo.toml`, but otherwise remove its functionality. Document that the feature is deprecated, and remove it in a future major SemVer release. #### Major: removing a feature from a feature list if that changes functionality or public items If removing a feature from another feature, this can break existing users if they are expecting that functionality to be available through that feature. ```toml # Breaking change example ########################################################### # Before [features] default = ["std"] std = [] ########################################################### # After [features] default = [] # This may cause packages to fail if they are expecting std to be enabled. 
std = [] ``` #### Possibly-breaking: removing an optional dependency Removing an optional dependency can break a project using your library because another project may be enabling that dependency via [Cargo features]. ```toml # Breaking change example ########################################################### # Before [dependencies] curl = { version = "0.4.31", optional = true } ########################################################### # After [dependencies] # ..curl removed ``` Mitigation strategies: * Clearly document your features. If the optional dependency is not included in the documented list of features, then you may decide to consider it safe to change undocumented entries. * Leave the optional dependency, and just don't use it within your library. * Replace the optional dependency with a [Cargo feature] that does nothing, and document that it is deprecated. * Use high-level features which enable optional dependencies, and document those as the preferred way to enable the extended functionality. For example, if your library has optional support for something like "networking", create a generic feature name "networking" that enables the optional dependencies necessary to implement "networking". Then document the "networking" feature. #### Minor: changing dependency features It is usually safe to change the features on a dependency, as long as the feature does not introduce a breaking change. ```toml # MINOR CHANGE ########################################################### # Before [dependencies] rand = { version = "0.7.3", features = ["small_rng"] } ########################################################### # After [dependencies] rand = "0.7.3" ``` #### Minor: adding dependencies It is usually safe to add new dependencies, as long as the new dependency does not introduce new requirements that result in a breaking change. For example, adding a new dependency that requires nightly in a project that previously worked on stable is a major change. 
```toml # MINOR CHANGE ########################################################### # Before [dependencies] # ..empty ########################################################### # After [dependencies] log = "0.4.11" ``` ## Application compatibility Cargo projects may also include executable binaries which have their own interfaces (such as a CLI interface, OS-level interaction, etc.). Since these are part of the Cargo package, they often use and share the same version as the package. You will need to decide if and how you want to employ a SemVer contract with your users in the changes you make to your application. The potential breaking and compatible changes to an application are too numerous to list, so you are encouraged to use the spirit of the [SemVer] spec to guide your decisions on how to apply versioning to your application, or at least document what your commitments are. [`cfg` attribute]: ../../reference/conditional-compilation.md#the-cfg-attribute [`no_std`]: ../../reference/names/preludes.html#the-no_std-attribute [`pub use`]: ../../reference/items/use-declarations.html [Cargo feature]: features.md [Cargo features]: features.md [cfg-accessible]: https://github.com/rust-lang/rust/issues/64797 [cfg-version]: https://github.com/rust-lang/rust/issues/64796 [conditional compilation]: ../../reference/conditional-compilation.md [Default]: ../../std/default/trait.Default.html [deprecated]: ../../reference/attributes/diagnostics.html#the-deprecated-attribute [disambiguation syntax]: ../../reference/expressions/call-expr.html#disambiguating-function-calls [inherent implementations]: ../../reference/items/implementations.html#inherent-implementations [items]: ../../reference/items.html [non_exhaustive]: ../../reference/attributes/type_system.html#the-non_exhaustive-attribute [object safe]: ../../reference/items/traits.html#object-safety [rust-feature]: https://doc.rust-lang.org/nightly/unstable-book/ [sealed trait]: 
https://rust-lang.github.io/api-guidelines/future-proofing.html#sealed-traits-protect-against-downstream-implementations-c-sealed [SemVer]: https://semver.org/ [struct literal]: ../../reference/expressions/struct-expr.html [wildcard patterns]: ../../reference/patterns.html#wildcard-pattern cargo-0.66.0/src/doc/src/reference/source-replacement.md000066400000000000000000000115621432416201200230760ustar00rootroot00000000000000## Source Replacement This document is about replacing the crate index. You can read about overriding dependencies in the [overriding dependencies] section of this documentation. A *source* is a provider that contains crates that may be included as dependencies for a package. Cargo supports the ability to **replace one source with another** to express strategies such as: * Vendoring - custom sources can be defined which represent crates on the local filesystem. These sources are subsets of the source that they're replacing and can be checked into packages if necessary. * Mirroring - sources can be replaced with an equivalent version which acts as a cache for crates.io itself. Cargo has a core assumption about source replacement that the source code is exactly the same from both sources. Note that this also means that a replacement source is not allowed to have crates which are not present in the original source. As a consequence, source replacement is not appropriate for situations such as patching a dependency or a private registry. Cargo supports patching dependencies through the usage of [the `[patch]` key][overriding dependencies], and private registry support is described in [the Registries chapter][registries]. [overriding dependencies]: overriding-dependencies.md [registries]: registries.md ### Configuration Configuration of replacement sources is done through [`.cargo/config.toml`][config] and the full set of available keys are: ```toml # The `source` table is where all keys related to source-replacement # are stored. 
[source] # Under the `source` table are a number of other tables whose keys are a # name for the relevant source. For example this section defines a new # source, called `my-vendor-source`, which comes from a directory # located at `vendor` relative to the directory containing this `.cargo/config.toml` # file [source.my-vendor-source] directory = "vendor" # The crates.io default source for crates is available under the name # "crates-io", and here we use the `replace-with` key to indicate that it's # replaced with our source above. [source.crates-io] replace-with = "my-vendor-source" # Each source has its own table where the key is the name of the source [source.the-source-name] # Indicate that `the-source-name` will be replaced with `another-source`, # defined elsewhere replace-with = "another-source" # Several kinds of sources can be specified (described in more detail below): registry = "https://example.com/path/to/index" local-registry = "path/to/registry" directory = "path/to/vendor" # Git sources can optionally specify a branch/tag/rev as well git = "https://example.com/path/to/repo" # branch = "master" # tag = "v1.0.1" # rev = "313f44e8" ``` [config]: config.md ### Registry Sources A "registry source" is one that is the same as crates.io itself. That is, it has an index served in a git repository which matches the format of the [crates.io index](https://github.com/rust-lang/crates.io-index). That repository then has configuration indicating where to download crates from. Currently there is not an already-available project for setting up a mirror of crates.io. Stay tuned though! ### Local Registry Sources A "local registry source" is intended to be a subset of another registry source, but available on the local filesystem (aka vendoring). Local registries are downloaded ahead of time, typically sync'd with a `Cargo.lock`, and are made up of a set of `*.crate` files and an index like the normal registry is. 
The primary way to manage and create local registry sources is through the [`cargo-local-registry`][cargo-local-registry] subcommand, [available on crates.io][cargo-local-registry] and can be installed with `cargo install cargo-local-registry`. [cargo-local-registry]: https://crates.io/crates/cargo-local-registry Local registries are contained within one directory and contain a number of `*.crate` files downloaded from crates.io as well as an `index` directory with the same format as the crates.io-index project (populated with just entries for the crates that are present). ### Directory Sources A "directory source" is similar to a local registry source where it contains a number of crates available on the local filesystem, suitable for vendoring dependencies. Directory sources are primarily managed by the `cargo vendor` subcommand. Directory sources are distinct from local registries though in that they contain the unpacked version of `*.crate` files, making it more suitable in some situations to check everything into source control. A directory source is just a directory containing a number of other directories which contain the source code for crates (the unpacked version of `*.crate` files). Currently no restriction is placed on the name of each directory. Each crate in a directory source also has an associated metadata file indicating the checksum of each file in the crate to protect against accidental modifications. cargo-0.66.0/src/doc/src/reference/specifying-dependencies.md000066400000000000000000000431531432416201200240660ustar00rootroot00000000000000## Specifying Dependencies Your crates can depend on other libraries from [crates.io] or other registries, `git` repositories, or subdirectories on your local file system. You can also temporarily override the location of a dependency β€” for example, to be able to test out a bug fix in the dependency that you are working on locally. 
You can have different dependencies for different platforms, and dependencies that are only used during development. Let's take a look at how to do each of these. ### Specifying dependencies from crates.io Cargo is configured to look for dependencies on [crates.io] by default. Only the name and a version string are required in this case. In [the cargo guide](../guide/index.md), we specified a dependency on the `time` crate: ```toml [dependencies] time = "0.1.12" ``` The string `"0.1.12"` is a version requirement. Although it looks like a specific *version* of the `time` crate, it actually specifies a *range* of versions and allows [SemVer] compatible updates. An update is allowed if the new version number does not modify the left-most non-zero digit in the major, minor, patch grouping. In this case, if we ran `cargo update -p time`, cargo should update us to version `0.1.13` if it is the latest `0.1.z` release, but would not update us to `0.2.0`. If instead we had specified the version string as `1.0`, cargo should update to `1.1` if it is the latest `1.y` release, but not `2.0`. The version `0.0.x` is not considered compatible with any other version. [SemVer]: https://semver.org Here are some more examples of version requirements and the versions that would be allowed with them: ```notrust 1.2.3 := >=1.2.3, <2.0.0 1.2 := >=1.2.0, <2.0.0 1 := >=1.0.0, <2.0.0 0.2.3 := >=0.2.3, <0.3.0 0.2 := >=0.2.0, <0.3.0 0.0.3 := >=0.0.3, <0.0.4 0.0 := >=0.0.0, <0.1.0 0 := >=0.0.0, <1.0.0 ``` This compatibility convention is different from SemVer in the way it treats versions before 1.0.0. While SemVer says there is no compatibility before 1.0.0, Cargo considers `0.x.y` to be compatible with `0.x.z`, where `y β‰₯ z` and `x > 0`. It is possible to further tweak the logic for selecting compatible versions using special operators, though it shouldn't be necessary most of the time. 
### Caret requirements **Caret requirements** are an alternative syntax for the default strategy, `^1.2.3` is exactly equivalent to `1.2.3`. ### Tilde requirements **Tilde requirements** specify a minimal version with some ability to update. If you specify a major, minor, and patch version or only a major and minor version, only patch-level changes are allowed. If you only specify a major version, then minor- and patch-level changes are allowed. `~1.2.3` is an example of a tilde requirement. ```notrust ~1.2.3 := >=1.2.3, <1.3.0 ~1.2 := >=1.2.0, <1.3.0 ~1 := >=1.0.0, <2.0.0 ``` ### Wildcard requirements **Wildcard requirements** allow for any version where the wildcard is positioned. `*`, `1.*` and `1.2.*` are examples of wildcard requirements. ```notrust * := >=0.0.0 1.* := >=1.0.0, <2.0.0 1.2.* := >=1.2.0, <1.3.0 ``` > **Note**: [crates.io] does not allow bare `*` versions. ### Comparison requirements **Comparison requirements** allow manually specifying a version range or an exact version to depend on. Here are some examples of comparison requirements: ```notrust >= 1.2.0 > 1 < 2 = 1.2.3 ``` ### Multiple requirements As shown in the examples above, multiple version requirements can be separated with a comma, e.g., `>= 1.2, < 1.5`. ### Specifying dependencies from other registries To specify a dependency from a registry other than [crates.io], first the registry must be configured in a `.cargo/config.toml` file. See the [registries documentation] for more information. In the dependency, set the `registry` key to the name of the registry to use. ```toml [dependencies] some-crate = { version = "1.0", registry = "my-registry" } ``` > **Note**: [crates.io] does not allow packages to be published with > dependencies on other registries. 
[registries documentation]: registries.md ### Specifying dependencies from `git` repositories To depend on a library located in a `git` repository, the minimum information you need to specify is the location of the repository with the `git` key: ```toml [dependencies] regex = { git = "https://github.com/rust-lang/regex" } ``` Cargo will fetch the `git` repository at this location then look for a `Cargo.toml` for the requested crate anywhere inside the `git` repository (not necessarily at the root - for example, specifying a member crate name of a workspace and setting `git` to the repository containing the workspace). Since we haven’t specified any other information, Cargo assumes that we intend to use the latest commit on the main branch to build our package. You can combine the `git` key with the `rev`, `tag`, or `branch` keys to specify something else. Here's an example of specifying that you want to use the latest commit on a branch named `next`: ```toml [dependencies] regex = { git = "https://github.com/rust-lang/regex", branch = "next" } ``` Anything that is not a branch or tag falls under `rev`. This can be a commit hash like `rev = "4c59b707"`, or a named reference exposed by the remote repository such as `rev = "refs/pull/493/head"`. What references are available varies by where the repo is hosted; GitHub in particular exposes a reference to the most recent commit of every pull request as shown, but other git hosts often provide something equivalent, possibly under a different naming scheme. Once a `git` dependency has been added, Cargo will lock that dependency to the latest commit at the time. New commits will not be pulled down automatically once the lock is in place. However, they can be pulled down manually with `cargo update`. See [Git Authentication] for help with git authentication for private repos. > **Note**: [crates.io] does not allow packages to be published with `git` > dependencies (`git` [dev-dependencies] are ignored). 
See the [Multiple > locations](#multiple-locations) section for a fallback alternative. [Git Authentication]: ../appendix/git-authentication.md ### Specifying path dependencies Over time, our `hello_world` package from [the guide](../guide/index.md) has grown significantly in size! It’s gotten to the point that we probably want to split out a separate crate for others to use. To do this Cargo supports **path dependencies** which are typically sub-crates that live within one repository. Let’s start off by making a new crate inside of our `hello_world` package: ```console # inside of hello_world/ $ cargo new hello_utils ``` This will create a new folder `hello_utils` inside of which a `Cargo.toml` and `src` folder are ready to be configured. In order to tell Cargo about this, open up `hello_world/Cargo.toml` and add `hello_utils` to your dependencies: ```toml [dependencies] hello_utils = { path = "hello_utils" } ``` This tells Cargo that we depend on a crate called `hello_utils` which is found in the `hello_utils` folder (relative to the `Cargo.toml` it’s written in). And that’s it! The next `cargo build` will automatically build `hello_utils` and all of its own dependencies, and others can also start using the crate as well. However, crates that use dependencies specified with only a path are not permitted on [crates.io]. If we wanted to publish our `hello_world` crate, we would need to publish a version of `hello_utils` to [crates.io] and specify its version in the dependencies line as well: ```toml [dependencies] hello_utils = { path = "hello_utils", version = "0.1.0" } ``` > **Note**: [crates.io] does not allow packages to be published with `path` > dependencies (`path` [dev-dependencies] are ignored). See the [Multiple > locations](#multiple-locations) section for a fallback alternative. ### Multiple locations It is possible to specify both a registry version and a `git` or `path` location. 
The `git` or `path` dependency will be used locally (in which case the `version` is checked against the local copy), and when published to a registry like [crates.io], it will use the registry version. Other combinations are not allowed. Examples: ```toml [dependencies] # Uses `my-bitflags` when used locally, and uses # version 1.0 from crates.io when published. bitflags = { path = "my-bitflags", version = "1.0" } # Uses the given git repo when used locally, and uses # version 1.0 from crates.io when published. smallvec = { git = "https://github.com/servo/rust-smallvec", version = "1.0" } # N.B. that if a version doesn't match, Cargo will fail to compile! ``` One example where this can be useful is when you have split up a library into multiple packages within the same workspace. You can then use `path` dependencies to point to the local packages within the workspace to use the local version during development, and then use the [crates.io] version once it is published. This is similar to specifying an [override](overriding-dependencies.md), but only applies to this one dependency declaration. ### Platform specific dependencies Platform-specific dependencies take the same format, but are listed under a `target` section. Normally Rust-like [`#[cfg]` syntax](../../reference/conditional-compilation.html) will be used to define these sections: ```toml [target.'cfg(windows)'.dependencies] winhttp = "0.4.0" [target.'cfg(unix)'.dependencies] openssl = "1.0.1" [target.'cfg(target_arch = "x86")'.dependencies] native-i686 = { path = "native/i686" } [target.'cfg(target_arch = "x86_64")'.dependencies] native-x86_64 = { path = "native/x86_64" } ``` Like with Rust, the syntax here supports the `not`, `any`, and `all` operators to combine various cfg name/value pairs. If you want to know which cfg targets are available on your platform, run `rustc --print=cfg` from the command line. 
If you want to know which `cfg` targets are available for another platform, such as 64-bit Windows, run `rustc --print=cfg --target=x86_64-pc-windows-msvc`. Unlike in your Rust source code, you cannot use `[target.'cfg(feature = "fancy-feature")'.dependencies]` to add dependencies based on optional features. Use [the `[features]` section](features.md) instead: ```toml [dependencies] foo = { version = "1.0", optional = true } bar = { version = "1.0", optional = true } [features] fancy-feature = ["foo", "bar"] ``` The same applies to `cfg(debug_assertions)`, `cfg(test)` and `cfg(proc_macro)`. These values will not work as expected and will always have the default value returned by `rustc --print=cfg`. There is currently no way to add dependencies based on these configuration values. In addition to `#[cfg]` syntax, Cargo also supports listing out the full target the dependencies would apply to: ```toml [target.x86_64-pc-windows-gnu.dependencies] winhttp = "0.4.0" [target.i686-unknown-linux-gnu.dependencies] openssl = "1.0.1" ``` #### Custom target specifications If you’re using a custom target specification (such as `--target foo/bar.json`), use the base filename without the `.json` extension: ```toml [target.bar.dependencies] winhttp = "0.4.0" [target.my-special-i686-platform.dependencies] openssl = "1.0.1" native = { path = "native/i686" } ``` > **Note**: Custom target specifications are not usable on the stable channel. ### Development dependencies You can add a `[dev-dependencies]` section to your `Cargo.toml` whose format is equivalent to `[dependencies]`: ```toml [dev-dependencies] tempdir = "0.3" ``` Dev-dependencies are not used when compiling a package for building, but are used for compiling tests, examples, and benchmarks. These dependencies are *not* propagated to other packages which depend on this package. You can also have target-specific development dependencies by using `dev-dependencies` in the target section header instead of `dependencies`. 
For example: ```toml [target.'cfg(unix)'.dev-dependencies] mio = "0.0.1" ``` > **Note**: When a package is published, only dev-dependencies that specify a > `version` will be included in the published crate. For most use cases, > dev-dependencies are not needed when published, though some users (like OS > packagers) may want to run tests within a crate, so providing a `version` if > possible can still be beneficial. ### Build dependencies You can depend on other Cargo-based crates for use in your build scripts. Dependencies are declared through the `build-dependencies` section of the manifest: ```toml [build-dependencies] cc = "1.0.3" ``` You can also have target-specific build dependencies by using `build-dependencies` in the target section header instead of `dependencies`. For example: ```toml [target.'cfg(unix)'.build-dependencies] cc = "1.0.3" ``` In this case, the dependency will only be built when the host platform matches the specified target. The build script **does not** have access to the dependencies listed in the `dependencies` or `dev-dependencies` section. Build dependencies will likewise not be available to the package itself unless listed under the `dependencies` section as well. A package itself and its build script are built separately, so their dependencies need not coincide. Cargo is kept simpler and cleaner by using independent dependencies for independent purposes. ### Choosing features If a package you depend on offers conditional features, you can specify which to use: ```toml [dependencies.awesome] version = "1.3.5" default-features = false # do not include the default features, and optionally # cherry-pick individual features features = ["secure-password", "civet"] ``` More information about features can be found in the [features chapter](features.md#dependency-features). 
### Renaming dependencies in `Cargo.toml` When writing a `[dependencies]` section in `Cargo.toml` the key you write for a dependency typically matches up to the name of the crate you import from in the code. For some projects, though, you may wish to reference the crate with a different name in the code regardless of how it's published on crates.io. For example you may wish to: * Avoid the need to `use foo as bar` in Rust source. * Depend on multiple versions of a crate. * Depend on crates with the same name from different registries. To support this Cargo supports a `package` key in the `[dependencies]` section of which package should be depended on: ```toml [package] name = "mypackage" version = "0.0.1" [dependencies] foo = "0.1" bar = { git = "https://github.com/example/project", package = "foo" } baz = { version = "0.1", registry = "custom", package = "foo" } ``` In this example, three crates are now available in your Rust code: ```rust,ignore extern crate foo; // crates.io extern crate bar; // git repository extern crate baz; // registry `custom` ``` All three of these crates have the package name of `foo` in their own `Cargo.toml`, so we're explicitly using the `package` key to inform Cargo that we want the `foo` package even though we're calling it something else locally. The `package` key, if not specified, defaults to the name of the dependency being requested. Note that if you have an optional dependency like: ```toml [dependencies] bar = { version = "0.1", package = 'foo', optional = true } ``` you're depending on the crate `foo` from crates.io, but your crate has a `bar` feature instead of a `foo` feature. That is, names of features take after the name of the dependency, not the package name, when renamed. Enabling transitive dependencies works similarly, for example we could add the following to the above manifest: ```toml [features] log-debug = ['bar/log-debug'] # using 'foo/log-debug' would be an error! 
``` ### Inheriting a dependency from a workspace Dependencies can be inherited from a workspace by specifying the dependency in the workspace's [`[workspace.dependencies]`][workspace.dependencies] table. After that, add it to the `[dependencies]` table with `workspace = true`. Along with the `workspace` key, dependencies can also include these keys: - [`optional`][optional]: Note that the`[workspace.dependencies]` table is not allowed to specify `optional`. - [`features`][features]: These are additive with the features declared in the `[workspace.dependencies]` Other than `optional` and `features`, inherited dependencies cannot use any other dependency key (such as `version` or `default-features`). Dependencies in the `[dependencies]`, `[dev-dependencies]`, `[build-dependencies]`, and `[target."...".dependencies]` sections support the ability to reference the `[workspace.dependencies]` definition of dependencies. ```toml [project] name = "bar" version = "0.2.0" [dependencies] regex = { workspace = true, features = ["unicode"] } [build-dependencies] cc.workspace = true [dev-dependencies] rand = { workspace = true, optional = true } ``` [crates.io]: https://crates.io/ [dev-dependencies]: #development-dependencies [workspace.dependencies]: workspaces.md#the-workspacedependencies-table [optional]: features.md#optional-dependencies [features]: features.md cargo-0.66.0/src/doc/src/reference/timings.md000066400000000000000000000045601432416201200207530ustar00rootroot00000000000000# Reporting build timings The `--timings` option gives some information about how long each compilation takes, and tracks concurrency information over time. ```sh cargo build --timings ``` This writes an HTML report in `target/cargo-timings/cargo-timing.html`. This also writes a copy of the report to the same directory with a timestamp in the filename, if you want to look at older runs. #### Reading the graphs There are two graphs in the output. 
The "unit" graph shows the duration of each unit over time. A "unit" is a single compiler invocation. There are lines that show which additional units are "unlocked" when a unit finishes. That is, it shows the new units that are now allowed to run because their dependencies are all finished. Hover the mouse over a unit to highlight the lines. This can help visualize the critical path of dependencies. This may change between runs because the units may finish in different orders. The "codegen" times are highlighted in a lavender color. In some cases, build pipelining allows units to start when their dependencies are performing code generation. This information is not always displayed (for example, binary units do not show when code generation starts). The "custom build" units are `build.rs` scripts, which when run are highlighted in orange. The second graph shows Cargo's concurrency over time. The background indicates CPU usage. The three lines are: - "Waiting" (red) β€” This is the number of units waiting for a CPU slot to open. - "Inactive" (blue) β€” This is the number of units that are waiting for their dependencies to finish. - "Active" (green) β€” This is the number of units currently running. Note: This does not show the concurrency in the compiler itself. `rustc` coordinates with Cargo via the "job server" to stay within the concurrency limit. This currently mostly applies to the code generation phase. Tips for addressing compile times: - Look for slow dependencies. - Check if they have features that you may wish to consider disabling. - Consider trying to remove the dependency completely. - Look for a crate being built multiple times with different versions. Try to remove the older versions from the dependency graph. - Split large crates into smaller pieces. - If there are a large number of crates bottlenecked on a single crate, focus your attention on improving that one crate to improve parallelism. 
cargo-0.66.0/src/doc/src/reference/unstable.md000066400000000000000000001501461432416201200211200ustar00rootroot00000000000000## Unstable Features Experimental Cargo features are only available on the [nightly channel]. You are encouraged to experiment with these features to see if they meet your needs, and if there are any issues or problems. Check the linked tracking issues listed below for more information on the feature, and click the GitHub subscribe button if you want future updates. After some period of time, if the feature does not have any major concerns, it can be [stabilized], which will make it available on stable once the current nightly release reaches the stable channel (anywhere from 6 to 12 weeks). There are three different ways that unstable features can be enabled based on how the feature works: * New syntax in `Cargo.toml` requires a `cargo-features` key at the top of `Cargo.toml`, before any tables. For example: ```toml # This specifies which new Cargo.toml features are enabled. cargo-features = ["test-dummy-unstable"] [package] name = "my-package" version = "0.1.0" im-a-teapot = true # This is a new option enabled by test-dummy-unstable. ``` * New command-line flags, options, and subcommands require the `-Z unstable-options` CLI option to also be included. For example, the new `--out-dir` option is only available on nightly: ```cargo +nightly build --out-dir=out -Z unstable-options``` * `-Z` command-line flags are used to enable new functionality that may not have an interface, or the interface has not yet been designed, or for more complex features that affect multiple parts of Cargo. For example, the [mtime-on-use](#mtime-on-use) feature can be enabled with: ```cargo +nightly build -Z mtime-on-use``` Run `cargo -Z help` to see a list of flags available. Anything which can be configured with a `-Z` flag can also be set in the cargo [config file] (`.cargo/config.toml`) in the `unstable` table. 
For example: ```toml [unstable] mtime-on-use = true build-std = ["core", "alloc"] ``` Each new feature described below should explain how to use it. [config file]: config.md [nightly channel]: ../../book/appendix-07-nightly-rust.html [stabilized]: https://doc.crates.io/contrib/process/unstable.html#stabilization ### List of unstable features * Unstable-specific features * [-Z allow-features](#allow-features) β€” Provides a way to restrict which unstable features are used. * Build scripts and linking * [Metabuild](#metabuild) β€” Provides declarative build scripts. * Resolver and features * [no-index-update](#no-index-update) β€” Prevents cargo from updating the index cache. * [avoid-dev-deps](#avoid-dev-deps) β€” Prevents the resolver from including dev-dependencies during resolution. * [minimal-versions](#minimal-versions) β€” Forces the resolver to use the lowest compatible version instead of the highest. * [public-dependency](#public-dependency) β€” Allows dependencies to be classified as either public or private. * Output behavior * [out-dir](#out-dir) β€” Adds a directory where artifacts are copied to. * [terminal-width](#terminal-width) β€” Tells rustc the width of the terminal so that long diagnostic messages can be truncated to be more readable. * [Different binary name](#different-binary-name) β€” Assign a name to the built binary that is separate from the crate name. * Compile behavior * [mtime-on-use](#mtime-on-use) β€” Updates the last-modified timestamp on every dependency every time it is used, to provide a mechanism to delete unused artifacts. * [doctest-xcompile](#doctest-xcompile) β€” Supports running doctests with the `--target` flag. * [build-std](#build-std) β€” Builds the standard library instead of using pre-built binaries. * [build-std-features](#build-std-features) β€” Sets features to use with the standard library. * [binary-dep-depinfo](#binary-dep-depinfo) β€” Causes the dep-info file to track binary dependencies. 
* [panic-abort-tests](#panic-abort-tests) β€” Allows running tests with the "abort" panic strategy. * [crate-type](#crate-type) β€” Supports passing crate types to the compiler. * [keep-going](#keep-going) β€” Build as much as possible rather than aborting on the first error. * rustdoc * [`doctest-in-workspace`](#doctest-in-workspace) β€” Fixes workspace-relative paths when running doctests. * [rustdoc-map](#rustdoc-map) β€” Provides mappings for documentation to link to external sites like [docs.rs](https://docs.rs/). * `Cargo.toml` extensions * [Profile `rustflags` option](#profile-rustflags-option) β€” Passed directly to rustc. * [per-package-target](#per-package-target) β€” Sets the `--target` to use for each individual package. * [artifact dependencies](#artifact-dependencies) - Allow build artifacts to be included into other build artifacts and build them for different targets. * Information and metadata * [Build-plan](#build-plan) β€” Emits JSON information on which commands will be run. * [unit-graph](#unit-graph) β€” Emits JSON for Cargo's internal graph structure. * [`cargo rustc --print`](#rustc---print) β€” Calls rustc with `--print` to display information from rustc. * Configuration * [config-include](#config-include) β€” Adds the ability for config files to include other files. * [`cargo config`](#cargo-config) β€” Adds a new subcommand for viewing config files. * Registries * [credential-process](#credential-process) β€” Adds support for fetching registry tokens from an external authentication program. * [`cargo logout`](#cargo-logout) β€” Adds the `logout` command to remove the currently saved registry token. * [sparse-registry](#sparse-registry) β€” Adds support for fetching from static-file HTTP registries (`sparse+`) ### allow-features This permanently-unstable flag makes it so that only a listed set of unstable features can be used. 
Specifically, if you pass `-Zallow-features=foo,bar`, you'll continue to be able to pass `-Zfoo` and `-Zbar` to `cargo`, but you will be unable to pass `-Zbaz`. You can pass an empty string (`-Zallow-features=`) to disallow all unstable features. `-Zallow-features` also restricts which unstable features can be passed to the `cargo-features` entry in `Cargo.toml`. If, for example, you want to allow ```toml cargo-features = ["test-dummy-unstable"] ``` where `test-dummy-unstable` is unstable, that features would also be disallowed by `-Zallow-features=`, and allowed with `-Zallow-features=test-dummy-unstable`. The list of features passed to cargo's `-Zallow-features` is also passed to any Rust tools that cargo ends up calling (like `rustc` or `rustdoc`). Thus, if you run `cargo -Zallow-features=`, no unstable Cargo _or_ Rust features can be used. ### no-index-update * Original Issue: [#3479](https://github.com/rust-lang/cargo/issues/3479) * Tracking Issue: [#7404](https://github.com/rust-lang/cargo/issues/7404) The `-Z no-index-update` flag ensures that Cargo does not attempt to update the registry index. This is intended for tools such as Crater that issue many Cargo commands, and you want to avoid the network latency for updating the index each time. ### mtime-on-use * Original Issue: [#6477](https://github.com/rust-lang/cargo/pull/6477) * Cache usage meta tracking issue: [#7150](https://github.com/rust-lang/cargo/issues/7150) The `-Z mtime-on-use` flag is an experiment to have Cargo update the mtime of used files to make it easier for tools like cargo-sweep to detect which files are stale. For many workflows this needs to be set on *all* invocations of cargo. To make this more practical setting the `unstable.mtime_on_use` flag in `.cargo/config.toml` or the corresponding ENV variable will apply the `-Z mtime-on-use` to all invocations of nightly cargo. 
(the config flag is ignored by stable) ### avoid-dev-deps * Original Issue: [#4988](https://github.com/rust-lang/cargo/issues/4988) * Tracking Issue: [#5133](https://github.com/rust-lang/cargo/issues/5133) When running commands such as `cargo install` or `cargo build`, Cargo currently requires dev-dependencies to be downloaded, even if they are not used. The `-Z avoid-dev-deps` flag allows Cargo to avoid downloading dev-dependencies if they are not needed. The `Cargo.lock` file will not be generated if dev-dependencies are skipped. ### minimal-versions * Original Issue: [#4100](https://github.com/rust-lang/cargo/issues/4100) * Tracking Issue: [#5657](https://github.com/rust-lang/cargo/issues/5657) > Note: It is not recommended to use this feature. Because it enforces minimal > versions for all transitive dependencies, its usefulness is limited since > not all external dependencies declare proper lower version bounds. It is > intended that it will be changed in the future to only enforce minimal > versions for direct dependencies. When a `Cargo.lock` file is generated, the `-Z minimal-versions` flag will resolve the dependencies to the minimum SemVer version that will satisfy the requirements (instead of the greatest version). The intended use-case of this flag is to check, during continuous integration, that the versions specified in Cargo.toml are a correct reflection of the minimum versions that you are actually using. That is, if Cargo.toml says `foo = "1.0.0"` that you don't accidentally depend on features added only in `foo 1.5.0`. ### out-dir * Original Issue: [#4875](https://github.com/rust-lang/cargo/issues/4875) * Tracking Issue: [#6790](https://github.com/rust-lang/cargo/issues/6790) This feature allows you to specify the directory where artifacts will be copied to after they are built. Typically artifacts are only written to the `target/release` or `target/debug` directories. 
However, determining the exact filename can be tricky since you need to parse JSON output. The `--out-dir` flag makes it easier to predictably access the artifacts. Note that the artifacts are copied, so the originals are still in the `target` directory. Example: ```sh cargo +nightly build --out-dir=out -Z unstable-options ``` This can also be specified in `.cargo/config.toml` files. ```toml [build] out-dir = "out" ``` ### doctest-xcompile * Tracking Issue: [#7040](https://github.com/rust-lang/cargo/issues/7040) * Tracking Rustc Issue: [#64245](https://github.com/rust-lang/rust/issues/64245) This flag changes `cargo test`'s behavior when handling doctests when a target is passed. Currently, if a target is passed that is different from the host cargo will simply skip testing doctests. If this flag is present, cargo will continue as normal, passing the tests to doctest, while also passing it a `--target` option, as well as enabling `-Zunstable-features --enable-per-target-ignores` and passing along information from `.cargo/config.toml`. See the rustc issue for more information. ```sh cargo test --target foo -Zdoctest-xcompile ``` #### New `dir-name` attribute Some of the paths generated under `target/` have resulted in a de-facto "build protocol", where `cargo` is invoked as a part of a larger project build. So, to preserve the existing behavior, there is also a new attribute `dir-name`, which when left unspecified, defaults to the name of the profile. For example: ```toml [profile.release-lto] inherits = "release" dir-name = "lto" # Emits to target/lto instead of target/release-lto lto = true ``` ### Build-plan * Tracking Issue: [#5579](https://github.com/rust-lang/cargo/issues/5579) The `--build-plan` argument for the `build` command will output JSON with information about which commands would be run without actually executing anything. This can be useful when integrating with another build tool. 
Example: ```sh cargo +nightly build --build-plan -Z unstable-options ``` ### Metabuild * Tracking Issue: [rust-lang/rust#49803](https://github.com/rust-lang/rust/issues/49803) * RFC: [#2196](https://github.com/rust-lang/rfcs/blob/master/text/2196-metabuild.md) Metabuild is a feature to have declarative build scripts. Instead of writing a `build.rs` script, you specify a list of build dependencies in the `metabuild` key in `Cargo.toml`. A build script is automatically generated that runs each build dependency in order. Metabuild packages can then read metadata from `Cargo.toml` to specify their behavior. Include `cargo-features` at the top of `Cargo.toml`, a `metabuild` key in the `package`, list the dependencies in `build-dependencies`, and add any metadata that the metabuild packages require under `package.metadata`. Example: ```toml cargo-features = ["metabuild"] [package] name = "mypackage" version = "0.0.1" metabuild = ["foo", "bar"] [build-dependencies] foo = "1.0" bar = "1.0" [package.metadata.foo] extra-info = "qwerty" ``` Metabuild packages should have a public function called `metabuild` that performs the same actions as a regular `build.rs` script would perform. ### public-dependency * Tracking Issue: [#44663](https://github.com/rust-lang/rust/issues/44663) The 'public-dependency' feature allows marking dependencies as 'public' or 'private'. When this feature is enabled, additional information is passed to rustc to allow the 'exported_private_dependencies' lint to function properly. This requires the appropriate key to be set in `cargo-features`: ```toml cargo-features = ["public-dependency"] [dependencies] my_dep = { version = "1.2.3", public = true } private_dep = "2.0.0" # Will be 'private' by default ``` ### build-std * Tracking Repository: The `build-std` feature enables Cargo to compile the standard library itself as part of a crate graph compilation. This feature has also historically been known as "std-aware Cargo". 
This feature is still in very early stages of development, and is also a possible massive feature addition to Cargo. This is a very large feature to document, even in the minimal form that it exists in today, so if you're curious to stay up to date you'll want to follow the [tracking repository](https://github.com/rust-lang/wg-cargo-std-aware) and its set of issues. The functionality implemented today is behind a flag called `-Z build-std`. This flag indicates that Cargo should compile the standard library from source code using the same profile as the main build itself. Note that for this to work you need to have the source code for the standard library available, and at this time the only supported method of doing so is to add the `rust-src` rust rustup component: ```console $ rustup component add rust-src --toolchain nightly ``` It is also required today that the `-Z build-std` flag is combined with the `--target` flag. Note that you're not forced to do a cross compilation, you're just forced to pass `--target` in one form or another. Usage looks like: ```console $ cargo new foo $ cd foo $ cargo +nightly run -Z build-std --target x86_64-unknown-linux-gnu Compiling core v0.0.0 (...) ... Compiling foo v0.1.0 (...) Finished dev [unoptimized + debuginfo] target(s) in 21.00s Running `target/x86_64-unknown-linux-gnu/debug/foo` Hello, world! ``` Here we recompiled the standard library in debug mode with debug assertions (like `src/main.rs` is compiled) and everything was linked together at the end. Using `-Z build-std` will implicitly compile the stable crates `core`, `std`, `alloc`, and `proc_macro`. If you're using `cargo test` it will also compile the `test` crate. If you're working with an environment which does not support some of these crates, then you can pass an argument to `-Zbuild-std` as well: ```console $ cargo +nightly build -Z build-std=core,alloc ``` The value here is a comma-separated list of standard library crates to build. 
#### Requirements As a summary, a list of requirements today to use `-Z build-std` are: * You must install libstd's source code through `rustup component add rust-src` * You must pass `--target` * You must use both a nightly Cargo and a nightly rustc * The `-Z build-std` flag must be passed to all `cargo` invocations. #### Reporting bugs and helping out The `-Z build-std` feature is in the very early stages of development! This feature for Cargo has an extremely long history and is very large in scope, and this is just the beginning. If you'd like to report bugs please either report them to: * Cargo - - for implementation bugs * The tracking repository - - for larger design questions. Also if you'd like to see a feature that's not yet implemented and/or if something doesn't quite work the way you'd like it to, feel free to check out the [issue tracker](https://github.com/rust-lang/wg-cargo-std-aware/issues) of the tracking repository, and if it's not there please file a new issue! ### build-std-features * Tracking Repository: This flag is a sibling to the `-Zbuild-std` feature flag. This will configure the features enabled for the standard library itself when building the standard library. The default enabled features, at this time, are `backtrace` and `panic_unwind`. This flag expects a comma-separated list and, if provided, will override the default list of features enabled. ### binary-dep-depinfo * Tracking rustc issue: [#63012](https://github.com/rust-lang/rust/issues/63012) The `-Z binary-dep-depinfo` flag causes Cargo to forward the same flag to `rustc` which will then cause `rustc` to include the paths of all binary dependencies in the "dep info" file (with the `.d` extension). Cargo then uses that information for change-detection (if any binary dependency changes, then the crate will be rebuilt). 
The primary use case is for building the compiler itself, which has implicit dependencies on the standard library that would otherwise be untracked for change-detection. ### panic-abort-tests * Tracking Issue: [#67650](https://github.com/rust-lang/rust/issues/67650) * Original Pull Request: [#7460](https://github.com/rust-lang/cargo/pull/7460) The `-Z panic-abort-tests` flag will enable nightly support to compile test harness crates with `-Cpanic=abort`. Without this flag Cargo will compile tests, and everything they depend on, with `-Cpanic=unwind` because it's the only way `test`-the-crate knows how to operate. As of [rust-lang/rust#64158], however, the `test` crate supports `-C panic=abort` with a test-per-process, and can help avoid compiling crate graphs multiple times. It's currently unclear how this feature will be stabilized in Cargo, but we'd like to stabilize it somehow! [rust-lang/rust#64158]: https://github.com/rust-lang/rust/pull/64158 ### keep-going * Tracking Issue: [#10496](https://github.com/rust-lang/cargo/issues/10496) `cargo build --keep-going` (and similarly for `check`, `test` etc) will build as many crates in the dependency graph as possible, rather than aborting the build at the first one that fails to build. For example if the current package depends on dependencies `fails` and `works`, one of which fails to build, `cargo check -j1` may or may not build the one that succeeds (depending on which one of the two builds Cargo picked to run first), whereas `cargo check -j1 --keep-going` would definitely run both builds, even if the one run first fails. The `-Z unstable-options` command-line option must be used in order to use `--keep-going` while it is not yet stable: ```console cargo check --keep-going -Z unstable-options ``` ### config-include * Tracking Issue: [#7723](https://github.com/rust-lang/cargo/issues/7723) The `include` key in a config file can be used to load another config file. 
It takes a string for a path to another file relative to the config file, or a list of strings. It requires the `-Zconfig-include` command-line option. ```toml # .cargo/config include = '../../some-common-config.toml' ``` The config values are first loaded from the include path, and then the config file's own values are merged on top of it. This can be paired with [config-cli](#config-cli) to specify a file to load from the command-line. Pass a path to a config file as the argument to `--config`: ```console cargo +nightly -Zunstable-options -Zconfig-include --config somefile.toml build ``` CLI paths are relative to the current working directory. ### target-applies-to-host * Original Pull Request: [#9322](https://github.com/rust-lang/cargo/pull/9322) * Tracking Issue: [#9453](https://github.com/rust-lang/cargo/issues/9453) Historically, Cargo's behavior for whether the `linker` and `rustflags` configuration options from environment variables and `[target]` are respected for build scripts, plugins, and other artifacts that are _always_ built for the host platform has been somewhat inconsistent. When `--target` is _not_ passed, Cargo respects the same `linker` and `rustflags` for build scripts as for all other compile artifacts. When `--target` _is_ passed, however, Cargo respects `linker` from `[target.]`, and does not pick up any `rustflags` configuration. This dual behavior is confusing, but also makes it difficult to correctly configure builds where the host triple and the target triple happen to be the same, but artifacts intended to run on the build host should still be configured differently. `-Ztarget-applies-to-host` enables the top-level `target-applies-to-host` setting in Cargo configuration files which allows users to opt into different (and more consistent) behavior for these properties. 
When `target-applies-to-host` is unset, or set to `true`, in the configuration file, the existing Cargo behavior is preserved (though see `-Zhost-config`, which changes that default). When it is set to `false`, no options from `[target.]`, `RUSTFLAGS`, or `[build]` are respected for host artifacts regardless of whether `--target` is passed to Cargo. To customize artifacts intended to be run on the host, use `[host]` ([`host-config`](#host-config)). In the future, `target-applies-to-host` may end up defaulting to `false` to provide more sane and consistent default behavior. ```toml # config.toml target-applies-to-host = false ``` ```console cargo +nightly -Ztarget-applies-to-host build --target x86_64-unknown-linux-gnu ``` ### host-config * Original Pull Request: [#9322](https://github.com/rust-lang/cargo/pull/9322) * Tracking Issue: [#9452](https://github.com/rust-lang/cargo/issues/9452) The `host` key in a config file can be used pass flags to host build targets such as build scripts that must run on the host system instead of the target system when cross compiling. It supports both generic and host arch specific tables. Matching host arch tables take precedence over generic host tables. It requires the `-Zhost-config` and `-Ztarget-applies-to-host` command-line options to be set, and that `target-applies-to-host = false` is set in the Cargo configuration file. ```toml # config.toml [host] linker = "/path/to/host/linker" [host.x86_64-unknown-linux-gnu] linker = "/path/to/host/arch/linker" rustflags = ["-Clink-arg=--verbose"] [target.x86_64-unknown-linux-gnu] linker = "/path/to/target/linker" ``` The generic `host` table above will be entirely ignored when building on a `x86_64-unknown-linux-gnu` host as the `host.x86_64-unknown-linux-gnu` table takes precedence. Setting `-Zhost-config` changes the default for `target-applies-to-host` to `false` from `true`. 
```console cargo +nightly -Ztarget-applies-to-host -Zhost-config build --target x86_64-unknown-linux-gnu ``` ### unit-graph * Tracking Issue: [#8002](https://github.com/rust-lang/cargo/issues/8002) The `--unit-graph` flag can be passed to any build command (`build`, `check`, `run`, `test`, `bench`, `doc`, etc.) to emit a JSON object to stdout which represents Cargo's internal unit graph. Nothing is actually built, and the command returns immediately after printing. Each "unit" corresponds to an execution of the compiler. These objects also include which unit each unit depends on. ``` cargo +nightly build --unit-graph -Z unstable-options ``` This structure provides a more complete view of the dependency relationship as Cargo sees it. In particular, the "features" field supports the new feature resolver where a dependency can be built multiple times with different features. `cargo metadata` fundamentally cannot represent the relationship of features between different dependency kinds, and features now depend on which command is run and which packages and targets are selected. Additionally it can provide details about intra-package dependencies like build scripts or tests. The following is a description of the JSON structure: ```javascript { /* Version of the JSON output structure. If any backwards incompatible changes are made, this value will be increased. */ "version": 1, /* Array of all build units. */ "units": [ { /* An opaque string which indicates the package. Information about the package can be obtained from `cargo metadata`. */ "pkg_id": "my-package 0.1.0 (path+file:///path/to/my-package)", /* The Cargo target. See the `cargo metadata` documentation for more information about these fields. 
https://doc.rust-lang.org/cargo/commands/cargo-metadata.html */ "target": { "kind": ["lib"], "crate_types": ["lib"], "name": "my-package", "src_path": "/path/to/my-package/src/lib.rs", "edition": "2018", "test": true, "doctest": true }, /* The profile settings for this unit. These values may not match the profile defined in the manifest. Units can use modified profile settings. For example, the "panic" setting can be overridden for tests to force it to "unwind". */ "profile": { /* The profile name these settings are derived from. */ "name": "dev", /* The optimization level as a string. */ "opt_level": "0", /* The LTO setting as a string. */ "lto": "false", /* The codegen units as an integer. `null` if it should use the compiler's default. */ "codegen_units": null, /* The debug information level as an integer. `null` if it should use the compiler's default (0). */ "debuginfo": 2, /* Whether or not debug-assertions are enabled. */ "debug_assertions": true, /* Whether or not overflow-checks are enabled. */ "overflow_checks": true, /* Whether or not rpath is enabled. */ "rpath": false, /* Whether or not incremental is enabled. */ "incremental": true, /* The panic strategy, "unwind" or "abort". */ "panic": "unwind" }, /* Which platform this target is being built for. A value of `null` indicates it is for the host. Otherwise it is a string of the target triple (such as "x86_64-unknown-linux-gnu"). */ "platform": null, /* The "mode" for this unit. Valid values: * "test" β€” Build using `rustc` as a test. * "build" β€” Build using `rustc`. * "check" β€” Build using `rustc` in "check" mode. * "doc" β€” Build using `rustdoc`. * "doctest" β€” Test using `rustdoc`. * "run-custom-build" β€” Represents the execution of a build script. */ "mode": "build", /* Array of features enabled on this unit as strings. */ "features": ["somefeat"], /* Whether or not this is a standard-library unit, part of the unstable build-std feature. If not set, treat as `false`. 
*/ "is_std": false, /* Array of dependencies of this unit. */ "dependencies": [ { /* Index in the "units" array for the dependency. */ "index": 1, /* The name that this dependency will be referred as. */ "extern_crate_name": "unicode_xid", /* Whether or not this dependency is "public", part of the unstable public-dependency feature. If not set, the public-dependency feature is not enabled. */ "public": false, /* Whether or not this dependency is injected into the prelude, currently used by the build-std feature. If not set, treat as `false`. */ "noprelude": false } ] }, // ... ], /* Array of indices in the "units" array that are the "roots" of the dependency graph. */ "roots": [0], } ``` ### Profile `rustflags` option * Original Issue: [rust-lang/cargo#7878](https://github.com/rust-lang/cargo/issues/7878) * Tracking Issue: [rust-lang/cargo#10271](https://github.com/rust-lang/cargo/issues/10271) This feature provides a new option in the `[profile]` section to specify flags that are passed directly to rustc. This can be enabled like so: ```toml cargo-features = ["profile-rustflags"] [package] # ... [profile.release] rustflags = [ "-C", "..." ] ``` ### rustdoc-map * Tracking Issue: [#8296](https://github.com/rust-lang/cargo/issues/8296) This feature adds configuration settings that are passed to `rustdoc` so that it can generate links to dependencies whose documentation is hosted elsewhere when the dependency is not documented. First, add this to `.cargo/config`: ```toml [doc.extern-map.registries] crates-io = "https://docs.rs/" ``` Then, when building documentation, use the following flags to cause links to dependencies to link to [docs.rs](https://docs.rs/): ``` cargo +nightly doc --no-deps -Zrustdoc-map ``` The `registries` table contains a mapping of registry name to the URL to link to. The URL may have the markers `{pkg_name}` and `{version}` which will get replaced with the corresponding values. 
If neither are specified, then Cargo defaults to appending `{pkg_name}/{version}/` to the end of the URL. Another config setting is available to redirect standard library links. By default, rustdoc creates links to . To change this behavior, use the `doc.extern-map.std` setting: ```toml [doc.extern-map] std = "local" ``` A value of `"local"` means to link to the documentation found in the `rustc` sysroot. If you are using rustup, this documentation can be installed with `rustup component add rust-docs`. The default value is `"remote"`. The value may also take a URL for a custom location. ### terminal-width * Tracking Issue: [#84673](https://github.com/rust-lang/rust/issues/84673) This feature provides a new flag, `-Z terminal-width`, which is used to pass a terminal width to `rustc` so that error messages containing long lines can be intelligently truncated. For example, passing `-Z terminal-width=20` (an arbitrarily low value) might produce the following error: ```text error[E0308]: mismatched types --> src/main.rs:2:17 | 2 | ..._: () = 42; | -- ^^ expected `()`, found integer | | | expected due to this error: aborting due to previous error ``` In contrast, without `-Z terminal-width`, the error would look as shown below: ```text error[E0308]: mismatched types --> src/main.rs:2:17 | 2 | let _: () = 42; | -- ^^ expected `()`, found integer | | | expected due to this error: aborting due to previous error ``` ### per-package-target * Tracking Issue: [#9406](https://github.com/rust-lang/cargo/pull/9406) * Original Pull Request: [#9030](https://github.com/rust-lang/cargo/pull/9030) * Original Issue: [#7004](https://github.com/rust-lang/cargo/pull/7004) The `per-package-target` feature adds two keys to the manifest: `package.default-target` and `package.forced-target`. The first makes the package be compiled by default (ie. when no `--target` argument is passed) for some target. The second one makes the package always be compiled for the target. 
Example: ```toml [package] forced-target = "wasm32-unknown-unknown" ``` In this example, the crate is always built for `wasm32-unknown-unknown`, for instance because it is going to be used as a plugin for a main program that runs on the host (or provided on the command line) target. ### artifact-dependencies * Tracking Issue: [#9096](https://github.com/rust-lang/cargo/pull/9096) * Original Pull Request: [#9992](https://github.com/rust-lang/cargo/pull/9992) Allow Cargo packages to depend on `bin`, `cdylib`, and `staticlib` crates, and use the artifacts built by those crates at compile time. Run `cargo` with `-Z bindeps` to enable this functionality. **Example:** use _cdylib_ artifact in build script The `Cargo.toml` in the consuming package, building the `bar` library as `cdylib` for a specific build target… ```toml [build-dependencies] bar = { artifact = "cdylib", version = "1.0", target = "wasm32-unknown-unknown" } ``` …along with the build script in `build.rs`. ```rust fn main() { wasm::run_file(std::env::var("CARGO_CDYLIB_FILE_BAR").unwrap()); } ``` **Example:** use _binary_ artifact and its library in a binary The `Cargo.toml` in the consuming package, building the `bar` binary for inclusion as artifact while making it available as library as well… ```toml [dependencies] bar = { artifact = "bin", version = "1.0", lib = true } ``` …along with the executable using `main.rs`. ```rust fn main() { bar::init(); command::run(env!("CARGO_BIN_FILE_BAR")); } ``` ### sparse-registry * Tracking Issue: [9069](https://github.com/rust-lang/cargo/issues/9069) * RFC: [#2789](https://github.com/rust-lang/rfcs/pull/2789) The `sparse-registry` feature allows cargo to interact with remote registries served over plain HTTP rather than git. These registries can be identified by urls starting with `sparse+http://` or `sparse+https://`. When fetching index metadata over HTTP, Cargo only downloads the metadata for relevant crates, which can save significant time and bandwidth. 
The format of the sparse index is identical to a checkout of a git-based index. ### credential-process * Tracking Issue: [#8933](https://github.com/rust-lang/cargo/issues/8933) * RFC: [#2730](https://github.com/rust-lang/rfcs/pull/2730) The `credential-process` feature adds a config setting to fetch registry authentication tokens by calling an external process. Token authentication is used by the [`cargo login`], [`cargo publish`], [`cargo owner`], and [`cargo yank`] commands. Additionally, this feature adds a new `cargo logout` command. To use this feature, you must pass the `-Z credential-process` flag on the command-line. Additionally, you must remove any current tokens currently saved in the [`credentials` file] (which can be done with the new `logout` command). #### `credential-process` Configuration To configure which process to run to fetch the token, specify the process in the `registry` table in a [config file]: ```toml [registry] credential-process = "/usr/bin/cargo-creds" ``` If you want to use a different process for a specific registry, it can be specified in the `registries` table: ```toml [registries.my-registry] credential-process = "/usr/bin/cargo-creds" ``` The value can be a string with spaces separating arguments or it can be a TOML array of strings. Command-line arguments allow special placeholders which will be replaced with the corresponding value: * `{name}` β€” The name of the registry. * `{api_url}` β€” The base URL of the registry API endpoints. * `{action}` β€” The authentication action (described below). Process names with the prefix `cargo:` are loaded from the `libexec` directory next to cargo. Several experimental credential wrappers are included with Cargo, and this provides convenient access to them: ```toml [registry] credential-process = "cargo:macos-keychain" ``` The current wrappers are: * `cargo:macos-keychain`: Uses the macOS Keychain to store the token. 
* `cargo:wincred`: Uses the Windows Credential Manager to store the token. * `cargo:1password`: Uses the 1password `op` CLI to store the token. You must install the `op` CLI from the [1password website](https://1password.com/downloads/command-line/). You must run `op signin` at least once with the appropriate arguments (such as `op signin my.1password.com user@example.com`), unless you provide the sign-in-address and email arguments. The master password will be required on each request unless the appropriate `OP_SESSION` environment variable is set. It supports the following command-line arguments: * `--account`: The account shorthand name to use. * `--vault`: The vault name to use. * `--sign-in-address`: The sign-in-address, which is a web address such as `my.1password.com`. * `--email`: The email address to sign in with. A wrapper is available for GNOME [libsecret](https://wiki.gnome.org/Projects/Libsecret) to store tokens on Linux systems. Due to build limitations, this wrapper is not available as a pre-compiled binary. This can be built and installed manually. First, install libsecret using your system package manager (for example, `sudo apt install libsecret-1-dev`). Then build and install the wrapper with `cargo install cargo-credential-gnome-secret`. In the config, use a path to the binary like this: ```toml [registry] credential-process = "cargo-credential-gnome-secret {action}" ``` #### `credential-process` Interface There are two different kinds of token processes that Cargo supports. The simple "basic" kind will only be called by Cargo when it needs a token. This is intended for simple and easy integration with password managers, that can often use pre-existing tooling. The more advanced "Cargo" kind supports different actions passed as a command-line argument. This is intended for more pleasant integration experience, at the expense of requiring a Cargo-specific process to glue to the password manager. 
Cargo will determine which kind is supported by the `credential-process` definition. If it contains the `{action}` argument, then it uses the advanced style, otherwise it assumes it only supports the "basic" kind. ##### Basic authenticator A basic authenticator is a process that returns a token on stdout. Newlines will be trimmed. The process inherits the user's stdin and stderr. It should exit 0 on success, and nonzero on error. With this form, [`cargo login`] and `cargo logout` are not supported and return an error if used. ##### Cargo authenticator The protocol between the Cargo and the process is very basic, intended to ensure the credential process is kept as simple as possible. Cargo will execute the process with the `{action}` argument indicating which action to perform: * `store` β€” Store the given token in secure storage. * `get` β€” Get a token from storage. * `erase` β€” Remove a token from storage. The `cargo login` command uses `store` to save a token. Commands that require authentication, like `cargo publish`, uses `get` to retrieve a token. `cargo logout` uses the `erase` command to remove a token. The process inherits the user's stderr, so the process can display messages. Some values are passed in via environment variables (see below). The expected interactions are: * `store` β€” The token is sent to the process's stdin, terminated by a newline. The process should store the token keyed off the registry name. If the process fails, it should exit with a nonzero exit status. * `get` β€” The process should send the token to its stdout (trailing newline will be trimmed). The process inherits the user's stdin, should it need to receive input. If the process is unable to fulfill the request, it should exit with a nonzero exit code. * `erase` β€” The process should remove the token associated with the registry name. If the token is not found, the process should exit with a 0 exit status. 
##### Environment The following environment variables will be provided to the executed command: * `CARGO` β€” Path to the `cargo` binary executing the command. * `CARGO_REGISTRY_NAME` β€” Name of the registry the authentication token is for. * `CARGO_REGISTRY_API_URL` β€” The URL of the registry API. #### `cargo logout` A new `cargo logout` command has been added to make it easier to remove a token from storage. This supports both [`credentials` file] tokens and `credential-process` tokens. When used with `credentials` file tokens, it needs the `-Z unstable-options` command-line option: ```console cargo logout -Z unstable-options ``` When used with the `credential-process` config, use the `-Z credential-process` command-line option: ```console cargo logout -Z credential-process ``` [`cargo login`]: ../commands/cargo-login.md [`cargo publish`]: ../commands/cargo-publish.md [`cargo owner`]: ../commands/cargo-owner.md [`cargo yank`]: ../commands/cargo-yank.md [`credentials` file]: config.md#credentials [crates.io]: https://crates.io/ [config file]: config.md ### `cargo config` * Original Issue: [#2362](https://github.com/rust-lang/cargo/issues/2362) * Tracking Issue: [#9301](https://github.com/rust-lang/cargo/issues/9301) The `cargo config` subcommand provides a way to display the configuration files that cargo loads. It currently includes the `get` subcommand which can take an optional config value to display. ```console cargo +nightly -Zunstable-options config get build.rustflags ``` If no config value is included, it will display all config values. See the `--help` output for more options available. ### `doctest-in-workspace` * Tracking Issue: [#9427](https://github.com/rust-lang/cargo/issues/9427) The `-Z doctest-in-workspace` flag changes the behavior of the current working directory used when running doctests. Historically, Cargo has run `rustdoc --test` relative to the root of the package, with paths relative from that root. 
However, this is inconsistent with how `rustc` and `rustdoc` are normally run in a workspace, where they are run relative to the workspace root. This inconsistency causes problems in various ways, such as when passing RUSTDOCFLAGS with relative paths, or dealing with diagnostic output. The `-Z doctest-in-workspace` flag causes cargo to switch to running `rustdoc` from the root of the workspace. It also passes the `--test-run-directory` to `rustdoc` so that when *running* the tests, they are run from the root of the package. This preserves backwards compatibility and is consistent with how normal unittests are run. ### rustc `--print` * Tracking Issue: [#9357](https://github.com/rust-lang/cargo/issues/9357) `cargo rustc --print=VAL` forwards the `--print` flag to `rustc` in order to extract information from `rustc`. This runs `rustc` with the corresponding [`--print`](https://doc.rust-lang.org/rustc/command-line-arguments.html#--print-print-compiler-information) flag, and then immediately exits without compiling. Exposing this as a cargo flag allows cargo to inject the correct target and RUSTFLAGS based on the current configuration. The primary use case is to run `cargo rustc --print=cfg` to get config values for the appropriate target and influenced by any other RUSTFLAGS. ### Different binary name * Tracking Issue: [#9778](https://github.com/rust-lang/cargo/issues/9778) * PR: [#9627](https://github.com/rust-lang/cargo/pull/9627) The `different-binary-name` feature allows setting the filename of the binary without having to obey the restrictions placed on crate names. For example, the crate name must use only `alphanumeric` characters or `-` or `_`, and cannot be empty. The `filename` parameter should **not** include the binary extension, `cargo` will figure out the appropriate extension and use that for the binary on its own. The `filename` parameter is only available in the `[[bin]]` section of the manifest. 
```toml cargo-features = ["different-binary-name"] [project] name = "foo" version = "0.0.1" [[bin]] name = "foo" filename = "007bar" path = "src/main.rs" ``` ### scrape-examples * RFC: [#3123](https://github.com/rust-lang/rfcs/pull/3123) * Tracking Issue: [#9910](https://github.com/rust-lang/cargo/issues/9910) The `-Z rustdoc-scrape-examples` argument tells Rustdoc to search crates in the current workspace for calls to functions. Those call-sites are then included as documentation. The flag can take an argument of `all` or `examples` which configures which crate in the workspace to analyze for examples. For instance: ``` cargo doc -Z unstable-options -Z rustdoc-scrape-examples=examples ``` ### check-cfg * RFC: [#3013](https://github.com/rust-lang/rfcs/pull/3013) * Tracking Issue: [#10554](https://github.com/rust-lang/cargo/issues/10554) `-Z check-cfg` command line enables compile time checking of name and values in `#[cfg]`, `cfg!`, `#[link]` and `#[cfg_attr]` with the `rustc` and `rustdoc` unstable `--check-cfg` command line. It's values are: - `features`: enables features checking via `--check-cfg=values(feature, ...)`. Note than this command line options will probably become the default when stabilizing. - `names`: enables well known names checking via `--check-cfg=names()`. - `values`: enables well known values checking via `--check-cfg=values()`. - `output`: enable the use of `rustc-check-cfg` in build script. For instance: ``` cargo check -Z unstable-options -Z check-cfg=features cargo check -Z unstable-options -Z check-cfg=names cargo check -Z unstable-options -Z check-cfg=values cargo check -Z unstable-options -Z check-cfg=features,names,values ``` Or for `output`: ```rust,no_run // build.rs println!("cargo:rustc-check-cfg=names(foo, bar)"); ``` ``` cargo check -Z unstable-options -Z check-cfg=output ``` ### `cargo:rustc-check-cfg=CHECK_CFG` The `rustc-check-cfg` instruction tells Cargo to pass the given value to the `--check-cfg` flag to the compiler. 
This may be used for compile-time detection of unexpected conditional compilation name and/or values. This can only be used in combination with `-Zcheck-cfg=output` otherwise it is ignored with a warning. If you want to integrate with Cargo features, use `-Zcheck-cfg=features` instead of trying to do it manually with this option. ## Stabilized and removed features ### Compile progress The compile-progress feature has been stabilized in the 1.30 release. Progress bars are now enabled by default. See [`term.progress`](config.md#termprogresswhen) for more information about controlling this feature. ### Edition Specifying the `edition` in `Cargo.toml` has been stabilized in the 1.31 release. See [the edition field](manifest.md#the-edition-field) for more information about specifying this field. ### rename-dependency Specifying renamed dependencies in `Cargo.toml` has been stabilized in the 1.31 release. See [renaming dependencies](specifying-dependencies.md#renaming-dependencies-in-cargotoml) for more information about renaming dependencies. ### Alternate Registries Support for alternate registries has been stabilized in the 1.34 release. See the [Registries chapter](registries.md) for more information about alternate registries. ### Offline Mode The offline feature has been stabilized in the 1.36 release. See the [`--offline` flag](../commands/cargo.md#option-cargo---offline) for more information on using the offline mode. ### publish-lockfile The `publish-lockfile` feature has been removed in the 1.37 release. The `Cargo.lock` file is always included when a package is published if the package contains a binary target. `cargo install` requires the `--locked` flag to use the `Cargo.lock` file. See [`cargo package`](../commands/cargo-package.md) and [`cargo install`](../commands/cargo-install.md) for more information. ### default-run The `default-run` feature has been stabilized in the 1.37 release. 
See [the `default-run` field](manifest.md#the-default-run-field) for more information about specifying the default target to run. ### cache-messages Compiler message caching has been stabilized in the 1.40 release. Compiler warnings are now cached by default and will be replayed automatically when re-running Cargo. ### install-upgrade The `install-upgrade` feature has been stabilized in the 1.41 release. [`cargo install`] will now automatically upgrade packages if they appear to be out-of-date. See the [`cargo install`] documentation for more information. [`cargo install`]: ../commands/cargo-install.md ### Profile Overrides Profile overrides have been stabilized in the 1.41 release. See [Profile Overrides](profiles.md#overrides) for more information on using overrides. ### Config Profiles Specifying profiles in Cargo config files and environment variables has been stabilized in the 1.43 release. See the [config `[profile]` table](config.md#profile) for more information about specifying [profiles](profiles.md) in config files. ### crate-versions The `-Z crate-versions` flag has been stabilized in the 1.47 release. The crate version is now automatically included in the [`cargo doc`](../commands/cargo-doc.md) documentation sidebar. ### Features The `-Z features` flag has been stabilized in the 1.51 release. See [feature resolver version 2](features.md#feature-resolver-version-2) for more information on using the new feature resolver. ### package-features The `-Z package-features` flag has been stabilized in the 1.51 release. See the [resolver version 2 command-line flags](features.md#resolver-version-2-command-line-flags) for more information on using the features CLI options. ### Resolver The `resolver` feature in `Cargo.toml` has been stabilized in the 1.51 release. See the [resolver versions](resolver.md#resolver-versions) for more information about specifying resolvers. 
### extra-link-arg The `extra-link-arg` feature to specify additional linker arguments in build scripts has been stabilized in the 1.56 release. See the [build script documentation](build-scripts.md#outputs-of-the-build-script) for more information on specifying extra linker arguments. ### configurable-env The `configurable-env` feature to specify environment variables in Cargo configuration has been stabilized in the 1.56 release. See the [config documentation](config.html#env) for more information about configuring environment variables. ### rust-version The `rust-version` field in `Cargo.toml` has been stabilized in the 1.56 release. See the [rust-version field](manifest.html#the-rust-version-field) for more information on using the `rust-version` field and the `--ignore-rust-version` option. ### codegen-backend The `codegen-backend` feature makes it possible to select the codegen backend used by rustc using a profile. Example: ```toml [package] name = "foo" [dependencies] serde = "1.0.117" [profile.dev.package.foo] codegen-backend = "cranelift" ``` ### patch-in-config The `-Z patch-in-config` flag, and the corresponding support for `[patch]` section in Cargo configuration files has been stabilized in the 1.56 release. See the [patch field](config.html#patch) for more information. ### edition 2021 The 2021 edition has been stabilized in the 1.56 release. See the [`edition` field](manifest.md#the-edition-field) for more information on setting the edition. See [`cargo fix --edition`](../commands/cargo-fix.md) and [The Edition Guide](../../edition-guide/index.html) for more information on migrating existing projects. ### Custom named profiles Custom named profiles have been stabilized in the 1.57 release. See the [profiles chapter](profiles.md#custom-profiles) for more information. ### Profile `strip` option The profile `strip` option has been stabilized in the 1.59 release. See the [profiles chapter](profiles.md#strip) for more information. 
### Future incompat report Support for generating a future-incompat report has been stabilized in the 1.59 release. See the [future incompat report chapter](future-incompat-report.md) for more information. ### Namespaced features Namespaced features has been stabilized in the 1.60 release. See the [Features chapter](features.md#optional-dependencies) for more information. ### Weak dependency features Weak dependency features has been stabilized in the 1.60 release. See the [Features chapter](features.md#dependency-features) for more information. ### timings The `-Ztimings` option has been stabilized as `--timings` in the 1.60 release. (`--timings=html` and the machine-readable `--timings=json` output remain unstable and require `-Zunstable-options`.) ### config-cli The `--config` CLI option has been stabilized in the 1.63 release. See the [config documentation](config.html#command-line-overrides) for more information. ### multitarget The `-Z multitarget` option has been stabilized in the 1.64 release. See [`build.target`](config.md#buildtarget) for more information about setting the default target platform triples. ### crate-type The `--crate-type` flag for `cargo rustc` has been stabilized in the 1.64 release. See the [`cargo rustc` documentation](../commands/cargo-rustc.md) for more information. ### Workspace Inheritance Workspace Inheritance has been stabilized in the 1.64 release. 
See [workspace.package](workspaces.md#the-workspacepackage-table), [workspace.dependencies](workspaces.md#the-workspacedependencies-table), and [inheriting-a-dependency-from-a-workspace](specifying-dependencies.md#inheriting-a-dependency-from-a-workspace) for more information.cargo-0.66.0/src/doc/src/reference/workspaces.md000066400000000000000000000163421432416201200214630ustar00rootroot00000000000000## Workspaces A *workspace* is a collection of one or more packages that share common dependency resolution (with a shared `Cargo.lock`), output directory, and various settings such as profiles. Packages that are part of a workspaces are called *workspace members*. There are two flavours of workspaces: as root package or as virtual manifest. ### Root package A workspace can be created by adding a [`[workspace]` section](#the-workspace-section) to `Cargo.toml`. This can be added to a `Cargo.toml` that already defines a `[package]`, in which case the package is the *root package* of the workspace. The *workspace root* is the directory where the workspace's `Cargo.toml` is located. ### Virtual manifest Alternatively, a `Cargo.toml` file can be created with a `[workspace]` section but without a [`[package]` section][package]. This is called a *virtual manifest*. This is typically useful when there isn't a "primary" package, or you want to keep all the packages organized in separate directories. ### Key features The key points of workspaces are: * All packages share a common `Cargo.lock` file which resides in the *workspace root*. * All packages share a common [output directory], which defaults to a directory named `target` in the *workspace root*. * The [`[patch]`][patch], [`[replace]`][replace] and [`[profile.*]`][profiles] sections in `Cargo.toml` are only recognized in the *root* manifest, and ignored in member crates' manifests. 
### The `[workspace]` section The `[workspace]` table in `Cargo.toml` defines which packages are members of the workspace: ```toml [workspace] members = ["member1", "path/to/member2", "crates/*"] exclude = ["crates/foo", "path/to/other"] ``` All [`path` dependencies] residing in the workspace directory automatically become members. Additional members can be listed with the `members` key, which should be an array of strings containing directories with `Cargo.toml` files. The `members` list also supports [globs] to match multiple paths, using typical filename glob patterns like `*` and `?`. The `exclude` key can be used to prevent paths from being included in a workspace. This can be useful if some path dependencies aren't desired to be in the workspace at all, or using a glob pattern and you want to remove a directory. An empty `[workspace]` table can be used with a `[package]` to conveniently create a workspace with the package and all of its path dependencies. ### Workspace selection When inside a subdirectory within the workspace, Cargo will automatically search the parent directories for a `Cargo.toml` file with a `[workspace]` definition to determine which workspace to use. The [`package.workspace`] manifest key can be used in member crates to point at a workspace's root to override this automatic search. The manual setting can be useful if the member is not inside a subdirectory of the workspace root. ### Package selection In a workspace, package-related cargo commands like [`cargo build`] can use the `-p` / `--package` or `--workspace` command-line flags to determine which packages to operate on. If neither of those flags are specified, Cargo will use the package in the current working directory. If the current directory is a virtual workspace, it will apply to all members (as if `--workspace` were specified on the command-line). 
The optional `default-members` key can be specified to set the members to operate on when in the workspace root and the package selection flags are not used: ```toml [workspace] members = ["path/to/member1", "path/to/member2", "path/to/member3/*"] default-members = ["path/to/member2", "path/to/member3/foo"] ``` When specified, `default-members` must expand to a subset of `members`. ### The `workspace.metadata` table The `workspace.metadata` table is ignored by Cargo and will not be warned about. This section can be used for tools that would like to store workspace configuration in `Cargo.toml`. For example: ```toml [workspace] members = ["member1", "member2"] [workspace.metadata.webcontents] root = "path/to/webproject" tool = ["npm", "run", "build"] # ... ``` There is a similar set of tables at the package level at [`package.metadata`][package-metadata]. While cargo does not specify a format for the content of either of these tables, it is suggested that external tools may wish to use them in a consistent fashion, such as referring to the data in `workspace.metadata` if data is missing from `package.metadata`, if that makes sense for the tool in question. ### The `workspace.package` table The `workspace.package` table is where you define keys that can be inherited by members of a workspace. These keys can be inherited by defining them in the member package with `{key}.workspace = true`. 
Keys that are supported: | | | |----------------|-----------------| | `authors` | `categories` | | `description` | `documentation` | | `edition` | `exclude` | | `homepage` | `include` | | `keywords` | `license` | | `license-file` | `publish` | | `readme` | `repository` | | `rust-version` | `version` | - `license-file` and `readme` are relative to the workspace root - `include` and `exclude` are relative to your package root Example: ```toml # [PROJECT_DIR]/Cargo.toml [workspace] members = ["bar"] [workspace.package] version = "1.2.3" authors = ["Nice Folks"] description = "A short description of my package" documentation = "https://example.com/bar" ``` ```toml # [PROJECT_DIR]/bar/Cargo.toml [package] name = "bar" version.workspace = true authors.workspace = true description.workspace = true documentation.workspace = true ``` ### The `workspace.dependencies` table The `workspace.dependencies` table is where you define dependencies to be inherited by members of a workspace. Specifying a workspace dependency is similar to [package dependencies][specifying-dependencies] except: - Dependencies from this table cannot be declared as `optional` - [`features`][features] declared in this table are additive with the `features` from `[dependencies]` You can then [inherit the workspace dependency as a package dependency][inheriting-a-dependency-from-a-workspace] Example: ```toml # [PROJECT_DIR]/Cargo.toml [workspace] members = ["bar"] [workspace.dependencies] cc = "1.0.73" rand = "0.8.5" regex = { version = "1.6.0", default-features = false, features = ["std"] } ``` ```toml # [PROJECT_DIR]/bar/Cargo.toml [project] name = "bar" version = "0.2.0" [dependencies] regex = { workspace = true, features = ["unicode"] } [build-dependencies] cc.workspace = true [dev-dependencies] rand.workspace = true ``` [package]: manifest.md#the-package-section [package-metadata]: manifest.md#the-metadata-table [output directory]: ../guide/build-cache.md [patch]: 
overriding-dependencies.md#the-patch-section [replace]: overriding-dependencies.md#the-replace-section [profiles]: profiles.md [`path` dependencies]: specifying-dependencies.md#specifying-path-dependencies [`package.workspace`]: manifest.md#the-workspace-field [globs]: https://docs.rs/glob/0.3.0/glob/struct.Pattern.html [`cargo build`]: ../commands/cargo-build.md [specifying-dependencies]: specifying-dependencies.md [features]: features.md [inheriting-a-dependency-from-a-workspace]: specifying-dependencies.md#inheriting-a-dependency-from-a-workspace cargo-0.66.0/src/doc/theme/000077500000000000000000000000001432416201200153275ustar00rootroot00000000000000cargo-0.66.0/src/doc/theme/favicon.png000066400000000000000000000353561432416201200174760ustar00rootroot0000000000000000 ¨%6  ¨ή% h†6(0` $  '-:l3LbΜKnΔ@^y[/=$= 3b(=OΏ4OhτDf„i“Ή`ˆ¬ςMq‘³@_zT*>O /FXc!1A‘=]zώ?a€Fi‰p›Αx₯ΝS}£8^υAe„Έ@_zZ&6D ):JWcij&9K’Acώ>]yCb~\£k”ΉNxž)Rw/W|8_‚φ?cƒ½@`{],>N !/9L#-x uegis':L·=Ys>a}>yŸMгS}ŸKp‘3Y|*Rw*Rw.V{8^χ@d„Ώ@`|a(;K  #,6D$/u*@TΓ:Xtυ>]zλ+@R΅"†+6Ÿ5K`Χ;_{ω-k‘s¦…½?¬δA’ΦKŽΈO{žDiŠ4Z|+Sw)Rw-Uyη<±μ<―ι@£ΨW—Ύ_…₯Hl6[|#Gg3Xyn™ΐd²ωKp‘ΗAaiη<―κ<―κ:°λZΊλ}ΐδcŸΔO€Fj‰Bd„l–»v‘ΙHr˜3[~ϊ>bƒΚBc€n7Nc   1 @ENQV0H^Ί>Zu>[t:dƒ'o™w­|Ά}Έ ~Έ Ί Ί €»ŒΔ>ζ;―κ;―ι:ιVΈκ{Ιπ_Όμ<₯έG•ΓP¦V{œ`†¨Cj(Qv+Sx4\ϋ>c„ΝAb€q5La" +)5b/G]·7SmΑ!3C Z%t.AR²;Ysο4d…0tœz―{Ά }Έ }· s© mŸ v¬‚ΌƒΎŽΗ>­ζ;ι;ι;ι@°θ[ΊιWΈι9­θ9­θ>¨ήK”ΐOƒ©Ov˜?d†/Vz)Rw,Uz3[ό?d…ΡJlŠu6Qj".<,-CX7Tnρ@bIo“>_}φ5MdΨ;Ztν1gŠoŸ v­!ˆ½» ~Ή ~Έ q₯ o’ o‘ h—~Ά…Α Ι>­ζ:ι:ι:ι:θ9­θ:¬η9­η:­θ<ιE­β?ͺβEΞO‹³PyœAfˆ0Wz$Jk3YzgΆό\ƒ¦ΣQv—F5PiZ>_|ύ>_}=\xB`|IiƒN|™r£v―y΄{΅#‹ΏƒΌ Ό x q€}Άq’h–{±ˆΔ"’Λ=­ε:­θ:θ:­θ:­θ9­η9¬η:­θ:­θ<θF―ε=°λ<°μ=­ζC ΣNŽ·Pzž>a€;_p›Αu‘Θm˜Ύj9Vp]>\wύ?[u:`}3o”V˜»NΖ |΅z΅{Ά }·$ŒΑ…Ώ‚Όnži˜q£p v©~΄‹Ζ$”Ν=¬ε9­θ9­η9­η9¬η8¬η9¬η9¬η9¬η;­ηF°ε=°κ<―κ<―κ<°λ<­ηB’ΦL»S£]ƒ₯h΄l•Ίf;VmW:cσ&k”s§ΆS£ΜFšΕ ~· }Έ ~Ή €Ί&ŽΓˆΒΈp‘j˜ožnžv¨†ΏΘ&–Ο=¬ε9¬η9¬η8¬η8¬η9¬η9¬η9¬η9¬η;­ηF°ε=―κ;ι;―κ:¬ζ:­η;―κ;θ@€ΩJ’ΏT„©πW{œT*Ri!vͺΣy³z΅€Έ2’Β„» Ί Ί 
»‚Ό(ΔŠΔΆu¨nr’v§}°ŽΘΚ(˜Ρ<¬ε8¬η8«ζ8«ζ8«ζ8«ζ8«ζ8«ζ8«ζ:¬ζF―ε<―ι;ι7€Ϋ1’Γ1‘Α6ŸΤ;­θ;―κ;ιD©ήΚP‚Ÿ!\{|³|· }Έ ~Ή Ί €»‚Ό‚ΌƒΎ…ΐ*‘ΖŒΖ„½ožt₯|°|―ŠΒ‘Μ’Ν*šΣ<¬ε6§α7©γ8«ζ8«ζ8«ζ8«ζ8«ζ8«ζ:¬ζF―ε<θ9«ε0Ώ0ΐ6‘Χ2•Θ5žΤ:ι:ιB°θΚU₯Ν*x ΉΟ Ί €»Όƒ½ƒΎ„Ώ…Ώ†ΑˆΒ+’ΗŽΘŒΖ|―v§{‰Α’Ν “Ξ ”Ο+›Υ<¬ε1ŸΧ5₯ή7«ζ7ͺε6ͺε7«ζ7ͺε7ͺε9«εF―ε;θ8¨α0ΐ.‹Ί1”Η5ŸΥ0ΐ8¦ί:­θB°θΙT©Τ0†²…½ΞΌƒΎ…Ώ†Α‡Β‡ΒˆΓ‰ΓŠΔ-”Ι‘ΚΛΚΕΚ ”Ο!”Ο"–Ρ&—Ρ6G­ΰ;ͺγ7ͺδ5ͺεL±εVΆη=¬ε7ͺε7ͺε9«εG―ε;­η8¨α1•Θ/Ώ/ŽΎ.Œ»,…³3›Π9­θA°θΙR©Σ2ˆ΄ˆΐΞ…ΐ†ΑˆΓ‰Δ‹ΖŒΖ‹ΖŒΖΘ0–Λ"“Ν’Ν“Ξ ”Ο!•Π#–Ρ*˜Π0–Ι,Ύ.ΒHͺάIͺΫH«ή>«γWΆηwΕμWΆη5©δ5¨β8¨βG°ε:­η8«ζ0“Ε+„².‹Ί-‰Έ1•Θ1•Θ8¬ηA°θΙR¨Σ5ŠΆ‹ΓΞˆΓŠΕ‹ΖŒΗŽΙΛΛΛ#‘Μ2˜Ν$•Ο ”Ο!•Π&—Π3šΟ8–Η(ŠΌ„Ή»Ε@―θ=―κA­γFͺέ`³έ}Γζ^Έζ5©δ2‘Ϊ4‘ΩG°ζ9«ζ8«ζ4žΥ.½/‘Γ-ŠΉ0’Ε1–Ι8«ζ@―ηΙR§8» ŽΖΞŒΗΘΚΛ’Ν!•Π$—Ρ$’Κ(•Ο5›Ο'˜Ρ+˜Π2—ΛFœΘdͺΞ:”Β ΌƒΏ„Ύ!Η@η<°λ<°λ<―κ@ζV±ΰ[±έH¬ί?ͺΰ9¨αF―ε8«ε7«ζ7©δ2™Ξ0“Ε1—Λ.ŽΏ3Σ7«ζ@ζΙS¨Σ<“ΐ#’ΚΞΛ’Ν “Ξ"•Π*šΣ'šΥ(œΧ(—Ο-Χ<Ÿ8˜Κ+Ώ„Ί9—Η\ͺ6•Ζ„Ώ†Α†Α#‘Ι@―η<°λ<―κ<―κ<°λ;―λ<―ιA¬γH«ήNίS³δ=¬ε7«ζ6ͺζ6©γ3Τ0”Η2šΠ6©γ7«ζ?ζΙSͺΦ<’½'–ΞΞ ”Ο"–Ρ)šΤQ­ά_³ή/ŸΩ/’ά<¨ίM°βD’Ρώ…ΌΌ‚Ύ;™ΚQ₯Ο%ΕˆΓ‰Δ‰Δ%“Λ@―η;ι;ι:¬ζ9¨ΰ:«ε;―κ;―κ;ιA―ηU³βNίώJ­ΰώ@¬γ8«ε6ͺζ6ͺε6ͺε6©δ6ͺε?­εΙP¦ΠFœΘ,šΞ%”Ν&šΥ4’Ϊi»δjΉβG«ήR²γW΅εU΅ηA§Ϋ…ΐ„Ώ†Α‹ΔŠΔŠΕ‹ΖŒΗŒΗ'•Ξ@―η;ι:¬ζ3–Θ0Ώ1’Γ7£Ϊ;ι;ι>°ιL΄ι?°ιD­δώK¬ήώM­ήώE­βώ:«δ6ͺε5©δ6©γ>¬εΙP¦Ρ^΄ΰ1ŸΦΞ(–Ξ/ ΪG¬ΰŒΚι“Κηn»γW΄δM±εM²η@§άˆΒˆΒ‰ΔŠΕŒΗΘΘŽΙŽΙ)—Π?η:ι6’Ψ-‡΅3—Κ6’Ω2•Η7₯έ:ι>―ιL΄ι=°λ<°λ<―κE―εrΌβώd΅ήύK­ίώ?¬γ7ͺδ=¬εΙO¦ΡeΊε@©ήΞDͺίQ°αX΄δnΏθ}ΕλƒΗλ€Εθp½ε^·δD¨Ϋ‹Ε‹ΖŒΗŽΙΚΚΛΛ‘Μ,š?ζ:ι4Σ1’Δ-ˆ·4›Ο1“Ε1”Η9­η=θL΄ι=°κ<―κ;―κB±κsΕοuΔμJαώJ«άύK¬άM°βΘV©Ηι bΈδžV³δυO±δL±εJ°εM²ζY·ηlΏκ}Ζλ…Θκb΄ήΙŽΙΚ‘Λ’Ν’Ν “Ν “Ν ”Ο.›Τ?­ζ9­θ5 Χ0’Δ-ˆ·/ŽΏ-‡΅.‹Ί7§ί=ηK³θ=―κ;ι;―κ>―ιfΏμvΕνB±ι9­η>«βMΰΓ\’Δ tΒκYΆηCS³ε“P²εΪN²εόL±εK°εL²ζR΄ηa»ιY³α ’Μ‘Μ“Ξ!”Ο"•Π"•Π"•Π#–Π#–Ρ1Υ?­ζ8¬η7§α-ŠΊ,†΄.ŒΌ/ΐ0“Ζ5’Ω<θK³η<―ι;ι;―ι;ι=―ιJ³ι=η9¬η:ι@εΕI–» xΔμ]ΆδYΆζcS΄ζ΄O²ενN²ζM±ζL±ζDͺί#•Ο!”Ο#–Ρ$˜%™Σ&šΤ&™Τ&˜Σ'˜Σ3ŸΧ>­ζ8«ζ8«ζ2™Ξ0’Ε0’Δ/‘Β/Α5£Ϋ<­ηJ²η<θ:­θ:θ:ι:­θ8­θ9¬η9­θ:ιA­εΕI–Ό pΌδ ]·ζ7VΆη…R΄ηΠP³ηωE¬ΰ&˜$˜Σ&™Τ'›Φ)œΦ*žΩ+žΩ*›Υ)–Ο6 Ω>¬ε7«ζ8«ζ7©γ1—Λ0’Δ/‘Γ/’Ε7¨γ;¬ζJ²η;­θ:­θ:­θ:­θ9­θ9¬η9­θ:­η:­θ@­δΕH”» ‚ΙοaΊι[ΈθUG«ήΎ*›Υ(›Φ)Ψ*žΩ-ŸΪ0’Ϋ/’έ. 
Ω,šΣ:€έ>¬δ6ͺε7ͺε7«ζ6©γ3 Χ2œ6§ΰ7ͺε:«εI±ζ:­η9¬η9¬η9¬η8¬η9¬η9­θ9¬η9­θ?¬δΕH•» ;’Φm-ŸΩ+ŸΪ, Ϋ=¨ήaΈδN―ΰ0₯ΰ7¨βB­γS΄εR΅θA―ζ8«ε6ͺε6ͺε7«ζ7«ζ7ͺε6ͺε:¬εI±ζ:­η9¬η8¬ζ8¬ζ8¬η9¬η9¬η9¬η9¬η?¬δΕG”Ί >₯Ϋm/ŸΨ-ŸΨ.’ήS³δsΑιX΄γG―δT΄εW΅εT΅ηUΆθX·θTΆθG±ζ;¬ε6ͺε5©δ6©δ6©δ9«εI±ζύ9«ζ8«ζ8«ε8«ζ8«ζ8«ζ8«ζ8«ζ8«ζ?¬δΕPœΒ A¨έm0žΦ.žΧ5§αd»η”Ξμ€Βδ]΅γP²δL²ζM²ηN²ηN³ηR΅θW·θXΈιO΄θAζ8ͺδ5¨γ9ͺδI²ηό8©γ4£ά7«ζ7«ζ7«ζ7«ζ7«ζ7«ζ7ͺεB―ηΕxΓι F¬ΰm>ͺβHβS²γg»ζ„Θλ†ΘλΕθrΎε^ΆδQ²δM²ζN³θN³θN³ηP΅ιU·ιYΈιTΆιG°ζ>¬εI²ηό7¨α2 Ψ7ͺε8«ε;¬ε6ͺε7«ζ7ͺε7ͺεB―ηΕ}Ηξ V±ίmX΄γT³γO±δL±εL±εWΆηjΎι}Ζλ„Ηκ{Βηj»εX΅εO³ζN³θO΄ιO΄θO΄θRΆιYΉκ]Ίκ^ΉθύM²ζ@ε8ͺε<¬δiΎιXΆη9«ε6ͺδ6ͺεBζΕ|Ζν 5œΡm1Τ>§έJ―βN²εM±εK°εL±εR΄η`ΊθtΒλ‚Θλ‚ΖκtΏζaΈεS΄ζO΄θQ΅θUΆιW·ιUΆθS΅ηU΅ζX΅ζR³εK±εpΑλoΑλ=¬δ5¨γ3’ΫA­δΕ|Ηξ 3Σm#–Ρ$—,Φ;¦έG­βM±εM±εM²ζL±ζN²ζX·θjΏλ|Ελ†Θλ†Θκl½ηUΆηR΄ηO²ηM²ηM³ηM²ηO³ηYΆζp½ε“Λθ‰ΘιH°ε7©γ0žΥ@¬δΕ|Ηο 7‘Χm'šΤ'›Φ(œΧ)Ψ- Ϊ9¦ήE­γM²ζN²ζM²ηM²ηO³ηVΆθg½κh½ιV΅ζM±εM²ζL²ζL²ζP³ζ]·ζpΏθ€Ει†ΙμƒΙνsΑιZΆεS³εG―δH°ζΖ|Ζμ 9€Ϊm*žΨ*žΩ+ŸΪ8₯έU²βDͺή0€ί9©γL±εWΆηX·θVΆηP²εM±εK±εK°εK±εM±εV΄εg»ζzΓι…ΘλΗμnΐκ]ΊκQ΅ιN³θP΄θTΆθZ·ηψf»η”Οο=§έm-žΨ-ŸΪ.’έW΄δtΑθQ±αB­γQ³δW΄δV΅ζY·θώ\ΈθώWΆζR³ζN±εQ±γ_·δsΏηΖι‚ΗλuΓλc»κT΅ηN²ζN³θO΄ιQ΅ιύS΅ιΰWΈκœ]ΊλKnΐμ@ͺαm.›Σ.žΧ3¦αfΌθŒΚκu½βY΄γQ²εM²ζM³ηN³ηO³ηT΅θZΈθi½ι‡Ιλ‰Ιλ{Ελi½ιWΆηN³ηM²ζN²ηO³ηR΅θρV·ιΌYΆζl]Άδ%sΕπC­γm:¨ΰE­αQ²γpΏηŽΛλ‰Ικ|ΓθjΊδX΄δN²εM²ζN³ηN³ηO³ηTΆιcΌκkΏλ^ΊιT΅ηN²ζN²ηP³ηϋS΅θΧW·ιŽ_Ίθ?qΐθ aΉζ_Y΅δωT²γP±δL±εP³ζ^ΉθrΑλΗλƒΖιvΐζcΈδT΄εN²ζN³ηO΄θN΄θO΄θTΆι[Ήι`»ιοY·θ­X·θ^a»ι~ΘοyΑη]·ζYR³ε¬O±διM±ζώL±εK°εM²ζUΆθfΌιyΔκ„Θλ€Ειo½ζ\ΆεQ΄ηO΄θQ΅θόTΆιΪXΈκ’hΏμ?ˆΝρh½θ[·ζ0T²γ|Q²εΙN²εφM±εL²ζL±εO³ζ\ΉιoΑλΗμ†ΘλƒΕηυkΊβΐXΆηfaΌλ wΖξc΅ΰ[ΆεLT΅ηQ΄ηίN²ζόM²ζM²ηO΄θϊYΈιΦzΔ돠ΘέG°ΐΗqΒμ^Ίι,WΆη}S΅ηΥT΅ηΝY·θo]Ήι!L―δώψπππψΰΰΰωΐΰΰΰΰΰΰΰΰΰΰΰΰΰΰΰΰΰψΐψόόόόόόόόόόόόόώΐπώ( @   # 1?l8SlΞTyšΘHi‡]3I\/J%0ƒ8VqρEg†n—Ύ\…ͺσ:_΅=^{V0DV "?S`g-;—?]yϋCg…\†©T{ -Uy1Y}φ9^»>_|Z2H\   (>)6w1JaΔ2Ka½!*Š ,7›2QjΪ,gŒ$~―BΟIŠ΄Dn’4[~,Tx0W{χBgˆΏLnŒ^3J^"0?!2B†:XtυAb€Ef…9\xφ*iϊu¨ {΅‡ΐ;­η<―ιF₯Ψc˜ΌLu—2UvIn‘k•»ψGnΒ=_}b>Wo , =FP+>Pž?]x:`~*l”w« |Ά ~Ή Ί‰Γ<¬ζ;―κA±λqΓνYάEŽΊP~’cŠ?f‹.Vzω7]Ζ=_~f5Oe  1?^3NgΉ9XtΔ"3CŠ+?Q€0\yη)n–| }· {΄ q₯ q₯€»Ζ;¬ε;ι;ιM΄ιG²ι9­θB₯ΩJ’½I|’«δEœΝO‰±`‹―ψf޲I1\x)q 
ΰw±ˆ½-Α Ή Ί†ΎŒΔ{±p q‘z¬‹Ζ%–Ο:«δ8¬η8«ζ8¬ζ8¬ζ8¬ζ?­ζ@―θ:¬ζ5žΣ59«ε<¬εEŸΡΫR…©#qœ }ΆΧ }Ή €Ί Ό‚½ƒΎ‰Β!Η€Άs£{ˆΐ’Ν'™9©β5¦ΰ8«ζ7«ζ7«ζ7«ε>­ε@―θ6’Ω/½4šΟ4›Ο9¬ζ?°ιO°ΰ%†Ά‚ΌΥ‚½„Ώ†Α‡Α‡Β Ε$’ʍȇ½ŒΔ ”Ο#•Π0›@©ί7§ΰ:¬ζK²ζ;¬ε6ͺε>¬ε?―θ5‘Ψ/ŽΏ0Α.ŒΌ6’Ω>―θM―ΰ*Š»‡ΐՇ‰ċƍǍΘ%‘Κ'•Ν“Ξ!•Π&–Π+•Λ'ŽΒ(ΓCͺίE«ΰH­αpΐθN³ζ3¦α<©α?η6₯ή.‹»-ŠΊ/ΐ3Σ=θMί.ΐŒΕΥŒΗŽΙΛ!”Ο"“Ν+–Ξ,™Π(–Ξ7™ΛM Λˆ½‚ΌŒΖ=­η<―κAε\΅βS±α<§ή>©ΰ>­ζ7ͺε2›Ρ0’Ε/Β4 Χ=­ηMί3”Ε!’ΛΥ’Ν&—Ρ6 Φ*œΧ,Φ9‘Χ2–ΘˆΎ2“ΔP£ΞˆΑ†Α Ι=­η<―κ;―κ;ι>θB­γM°βI―γ;«ε7©γ3 Χ3žΥ6©γ<¬ζL­ί;œΝ'–ΟΥ#—B¨άiΉβEͺήL°γP±γ!ŽΖƒΎŒΔ%ΗŠΕ‹Ζ#“Ν=­η:¬ζ5›Π4šΞ9¨α:κD±θH±ζF­βώF¬αώ@¬γ8ͺε5©δ:«δK¬έL­ί2ŸΦΥ6’Ω]ΆγŠΘιrΎζ\ΆεM±δ#’ΚˆΓ‹ΖΘŽΙΙ&–Π=­η7€Ϋ/½3™Ν3™Ν9ͺδC±ιB±κ;―κE°ηm½εώS±ΰώA«ΰ@¬γΡL­έeΊδT²βΆQ±γόQ²εYΆηjΎιwΒιqΎζ.˜ΞΘΛ‘Μ’Μ’Ν*™Σ<­ζ6€ά/ΐ/ŽΎ.Œ»5 ΦB°θB±ι;κ@°κkΑνTΆι?«βI­ΰΞX­Ψe»ηS³ε[P²ε­M±εκL±εQ΄η\·ζ0›’Ν!•Π#–Ρ#–Ρ#–Ρ.œΥ<¬ε7¨β/ŽΏ.ŒΌ/ΐ3œA―θA°θ:ι;ιA°ι@―θ9­θ>­ζΟI€fΉδZΆε1S΄ζ~P³ηΛM±εχ/Φ#—&šΤ'›Φ)Ψ(™Σ1Φ;«ε7«ζ4 Ψ0”Η/‘Γ4 ΨA―η@―θ:­θ:­θ9­θ9¬η:­θ>­ζΟG£_ΊκQ±γZ1ŸΨθ(œΧ. Ϊ;¦ά1£έ0 Ϊ=¦έ@­ε7ͺε6ͺε5€έ4’Ϊ6©δ@ζ@η9¬η9¬η9¬η9¬η9¬η=¬εΟF£ΡB€Φ1 ΨΨ+ŸΪE¬αjΌζF­βK±δT΄ζT΅θL³ηAζ8«ζ6ͺε5©δ?­εώ?ζώ7«ζ8«ζ8¬ζ8¬ζ8«ζ<¬εΟJ¦ΥG©Ϋ6£ΫΨ7₯έ\·ζ‹Κκo½εY΅εN²ζM²ηQ΄θTΆθQ΅θG°η;«δ?­εώ=«γώ4₯ί7«ζ7«ζ7«ζ7«ε>­ζΟ^ΉθO«ΪO°βΨQ²γS³ε_ΉηnΏιxΓιqΎζ_ΈζR΄ηN³θP΄ιSΆιTΆιUΆθώK±εώ<ͺγ<¬εTΆη?­ε6ͺε=­ζΟ`»κ:Π0œΤΨ9€ΪF­βK°εM±εU΅ηeΌιvΒκxΒιk½η[ΈηS΅θTΆθS΅θR΄ηR΄ζV΅εzΔκQ΄ζ3₯ί:§ίΟaΎξ?€Ψ)šΤΨ&šΥ,žΨ9¦ήE­γK±ζM²ζQ΄η_ΊιrΑκdΊηO²ζL±ζN²ζXΆηj½θ|Δι„Θκ`ΉζGγD­βΟb½λD©έ.ŸΩΨ*žΩ;§ήV²β9¨ΰEδU΅ζX·θS΄ζM²ζK°δQ²εaΉζsΐθyΓκnΐκ]ΊιR΅ιQ΄θύV·θβ_Ήθ†|ΔκH―γ1ŸΨΨ. 
ΪV΅εΔθ[΅γR³εO³ζQ΄ηU΅ηX·ηnΎθ{ΓιsΑιb»θT΅θN³ηP΄θρSΆθ½UΆθl[Ίλ%fΐπS΄ζE¬αΧJβ^ΈζzΓιzΓιoΎζ]·εQ³ζM²ηP΄θ\Ήκb»κY·θR΄ηϋP΄ηΧT΅θYΈθ?gΌθ vΒι^·εoR²δΛM±εχL±εU΅ηgΌιvΒιwΒθiΌηYΆηP΄θP΅θρXΈι»cΌκe^ΉιqΒλZ·ζT³δLQ²εN²εΰM²ζόP³ζ\ΉιpΑλϋ‚ΖιάuΎγ—XΆη?`Όμ r½εZ·η+T΅η}Q΄ηΧQ΄ηΠUΆθrzΒη%ΡΚΒώψψ€ώώΰΐΐΐΐΐΐΐΐΐΐΐπώ€€€€€€€€€ΐπώ(  &J>[uΛY~ Λ:^]6Sl #%8Hh ,8!9L¦5m“ϊK‰²:e‰τ4Xz·EhˆUKk‡  8 -:v9]zς+hωr£ϊ…Ύ9©βR¦ΥHƒ«R€χAf‰Ό3WwY7Vr/EZ8Tm°:^yΝ d‹άy« z± s¨ˆΑ8«ε@±κ?ηD£ΦAˆ΄:iŽψRx›·k’Ά"3_~/%l–ζ.…΄€Ή†Ώvͺt¦Η8ͺδ8¬η9¬η>―θ:©β=ŸΣL˜Ζε^’Ή/}± €ΊΪ„Ύ…ΐΕƒΊˆΏ(—Π9§ί>­ε:¬ε=­ζ3šΞ1–Ι9©αΨE±η ‹ΑŠΔٍȐΛ(•Ν*–Ν-”Θ"Ζ=ͺαQ³ζFβ=«δ4žΤ/‘Γ7€ΫΨE³κ+–Μ$•ΟΩ@₯Ψ@§ά3œΡ$Γ(ΖΙ9ͺδ9¦έ>¬γF―ε@«β:¦ά9©βΨB­δE©ΫB©έΘ`ΈεiΌθB₯ΨŒΗΚ%–Π7₯έ0’Γ5ΣA°ιD±ιVΆηB¬βΧH­ΰ~ΕιYΆζ)P²εvU΅ηΕB¨άφ"–Ρ%™Σ,œΦ8¨α0–Ι2™Ν?η;ι>―θ<­ζΦD«ί`»λ 4’Ϊ’6£ΫH­ΰBͺΰE―ζ;ͺβ6§α=­ζ8«ζ7¬η:¬ζΦB«ΰ?§έyN°βl½ηcΊηXΆηP΄θJ²ηF―εώ=«δ@ζ9«εΦH±ζ1žΦy7£ΫGγWΆηe»θaΊθR΄ηWΆηcΊηc»ηA¬βΦK±ε0 Ωy>¨ήY΄γO²εS΄ηY·ηe»ηd»θ]ΉιςW·θΏVΆηbcΊηJαWR²δαdΊηόe»η_Ήη[Έθό]ΉιΫUΆθP΄η?VΆη sΒλR²ε+O²ε}T΅ηΧc»ιΤrΏηyWΆη&oΔρώψπ€€€€€€ΰπψψψψώcargo-0.66.0/src/doc/theme/head.hbs000066400000000000000000000000741432416201200167270ustar00rootroot00000000000000 cargo-0.66.0/src/etc/000077500000000000000000000000001432416201200142335ustar00rootroot00000000000000cargo-0.66.0/src/etc/_cargo000066400000000000000000000536321432416201200154210ustar00rootroot00000000000000#compdef cargo autoload -U regexp-replace _cargo() { local curcontext="$curcontext" ret=1 local -a command_scope_spec common parallel features msgfmt triple target registry local -a state line state_descr # These are set by _arguments typeset -A opt_args common=( '(-q --quiet)*'{-v,--verbose}'[use verbose output]' '(-q --quiet -v --verbose)'{-q,--quiet}'[no output printed to stdout]' '-Z+[pass unstable (nightly-only) flags to cargo]: :_cargo_unstable_flags' '--frozen[require that Cargo.lock and cache are up-to-date]' '--locked[require that Cargo.lock is up-to-date]' '--color=[specify colorization option]:coloring:(auto always never)' '(- 1 *)'{-h,--help}'[show help message]' ) # leading items in parentheses 
are an exclusion list for the arguments following that arg # See: http://zsh.sourceforge.net/Doc/Release/Completion-System.html#Completion-Functions # - => exclude all other options # 1 => exclude positional arg 1 # * => exclude all other args # +blah => exclude +blah _arguments -s -S -C $common \ '(- 1 *)--list[list installed commands]' \ '(- 1 *)--explain=[provide a detailed explanation of an error message]:error code' \ '(- 1 *)'{-V,--version}'[show version information]' \ '(+beta +nightly)+stable[use the stable toolchain]' \ '(+stable +nightly)+beta[use the beta toolchain]' \ '(+stable +beta)+nightly[use the nightly toolchain]' \ '1: :_cargo_cmds' \ '*:: :->args' # These flags are mutually exclusive specifiers for the scope of a command; as # they are used in multiple places without change, they are expanded into the # appropriate command's `_arguments` where appropriate. command_scope_spec=( '(--bin --example --test --lib)--bench=[specify benchmark name]: :_cargo_benchmark_names' '(--bench --bin --test --lib)--example=[specify example name]:example name:_cargo_example_names' '(--bench --example --test --lib)--bin=[specify binary name]:binary name' '(--bench --bin --example --test)--lib=[specify library name]:library name' '(--bench --bin --example --lib)--test=[specify test name]:test name' ) parallel=( '(-j --jobs)'{-j+,--jobs=}'[specify number of parallel jobs]:jobs [# of CPUs]' '--keep-going[do not abort build on first error]' ) features=( '(--all-features)'{-F+,--features=}'[specify features to activate]:feature' '(--features -F)--all-features[activate all available features]' "--no-default-features[don't build the default features]" ) msgfmt='--message-format=[specify error format]:error format [human]:(human json short)' triple='--target=[specify target triple]:target triple:_cargo_target_triple' target='--target-dir=[specify directory for all generated artifacts]:directory:_directories' manifest='--manifest-path=[specify path to 
manifest]:path:_directories' registry='--registry=[specify registry to use]:registry' case $state in args) curcontext="${curcontext%:*}-${words[1]}:" case ${words[1]} in add) _arguments -s -A "^--" $common $manifest $registry \ {-F+,--features=}'[specify features to activate]:feature' \ "--default-features[enable the default features]" \ "--no-default-features[don't enable the default features]" \ "--optional[mark the dependency as optional]" \ "--no-optional[mark the dependency as required]" \ "--dev[add as a dev dependency]" \ "--build[add as a build dependency]" \ "--target=[add as a dependency to the given target platform]" \ "--rename=[rename the dependency]" \ "--dry-run[don't actually write the manifest]" \ '--branch=[branch to use when adding from git]:branch' \ '--git=[specify URL from which to add the crate]:url:_urls' \ '--path=[local filesystem path to crate to add]: :_directories' \ '--rev=[specific commit to use when adding from git]:commit' \ '--tag=[tag to use when adding from git]:tag' \ '1: :_guard "^-*" "crate name"' \ '*:args:_default' ;; bench) _arguments -s -A "^--" $common $parallel $features $msgfmt $triple $target $manifest \ "${command_scope_spec[@]}" \ '--all-targets[benchmark all targets]' \ "--no-run[compile but don't run]" \ '(-p --package)'{-p+,--package=}'[specify package to run benchmarks for]:package:_cargo_package_names' \ '--exclude=[exclude packages from the benchmark]:spec' \ '--no-fail-fast[run all benchmarks regardless of failure]' \ '1: :_guard "^-*" "bench name"' \ '*:args:_default' ;; build | b) _arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \ '--all-targets[equivalent to specifying --lib --bins --tests --benches --examples]' \ "${command_scope_spec[@]}" \ '(-p --package)'{-p+,--package=}'[specify package to build]:package:_cargo_package_names' \ '--release[build in release mode]' \ '--build-plan[output the build plan in JSON]' \ ;; check | c) _arguments -s -S $common $parallel $features 
$msgfmt $triple $target $manifest \ '--all-targets[equivalent to specifying --lib --bins --tests --benches --examples]' \ "${command_scope_spec[@]}" \ '(-p --package)'{-p+,--package=}'[specify package to check]:package:_cargo_package_names' \ '--release[check in release mode]' \ ;; clean) _arguments -s -S $common $triple $target $manifest \ '(-p --package)'{-p+,--package=}'[specify package to clean]:package:_cargo_package_names' \ '--release[clean release artifacts]' \ '--doc[clean just the documentation directory]' ;; doc | d) _arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \ '--no-deps[do not build docs for dependencies]' \ '--document-private-items[include non-public items in the documentation]' \ '--open[open docs in browser after the build]' \ '(-p --package)'{-p+,--package=}'[specify package to document]:package:_cargo_package_names' \ '--release[build artifacts in release mode, with optimizations]' \ ;; fetch) _arguments -s -S $common $triple $manifest ;; fix) _arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \ "${command_scope_spec[@]}" \ '--broken-code[fix code even if it already has compiler errors]' \ '--edition[fix in preparation for the next edition]' \ '--edition-idioms[fix warnings to migrate to the idioms of an edition]' \ '--allow-no-vcs[fix code even if a VCS was not detected]' \ '--allow-dirty[fix code even if the working directory is dirty]' \ '--allow-staged[fix code even if the working directory has staged changes]' ;; generate-lockfile) _arguments -s -S $common $manifest ;; help) _cargo_cmds ;; init) _arguments -s -S $common $registry \ '--lib[use library template]' \ '--edition=[specify edition to set for the crate generated]:edition:(2015 2018 2021)' \ '--vcs=[initialize a new repo with a given VCS]:vcs:(git hg pijul fossil none)' \ '--name=[set the resulting package name]:name' \ '1:path:_directories' ;; install) _arguments -s -S $common $parallel $features $triple $registry \ 
'(-f --force)'{-f,--force}'[force overwriting of existing crates or binaries]' \ '--bin=[only install the specified binary]:binary' \ '--branch=[branch to use when installing from git]:branch' \ '--debug[Build in debug mode (with the "dev" profile) instead of release mode]' \ '--example=[install the specified example instead of binaries]:example:_cargo_example_names' \ '--git=[specify URL from which to install the crate]:url:_urls' \ '--path=[local filesystem path to crate to install]: :_directories' \ '--rev=[specific commit to use when installing from git]:commit' \ '--root=[directory to install packages into]: :_directories' \ '--tag=[tag to use when installing from git]:tag' \ '--version=[version to install from crates.io]:version' \ '--list[list all installed packages and their versions]' \ '*: :_guard "^-*" "crate"' ;; locate-project) _arguments -s -S $common $manifest \ '--message-format=[specify output representation]:output representation [json]:(json plain)' \ '--workspace[locate Cargo.toml of the workspace root]' ;; login) _arguments -s -S $common $registry \ '*: :_guard "^-*" "token"' ;; metadata) _arguments -s -S $common $features $manifest \ "--no-deps[output information only about the root package and don't fetch dependencies]" \ '--format-version=[specify format version]:version [1]:(1)' ;; new) _arguments -s -S $common $registry \ '--lib[use library template]' \ '--vcs:initialize a new repo with a given VCS:(git hg none)' \ '--name=[set the resulting package name]' ;; owner) _arguments -s -S $common $registry \ '(-a --add)'{-a,--add}'[specify name of a user or team to invite as an owner]:name' \ '--index=[specify registry index]:index' \ '(-l --list)'{-l,--list}'[list owners of a crate]' \ '(-r --remove)'{-r,--remove}'[specify name of a user or team to remove as an owner]:name' \ '--token=[specify API token to use when authenticating]:token' \ '*: :_guard "^-*" "crate"' ;; package) _arguments -s -S $common $parallel $features $triple $target 
$manifest \ '(-l --list)'{-l,--list}'[print files included in a package without making one]' \ '--no-metadata[ignore warnings about a lack of human-usable metadata]' \ '--allow-dirty[allow dirty working directories to be packaged]' \ "--no-verify[don't build to verify contents]" ;; pkgid) _arguments -s -S $common $manifest \ '(-p --package)'{-p+,--package=}'[specify package to get ID specifier for]:package:_cargo_package_names' \ '*: :_guard "^-*" "spec"' ;; publish) _arguments -s -S $common $parallel $features $triple $target $manifest $registry \ '--index=[specify registry index]:index' \ '--allow-dirty[allow dirty working directories to be packaged]' \ "--no-verify[don't verify the contents by building them]" \ '--token=[specify token to use when uploading]:token' \ '--dry-run[perform all checks without uploading]' ;; read-manifest) _arguments -s -S $common $manifest ;; run | r) _arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \ '--example=[name of the bin target]:name:_cargo_example_names' \ '--bin=[name of the bin target]:name' \ '(-p --package)'{-p+,--package=}'[specify package with the target to run]:package:_cargo_package_names' \ '--release[build in release mode]' \ '*: :_default' ;; rustc) _arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \ '(-p --package)'{-p+,--package=}'[specify package to build]:package:_cargo_package_names' \ '--profile=[specify profile to build the selected target for]:profile' \ '--release[build artifacts in release mode, with optimizations]' \ "${command_scope_spec[@]}" \ '*: : _dispatch rustc rustc -default-' ;; rustdoc) _arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \ '--document-private-items[include non-public items in the documentation]' \ '--open[open the docs in a browser after the operation]' \ '(-p --package)'{-p+,--package=}'[specify package to document]:package:_cargo_package_names' \ '--release[build artifacts in release mode, 
with optimizations]' \ "${command_scope_spec[@]}" \ '*: : _dispatch rustdoc rustdoc -default-' ;; search) _arguments -s -S $common $registry \ '--index=[specify registry index]:index' \ '--limit=[limit the number of results]:results [10]' \ '*: :_guard "^-*" "query"' ;; test | t) _arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \ '--test=[test name]: :_cargo_test_names' \ '--no-fail-fast[run all tests regardless of failure]' \ '--no-run[compile but do not run]' \ '(-p --package)'{-p+,--package=}'[package to run tests for]:package:_cargo_package_names' \ '--all[test all packages in the workspace]' \ '--release[build artifacts in release mode, with optimizations]' \ '1: :_cargo_test_names' \ '(--doc --bin --example --test --bench)--lib[only test library]' \ '(--lib --bin --example --test --bench)--doc[only test documentation]' \ '(--lib --doc --example --test --bench)--bin=[binary name]' \ '(--lib --doc --bin --test --bench)--example=[example name]:_cargo_example_names' \ '(--lib --doc --bin --example --bench)--test=[test name]' \ '(--lib --doc --bin --example --test)--bench=[benchmark name]' \ '*: :_default' ;; tree) _arguments -s -S $common $features $triple $manifest \ '(-p --package)'{-p+,--package=}'[package to use as the root]:package:_cargo_package_names' \ '(-i --invert)'{-i+,--invert=}'[invert the tree for the given package]:package:_cargo_package_names' \ '--prefix=[line prefix]:prefix:(depth indent none)' \ '--no-dedupe[repeat shared dependencies]' \ '(-d --duplicates)'{-d,--duplicates}'[packages with multiple versions]' \ '--charset=[utf8 or ascii]:charset:(utf8 ascii)' \ '(-f --format)'{-f,--format=}'[format string]:format' \ '(-e --edges)'{-e,--edges=}'[edge kinds]:kind:(features normal build dev all no-dev no-build no-normal)' \ ;; uninstall) _arguments -s -S $common \ '(-p --package)'{-p+,--package=}'[specify package to uninstall]:package:_cargo_package_names' \ '--bin=[only uninstall the specified binary]:name' \ 
'--root=[directory to uninstall packages from]: :_files -/' \ '*:crate:_cargo_installed_crates -F line' ;; update) _arguments -s -S $common $manifest \ '--aggressive=[force dependency update]' \ "--dry-run[don't actually write the lockfile]" \ '(-p --package)'{-p+,--package=}'[specify package to update]:package:_cargo_package_names' \ '--precise=[update single dependency to precise release]:release' ;; verify-project) _arguments -s -S $common $manifest ;; version) _arguments -s -S $common ;; yank) _arguments -s -S $common $registry \ '--version=[specify yank version]:version' \ '--undo[undo a yank, putting a version back into the index]' \ '--index=[specify registry index to yank from]:registry index' \ '--token=[specify API token to use when authenticating]:token' \ '*: :_guard "^-*" "crate"' ;; *) # allow plugins to define their own functions if ! _call_function ret _cargo-${words[1]}; then # fallback on default completion for unknown commands _default && ret=0 fi (( ! ret )) ;; esac ;; esac } _cargo_unstable_flags() { local flags flags=( help ${${${(M)${(f)"$(_call_program flags cargo -Z help)"}:#*--*}/ #-- #/:}##*-Z } ) _describe -t flags 'unstable flag' flags } _cargo_installed_crates() { local expl _description crates expl 'crate' compadd "$@" "$expl[@]" - ${${${(f)"$(cargo install --list)"}:# *}%% *} } _cargo_cmds() { local -a commands # This uses Parameter Expansion Flags, which are a built-in Zsh feature. # See more: http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion-Flags # and http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion # # # How this work? # # First it splits the result of `cargo --list` at newline, then it removes the first line. # Then it removes indentation (4 whitespaces) before each items. (Note the x## pattern [1]). 
# Then it replaces those spaces between item and description with a `:` # # [1]: https://github.com/zsh-users/zsh-completions/blob/master/zsh-completions-howto.org#patterns commands=( ${${${(M)"${(f)$(_call_program commands cargo --list)}":# *}/ ##/}/ ##/:} ) _describe -t commands 'command' commands } _cargo_target_triple() { local -a targets targets=( ${(f)"$(rustc --print target-list)"} ) _describe 'target triple' targets } #FIXME: Disabled until fixed #gets package names from the manifest file _cargo_package_names() { _message -e packages package } # Extracts the values of "name" from the array given in $1 and shows them as # command line options for completion _cargo_names_from_array() { local manifest=$(cargo locate-project --message-format plain) if [[ -z $manifest ]]; then return 0 fi local last_line local -a names; local in_block=false local block_name=$1 names=() while read -r line; do if [[ $last_line == "[[$block_name]]" ]]; then in_block=true else if [[ $last_line =~ '\s*\[\[.*' ]]; then in_block=false fi fi if [[ $in_block == true ]]; then if [[ $line =~ '\s*name\s*=' ]]; then regexp-replace line '^\s*name\s*=\s*|"' '' names+=( "$line" ) fi fi last_line=$line done < "$manifest" _describe "$block_name" names } #Gets the test names from the manifest file _cargo_test_names() { _cargo_names_from_array "test" } #Gets the bench names from the manifest file _cargo_benchmark_names() { _cargo_names_from_array "bench" } _cargo_example_names() { if [[ -d examples ]]; then local -a files=(${(@f)$(echo examples/*.rs(:t:r))}) _values 'example' "${files[@]}" fi } _cargo cargo-0.66.0/src/etc/cargo.bashcomp.sh000066400000000000000000000243161432416201200174630ustar00rootroot00000000000000# Required for bash versions < 4.1 # Default bash version is 3.2 on latest macOS. 
See #6874 shopt -s extglob command -v cargo >/dev/null 2>&1 && _cargo() { local cur prev words cword _get_comp_words_by_ref cur prev words cword COMPREPLY=() # Skip past - and + options to find the command. local nwords=${#words[@]} local cmd_i cmd dd_i for (( cmd_i=1; cmd_i<$nwords; cmd_i++ )); do if [[ ! "${words[$cmd_i]}" =~ ^[+-] ]]; then cmd="${words[$cmd_i]}" break fi done # Find the location of the -- separator. for (( dd_i=1; dd_i<$nwords-1; dd_i++ )); do if [[ "${words[$dd_i]}" = "--" ]]; then break fi done local vcs='git hg none pijul fossil' local color='auto always never' local msg_format='human json short' local opt_help='-h --help' local opt_verbose='-v --verbose' local opt_quiet='-q --quiet' local opt_color='--color' local opt_common="$opt_help $opt_verbose $opt_quiet $opt_color" local opt_pkg_spec='-p --package --all --exclude --workspace' local opt_pkg='-p --package' local opt_feat='-F --features --all-features --no-default-features' local opt_mani='--manifest-path' local opt_parallel='-j --jobs --keep-going' local opt_force='-f --force' local opt_sync='-s --sync' local opt_lock='--frozen --locked --offline' local opt_targets="--lib --bin --bins --example --examples --test --tests --bench --benches --all-targets" local opt___nocmd="$opt_common -V --version --list --explain" local opt__add="$opt_common -p --package --features --default-features --no-default-features $opt_mani --optional --no-optional --rename --dry-run --path --git --branch --tag --rev --registry --dev --build --target" local opt__bench="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_parallel $opt_targets --message-format --target --no-run --no-fail-fast --target-dir" local opt__build="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_parallel $opt_targets --message-format --target --release --profile --target-dir" local opt__b="$opt__build" local opt__check="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_parallel $opt_targets --message-format 
--target --release --profile --target-dir" local opt__c="$opt__check" local opt__clean="$opt_common $opt_pkg $opt_mani $opt_lock --target --release --doc --target-dir --profile" local opt__clippy="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_parallel $opt_targets --message-format --target --release --profile --target-dir --no-deps --fix" local opt__doc="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_parallel --message-format --bin --bins --lib --target --open --no-deps --release --document-private-items --target-dir --profile" local opt__d="$opt__doc" local opt__fetch="$opt_common $opt_mani $opt_lock --target" local opt__fix="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_parallel $opt_targets $opt_lock --release --target --message-format --broken-code --edition --edition-idioms --allow-no-vcs --allow-dirty --allow-staged --profile --target-dir" local opt__generate_lockfile="$opt_common $opt_mani $opt_lock" local opt__help="$opt_help" local opt__init="$opt_common $opt_lock --bin --lib --name --vcs --edition --registry" local opt__install="$opt_common $opt_feat $opt_parallel $opt_lock $opt_force --bin --bins --branch --debug --example --examples --git --list --path --rev --root --tag --version --registry --target --profile --no-track" local opt__locate_project="$opt_common $opt_mani $opt_lock --message-format --workspace" local opt__login="$opt_common $opt_lock --registry" local opt__metadata="$opt_common $opt_feat $opt_mani $opt_lock --format-version=1 --no-deps --filter-platform" local opt__new="$opt_common $opt_lock --vcs --bin --lib --name --edition --registry" local opt__owner="$opt_common $opt_lock -a --add -r --remove -l --list --index --token --registry" local opt__package="$opt_common $opt_mani $opt_feat $opt_lock $opt_parallel --allow-dirty -l --list --no-verify --no-metadata --target --target-dir" local opt__pkgid="$opt_common $opt_mani $opt_lock $opt_pkg" local opt__publish="$opt_common $opt_mani $opt_feat $opt_lock 
$opt_parallel --allow-dirty --dry-run --token --no-verify --index --registry --target --target-dir" local opt__read_manifest="$opt_help $opt_quiet $opt_verbose $opt_mani $opt_color $opt_lock --no-deps" local opt__report="$opt_help $opt_verbose $opt_color future-incompat future-incompatibilities" local opt__report__future_incompat="$opt_help $opt_verbose $opt_color $opt_pkg --id" local opt__run="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_parallel --message-format --target --bin --example --release --target-dir --profile" local opt__r="$opt__run" local opt__rustc="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_parallel $opt_targets -L --crate-type --extern --message-format --profile --target --release --target-dir" local opt__rustdoc="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_parallel $opt_targets --message-format --target --release --open --target-dir --profile" local opt__search="$opt_common $opt_lock --limit --index --registry" local opt__test="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_parallel $opt_targets --message-format --doc --target --no-run --release --no-fail-fast --target-dir --profile" local opt__t="$opt__test" local opt__tree="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock --target -i --invert --prefix --no-dedupe --duplicates -d --charset -f --format -e --edges" local opt__uninstall="$opt_common $opt_lock $opt_pkg --bin --root" local opt__update="$opt_common $opt_mani $opt_lock $opt_pkg --aggressive --precise --dry-run" local opt__vendor="$opt_common $opt_mani $opt_lock $opt_sync --no-delete --respect-source-config --versioned-dirs" local opt__verify_project="$opt_common $opt_mani $opt_lock" local opt__version="$opt_common $opt_lock" local opt__yank="$opt_common $opt_lock --version --undo --index --token --registry" local opt__libtest="--help --include-ignored --ignored --test --bench --list --logfile --nocapture --test-threads --skip -q --quiet --exact --color --format" if [[ $cword -gt 
$dd_i ]]; then # Completion after -- separator. if [[ "${cmd}" = @(test|bench) ]]; then COMPREPLY=( $( compgen -W "${opt__libtest}" -- "$cur" ) ) else # Fallback to filename completion, useful with `cargo run`. _filedir fi elif [[ $cword -le $cmd_i ]]; then # Completion before or at the command. if [[ "$cur" == -* ]]; then COMPREPLY=( $( compgen -W "${opt___nocmd}" -- "$cur" ) ) elif [[ "$cur" == +* ]]; then COMPREPLY=( $( compgen -W "$(_toolchains)" -- "$cur" ) ) else _ensure_cargo_commands_cache_filled COMPREPLY=( $( compgen -W "$__cargo_commands_cache" -- "$cur" ) ) fi else case "${prev}" in --vcs) COMPREPLY=( $( compgen -W "$vcs" -- "$cur" ) ) ;; --color) COMPREPLY=( $( compgen -W "$color" -- "$cur" ) ) ;; --message-format) COMPREPLY=( $( compgen -W "$msg_format" -- "$cur" ) ) ;; --manifest-path) _filedir toml ;; --bin) COMPREPLY=( $( compgen -W "$(_bin_names)" -- "$cur" ) ) ;; --test) COMPREPLY=( $( compgen -W "$(_test_names)" -- "$cur" ) ) ;; --bench) COMPREPLY=( $( compgen -W "$(_benchmark_names)" -- "$cur" ) ) ;; --example) COMPREPLY=( $( compgen -W "$(_get_examples)" -- "$cur" ) ) ;; --target) COMPREPLY=( $( compgen -W "$(_get_targets)" -- "$cur" ) ) ;; --target-dir|--path) _filedir -d ;; help) _ensure_cargo_commands_cache_filled COMPREPLY=( $( compgen -W "$__cargo_commands_cache" -- "$cur" ) ) ;; *) if [[ "$cmd" == "report" && "$prev" == future-incompat* ]]; then local opt_var=opt__${cmd//-/_}__${prev//-/_} else local opt_var=opt__${cmd//-/_} fi if [[ -z "${!opt_var}" ]]; then # Fallback to filename completion. 
_filedir else COMPREPLY=( $( compgen -W "${!opt_var}" -- "$cur" ) ) fi ;; esac fi # compopt does not work in bash version 3 return 0 } && complete -F _cargo cargo __cargo_commands_cache= _ensure_cargo_commands_cache_filled(){ if [[ -z $__cargo_commands_cache ]]; then __cargo_commands_cache="$(cargo --list 2>/dev/null | awk 'NR>1 {print $1}')" fi } _locate_manifest(){ cargo locate-project --message-format plain 2>/dev/null } # Extracts the values of "name" from the array given in $1 and shows them as # command line options for completion _get_names_from_array() { local manifest=$(_locate_manifest) if [[ -z $manifest ]]; then return 0 fi local last_line local -a names local in_block=false local block_name=$1 while read line do if [[ $last_line == "[[$block_name]]" ]]; then in_block=true else if [[ $last_line =~ .*\[\[.* ]]; then in_block=false fi fi if [[ $in_block == true ]]; then if [[ $line =~ .*name.*\= ]]; then line=${line##*=} line=${line%%\"} line=${line##*\"} names+=($line) fi fi last_line=$line done < $manifest echo "${names[@]}" } #Gets the bin names from the manifest file _bin_names() { _get_names_from_array "bin" } #Gets the test names from the manifest file _test_names() { _get_names_from_array "test" } #Gets the bench names from the manifest file _benchmark_names() { _get_names_from_array "bench" } _get_examples(){ local manifest=$(_locate_manifest) [ -z "$manifest" ] && return 0 local files=("${manifest%/*}"/examples/*.rs) local names=("${files[@]##*/}") local names=("${names[@]%.*}") # "*" means no examples found if [[ "${names[@]}" != "*" ]]; then echo "${names[@]}" fi } _get_targets(){ local result=() local targets=$(rustup target list) while read line do if [[ "$line" =~ default|installed ]]; then result+=("${line%% *}") fi done <<< "$targets" echo "${result[@]}" } _toolchains(){ local result=() local toolchains=$(rustup toolchain list) local channels="nightly|beta|stable|[0-9]\.[0-9]{1,2}\.[0-9]" local date="[0-9]{4}-[0-9]{2}-[0-9]{2}" while read 
line do # Strip " (default)" line=${line%% *} if [[ "$line" =~ ^($channels)(-($date))?(-.*) ]]; then if [[ -z ${BASH_REMATCH[3]} ]]; then result+=("+${BASH_REMATCH[1]}") else # channel-date result+=("+${BASH_REMATCH[1]}-${BASH_REMATCH[3]}") fi result+=("+$line") else result+=("+$line") fi done <<< "$toolchains" echo "${result[@]}" } # vim:ft=sh cargo-0.66.0/src/etc/man/000077500000000000000000000000001432416201200150065ustar00rootroot00000000000000cargo-0.66.0/src/etc/man/cargo-add.1000066400000000000000000000162251432416201200167170ustar00rootroot00000000000000'\" t .TH "CARGO\-ADD" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-add \- Add dependencies to a Cargo.toml manifest file .SH "SYNOPSIS" \fBcargo add\fR [\fIoptions\fR] \fIcrate\fR\&... .br \fBcargo add\fR [\fIoptions\fR] \fB\-\-path\fR \fIpath\fR .br \fBcargo add\fR [\fIoptions\fR] \fB\-\-git\fR \fIurl\fR [\fIcrate\fR\&...] .SH "DESCRIPTION" This command can add or modify dependencies. .sp The source for the dependency can be specified with: .sp .RS 4 \h'-04'\(bu\h'+02'\fIcrate\fR\fB@\fR\fIversion\fR: Fetch from a registry with a version constraint of "\fIversion\fR" .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB\-\-path\fR \fIpath\fR: Fetch from the specified \fIpath\fR .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB\-\-git\fR \fIurl\fR: Pull from a git repo at \fIurl\fR .RE .sp If no source is specified, then a best effort will be made to select one, including: .sp .RS 4 \h'-04'\(bu\h'+02'Existing dependencies in other tables (like \fBdev\-dependencies\fR) .RE .sp .RS 4 \h'-04'\(bu\h'+02'Workspace members .RE .sp .RS 4 \h'-04'\(bu\h'+02'Latest release in the registry .RE .sp When you add a package that is already present, the existing entry will be updated with the flags specified. .sp Upon successful invocation, the enabled (\fB+\fR) and disabled (\fB\-\fR) \fIfeatures\fR of the specified dependency will be listed in the command's output. 
.SH "OPTIONS" .SS "Source options" .sp \fB\-\-git\fR \fIurl\fR .RS 4 \fIGit URL to add the specified crate from\fR \&. .RE .sp \fB\-\-branch\fR \fIbranch\fR .RS 4 Branch to use when adding from git. .RE .sp \fB\-\-tag\fR \fItag\fR .RS 4 Tag to use when adding from git. .RE .sp \fB\-\-rev\fR \fIsha\fR .RS 4 Specific commit to use when adding from git. .RE .sp \fB\-\-path\fR \fIpath\fR .RS 4 \fIFilesystem path\fR to local crate to add. .RE .sp \fB\-\-registry\fR \fIregistry\fR .RS 4 Name of the registry to use. Registry names are defined in \fICargo config files\fR \&. If not specified, the default registry is used, which is defined by the \fBregistry.default\fR config key which defaults to \fBcrates\-io\fR\&. .RE .SS "Section options" .sp \fB\-\-dev\fR .RS 4 Add as a \fIdevelopment dependency\fR \&. .RE .sp \fB\-\-build\fR .RS 4 Add as a \fIbuild dependency\fR \&. .RE .sp \fB\-\-target\fR \fItarget\fR .RS 4 Add as a dependency to the \fIgiven target platform\fR \&. .RE .SS "Dependency options" .sp \fB\-\-rename\fR \fIname\fR .RS 4 \fIRename\fR the dependency. .RE .sp \fB\-\-optional\fR .RS 4 Mark the dependency as \fIoptional\fR \&. .RE .sp \fB\-\-no\-optional\fR .RS 4 Mark the dependency as \fIrequired\fR \&. .RE .sp \fB\-\-no\-default\-features\fR .RS 4 Disable the \fIdefault features\fR \&. .RE .sp \fB\-\-default\-features\fR .RS 4 Re\-enable the \fIdefault features\fR \&. .RE .sp \fB\-\-features\fR \fIfeatures\fR .RS 4 Space or comma separated list of \fIfeatures to activate\fR \&. When adding multiple crates, the features for a specific crate may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may be specified multiple times, which enables all specified features. .RE .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. 
May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .SS "Manifest Options" .sp \fB\-\-manifest\-path\fR \fIpath\fR .RS 4 Path to the \fBCargo.toml\fR file. By default, Cargo searches for the \fBCargo.toml\fR file in the current directory or any parent directory. .RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. 
.RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Add \fBregex\fR as a dependency .sp .RS 4 .nf cargo add regex .fi .RE .RE .sp .RS 4 \h'-04' 2.\h'+01'Add \fBtrybuild\fR as a dev\-dependency .sp .RS 4 .nf cargo add \-\-dev trybuild .fi .RE .RE .sp .RS 4 \h'-04' 3.\h'+01'Add an older version of \fBnom\fR as a dependency .sp .RS 4 .nf cargo add nom@5 .fi .RE .RE .sp .RS 4 \h'-04' 4.\h'+01'Add support for serializing data structures to json with \fBderive\fRs .sp .RS 4 .nf cargo add serde serde_json \-F serde/derive .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1) cargo-0.66.0/src/etc/man/cargo-bench.1000066400000000000000000000441601432416201200172450ustar00rootroot00000000000000'\" t .TH "CARGO\-BENCH" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-bench \- Execute benchmarks of a package .SH "SYNOPSIS" \fBcargo bench\fR [\fIoptions\fR] [\fIbenchname\fR] [\fB\-\-\fR \fIbench\-options\fR] .SH "DESCRIPTION" Compile and execute benchmarks. .sp The benchmark filtering argument \fIbenchname\fR and all the arguments following the two dashes (\fB\-\-\fR) are passed to the benchmark binaries and thus to \fIlibtest\fR (rustc's built in unit\-test and micro\-benchmarking framework). If you are passing arguments to both Cargo and the binary, the ones after \fB\-\-\fR go to the binary, the ones before go to Cargo. For details about libtest's arguments see the output of \fBcargo bench \-\- \-\-help\fR and check out the rustc book's chapter on how tests work at \&. .sp As an example, this will run only the benchmark named \fBfoo\fR (and skip other similarly named benchmarks like \fBfoobar\fR): .sp .RS 4 .nf cargo bench \-\- foo \-\-exact .fi .RE .sp Benchmarks are built with the \fB\-\-test\fR option to \fBrustc\fR which creates a special executable by linking your code with libtest. The executable automatically runs all functions annotated with the \fB#[bench]\fR attribute. Cargo passes the \fB\-\-bench\fR flag to the test harness to tell it to run only benchmarks. 
.sp The libtest harness may be disabled by setting \fBharness = false\fR in the target manifest settings, in which case your code will need to provide its own \fBmain\fR function to handle running benchmarks. .RS 3 .ll -5 .sp \fBNote\fR: The \fI\f(BI#[bench]\fI attribute\fR is currently unstable and only available on the \fInightly channel\fR \&. There are some packages available on \fIcrates.io\fR that may help with running benchmarks on the stable channel, such as \fICriterion\fR \&. .br .RE .ll .sp By default, \fBcargo bench\fR uses the \fI\f(BIbench\fI profile\fR , which enables optimizations and disables debugging information. If you need to debug a benchmark, you can use the \fB\-\-profile=dev\fR command\-line option to switch to the dev profile. You can then run the debug\-enabled benchmark within a debugger. .SH "OPTIONS" .SS "Benchmark Options" .sp \fB\-\-no\-run\fR .RS 4 Compile, but don't run benchmarks. .RE .sp \fB\-\-no\-fail\-fast\fR .RS 4 Run all benchmarks regardless of failure. Without this flag, Cargo will exit after the first executable fails. The Rust test harness will run all benchmarks within the executable to completion, this flag only applies to the executable as a whole. .RE .SS "Package Selection" By default, when no package selection options are given, the packages selected depend on the selected manifest file (based on the current working directory if \fB\-\-manifest\-path\fR is not given). If the manifest is the root of a workspace then the workspaces default members are selected, otherwise only the package defined by the manifest will be selected. .sp The default members of a workspace can be set explicitly with the \fBworkspace.default\-members\fR key in the root manifest. If this is not set, a virtual workspace will include all workspace members (equivalent to passing \fB\-\-workspace\fR), and a non\-virtual workspace will include only the root crate itself. .sp \fB\-p\fR \fIspec\fR\&..., \fB\-\-package\fR \fIspec\fR\&... 
.RS 4 Benchmark only the specified packages. See \fBcargo\-pkgid\fR(1) for the SPEC format. This flag may be specified multiple times and supports common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. .RE .sp \fB\-\-workspace\fR .RS 4 Benchmark all members in the workspace. .RE .sp \fB\-\-all\fR .RS 4 Deprecated alias for \fB\-\-workspace\fR\&. .RE .sp \fB\-\-exclude\fR \fISPEC\fR\&... .RS 4 Exclude the specified packages. Must be used in conjunction with the \fB\-\-workspace\fR flag. This flag may be specified multiple times and supports common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. .RE .SS "Target Selection" When no target selection options are given, \fBcargo bench\fR will build the following targets of the selected packages: .sp .RS 4 \h'-04'\(bu\h'+02'lib \[em] used to link with binaries and benchmarks .RE .sp .RS 4 \h'-04'\(bu\h'+02'bins (only if benchmark targets are built and required features are available) .RE .sp .RS 4 \h'-04'\(bu\h'+02'lib as a benchmark .RE .sp .RS 4 \h'-04'\(bu\h'+02'bins as benchmarks .RE .sp .RS 4 \h'-04'\(bu\h'+02'benchmark targets .RE .sp The default behavior can be changed by setting the \fBbench\fR flag for the target in the manifest settings. Setting examples to \fBbench = true\fR will build and run the example as a benchmark. Setting targets to \fBbench = false\fR will stop them from being benchmarked by default. Target selection options that take a target by name ignore the \fBbench\fR flag and will always benchmark the given target. .sp Binary targets are automatically built if there is an integration test or benchmark being selected to benchmark. 
This allows an integration test to execute the binary to exercise and test its behavior. The \fBCARGO_BIN_EXE_\fR \fIenvironment variable\fR is set when the integration test is built so that it can use the \fI\f(BIenv\fI macro\fR to locate the executable. .sp Passing target selection flags will benchmark only the specified targets. .sp Note that \fB\-\-bin\fR, \fB\-\-example\fR, \fB\-\-test\fR and \fB\-\-bench\fR flags also support common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each glob pattern. .sp \fB\-\-lib\fR .RS 4 Benchmark the package's library. .RE .sp \fB\-\-bin\fR \fIname\fR\&... .RS 4 Benchmark the specified binary. This flag may be specified multiple times and supports common Unix glob patterns. .RE .sp \fB\-\-bins\fR .RS 4 Benchmark all binary targets. .RE .sp \fB\-\-example\fR \fIname\fR\&... .RS 4 Benchmark the specified example. This flag may be specified multiple times and supports common Unix glob patterns. .RE .sp \fB\-\-examples\fR .RS 4 Benchmark all example targets. .RE .sp \fB\-\-test\fR \fIname\fR\&... .RS 4 Benchmark the specified integration test. This flag may be specified multiple times and supports common Unix glob patterns. .RE .sp \fB\-\-tests\fR .RS 4 Benchmark all targets in test mode that have the \fBtest = true\fR manifest flag set. By default this includes the library and binaries built as unittests, and integration tests. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a unittest, and once as a dependency for binaries, integration tests, etc.). Targets may be enabled or disabled by setting the \fBtest\fR flag in the manifest settings for the target. .RE .sp \fB\-\-bench\fR \fIname\fR\&... .RS 4 Benchmark the specified benchmark. This flag may be specified multiple times and supports common Unix glob patterns. 
.RE .sp \fB\-\-benches\fR .RS 4 Benchmark all targets in benchmark mode that have the \fBbench = true\fR manifest flag set. By default this includes the library and binaries built as benchmarks, and bench targets. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a benchmark, and once as a dependency for binaries, benchmarks, etc.). Targets may be enabled or disabled by setting the \fBbench\fR flag in the manifest settings for the target. .RE .sp \fB\-\-all\-targets\fR .RS 4 Benchmark all targets. This is equivalent to specifying \fB\-\-lib \-\-bins \-\-tests \-\-benches \-\-examples\fR\&. .RE .SS "Feature Selection" The feature flags allow you to control which features are enabled. When no feature options are given, the \fBdefault\fR feature is activated for every selected package. .sp See \fIthe features documentation\fR for more details. .sp \fB\-F\fR \fIfeatures\fR, \fB\-\-features\fR \fIfeatures\fR .RS 4 Space or comma separated list of features to activate. Features of workspace members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may be specified multiple times, which enables all specified features. .RE .sp \fB\-\-all\-features\fR .RS 4 Activate all available features of all selected packages. .RE .sp \fB\-\-no\-default\-features\fR .RS 4 Do not activate the \fBdefault\fR feature of the selected packages. .RE .SS "Compilation Options" .sp \fB\-\-target\fR \fItriple\fR .RS 4 Benchmark for the given architecture. The default is the host architecture. The general format of the triple is \fB\-\-\-\fR\&. Run \fBrustc \-\-print target\-list\fR for a list of supported targets. This flag may be specified multiple times. .sp This may also be specified with the \fBbuild.target\fR \fIconfig value\fR \&. .sp Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. 
See the \fIbuild cache\fR documentation for more details. .RE .sp \fB\-\-profile\fR \fIname\fR .RS 4 Benchmark with the given profile. See the \fIthe reference\fR for more details on profiles. .RE .sp \fB\-\-ignore\-rust\-version\fR .RS 4 Benchmark the target even if the selected Rust compiler is older than the required Rust version as configured in the project's \fBrust\-version\fR field. .RE .sp \fB\-\-timings=\fR\fIfmts\fR .RS 4 Output information how long each compilation takes, and track concurrency information over time. Accepts an optional comma\-separated list of output formats; \fB\-\-timings\fR without an argument will default to \fB\-\-timings=html\fR\&. Specifying an output format (rather than the default) is unstable and requires \fB\-Zunstable\-options\fR\&. Valid output formats: .sp .RS 4 \h'-04'\(bu\h'+02'\fBhtml\fR (unstable, requires \fB\-Zunstable\-options\fR): Write a human\-readable file \fBcargo\-timing.html\fR to the \fBtarget/cargo\-timings\fR directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine\-readable timing data. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\fR (unstable, requires \fB\-Zunstable\-options\fR): Emit machine\-readable JSON information about timing information. .RE .RE .SS "Output Options" .sp \fB\-\-target\-dir\fR \fIdirectory\fR .RS 4 Directory for all generated artifacts and intermediate files. May also be specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the \fBbuild.target\-dir\fR \fIconfig value\fR \&. Defaults to \fBtarget\fR in the root of the workspace. .RE .SS "Display Options" By default the Rust test harness hides output from benchmark execution to keep results readable. 
Benchmark output can be recovered (e.g., for debugging) by passing \fB\-\-nocapture\fR to the benchmark binaries: .sp .RS 4 .nf cargo bench \-\- \-\-nocapture .fi .RE .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .sp \fB\-\-message\-format\fR \fIfmt\fR .RS 4 The output format for diagnostic messages. Can be specified multiple times and consists of comma\-separated values. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with \fBshort\fR and \fBjson\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR and \fBjson\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See \fIthe reference\fR for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains the "short" rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&. 
.RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&. .RE .RE .SS "Manifest Options" .sp \fB\-\-manifest\-path\fR \fIpath\fR .RS 4 Path to the \fBCargo.toml\fR file. By default, Cargo searches for the \fBCargo.toml\fR file in the current directory or any parent directory. .RE .sp \fB\-\-frozen\fR, \fB\-\-locked\fR .RS 4 Either of these flags requires that the \fBCargo.lock\fR file is up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from attempting to access the network to determine if it is out\-of\-date. .sp These may be used in environments where you want to assert that the \fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network access. .RE .sp \fB\-\-offline\fR .RS 4 Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. .sp Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the \fBcargo\-fetch\fR(1) command to download dependencies before going offline. .sp May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. 
.RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SS "Miscellaneous Options" The \fB\-\-jobs\fR argument affects the building of the benchmark executable but does not affect how many threads are used when running the benchmarks. The Rust test harness runs benchmarks serially in a single thread. .sp \fB\-j\fR \fIN\fR, \fB\-\-jobs\fR \fIN\fR .RS 4 Number of parallel jobs to run. May also be specified with the \fBbuild.jobs\fR \fIconfig value\fR \&. Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0. .RE .sp \fB\-\-keep\-going\fR .RS 4 Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires \fB\-Zunstable\-options\fR\&. .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. 
.RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Build and execute all the benchmarks of the current package: .sp .RS 4 .nf cargo bench .fi .RE .RE .sp .RS 4 \h'-04' 2.\h'+01'Run only a specific benchmark within a specific benchmark target: .sp .RS 4 .nf cargo bench \-\-bench bench_name \-\- modname::some_benchmark .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1), \fBcargo\-test\fR(1) cargo-0.66.0/src/etc/man/cargo-build.1000066400000000000000000000366361432416201200172760ustar00rootroot00000000000000'\" t .TH "CARGO\-BUILD" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-build \- Compile the current package .SH "SYNOPSIS" \fBcargo build\fR [\fIoptions\fR] .SH "DESCRIPTION" Compile local packages and all of their dependencies. .SH "OPTIONS" .SS "Package Selection" By default, when no package selection options are given, the packages selected depend on the selected manifest file (based on the current working directory if \fB\-\-manifest\-path\fR is not given). If the manifest is the root of a workspace then the workspaces default members are selected, otherwise only the package defined by the manifest will be selected. .sp The default members of a workspace can be set explicitly with the \fBworkspace.default\-members\fR key in the root manifest. If this is not set, a virtual workspace will include all workspace members (equivalent to passing \fB\-\-workspace\fR), and a non\-virtual workspace will include only the root crate itself. .sp \fB\-p\fR \fIspec\fR\&..., \fB\-\-package\fR \fIspec\fR\&... .RS 4 Build only the specified packages. See \fBcargo\-pkgid\fR(1) for the SPEC format. This flag may be specified multiple times and supports common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. .RE .sp \fB\-\-workspace\fR .RS 4 Build all members in the workspace. 
.RE .sp \fB\-\-all\fR .RS 4 Deprecated alias for \fB\-\-workspace\fR\&. .RE .sp \fB\-\-exclude\fR \fISPEC\fR\&... .RS 4 Exclude the specified packages. Must be used in conjunction with the \fB\-\-workspace\fR flag. This flag may be specified multiple times and supports common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. .RE .SS "Target Selection" When no target selection options are given, \fBcargo build\fR will build all binary and library targets of the selected packages. Binaries are skipped if they have \fBrequired\-features\fR that are missing. .sp Binary targets are automatically built if there is an integration test or benchmark being selected to build. This allows an integration test to execute the binary to exercise and test its behavior. The \fBCARGO_BIN_EXE_\fR \fIenvironment variable\fR is set when the integration test is built so that it can use the \fI\f(BIenv\fI macro\fR to locate the executable. .sp Passing target selection flags will build only the specified targets. .sp Note that \fB\-\-bin\fR, \fB\-\-example\fR, \fB\-\-test\fR and \fB\-\-bench\fR flags also support common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each glob pattern. .sp \fB\-\-lib\fR .RS 4 Build the package's library. .RE .sp \fB\-\-bin\fR \fIname\fR\&... .RS 4 Build the specified binary. This flag may be specified multiple times and supports common Unix glob patterns. .RE .sp \fB\-\-bins\fR .RS 4 Build all binary targets. .RE .sp \fB\-\-example\fR \fIname\fR\&... .RS 4 Build the specified example. This flag may be specified multiple times and supports common Unix glob patterns. .RE .sp \fB\-\-examples\fR .RS 4 Build all example targets. 
.RE .sp \fB\-\-test\fR \fIname\fR\&... .RS 4 Build the specified integration test. This flag may be specified multiple times and supports common Unix glob patterns. .RE .sp \fB\-\-tests\fR .RS 4 Build all targets in test mode that have the \fBtest = true\fR manifest flag set. By default this includes the library and binaries built as unittests, and integration tests. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a unittest, and once as a dependency for binaries, integration tests, etc.). Targets may be enabled or disabled by setting the \fBtest\fR flag in the manifest settings for the target. .RE .sp \fB\-\-bench\fR \fIname\fR\&... .RS 4 Build the specified benchmark. This flag may be specified multiple times and supports common Unix glob patterns. .RE .sp \fB\-\-benches\fR .RS 4 Build all targets in benchmark mode that have the \fBbench = true\fR manifest flag set. By default this includes the library and binaries built as benchmarks, and bench targets. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a benchmark, and once as a dependency for binaries, benchmarks, etc.). Targets may be enabled or disabled by setting the \fBbench\fR flag in the manifest settings for the target. .RE .sp \fB\-\-all\-targets\fR .RS 4 Build all targets. This is equivalent to specifying \fB\-\-lib \-\-bins \-\-tests \-\-benches \-\-examples\fR\&. .RE .SS "Feature Selection" The feature flags allow you to control which features are enabled. When no feature options are given, the \fBdefault\fR feature is activated for every selected package. .sp See \fIthe features documentation\fR for more details. .sp \fB\-F\fR \fIfeatures\fR, \fB\-\-features\fR \fIfeatures\fR .RS 4 Space or comma separated list of features to activate. Features of workspace members may be enabled with \fBpackage\-name/feature\-name\fR syntax. 
This flag may be specified multiple times, which enables all specified features. .RE .sp \fB\-\-all\-features\fR .RS 4 Activate all available features of all selected packages. .RE .sp \fB\-\-no\-default\-features\fR .RS 4 Do not activate the \fBdefault\fR feature of the selected packages. .RE .SS "Compilation Options" .sp \fB\-\-target\fR \fItriple\fR .RS 4 Build for the given architecture. The default is the host architecture. The general format of the triple is \fB\-\-\-\fR\&. Run \fBrustc \-\-print target\-list\fR for a list of supported targets. This flag may be specified multiple times. .sp This may also be specified with the \fBbuild.target\fR \fIconfig value\fR \&. .sp Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the \fIbuild cache\fR documentation for more details. .RE .sp \fB\-r\fR, \fB\-\-release\fR .RS 4 Build optimized artifacts with the \fBrelease\fR profile. See also the \fB\-\-profile\fR option for choosing a specific profile by name. .RE .sp \fB\-\-profile\fR \fIname\fR .RS 4 Build with the given profile. See the \fIthe reference\fR for more details on profiles. .RE .sp \fB\-\-ignore\-rust\-version\fR .RS 4 Build the target even if the selected Rust compiler is older than the required Rust version as configured in the project's \fBrust\-version\fR field. .RE .sp \fB\-\-timings=\fR\fIfmts\fR .RS 4 Output information how long each compilation takes, and track concurrency information over time. Accepts an optional comma\-separated list of output formats; \fB\-\-timings\fR without an argument will default to \fB\-\-timings=html\fR\&. Specifying an output format (rather than the default) is unstable and requires \fB\-Zunstable\-options\fR\&. 
Valid output formats: .sp .RS 4 \h'-04'\(bu\h'+02'\fBhtml\fR (unstable, requires \fB\-Zunstable\-options\fR): Write a human\-readable file \fBcargo\-timing.html\fR to the \fBtarget/cargo\-timings\fR directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine\-readable timing data. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\fR (unstable, requires \fB\-Zunstable\-options\fR): Emit machine\-readable JSON information about timing information. .RE .RE .SS "Output Options" .sp \fB\-\-target\-dir\fR \fIdirectory\fR .RS 4 Directory for all generated artifacts and intermediate files. May also be specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the \fBbuild.target\-dir\fR \fIconfig value\fR \&. Defaults to \fBtarget\fR in the root of the workspace. .RE .sp \fB\-\-out\-dir\fR \fIdirectory\fR .RS 4 Copy final artifacts to this directory. .sp This option is unstable and available only on the \fInightly channel\fR and requires the \fB\-Z unstable\-options\fR flag to enable. See for more information. .RE .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. 
.RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .sp \fB\-\-message\-format\fR \fIfmt\fR .RS 4 The output format for diagnostic messages. Can be specified multiple times and consists of comma\-separated values. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with \fBshort\fR and \fBjson\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR and \fBjson\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See \fIthe reference\fR for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains the "short" rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&. .RE .RE .sp \fB\-\-build\-plan\fR .RS 4 Outputs a series of JSON messages to stdout that indicate the commands to run the build. .sp This option is unstable and available only on the \fInightly channel\fR and requires the \fB\-Z unstable\-options\fR flag to enable. See for more information. .RE .SS "Manifest Options" .sp \fB\-\-manifest\-path\fR \fIpath\fR .RS 4 Path to the \fBCargo.toml\fR file. 
By default, Cargo searches for the \fBCargo.toml\fR file in the current directory or any parent directory. .RE .sp \fB\-\-frozen\fR, \fB\-\-locked\fR .RS 4 Either of these flags requires that the \fBCargo.lock\fR file is up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from attempting to access the network to determine if it is out\-of\-date. .sp These may be used in environments where you want to assert that the \fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network access. .RE .sp \fB\-\-offline\fR .RS 4 Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. .sp Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the \fBcargo\-fetch\fR(1) command to download dependencies before going offline. .sp May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. .RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. 
.RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SS "Miscellaneous Options" .sp \fB\-j\fR \fIN\fR, \fB\-\-jobs\fR \fIN\fR .RS 4 Number of parallel jobs to run. May also be specified with the \fBbuild.jobs\fR \fIconfig value\fR \&. Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0. .RE .sp \fB\-\-keep\-going\fR .RS 4 Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires \fB\-Zunstable\-options\fR\&. .RE .sp \fB\-\-future\-incompat\-report\fR .RS 4 Displays a future\-incompat report for any future\-incompatible warnings produced during execution of this command .sp See \fBcargo\-report\fR(1) .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. .RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Build the local package and all of its dependencies: .sp .RS 4 .nf cargo build .fi .RE .RE .sp .RS 4 \h'-04' 2.\h'+01'Build with optimizations: .sp .RS 4 .nf cargo build \-\-release .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1), \fBcargo\-rustc\fR(1) cargo-0.66.0/src/etc/man/cargo-check.1000066400000000000000000000354441432416201200172500ustar00rootroot00000000000000'\" t .TH "CARGO\-CHECK" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-check \- Check the current package .SH "SYNOPSIS" \fBcargo check\fR [\fIoptions\fR] .SH "DESCRIPTION" Check a local package and all of its dependencies for errors. This will essentially compile the packages without performing the final step of code generation, which is faster than running \fBcargo build\fR\&. 
The compiler will save metadata files to disk so that future runs will reuse them if the source has not been modified. Some diagnostics and errors are only emitted during code generation, so they inherently won't be reported with \fBcargo check\fR\&. .SH "OPTIONS" .SS "Package Selection" By default, when no package selection options are given, the packages selected depend on the selected manifest file (based on the current working directory if \fB\-\-manifest\-path\fR is not given). If the manifest is the root of a workspace then the workspaces default members are selected, otherwise only the package defined by the manifest will be selected. .sp The default members of a workspace can be set explicitly with the \fBworkspace.default\-members\fR key in the root manifest. If this is not set, a virtual workspace will include all workspace members (equivalent to passing \fB\-\-workspace\fR), and a non\-virtual workspace will include only the root crate itself. .sp \fB\-p\fR \fIspec\fR\&..., \fB\-\-package\fR \fIspec\fR\&... .RS 4 Check only the specified packages. See \fBcargo\-pkgid\fR(1) for the SPEC format. This flag may be specified multiple times and supports common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. .RE .sp \fB\-\-workspace\fR .RS 4 Check all members in the workspace. .RE .sp \fB\-\-all\fR .RS 4 Deprecated alias for \fB\-\-workspace\fR\&. .RE .sp \fB\-\-exclude\fR \fISPEC\fR\&... .RS 4 Exclude the specified packages. Must be used in conjunction with the \fB\-\-workspace\fR flag. This flag may be specified multiple times and supports common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. 
.RE .SS "Target Selection" When no target selection options are given, \fBcargo check\fR will check all binary and library targets of the selected packages. Binaries are skipped if they have \fBrequired\-features\fR that are missing. .sp Passing target selection flags will check only the specified targets. .sp Note that \fB\-\-bin\fR, \fB\-\-example\fR, \fB\-\-test\fR and \fB\-\-bench\fR flags also support common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each glob pattern. .sp \fB\-\-lib\fR .RS 4 Check the package's library. .RE .sp \fB\-\-bin\fR \fIname\fR\&... .RS 4 Check the specified binary. This flag may be specified multiple times and supports common Unix glob patterns. .RE .sp \fB\-\-bins\fR .RS 4 Check all binary targets. .RE .sp \fB\-\-example\fR \fIname\fR\&... .RS 4 Check the specified example. This flag may be specified multiple times and supports common Unix glob patterns. .RE .sp \fB\-\-examples\fR .RS 4 Check all example targets. .RE .sp \fB\-\-test\fR \fIname\fR\&... .RS 4 Check the specified integration test. This flag may be specified multiple times and supports common Unix glob patterns. .RE .sp \fB\-\-tests\fR .RS 4 Check all targets in test mode that have the \fBtest = true\fR manifest flag set. By default this includes the library and binaries built as unittests, and integration tests. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a unittest, and once as a dependency for binaries, integration tests, etc.). Targets may be enabled or disabled by setting the \fBtest\fR flag in the manifest settings for the target. .RE .sp \fB\-\-bench\fR \fIname\fR\&... .RS 4 Check the specified benchmark. This flag may be specified multiple times and supports common Unix glob patterns. 
.RE .sp \fB\-\-benches\fR .RS 4 Check all targets in benchmark mode that have the \fBbench = true\fR manifest flag set. By default this includes the library and binaries built as benchmarks, and bench targets. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a benchmark, and once as a dependency for binaries, benchmarks, etc.). Targets may be enabled or disabled by setting the \fBbench\fR flag in the manifest settings for the target. .RE .sp \fB\-\-all\-targets\fR .RS 4 Check all targets. This is equivalent to specifying \fB\-\-lib \-\-bins \-\-tests \-\-benches \-\-examples\fR\&. .RE .SS "Feature Selection" The feature flags allow you to control which features are enabled. When no feature options are given, the \fBdefault\fR feature is activated for every selected package. .sp See \fIthe features documentation\fR for more details. .sp \fB\-F\fR \fIfeatures\fR, \fB\-\-features\fR \fIfeatures\fR .RS 4 Space or comma separated list of features to activate. Features of workspace members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may be specified multiple times, which enables all specified features. .RE .sp \fB\-\-all\-features\fR .RS 4 Activate all available features of all selected packages. .RE .sp \fB\-\-no\-default\-features\fR .RS 4 Do not activate the \fBdefault\fR feature of the selected packages. .RE .SS "Compilation Options" .sp \fB\-\-target\fR \fItriple\fR .RS 4 Check for the given architecture. The default is the host architecture. The general format of the triple is \fB\-\-\-\fR\&. Run \fBrustc \-\-print target\-list\fR for a list of supported targets. This flag may be specified multiple times. .sp This may also be specified with the \fBbuild.target\fR \fIconfig value\fR \&. .sp Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the \fIbuild cache\fR documentation for more details. 
.RE .sp \fB\-r\fR, \fB\-\-release\fR .RS 4 Check optimized artifacts with the \fBrelease\fR profile. See also the \fB\-\-profile\fR option for choosing a specific profile by name. .RE .sp \fB\-\-profile\fR \fIname\fR .RS 4 Check with the given profile. .sp As a special case, specifying the \fBtest\fR profile will also enable checking in test mode which will enable checking tests and enable the \fBtest\fR cfg option. See \fIrustc tests\fR for more detail. .sp See the \fIthe reference\fR for more details on profiles. .RE .sp \fB\-\-ignore\-rust\-version\fR .RS 4 Check the target even if the selected Rust compiler is older than the required Rust version as configured in the project's \fBrust\-version\fR field. .RE .sp \fB\-\-timings=\fR\fIfmts\fR .RS 4 Output information how long each compilation takes, and track concurrency information over time. Accepts an optional comma\-separated list of output formats; \fB\-\-timings\fR without an argument will default to \fB\-\-timings=html\fR\&. Specifying an output format (rather than the default) is unstable and requires \fB\-Zunstable\-options\fR\&. Valid output formats: .sp .RS 4 \h'-04'\(bu\h'+02'\fBhtml\fR (unstable, requires \fB\-Zunstable\-options\fR): Write a human\-readable file \fBcargo\-timing.html\fR to the \fBtarget/cargo\-timings\fR directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine\-readable timing data. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\fR (unstable, requires \fB\-Zunstable\-options\fR): Emit machine\-readable JSON information about timing information. .RE .RE .SS "Output Options" .sp \fB\-\-target\-dir\fR \fIdirectory\fR .RS 4 Directory for all generated artifacts and intermediate files. May also be specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the \fBbuild.target\-dir\fR \fIconfig value\fR \&. 
Defaults to \fBtarget\fR in the root of the workspace. .RE .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .sp \fB\-\-message\-format\fR \fIfmt\fR .RS 4 The output format for diagnostic messages. Can be specified multiple times and consists of comma\-separated values. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with \fBshort\fR and \fBjson\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR and \fBjson\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See \fIthe reference\fR for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains the "short" rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&. 
.RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&. .RE .RE .SS "Manifest Options" .sp \fB\-\-manifest\-path\fR \fIpath\fR .RS 4 Path to the \fBCargo.toml\fR file. By default, Cargo searches for the \fBCargo.toml\fR file in the current directory or any parent directory. .RE .sp \fB\-\-frozen\fR, \fB\-\-locked\fR .RS 4 Either of these flags requires that the \fBCargo.lock\fR file is up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from attempting to access the network to determine if it is out\-of\-date. .sp These may be used in environments where you want to assert that the \fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network access. .RE .sp \fB\-\-offline\fR .RS 4 Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. .sp Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the \fBcargo\-fetch\fR(1) command to download dependencies before going offline. .sp May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. .RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). 
See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SS "Miscellaneous Options" .sp \fB\-j\fR \fIN\fR, \fB\-\-jobs\fR \fIN\fR .RS 4 Number of parallel jobs to run. May also be specified with the \fBbuild.jobs\fR \fIconfig value\fR \&. Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0. .RE .sp \fB\-\-keep\-going\fR .RS 4 Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires \fB\-Zunstable\-options\fR\&. .RE .sp \fB\-\-future\-incompat\-report\fR .RS 4 Displays a future\-incompat report for any future\-incompatible warnings produced during execution of this command .sp See \fBcargo\-report\fR(1) .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. 
.RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Check the local package for errors: .sp .RS 4 .nf cargo check .fi .RE .RE .sp .RS 4 \h'-04' 2.\h'+01'Check all targets, including unit tests: .sp .RS 4 .nf cargo check \-\-all\-targets \-\-profile=test .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1), \fBcargo\-build\fR(1) cargo-0.66.0/src/etc/man/cargo-clean.1000066400000000000000000000137641432416201200172560ustar00rootroot00000000000000'\" t .TH "CARGO\-CLEAN" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-clean \- Remove generated artifacts .SH "SYNOPSIS" \fBcargo clean\fR [\fIoptions\fR] .SH "DESCRIPTION" Remove artifacts from the target directory that Cargo has generated in the past. .sp With no options, \fBcargo clean\fR will delete the entire target directory. .SH "OPTIONS" .SS "Package Selection" When no packages are selected, all packages and all dependencies in the workspace are cleaned. .sp \fB\-p\fR \fIspec\fR\&..., \fB\-\-package\fR \fIspec\fR\&... .RS 4 Clean only the specified packages. This flag may be specified multiple times. See \fBcargo\-pkgid\fR(1) for the SPEC format. .RE .SS "Clean Options" .sp \fB\-\-doc\fR .RS 4 This option will cause \fBcargo clean\fR to remove only the \fBdoc\fR directory in the target directory. .RE .sp \fB\-\-release\fR .RS 4 Remove all artifacts in the \fBrelease\fR directory. .RE .sp \fB\-\-profile\fR \fIname\fR .RS 4 Remove all artifacts in the directory with the given profile name. .RE .sp \fB\-\-target\-dir\fR \fIdirectory\fR .RS 4 Directory for all generated artifacts and intermediate files. May also be specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the \fBbuild.target\-dir\fR \fIconfig value\fR \&. Defaults to \fBtarget\fR in the root of the workspace. .RE .sp \fB\-\-target\fR \fItriple\fR .RS 4 Clean for the given architecture. The default is the host architecture. The general format of the triple is \fB\-\-\-\fR\&. Run \fBrustc \-\-print target\-list\fR for a list of supported targets. 
This flag may be specified multiple times. .sp This may also be specified with the \fBbuild.target\fR \fIconfig value\fR \&. .sp Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the \fIbuild cache\fR documentation for more details. .RE .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .SS "Manifest Options" .sp \fB\-\-manifest\-path\fR \fIpath\fR .RS 4 Path to the \fBCargo.toml\fR file. By default, Cargo searches for the \fBCargo.toml\fR file in the current directory or any parent directory. .RE .sp \fB\-\-frozen\fR, \fB\-\-locked\fR .RS 4 Either of these flags requires that the \fBCargo.lock\fR file is up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from attempting to access the network to determine if it is out\-of\-date. .sp These may be used in environments where you want to assert that the \fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network access. .RE .sp \fB\-\-offline\fR .RS 4 Prevents Cargo from accessing the network for any reason. 
Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. .sp Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the \fBcargo\-fetch\fR(1) command to download dependencies before going offline. .sp May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. .RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. 
.RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Remove the entire target directory: .sp .RS 4 .nf cargo clean .fi .RE .RE .sp .RS 4 \h'-04' 2.\h'+01'Remove only the release artifacts: .sp .RS 4 .nf cargo clean \-\-release .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1), \fBcargo\-build\fR(1) cargo-0.66.0/src/etc/man/cargo-doc.1000066400000000000000000000320031432416201200167240ustar00rootroot00000000000000'\" t .TH "CARGO\-DOC" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-doc \- Build a package's documentation .SH "SYNOPSIS" \fBcargo doc\fR [\fIoptions\fR] .SH "DESCRIPTION" Build the documentation for the local package and all dependencies. The output is placed in \fBtarget/doc\fR in rustdoc's usual format. .SH "OPTIONS" .SS "Documentation Options" .sp \fB\-\-open\fR .RS 4 Open the docs in a browser after building them. This will use your default browser unless you define another one in the \fBBROWSER\fR environment variable or use the \fI\f(BIdoc.browser\fI\fR configuration option. .RE .sp \fB\-\-no\-deps\fR .RS 4 Do not build documentation for dependencies. .RE .sp \fB\-\-document\-private\-items\fR .RS 4 Include non\-public items in the documentation. This will be enabled by default if documenting a binary target. .RE .SS "Package Selection" By default, when no package selection options are given, the packages selected depend on the selected manifest file (based on the current working directory if \fB\-\-manifest\-path\fR is not given). If the manifest is the root of a workspace then the workspaces default members are selected, otherwise only the package defined by the manifest will be selected. .sp The default members of a workspace can be set explicitly with the \fBworkspace.default\-members\fR key in the root manifest. If this is not set, a virtual workspace will include all workspace members (equivalent to passing \fB\-\-workspace\fR), and a non\-virtual workspace will include only the root crate itself. 
.sp \fB\-p\fR \fIspec\fR\&..., \fB\-\-package\fR \fIspec\fR\&... .RS 4 Document only the specified packages. See \fBcargo\-pkgid\fR(1) for the SPEC format. This flag may be specified multiple times and supports common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. .RE .sp \fB\-\-workspace\fR .RS 4 Document all members in the workspace. .RE .sp \fB\-\-all\fR .RS 4 Deprecated alias for \fB\-\-workspace\fR\&. .RE .sp \fB\-\-exclude\fR \fISPEC\fR\&... .RS 4 Exclude the specified packages. Must be used in conjunction with the \fB\-\-workspace\fR flag. This flag may be specified multiple times and supports common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. .RE .SS "Target Selection" When no target selection options are given, \fBcargo doc\fR will document all binary and library targets of the selected package. The binary will be skipped if its name is the same as the lib target. Binaries are skipped if they have \fBrequired\-features\fR that are missing. .sp The default behavior can be changed by setting \fBdoc = false\fR for the target in the manifest settings. Using target selection options will ignore the \fBdoc\fR flag and will always document the given target. .sp \fB\-\-lib\fR .RS 4 Document the package's library. .RE .sp \fB\-\-bin\fR \fIname\fR\&... .RS 4 Document the specified binary. This flag may be specified multiple times and supports common Unix glob patterns. .RE .sp \fB\-\-bins\fR .RS 4 Document all binary targets. .RE .sp \fB\-\-example\fR \fIname\fR\&... .RS 4 Document the specified example. This flag may be specified multiple times and supports common Unix glob patterns. 
.RE .sp \fB\-\-examples\fR .RS 4 Document all example targets. .RE .SS "Feature Selection" The feature flags allow you to control which features are enabled. When no feature options are given, the \fBdefault\fR feature is activated for every selected package. .sp See \fIthe features documentation\fR for more details. .sp \fB\-F\fR \fIfeatures\fR, \fB\-\-features\fR \fIfeatures\fR .RS 4 Space or comma separated list of features to activate. Features of workspace members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may be specified multiple times, which enables all specified features. .RE .sp \fB\-\-all\-features\fR .RS 4 Activate all available features of all selected packages. .RE .sp \fB\-\-no\-default\-features\fR .RS 4 Do not activate the \fBdefault\fR feature of the selected packages. .RE .SS "Compilation Options" .sp \fB\-\-target\fR \fItriple\fR .RS 4 Document for the given architecture. The default is the host architecture. The general format of the triple is \fB\-\-\-\fR\&. Run \fBrustc \-\-print target\-list\fR for a list of supported targets. This flag may be specified multiple times. .sp This may also be specified with the \fBbuild.target\fR \fIconfig value\fR \&. .sp Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the \fIbuild cache\fR documentation for more details. .RE .sp \fB\-r\fR, \fB\-\-release\fR .RS 4 Document optimized artifacts with the \fBrelease\fR profile. See also the \fB\-\-profile\fR option for choosing a specific profile by name. .RE .sp \fB\-\-profile\fR \fIname\fR .RS 4 Document with the given profile. See the \fIthe reference\fR for more details on profiles. .RE .sp \fB\-\-ignore\-rust\-version\fR .RS 4 Document the target even if the selected Rust compiler is older than the required Rust version as configured in the project's \fBrust\-version\fR field. 
.RE .sp \fB\-\-timings=\fR\fIfmts\fR .RS 4 Output information how long each compilation takes, and track concurrency information over time. Accepts an optional comma\-separated list of output formats; \fB\-\-timings\fR without an argument will default to \fB\-\-timings=html\fR\&. Specifying an output format (rather than the default) is unstable and requires \fB\-Zunstable\-options\fR\&. Valid output formats: .sp .RS 4 \h'-04'\(bu\h'+02'\fBhtml\fR (unstable, requires \fB\-Zunstable\-options\fR): Write a human\-readable file \fBcargo\-timing.html\fR to the \fBtarget/cargo\-timings\fR directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine\-readable timing data. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\fR (unstable, requires \fB\-Zunstable\-options\fR): Emit machine\-readable JSON information about timing information. .RE .RE .SS "Output Options" .sp \fB\-\-target\-dir\fR \fIdirectory\fR .RS 4 Directory for all generated artifacts and intermediate files. May also be specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the \fBbuild.target\-dir\fR \fIconfig value\fR \&. Defaults to \fBtarget\fR in the root of the workspace. .RE .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. 
.RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .sp \fB\-\-message\-format\fR \fIfmt\fR .RS 4 The output format for diagnostic messages. Can be specified multiple times and consists of comma\-separated values. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with \fBshort\fR and \fBjson\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR and \fBjson\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See \fIthe reference\fR for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains the "short" rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&. .RE .RE .SS "Manifest Options" .sp \fB\-\-manifest\-path\fR \fIpath\fR .RS 4 Path to the \fBCargo.toml\fR file. By default, Cargo searches for the \fBCargo.toml\fR file in the current directory or any parent directory. .RE .sp \fB\-\-frozen\fR, \fB\-\-locked\fR .RS 4 Either of these flags requires that the \fBCargo.lock\fR file is up\-to\-date. 
If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from attempting to access the network to determine if it is out\-of\-date. .sp These may be used in environments where you want to assert that the \fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network access. .RE .sp \fB\-\-offline\fR .RS 4 Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. .sp Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the \fBcargo\-fetch\fR(1) command to download dependencies before going offline. .sp May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. .RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. 
.RE .SS "Miscellaneous Options" .sp \fB\-j\fR \fIN\fR, \fB\-\-jobs\fR \fIN\fR .RS 4 Number of parallel jobs to run. May also be specified with the \fBbuild.jobs\fR \fIconfig value\fR \&. Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0. .RE .sp \fB\-\-keep\-going\fR .RS 4 Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires \fB\-Zunstable\-options\fR\&. .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. .RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Build the local package documentation and its dependencies and output to \fBtarget/doc\fR\&. .sp .RS 4 .nf cargo doc .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1), \fBcargo\-rustdoc\fR(1), \fBrustdoc\fR(1) cargo-0.66.0/src/etc/man/cargo-fetch.1000066400000000000000000000127531432416201200172620ustar00rootroot00000000000000'\" t .TH "CARGO\-FETCH" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-fetch \- Fetch dependencies of a package from the network .SH "SYNOPSIS" \fBcargo fetch\fR [\fIoptions\fR] .SH "DESCRIPTION" If a \fBCargo.lock\fR file is available, this command will ensure that all of the git dependencies and/or registry dependencies are downloaded and locally available. Subsequent Cargo commands will be able to run offline after a \fBcargo fetch\fR unless the lock file changes. .sp If the lock file is not available, then this command will generate the lock file before fetching the dependencies. .sp If \fB\-\-target\fR is not specified, then all target dependencies are fetched. .sp See also the \fIcargo\-prefetch\fR plugin which adds a command to download popular crates. 
This may be useful if you plan to use Cargo without a network with the \fB\-\-offline\fR flag. .SH "OPTIONS" .SS "Fetch options" .sp \fB\-\-target\fR \fItriple\fR .RS 4 Fetch for the given architecture. The default is all architectures. The general format of the triple is \fB\-\-\-\fR\&. Run \fBrustc \-\-print target\-list\fR for a list of supported targets. This flag may be specified multiple times. .sp This may also be specified with the \fBbuild.target\fR \fIconfig value\fR \&. .sp Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the \fIbuild cache\fR documentation for more details. .RE .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .SS "Manifest Options" .sp \fB\-\-manifest\-path\fR \fIpath\fR .RS 4 Path to the \fBCargo.toml\fR file. By default, Cargo searches for the \fBCargo.toml\fR file in the current directory or any parent directory. .RE .sp \fB\-\-frozen\fR, \fB\-\-locked\fR .RS 4 Either of these flags requires that the \fBCargo.lock\fR file is up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. 
The \fB\-\-frozen\fR flag also prevents Cargo from attempting to access the network to determine if it is out\-of\-date. .sp These may be used in environments where you want to assert that the \fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network access. .RE .sp \fB\-\-offline\fR .RS 4 Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. .sp Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the \fBcargo\-fetch\fR(1) command to download dependencies before going offline. .sp May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. .RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. 
.RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. .RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Fetch all dependencies: .sp .RS 4 .nf cargo fetch .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1), \fBcargo\-update\fR(1), \fBcargo\-generate\-lockfile\fR(1) cargo-0.66.0/src/etc/man/cargo-fix.1000066400000000000000000000434601432416201200167560ustar00rootroot00000000000000'\" t .TH "CARGO\-FIX" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-fix \- Automatically fix lint warnings reported by rustc .SH "SYNOPSIS" \fBcargo fix\fR [\fIoptions\fR] .SH "DESCRIPTION" This Cargo subcommand will automatically take rustc's suggestions from diagnostics like warnings and apply them to your source code. This is intended to help automate tasks that rustc itself already knows how to tell you to fix! .sp Executing \fBcargo fix\fR will under the hood execute \fBcargo\-check\fR(1). Any warnings applicable to your crate will be automatically fixed (if possible) and all remaining warnings will be displayed when the check process is finished. For example if you'd like to apply all fixes to the current package, you can run: .sp .RS 4 .nf cargo fix .fi .RE .sp which behaves the same as \fBcargo check \-\-all\-targets\fR\&. .sp \fBcargo fix\fR is only capable of fixing code that is normally compiled with \fBcargo check\fR\&. If code is conditionally enabled with optional features, you will need to enable those features for that code to be analyzed: .sp .RS 4 .nf cargo fix \-\-features foo .fi .RE .sp Similarly, other \fBcfg\fR expressions like platform\-specific code will need to pass \fB\-\-target\fR to fix code for the given target. .sp .RS 4 .nf cargo fix \-\-target x86_64\-pc\-windows\-gnu .fi .RE .sp If you encounter any problems with \fBcargo fix\fR or otherwise have any questions or feature requests please don't hesitate to file an issue at \&. .SS "Edition migration" The \fBcargo fix\fR subcommand can also be used to migrate a package from one \fIedition\fR to the next. 
The general procedure is: .sp .RS 4 \h'-04' 1.\h'+01'Run \fBcargo fix \-\-edition\fR\&. Consider also using the \fB\-\-all\-features\fR flag if your project has multiple features. You may also want to run \fBcargo fix \-\-edition\fR multiple times with different \fB\-\-target\fR flags if your project has platform\-specific code gated by \fBcfg\fR attributes. .RE .sp .RS 4 \h'-04' 2.\h'+01'Modify \fBCargo.toml\fR to set the \fIedition field\fR to the new edition. .RE .sp .RS 4 \h'-04' 3.\h'+01'Run your project tests to verify that everything still works. If new warnings are issued, you may want to consider running \fBcargo fix\fR again (without the \fB\-\-edition\fR flag) to apply any suggestions given by the compiler. .RE .sp And hopefully that's it! Just keep in mind of the caveats mentioned above that \fBcargo fix\fR cannot update code for inactive features or \fBcfg\fR expressions. Also, in some rare cases the compiler is unable to automatically migrate all code to the new edition, and this may require manual changes after building with the new edition. .SH "OPTIONS" .SS "Fix options" .sp \fB\-\-broken\-code\fR .RS 4 Fix code even if it already has compiler errors. This is useful if \fBcargo fix\fR fails to apply the changes. It will apply the changes and leave the broken code in the working directory for you to inspect and manually fix. .RE .sp \fB\-\-edition\fR .RS 4 Apply changes that will update the code to the next edition. This will not update the edition in the \fBCargo.toml\fR manifest, which must be updated manually after \fBcargo fix \-\-edition\fR has finished. .RE .sp \fB\-\-edition\-idioms\fR .RS 4 Apply suggestions that will update code to the preferred style for the current edition. .RE .sp \fB\-\-allow\-no\-vcs\fR .RS 4 Fix code even if a VCS was not detected. .RE .sp \fB\-\-allow\-dirty\fR .RS 4 Fix code even if the working directory has changes. .RE .sp \fB\-\-allow\-staged\fR .RS 4 Fix code even if the working directory has staged changes. 
.RE .SS "Package Selection" By default, when no package selection options are given, the packages selected depend on the selected manifest file (based on the current working directory if \fB\-\-manifest\-path\fR is not given). If the manifest is the root of a workspace then the workspaces default members are selected, otherwise only the package defined by the manifest will be selected. .sp The default members of a workspace can be set explicitly with the \fBworkspace.default\-members\fR key in the root manifest. If this is not set, a virtual workspace will include all workspace members (equivalent to passing \fB\-\-workspace\fR), and a non\-virtual workspace will include only the root crate itself. .sp \fB\-p\fR \fIspec\fR\&..., \fB\-\-package\fR \fIspec\fR\&... .RS 4 Fix only the specified packages. See \fBcargo\-pkgid\fR(1) for the SPEC format. This flag may be specified multiple times and supports common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. .RE .sp \fB\-\-workspace\fR .RS 4 Fix all members in the workspace. .RE .sp \fB\-\-all\fR .RS 4 Deprecated alias for \fB\-\-workspace\fR\&. .RE .sp \fB\-\-exclude\fR \fISPEC\fR\&... .RS 4 Exclude the specified packages. Must be used in conjunction with the \fB\-\-workspace\fR flag. This flag may be specified multiple times and supports common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. .RE .SS "Target Selection" When no target selection options are given, \fBcargo fix\fR will fix all targets (\fB\-\-all\-targets\fR implied). Binaries are skipped if they have \fBrequired\-features\fR that are missing. .sp Passing target selection flags will fix only the specified targets. 
.sp Note that \fB\-\-bin\fR, \fB\-\-example\fR, \fB\-\-test\fR and \fB\-\-bench\fR flags also support common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each glob pattern. .sp \fB\-\-lib\fR .RS 4 Fix the package's library. .RE .sp \fB\-\-bin\fR \fIname\fR\&... .RS 4 Fix the specified binary. This flag may be specified multiple times and supports common Unix glob patterns. .RE .sp \fB\-\-bins\fR .RS 4 Fix all binary targets. .RE .sp \fB\-\-example\fR \fIname\fR\&... .RS 4 Fix the specified example. This flag may be specified multiple times and supports common Unix glob patterns. .RE .sp \fB\-\-examples\fR .RS 4 Fix all example targets. .RE .sp \fB\-\-test\fR \fIname\fR\&... .RS 4 Fix the specified integration test. This flag may be specified multiple times and supports common Unix glob patterns. .RE .sp \fB\-\-tests\fR .RS 4 Fix all targets in test mode that have the \fBtest = true\fR manifest flag set. By default this includes the library and binaries built as unittests, and integration tests. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a unittest, and once as a dependency for binaries, integration tests, etc.). Targets may be enabled or disabled by setting the \fBtest\fR flag in the manifest settings for the target. .RE .sp \fB\-\-bench\fR \fIname\fR\&... .RS 4 Fix the specified benchmark. This flag may be specified multiple times and supports common Unix glob patterns. .RE .sp \fB\-\-benches\fR .RS 4 Fix all targets in benchmark mode that have the \fBbench = true\fR manifest flag set. By default this includes the library and binaries built as benchmarks, and bench targets. 
Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a benchmark, and once as a dependency for binaries, benchmarks, etc.). Targets may be enabled or disabled by setting the \fBbench\fR flag in the manifest settings for the target. .RE .sp \fB\-\-all\-targets\fR .RS 4 Fix all targets. This is equivalent to specifying \fB\-\-lib \-\-bins \-\-tests \-\-benches \-\-examples\fR\&. .RE .SS "Feature Selection" The feature flags allow you to control which features are enabled. When no feature options are given, the \fBdefault\fR feature is activated for every selected package. .sp See \fIthe features documentation\fR for more details. .sp \fB\-F\fR \fIfeatures\fR, \fB\-\-features\fR \fIfeatures\fR .RS 4 Space or comma separated list of features to activate. Features of workspace members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may be specified multiple times, which enables all specified features. .RE .sp \fB\-\-all\-features\fR .RS 4 Activate all available features of all selected packages. .RE .sp \fB\-\-no\-default\-features\fR .RS 4 Do not activate the \fBdefault\fR feature of the selected packages. .RE .SS "Compilation Options" .sp \fB\-\-target\fR \fItriple\fR .RS 4 Fix for the given architecture. The default is the host architecture. The general format of the triple is \fB\-\-\-\fR\&. Run \fBrustc \-\-print target\-list\fR for a list of supported targets. This flag may be specified multiple times. .sp This may also be specified with the \fBbuild.target\fR \fIconfig value\fR \&. .sp Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the \fIbuild cache\fR documentation for more details. .RE .sp \fB\-r\fR, \fB\-\-release\fR .RS 4 Fix optimized artifacts with the \fBrelease\fR profile. See also the \fB\-\-profile\fR option for choosing a specific profile by name. 
.RE .sp \fB\-\-profile\fR \fIname\fR .RS 4 Fix with the given profile. .sp As a special case, specifying the \fBtest\fR profile will also enable checking in test mode which will enable checking tests and enable the \fBtest\fR cfg option. See \fIrustc tests\fR for more detail. .sp See the \fIthe reference\fR for more details on profiles. .RE .sp \fB\-\-ignore\-rust\-version\fR .RS 4 Fix the target even if the selected Rust compiler is older than the required Rust version as configured in the project's \fBrust\-version\fR field. .RE .sp \fB\-\-timings=\fR\fIfmts\fR .RS 4 Output information how long each compilation takes, and track concurrency information over time. Accepts an optional comma\-separated list of output formats; \fB\-\-timings\fR without an argument will default to \fB\-\-timings=html\fR\&. Specifying an output format (rather than the default) is unstable and requires \fB\-Zunstable\-options\fR\&. Valid output formats: .sp .RS 4 \h'-04'\(bu\h'+02'\fBhtml\fR (unstable, requires \fB\-Zunstable\-options\fR): Write a human\-readable file \fBcargo\-timing.html\fR to the \fBtarget/cargo\-timings\fR directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine\-readable timing data. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\fR (unstable, requires \fB\-Zunstable\-options\fR): Emit machine\-readable JSON information about timing information. .RE .RE .SS "Output Options" .sp \fB\-\-target\-dir\fR \fIdirectory\fR .RS 4 Directory for all generated artifacts and intermediate files. May also be specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the \fBbuild.target\-dir\fR \fIconfig value\fR \&. Defaults to \fBtarget\fR in the root of the workspace. .RE .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. 
May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .sp \fB\-\-message\-format\fR \fIfmt\fR .RS 4 The output format for diagnostic messages. Can be specified multiple times and consists of comma\-separated values. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with \fBshort\fR and \fBjson\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR and \fBjson\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See \fIthe reference\fR for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains the "short" rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&. 
.RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&. .RE .RE .SS "Manifest Options" .sp \fB\-\-manifest\-path\fR \fIpath\fR .RS 4 Path to the \fBCargo.toml\fR file. By default, Cargo searches for the \fBCargo.toml\fR file in the current directory or any parent directory. .RE .sp \fB\-\-frozen\fR, \fB\-\-locked\fR .RS 4 Either of these flags requires that the \fBCargo.lock\fR file is up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from attempting to access the network to determine if it is out\-of\-date. .sp These may be used in environments where you want to assert that the \fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network access. .RE .sp \fB\-\-offline\fR .RS 4 Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. .sp Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the \fBcargo\-fetch\fR(1) command to download dependencies before going offline. .sp May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. .RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). 
See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SS "Miscellaneous Options" .sp \fB\-j\fR \fIN\fR, \fB\-\-jobs\fR \fIN\fR .RS 4 Number of parallel jobs to run. May also be specified with the \fBbuild.jobs\fR \fIconfig value\fR \&. Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0. .RE .sp \fB\-\-keep\-going\fR .RS 4 Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires \fB\-Zunstable\-options\fR\&. .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. 
.RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Apply compiler suggestions to the local package: .sp .RS 4 .nf cargo fix .fi .RE .RE .sp .RS 4 \h'-04' 2.\h'+01'Update a package to prepare it for the next edition: .sp .RS 4 .nf cargo fix \-\-edition .fi .RE .RE .sp .RS 4 \h'-04' 3.\h'+01'Apply suggested idioms for the current edition: .sp .RS 4 .nf cargo fix \-\-edition\-idioms .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1), \fBcargo\-check\fR(1) cargo-0.66.0/src/etc/man/cargo-generate-lockfile.1000066400000000000000000000107411432416201200215440ustar00rootroot00000000000000'\" t .TH "CARGO\-GENERATE\-LOCKFILE" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-generate\-lockfile \- Generate the lockfile for a package .SH "SYNOPSIS" \fBcargo generate\-lockfile\fR [\fIoptions\fR] .SH "DESCRIPTION" This command will create the \fBCargo.lock\fR lockfile for the current package or workspace. If the lockfile already exists, it will be rebuilt with the latest available version of every package. .sp See also \fBcargo\-update\fR(1) which is also capable of creating a \fBCargo.lock\fR lockfile and has more options for controlling update behavior. .SH "OPTIONS" .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. 
.RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .SS "Manifest Options" .sp \fB\-\-manifest\-path\fR \fIpath\fR .RS 4 Path to the \fBCargo.toml\fR file. By default, Cargo searches for the \fBCargo.toml\fR file in the current directory or any parent directory. .RE .sp \fB\-\-frozen\fR, \fB\-\-locked\fR .RS 4 Either of these flags requires that the \fBCargo.lock\fR file is up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from attempting to access the network to determine if it is out\-of\-date. .sp These may be used in environments where you want to assert that the \fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network access. .RE .sp \fB\-\-offline\fR .RS 4 Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. .sp Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the \fBcargo\-fetch\fR(1) command to download dependencies before going offline. .sp May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. .RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. 
The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. .RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Create or update the lockfile for the current package or workspace: .sp .RS 4 .nf cargo generate\-lockfile .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1), \fBcargo\-update\fR(1) cargo-0.66.0/src/etc/man/cargo-help.1000066400000000000000000000007471432416201200171210ustar00rootroot00000000000000'\" t .TH "CARGO\-HELP" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-help \- Get help for a Cargo command .SH "SYNOPSIS" \fBcargo help\fR [\fIsubcommand\fR] .SH "DESCRIPTION" Prints a help message for the given command. .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Get help for a command: .sp .RS 4 .nf cargo help build .fi .RE .RE .sp .RS 4 \h'-04' 2.\h'+01'Help is also available with the \fB\-\-help\fR flag: .sp .RS 4 .nf cargo build \-\-help .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1) cargo-0.66.0/src/etc/man/cargo-init.1000066400000000000000000000110301432416201200171170ustar00rootroot00000000000000'\" t .TH "CARGO\-INIT" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-init \- Create a new Cargo package in an existing directory .SH "SYNOPSIS" \fBcargo init\fR [\fIoptions\fR] [\fIpath\fR] .SH "DESCRIPTION" This command will create a new Cargo manifest in the current directory. Give a path as an argument to create in the given directory. 
.sp If there are typically\-named Rust source files already in the directory, those will be used. If not, then a sample \fBsrc/main.rs\fR file will be created, or \fBsrc/lib.rs\fR if \fB\-\-lib\fR is passed. .sp If the directory is not already in a VCS repository, then a new repository is created (see \fB\-\-vcs\fR below). .sp See \fBcargo\-new\fR(1) for a similar command which will create a new package in a new directory. .SH "OPTIONS" .SS "Init Options" .sp \fB\-\-bin\fR .RS 4 Create a package with a binary target (\fBsrc/main.rs\fR). This is the default behavior. .RE .sp \fB\-\-lib\fR .RS 4 Create a package with a library target (\fBsrc/lib.rs\fR). .RE .sp \fB\-\-edition\fR \fIedition\fR .RS 4 Specify the Rust edition to use. Default is 2021. Possible values: 2015, 2018, 2021 .RE .sp \fB\-\-name\fR \fIname\fR .RS 4 Set the package name. Defaults to the directory name. .RE .sp \fB\-\-vcs\fR \fIvcs\fR .RS 4 Initialize a new VCS repository for the given version control system (git, hg, pijul, or fossil) or do not initialize any version control at all (none). If not specified, defaults to \fBgit\fR or the configuration value \fBcargo\-new.vcs\fR, or \fBnone\fR if already inside a VCS repository. .RE .sp \fB\-\-registry\fR \fIregistry\fR .RS 4 This sets the \fBpublish\fR field in \fBCargo.toml\fR to the given registry name which will restrict publishing only to that registry. .sp Registry names are defined in \fICargo config files\fR \&. If not specified, the default registry defined by the \fBregistry.default\fR config key is used. If the default registry is not set and \fB\-\-registry\fR is not used, the \fBpublish\fR field will not be set which means that publishing will not be restricted. .RE .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. 
May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. 
.RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Create a binary Cargo package in the current directory: .sp .RS 4 .nf cargo init .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1), \fBcargo\-new\fR(1) cargo-0.66.0/src/etc/man/cargo-install.1000066400000000000000000000375141432416201200176410ustar00rootroot00000000000000'\" t .TH "CARGO\-INSTALL" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-install \- Build and install a Rust binary .SH "SYNOPSIS" \fBcargo install\fR [\fIoptions\fR] \fIcrate\fR[@\fIversion\fR]\&... .br \fBcargo install\fR [\fIoptions\fR] \fB\-\-path\fR \fIpath\fR .br \fBcargo install\fR [\fIoptions\fR] \fB\-\-git\fR \fIurl\fR [\fIcrate\fR\&...] .br \fBcargo install\fR [\fIoptions\fR] \fB\-\-list\fR .SH "DESCRIPTION" This command manages Cargo's local set of installed binary crates. Only packages which have executable \fB[[bin]]\fR or \fB[[example]]\fR targets can be installed, and all executables are installed into the installation root's \fBbin\fR folder. .sp The installation root is determined, in order of precedence: .sp .RS 4 \h'-04'\(bu\h'+02'\fB\-\-root\fR option .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBCARGO_INSTALL_ROOT\fR environment variable .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBinstall.root\fR Cargo \fIconfig value\fR .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBCARGO_HOME\fR environment variable .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB$HOME/.cargo\fR .RE .sp There are multiple sources from which a crate can be installed. The default location is crates.io but the \fB\-\-git\fR, \fB\-\-path\fR, and \fB\-\-registry\fR flags can change this source. If the source contains more than one package (such as crates.io or a git repository with multiple crates) the \fIcrate\fR argument is required to indicate which crate should be installed. .sp Crates from crates.io can optionally specify the version they wish to install via the \fB\-\-version\fR flags, and similarly packages from git repositories can optionally specify the branch, tag, or revision that should be installed. 
If a crate has multiple binaries, the \fB\-\-bin\fR argument can selectively install only one of them, and if you'd rather install examples the \fB\-\-example\fR argument can be used as well. .sp If the package is already installed, Cargo will reinstall it if the installed version does not appear to be up\-to\-date. If any of the following values change, then Cargo will reinstall the package: .sp .RS 4 \h'-04'\(bu\h'+02'The package version and source. .RE .sp .RS 4 \h'-04'\(bu\h'+02'The set of binary names installed. .RE .sp .RS 4 \h'-04'\(bu\h'+02'The chosen features. .RE .sp .RS 4 \h'-04'\(bu\h'+02'The profile (\fB\-\-profile\fR). .RE .sp .RS 4 \h'-04'\(bu\h'+02'The target (\fB\-\-target\fR). .RE .sp Installing with \fB\-\-path\fR will always build and install, unless there are conflicting binaries from another package. The \fB\-\-force\fR flag may be used to force Cargo to always reinstall the package. .sp If the source is crates.io or \fB\-\-git\fR then by default the crate will be built in a temporary target directory. To avoid this, the target directory can be specified by setting the \fBCARGO_TARGET_DIR\fR environment variable to a relative path. In particular, this can be useful for caching build artifacts on continuous integration systems. .sp By default, the \fBCargo.lock\fR file that is included with the package will be ignored. This means that Cargo will recompute which versions of dependencies to use, possibly using newer versions that have been released since the package was published. The \fB\-\-locked\fR flag can be used to force Cargo to use the packaged \fBCargo.lock\fR file if it is available. This may be useful for ensuring reproducible builds, to use the exact same set of dependencies that were available when the package was published. It may also be useful if a newer version of a dependency is published that no longer builds on your system, or has other problems. 
The downside to using \fB\-\-locked\fR is that you will not receive any fixes or updates to any dependency. Note that Cargo did not start publishing \fBCargo.lock\fR files until version 1.37, which means packages published with prior versions will not have a \fBCargo.lock\fR file available. .SH "OPTIONS" .SS "Install Options" .sp \fB\-\-vers\fR \fIversion\fR, \fB\-\-version\fR \fIversion\fR .RS 4 Specify a version to install. This may be a \fIversion requirement\fR , like \fB~1.2\fR, to have Cargo select the newest version from the given requirement. If the version does not have a requirement operator (such as \fB^\fR or \fB~\fR), then it must be in the form \fIMAJOR.MINOR.PATCH\fR, and will install exactly that version; it is \fInot\fR treated as a caret requirement like Cargo dependencies are. .RE .sp \fB\-\-git\fR \fIurl\fR .RS 4 Git URL to install the specified crate from. .RE .sp \fB\-\-branch\fR \fIbranch\fR .RS 4 Branch to use when installing from git. .RE .sp \fB\-\-tag\fR \fItag\fR .RS 4 Tag to use when installing from git. .RE .sp \fB\-\-rev\fR \fIsha\fR .RS 4 Specific commit to use when installing from git. .RE .sp \fB\-\-path\fR \fIpath\fR .RS 4 Filesystem path to local crate to install. .RE .sp \fB\-\-list\fR .RS 4 List all installed packages and their versions. .RE .sp \fB\-f\fR, \fB\-\-force\fR .RS 4 Force overwriting existing crates or binaries. This can be used if a package has installed a binary with the same name as another package. This is also useful if something has changed on the system that you want to rebuild with, such as a newer version of \fBrustc\fR\&. .RE .sp \fB\-\-no\-track\fR .RS 4 By default, Cargo keeps track of the installed packages with a metadata file stored in the installation root directory. This flag tells Cargo not to use or create that file. With this flag, Cargo will refuse to overwrite any existing files unless the \fB\-\-force\fR flag is used. 
This also disables Cargo's ability to protect against multiple concurrent invocations of Cargo installing at the same time. .RE .sp \fB\-\-bin\fR \fIname\fR\&... .RS 4 Install only the specified binary. .RE .sp \fB\-\-bins\fR .RS 4 Install all binaries. .RE .sp \fB\-\-example\fR \fIname\fR\&... .RS 4 Install only the specified example. .RE .sp \fB\-\-examples\fR .RS 4 Install all examples. .RE .sp \fB\-\-root\fR \fIdir\fR .RS 4 Directory to install packages into. .RE .sp \fB\-\-registry\fR \fIregistry\fR .RS 4 Name of the registry to use. Registry names are defined in \fICargo config files\fR \&. If not specified, the default registry is used, which is defined by the \fBregistry.default\fR config key which defaults to \fBcrates\-io\fR\&. .RE .sp \fB\-\-index\fR \fIindex\fR .RS 4 The URL of the registry index to use. .RE .SS "Feature Selection" The feature flags allow you to control which features are enabled. When no feature options are given, the \fBdefault\fR feature is activated for every selected package. .sp See \fIthe features documentation\fR for more details. .sp \fB\-F\fR \fIfeatures\fR, \fB\-\-features\fR \fIfeatures\fR .RS 4 Space or comma separated list of features to activate. Features of workspace members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may be specified multiple times, which enables all specified features. .RE .sp \fB\-\-all\-features\fR .RS 4 Activate all available features of all selected packages. .RE .sp \fB\-\-no\-default\-features\fR .RS 4 Do not activate the \fBdefault\fR feature of the selected packages. .RE .SS "Compilation Options" .sp \fB\-\-target\fR \fItriple\fR .RS 4 Install for the given architecture. The default is the host architecture. The general format of the triple is \fB\-\-\-\fR\&. Run \fBrustc \-\-print target\-list\fR for a list of supported targets. .sp This may also be specified with the \fBbuild.target\fR \fIconfig value\fR \&. 
.sp Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the \fIbuild cache\fR documentation for more details. .RE .sp \fB\-\-target\-dir\fR \fIdirectory\fR .RS 4 Directory for all generated artifacts and intermediate files. May also be specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the \fBbuild.target\-dir\fR \fIconfig value\fR \&. Defaults to a new temporary folder located in the temporary directory of the platform. .sp When using \fB\-\-path\fR, by default it will use \fBtarget\fR directory in the workspace of the local crate unless \fB\-\-target\-dir\fR is specified. .RE .sp \fB\-\-debug\fR .RS 4 Build with the \fBdev\fR profile instead the \fBrelease\fR profile. See also the \fB\-\-profile\fR option for choosing a specific profile by name. .RE .sp \fB\-\-profile\fR \fIname\fR .RS 4 Install with the given profile. See the \fIthe reference\fR for more details on profiles. .RE .sp \fB\-\-timings=\fR\fIfmts\fR .RS 4 Output information how long each compilation takes, and track concurrency information over time. Accepts an optional comma\-separated list of output formats; \fB\-\-timings\fR without an argument will default to \fB\-\-timings=html\fR\&. Specifying an output format (rather than the default) is unstable and requires \fB\-Zunstable\-options\fR\&. Valid output formats: .sp .RS 4 \h'-04'\(bu\h'+02'\fBhtml\fR (unstable, requires \fB\-Zunstable\-options\fR): Write a human\-readable file \fBcargo\-timing.html\fR to the \fBtarget/cargo\-timings\fR directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine\-readable timing data. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\fR (unstable, requires \fB\-Zunstable\-options\fR): Emit machine\-readable JSON information about timing information. 
.RE .RE .SS "Manifest Options" .sp \fB\-\-frozen\fR, \fB\-\-locked\fR .RS 4 Either of these flags requires that the \fBCargo.lock\fR file is up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from attempting to access the network to determine if it is out\-of\-date. .sp These may be used in environments where you want to assert that the \fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network access. .RE .sp \fB\-\-offline\fR .RS 4 Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. .sp Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the \fBcargo\-fetch\fR(1) command to download dependencies before going offline. .sp May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. .RE .SS "Miscellaneous Options" .sp \fB\-j\fR \fIN\fR, \fB\-\-jobs\fR \fIN\fR .RS 4 Number of parallel jobs to run. May also be specified with the \fBbuild.jobs\fR \fIconfig value\fR \&. Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0. .RE .sp \fB\-\-keep\-going\fR .RS 4 Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires \fB\-Zunstable\-options\fR\&. .RE .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. 
May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .sp \fB\-\-message\-format\fR \fIfmt\fR .RS 4 The output format for diagnostic messages. Can be specified multiple times and consists of comma\-separated values. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with \fBshort\fR and \fBjson\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR and \fBjson\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See \fIthe reference\fR for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains the "short" rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. 
Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&. .RE .RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. .RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Install or upgrade a package from crates.io: .sp .RS 4 .nf cargo install ripgrep .fi .RE .RE .sp .RS 4 \h'-04' 2.\h'+01'Install or reinstall the package in the current directory: .sp .RS 4 .nf cargo install \-\-path . 
.fi .RE .RE .sp .RS 4 \h'-04' 3.\h'+01'View the list of installed packages: .sp .RS 4 .nf cargo install \-\-list .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1), \fBcargo\-uninstall\fR(1), \fBcargo\-search\fR(1), \fBcargo\-publish\fR(1) cargo-0.66.0/src/etc/man/cargo-locate-project.1000066400000000000000000000067031432416201200211020ustar00rootroot00000000000000'\" t .TH "CARGO\-LOCATE\-PROJECT" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-locate\-project \- Print a JSON representation of a Cargo.toml file's location .SH "SYNOPSIS" \fBcargo locate\-project\fR [\fIoptions\fR] .SH "DESCRIPTION" This command will print a JSON object to stdout with the full path to the \fBCargo.toml\fR manifest. .SH "OPTIONS" .sp \fB\-\-workspace\fR .RS 4 Locate the \fBCargo.toml\fR at the root of the workspace, as opposed to the current workspace member. .RE .SS "Display Options" .sp \fB\-\-message\-format\fR \fIfmt\fR .RS 4 The representation in which to print the project location. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\fR (default): JSON object with the path under the key "root". .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBplain\fR: Just the path. .RE .RE .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. 
.RE .SS "Manifest Options" .sp \fB\-\-manifest\-path\fR \fIpath\fR .RS 4 Path to the \fBCargo.toml\fR file. By default, Cargo searches for the \fBCargo.toml\fR file in the current directory or any parent directory. .RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. 
.RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Display the path to the manifest based on the current directory: .sp .RS 4 .nf cargo locate\-project .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1), \fBcargo\-metadata\fR(1) cargo-0.66.0/src/etc/man/cargo-login.1000066400000000000000000000070071432416201200172750ustar00rootroot00000000000000'\" t .TH "CARGO\-LOGIN" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-login \- Save an API token from the registry locally .SH "SYNOPSIS" \fBcargo login\fR [\fIoptions\fR] [\fItoken\fR] .SH "DESCRIPTION" This command will save the API token to disk so that commands that require authentication, such as \fBcargo\-publish\fR(1), will be automatically authenticated. The token is saved in \fB$CARGO_HOME/credentials.toml\fR\&. \fBCARGO_HOME\fR defaults to \fB\&.cargo\fR in your home directory. .sp If the \fItoken\fR argument is not specified, it will be read from stdin. .sp The API token for crates.io may be retrieved from \&. .sp Take care to keep the token secret, it should not be shared with anyone else. .SH "OPTIONS" .SS "Login Options" .sp \fB\-\-registry\fR \fIregistry\fR .RS 4 Name of the registry to use. Registry names are defined in \fICargo config files\fR \&. If not specified, the default registry is used, which is defined by the \fBregistry.default\fR config key which defaults to \fBcrates\-io\fR\&. .RE .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. 
.RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. .RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Save the API token to disk: .sp .RS 4 .nf cargo login .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1), \fBcargo\-publish\fR(1) cargo-0.66.0/src/etc/man/cargo-metadata.1000066400000000000000000000434321432416201200177470ustar00rootroot00000000000000'\" t .TH "CARGO\-METADATA" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-metadata \- Machine\-readable metadata about the current package .SH "SYNOPSIS" \fBcargo metadata\fR [\fIoptions\fR] .SH "DESCRIPTION" Output JSON to stdout containing information about the workspace members and resolved dependencies of the current package. 
.sp It is recommended to include the \fB\-\-format\-version\fR flag to future\-proof your code to ensure the output is in the format you are expecting. .sp See the \fIcargo_metadata crate\fR for a Rust API for reading the metadata. .SH "OUTPUT FORMAT" The output has the following format: .sp .RS 4 .nf { /* Array of all packages in the workspace. It also includes all feature\-enabled dependencies unless \-\-no\-deps is used. */ "packages": [ { /* The name of the package. */ "name": "my\-package", /* The version of the package. */ "version": "0.1.0", /* The Package ID, a unique identifier for referring to the package. */ "id": "my\-package 0.1.0 (path+file:///path/to/my\-package)", /* The license value from the manifest, or null. */ "license": "MIT/Apache\-2.0", /* The license\-file value from the manifest, or null. */ "license_file": "LICENSE", /* The description value from the manifest, or null. */ "description": "Package description.", /* The source ID of the package. This represents where a package is retrieved from. This is null for path dependencies and workspace members. For other dependencies, it is a string with the format: \- "registry+URL" for registry\-based dependencies. Example: "registry+https://github.com/rust\-lang/crates.io\-index" \- "git+URL" for git\-based dependencies. Example: "git+https://github.com/rust\-lang/cargo?rev=5e85ba14aaa20f8133863373404cb0af69eeef2c#5e85ba14aaa20f8133863373404cb0af69eeef2c" */ "source": null, /* Array of dependencies declared in the package's manifest. */ "dependencies": [ { /* The name of the dependency. */ "name": "bitflags", /* The source ID of the dependency. May be null, see description for the package source. */ "source": "registry+https://github.com/rust\-lang/crates.io\-index", /* The version requirement for the dependency. Dependencies without a version requirement have a value of "*". */ "req": "^1.0", /* The dependency kind. "dev", "build", or null for a normal dependency. 
*/ "kind": null, /* If the dependency is renamed, this is the new name for the dependency as a string. null if it is not renamed. */ "rename": null, /* Boolean of whether or not this is an optional dependency. */ "optional": false, /* Boolean of whether or not default features are enabled. */ "uses_default_features": true, /* Array of features enabled. */ "features": [], /* The target platform for the dependency. null if not a target dependency. */ "target": "cfg(windows)", /* The file system path for a local path dependency. not present if not a path dependency. */ "path": "/path/to/dep", /* A string of the URL of the registry this dependency is from. If not specified or null, the dependency is from the default registry (crates.io). */ "registry": null } ], /* Array of Cargo targets. */ "targets": [ { /* Array of target kinds. \- lib targets list the `crate\-type` values from the manifest such as "lib", "rlib", "dylib", "proc\-macro", etc. (default ["lib"]) \- binary is ["bin"] \- example is ["example"] \- integration test is ["test"] \- benchmark is ["bench"] \- build script is ["custom\-build"] */ "kind": [ "bin" ], /* Array of crate types. \- lib and example libraries list the `crate\-type` values from the manifest such as "lib", "rlib", "dylib", "proc\-macro", etc. (default ["lib"]) \- all other target kinds are ["bin"] */ "crate_types": [ "bin" ], /* The name of the target. */ "name": "my\-package", /* Absolute path to the root source file of the target. */ "src_path": "/path/to/my\-package/src/main.rs", /* The Rust edition of the target. Defaults to the package edition. */ "edition": "2018", /* Array of required features. This property is not included if no required features are set. */ "required\-features": ["feat1"], /* Whether the target should be documented by `cargo doc`. */ "doc": true, /* Whether or not this target has doc tests enabled, and the target is compatible with doc testing. 
*/ "doctest": false, /* Whether or not this target should be built and run with `\-\-test` */ "test": true } ], /* Set of features defined for the package. Each feature maps to an array of features or dependencies it enables. */ "features": { "default": [ "feat1" ], "feat1": [], "feat2": [] }, /* Absolute path to this package's manifest. */ "manifest_path": "/path/to/my\-package/Cargo.toml", /* Package metadata. This is null if no metadata is specified. */ "metadata": { "docs": { "rs": { "all\-features": true } } }, /* List of registries to which this package may be published. Publishing is unrestricted if null, and forbidden if an empty array. */ "publish": [ "crates\-io" ], /* Array of authors from the manifest. Empty array if no authors specified. */ "authors": [ "Jane Doe " ], /* Array of categories from the manifest. */ "categories": [ "command\-line\-utilities" ], /* Optional string that is the default binary picked by cargo run. */ "default_run": null, /* Optional string that is the minimum supported rust version */ "rust_version": "1.56", /* Array of keywords from the manifest. */ "keywords": [ "cli" ], /* The readme value from the manifest or null if not specified. */ "readme": "README.md", /* The repository value from the manifest or null if not specified. */ "repository": "https://github.com/rust\-lang/cargo", /* The homepage value from the manifest or null if not specified. */ "homepage": "https://rust\-lang.org", /* The documentation value from the manifest or null if not specified. */ "documentation": "https://doc.rust\-lang.org/stable/std", /* The default edition of the package. Note that individual targets may have different editions. */ "edition": "2018", /* Optional string that is the name of a native library the package is linking to. */ "links": null, } ], /* Array of members of the workspace. Each entry is the Package ID for the package. 
*/ "workspace_members": [ "my\-package 0.1.0 (path+file:///path/to/my\-package)", ], // The resolved dependency graph for the entire workspace. The enabled // features are based on the enabled features for the "current" package. // Inactivated optional dependencies are not listed. // // This is null if \-\-no\-deps is specified. // // By default, this includes all dependencies for all target platforms. // The `\-\-filter\-platform` flag may be used to narrow to a specific // target triple. "resolve": { /* Array of nodes within the dependency graph. Each node is a package. */ "nodes": [ { /* The Package ID of this node. */ "id": "my\-package 0.1.0 (path+file:///path/to/my\-package)", /* The dependencies of this package, an array of Package IDs. */ "dependencies": [ "bitflags 1.0.4 (registry+https://github.com/rust\-lang/crates.io\-index)" ], /* The dependencies of this package. This is an alternative to "dependencies" which contains additional information. In particular, this handles renamed dependencies. */ "deps": [ { /* The name of the dependency's library target. If this is a renamed dependency, this is the new name. */ "name": "bitflags", /* The Package ID of the dependency. */ "pkg": "bitflags 1.0.4 (registry+https://github.com/rust\-lang/crates.io\-index)", /* Array of dependency kinds. Added in Cargo 1.40. */ "dep_kinds": [ { /* The dependency kind. "dev", "build", or null for a normal dependency. */ "kind": null, /* The target platform for the dependency. null if not a target dependency. */ "target": "cfg(windows)" } ] } ], /* Array of features enabled on this package. */ "features": [ "default" ] } ], /* The root package of the workspace. This is null if this is a virtual workspace. Otherwise it is the Package ID of the root package. */ "root": "my\-package 0.1.0 (path+file:///path/to/my\-package)" }, /* The absolute path to the build directory where Cargo places its output. 
*/ "target_directory": "/path/to/my\-package/target", /* The version of the schema for this metadata structure. This will be changed if incompatible changes are ever made. */ "version": 1, /* The absolute path to the root of the workspace. */ "workspace_root": "/path/to/my\-package" /* Workspace metadata. This is null if no metadata is specified. */ "metadata": { "docs": { "rs": { "all\-features": true } } } } .fi .RE .SH "OPTIONS" .SS "Output Options" .sp \fB\-\-no\-deps\fR .RS 4 Output information only about the workspace members and don't fetch dependencies. .RE .sp \fB\-\-format\-version\fR \fIversion\fR .RS 4 Specify the version of the output format to use. Currently \fB1\fR is the only possible value. .RE .sp \fB\-\-filter\-platform\fR \fItriple\fR .RS 4 This filters the \fBresolve\fR output to only include dependencies for the given target triple. Without this flag, the resolve includes all targets. .sp Note that the dependencies listed in the "packages" array still includes all dependencies. Each package definition is intended to be an unaltered reproduction of the information within \fBCargo.toml\fR\&. .RE .SS "Feature Selection" The feature flags allow you to control which features are enabled. When no feature options are given, the \fBdefault\fR feature is activated for every selected package. .sp See \fIthe features documentation\fR for more details. .sp \fB\-F\fR \fIfeatures\fR, \fB\-\-features\fR \fIfeatures\fR .RS 4 Space or comma separated list of features to activate. Features of workspace members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may be specified multiple times, which enables all specified features. .RE .sp \fB\-\-all\-features\fR .RS 4 Activate all available features of all selected packages. .RE .sp \fB\-\-no\-default\-features\fR .RS 4 Do not activate the \fBdefault\fR feature of the selected packages. .RE .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. 
May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .SS "Manifest Options" .sp \fB\-\-manifest\-path\fR \fIpath\fR .RS 4 Path to the \fBCargo.toml\fR file. By default, Cargo searches for the \fBCargo.toml\fR file in the current directory or any parent directory. .RE .sp \fB\-\-frozen\fR, \fB\-\-locked\fR .RS 4 Either of these flags requires that the \fBCargo.lock\fR file is up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from attempting to access the network to determine if it is out\-of\-date. .sp These may be used in environments where you want to assert that the \fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network access. .RE .sp \fB\-\-offline\fR .RS 4 Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. .sp Beware that this may result in different dependency resolution than online mode. 
Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the \fBcargo\-fetch\fR(1) command to download dependencies before going offline. .sp May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. .RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. .RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Output JSON about the current package: .sp .RS 4 .nf cargo metadata \-\-format\-version=1 .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1) cargo-0.66.0/src/etc/man/cargo-new.1000066400000000000000000000105341432416201200167550ustar00rootroot00000000000000'\" t .TH "CARGO\-NEW" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-new \- Create a new Cargo package .SH "SYNOPSIS" \fBcargo new\fR [\fIoptions\fR] \fIpath\fR .SH "DESCRIPTION" This command will create a new Cargo package in the given directory. 
This includes a simple template with a \fBCargo.toml\fR manifest, sample source file, and a VCS ignore file. If the directory is not already in a VCS repository, then a new repository is created (see \fB\-\-vcs\fR below). .sp See \fBcargo\-init\fR(1) for a similar command which will create a new manifest in an existing directory. .SH "OPTIONS" .SS "New Options" .sp \fB\-\-bin\fR .RS 4 Create a package with a binary target (\fBsrc/main.rs\fR). This is the default behavior. .RE .sp \fB\-\-lib\fR .RS 4 Create a package with a library target (\fBsrc/lib.rs\fR). .RE .sp \fB\-\-edition\fR \fIedition\fR .RS 4 Specify the Rust edition to use. Default is 2021. Possible values: 2015, 2018, 2021 .RE .sp \fB\-\-name\fR \fIname\fR .RS 4 Set the package name. Defaults to the directory name. .RE .sp \fB\-\-vcs\fR \fIvcs\fR .RS 4 Initialize a new VCS repository for the given version control system (git, hg, pijul, or fossil) or do not initialize any version control at all (none). If not specified, defaults to \fBgit\fR or the configuration value \fBcargo\-new.vcs\fR, or \fBnone\fR if already inside a VCS repository. .RE .sp \fB\-\-registry\fR \fIregistry\fR .RS 4 This sets the \fBpublish\fR field in \fBCargo.toml\fR to the given registry name which will restrict publishing only to that registry. .sp Registry names are defined in \fICargo config files\fR \&. If not specified, the default registry defined by the \fBregistry.default\fR config key is used. If the default registry is not set and \fB\-\-registry\fR is not used, the \fBpublish\fR field will not be set which means that publishing will not be restricted. .RE .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. 
May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. 
.RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Create a binary Cargo package in the given directory: .sp .RS 4 .nf cargo new foo .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1), \fBcargo\-init\fR(1) cargo-0.66.0/src/etc/man/cargo-owner.1000066400000000000000000000117021432416201200173140ustar00rootroot00000000000000'\" t .TH "CARGO\-OWNER" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-owner \- Manage the owners of a crate on the registry .SH "SYNOPSIS" \fBcargo owner\fR [\fIoptions\fR] \fB\-\-add\fR \fIlogin\fR [\fIcrate\fR] .br \fBcargo owner\fR [\fIoptions\fR] \fB\-\-remove\fR \fIlogin\fR [\fIcrate\fR] .br \fBcargo owner\fR [\fIoptions\fR] \fB\-\-list\fR [\fIcrate\fR] .SH "DESCRIPTION" This command will modify the owners for a crate on the registry. Owners of a crate can upload new versions and yank old versions. Non\-team owners can also modify the set of owners, so take care! .sp This command requires you to be authenticated with either the \fB\-\-token\fR option or using \fBcargo\-login\fR(1). .sp If the crate name is not specified, it will use the package name from the current directory. .sp See \fIthe reference\fR for more information about owners and publishing. .SH "OPTIONS" .SS "Owner Options" .sp \fB\-a\fR, \fB\-\-add\fR \fIlogin\fR\&... .RS 4 Invite the given user or team as an owner. .RE .sp \fB\-r\fR, \fB\-\-remove\fR \fIlogin\fR\&... .RS 4 Remove the given user or team as an owner. .RE .sp \fB\-l\fR, \fB\-\-list\fR .RS 4 List owners of a crate. .RE .sp \fB\-\-token\fR \fItoken\fR .RS 4 API token to use when authenticating. This overrides the token stored in the credentials file (which is created by \fBcargo\-login\fR(1)). .sp \fICargo config\fR environment variables can be used to override the tokens stored in the credentials file. The token for crates.io may be specified with the \fBCARGO_REGISTRY_TOKEN\fR environment variable. 
Tokens for other registries may be specified with environment variables of the form \fBCARGO_REGISTRIES_NAME_TOKEN\fR where \fBNAME\fR is the name of the registry in all capital letters. .RE .sp \fB\-\-index\fR \fIindex\fR .RS 4 The URL of the registry index to use. .RE .sp \fB\-\-registry\fR \fIregistry\fR .RS 4 Name of the registry to use. Registry names are defined in \fICargo config files\fR \&. If not specified, the default registry is used, which is defined by the \fBregistry.default\fR config key which defaults to \fBcrates\-io\fR\&. .RE .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. 
This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. .RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'List owners of a package: .sp .RS 4 .nf cargo owner \-\-list foo .fi .RE .RE .sp .RS 4 \h'-04' 2.\h'+01'Invite an owner to a package: .sp .RS 4 .nf cargo owner \-\-add username foo .fi .RE .RE .sp .RS 4 \h'-04' 3.\h'+01'Remove an owner from a package: .sp .RS 4 .nf cargo owner \-\-remove username foo .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1), \fBcargo\-login\fR(1), \fBcargo\-publish\fR(1) cargo-0.66.0/src/etc/man/cargo-package.1000066400000000000000000000255351432416201200175660ustar00rootroot00000000000000'\" t .TH "CARGO\-PACKAGE" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-package \- Assemble the local package into a distributable tarball .SH "SYNOPSIS" \fBcargo package\fR [\fIoptions\fR] .SH "DESCRIPTION" This command will create a distributable, compressed \fB\&.crate\fR file with the source code of the package in the current directory. The resulting file will be stored in the \fBtarget/package\fR directory. This performs the following steps: .sp .RS 4 \h'-04' 1.\h'+01'Load and check the current workspace, performing some basic checks. .sp .RS 4 \h'-04'\(bu\h'+02'Path dependencies are not allowed unless they have a version key. Cargo will ignore the path key for dependencies in published packages. \fBdev\-dependencies\fR do not have this restriction. .RE .RE .sp .RS 4 \h'-04' 2.\h'+01'Create the compressed \fB\&.crate\fR file. 
.sp .RS 4 \h'-04'\(bu\h'+02'The original \fBCargo.toml\fR file is rewritten and normalized. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB[patch]\fR, \fB[replace]\fR, and \fB[workspace]\fR sections are removed from the manifest. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBCargo.lock\fR is automatically included if the package contains an executable binary or example target. \fBcargo\-install\fR(1) will use the packaged lock file if the \fB\-\-locked\fR flag is used. .RE .sp .RS 4 \h'-04'\(bu\h'+02'A \fB\&.cargo_vcs_info.json\fR file is included that contains information about the current VCS checkout hash if available (not included with \fB\-\-allow\-dirty\fR). .RE .RE .sp .RS 4 \h'-04' 3.\h'+01'Extract the \fB\&.crate\fR file and build it to verify it can build. .sp .RS 4 \h'-04'\(bu\h'+02'This will rebuild your package from scratch to ensure that it can be built from a pristine state. The \fB\-\-no\-verify\fR flag can be used to skip this step. .RE .RE .sp .RS 4 \h'-04' 4.\h'+01'Check that build scripts did not modify any source files. .RE .sp The list of files included can be controlled with the \fBinclude\fR and \fBexclude\fR fields in the manifest. .sp See \fIthe reference\fR for more details about packaging and publishing. .SS ".cargo_vcs_info.json format" Will generate a \fB\&.cargo_vcs_info.json\fR in the following format .sp .RS 4 .nf { "git": { "sha1": "aac20b6e7e543e6dd4118b246c77225e3a3a1302" }, "path_in_vcs": "" } .fi .RE .sp \fBpath_in_vcs\fR will be set to a repo\-relative path for packages in subdirectories of the version control repository. .SH "OPTIONS" .SS "Package Options" .sp \fB\-l\fR, \fB\-\-list\fR .RS 4 Print files included in a package without making one. .RE .sp \fB\-\-no\-verify\fR .RS 4 Don't verify the contents by building them. .RE .sp \fB\-\-no\-metadata\fR .RS 4 Ignore warnings about a lack of human\-usable metadata (such as the description or the license). 
.RE .sp \fB\-\-allow\-dirty\fR .RS 4 Allow working directories with uncommitted VCS changes to be packaged. .RE .SS "Package Selection" By default, when no package selection options are given, the packages selected depend on the selected manifest file (based on the current working directory if \fB\-\-manifest\-path\fR is not given). If the manifest is the root of a workspace then the workspaces default members are selected, otherwise only the package defined by the manifest will be selected. .sp The default members of a workspace can be set explicitly with the \fBworkspace.default\-members\fR key in the root manifest. If this is not set, a virtual workspace will include all workspace members (equivalent to passing \fB\-\-workspace\fR), and a non\-virtual workspace will include only the root crate itself. .sp \fB\-p\fR \fIspec\fR\&..., \fB\-\-package\fR \fIspec\fR\&... .RS 4 Package only the specified packages. See \fBcargo\-pkgid\fR(1) for the SPEC format. This flag may be specified multiple times and supports common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. .RE .sp \fB\-\-workspace\fR .RS 4 Package all members in the workspace. .RE .sp \fB\-\-exclude\fR \fISPEC\fR\&... .RS 4 Exclude the specified packages. Must be used in conjunction with the \fB\-\-workspace\fR flag. This flag may be specified multiple times and supports common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. .RE .SS "Compilation Options" .sp \fB\-\-target\fR \fItriple\fR .RS 4 Package for the given architecture. The default is the host architecture. The general format of the triple is \fB\-\-\-\fR\&. 
Run \fBrustc \-\-print target\-list\fR for a list of supported targets. This flag may be specified multiple times. .sp This may also be specified with the \fBbuild.target\fR \fIconfig value\fR \&. .sp Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the \fIbuild cache\fR documentation for more details. .RE .sp \fB\-\-target\-dir\fR \fIdirectory\fR .RS 4 Directory for all generated artifacts and intermediate files. May also be specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the \fBbuild.target\-dir\fR \fIconfig value\fR \&. Defaults to \fBtarget\fR in the root of the workspace. .RE .SS "Feature Selection" The feature flags allow you to control which features are enabled. When no feature options are given, the \fBdefault\fR feature is activated for every selected package. .sp See \fIthe features documentation\fR for more details. .sp \fB\-F\fR \fIfeatures\fR, \fB\-\-features\fR \fIfeatures\fR .RS 4 Space or comma separated list of features to activate. Features of workspace members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may be specified multiple times, which enables all specified features. .RE .sp \fB\-\-all\-features\fR .RS 4 Activate all available features of all selected packages. .RE .sp \fB\-\-no\-default\-features\fR .RS 4 Do not activate the \fBdefault\fR feature of the selected packages. .RE .SS "Manifest Options" .sp \fB\-\-manifest\-path\fR \fIpath\fR .RS 4 Path to the \fBCargo.toml\fR file. By default, Cargo searches for the \fBCargo.toml\fR file in the current directory or any parent directory. .RE .sp \fB\-\-frozen\fR, \fB\-\-locked\fR .RS 4 Either of these flags requires that the \fBCargo.lock\fR file is up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. 
The \fB\-\-frozen\fR flag also prevents Cargo from attempting to access the network to determine if it is out\-of\-date. .sp These may be used in environments where you want to assert that the \fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network access. .RE .sp \fB\-\-offline\fR .RS 4 Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. .sp Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the \fBcargo\-fetch\fR(1) command to download dependencies before going offline. .sp May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. .RE .SS "Miscellaneous Options" .sp \fB\-j\fR \fIN\fR, \fB\-\-jobs\fR \fIN\fR .RS 4 Number of parallel jobs to run. May also be specified with the \fBbuild.jobs\fR \fIconfig value\fR \&. Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0. .RE .sp \fB\-\-keep\-going\fR .RS 4 Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires \fB\-Zunstable\-options\fR\&. .RE .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. 
.RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. 
.RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Create a compressed \fB\&.crate\fR file of the current package: .sp .RS 4 .nf cargo package .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1), \fBcargo\-publish\fR(1) cargo-0.66.0/src/etc/man/cargo-pkgid.1000066400000000000000000000137201432416201200172620ustar00rootroot00000000000000'\" t .TH "CARGO\-PKGID" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-pkgid \- Print a fully qualified package specification .SH "SYNOPSIS" \fBcargo pkgid\fR [\fIoptions\fR] [\fIspec\fR] .SH "DESCRIPTION" Given a \fIspec\fR argument, print out the fully qualified package ID specifier for a package or dependency in the current workspace. This command will generate an error if \fIspec\fR is ambiguous as to which package it refers to in the dependency graph. If no \fIspec\fR is given, then the specifier for the local package is printed. .sp This command requires that a lockfile is available and dependencies have been fetched. .sp A package specifier consists of a name, version, and source URL. You are allowed to use partial specifiers to succinctly match a specific package as long as it matches only one package. The format of a \fIspec\fR can be one of the following: .TS allbox tab(:); lt lt. T{ SPEC Structure T}:T{ Example SPEC T} T{ \fIname\fR T}:T{ \fBbitflags\fR T} T{ \fIname\fR\fB@\fR\fIversion\fR T}:T{ \fBbitflags@1.0.4\fR T} T{ \fIurl\fR T}:T{ \fBhttps://github.com/rust\-lang/cargo\fR T} T{ \fIurl\fR\fB#\fR\fIversion\fR T}:T{ \fBhttps://github.com/rust\-lang/cargo#0.33.0\fR T} T{ \fIurl\fR\fB#\fR\fIname\fR T}:T{ \fBhttps://github.com/rust\-lang/crates.io\-index#bitflags\fR T} T{ \fIurl\fR\fB#\fR\fIname\fR\fB:\fR\fIversion\fR T}:T{ \fBhttps://github.com/rust\-lang/cargo#crates\-io@0.21.0\fR T} .TE .sp .SH "OPTIONS" .SS "Package Selection" .sp \fB\-p\fR \fIspec\fR, \fB\-\-package\fR \fIspec\fR .RS 4 Get the package ID for the given package instead of the current package. 
.RE .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .SS "Manifest Options" .sp \fB\-\-manifest\-path\fR \fIpath\fR .RS 4 Path to the \fBCargo.toml\fR file. By default, Cargo searches for the \fBCargo.toml\fR file in the current directory or any parent directory. .RE .sp \fB\-\-frozen\fR, \fB\-\-locked\fR .RS 4 Either of these flags requires that the \fBCargo.lock\fR file is up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from attempting to access the network to determine if it is out\-of\-date. .sp These may be used in environments where you want to assert that the \fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network access. .RE .sp \fB\-\-offline\fR .RS 4 Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. .sp Beware that this may result in different dependency resolution than online mode. 
Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the \fBcargo\-fetch\fR(1) command to download dependencies before going offline. .sp May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. .RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. 
.RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Retrieve package specification for \fBfoo\fR package: .sp .RS 4 .nf cargo pkgid foo .fi .RE .RE .sp .RS 4 \h'-04' 2.\h'+01'Retrieve package specification for version 1.0.0 of \fBfoo\fR: .sp .RS 4 .nf cargo pkgid foo@1.0.0 .fi .RE .RE .sp .RS 4 \h'-04' 3.\h'+01'Retrieve package specification for \fBfoo\fR from crates.io: .sp .RS 4 .nf cargo pkgid https://github.com/rust\-lang/crates.io\-index#foo .fi .RE .RE .sp .RS 4 \h'-04' 4.\h'+01'Retrieve package specification for \fBfoo\fR from a local package: .sp .RS 4 .nf cargo pkgid file:///path/to/local/package#foo .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1), \fBcargo\-generate\-lockfile\fR(1), \fBcargo\-metadata\fR(1) cargo-0.66.0/src/etc/man/cargo-publish.1000066400000000000000000000226571432416201200176430ustar00rootroot00000000000000'\" t .TH "CARGO\-PUBLISH" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-publish \- Upload a package to the registry .SH "SYNOPSIS" \fBcargo publish\fR [\fIoptions\fR] .SH "DESCRIPTION" This command will create a distributable, compressed \fB\&.crate\fR file with the source code of the package in the current directory and upload it to a registry. The default registry is \&. This performs the following steps: .sp .RS 4 \h'-04' 1.\h'+01'Performs a few checks, including: .sp .RS 4 \h'-04'\(bu\h'+02'Checks the \fBpackage.publish\fR key in the manifest for restrictions on which registries you are allowed to publish to. .RE .RE .sp .RS 4 \h'-04' 2.\h'+01'Create a \fB\&.crate\fR file by following the steps in \fBcargo\-package\fR(1). .RE .sp .RS 4 \h'-04' 3.\h'+01'Upload the crate to the registry. Note that the server will perform additional checks on the crate. .RE .sp This command requires you to be authenticated with either the \fB\-\-token\fR option or using \fBcargo\-login\fR(1). .sp See \fIthe reference\fR for more details about packaging and publishing. 
.SH "OPTIONS" .SS "Publish Options" .sp \fB\-\-dry\-run\fR .RS 4 Perform all checks without uploading. .RE .sp \fB\-\-token\fR \fItoken\fR .RS 4 API token to use when authenticating. This overrides the token stored in the credentials file (which is created by \fBcargo\-login\fR(1)). .sp \fICargo config\fR environment variables can be used to override the tokens stored in the credentials file. The token for crates.io may be specified with the \fBCARGO_REGISTRY_TOKEN\fR environment variable. Tokens for other registries may be specified with environment variables of the form \fBCARGO_REGISTRIES_NAME_TOKEN\fR where \fBNAME\fR is the name of the registry in all capital letters. .RE .sp \fB\-\-no\-verify\fR .RS 4 Don't verify the contents by building them. .RE .sp \fB\-\-allow\-dirty\fR .RS 4 Allow working directories with uncommitted VCS changes to be packaged. .RE .sp \fB\-\-index\fR \fIindex\fR .RS 4 The URL of the registry index to use. .RE .sp \fB\-\-registry\fR \fIregistry\fR .RS 4 Name of the registry to publish to. Registry names are defined in \fICargo config files\fR \&. If not specified, and there is a \fI\f(BIpackage.publish\fI\fR field in \fBCargo.toml\fR with a single registry, then it will publish to that registry. Otherwise it will use the default registry, which is defined by the \fI\f(BIregistry.default\fI\fR config key which defaults to \fBcrates\-io\fR\&. .RE .SS "Package Selection" By default, the package in the current working directory is selected. The \fB\-p\fR flag can be used to choose a different package in a workspace. .sp \fB\-p\fR \fIspec\fR, \fB\-\-package\fR \fIspec\fR .RS 4 The package to publish. See \fBcargo\-pkgid\fR(1) for the SPEC format. .RE .SS "Compilation Options" .sp \fB\-\-target\fR \fItriple\fR .RS 4 Publish for the given architecture. The default is the host architecture. The general format of the triple is \fB\-\-\-\fR\&. Run \fBrustc \-\-print target\-list\fR for a list of supported targets. 
This flag may be specified multiple times. .sp This may also be specified with the \fBbuild.target\fR \fIconfig value\fR \&. .sp Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the \fIbuild cache\fR documentation for more details. .RE .sp \fB\-\-target\-dir\fR \fIdirectory\fR .RS 4 Directory for all generated artifacts and intermediate files. May also be specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the \fBbuild.target\-dir\fR \fIconfig value\fR \&. Defaults to \fBtarget\fR in the root of the workspace. .RE .SS "Feature Selection" The feature flags allow you to control which features are enabled. When no feature options are given, the \fBdefault\fR feature is activated for every selected package. .sp See \fIthe features documentation\fR for more details. .sp \fB\-F\fR \fIfeatures\fR, \fB\-\-features\fR \fIfeatures\fR .RS 4 Space or comma separated list of features to activate. Features of workspace members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may be specified multiple times, which enables all specified features. .RE .sp \fB\-\-all\-features\fR .RS 4 Activate all available features of all selected packages. .RE .sp \fB\-\-no\-default\-features\fR .RS 4 Do not activate the \fBdefault\fR feature of the selected packages. .RE .SS "Manifest Options" .sp \fB\-\-manifest\-path\fR \fIpath\fR .RS 4 Path to the \fBCargo.toml\fR file. By default, Cargo searches for the \fBCargo.toml\fR file in the current directory or any parent directory. .RE .sp \fB\-\-frozen\fR, \fB\-\-locked\fR .RS 4 Either of these flags requires that the \fBCargo.lock\fR file is up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from attempting to access the network to determine if it is out\-of\-date. 
.sp These may be used in environments where you want to assert that the \fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network access. .RE .sp \fB\-\-offline\fR .RS 4 Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. .sp Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the \fBcargo\-fetch\fR(1) command to download dependencies before going offline. .sp May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. .RE .SS "Miscellaneous Options" .sp \fB\-j\fR \fIN\fR, \fB\-\-jobs\fR \fIN\fR .RS 4 Number of parallel jobs to run. May also be specified with the \fBbuild.jobs\fR \fIconfig value\fR \&. Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0. .RE .sp \fB\-\-keep\-going\fR .RS 4 Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires \fB\-Zunstable\-options\fR\&. .RE .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. 
Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. 
.RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Publish the current package: .sp .RS 4 .nf cargo publish .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1), \fBcargo\-package\fR(1), \fBcargo\-login\fR(1) cargo-0.66.0/src/etc/man/cargo-report.1000066400000000000000000000017351432416201200175020ustar00rootroot00000000000000'\" t .TH "CARGO\-REPORT" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-report \- Generate and display various kinds of reports .SH "SYNOPSIS" \fBcargo report\fR \fItype\fR [\fIoptions\fR] .SS "DESCRIPTION" Displays a report of the given \fItype\fR \- currently, only \fBfuture\-incompat\fR is supported .SH "OPTIONS" .sp \fB\-\-id\fR \fIid\fR .RS 4 Show the report with the specified Cargo\-generated id .RE .sp \fB\-p\fR \fIspec\fR\&..., \fB\-\-package\fR \fIspec\fR\&... .RS 4 Only display a report for the specified package .RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Display the latest future\-incompat report: .sp .RS 4 .nf cargo report future\-incompat .fi .RE .RE .sp .RS 4 \h'-04' 2.\h'+01'Display the latest future\-incompat report for a specific package: .sp .RS 4 .nf cargo report future\-incompat \-\-package my\-dep:0.0.1 .fi .RE .RE .SH "SEE ALSO" \fIFuture incompat report\fR .sp \fBcargo\fR(1) cargo-0.66.0/src/etc/man/cargo-run.1000066400000000000000000000253671432416201200170020ustar00rootroot00000000000000'\" t .TH "CARGO\-RUN" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-run \- Run the current package .SH "SYNOPSIS" \fBcargo run\fR [\fIoptions\fR] [\fB\-\-\fR \fIargs\fR] .SH "DESCRIPTION" Run a binary or example of the local package. .sp All the arguments following the two dashes (\fB\-\-\fR) are passed to the binary to run. If you're passing arguments to both Cargo and the binary, the ones after \fB\-\-\fR go to the binary, the ones before go to Cargo. .SH "OPTIONS" .SS "Package Selection" By default, the package in the current working directory is selected. The \fB\-p\fR flag can be used to choose a different package in a workspace. 
.sp \fB\-p\fR \fIspec\fR, \fB\-\-package\fR \fIspec\fR .RS 4 The package to run. See \fBcargo\-pkgid\fR(1) for the SPEC format. .RE .SS "Target Selection" When no target selection options are given, \fBcargo run\fR will run the binary target. If there are multiple binary targets, you must pass a target flag to choose one. Or, the \fBdefault\-run\fR field may be specified in the \fB[package]\fR section of \fBCargo.toml\fR to choose the name of the binary to run by default. .sp \fB\-\-bin\fR \fIname\fR .RS 4 Run the specified binary. .RE .sp \fB\-\-example\fR \fIname\fR .RS 4 Run the specified example. .RE .SS "Feature Selection" The feature flags allow you to control which features are enabled. When no feature options are given, the \fBdefault\fR feature is activated for every selected package. .sp See \fIthe features documentation\fR for more details. .sp \fB\-F\fR \fIfeatures\fR, \fB\-\-features\fR \fIfeatures\fR .RS 4 Space or comma separated list of features to activate. Features of workspace members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may be specified multiple times, which enables all specified features. .RE .sp \fB\-\-all\-features\fR .RS 4 Activate all available features of all selected packages. .RE .sp \fB\-\-no\-default\-features\fR .RS 4 Do not activate the \fBdefault\fR feature of the selected packages. .RE .SS "Compilation Options" .sp \fB\-\-target\fR \fItriple\fR .RS 4 Run for the given architecture. The default is the host architecture. The general format of the triple is \fB\-\-\-\fR\&. Run \fBrustc \-\-print target\-list\fR for a list of supported targets. .sp This may also be specified with the \fBbuild.target\fR \fIconfig value\fR \&. .sp Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the \fIbuild cache\fR documentation for more details. 
.RE .sp \fB\-r\fR, \fB\-\-release\fR .RS 4 Run optimized artifacts with the \fBrelease\fR profile. See also the \fB\-\-profile\fR option for choosing a specific profile by name. .RE .sp \fB\-\-profile\fR \fIname\fR .RS 4 Run with the given profile. See the \fIthe reference\fR for more details on profiles. .RE .sp \fB\-\-ignore\-rust\-version\fR .RS 4 Run the target even if the selected Rust compiler is older than the required Rust version as configured in the project's \fBrust\-version\fR field. .RE .sp \fB\-\-timings=\fR\fIfmts\fR .RS 4 Output information how long each compilation takes, and track concurrency information over time. Accepts an optional comma\-separated list of output formats; \fB\-\-timings\fR without an argument will default to \fB\-\-timings=html\fR\&. Specifying an output format (rather than the default) is unstable and requires \fB\-Zunstable\-options\fR\&. Valid output formats: .sp .RS 4 \h'-04'\(bu\h'+02'\fBhtml\fR (unstable, requires \fB\-Zunstable\-options\fR): Write a human\-readable file \fBcargo\-timing.html\fR to the \fBtarget/cargo\-timings\fR directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine\-readable timing data. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\fR (unstable, requires \fB\-Zunstable\-options\fR): Emit machine\-readable JSON information about timing information. .RE .RE .SS "Output Options" .sp \fB\-\-target\-dir\fR \fIdirectory\fR .RS 4 Directory for all generated artifacts and intermediate files. May also be specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the \fBbuild.target\-dir\fR \fIconfig value\fR \&. Defaults to \fBtarget\fR in the root of the workspace. .RE .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. 
May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .sp \fB\-\-message\-format\fR \fIfmt\fR .RS 4 The output format for diagnostic messages. Can be specified multiple times and consists of comma\-separated values. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with \fBshort\fR and \fBjson\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR and \fBjson\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See \fIthe reference\fR for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains the "short" rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&. 
.RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&. .RE .RE .SS "Manifest Options" .sp \fB\-\-manifest\-path\fR \fIpath\fR .RS 4 Path to the \fBCargo.toml\fR file. By default, Cargo searches for the \fBCargo.toml\fR file in the current directory or any parent directory. .RE .sp \fB\-\-frozen\fR, \fB\-\-locked\fR .RS 4 Either of these flags requires that the \fBCargo.lock\fR file is up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from attempting to access the network to determine if it is out\-of\-date. .sp These may be used in environments where you want to assert that the \fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network access. .RE .sp \fB\-\-offline\fR .RS 4 Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. .sp Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the \fBcargo\-fetch\fR(1) command to download dependencies before going offline. .sp May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. .RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). 
See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SS "Miscellaneous Options" .sp \fB\-j\fR \fIN\fR, \fB\-\-jobs\fR \fIN\fR .RS 4 Number of parallel jobs to run. May also be specified with the \fBbuild.jobs\fR \fIconfig value\fR \&. Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0. .RE .sp \fB\-\-keep\-going\fR .RS 4 Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires \fB\-Zunstable\-options\fR\&. .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. 
.RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Build the local package and run its main target (assuming only one binary): .sp .RS 4 .nf cargo run .fi .RE .RE .sp .RS 4 \h'-04' 2.\h'+01'Run an example with extra arguments: .sp .RS 4 .nf cargo run \-\-example exname \-\- \-\-exoption exarg1 exarg2 .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1), \fBcargo\-build\fR(1) cargo-0.66.0/src/etc/man/cargo-rustc.1000066400000000000000000000376621432416201200173370ustar00rootroot00000000000000'\" t .TH "CARGO\-RUSTC" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-rustc \- Compile the current package, and pass extra options to the compiler .SH "SYNOPSIS" \fBcargo rustc\fR [\fIoptions\fR] [\fB\-\-\fR \fIargs\fR] .SH "DESCRIPTION" The specified target for the current package (or package specified by \fB\-p\fR if provided) will be compiled along with all of its dependencies. The specified \fIargs\fR will all be passed to the final compiler invocation, not any of the dependencies. Note that the compiler will still unconditionally receive arguments such as \fB\-L\fR, \fB\-\-extern\fR, and \fB\-\-crate\-type\fR, and the specified \fIargs\fR will simply be added to the compiler invocation. .sp See for documentation on rustc flags. .sp This command requires that only one target is being compiled when additional arguments are provided. If more than one target is available for the current package the filters of \fB\-\-lib\fR, \fB\-\-bin\fR, etc, must be used to select which target is compiled. .sp To pass flags to all compiler processes spawned by Cargo, use the \fBRUSTFLAGS\fR \fIenvironment variable\fR or the \fBbuild.rustflags\fR \fIconfig value\fR \&. .SH "OPTIONS" .SS "Package Selection" By default, the package in the current working directory is selected. The \fB\-p\fR flag can be used to choose a different package in a workspace. .sp \fB\-p\fR \fIspec\fR, \fB\-\-package\fR \fIspec\fR .RS 4 The package to build. See \fBcargo\-pkgid\fR(1) for the SPEC format. 
.RE .SS "Target Selection" When no target selection options are given, \fBcargo rustc\fR will build all binary and library targets of the selected package. .sp Binary targets are automatically built if there is an integration test or benchmark being selected to build. This allows an integration test to execute the binary to exercise and test its behavior. The \fBCARGO_BIN_EXE_\fR \fIenvironment variable\fR is set when the integration test is built so that it can use the \fI\f(BIenv\fI macro\fR to locate the executable. .sp Passing target selection flags will build only the specified targets. .sp Note that \fB\-\-bin\fR, \fB\-\-example\fR, \fB\-\-test\fR and \fB\-\-bench\fR flags also support common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each glob pattern. .sp \fB\-\-lib\fR .RS 4 Build the package's library. .RE .sp \fB\-\-bin\fR \fIname\fR\&... .RS 4 Build the specified binary. This flag may be specified multiple times and supports common Unix glob patterns. .RE .sp \fB\-\-bins\fR .RS 4 Build all binary targets. .RE .sp \fB\-\-example\fR \fIname\fR\&... .RS 4 Build the specified example. This flag may be specified multiple times and supports common Unix glob patterns. .RE .sp \fB\-\-examples\fR .RS 4 Build all example targets. .RE .sp \fB\-\-test\fR \fIname\fR\&... .RS 4 Build the specified integration test. This flag may be specified multiple times and supports common Unix glob patterns. .RE .sp \fB\-\-tests\fR .RS 4 Build all targets in test mode that have the \fBtest = true\fR manifest flag set. By default this includes the library and binaries built as unittests, and integration tests. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a unittest, and once as a dependency for binaries, integration tests, etc.). 
Targets may be enabled or disabled by setting the \fBtest\fR flag in the manifest settings for the target. .RE .sp \fB\-\-bench\fR \fIname\fR\&... .RS 4 Build the specified benchmark. This flag may be specified multiple times and supports common Unix glob patterns. .RE .sp \fB\-\-benches\fR .RS 4 Build all targets in benchmark mode that have the \fBbench = true\fR manifest flag set. By default this includes the library and binaries built as benchmarks, and bench targets. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a benchmark, and once as a dependency for binaries, benchmarks, etc.). Targets may be enabled or disabled by setting the \fBbench\fR flag in the manifest settings for the target. .RE .sp \fB\-\-all\-targets\fR .RS 4 Build all targets. This is equivalent to specifying \fB\-\-lib \-\-bins \-\-tests \-\-benches \-\-examples\fR\&. .RE .SS "Feature Selection" The feature flags allow you to control which features are enabled. When no feature options are given, the \fBdefault\fR feature is activated for every selected package. .sp See \fIthe features documentation\fR for more details. .sp \fB\-F\fR \fIfeatures\fR, \fB\-\-features\fR \fIfeatures\fR .RS 4 Space or comma separated list of features to activate. Features of workspace members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may be specified multiple times, which enables all specified features. .RE .sp \fB\-\-all\-features\fR .RS 4 Activate all available features of all selected packages. .RE .sp \fB\-\-no\-default\-features\fR .RS 4 Do not activate the \fBdefault\fR feature of the selected packages. .RE .SS "Compilation Options" .sp \fB\-\-target\fR \fItriple\fR .RS 4 Build for the given architecture. The default is the host architecture. The general format of the triple is \fB\-\-\-\fR\&. Run \fBrustc \-\-print target\-list\fR for a list of supported targets. This flag may be specified multiple times. 
.sp This may also be specified with the \fBbuild.target\fR \fIconfig value\fR \&. .sp Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the \fIbuild cache\fR documentation for more details. .RE .sp \fB\-r\fR, \fB\-\-release\fR .RS 4 Build optimized artifacts with the \fBrelease\fR profile. See also the \fB\-\-profile\fR option for choosing a specific profile by name. .RE .sp \fB\-\-profile\fR \fIname\fR .RS 4 Build with the given profile. .sp The \fBrustc\fR subcommand will treat the following named profiles with special behaviors: .sp .RS 4 \h'-04'\(bu\h'+02'\fBcheck\fR \[em] Builds in the same way as the \fBcargo\-check\fR(1) command with the \fBdev\fR profile. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBtest\fR \[em] Builds in the same way as the \fBcargo\-test\fR(1) command, enabling building in test mode which will enable tests and enable the \fBtest\fR cfg option. See \fIrustc tests\fR for more detail. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBbench\fR \[em] Builds in the same was as the \fBcargo\-bench\fR(1) command, similar to the \fBtest\fR profile. .RE .sp See the \fIthe reference\fR for more details on profiles. .RE .sp \fB\-\-ignore\-rust\-version\fR .RS 4 Build the target even if the selected Rust compiler is older than the required Rust version as configured in the project's \fBrust\-version\fR field. .RE .sp \fB\-\-timings=\fR\fIfmts\fR .RS 4 Output information how long each compilation takes, and track concurrency information over time. Accepts an optional comma\-separated list of output formats; \fB\-\-timings\fR without an argument will default to \fB\-\-timings=html\fR\&. Specifying an output format (rather than the default) is unstable and requires \fB\-Zunstable\-options\fR\&. 
Valid output formats: .sp .RS 4 \h'-04'\(bu\h'+02'\fBhtml\fR (unstable, requires \fB\-Zunstable\-options\fR): Write a human\-readable file \fBcargo\-timing.html\fR to the \fBtarget/cargo\-timings\fR directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine\-readable timing data. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\fR (unstable, requires \fB\-Zunstable\-options\fR): Emit machine\-readable JSON information about timing information. .RE .RE .sp \fB\-\-crate\-type\fR \fIcrate\-type\fR .RS 4 Build for the given crate type. This flag accepts a comma\-separated list of 1 or more crate types, of which the allowed values are the same as \fBcrate\-type\fR field in the manifest for configurating a Cargo target. See \fI\f(BIcrate\-type\fI field\fR for possible values. .sp If the manifest contains a list, and \fB\-\-crate\-type\fR is provided, the command\-line argument value will override what is in the manifest. .sp This flag only works when building a \fBlib\fR or \fBexample\fR library target. .RE .SS "Output Options" .sp \fB\-\-target\-dir\fR \fIdirectory\fR .RS 4 Directory for all generated artifacts and intermediate files. May also be specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the \fBbuild.target\-dir\fR \fIconfig value\fR \&. Defaults to \fBtarget\fR in the root of the workspace. .RE .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. 
.RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .sp \fB\-\-message\-format\fR \fIfmt\fR .RS 4 The output format for diagnostic messages. Can be specified multiple times and consists of comma\-separated values. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with \fBshort\fR and \fBjson\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR and \fBjson\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See \fIthe reference\fR for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains the "short" rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&. .RE .RE .SS "Manifest Options" .sp \fB\-\-manifest\-path\fR \fIpath\fR .RS 4 Path to the \fBCargo.toml\fR file. 
By default, Cargo searches for the \fBCargo.toml\fR file in the current directory or any parent directory. .RE .sp \fB\-\-frozen\fR, \fB\-\-locked\fR .RS 4 Either of these flags requires that the \fBCargo.lock\fR file is up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from attempting to access the network to determine if it is out\-of\-date. .sp These may be used in environments where you want to assert that the \fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network access. .RE .sp \fB\-\-offline\fR .RS 4 Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. .sp Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the \fBcargo\-fetch\fR(1) command to download dependencies before going offline. .sp May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. .RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. 
.RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SS "Miscellaneous Options" .sp \fB\-j\fR \fIN\fR, \fB\-\-jobs\fR \fIN\fR .RS 4 Number of parallel jobs to run. May also be specified with the \fBbuild.jobs\fR \fIconfig value\fR \&. Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0. .RE .sp \fB\-\-keep\-going\fR .RS 4 Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires \fB\-Zunstable\-options\fR\&. .RE .sp \fB\-\-future\-incompat\-report\fR .RS 4 Displays a future\-incompat report for any future\-incompatible warnings produced during execution of this command .sp See \fBcargo\-report\fR(1) .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. 
.RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Check if your package (not including dependencies) uses unsafe code: .sp .RS 4 .nf cargo rustc \-\-lib \-\- \-D unsafe\-code .fi .RE .RE .sp .RS 4 \h'-04' 2.\h'+01'Try an experimental flag on the nightly compiler, such as this which prints the size of every type: .sp .RS 4 .nf cargo rustc \-\-lib \-\- \-Z print\-type\-sizes .fi .RE .RE .sp .RS 4 \h'-04' 3.\h'+01'Override \fBcrate\-type\fR field in Cargo.toml with command\-line option: .sp .RS 4 .nf cargo rustc \-\-lib \-\-crate\-type lib,cdylib .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1), \fBcargo\-build\fR(1), \fBrustc\fR(1) cargo-0.66.0/src/etc/man/cargo-rustdoc.1000066400000000000000000000341101432416201200176430ustar00rootroot00000000000000'\" t .TH "CARGO\-RUSTDOC" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-rustdoc \- Build a package's documentation, using specified custom flags .SH "SYNOPSIS" \fBcargo rustdoc\fR [\fIoptions\fR] [\fB\-\-\fR \fIargs\fR] .SH "DESCRIPTION" The specified target for the current package (or package specified by \fB\-p\fR if provided) will be documented with the specified \fIargs\fR being passed to the final rustdoc invocation. Dependencies will not be documented as part of this command. Note that rustdoc will still unconditionally receive arguments such as \fB\-L\fR, \fB\-\-extern\fR, and \fB\-\-crate\-type\fR, and the specified \fIargs\fR will simply be added to the rustdoc invocation. .sp See for documentation on rustdoc flags. .sp This command requires that only one target is being compiled when additional arguments are provided. If more than one target is available for the current package the filters of \fB\-\-lib\fR, \fB\-\-bin\fR, etc, must be used to select which target is compiled. .sp To pass flags to all rustdoc processes spawned by Cargo, use the \fBRUSTDOCFLAGS\fR \fIenvironment variable\fR or the \fBbuild.rustdocflags\fR \fIconfig value\fR \&. 
.SH "OPTIONS" .SS "Documentation Options" .sp \fB\-\-open\fR .RS 4 Open the docs in a browser after building them. This will use your default browser unless you define another one in the \fBBROWSER\fR environment variable or use the \fI\f(BIdoc.browser\fI\fR configuration option. .RE .SS "Package Selection" By default, the package in the current working directory is selected. The \fB\-p\fR flag can be used to choose a different package in a workspace. .sp \fB\-p\fR \fIspec\fR, \fB\-\-package\fR \fIspec\fR .RS 4 The package to document. See \fBcargo\-pkgid\fR(1) for the SPEC format. .RE .SS "Target Selection" When no target selection options are given, \fBcargo rustdoc\fR will document all binary and library targets of the selected package. The binary will be skipped if its name is the same as the lib target. Binaries are skipped if they have \fBrequired\-features\fR that are missing. .sp Passing target selection flags will document only the specified targets. .sp Note that \fB\-\-bin\fR, \fB\-\-example\fR, \fB\-\-test\fR and \fB\-\-bench\fR flags also support common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each glob pattern. .sp \fB\-\-lib\fR .RS 4 Document the package's library. .RE .sp \fB\-\-bin\fR \fIname\fR\&... .RS 4 Document the specified binary. This flag may be specified multiple times and supports common Unix glob patterns. .RE .sp \fB\-\-bins\fR .RS 4 Document all binary targets. .RE .sp \fB\-\-example\fR \fIname\fR\&... .RS 4 Document the specified example. This flag may be specified multiple times and supports common Unix glob patterns. .RE .sp \fB\-\-examples\fR .RS 4 Document all example targets. .RE .sp \fB\-\-test\fR \fIname\fR\&... .RS 4 Document the specified integration test. This flag may be specified multiple times and supports common Unix glob patterns. 
.RE .sp \fB\-\-tests\fR .RS 4 Document all targets in test mode that have the \fBtest = true\fR manifest flag set. By default this includes the library and binaries built as unittests, and integration tests. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a unittest, and once as a dependency for binaries, integration tests, etc.). Targets may be enabled or disabled by setting the \fBtest\fR flag in the manifest settings for the target. .RE .sp \fB\-\-bench\fR \fIname\fR\&... .RS 4 Document the specified benchmark. This flag may be specified multiple times and supports common Unix glob patterns. .RE .sp \fB\-\-benches\fR .RS 4 Document all targets in benchmark mode that have the \fBbench = true\fR manifest flag set. By default this includes the library and binaries built as benchmarks, and bench targets. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a benchmark, and once as a dependency for binaries, benchmarks, etc.). Targets may be enabled or disabled by setting the \fBbench\fR flag in the manifest settings for the target. .RE .sp \fB\-\-all\-targets\fR .RS 4 Document all targets. This is equivalent to specifying \fB\-\-lib \-\-bins \-\-tests \-\-benches \-\-examples\fR\&. .RE .SS "Feature Selection" The feature flags allow you to control which features are enabled. When no feature options are given, the \fBdefault\fR feature is activated for every selected package. .sp See \fIthe features documentation\fR for more details. .sp \fB\-F\fR \fIfeatures\fR, \fB\-\-features\fR \fIfeatures\fR .RS 4 Space or comma separated list of features to activate. Features of workspace members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may be specified multiple times, which enables all specified features. .RE .sp \fB\-\-all\-features\fR .RS 4 Activate all available features of all selected packages. 
.RE .sp \fB\-\-no\-default\-features\fR .RS 4 Do not activate the \fBdefault\fR feature of the selected packages. .RE .SS "Compilation Options" .sp \fB\-\-target\fR \fItriple\fR .RS 4 Document for the given architecture. The default is the host architecture. The general format of the triple is \fB\-\-\-\fR\&. Run \fBrustc \-\-print target\-list\fR for a list of supported targets. This flag may be specified multiple times. .sp This may also be specified with the \fBbuild.target\fR \fIconfig value\fR \&. .sp Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the \fIbuild cache\fR documentation for more details. .RE .sp \fB\-r\fR, \fB\-\-release\fR .RS 4 Document optimized artifacts with the \fBrelease\fR profile. See also the \fB\-\-profile\fR option for choosing a specific profile by name. .RE .sp \fB\-\-profile\fR \fIname\fR .RS 4 Document with the given profile. See the \fIthe reference\fR for more details on profiles. .RE .sp \fB\-\-ignore\-rust\-version\fR .RS 4 Document the target even if the selected Rust compiler is older than the required Rust version as configured in the project's \fBrust\-version\fR field. .RE .sp \fB\-\-timings=\fR\fIfmts\fR .RS 4 Output information how long each compilation takes, and track concurrency information over time. Accepts an optional comma\-separated list of output formats; \fB\-\-timings\fR without an argument will default to \fB\-\-timings=html\fR\&. Specifying an output format (rather than the default) is unstable and requires \fB\-Zunstable\-options\fR\&. Valid output formats: .sp .RS 4 \h'-04'\(bu\h'+02'\fBhtml\fR (unstable, requires \fB\-Zunstable\-options\fR): Write a human\-readable file \fBcargo\-timing.html\fR to the \fBtarget/cargo\-timings\fR directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. 
HTML output is suitable for human consumption only, and does not provide machine\-readable timing data. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\fR (unstable, requires \fB\-Zunstable\-options\fR): Emit machine\-readable JSON information about timing information. .RE .RE .SS "Output Options" .sp \fB\-\-target\-dir\fR \fIdirectory\fR .RS 4 Directory for all generated artifacts and intermediate files. May also be specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the \fBbuild.target\-dir\fR \fIconfig value\fR \&. Defaults to \fBtarget\fR in the root of the workspace. .RE .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .sp \fB\-\-message\-format\fR \fIfmt\fR .RS 4 The output format for diagnostic messages. Can be specified multiple times and consists of comma\-separated values. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with \fBshort\fR and \fBjson\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR and \fBjson\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. 
See \fIthe reference\fR for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains the "short" rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&. .RE .RE .SS "Manifest Options" .sp \fB\-\-manifest\-path\fR \fIpath\fR .RS 4 Path to the \fBCargo.toml\fR file. By default, Cargo searches for the \fBCargo.toml\fR file in the current directory or any parent directory. .RE .sp \fB\-\-frozen\fR, \fB\-\-locked\fR .RS 4 Either of these flags requires that the \fBCargo.lock\fR file is up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from attempting to access the network to determine if it is out\-of\-date. .sp These may be used in environments where you want to assert that the \fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network access. .RE .sp \fB\-\-offline\fR .RS 4 Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. .sp Beware that this may result in different dependency resolution than online mode. 
Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the \fBcargo\-fetch\fR(1) command to download dependencies before going offline. .sp May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. .RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SS "Miscellaneous Options" .sp \fB\-j\fR \fIN\fR, \fB\-\-jobs\fR \fIN\fR .RS 4 Number of parallel jobs to run. May also be specified with the \fBbuild.jobs\fR \fIconfig value\fR \&. Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0. .RE .sp \fB\-\-keep\-going\fR .RS 4 Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires \fB\-Zunstable\-options\fR\&. .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. 
.RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Build documentation with custom CSS included from a given file: .sp .RS 4 .nf cargo rustdoc \-\-lib \-\- \-\-extend\-css extra.css .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1), \fBcargo\-doc\fR(1), \fBrustdoc\fR(1) cargo-0.66.0/src/etc/man/cargo-search.1000066400000000000000000000066251432416201200174370ustar00rootroot00000000000000'\" t .TH "CARGO\-SEARCH" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-search \- Search packages in crates.io .SH "SYNOPSIS" \fBcargo search\fR [\fIoptions\fR] [\fIquery\fR\&...] .SH "DESCRIPTION" This performs a textual search for crates on \&. The matching crates will be displayed along with their description in TOML format suitable for copying into a \fBCargo.toml\fR manifest. .SH "OPTIONS" .SS "Search Options" .sp \fB\-\-limit\fR \fIlimit\fR .RS 4 Limit the number of results (default: 10, max: 100). .RE .sp \fB\-\-index\fR \fIindex\fR .RS 4 The URL of the registry index to use. .RE .sp \fB\-\-registry\fR \fIregistry\fR .RS 4 Name of the registry to use. Registry names are defined in \fICargo config files\fR \&. If not specified, the default registry is used, which is defined by the \fBregistry.default\fR config key which defaults to \fBcrates\-io\fR\&. .RE .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. 
.RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. .RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Search for a package from crates.io: .sp .RS 4 .nf cargo search serde .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1), \fBcargo\-install\fR(1), \fBcargo\-publish\fR(1) cargo-0.66.0/src/etc/man/cargo-test.1000066400000000000000000000461771432416201200171570ustar00rootroot00000000000000'\" t .TH "CARGO\-TEST" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-test \- Execute unit and integration tests of a package .SH "SYNOPSIS" \fBcargo test\fR [\fIoptions\fR] [\fItestname\fR] [\fB\-\-\fR \fItest\-options\fR] .SH "DESCRIPTION" Compile and execute unit, integration, and documentation tests. 
.sp The test filtering argument \fBTESTNAME\fR and all the arguments following the two dashes (\fB\-\-\fR) are passed to the test binaries and thus to \fIlibtest\fR (rustc's built in unit\-test and micro\-benchmarking framework). If you're passing arguments to both Cargo and the binary, the ones after \fB\-\-\fR go to the binary, the ones before go to Cargo. For details about libtest's arguments see the output of \fBcargo test \-\- \-\-help\fR and check out the rustc book's chapter on how tests work at \&. .sp As an example, this will filter for tests with \fBfoo\fR in their name and run them on 3 threads in parallel: .sp .RS 4 .nf cargo test foo \-\- \-\-test\-threads 3 .fi .RE .sp Tests are built with the \fB\-\-test\fR option to \fBrustc\fR which creates a special executable by linking your code with libtest. The executable automatically runs all functions annotated with the \fB#[test]\fR attribute in multiple threads. \fB#[bench]\fR annotated functions will also be run with one iteration to verify that they are functional. .sp If the package contains multiple test targets, each target compiles to a special executable as aforementioned, and then is run serially. .sp The libtest harness may be disabled by setting \fBharness = false\fR in the target manifest settings, in which case your code will need to provide its own \fBmain\fR function to handle running tests. .SS "Documentation tests" Documentation tests are also run by default, which is handled by \fBrustdoc\fR\&. It extracts code samples from documentation comments of the library target, and then executes them. .sp Different from normal test targets, each code block compiles to a doctest executable on the fly with \fBrustc\fR\&. These executables run in parallel in separate processes. The compilation of a code block is in fact a part of test function controlled by libtest, so some options such as \fB\-\-jobs\fR might not take effect. 
Note that this execution model of doctests is not guaranteed and may change in the future; beware of depending on it. .sp See the \fIrustdoc book\fR for more information on writing doc tests. .SH "OPTIONS" .SS "Test Options" .sp \fB\-\-no\-run\fR .RS 4 Compile, but don't run tests. .RE .sp \fB\-\-no\-fail\-fast\fR .RS 4 Run all tests regardless of failure. Without this flag, Cargo will exit after the first executable fails. The Rust test harness will run all tests within the executable to completion, this flag only applies to the executable as a whole. .RE .SS "Package Selection" By default, when no package selection options are given, the packages selected depend on the selected manifest file (based on the current working directory if \fB\-\-manifest\-path\fR is not given). If the manifest is the root of a workspace then the workspaces default members are selected, otherwise only the package defined by the manifest will be selected. .sp The default members of a workspace can be set explicitly with the \fBworkspace.default\-members\fR key in the root manifest. If this is not set, a virtual workspace will include all workspace members (equivalent to passing \fB\-\-workspace\fR), and a non\-virtual workspace will include only the root crate itself. .sp \fB\-p\fR \fIspec\fR\&..., \fB\-\-package\fR \fIspec\fR\&... .RS 4 Test only the specified packages. See \fBcargo\-pkgid\fR(1) for the SPEC format. This flag may be specified multiple times and supports common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. .RE .sp \fB\-\-workspace\fR .RS 4 Test all members in the workspace. .RE .sp \fB\-\-all\fR .RS 4 Deprecated alias for \fB\-\-workspace\fR\&. .RE .sp \fB\-\-exclude\fR \fISPEC\fR\&... .RS 4 Exclude the specified packages. Must be used in conjunction with the \fB\-\-workspace\fR flag. 
This flag may be specified multiple times and supports common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. .RE .SS "Target Selection" When no target selection options are given, \fBcargo test\fR will build the following targets of the selected packages: .sp .RS 4 \h'-04'\(bu\h'+02'lib \[em] used to link with binaries, examples, integration tests, and doc tests .RE .sp .RS 4 \h'-04'\(bu\h'+02'bins (only if integration tests are built and required features are available) .RE .sp .RS 4 \h'-04'\(bu\h'+02'examples \[em] to ensure they compile .RE .sp .RS 4 \h'-04'\(bu\h'+02'lib as a unit test .RE .sp .RS 4 \h'-04'\(bu\h'+02'bins as unit tests .RE .sp .RS 4 \h'-04'\(bu\h'+02'integration tests .RE .sp .RS 4 \h'-04'\(bu\h'+02'doc tests for the lib target .RE .sp The default behavior can be changed by setting the \fBtest\fR flag for the target in the manifest settings. Setting examples to \fBtest = true\fR will build and run the example as a test. Setting targets to \fBtest = false\fR will stop them from being tested by default. Target selection options that take a target by name ignore the \fBtest\fR flag and will always test the given target. .sp Doc tests for libraries may be disabled by setting \fBdoctest = false\fR for the library in the manifest. .sp Binary targets are automatically built if there is an integration test or benchmark being selected to test. This allows an integration test to execute the binary to exercise and test its behavior. The \fBCARGO_BIN_EXE_\fR \fIenvironment variable\fR is set when the integration test is built so that it can use the \fI\f(BIenv\fI macro\fR to locate the executable. .sp Passing target selection flags will test only the specified targets. 
.sp Note that \fB\-\-bin\fR, \fB\-\-example\fR, \fB\-\-test\fR and \fB\-\-bench\fR flags also support common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each glob pattern. .sp \fB\-\-lib\fR .RS 4 Test the package's library. .RE .sp \fB\-\-bin\fR \fIname\fR\&... .RS 4 Test the specified binary. This flag may be specified multiple times and supports common Unix glob patterns. .RE .sp \fB\-\-bins\fR .RS 4 Test all binary targets. .RE .sp \fB\-\-example\fR \fIname\fR\&... .RS 4 Test the specified example. This flag may be specified multiple times and supports common Unix glob patterns. .RE .sp \fB\-\-examples\fR .RS 4 Test all example targets. .RE .sp \fB\-\-test\fR \fIname\fR\&... .RS 4 Test the specified integration test. This flag may be specified multiple times and supports common Unix glob patterns. .RE .sp \fB\-\-tests\fR .RS 4 Test all targets in test mode that have the \fBtest = true\fR manifest flag set. By default this includes the library and binaries built as unittests, and integration tests. Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a unittest, and once as a dependency for binaries, integration tests, etc.). Targets may be enabled or disabled by setting the \fBtest\fR flag in the manifest settings for the target. .RE .sp \fB\-\-bench\fR \fIname\fR\&... .RS 4 Test the specified benchmark. This flag may be specified multiple times and supports common Unix glob patterns. .RE .sp \fB\-\-benches\fR .RS 4 Test all targets in benchmark mode that have the \fBbench = true\fR manifest flag set. By default this includes the library and binaries built as benchmarks, and bench targets. 
Be aware that this will also build any required dependencies, so the lib target may be built twice (once as a benchmark, and once as a dependency for binaries, benchmarks, etc.). Targets may be enabled or disabled by setting the \fBbench\fR flag in the manifest settings for the target. .RE .sp \fB\-\-all\-targets\fR .RS 4 Test all targets. This is equivalent to specifying \fB\-\-lib \-\-bins \-\-tests \-\-benches \-\-examples\fR\&. .RE .sp \fB\-\-doc\fR .RS 4 Test only the library's documentation. This cannot be mixed with other target options. .RE .SS "Feature Selection" The feature flags allow you to control which features are enabled. When no feature options are given, the \fBdefault\fR feature is activated for every selected package. .sp See \fIthe features documentation\fR for more details. .sp \fB\-F\fR \fIfeatures\fR, \fB\-\-features\fR \fIfeatures\fR .RS 4 Space or comma separated list of features to activate. Features of workspace members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may be specified multiple times, which enables all specified features. .RE .sp \fB\-\-all\-features\fR .RS 4 Activate all available features of all selected packages. .RE .sp \fB\-\-no\-default\-features\fR .RS 4 Do not activate the \fBdefault\fR feature of the selected packages. .RE .SS "Compilation Options" .sp \fB\-\-target\fR \fItriple\fR .RS 4 Test for the given architecture. The default is the host architecture. The general format of the triple is \fB\-\-\-\fR\&. Run \fBrustc \-\-print target\-list\fR for a list of supported targets. This flag may be specified multiple times. .sp This may also be specified with the \fBbuild.target\fR \fIconfig value\fR \&. .sp Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the \fIbuild cache\fR documentation for more details. .RE .sp \fB\-r\fR, \fB\-\-release\fR .RS 4 Test optimized artifacts with the \fBrelease\fR profile. 
See also the \fB\-\-profile\fR option for choosing a specific profile by name. .RE .sp \fB\-\-profile\fR \fIname\fR .RS 4 Test with the given profile. See the \fIthe reference\fR for more details on profiles. .RE .sp \fB\-\-ignore\-rust\-version\fR .RS 4 Test the target even if the selected Rust compiler is older than the required Rust version as configured in the project's \fBrust\-version\fR field. .RE .sp \fB\-\-timings=\fR\fIfmts\fR .RS 4 Output information how long each compilation takes, and track concurrency information over time. Accepts an optional comma\-separated list of output formats; \fB\-\-timings\fR without an argument will default to \fB\-\-timings=html\fR\&. Specifying an output format (rather than the default) is unstable and requires \fB\-Zunstable\-options\fR\&. Valid output formats: .sp .RS 4 \h'-04'\(bu\h'+02'\fBhtml\fR (unstable, requires \fB\-Zunstable\-options\fR): Write a human\-readable file \fBcargo\-timing.html\fR to the \fBtarget/cargo\-timings\fR directory with a report of the compilation. Also write a report to the same directory with a timestamp in the filename if you want to look at older runs. HTML output is suitable for human consumption only, and does not provide machine\-readable timing data. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\fR (unstable, requires \fB\-Zunstable\-options\fR): Emit machine\-readable JSON information about timing information. .RE .RE .SS "Output Options" .sp \fB\-\-target\-dir\fR \fIdirectory\fR .RS 4 Directory for all generated artifacts and intermediate files. May also be specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the \fBbuild.target\-dir\fR \fIconfig value\fR \&. Defaults to \fBtarget\fR in the root of the workspace. .RE .SS "Display Options" By default the Rust test harness hides output from test execution to keep results readable. 
Test output can be recovered (e.g., for debugging) by passing \fB\-\-nocapture\fR to the test binaries: .sp .RS 4 .nf cargo test \-\- \-\-nocapture .fi .RE .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .sp \fB\-\-message\-format\fR \fIfmt\fR .RS 4 The output format for diagnostic messages. Can be specified multiple times and consists of comma\-separated values. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with \fBshort\fR and \fBjson\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR and \fBjson\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See \fIthe reference\fR for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains the "short" rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&. 
.RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&. .RE .RE .SS "Manifest Options" .sp \fB\-\-manifest\-path\fR \fIpath\fR .RS 4 Path to the \fBCargo.toml\fR file. By default, Cargo searches for the \fBCargo.toml\fR file in the current directory or any parent directory. .RE .sp \fB\-\-frozen\fR, \fB\-\-locked\fR .RS 4 Either of these flags requires that the \fBCargo.lock\fR file is up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from attempting to access the network to determine if it is out\-of\-date. .sp These may be used in environments where you want to assert that the \fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network access. .RE .sp \fB\-\-offline\fR .RS 4 Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. .sp Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the \fBcargo\-fetch\fR(1) command to download dependencies before going offline. .sp May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. 
.RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SS "Miscellaneous Options" The \fB\-\-jobs\fR argument affects the building of the test executable but does not affect how many threads are used when running the tests. The Rust test harness includes an option to control the number of threads used: .sp .RS 4 .nf cargo test \-j 2 \-\- \-\-test\-threads=2 .fi .RE .sp \fB\-j\fR \fIN\fR, \fB\-\-jobs\fR \fIN\fR .RS 4 Number of parallel jobs to run. May also be specified with the \fBbuild.jobs\fR \fIconfig value\fR \&. Defaults to the number of logical CPUs. If negative, it sets the maximum number of parallel jobs to the number of logical CPUs plus provided value. Should not be 0. .RE .sp \fB\-\-keep\-going\fR .RS 4 Build as many crates in the dependency graph as possible, rather than aborting the build on the first one that fails to build. Unstable, requires \fB\-Zunstable\-options\fR\&. .RE .sp \fB\-\-future\-incompat\-report\fR .RS 4 Displays a future\-incompat report for any future\-incompatible warnings produced during execution of this command .sp See \fBcargo\-report\fR(1) .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. 
.SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. .RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Execute all the unit and integration tests of the current package: .sp .RS 4 .nf cargo test .fi .RE .RE .sp .RS 4 \h'-04' 2.\h'+01'Run only tests whose names match against a filter string: .sp .RS 4 .nf cargo test name_filter .fi .RE .RE .sp .RS 4 \h'-04' 3.\h'+01'Run only a specific test within a specific integration test: .sp .RS 4 .nf cargo test \-\-test int_test_name \-\- modname::test_name .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1), \fBcargo\-bench\fR(1), \fItypes of tests\fR , \fIhow to write tests\fR cargo-0.66.0/src/etc/man/cargo-tree.1000066400000000000000000000350071432416201200171250ustar00rootroot00000000000000'\" t .TH "CARGO\-TREE" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-tree \- Display a tree visualization of a dependency graph .SH "SYNOPSIS" \fBcargo tree\fR [\fIoptions\fR] .SH "DESCRIPTION" This command will display a tree of dependencies to the terminal. An example of a simple project that depends on the "rand" package: .sp .RS 4 .nf myproject v0.1.0 (/myproject) `\-\- rand v0.7.3 |\-\- getrandom v0.1.14 | |\-\- cfg\-if v0.1.10 | `\-\- libc v0.2.68 |\-\- libc v0.2.68 (*) |\-\- rand_chacha v0.2.2 | |\-\- ppv\-lite86 v0.2.6 | `\-\- rand_core v0.5.1 | `\-\- getrandom v0.1.14 (*) `\-\- rand_core v0.5.1 (*) [build\-dependencies] `\-\- cc v1.0.50 .fi .RE .sp Packages marked with \fB(*)\fR have been "de\-duplicated". The dependencies for the package have already been shown elsewhere in the graph, and so are not repeated. Use the \fB\-\-no\-dedupe\fR option to repeat the duplicates. .sp The \fB\-e\fR flag can be used to select the dependency kinds to display. The "features" kind changes the output to display the features enabled by each dependency. 
For example, \fBcargo tree \-e features\fR: .sp .RS 4 .nf myproject v0.1.0 (/myproject) `\-\- log feature "serde" `\-\- log v0.4.8 |\-\- serde v1.0.106 `\-\- cfg\-if feature "default" `\-\- cfg\-if v0.1.10 .fi .RE .sp In this tree, \fBmyproject\fR depends on \fBlog\fR with the \fBserde\fR feature. \fBlog\fR in turn depends on \fBcfg\-if\fR with "default" features. When using \fB\-e features\fR it can be helpful to use \fB\-i\fR flag to show how the features flow into a package. See the examples below for more detail. .SH "OPTIONS" .SS "Tree Options" .sp \fB\-i\fR \fIspec\fR, \fB\-\-invert\fR \fIspec\fR .RS 4 Show the reverse dependencies for the given package. This flag will invert the tree and display the packages that depend on the given package. .sp Note that in a workspace, by default it will only display the package's reverse dependencies inside the tree of the workspace member in the current directory. The \fB\-\-workspace\fR flag can be used to extend it so that it will show the package's reverse dependencies across the entire workspace. The \fB\-p\fR flag can be used to display the package's reverse dependencies only with the subtree of the package given to \fB\-p\fR\&. .RE .sp \fB\-\-prune\fR \fIspec\fR .RS 4 Prune the given package from the display of the dependency tree. .RE .sp \fB\-\-depth\fR \fIdepth\fR .RS 4 Maximum display depth of the dependency tree. A depth of 1 displays the direct dependencies, for example. .RE .sp \fB\-\-no\-dedupe\fR .RS 4 Do not de\-duplicate repeated dependencies. Usually, when a package has already displayed its dependencies, further occurrences will not re\-display its dependencies, and will include a \fB(*)\fR to indicate it has already been shown. This flag will cause those duplicates to be repeated. .RE .sp \fB\-d\fR, \fB\-\-duplicates\fR .RS 4 Show only dependencies which come in multiple versions (implies \fB\-\-invert\fR). 
When used with the \fB\-p\fR flag, only shows duplicates within the subtree of the given package. .sp It can be beneficial for build times and executable sizes to avoid building that same package multiple times. This flag can help identify the offending packages. You can then investigate if the package that depends on the duplicate with the older version can be updated to the newer version so that only one instance is built. .RE .sp \fB\-e\fR \fIkinds\fR, \fB\-\-edges\fR \fIkinds\fR .RS 4 The dependency kinds to display. Takes a comma separated list of values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBall\fR \[em] Show all edge kinds. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnormal\fR \[em] Show normal dependencies. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBbuild\fR \[em] Show build dependencies. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBdev\fR \[em] Show development dependencies. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBfeatures\fR \[em] Show features enabled by each dependency. If this is the only kind given, then it will automatically include the other dependency kinds. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBno\-normal\fR \[em] Do not include normal dependencies. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBno\-build\fR \[em] Do not include build dependencies. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBno\-dev\fR \[em] Do not include development dependencies. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBno\-proc\-macro\fR \[em] Do not include procedural macro dependencies. .RE .sp The \fBnormal\fR, \fBbuild\fR, \fBdev\fR, and \fBall\fR dependency kinds cannot be mixed with \fBno\-normal\fR, \fBno\-build\fR, or \fBno\-dev\fR dependency kinds. .sp The default is \fBnormal,build,dev\fR\&. .RE .sp \fB\-\-target\fR \fItriple\fR .RS 4 Filter dependencies matching the given target\-triple. The default is the host platform. Use the value \fBall\fR to include \fIall\fR targets. .RE .SS "Tree Formatting Options" .sp \fB\-\-charset\fR \fIcharset\fR .RS 4 Chooses the character set to use for the tree. Valid values are "utf8" or "ascii". 
Default is "utf8". .RE .sp \fB\-f\fR \fIformat\fR, \fB\-\-format\fR \fIformat\fR .RS 4 Set the format string for each package. The default is "{p}". .sp This is an arbitrary string which will be used to display each package. The following strings will be replaced with the corresponding value: .sp .RS 4 \h'-04'\(bu\h'+02'\fB{p}\fR \[em] The package name. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB{l}\fR \[em] The package license. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB{r}\fR \[em] The package repository URL. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB{f}\fR \[em] Comma\-separated list of package features that are enabled. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB{lib}\fR \[em] The name, as used in a \fBuse\fR statement, of the package's library. .RE .RE .sp \fB\-\-prefix\fR \fIprefix\fR .RS 4 Sets how each line is displayed. The \fIprefix\fR value can be one of: .sp .RS 4 \h'-04'\(bu\h'+02'\fBindent\fR (default) \[em] Shows each line indented as a tree. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBdepth\fR \[em] Show as a list, with the numeric depth printed before each entry. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnone\fR \[em] Show as a flat list. .RE .RE .SS "Package Selection" By default, when no package selection options are given, the packages selected depend on the selected manifest file (based on the current working directory if \fB\-\-manifest\-path\fR is not given). If the manifest is the root of a workspace then the workspaces default members are selected, otherwise only the package defined by the manifest will be selected. .sp The default members of a workspace can be set explicitly with the \fBworkspace.default\-members\fR key in the root manifest. If this is not set, a virtual workspace will include all workspace members (equivalent to passing \fB\-\-workspace\fR), and a non\-virtual workspace will include only the root crate itself. .sp \fB\-p\fR \fIspec\fR\&..., \fB\-\-package\fR \fIspec\fR\&... .RS 4 Display only the specified packages. See \fBcargo\-pkgid\fR(1) for the SPEC format. 
This flag may be specified multiple times and supports common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. .RE .sp \fB\-\-workspace\fR .RS 4 Display all members in the workspace. .RE .sp \fB\-\-exclude\fR \fISPEC\fR\&... .RS 4 Exclude the specified packages. Must be used in conjunction with the \fB\-\-workspace\fR flag. This flag may be specified multiple times and supports common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally expanding glob patterns before Cargo handles them, you must use single quotes or double quotes around each pattern. .RE .SS "Manifest Options" .sp \fB\-\-manifest\-path\fR \fIpath\fR .RS 4 Path to the \fBCargo.toml\fR file. By default, Cargo searches for the \fBCargo.toml\fR file in the current directory or any parent directory. .RE .sp \fB\-\-frozen\fR, \fB\-\-locked\fR .RS 4 Either of these flags requires that the \fBCargo.lock\fR file is up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from attempting to access the network to determine if it is out\-of\-date. .sp These may be used in environments where you want to assert that the \fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network access. .RE .sp \fB\-\-offline\fR .RS 4 Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. .sp Beware that this may result in different dependency resolution than online mode. 
Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the \fBcargo\-fetch\fR(1) command to download dependencies before going offline. .sp May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. .RE .SS "Feature Selection" The feature flags allow you to control which features are enabled. When no feature options are given, the \fBdefault\fR feature is activated for every selected package. .sp See \fIthe features documentation\fR for more details. .sp \fB\-F\fR \fIfeatures\fR, \fB\-\-features\fR \fIfeatures\fR .RS 4 Space or comma separated list of features to activate. Features of workspace members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may be specified multiple times, which enables all specified features. .RE .sp \fB\-\-all\-features\fR .RS 4 Activate all available features of all selected packages. .RE .sp \fB\-\-no\-default\-features\fR .RS 4 Do not activate the \fBdefault\fR feature of the selected packages. .RE .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. 
.RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. .RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Display the tree for the package in the current directory: .sp .RS 4 .nf cargo tree .fi .RE .RE .sp .RS 4 \h'-04' 2.\h'+01'Display all the packages that depend on the \fBsyn\fR package: .sp .RS 4 .nf cargo tree \-i syn .fi .RE .RE .sp .RS 4 \h'-04' 3.\h'+01'Show the features enabled on each package: .sp .RS 4 .nf cargo tree \-\-format "{p} {f}" .fi .RE .RE .sp .RS 4 \h'-04' 4.\h'+01'Show all packages that are built multiple times. This can happen if multiple semver\-incompatible versions appear in the tree (like 1.0.0 and 2.0.0). .sp .RS 4 .nf cargo tree \-d .fi .RE .RE .sp .RS 4 \h'-04' 5.\h'+01'Explain why features are enabled for the \fBsyn\fR package: .sp .RS 4 .nf cargo tree \-e features \-i syn .fi .RE .sp The \fB\-e features\fR flag is used to show features. 
The \fB\-i\fR flag is used to invert the graph so that it displays the packages that depend on \fBsyn\fR\&. An example of what this would display: .sp .RS 4 .nf syn v1.0.17 |\-\- syn feature "clone\-impls" | `\-\- syn feature "default" | `\-\- rustversion v1.0.2 | `\-\- rustversion feature "default" | `\-\- myproject v0.1.0 (/myproject) | `\-\- myproject feature "default" (command\-line) |\-\- syn feature "default" (*) |\-\- syn feature "derive" | `\-\- syn feature "default" (*) |\-\- syn feature "full" | `\-\- rustversion v1.0.2 (*) |\-\- syn feature "parsing" | `\-\- syn feature "default" (*) |\-\- syn feature "printing" | `\-\- syn feature "default" (*) |\-\- syn feature "proc\-macro" | `\-\- syn feature "default" (*) `\-\- syn feature "quote" |\-\- syn feature "printing" (*) `\-\- syn feature "proc\-macro" (*) .fi .RE .sp To read this graph, you can follow the chain for each feature from the root to see why it is included. For example, the "full" feature is added by the \fBrustversion\fR crate which is included from \fBmyproject\fR (with the default features), and \fBmyproject\fR is the package selected on the command\-line. All of the other \fBsyn\fR features are added by the "default" feature ("quote" is added by "printing" and "proc\-macro", both of which are default features). .sp If you're having difficulty cross\-referencing the de\-duplicated \fB(*)\fR entries, try with the \fB\-\-no\-dedupe\fR flag to get the full output. .RE .SH "SEE ALSO" \fBcargo\fR(1), \fBcargo\-metadata\fR(1) cargo-0.66.0/src/etc/man/cargo-uninstall.1000066400000000000000000000072721432416201200202020ustar00rootroot00000000000000'\" t .TH "CARGO\-UNINSTALL" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-uninstall \- Remove a Rust binary .SH "SYNOPSIS" \fBcargo uninstall\fR [\fIoptions\fR] [\fIspec\fR\&...] .SH "DESCRIPTION" This command removes a package installed with \fBcargo\-install\fR(1). 
The \fIspec\fR argument is a package ID specification of the package to remove (see \fBcargo\-pkgid\fR(1)). .sp By default all binaries are removed for a crate but the \fB\-\-bin\fR and \fB\-\-example\fR flags can be used to only remove particular binaries. .sp The installation root is determined, in order of precedence: .sp .RS 4 \h'-04'\(bu\h'+02'\fB\-\-root\fR option .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBCARGO_INSTALL_ROOT\fR environment variable .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBinstall.root\fR Cargo \fIconfig value\fR .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBCARGO_HOME\fR environment variable .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB$HOME/.cargo\fR .RE .SH "OPTIONS" .SS "Install Options" .sp \fB\-p\fR, \fB\-\-package\fR \fIspec\fR\&... .RS 4 Package to uninstall. .RE .sp \fB\-\-bin\fR \fIname\fR\&... .RS 4 Only uninstall the binary \fIname\fR\&. .RE .sp \fB\-\-root\fR \fIdir\fR .RS 4 Directory to uninstall packages from. .RE .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. 
.RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. .RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Uninstall a previously installed package. .sp .RS 4 .nf cargo uninstall ripgrep .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1), \fBcargo\-install\fR(1) cargo-0.66.0/src/etc/man/cargo-update.1000066400000000000000000000137211432416201200174470ustar00rootroot00000000000000'\" t .TH "CARGO\-UPDATE" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-update \- Update dependencies as recorded in the local lock file .SH "SYNOPSIS" \fBcargo update\fR [\fIoptions\fR] .SH "DESCRIPTION" This command will update dependencies in the \fBCargo.lock\fR file to the latest version. If the \fBCargo.lock\fR file does not exist, it will be created with the latest available versions. .SH "OPTIONS" .SS "Update Options" .sp \fB\-p\fR \fIspec\fR\&..., \fB\-\-package\fR \fIspec\fR\&... .RS 4 Update only the specified packages. This flag may be specified multiple times. 
See \fBcargo\-pkgid\fR(1) for the SPEC format. .sp If packages are specified with the \fB\-p\fR flag, then a conservative update of the lockfile will be performed. This means that only the dependency specified by SPEC will be updated. Its transitive dependencies will be updated only if SPEC cannot be updated without updating dependencies. All other dependencies will remain locked at their currently recorded versions. .sp If \fB\-p\fR is not specified, all dependencies are updated. .RE .sp \fB\-\-aggressive\fR .RS 4 When used with \fB\-p\fR, dependencies of \fIspec\fR are forced to update as well. Cannot be used with \fB\-\-precise\fR\&. .RE .sp \fB\-\-precise\fR \fIprecise\fR .RS 4 When used with \fB\-p\fR, allows you to specify a specific version number to set the package to. If the package comes from a git repository, this can be a git revision (such as a SHA hash or tag). .RE .sp \fB\-w\fR, \fB\-\-workspace\fR .RS 4 Attempt to update only packages defined in the workspace. Other packages are updated only if they don't already exist in the lockfile. This option is useful for updating \fBCargo.lock\fR after you've changed version numbers in \fBCargo.toml\fR\&. .RE .sp \fB\-\-dry\-run\fR .RS 4 Displays what would be updated, but doesn't actually write the lockfile. .RE .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. 
.RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .SS "Manifest Options" .sp \fB\-\-manifest\-path\fR \fIpath\fR .RS 4 Path to the \fBCargo.toml\fR file. By default, Cargo searches for the \fBCargo.toml\fR file in the current directory or any parent directory. .RE .sp \fB\-\-frozen\fR, \fB\-\-locked\fR .RS 4 Either of these flags requires that the \fBCargo.lock\fR file is up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from attempting to access the network to determine if it is out\-of\-date. .sp These may be used in environments where you want to assert that the \fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network access. .RE .sp \fB\-\-offline\fR .RS 4 Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. .sp Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the \fBcargo\-fetch\fR(1) command to download dependencies before going offline. .sp May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. .RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. 
The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. .RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Update all dependencies in the lockfile: .sp .RS 4 .nf cargo update .fi .RE .RE .sp .RS 4 \h'-04' 2.\h'+01'Update only specific dependencies: .sp .RS 4 .nf cargo update \-p foo \-p bar .fi .RE .RE .sp .RS 4 \h'-04' 3.\h'+01'Set a specific dependency to a specific version: .sp .RS 4 .nf cargo update \-p foo \-\-precise 1.2.3 .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1), \fBcargo\-generate\-lockfile\fR(1) cargo-0.66.0/src/etc/man/cargo-vendor.1000066400000000000000000000135101432416201200174560ustar00rootroot00000000000000'\" t .TH "CARGO\-VENDOR" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-vendor \- Vendor all dependencies locally .SH "SYNOPSIS" \fBcargo vendor\fR [\fIoptions\fR] [\fIpath\fR] .SH "DESCRIPTION" This cargo subcommand will vendor all crates.io and git dependencies for a project into the specified directory at \fB\fR\&. After this command completes the vendor directory specified by \fB\fR will contain all remote sources from dependencies specified. Additional manifests beyond the default one can be specified with the \fB\-s\fR option. .sp The \fBcargo vendor\fR command will also print out the configuration necessary to use the vendored sources, which you will need to add to \fB\&.cargo/config.toml\fR\&. 
.SH "OPTIONS" .SS "Vendor Options" .sp \fB\-s\fR \fImanifest\fR, \fB\-\-sync\fR \fImanifest\fR .RS 4 Specify an extra \fBCargo.toml\fR manifest to workspaces which should also be vendored and synced to the output. May be specified multiple times. .RE .sp \fB\-\-no\-delete\fR .RS 4 Don't delete the "vendor" directory when vendoring, but rather keep all existing contents of the vendor directory .RE .sp \fB\-\-respect\-source\-config\fR .RS 4 Instead of ignoring \fB[source]\fR configuration by default in \fB\&.cargo/config.toml\fR read it and use it when downloading crates from crates.io, for example .RE .sp \fB\-\-versioned\-dirs\fR .RS 4 Normally versions are only added to disambiguate multiple versions of the same package. This option causes all directories in the "vendor" directory to be versioned, which makes it easier to track the history of vendored packages over time, and can help with the performance of re\-vendoring when only a subset of the packages have changed. .RE .SS "Manifest Options" .sp \fB\-\-manifest\-path\fR \fIpath\fR .RS 4 Path to the \fBCargo.toml\fR file. By default, Cargo searches for the \fBCargo.toml\fR file in the current directory or any parent directory. .RE .sp \fB\-\-frozen\fR, \fB\-\-locked\fR .RS 4 Either of these flags requires that the \fBCargo.lock\fR file is up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from attempting to access the network to determine if it is out\-of\-date. .sp These may be used in environments where you want to assert that the \fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network access. .RE .sp \fB\-\-offline\fR .RS 4 Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. 
.sp Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the \fBcargo\-fetch\fR(1) command to download dependencies before going offline. .sp May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. .RE .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. 
.RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. .RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Vendor all dependencies into a local "vendor" folder .sp .RS 4 .nf cargo vendor .fi .RE .RE .sp .RS 4 \h'-04' 2.\h'+01'Vendor all dependencies into a local "third\-party/vendor" folder .sp .RS 4 .nf cargo vendor third\-party/vendor .fi .RE .RE .sp .RS 4 \h'-04' 3.\h'+01'Vendor the current workspace as well as another to "vendor" .sp .RS 4 .nf cargo vendor \-s ../path/to/Cargo.toml .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1) cargo-0.66.0/src/etc/man/cargo-verify-project.1000066400000000000000000000106031432416201200211310ustar00rootroot00000000000000'\" t .TH "CARGO\-VERIFY\-PROJECT" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-verify\-project \- Check correctness of crate manifest .SH "SYNOPSIS" \fBcargo verify\-project\fR [\fIoptions\fR] .SH "DESCRIPTION" This command will parse the local manifest and check its validity. It emits a JSON object with the result. A successful validation will display: .sp .RS 4 .nf {"success":"true"} .fi .RE .sp An invalid workspace will display: .sp .RS 4 .nf {"invalid":"human\-readable error message"} .fi .RE .SH "OPTIONS" .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. 
Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .SS "Manifest Options" .sp \fB\-\-manifest\-path\fR \fIpath\fR .RS 4 Path to the \fBCargo.toml\fR file. By default, Cargo searches for the \fBCargo.toml\fR file in the current directory or any parent directory. .RE .sp \fB\-\-frozen\fR, \fB\-\-locked\fR .RS 4 Either of these flags requires that the \fBCargo.lock\fR file is up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from attempting to access the network to determine if it is out\-of\-date. .sp These may be used in environments where you want to assert that the \fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network access. .RE .sp \fB\-\-offline\fR .RS 4 Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. .sp Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the \fBcargo\-fetch\fR(1) command to download dependencies before going offline. .sp May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. .RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). 
See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: The workspace is OK. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB1\fR: The workspace is invalid. .RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Check the current workspace for errors: .sp .RS 4 .nf cargo verify\-project .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1), \fBcargo\-package\fR(1) cargo-0.66.0/src/etc/man/cargo-version.1000066400000000000000000000012251432416201200176460ustar00rootroot00000000000000'\" t .TH "CARGO\-VERSION" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-version \- Show version information .SH "SYNOPSIS" \fBcargo version\fR [\fIoptions\fR] .SH "DESCRIPTION" Displays the version of Cargo. .SH "OPTIONS" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Display additional version information. 
.RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Display the version: .sp .RS 4 .nf cargo version .fi .RE .RE .sp .RS 4 \h'-04' 2.\h'+01'The version is also available via flags: .sp .RS 4 .nf cargo \-\-version cargo \-V .fi .RE .RE .sp .RS 4 \h'-04' 3.\h'+01'Display extra version information: .sp .RS 4 .nf cargo \-Vv .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1) cargo-0.66.0/src/etc/man/cargo-yank.1000066400000000000000000000111551432416201200171260ustar00rootroot00000000000000'\" t .TH "CARGO\-YANK" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo\-yank \- Remove a pushed crate from the index .SH "SYNOPSIS" \fBcargo yank\fR [\fIoptions\fR] \fIcrate\fR@\fIversion\fR .br \fBcargo yank\fR [\fIoptions\fR] \fB\-\-version\fR \fIversion\fR [\fIcrate\fR] .SH "DESCRIPTION" The yank command removes a previously published crate's version from the server's index. This command does not delete any data, and the crate will still be available for download via the registry's download link. .sp Note that existing crates locked to a yanked version will still be able to download the yanked version to use it. Cargo will, however, not allow any new crates to be locked to any yanked version. .sp This command requires you to be authenticated with either the \fB\-\-token\fR option or using \fBcargo\-login\fR(1). .sp If the crate name is not specified, it will use the package name from the current directory. .SH "OPTIONS" .SS "Yank Options" .sp \fB\-\-vers\fR \fIversion\fR, \fB\-\-version\fR \fIversion\fR .RS 4 The version to yank or un\-yank. .RE .sp \fB\-\-undo\fR .RS 4 Undo a yank, putting a version back into the index. .RE .sp \fB\-\-token\fR \fItoken\fR .RS 4 API token to use when authenticating. This overrides the token stored in the credentials file (which is created by \fBcargo\-login\fR(1)). .sp \fICargo config\fR environment variables can be used to override the tokens stored in the credentials file. 
The token for crates.io may be specified with the \fBCARGO_REGISTRY_TOKEN\fR environment variable. Tokens for other registries may be specified with environment variables of the form \fBCARGO_REGISTRIES_NAME_TOKEN\fR where \fBNAME\fR is the name of the registry in all capital letters. .RE .sp \fB\-\-index\fR \fIindex\fR .RS 4 The URL of the registry index to use. .RE .sp \fB\-\-registry\fR \fIregistry\fR .RS 4 Name of the registry to use. Registry names are defined in \fICargo config files\fR \&. If not specified, the default registry is used, which is defined by the \fBregistry.default\fR config key which defaults to \fBcrates\-io\fR\&. .RE .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. 
The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. .RE .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Yank a crate from the index: .sp .RS 4 .nf cargo yank foo@1.0.7 .fi .RE .RE .SH "SEE ALSO" \fBcargo\fR(1), \fBcargo\-login\fR(1), \fBcargo\-publish\fR(1) cargo-0.66.0/src/etc/man/cargo.1000066400000000000000000000224001432416201200161610ustar00rootroot00000000000000'\" t .TH "CARGO" "1" .nh .ad l .ss \n[.ss] 0 .SH "NAME" cargo \- The Rust package manager .SH "SYNOPSIS" \fBcargo\fR [\fIoptions\fR] \fIcommand\fR [\fIargs\fR] .br \fBcargo\fR [\fIoptions\fR] \fB\-\-version\fR .br \fBcargo\fR [\fIoptions\fR] \fB\-\-list\fR .br \fBcargo\fR [\fIoptions\fR] \fB\-\-help\fR .br \fBcargo\fR [\fIoptions\fR] \fB\-\-explain\fR \fIcode\fR .SH "DESCRIPTION" This program is a package manager and build tool for the Rust language, available at \&. .SH "COMMANDS" .SS "Build Commands" \fBcargo\-bench\fR(1) .br \ \ \ \ Execute benchmarks of a package. .sp \fBcargo\-build\fR(1) .br \ \ \ \ Compile a package. .sp \fBcargo\-check\fR(1) .br \ \ \ \ Check a local package and all of its dependencies for errors. .sp \fBcargo\-clean\fR(1) .br \ \ \ \ Remove artifacts that Cargo has generated in the past. .sp \fBcargo\-doc\fR(1) .br \ \ \ \ Build a package's documentation. .sp \fBcargo\-fetch\fR(1) .br \ \ \ \ Fetch dependencies of a package from the network. 
.sp \fBcargo\-fix\fR(1) .br \ \ \ \ Automatically fix lint warnings reported by rustc. .sp \fBcargo\-run\fR(1) .br \ \ \ \ Run a binary or example of the local package. .sp \fBcargo\-rustc\fR(1) .br \ \ \ \ Compile a package, and pass extra options to the compiler. .sp \fBcargo\-rustdoc\fR(1) .br \ \ \ \ Build a package's documentation, using specified custom flags. .sp \fBcargo\-test\fR(1) .br \ \ \ \ Execute unit and integration tests of a package. .SS "Manifest Commands" \fBcargo\-generate\-lockfile\fR(1) .br \ \ \ \ Generate \fBCargo.lock\fR for a project. .sp \fBcargo\-locate\-project\fR(1) .br \ \ \ \ Print a JSON representation of a \fBCargo.toml\fR file's location. .sp \fBcargo\-metadata\fR(1) .br \ \ \ \ Output the resolved dependencies of a package in machine\-readable format. .sp \fBcargo\-pkgid\fR(1) .br \ \ \ \ Print a fully qualified package specification. .sp \fBcargo\-tree\fR(1) .br \ \ \ \ Display a tree visualization of a dependency graph. .sp \fBcargo\-update\fR(1) .br \ \ \ \ Update dependencies as recorded in the local lock file. .sp \fBcargo\-vendor\fR(1) .br \ \ \ \ Vendor all dependencies locally. .sp \fBcargo\-verify\-project\fR(1) .br \ \ \ \ Check correctness of crate manifest. .SS "Package Commands" \fBcargo\-init\fR(1) .br \ \ \ \ Create a new Cargo package in an existing directory. .sp \fBcargo\-install\fR(1) .br \ \ \ \ Build and install a Rust binary. .sp \fBcargo\-new\fR(1) .br \ \ \ \ Create a new Cargo package. .sp \fBcargo\-search\fR(1) .br \ \ \ \ Search packages in crates.io. .sp \fBcargo\-uninstall\fR(1) .br \ \ \ \ Remove a Rust binary. .SS "Publishing Commands" \fBcargo\-login\fR(1) .br \ \ \ \ Save an API token from the registry locally. .sp \fBcargo\-owner\fR(1) .br \ \ \ \ Manage the owners of a crate on the registry. .sp \fBcargo\-package\fR(1) .br \ \ \ \ Assemble the local package into a distributable tarball. .sp \fBcargo\-publish\fR(1) .br \ \ \ \ Upload a package to the registry. 
.sp \fBcargo\-yank\fR(1) .br \ \ \ \ Remove a pushed crate from the index. .SS "General Commands" \fBcargo\-help\fR(1) .br \ \ \ \ Display help information about Cargo. .sp \fBcargo\-version\fR(1) .br \ \ \ \ Show version information. .SH "OPTIONS" .SS "Special Options" .sp \fB\-V\fR, \fB\-\-version\fR .RS 4 Print version info and exit. If used with \fB\-\-verbose\fR, prints extra information. .RE .sp \fB\-\-list\fR .RS 4 List all installed Cargo subcommands. If used with \fB\-\-verbose\fR, prints extra information. .RE .sp \fB\-\-explain\fR \fIcode\fR .RS 4 Run \fBrustc \-\-explain CODE\fR which will print out a detailed explanation of an error message (for example, \fBE0004\fR). .RE .SS "Display Options" .sp \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Use verbose output. May be specified twice for "very verbose" output which includes extra output such as dependency warnings and build script output. May also be specified with the \fBterm.verbose\fR \fIconfig value\fR \&. .RE .sp \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Do not print cargo log messages. May also be specified with the \fBterm.quiet\fR \fIconfig value\fR \&. .RE .sp \fB\-\-color\fR \fIwhen\fR .RS 4 Control when colored output is used. Valid values: .sp .RS 4 \h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the terminal. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. .RE .sp May also be specified with the \fBterm.color\fR \fIconfig value\fR \&. .RE .SS "Manifest Options" .sp \fB\-\-frozen\fR, \fB\-\-locked\fR .RS 4 Either of these flags requires that the \fBCargo.lock\fR file is up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from attempting to access the network to determine if it is out\-of\-date. 
.sp These may be used in environments where you want to assert that the \fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network access. .RE .sp \fB\-\-offline\fR .RS 4 Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible. .sp Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the \fBcargo\-fetch\fR(1) command to download dependencies before going offline. .sp May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. .RE .SS "Common Options" .sp \fB+\fR\fItoolchain\fR .RS 4 If Cargo has been installed with rustup, and the first argument to \fBcargo\fR begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such as \fB+stable\fR or \fB+nightly\fR). See the \fIrustup documentation\fR for more information about how toolchain overrides work. .RE .sp \fB\-\-config\fR \fIKEY=VALUE\fR or \fIPATH\fR .RS 4 Overrides a Cargo configuration value. The argument should be in TOML syntax of \fBKEY=VALUE\fR, or provided as a path to an extra configuration file. This flag may be specified multiple times. See the \fIcommand\-line overrides section\fR for more information. .RE .sp \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help information. .RE .sp \fB\-Z\fR \fIflag\fR .RS 4 Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. .RE .SH "ENVIRONMENT" See \fIthe reference\fR for details on environment variables that Cargo reads. .SH "EXIT STATUS" .sp .RS 4 \h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. .RE .sp .RS 4 \h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. 
.RE .SH "FILES" \fB~/.cargo/\fR .br \ \ \ \ Default location for Cargo's "home" directory where it stores various files. The location can be changed with the \fBCARGO_HOME\fR environment variable. .sp \fB$CARGO_HOME/bin/\fR .br \ \ \ \ Binaries installed by \fBcargo\-install\fR(1) will be located here. If using \fIrustup\fR , executables distributed with Rust are also located here. .sp \fB$CARGO_HOME/config.toml\fR .br \ \ \ \ The global configuration file. See \fIthe reference\fR for more information about configuration files. .sp \fB\&.cargo/config.toml\fR .br \ \ \ \ Cargo automatically searches for a file named \fB\&.cargo/config.toml\fR in the current directory, and all parent directories. These configuration files will be merged with the global configuration file. .sp \fB$CARGO_HOME/credentials.toml\fR .br \ \ \ \ Private authentication information for logging in to a registry. .sp \fB$CARGO_HOME/registry/\fR .br \ \ \ \ This directory contains cached downloads of the registry index and any downloaded dependencies. .sp \fB$CARGO_HOME/git/\fR .br \ \ \ \ This directory contains cached downloads of git dependencies. .sp Please note that the internal structure of the \fB$CARGO_HOME\fR directory is not stable yet and may be subject to change. .SH "EXAMPLES" .sp .RS 4 \h'-04' 1.\h'+01'Build a local package and all of its dependencies: .sp .RS 4 .nf cargo build .fi .RE .RE .sp .RS 4 \h'-04' 2.\h'+01'Build a package with optimizations: .sp .RS 4 .nf cargo build \-\-release .fi .RE .RE .sp .RS 4 \h'-04' 3.\h'+01'Run tests for a cross\-compiled target: .sp .RS 4 .nf cargo test \-\-target i686\-unknown\-linux\-gnu .fi .RE .RE .sp .RS 4 \h'-04' 4.\h'+01'Create a new package that builds an executable: .sp .RS 4 .nf cargo new foobar .fi .RE .RE .sp .RS 4 \h'-04' 5.\h'+01'Create a package in the current directory: .sp .RS 4 .nf mkdir foo && cd foo cargo init . 
.fi .RE .RE .sp .RS 4 \h'-04' 6.\h'+01'Learn about a command's options and usage: .sp .RS 4 .nf cargo help clean .fi .RE .RE .SH "BUGS" See for issues. .SH "SEE ALSO" \fBrustc\fR(1), \fBrustdoc\fR(1) cargo-0.66.0/tests/000077500000000000000000000000001432416201200140335ustar00rootroot00000000000000cargo-0.66.0/tests/build-std/000077500000000000000000000000001432416201200157225ustar00rootroot00000000000000cargo-0.66.0/tests/build-std/main.rs000066400000000000000000000147651432416201200172310ustar00rootroot00000000000000//! A test suite for `-Zbuild-std` which is much more expensive than the //! standard test suite. //! //! This test suite attempts to perform a full integration test where we //! actually compile the standard library from source (like the real one) and //! the various tests associated with that. //! //! YOU SHOULD IDEALLY NOT WRITE TESTS HERE. //! //! If possible, use `tests/testsuite/standard_lib.rs` instead. That uses a //! 'mock' sysroot which is much faster to compile. The tests here are //! extremely intensive and are only intended to run on CI and are theoretically //! not catching any regressions that `tests/testsuite/standard_lib.rs` isn't //! already catching. //! //! All tests here should use `#[cargo_test(build_std_real)]` to indicate that //! boilerplate should be generated to require the nightly toolchain and the //! `CARGO_RUN_BUILD_STD_TESTS` env var to be set to actually run these tests. //! Otherwise the tests are skipped. 
use cargo_test_support::*; use std::env; use std::path::Path; fn enable_build_std(e: &mut Execs, arg: Option<&str>) { e.env_remove("CARGO_HOME"); e.env_remove("HOME"); // And finally actually enable `build-std` for now let arg = match arg { Some(s) => format!("-Zbuild-std={}", s), None => "-Zbuild-std".to_string(), }; e.arg(arg); e.masquerade_as_nightly_cargo(&["build-std"]); } // Helper methods used in the tests below trait BuildStd: Sized { fn build_std(&mut self) -> &mut Self; fn build_std_arg(&mut self, arg: &str) -> &mut Self; fn target_host(&mut self) -> &mut Self; } impl BuildStd for Execs { fn build_std(&mut self) -> &mut Self { enable_build_std(self, None); self } fn build_std_arg(&mut self, arg: &str) -> &mut Self { enable_build_std(self, Some(arg)); self } fn target_host(&mut self) -> &mut Self { self.arg("--target").arg(rustc_host()); self } } #[cargo_test(build_std_real)] fn basic() { let p = project() .file( "src/main.rs", " fn main() { foo::f(); } #[test] fn smoke_bin_unit() { foo::f(); } ", ) .file( "src/lib.rs", " extern crate alloc; extern crate proc_macro; /// ``` /// foo::f(); /// ``` pub fn f() { } #[test] fn smoke_lib_unit() { f(); } ", ) .file( "tests/smoke.rs", " #[test] fn smoke_integration() { foo::f(); } ", ) .build(); p.cargo("check").build_std().target_host().run(); p.cargo("build") .build_std() .target_host() // Importantly, this should not say [UPDATING] // There have been multiple bugs where every build triggers and update. .with_stderr( "[COMPILING] foo v0.0.1 [..]\n\ [FINISHED] dev [..]", ) .run(); p.cargo("run").build_std().target_host().run(); p.cargo("test").build_std().target_host().run(); // Check for hack that removes dylibs. 
let deps_dir = Path::new("target") .join(rustc_host()) .join("debug") .join("deps"); assert!(p.glob(deps_dir.join("*.rlib")).count() > 0); assert_eq!(p.glob(deps_dir.join("*.dylib")).count(), 0); } #[cargo_test(build_std_real)] fn cross_custom() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [target.custom-target.dependencies] dep = { path = "dep" } "#, ) .file( "src/lib.rs", "#![no_std] pub fn f() -> u32 { dep::answer() }", ) .file("dep/Cargo.toml", &basic_manifest("dep", "0.1.0")) .file("dep/src/lib.rs", "#![no_std] pub fn answer() -> u32 { 42 }") .file( "custom-target.json", r#" { "llvm-target": "x86_64-unknown-none-gnu", "data-layout": "e-m:e-i64:64-f80:128-n8:16:32:64-S128", "arch": "x86_64", "target-endian": "little", "target-pointer-width": "64", "target-c-int-width": "32", "os": "none", "linker-flavor": "ld.lld" } "#, ) .build(); p.cargo("build --target custom-target.json -v") .build_std_arg("core") .run(); } #[cargo_test(build_std_real)] fn custom_test_framework() { let p = project() .file( "src/lib.rs", r#" #![no_std] #![cfg_attr(test, no_main)] #![feature(custom_test_frameworks)] #![test_runner(crate::test_runner)] pub fn test_runner(_tests: &[&dyn Fn()]) {} #[panic_handler] fn panic(_info: &core::panic::PanicInfo) -> ! { loop {} } "#, ) .file( "target.json", r#" { "llvm-target": "x86_64-unknown-none-gnu", "data-layout": "e-m:e-i64:64-f80:128-n8:16:32:64-S128", "arch": "x86_64", "target-endian": "little", "target-pointer-width": "64", "target-c-int-width": "32", "os": "none", "linker-flavor": "ld.lld", "linker": "rust-lld", "executables": true, "panic-strategy": "abort" } "#, ) .build(); // This is a bit of a hack to use the rust-lld that ships with most toolchains. 
let sysroot = paths::sysroot(); let sysroot = Path::new(&sysroot); let sysroot_bin = sysroot .join("lib") .join("rustlib") .join(rustc_host()) .join("bin"); let path = env::var_os("PATH").unwrap_or_default(); let mut paths = env::split_paths(&path).collect::>(); paths.insert(0, sysroot_bin); let new_path = env::join_paths(paths).unwrap(); p.cargo("test --target target.json --no-run -v") .env("PATH", new_path) .build_std_arg("core") .run(); } cargo-0.66.0/tests/internal.rs000066400000000000000000000064271432416201200162260ustar00rootroot00000000000000//! Tests for internal code checks. #![allow(clippy::all)] use std::fs; #[test] fn check_forbidden_code() { // Do not use certain macros, functions, etc. if !cargo_util::is_ci() { // Only check these on CI, otherwise it could be annoying. use std::io::Write; writeln!( std::io::stderr(), "\nSkipping check_forbidden_code test, set CI=1 to enable" ) .unwrap(); return; } let root_path = std::path::Path::new(env!("CARGO_MANIFEST_DIR")).join("src"); for entry in walkdir::WalkDir::new(&root_path) .into_iter() .filter_entry(|e| e.path() != root_path.join("doc")) .filter_map(|e| e.ok()) { let path = entry.path(); if !entry .file_name() .to_str() .map(|s| s.ends_with(".rs")) .unwrap_or(false) { continue; } eprintln!("checking {}", path.display()); let c = fs::read_to_string(path).unwrap(); for (line_index, line) in c.lines().enumerate() { if line.trim().starts_with("//") { continue; } if line_has_print(line) { if entry.file_name().to_str().unwrap() == "cargo_new.rs" && line.contains("Hello") { // An exception. continue; } panic!( "found print macro in {}:{}\n\n{}\n\n\ print! macros should not be used in Cargo because they can panic.\n\ Use one of the drop_print macros instead.\n\ ", path.display(), line_index, line ); } if line_has_macro(line, "dbg") { panic!( "found dbg! macro in {}:{}\n\n{}\n\n\ dbg! 
should not be used outside of debugging.", path.display(), line_index, line ); } } } } fn line_has_print(line: &str) -> bool { line_has_macro(line, "print") || line_has_macro(line, "eprint") || line_has_macro(line, "println") || line_has_macro(line, "eprintln") } #[test] fn line_has_print_works() { assert!(line_has_print("print!")); assert!(line_has_print("println!")); assert!(line_has_print("eprint!")); assert!(line_has_print("eprintln!")); assert!(line_has_print("(print!(\"hi!\"))")); assert!(!line_has_print("print")); assert!(!line_has_print("i like to print things")); assert!(!line_has_print("drop_print!")); assert!(!line_has_print("drop_println!")); assert!(!line_has_print("drop_eprint!")); assert!(!line_has_print("drop_eprintln!")); } fn line_has_macro(line: &str, mac: &str) -> bool { for (i, _) in line.match_indices(mac) { if line.get(i + mac.len()..i + mac.len() + 1) != Some("!") { continue; } if i == 0 { return true; } // Check for identifier boundary start. let prev1 = line.get(i - 1..i).unwrap().chars().next().unwrap(); if prev1.is_alphanumeric() || prev1 == '_' { continue; } return true; } false } cargo-0.66.0/tests/testsuite/000077500000000000000000000000001432416201200160645ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/advanced_env.rs000066400000000000000000000021231432416201200210450ustar00rootroot00000000000000//! -Zadvanced-env tests use cargo_test_support::{paths, project, registry::Package}; #[cargo_test] // I don't know why, but `Command` forces all env keys to be upper case on // Windows. Seems questionable, since I think Windows is case-preserving. #[cfg_attr(windows, ignore = "broken due to not preserving case on Windows")] fn source_config_env() { // Try to define [source] with environment variables. 
let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] somedep = "1.0" "#, ) .file("src/lib.rs", "") .build(); Package::new("somedep", "1.0.0") .local(true) .file("src/lib.rs", "") .publish(); let path = paths::root().join("registry"); p.cargo("check -Zadvanced-env") .masquerade_as_nightly_cargo(&["advanced-env"]) .env("CARGO_SOURCE_crates-io_REPLACE_WITH", "my-local-source") .env("CARGO_SOURCE_my-local-source_LOCAL_REGISTRY", path) .run(); } cargo-0.66.0/tests/testsuite/alt_registry.rs000066400000000000000000001167521432416201200211560ustar00rootroot00000000000000//! Tests for alternative registries. use cargo::util::IntoUrl; use cargo_test_support::publish::validate_alt_upload; use cargo_test_support::registry::{self, Package, RegistryBuilder}; use cargo_test_support::{basic_manifest, git, paths, project}; use std::fs; #[cargo_test] fn depend_on_alt_registry() { registry::alt_init(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] version = "0.0.1" registry = "alternative" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("bar", "0.0.1").alternative(true).publish(); p.cargo("build") .with_stderr( "\ [UPDATING] `alternative` index [DOWNLOADING] crates ... 
[DOWNLOADED] bar v0.0.1 (registry `alternative`) [COMPILING] bar v0.0.1 (registry `alternative`) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s ", ) .run(); p.cargo("clean").run(); // Don't download a second time p.cargo("build") .with_stderr( "\ [COMPILING] bar v0.0.1 (registry `alternative`) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s ", ) .run(); } #[cargo_test] fn depend_on_alt_registry_depends_on_same_registry_no_index() { registry::alt_init(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] version = "0.0.1" registry = "alternative" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("baz", "0.0.1").alternative(true).publish(); Package::new("bar", "0.0.1") .registry_dep("baz", "0.0.1") .alternative(true) .publish(); p.cargo("build") .with_stderr( "\ [UPDATING] `alternative` index [DOWNLOADING] crates ... [DOWNLOADED] [..] v0.0.1 (registry `alternative`) [DOWNLOADED] [..] v0.0.1 (registry `alternative`) [COMPILING] baz v0.0.1 (registry `alternative`) [COMPILING] bar v0.0.1 (registry `alternative`) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s ", ) .run(); } #[cargo_test] fn depend_on_alt_registry_depends_on_same_registry() { registry::alt_init(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] version = "0.0.1" registry = "alternative" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("baz", "0.0.1").alternative(true).publish(); Package::new("bar", "0.0.1") .registry_dep("baz", "0.0.1") .alternative(true) .publish(); p.cargo("build") .with_stderr( "\ [UPDATING] `alternative` index [DOWNLOADING] crates ... [DOWNLOADED] [..] v0.0.1 (registry `alternative`) [DOWNLOADED] [..] 
v0.0.1 (registry `alternative`) [COMPILING] baz v0.0.1 (registry `alternative`) [COMPILING] bar v0.0.1 (registry `alternative`) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s ", ) .run(); } #[cargo_test] fn depend_on_alt_registry_depends_on_crates_io() { registry::alt_init(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] version = "0.0.1" registry = "alternative" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("baz", "0.0.1").publish(); Package::new("bar", "0.0.1") .dep("baz", "0.0.1") .alternative(true) .publish(); p.cargo("build") .with_stderr_unordered( "\ [UPDATING] `alternative` index [UPDATING] `dummy-registry` index [DOWNLOADING] crates ... [DOWNLOADED] baz v0.0.1 (registry `dummy-registry`) [DOWNLOADED] bar v0.0.1 (registry `alternative`) [COMPILING] baz v0.0.1 [COMPILING] bar v0.0.1 (registry `alternative`) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s ", ) .run(); } #[cargo_test] fn registry_and_path_dep_works() { registry::alt_init(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" registry = "alternative" "#, ) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "") .build(); p.cargo("build") .with_stderr( "\ [COMPILING] bar v0.0.1 ([CWD]/bar) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s ", ) .run(); } #[cargo_test] fn registry_incompatible_with_git() { registry::alt_init(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] git = "" registry = "alternative" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build") .with_status(101) .with_stderr_contains( " dependency (bar) specification is ambiguous. 
\ Only one of `git` or `registry` is allowed.", ) .run(); } #[cargo_test] fn cannot_publish_to_crates_io_with_registry_dependency() { registry::alt_init(); let fakeio_path = paths::root().join("fake.io"); let fakeio_url = fakeio_path.into_url().unwrap(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] version = "0.0.1" registry = "alternative" "#, ) .file("src/main.rs", "fn main() {}") .file( ".cargo/config", &format!( r#" [registries.fakeio] index = "{}" "#, fakeio_url ), ) .build(); Package::new("bar", "0.0.1").alternative(true).publish(); // Since this can't really call plain `publish` without fetching the real // crates.io index, create a fake one that points to the real crates.io. git::repo(&fakeio_path) .file( "config.json", r#" {"dl": "https://crates.io/api/v1/crates", "api": "https://crates.io"} "#, ) .build(); // Login so that we have the token available p.cargo("login --registry fakeio TOKEN").run(); p.cargo("publish --registry fakeio") .with_status(101) .with_stderr_contains("[ERROR] crates cannot be published to crates.io[..]") .run(); p.cargo("publish --token sekrit --index") .arg(fakeio_url.to_string()) .with_status(101) .with_stderr_contains("[ERROR] crates cannot be published to crates.io[..]") .run(); } #[cargo_test] fn publish_with_registry_dependency() { registry::alt_init(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] version = "0.0.1" registry = "alternative" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("bar", "0.0.1").alternative(true).publish(); // Login so that we have the token available p.cargo("login --registry alternative TOKEN").run(); p.cargo("publish --registry alternative").run(); validate_alt_upload( r#"{ "authors": [], "badges": {}, "categories": [], "deps": [ { "default_features": true, "features": [], "kind": "normal", "name": "bar", "optional": false, "target": null, 
"version_req": "^0.0.1" } ], "description": null, "documentation": null, "features": {}, "homepage": null, "keywords": [], "license": null, "license_file": null, "links": null, "name": "foo", "readme": null, "readme_file": null, "repository": null, "homepage": null, "documentation": null, "vers": "0.0.1" }"#, "foo-0.0.1.crate", &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], ); } #[cargo_test] fn alt_registry_and_crates_io_deps() { registry::alt_init(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] crates_io_dep = "0.0.1" [dependencies.alt_reg_dep] version = "0.1.0" registry = "alternative" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("crates_io_dep", "0.0.1").publish(); Package::new("alt_reg_dep", "0.1.0") .alternative(true) .publish(); p.cargo("build") .with_stderr_unordered( "\ [UPDATING] `alternative` index [UPDATING] `dummy-registry` index [DOWNLOADING] crates ... [DOWNLOADED] crates_io_dep v0.0.1 (registry `dummy-registry`) [DOWNLOADED] alt_reg_dep v0.1.0 (registry `alternative`) [COMPILING] alt_reg_dep v0.1.0 (registry `alternative`) [COMPILING] crates_io_dep v0.0.1 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s ", ) .run(); } #[cargo_test] fn block_publish_due_to_no_token() { registry::alt_init(); let p = project().file("src/lib.rs", "").build(); fs::remove_file(paths::home().join(".cargo/credentials")).unwrap(); // Now perform the actual publish p.cargo("publish --registry alternative") .with_status(101) .with_stderr_contains( "error: no upload token found, \ please run `cargo login` or pass `--token`", ) .run(); } #[cargo_test] fn publish_to_alt_registry() { registry::alt_init(); let p = project().file("src/main.rs", "fn main() {}").build(); // Setup the registry by publishing a package Package::new("bar", "0.0.1").alternative(true).publish(); // Login so that we have the token available p.cargo("login 
--registry alternative TOKEN").run(); // Now perform the actual publish p.cargo("publish --registry alternative").run(); validate_alt_upload( r#"{ "authors": [], "badges": {}, "categories": [], "deps": [], "description": null, "documentation": null, "features": {}, "homepage": null, "keywords": [], "license": null, "license_file": null, "links": null, "name": "foo", "readme": null, "readme_file": null, "repository": null, "homepage": null, "documentation": null, "vers": "0.0.1" }"#, "foo-0.0.1.crate", &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], ); } #[cargo_test] fn publish_with_crates_io_dep() { registry::alt_init(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = ["me"] license = "MIT" description = "foo" [dependencies.bar] version = "0.0.1" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("bar", "0.0.1").publish(); // Login so that we have the token available p.cargo("login --registry alternative TOKEN").run(); p.cargo("publish --registry alternative").run(); validate_alt_upload( r#"{ "authors": ["me"], "badges": {}, "categories": [], "deps": [ { "default_features": true, "features": [], "kind": "normal", "name": "bar", "optional": false, "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^0.0.1" } ], "description": "foo", "documentation": null, "features": {}, "homepage": null, "keywords": [], "license": "MIT", "license_file": null, "links": null, "name": "foo", "readme": null, "readme_file": null, "repository": null, "homepage": null, "documentation": null, "vers": "0.0.1" }"#, "foo-0.0.1.crate", &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], ); } #[cargo_test] fn passwords_in_registries_index_url_forbidden() { registry::alt_init(); let config = paths::home().join(".cargo/config"); fs::write( config, r#" [registries.alternative] index = "ssh://git:secret@foobar.com" "#, ) .unwrap(); let p = project().file("src/main.rs", 
"fn main() {}").build(); p.cargo("publish --registry alternative") .with_status(101) .with_stderr( "\ error: invalid index URL for registry `alternative` defined in [..]/home/.cargo/config Caused by: registry URLs may not contain passwords ", ) .run(); } #[cargo_test] fn patch_alt_reg() { registry::alt_init(); Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [dependencies] bar = { version = "0.1.0", registry = "alternative" } [patch.alternative] bar = { path = "bar" } "#, ) .file( "src/lib.rs", " extern crate bar; pub fn f() { bar::bar(); } ", ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .build(); p.cargo("build") .with_stderr( "\ [UPDATING] `alternative` index [COMPILING] bar v0.1.0 ([CWD]/bar) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn bad_registry_name() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] version = "0.0.1" registry = "bad name" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[CWD]/Cargo.toml` Caused by: invalid character ` ` in registry name: `bad name`, [..]", ) .run(); for cmd in &[ "init", "install foo", "login", "owner", "publish", "search", "yank --version 0.0.1", ] { p.cargo(cmd) .arg("--registry") .arg("bad name") .with_status(101) .with_stderr("[ERROR] invalid character ` ` in registry name: `bad name`, [..]") .run(); } } #[cargo_test] fn no_api() { let _registry = RegistryBuilder::new().alternative().no_api().build(); Package::new("bar", "0.0.1").alternative(true).publish(); // First check that a dependency works. 
let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [dependencies.bar] version = "0.0.1" registry = "alternative" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_stderr( "\ [UPDATING] `alternative` index [DOWNLOADING] crates ... [DOWNLOADED] bar v0.0.1 (registry `alternative`) [COMPILING] bar v0.0.1 (registry `alternative`) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s ", ) .run(); // Check all of the API commands. let err = "[ERROR] registry `alternative` does not support API commands"; p.cargo("login --registry alternative TOKEN") .with_status(101) .with_stderr_contains(&err) .run(); p.cargo("publish --registry alternative") .with_status(101) .with_stderr_contains(&err) .run(); p.cargo("search --registry alternative") .with_status(101) .with_stderr_contains(&err) .run(); p.cargo("owner --registry alternative --list") .with_status(101) .with_stderr_contains(&err) .run(); p.cargo("yank --registry alternative --version=0.0.1 bar") .with_status(101) .with_stderr_contains(&err) .run(); p.cargo("yank --registry alternative --version=0.0.1 bar") .with_stderr_contains(&err) .with_status(101) .run(); } #[cargo_test] fn alt_reg_metadata() { // Check for "registry" entries in `cargo metadata` with alternative registries. registry::alt_init(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [dependencies] altdep = { version = "0.0.1", registry = "alternative" } iodep = { version = "0.0.1" } "#, ) .file("src/lib.rs", "") .build(); Package::new("bar", "0.0.1").publish(); Package::new("altdep", "0.0.1") .dep("bar", "0.0.1") .alternative(true) .publish(); Package::new("altdep2", "0.0.1").alternative(true).publish(); Package::new("iodep", "0.0.1") .registry_dep("altdep2", "0.0.1") .publish(); // The important thing to check here is the "registry" value in `deps`. 
// They should be: // foo -> altdep: alternative-registry // foo -> iodep: null (because it is in crates.io) // altdep -> bar: null (because it is in crates.io) // iodep -> altdep2: alternative-registry p.cargo("metadata --format-version=1 --no-deps") .with_json( r#" { "packages": [ { "name": "foo", "version": "0.0.1", "id": "foo 0.0.1 (path+file:[..]/foo)", "license": null, "license_file": null, "description": null, "source": null, "dependencies": [ { "name": "altdep", "source": "registry+file:[..]/alternative-registry", "req": "^0.0.1", "kind": null, "rename": null, "optional": false, "uses_default_features": true, "features": [], "target": null, "registry": "file:[..]/alternative-registry" }, { "name": "iodep", "source": "registry+https://github.com/rust-lang/crates.io-index", "req": "^0.0.1", "kind": null, "rename": null, "optional": false, "uses_default_features": true, "features": [], "target": null, "registry": null } ], "targets": "{...}", "features": {}, "manifest_path": "[..]/foo/Cargo.toml", "metadata": null, "publish": null, "authors": [], "categories": [], "default_run": null, "keywords": [], "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "edition": "2015", "links": null } ], "workspace_members": [ "foo 0.0.1 (path+file:[..]/foo)" ], "resolve": null, "target_directory": "[..]/foo/target", "version": 1, "workspace_root": "[..]/foo", "metadata": null }"#, ) .run(); // --no-deps uses a different code path, make sure both work. 
p.cargo("metadata --format-version=1") .with_json( r#" { "packages": [ { "name": "altdep", "version": "0.0.1", "id": "altdep 0.0.1 (registry+file:[..]/alternative-registry)", "license": null, "license_file": null, "description": null, "source": "registry+file:[..]/alternative-registry", "dependencies": [ { "name": "bar", "source": "registry+https://github.com/rust-lang/crates.io-index", "req": "^0.0.1", "kind": null, "rename": null, "optional": false, "uses_default_features": true, "features": [], "target": null, "registry": null } ], "targets": "{...}", "features": {}, "manifest_path": "[..]/altdep-0.0.1/Cargo.toml", "metadata": null, "publish": null, "authors": [], "categories": [], "default_run": null, "keywords": [], "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "edition": "2015", "links": null }, { "name": "altdep2", "version": "0.0.1", "id": "altdep2 0.0.1 (registry+file:[..]/alternative-registry)", "license": null, "license_file": null, "description": null, "source": "registry+file:[..]/alternative-registry", "dependencies": [], "targets": "{...}", "features": {}, "manifest_path": "[..]/altdep2-0.0.1/Cargo.toml", "metadata": null, "publish": null, "authors": [], "categories": [], "default_run": null, "keywords": [], "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "edition": "2015", "links": null }, { "name": "bar", "version": "0.0.1", "id": "bar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "license": null, "license_file": null, "description": null, "source": "registry+https://github.com/rust-lang/crates.io-index", "dependencies": [], "targets": "{...}", "features": {}, "manifest_path": "[..]/bar-0.0.1/Cargo.toml", "metadata": null, "publish": null, "authors": [], "categories": [], "default_run": null, "keywords": [], "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "edition": "2015", 
"links": null }, { "name": "foo", "version": "0.0.1", "id": "foo 0.0.1 (path+file:[..]/foo)", "license": null, "license_file": null, "description": null, "source": null, "dependencies": [ { "name": "altdep", "source": "registry+file:[..]/alternative-registry", "req": "^0.0.1", "kind": null, "rename": null, "optional": false, "uses_default_features": true, "features": [], "target": null, "registry": "file:[..]/alternative-registry" }, { "name": "iodep", "source": "registry+https://github.com/rust-lang/crates.io-index", "req": "^0.0.1", "kind": null, "rename": null, "optional": false, "uses_default_features": true, "features": [], "target": null, "registry": null } ], "targets": "{...}", "features": {}, "manifest_path": "[..]/foo/Cargo.toml", "metadata": null, "publish": null, "authors": [], "categories": [], "default_run": null, "keywords": [], "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "edition": "2015", "links": null }, { "name": "iodep", "version": "0.0.1", "id": "iodep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "license": null, "license_file": null, "description": null, "source": "registry+https://github.com/rust-lang/crates.io-index", "dependencies": [ { "name": "altdep2", "source": "registry+file:[..]/alternative-registry", "req": "^0.0.1", "kind": null, "rename": null, "optional": false, "uses_default_features": true, "features": [], "target": null, "registry": "file:[..]/alternative-registry" } ], "targets": "{...}", "features": {}, "manifest_path": "[..]/iodep-0.0.1/Cargo.toml", "metadata": null, "publish": null, "authors": [], "categories": [], "default_run": null, "keywords": [], "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "edition": "2015", "links": null } ], "workspace_members": [ "foo 0.0.1 (path+file:[..]/foo)" ], "resolve": "{...}", "target_directory": "[..]/foo/target", "version": 1, "workspace_root": "[..]/foo", 
"metadata": null }"#, ) .run(); } #[cargo_test] fn unknown_registry() { // A known registry refers to an unknown registry. // foo -> bar(crates.io) -> baz(alt) registry::alt_init(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] version = "0.0.1" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("baz", "0.0.1").alternative(true).publish(); Package::new("bar", "0.0.1") .registry_dep("baz", "0.0.1") .publish(); // Remove "alternative" from config. let cfg_path = paths::home().join(".cargo/config"); let mut config = fs::read_to_string(&cfg_path).unwrap(); let start = config.find("[registries.alternative]").unwrap(); config.insert(start, '#'); let start_index = &config[start..].find("index =").unwrap(); config.insert(start + start_index, '#'); fs::write(&cfg_path, config).unwrap(); p.cargo("build").run(); // Important parts: // foo -> bar registry = null // bar -> baz registry = alternate p.cargo("metadata --format-version=1") .with_json( r#" { "packages": [ { "name": "bar", "version": "0.0.1", "id": "bar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "license": null, "license_file": null, "description": null, "source": "registry+https://github.com/rust-lang/crates.io-index", "dependencies": [ { "name": "baz", "source": "registry+file://[..]/alternative-registry", "req": "^0.0.1", "kind": null, "rename": null, "optional": false, "uses_default_features": true, "features": [], "target": null, "registry": "file:[..]/alternative-registry" } ], "targets": "{...}", "features": {}, "manifest_path": "[..]", "metadata": null, "publish": null, "authors": [], "categories": [], "default_run": null, "keywords": [], "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "edition": "2015", "links": null }, { "name": "baz", "version": "0.0.1", "id": "baz 0.0.1 (registry+file://[..]/alternative-registry)", "license": null, "license_file": 
null, "description": null, "source": "registry+file://[..]/alternative-registry", "dependencies": [], "targets": "{...}", "features": {}, "manifest_path": "[..]", "metadata": null, "publish": null, "authors": [], "categories": [], "default_run": null, "keywords": [], "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "edition": "2015", "links": null }, { "name": "foo", "version": "0.0.1", "id": "foo 0.0.1 (path+file://[..]/foo)", "license": null, "license_file": null, "description": null, "source": null, "dependencies": [ { "name": "bar", "source": "registry+https://github.com/rust-lang/crates.io-index", "req": "^0.0.1", "kind": null, "rename": null, "optional": false, "uses_default_features": true, "features": [], "target": null, "registry": null } ], "targets": "{...}", "features": {}, "manifest_path": "[..]/foo/Cargo.toml", "metadata": null, "publish": null, "authors": [], "categories": [], "default_run": null, "keywords": [], "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "edition": "2015", "links": null } ], "workspace_members": [ "foo 0.0.1 (path+file://[..]/foo)" ], "resolve": "{...}", "target_directory": "[..]/foo/target", "version": 1, "workspace_root": "[..]/foo", "metadata": null } "#, ) .run(); } #[cargo_test] fn registries_index_relative_url() { registry::alt_init(); let config = paths::root().join(".cargo/config"); fs::create_dir_all(config.parent().unwrap()).unwrap(); fs::write( &config, r#" [registries.relative] index = "file:alternative-registry" "#, ) .unwrap(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] version = "0.0.1" registry = "relative" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("bar", "0.0.1").alternative(true).publish(); p.cargo("build") .with_stderr( "\ [UPDATING] `relative` index [DOWNLOADING] crates ... 
[DOWNLOADED] bar v0.0.1 (registry `relative`) [COMPILING] bar v0.0.1 (registry `relative`) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s ", ) .run(); } #[cargo_test] fn registries_index_relative_path_not_allowed() { registry::alt_init(); let config = paths::root().join(".cargo/config"); fs::create_dir_all(config.parent().unwrap()).unwrap(); fs::write( &config, r#" [registries.relative] index = "alternative-registry" "#, ) .unwrap(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] version = "0.0.1" registry = "relative" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("bar", "0.0.1").alternative(true).publish(); p.cargo("build") .with_stderr(&format!( "\ error: failed to parse manifest at `{root}/foo/Cargo.toml` Caused by: invalid index URL for registry `relative` defined in [..]/.cargo/config Caused by: invalid url `alternative-registry`: relative URL without a base ", root = paths::root().to_str().unwrap() )) .with_status(101) .run(); } #[cargo_test] fn both_index_and_registry() { let p = project().file("src/lib.rs", "").build(); for cmd in &["publish", "owner", "search", "yank --version 1.0.0"] { p.cargo(cmd) .arg("--registry=foo") .arg("--index=foo") .with_status(101) .with_stderr( "[ERROR] both `--index` and `--registry` \ should not be set at the same time", ) .run(); } } cargo-0.66.0/tests/testsuite/artifact_dep.rs000066400000000000000000002110541432416201200210620ustar00rootroot00000000000000//! Tests specific to artifact dependencies, designated using //! the new `dep = { artifact = "bin", … }` syntax in manifests. 
use cargo_test_support::compare::match_exact; use cargo_test_support::registry::Package; use cargo_test_support::{ basic_bin_manifest, basic_manifest, cross_compile, project, publish, registry, rustc_host, Project, }; #[cargo_test] fn check_with_invalid_artifact_dependency() { // invalid name let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] resolver = "2" [dependencies] bar = { path = "bar/", artifact = "unknown" } "#, ) .file("src/lib.rs", "extern crate bar;") // this would fail but we don't get there, artifacts are no libs .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "") .build(); p.cargo("check -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]/Cargo.toml` Caused by: 'unknown' is not a valid artifact specifier ", ) .with_status(101) .run(); fn run_cargo_with_and_without_bindeps_feature( p: &Project, cmd: &str, assert: &dyn Fn(&mut cargo_test_support::Execs), ) { assert( p.cargo(&format!("{} -Z bindeps", cmd)) .masquerade_as_nightly_cargo(&["bindeps"]), ); assert(&mut p.cargo(cmd)); } // lib specified without artifact let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [dependencies] bar = { path = "bar/", lib = true } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "") .build(); run_cargo_with_and_without_bindeps_feature(&p, "check", &|cargo| { cargo .with_stderr( "\ [ERROR] failed to parse manifest at `[..]/Cargo.toml` Caused by: 'lib' specifier cannot be used without an 'artifact = …' value (bar) ", ) .with_status(101) .run(); }); // target specified without artifact let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [dependencies] bar = { path = "bar/", target = "target" } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) 
.file("bar/src/lib.rs", "") .build(); run_cargo_with_and_without_bindeps_feature(&p, "check", &|cargo| { cargo .with_stderr( "\ [ERROR] failed to parse manifest at `[..]/Cargo.toml` Caused by: 'target' specifier cannot be used without an 'artifact = …' value (bar) ", ) .with_status(101) .run(); }) } #[cargo_test] fn check_with_invalid_target_triple() { // invalid name let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] resolver = "2" [dependencies] bar = { path = "bar/", artifact = "bin", target = "unknown-target-triple" } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/main.rs", "fn main() {}") .build(); p.cargo("check -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_stderr_contains( r#"[..]Could not find specification for target "unknown-target-triple"[..]"#, ) .with_status(101) .run(); } #[cargo_test] fn build_without_nightly_aborts_with_error() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] resolver = "2" [dependencies] bar = { path = "bar/", artifact = "bin" } "#, ) .file("src/lib.rs", "extern crate bar;") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "") .build(); p.cargo("check") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at [..] 
Caused by: `artifact = …` requires `-Z bindeps` (bar) ", ) .run(); } #[cargo_test] fn disallow_artifact_and_no_artifact_dep_to_same_package_within_the_same_dep_category() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] resolver = "2" [dependencies] bar = { path = "bar/", artifact = "bin" } bar_stable = { path = "bar/", package = "bar" } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_bin_manifest("bar")) .file("bar/src/main.rs", "fn main() {}") .build(); p.cargo("check -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_status(101) .with_stderr("\ [WARNING] foo v0.0.0 ([CWD]) ignoring invalid dependency `bar_stable` which is missing a lib target [ERROR] the crate `foo v0.0.0 ([CWD])` depends on crate `bar v0.5.0 ([CWD]/bar)` multiple times with different names", ) .run(); } #[cargo_test] fn features_are_unified_among_lib_and_bin_dep_of_same_target() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] resolver = "2" [dependencies.d1] path = "d1" features = ["d1f1"] artifact = "bin" lib = true [dependencies.d2] path = "d2" features = ["d2f2"] "#, ) .file( "src/main.rs", r#" fn main() { d1::f1(); d1::f2(); d2::f1(); d2::f2(); } "#, ) .file( "d1/Cargo.toml", r#" [package] name = "d1" version = "0.0.1" authors = [] [features] d1f1 = ["d2"] [dependencies.d2] path = "../d2" features = ["d2f1"] optional = true "#, ) .file( "d1/src/main.rs", r#"fn main() { #[cfg(feature = "d1f1")] d2::f1(); // Using f2 is only possible as features are unififed across the same target. // Our own manifest would only enable f1, and f2 comes in because a parent crate // enables the feature in its manifest. #[cfg(feature = "d1f1")] d2::f2(); }"#, ) .file( "d1/src/lib.rs", r#" #[cfg(feature = "d2")] extern crate d2; /// Importing f2 here shouldn't be possible as unless features are unified. 
#[cfg(feature = "d1f1")] pub use d2::{f1, f2}; "#, ) .file( "d2/Cargo.toml", r#" [package] name = "d2" version = "0.0.1" authors = [] [features] d2f1 = [] d2f2 = [] "#, ) .file( "d2/src/lib.rs", r#" #[cfg(feature = "d2f1")] pub fn f1() {} #[cfg(feature = "d2f2")] pub fn f2() {} "#, ) .build(); p.cargo("build -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_stderr( "\ [COMPILING] d2 v0.0.1 ([CWD]/d2) [COMPILING] d1 v0.0.1 ([CWD]/d1) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn features_are_not_unified_among_lib_and_bin_dep_of_different_target() { if cross_compile::disabled() { return; } let target = cross_compile::alternate(); let p = project() .file( "Cargo.toml", &r#" [project] name = "foo" version = "0.0.1" authors = [] resolver = "2" [dependencies.d1] path = "d1" features = ["d1f1"] artifact = "bin" lib = true target = "$TARGET" [dependencies.d2] path = "d2" features = ["d2f2"] "# .replace("$TARGET", target), ) .file( "src/main.rs", r#" fn main() { // the lib = true part always builds for our current target, unifying dependencies d1::d2::f1(); d1::d2::f2(); d2::f1(); d2::f2(); } "#, ) .file( "d1/Cargo.toml", r#" [package] name = "d1" version = "0.0.1" authors = [] [features] d1f1 = ["d2"] [dependencies.d2] path = "../d2" features = ["d2f1"] optional = true "#, ) .file("d1/src/main.rs", r#"fn main() { // f1 we set ourselves d2::f1(); // As 'main' is only compiled as part of the artifact dependency and since that is not unified // if the target differs, trying to access f2 is a compile time error as the feature isn't enabled in our dependency tree. 
d2::f2(); }"#) .file( "d1/src/lib.rs", r#" #[cfg(feature = "d2")] pub extern crate d2; "#, ) .file( "d2/Cargo.toml", r#" [package] name = "d2" version = "0.0.1" authors = [] [features] d2f1 = [] d2f2 = [] "#, ) .file( "d2/src/lib.rs", r#" #[cfg(feature = "d2f1")] pub fn f1() {} #[cfg(feature = "d2f2")] pub fn f2() {} "#, ) .build(); p.cargo("build -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_status(101) .with_stderr_contains( "error[E0425]: cannot find function `f2` in crate `d2`\n --> d1/src/main.rs:6:17", ) .run(); } #[cargo_test] fn feature_resolution_works_for_cfg_target_specification() { if cross_compile::disabled() { return; } let target = cross_compile::alternate(); let p = project() .file( "Cargo.toml", &r#" [project] name = "foo" version = "0.0.1" authors = [] resolver = "2" [dependencies.d1] path = "d1" artifact = "bin" target = "$TARGET" "# .replace("$TARGET", target), ) .file( "src/main.rs", r#" fn main() { let _b = include_bytes!(env!("CARGO_BIN_FILE_D1")); } "#, ) .file( "d1/Cargo.toml", &r#" [package] name = "d1" version = "0.0.1" authors = [] [target.'$TARGET'.dependencies] d2 = { path = "../d2" } "# .replace("$TARGET", target), ) .file( "d1/src/main.rs", r#"fn main() { d1::f(); }"#, ) .file("d1/build.rs", r#"fn main() { }"#) .file( "d1/src/lib.rs", &r#"pub fn f() { #[cfg(target = "$TARGET")] d2::f(); } "# .replace("$TARGET", target), ) .file( "d2/Cargo.toml", r#" [package] name = "d2" version = "0.0.1" authors = [] "#, ) .file("d2/build.rs", r#"fn main() { }"#) .file("d2/src/lib.rs", "pub fn f() {}") .build(); p.cargo("test -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .run(); } #[cargo_test] fn build_script_with_bin_artifacts() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] resolver = "2" [build-dependencies] bar = { path = "bar/", artifact = ["bin", "staticlib", "cdylib"] } "#, ) .file("src/lib.rs", "") .file("build.rs", r#" fn main() { let baz: std::path::PathBuf = 
std::env::var("CARGO_BIN_FILE_BAR_baz").expect("CARGO_BIN_FILE_BAR_baz").into(); println!("{}", baz.display()); assert!(&baz.is_file()); let lib: std::path::PathBuf = std::env::var("CARGO_STATICLIB_FILE_BAR_bar").expect("CARGO_STATICLIB_FILE_BAR_bar").into(); println!("{}", lib.display()); assert!(&lib.is_file()); let lib: std::path::PathBuf = std::env::var("CARGO_CDYLIB_FILE_BAR_bar").expect("CARGO_CDYLIB_FILE_BAR_bar").into(); println!("{}", lib.display()); assert!(&lib.is_file()); let dir: std::path::PathBuf = std::env::var("CARGO_BIN_DIR_BAR").expect("CARGO_BIN_DIR_BAR").into(); println!("{}", dir.display()); assert!(dir.is_dir()); let bar: std::path::PathBuf = std::env::var("CARGO_BIN_FILE_BAR").expect("CARGO_BIN_FILE_BAR").into(); println!("{}", bar.display()); assert!(&bar.is_file()); let bar2: std::path::PathBuf = std::env::var("CARGO_BIN_FILE_BAR_bar").expect("CARGO_BIN_FILE_BAR_bar").into(); println!("{}", bar2.display()); assert_eq!(bar, bar2); } "#) .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.5.0" authors = [] [lib] crate-type = ["staticlib", "cdylib"] "#, ) // compilation target is native for build scripts unless overridden .file("bar/src/bin/bar.rs", &format!(r#"fn main() {{ assert_eq!(std::env::var("TARGET").unwrap(), "{}"); }}"#, cross_compile::native())) .file("bar/src/bin/baz.rs", "fn main() {}") .file("bar/src/lib.rs", "") .build(); p.cargo("build -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_stderr_contains("[COMPILING] foo [..]") .with_stderr_contains("[COMPILING] bar v0.5.0 ([CWD]/bar)") .with_stderr_contains("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") .run(); let build_script_output = build_script_output_string(&p, "foo"); let msg = "we need the binary directory for this artifact along with all binary paths"; if cfg!(target_env = "msvc") { match_exact( "[..]/artifact/bar-[..]/bin/baz.exe\n\ [..]/artifact/bar-[..]/staticlib/bar-[..].lib\n\ [..]/artifact/bar-[..]/cdylib/bar.dll\n\ 
[..]/artifact/bar-[..]/bin\n\ [..]/artifact/bar-[..]/bin/bar.exe\n\ [..]/artifact/bar-[..]/bin/bar.exe", &build_script_output, msg, "", None, ) .unwrap(); } else { match_exact( "[..]/artifact/bar-[..]/bin/baz-[..]\n\ [..]/artifact/bar-[..]/staticlib/libbar-[..].a\n\ [..]/artifact/bar-[..]/cdylib/[..]bar.[..]\n\ [..]/artifact/bar-[..]/bin\n\ [..]/artifact/bar-[..]/bin/bar-[..]\n\ [..]/artifact/bar-[..]/bin/bar-[..]", &build_script_output, msg, "", None, ) .unwrap(); } assert!( !p.bin("bar").is_file(), "artifacts are located in their own directory, exclusively, and won't be lifted up" ); assert!(!p.bin("baz").is_file(),); assert_artifact_executable_output(&p, "debug", "bar", "bar"); } #[cargo_test] fn build_script_with_bin_artifact_and_lib_false() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] resolver = "2" [build-dependencies] bar = { path = "bar/", artifact = "bin" } "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" fn main() { bar::doit() } "#, ) .file("bar/Cargo.toml", &basic_bin_manifest("bar")) .file("bar/src/main.rs", "fn main() { bar::doit(); }") .file( "bar/src/lib.rs", r#" pub fn doit() { panic!("sentinel"); } "#, ) .build(); p.cargo("build -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_status(101) .with_stderr_does_not_contain("[..]sentinel[..]") .run(); } #[cargo_test] fn lib_with_bin_artifact_and_lib_false() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] resolver = "2" [dependencies] bar = { path = "bar/", artifact = "bin" } "#, ) .file( "src/lib.rs", r#" pub fn foo() { bar::doit() }"#, ) .file("bar/Cargo.toml", &basic_bin_manifest("bar")) .file("bar/src/main.rs", "fn main() { bar::doit(); }") .file( "bar/src/lib.rs", r#" pub fn doit() { panic!("sentinel"); } "#, ) .build(); p.cargo("build -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_status(101) .with_stderr_does_not_contain("[..]sentinel[..]") .run(); } 
#[cargo_test] fn build_script_with_selected_dashed_bin_artifact_and_lib_true() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] resolver = "2" [build-dependencies] bar-baz = { path = "bar/", artifact = "bin:baz-suffix", lib = true } "#, ) .file("src/lib.rs", "") .file("build.rs", r#" fn main() { bar_baz::print_env() } "#) .file( "bar/Cargo.toml", r#" [package] name = "bar-baz" version = "0.5.0" authors = [] [[bin]] name = "bar" [[bin]] name = "baz-suffix" "#, ) .file("bar/src/main.rs", "fn main() {}") .file("bar/src/lib.rs", r#" pub fn print_env() { let dir: std::path::PathBuf = std::env::var("CARGO_BIN_DIR_BAR_BAZ").expect("CARGO_BIN_DIR_BAR_BAZ").into(); let bin: std::path::PathBuf = std::env::var("CARGO_BIN_FILE_BAR_BAZ_baz-suffix").expect("CARGO_BIN_FILE_BAR_BAZ_baz-suffix").into(); println!("{}", dir.display()); println!("{}", bin.display()); assert!(dir.is_dir()); assert!(&bin.is_file()); assert!(std::env::var("CARGO_BIN_FILE_BAR_BAZ").is_err(), "CARGO_BIN_FILE_BAR_BAZ isn't set due to name mismatch"); assert!(std::env::var("CARGO_BIN_FILE_BAR_BAZ_bar").is_err(), "CARGO_BIN_FILE_BAR_BAZ_bar isn't set as binary isn't selected"); } "#) .build(); p.cargo("build -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_stderr( "\ [COMPILING] bar-baz v0.5.0 ([CWD]/bar) [COMPILING] foo [..] 
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", ) .run(); let build_script_output = build_script_output_string(&p, "foo"); let msg = "we need the binary directory for this artifact and the binary itself"; if cfg!(target_env = "msvc") { cargo_test_support::compare::match_exact( &format!( "[..]/artifact/bar-baz-[..]/bin\n\ [..]/artifact/bar-baz-[..]/bin/baz_suffix{}", std::env::consts::EXE_SUFFIX, ), &build_script_output, msg, "", None, ) .unwrap(); } else { cargo_test_support::compare::match_exact( "[..]/artifact/bar-baz-[..]/bin\n\ [..]/artifact/bar-baz-[..]/bin/baz_suffix-[..]", &build_script_output, msg, "", None, ) .unwrap(); } assert!( !p.bin("bar").is_file(), "artifacts are located in their own directory, exclusively, and won't be lifted up" ); assert_artifact_executable_output(&p, "debug", "bar", "baz_suffix"); } #[cargo_test] fn lib_with_selected_dashed_bin_artifact_and_lib_true() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] resolver = "2" [dependencies] bar-baz = { path = "bar/", artifact = ["bin:baz-suffix", "staticlib", "cdylib"], lib = true } "#, ) .file( "src/lib.rs", r#" pub fn foo() { bar_baz::exists(); env!("CARGO_BIN_DIR_BAR_BAZ"); let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR_BAZ_baz-suffix")); let _b = include_bytes!(env!("CARGO_STATICLIB_FILE_BAR_BAZ")); let _b = include_bytes!(env!("CARGO_STATICLIB_FILE_BAR_BAZ_bar-baz")); let _b = include_bytes!(env!("CARGO_CDYLIB_FILE_BAR_BAZ")); let _b = include_bytes!(env!("CARGO_CDYLIB_FILE_BAR_BAZ_bar-baz")); } "#, ) .file( "bar/Cargo.toml", r#" [package] name = "bar-baz" version = "0.5.0" authors = [] [lib] crate-type = ["rlib", "staticlib", "cdylib"] [[bin]] name = "bar" [[bin]] name = "baz-suffix" "#, ) .file("bar/src/main.rs", "fn main() {}") .file("bar/src/lib.rs", "pub fn exists() {}") .build(); p.cargo("build -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_stderr( "\ [COMPILING] bar-baz v0.5.0 ([CWD]/bar) [COMPILING] 
foo [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", ) .run(); assert!( !p.bin("bar").is_file(), "artifacts are located in their own directory, exclusively, and won't be lifted up" ); assert_artifact_executable_output(&p, "debug", "bar", "baz_suffix"); } #[cargo_test] fn allow_artifact_and_no_artifact_dep_to_same_package_within_different_dep_categories() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] resolver = "2" [dependencies] bar = { path = "bar/", artifact = "bin" } [dev-dependencies] bar = { path = "bar/", package = "bar" } "#, ) .file( "src/lib.rs", r#" #[cfg(test)] extern crate bar; pub fn foo() { env!("CARGO_BIN_DIR_BAR"); let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR")); }"#, ) .file("bar/Cargo.toml", &basic_bin_manifest("bar")) .file("bar/src/main.rs", "fn main() {}") .file("bar/src/lib.rs", "") .build(); p.cargo("test -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_stderr_contains("[COMPILING] bar v0.5.0 ([CWD]/bar)") .with_stderr_contains("[FINISHED] test [unoptimized + debuginfo] target(s) in [..]") .run(); } #[cargo_test] fn normal_build_deps_are_picked_up_in_presence_of_an_artifact_build_dep_to_the_same_package() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] resolver = "2" [dependencies] bar = { path = "bar", artifact = "bin:bar" } [build-dependencies] bar = { path = "bar" } "#, ) .file("build.rs", "fn main() { bar::f(); }") .file( "src/lib.rs", r#" pub fn foo() { env!("CARGO_BIN_DIR_BAR"); let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR")); }"#, ) .file("bar/Cargo.toml", &basic_bin_manifest("bar")) .file("bar/src/main.rs", "fn main() {}") .file("bar/src/lib.rs", "pub fn f() {}") .build(); p.cargo("check -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .run(); } #[cargo_test] fn disallow_using_example_binaries_as_artifacts() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = 
"0.0.0" authors = [] resolver = "2" [dependencies] bar = { path = "bar/", artifact = "bin:one-example" } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_bin_manifest("bar")) .file("bar/src/main.rs", "fn main() {}") .file("bar/examples/one-example.rs", "fn main() {}") .build(); p.cargo("build -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_status(101) .with_stderr(r#"[ERROR] dependency `bar` in package `foo` requires a `bin:one-example` artifact to be present."#) .run(); } /// From RFC 3028 /// /// > You may also specify separate dependencies with different artifact values, as well as /// dependencies on the same crate without artifact specified; for instance, you may have a /// build dependency on the binary of a crate and a normal dependency on the Rust library of the same crate. #[cargo_test] fn allow_artifact_and_non_artifact_dependency_to_same_crate() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] resolver = "2" [build-dependencies] bar = { path = "bar/", artifact = "bin" } [dependencies] bar = { path = "bar/" } "#, ) .file("src/lib.rs", r#" pub fn foo() { bar::doit(); assert!(option_env!("CARGO_BIN_FILE_BAR").is_none()); }"#) .file( "build.rs", r#" fn main() { assert!(option_env!("CARGO_BIN_FILE_BAR").is_none(), "no environment variables at build time"); std::process::Command::new(std::env::var("CARGO_BIN_FILE_BAR").expect("BAR present")).status().unwrap(); }"#, ) .file("bar/Cargo.toml", &basic_bin_manifest("bar")) .file("bar/src/main.rs", "fn main() {}") .file("bar/src/lib.rs", "pub fn doit() {}") .build(); p.cargo("check -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_stderr_contains("[COMPILING] bar [..]") .with_stderr_contains("[COMPILING] foo [..]") .run(); } #[cargo_test] fn build_script_deps_adopt_specified_target_unconditionally() { if cross_compile::disabled() { return; } let target = cross_compile::alternate(); let p = project() .file( "Cargo.toml", &format!( 
r#" [package] name = "foo" version = "0.0.0" authors = [] resolver = "2" [build-dependencies.bar] path = "bar/" artifact = "bin" target = "{}" "#, target ), ) .file("src/lib.rs", "") .file("build.rs", r#" fn main() { let bar: std::path::PathBuf = std::env::var("CARGO_BIN_FILE_BAR").expect("CARGO_BIN_FILE_BAR").into(); assert!(&bar.is_file()); }"#) .file("bar/Cargo.toml", &basic_bin_manifest("bar")) .file("bar/src/main.rs", "fn main() {}") .file("bar/src/lib.rs", "pub fn doit() {}") .build(); p.cargo("check -v -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_stderr_does_not_contain(format!( "[RUNNING] `rustc --crate-name build_script_build build.rs [..]--target {} [..]", target )) .with_stderr_contains("[RUNNING] `rustc --crate-name build_script_build build.rs [..]") .with_stderr_contains(format!( "[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--target {} [..]", target )) .with_stderr_contains(format!( "[RUNNING] `rustc --crate-name bar bar/src/main.rs [..]--target {} [..]", target )) .with_stderr_does_not_contain(format!( "[RUNNING] `rustc --crate-name foo [..]--target {} [..]", target )) .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]") .run(); } /// inverse RFC-3176 #[cargo_test] fn build_script_deps_adopt_do_not_allow_multiple_targets_under_different_name_and_same_version() { if cross_compile::disabled() { return; } let alternate = cross_compile::alternate(); let native = cross_compile::native(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.0" authors = [] resolver = "2" [build-dependencies.bar] path = "bar/" artifact = "bin" target = "{}" [build-dependencies.bar-native] package = "bar" path = "bar/" artifact = "bin" target = "{}" "#, alternate, native ), ) .file("src/lib.rs", "") .file("build.rs", r#" fn main() { let bar: std::path::PathBuf = std::env::var("CARGO_BIN_FILE_BAR").expect("CARGO_BIN_FILE_BAR").into(); assert!(&bar.is_file()); let bar_native: std::path::PathBuf = 
std::env::var("CARGO_BIN_FILE_BAR_NATIVE_bar").expect("CARGO_BIN_FILE_BAR_NATIVE_bar").into(); assert!(&bar_native.is_file()); assert_ne!(bar_native, bar, "should build different binaries due to different targets"); }"#) .file("bar/Cargo.toml", &basic_bin_manifest("bar")) .file("bar/src/main.rs", "fn main() {}") .build(); p.cargo("check -v -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_status(101) .with_stderr(format!( "error: the crate `foo v0.0.0 ([CWD])` depends on crate `bar v0.5.0 ([CWD]/bar)` multiple times with different names", )) .run(); } #[cargo_test] fn non_build_script_deps_adopt_specified_target_unconditionally() { if cross_compile::disabled() { return; } let target = cross_compile::alternate(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.0" authors = [] resolver = "2" [dependencies.bar] path = "bar/" artifact = "bin" target = "{}" "#, target ), ) .file( "src/lib.rs", r#"pub fn foo() { let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR")); }"#, ) .file("bar/Cargo.toml", &basic_bin_manifest("bar")) .file("bar/src/main.rs", "fn main() {}") .file("bar/src/lib.rs", "pub fn doit() {}") .build(); p.cargo("check -v -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_stderr_contains(format!( "[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--target {} [..]", target )) .with_stderr_contains(format!( "[RUNNING] `rustc --crate-name bar bar/src/main.rs [..]--target {} [..]", target )) .with_stderr_does_not_contain(format!( "[RUNNING] `rustc --crate-name foo [..]--target {} [..]", target )) .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]") .run(); } #[cargo_test] fn no_cross_doctests_works_with_artifacts() { if cross_compile::disabled() { return; } let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] resolver = "2" [dependencies] bar = { path = "bar/", artifact = "bin", lib = true } "#, ) .file( "src/lib.rs", r#" //! ``` //! 
env!("CARGO_BIN_DIR_BAR"); //! let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR")); //! ``` pub fn foo() { env!("CARGO_BIN_DIR_BAR"); let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR")); } "#, ) .file("bar/Cargo.toml", &basic_bin_manifest("bar")) .file("bar/src/lib.rs", r#"pub extern "C" fn c() {}"#) .file("bar/src/main.rs", "fn main() {}") .build(); let target = rustc_host(); p.cargo("test -Z bindeps --target") .arg(&target) .masquerade_as_nightly_cargo(&["bindeps"]) .with_stderr(&format!( "\ [COMPILING] bar v0.5.0 ([CWD]/bar) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/{triple}/debug/deps/foo-[..][EXE]) [DOCTEST] foo ", triple = target )) .run(); println!("c"); let target = cross_compile::alternate(); // This will build the library, but does not build or run doc tests. // This should probably be a warning or error. p.cargo("test -Z bindeps -v --doc --target") .arg(&target) .masquerade_as_nightly_cargo(&["bindeps"]) .with_stderr_contains(format!( "[COMPILING] bar v0.5.0 ([CWD]/bar) [RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--target {triple} [..] [RUNNING] `rustc --crate-name bar bar/src/main.rs [..]--target {triple} [..] [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name foo [..] [FINISHED] test [unoptimized + debuginfo] target(s) in [..]", triple = target )) .run(); if !cross_compile::can_run_on_host() { return; } // This tests the library, but does not run the doc tests. p.cargo("test -Z bindeps -v --target") .arg(&target) .masquerade_as_nightly_cargo(&["bindeps"]) .with_stderr_contains(&format!( "[FRESH] bar v0.5.0 ([CWD]/bar) [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name foo [..]--test[..] [FINISHED] test [unoptimized + debuginfo] target(s) in [..] 
[RUNNING] `[CWD]/target/{triple}/debug/deps/foo-[..][EXE]`", triple = target )) .run(); } #[cargo_test] fn build_script_deps_adopts_target_platform_if_target_equals_target() { if cross_compile::disabled() { return; } let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] resolver = "2" [build-dependencies] bar = { path = "bar/", artifact = "bin", target = "target" } "#, ) .file("src/lib.rs", "") .file("build.rs", r#" fn main() { let bar: std::path::PathBuf = std::env::var("CARGO_BIN_FILE_BAR").expect("CARGO_BIN_FILE_BAR").into(); assert!(&bar.is_file()); }"#) .file("bar/Cargo.toml", &basic_bin_manifest("bar")) .file("bar/src/main.rs", "fn main() {}") .file("bar/src/lib.rs", "pub fn doit() {}") .build(); let alternate_target = cross_compile::alternate(); p.cargo("check -v -Z bindeps --target") .arg(alternate_target) .masquerade_as_nightly_cargo(&["bindeps"]) .with_stderr_does_not_contain(format!( "[RUNNING] `rustc --crate-name build_script_build build.rs [..]--target {} [..]", alternate_target )) .with_stderr_contains("[RUNNING] `rustc --crate-name build_script_build build.rs [..]") .with_stderr_contains(format!( "[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--target {} [..]", alternate_target )) .with_stderr_contains(format!( "[RUNNING] `rustc --crate-name bar bar/src/main.rs [..]--target {} [..]", alternate_target )) .with_stderr_contains(format!( "[RUNNING] `rustc --crate-name foo [..]--target {} [..]", alternate_target )) .run(); } #[cargo_test] // TODO(ST): rename bar (dependency) to something else and un-ignore this with RFC-3176 #[cfg_attr(target_env = "msvc", ignore = "msvc not working")] fn profile_override_basic() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [build-dependencies] bar = { path = "bar", artifact = "bin" } [dependencies] bar = { path = "bar", artifact = "bin" } [profile.dev.build-override] opt-level = 1 [profile.dev] opt-level = 3 "#, ) 
.file("build.rs", "fn main() {}") .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_bin_manifest("bar")) .file("bar/src/main.rs", "fn main() {}") .file("bar/src/lib.rs", "pub fn bar() {}") .build(); p.cargo("build -v -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_stderr_contains( "[RUNNING] `rustc --crate-name build_script_build [..] -C opt-level=1 [..]`", ) .with_stderr_contains( "[RUNNING] `rustc --crate-name bar bar/src/main.rs [..] -C opt-level=3 [..]`", ) .with_stderr_contains( "[RUNNING] `rustc --crate-name bar bar/src/main.rs [..] -C opt-level=1 [..]`", ) .with_stderr_contains( "[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..] -C opt-level=1 [..]`", ) .with_stderr_contains( "[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..] -C opt-level=3 [..]`", ) .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..] -C opt-level=3 [..]`") .run(); } #[cargo_test] fn dependencies_of_dependencies_work_in_artifacts() { Package::new("baz", "1.0.0") .file("src/lib.rs", "pub fn baz() {}") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] resolver = "2" [build-dependencies] bar = { path = "bar/", artifact = "bin" } "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" fn main() { std::process::Command::new(std::env::var("CARGO_BIN_FILE_BAR").expect("BAR present")).status().unwrap(); } "#, ) .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.5.0" authors = [] [dependencies] baz = "1.0.0" "#, ) .file("bar/src/lib.rs", r#"pub fn bar() {baz::baz()}"#) .file("bar/src/main.rs", r#"fn main() {bar::bar()}"#) .build(); p.cargo("build -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .run(); // cargo tree sees artifacts as the dependency kind they are in and doesn't do anything special with it. 
p.cargo("tree -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_stdout( "\ foo v0.0.0 ([CWD]) [build-dependencies] └── bar v0.5.0 ([CWD]/bar) └── baz v1.0.0 ", ) .run(); } // TODO: Fix this potentially by reverting 887562bfeb8c540594d7d08e6e9a4ab7eb255865 which adds artifact information to the registry // followed by 0ff93733626f7cbecaf9dce9ab62b4ced0be088e which picks it up. // For reference, see comments by ehuss https://github.com/rust-lang/cargo/pull/9992#discussion_r801086315 and // joshtriplett https://github.com/rust-lang/cargo/pull/9992#issuecomment-1033394197 . #[cargo_test] #[ignore = "broken, need artifact info in index"] fn targets_are_picked_up_from_non_workspace_artifact_deps() { if cross_compile::disabled() { return; } let target = cross_compile::alternate(); Package::new("artifact", "1.0.0") .file("src/main.rs", r#"fn main() {}"#) .file("src/lib.rs", r#"pub fn lib() {}"#) .publish(); let mut dep = registry::Dependency::new("artifact", "1.0.0"); Package::new("uses-artifact", "1.0.0") .file( "src/lib.rs", r#"pub fn uses_artifact() { let _b = include_bytes!(env!("CARGO_BIN_FILE_ARTIFACT")); }"#, ) .add_dep(dep.artifact("bin", Some(target.to_string()))) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [dependencies] uses-artifact = { version = "1.0.0" } "#, ) .file( "src/lib.rs", r#"pub fn foo() { uses_artifact::uses_artifact(); }"#, ) .build(); p.cargo("build -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .run(); } #[cargo_test] fn allow_dep_renames_with_multiple_versions() { Package::new("bar", "1.0.0") .file("src/main.rs", r#"fn main() {println!("1.0.0")}"#) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] resolver = "2" [build-dependencies] bar = { path = "bar/", artifact = "bin" } bar_stable = { package = "bar", version = "1.0.0", artifact = "bin" } "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" fn 
main() { std::process::Command::new(std::env::var("CARGO_BIN_FILE_BAR").expect("BAR present")).status().unwrap(); std::process::Command::new(std::env::var("CARGO_BIN_FILE_BAR_STABLE_bar").expect("BAR STABLE present")).status().unwrap(); } "#, ) .file("bar/Cargo.toml", &basic_bin_manifest("bar")) .file("bar/src/main.rs", r#"fn main() {println!("0.5.0")}"#) .build(); p.cargo("check -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_stderr_contains("[COMPILING] bar [..]") .with_stderr_contains("[COMPILING] foo [..]") .run(); let build_script_output = build_script_output_string(&p, "foo"); match_exact( "0.5.0\n1.0.0", &build_script_output, "build script output", "", None, ) .unwrap(); } #[cargo_test] fn allow_artifact_and_non_artifact_dependency_to_same_crate_if_these_are_not_the_same_dep_kind() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] resolver = "2" [build-dependencies] bar = { path = "bar/", artifact = "bin", lib = false } [dependencies] bar = { path = "bar/" } "#, ) .file("src/lib.rs", r#" pub fn foo() { bar::doit(); assert!(option_env!("CARGO_BIN_FILE_BAR").is_none()); }"#) .file( "build.rs", r#"fn main() { println!("{}", std::env::var("CARGO_BIN_FILE_BAR").expect("CARGO_BIN_FILE_BAR")); println!("{}", std::env::var("CARGO_BIN_FILE_BAR_bar").expect("CARGO_BIN_FILE_BAR_bar")); }"#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "pub fn doit() {}") .file("bar/src/main.rs", "fn main() {}") .build(); p.cargo("build -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_stderr( "\ [COMPILING] bar [..] [COMPILING] foo [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn prevent_no_lib_warning_with_artifact_dependencies() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] resolver = "2" [dependencies] bar = { path = "bar/", artifact = "bin" } "#, ) .file( "src/lib.rs", r#"pub fn foo() { let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR")); }"#, ) .file("bar/Cargo.toml", &basic_bin_manifest("bar")) .file("bar/src/main.rs", "fn main() {}") .build(); p.cargo("check -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_stderr( "\ [COMPILING] bar v0.5.0 ([CWD]/bar)\n\ [CHECKING] foo v0.0.0 ([CWD])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", ) .run(); } #[cargo_test] fn show_no_lib_warning_with_artifact_dependencies_that_have_no_lib_but_lib_true() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] resolver = "2" [build-dependencies] bar = { path = "bar/", artifact = "bin" } [dependencies] bar = { path = "bar/", artifact = "bin", lib = true } "#, ) .file("src/lib.rs", "") .file("src/build.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_bin_manifest("bar")) .file("bar/src/main.rs", "fn main() {}") .build(); p.cargo("check -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_stderr_contains("[WARNING] foo v0.0.0 ([CWD]) ignoring invalid dependency `bar` which is missing a lib target") .with_stderr_contains("[COMPILING] bar v0.5.0 ([CWD]/bar)") .with_stderr_contains("[CHECKING] foo [..]") .with_stderr_contains("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") .run(); } #[cargo_test] fn resolver_2_build_dep_without_lib() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] edition = "2021" [build-dependencies] bar = { path = "bar/", artifact = "bin" } "#, ) .file("src/lib.rs", "") .file("build.rs", r#" fn main() { let bar: std::path::PathBuf = std::env::var("CARGO_BIN_FILE_BAR").expect("CARGO_BIN_FILE_BAR").into(); 
assert!(&bar.is_file()); }"#) .file("bar/Cargo.toml", &basic_bin_manifest("bar")) .file("bar/src/main.rs", "fn main() {}") .build(); p.cargo("check -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .run(); } #[cargo_test] fn check_missing_crate_type_in_package_fails() { for crate_type in &["cdylib", "staticlib", "bin"] { let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.0" authors = [] [dependencies] bar = {{ path = "bar/", artifact = "{}" }} "#, crate_type ), ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) //no bin, just rlib .file("bar/src/lib.rs", "") .build(); p.cargo("check -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_status(101) .with_stderr( "[ERROR] dependency `bar` in package `foo` requires a `[..]` artifact to be present.", ) .run(); } } #[cargo_test] fn check_target_equals_target_in_non_build_dependency_errors() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] resolver = "2" [dependencies] bar = { path = "bar/", artifact = "bin", target = "target" } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/main.rs", "fn main() {}") .build(); p.cargo("check -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_status(101) .with_stderr_contains( " `target = \"target\"` in normal- or dev-dependencies has no effect (bar)", ) .run(); } #[cargo_test] fn env_vars_and_build_products_for_various_build_targets() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] resolver = "2" [lib] doctest = true [build-dependencies] bar = { path = "bar/", artifact = ["cdylib", "staticlib"] } [dependencies] bar = { path = "bar/", artifact = "bin", lib = true } [dev-dependencies] bar = { path = "bar/", artifact = "bin:baz" } "#, ) .file("build.rs", r#" fn main() { let file: std::path::PathBuf = 
std::env::var("CARGO_CDYLIB_FILE_BAR").expect("CARGO_CDYLIB_FILE_BAR").into(); assert!(&file.is_file()); let file: std::path::PathBuf = std::env::var("CARGO_STATICLIB_FILE_BAR").expect("CARGO_STATICLIB_FILE_BAR").into(); assert!(&file.is_file()); assert!(std::env::var("CARGO_BIN_FILE_BAR").is_err()); assert!(std::env::var("CARGO_BIN_FILE_BAR_baz").is_err()); } "#) .file( "src/lib.rs", r#" //! ``` //! bar::c(); //! env!("CARGO_BIN_DIR_BAR"); //! let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR")); //! let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR_bar")); //! let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR_baz")); //! assert!(option_env!("CARGO_STATICLIB_FILE_BAR").is_none()); //! assert!(option_env!("CARGO_CDYLIB_FILE_BAR").is_none()); //! ``` pub fn foo() { bar::c(); env!("CARGO_BIN_DIR_BAR"); let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR")); let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR_bar")); let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR_baz")); assert!(option_env!("CARGO_STATICLIB_FILE_BAR").is_none()); assert!(option_env!("CARGO_CDYLIB_FILE_BAR").is_none()); } #[cfg(test)] #[test] fn env_unit() { env!("CARGO_BIN_DIR_BAR"); let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR")); let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR_bar")); let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR_baz")); assert!(option_env!("CARGO_STATICLIB_FILE_BAR").is_none()); assert!(option_env!("CARGO_CDYLIB_FILE_BAR").is_none()); } "#, ) .file( "tests/main.rs", r#" #[test] fn env_integration() { env!("CARGO_BIN_DIR_BAR"); let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR")); let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR_bar")); let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR_baz")); }"#, ) .file("build.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.5.0" authors = [] [lib] crate-type = ["staticlib", "cdylib", "rlib"] [[bin]] name = "bar" [[bin]] name = "baz" "#, ) .file("bar/src/lib.rs", r#"pub extern "C" fn c() {}"#) 
.file("bar/src/main.rs", "fn main() {}") .build(); p.cargo("test -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_stderr( "\ [COMPILING] bar [..] [COMPILING] foo [..] [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] unittests [..] [RUNNING] tests/main.rs [..] [DOCTEST] foo ", ) .run(); } #[cargo_test] fn publish_artifact_dep() { registry::init(); Package::new("bar", "1.0.0").publish(); Package::new("baz", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] license = "MIT" description = "foo" documentation = "foo" homepage = "foo" repository = "foo" resolver = "2" [dependencies] bar = { version = "1.0", artifact = "bin", lib = true } [build-dependencies] baz = { version = "1.0", artifact = ["bin:a", "cdylib", "staticlib"], target = "target" } "#, ) .file("src/lib.rs", "") .build(); p.cargo("publish -Z bindeps --no-verify --token sekrit") .masquerade_as_nightly_cargo(&["bindeps"]) .with_stderr( "\ [UPDATING] [..] [PACKAGING] foo v0.1.0 [..] [UPLOADING] foo v0.1.0 [..] 
", ) .run(); publish::validate_upload_with_contents( r#" { "authors": [], "badges": {}, "categories": [], "deps": [{ "default_features": true, "features": [], "kind": "normal", "name": "bar", "optional": false, "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^1.0" }, { "default_features": true, "features": [], "kind": "build", "name": "baz", "optional": false, "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^1.0" } ], "description": "foo", "documentation": "foo", "features": {}, "homepage": "foo", "keywords": [], "license": "MIT", "license_file": null, "links": null, "name": "foo", "readme": null, "readme_file": null, "repository": "foo", "vers": "0.1.0" } "#, "foo-0.1.0.crate", &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"], &[( "Cargo.toml", &format!( r#"{} [package] name = "foo" version = "0.1.0" authors = [] description = "foo" homepage = "foo" documentation = "foo" license = "MIT" repository = "foo" resolver = "2" [dependencies.bar] version = "1.0" artifact = ["bin"] lib = true [build-dependencies.baz] version = "1.0" artifact = [ "bin:a", "cdylib", "staticlib", ] target = "target""#, cargo::core::package::MANIFEST_PREAMBLE ), )], ); } #[cargo_test] fn doc_lib_true() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] resolver = "2" [dependencies.bar] path = "bar" artifact = "bin" lib = true "#, ) .file("src/lib.rs", "extern crate bar; pub fn foo() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "pub fn bar() {}") .file("bar/src/main.rs", "fn main() {}") .build(); p.cargo("doc -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_stderr( "\ [COMPILING] bar v0.0.1 ([CWD]/bar) [DOCUMENTING] bar v0.0.1 ([CWD]/bar) [DOCUMENTING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); assert!(p.root().join("target/doc").is_dir()); assert!(p.root().join("target/doc/foo/index.html").is_file()); assert!(p.root().join("target/doc/bar/index.html").is_file()); // Verify that it emits rmeta for the bin and lib dependency. assert_eq!(p.glob("target/debug/artifact/*.rlib").count(), 0); assert_eq!(p.glob("target/debug/deps/libbar-*.rmeta").count(), 2); p.cargo("doc -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .env("CARGO_LOG", "cargo::ops::cargo_rustc::fingerprint") .with_stdout("") .run(); assert!(p.root().join("target/doc").is_dir()); assert!(p.root().join("target/doc/foo/index.html").is_file()); assert!(p.root().join("target/doc/bar/index.html").is_file()); } #[cargo_test] fn rustdoc_works_on_libs_with_artifacts_and_lib_false() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] resolver = "2" [dependencies.bar] path = "bar" artifact = ["bin", "staticlib", "cdylib"] "#, ) .file( "src/lib.rs", r#" pub fn foo() { env!("CARGO_BIN_DIR_BAR"); let _b = include_bytes!(env!("CARGO_BIN_FILE_BAR")); let _b = include_bytes!(env!("CARGO_CDYLIB_FILE_BAR")); let _b = include_bytes!(env!("CARGO_CDYLIB_FILE_BAR_bar")); let _b = include_bytes!(env!("CARGO_STATICLIB_FILE_BAR")); let _b = include_bytes!(env!("CARGO_STATICLIB_FILE_BAR_bar")); }"#, ) .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.5.0" authors = [] [lib] crate-type = ["staticlib", "cdylib"] "#, ) .file("bar/src/lib.rs", "pub fn bar() {}") .file("bar/src/main.rs", "fn main() {}") .build(); p.cargo("doc -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_stderr( "\ [COMPILING] bar v0.5.0 ([CWD]/bar) [DOCUMENTING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); assert!(p.root().join("target/doc").is_dir()); assert!(p.root().join("target/doc/foo/index.html").is_file()); assert!( !p.root().join("target/doc/bar/index.html").is_file(), "bar is not a lib dependency and thus remains undocumented" ); } fn assert_artifact_executable_output( p: &Project, target_name: &str, dep_name: &str, bin_name: &str, ) { if cfg!(target_env = "msvc") { assert_eq!( p.glob(format!( "target/{}/deps/artifact/{}-*/bin/{}{}", target_name, dep_name, bin_name, std::env::consts::EXE_SUFFIX )) .count(), 1, "artifacts are placed into their own output directory to not possibly clash" ); } else { assert_eq!( p.glob(format!( "target/{}/deps/artifact/{}-*/bin/{}-*{}", target_name, dep_name, bin_name, std::env::consts::EXE_SUFFIX )) .filter_map(Result::ok) .filter(|f| f.extension().map_or(true, |ext| ext != "o" && ext != "d")) .count(), 1, "artifacts are placed into their own output directory to not possibly clash" ); } } fn build_script_output_string(p: &Project, package_name: &str) -> String { let paths = p .glob(format!("target/debug/build/{}-*/output", package_name)) .collect::, _>>() .unwrap(); assert_eq!(paths.len(), 1); std::fs::read_to_string(&paths[0]).unwrap() } #[cargo_test] fn build_script_features_for_shared_dependency() { // When a build script is built and run, its features should match. Here: // // foo // -> artifact on d1 with target // -> common with features f1 // // d1 // -> common with features f2 // // common has features f1 and f2, with a build script. // // When common is built as a dependency of d1, it should have features // `f2` (for the library and the build script). // // When common is built as a dependency of foo, it should have features // `f1` (for the library and the build script). 
if cross_compile::disabled() { return; } let target = cross_compile::alternate(); let p = project() .file( "Cargo.toml", &r#" [project] name = "foo" version = "0.0.1" resolver = "2" [dependencies] d1 = { path = "d1", artifact = "bin", target = "$TARGET" } common = { path = "common", features = ["f1"] } "# .replace("$TARGET", target), ) .file( "src/main.rs", r#" fn main() { let _b = include_bytes!(env!("CARGO_BIN_FILE_D1")); common::f1(); } "#, ) .file( "d1/Cargo.toml", r#" [package] name = "d1" version = "0.0.1" [dependencies] common = { path = "../common", features = ["f2"] } "#, ) .file( "d1/src/main.rs", r#"fn main() { common::f2(); }"#, ) .file( "common/Cargo.toml", r#" [package] name = "common" version = "0.0.1" [features] f1 = [] f2 = [] "#, ) .file( "common/src/lib.rs", r#" #[cfg(feature = "f1")] pub fn f1() {} #[cfg(feature = "f2")] pub fn f2() {} "#, ) .file( "common/build.rs", &r#" use std::env::var_os; fn main() { assert_eq!(var_os("CARGO_FEATURE_F1").is_some(), cfg!(feature="f1")); assert_eq!(var_os("CARGO_FEATURE_F2").is_some(), cfg!(feature="f2")); if std::env::var("TARGET").unwrap() == "$TARGET" { assert!(var_os("CARGO_FEATURE_F1").is_none()); assert!(var_os("CARGO_FEATURE_F2").is_some()); } else { assert!(var_os("CARGO_FEATURE_F1").is_some()); assert!(var_os("CARGO_FEATURE_F2").is_none()); } } "# .replace("$TARGET", target), ) .build(); p.cargo("build -Z bindeps -v") .masquerade_as_nightly_cargo(&["bindeps"]) .run(); } cargo-0.66.0/tests/testsuite/bad_config.rs000066400000000000000000001003031432416201200205020ustar00rootroot00000000000000//! Tests for some invalid .cargo/config files. 
use cargo_test_support::registry::Package; use cargo_test_support::{basic_manifest, project, rustc_host}; #[cargo_test] fn bad1() { let p = project() .file("src/lib.rs", "") .file( ".cargo/config", r#" [target] nonexistent-target = "foo" "#, ) .build(); p.cargo("build -v --target=nonexistent-target") .with_status(101) .with_stderr( "\ [ERROR] invalid configuration for key `target.nonexistent-target` expected a table, but found a string for `[..]` in [..]config ", ) .run(); } #[cargo_test] fn bad2() { let p = project() .file("src/lib.rs", "") .file( ".cargo/config", r#" [http] proxy = 3.0 "#, ) .build(); p.cargo("publish -v") .with_status(101) .with_stderr( "\ [ERROR] could not load Cargo configuration Caused by: failed to load TOML configuration from `[..]config` Caused by: failed to parse key `http` Caused by: failed to parse key `proxy` Caused by: found TOML configuration value of unknown type `float` ", ) .run(); } #[cargo_test] fn bad3() { let p = project() .file("src/lib.rs", "") .file( ".cargo/config", r#" [http] proxy = true "#, ) .build(); Package::new("foo", "1.0.0").publish(); p.cargo("publish -v") .with_status(101) .with_stderr( "\ error: failed to update registry [..] Caused by: error in [..]config: `http.proxy` expected a string, but found a boolean ", ) .run(); } #[cargo_test] fn bad4() { let p = project() .file( ".cargo/config", r#" [cargo-new] vcs = false "#, ) .build(); p.cargo("new -v foo") .with_status(101) .with_stderr( "\ [ERROR] Failed to create package `foo` at `[..]` Caused by: error in [..]config: `cargo-new.vcs` expected a string, but found a boolean ", ) .run(); } #[cargo_test] fn bad6() { let p = project() .file("src/lib.rs", "") .file( ".cargo/config", r#" [http] user-agent = true "#, ) .build(); Package::new("foo", "1.0.0").publish(); p.cargo("publish -v") .with_status(101) .with_stderr( "\ error: failed to update registry [..] 
Caused by: error in [..]config: `http.user-agent` expected a string, but found a boolean ", ) .run(); } #[cargo_test] fn invalid_global_config() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [dependencies] foo = "0.1.0" "#, ) .file(".cargo/config", "4") .file("src/lib.rs", "") .build(); p.cargo("build -v") .with_status(101) .with_stderr( "\ [ERROR] could not load Cargo configuration Caused by: could not parse TOML configuration in `[..]` Caused by: could not parse input as TOML Caused by: TOML parse error at line 1, column 2 | 1 | 4 | ^ Unexpected end of input Expected `.` or `=` ", ) .run(); } #[cargo_test] fn bad_cargo_lock() { let p = project() .file("Cargo.lock", "[[package]]\nfoo = 92") .file("src/lib.rs", "") .build(); p.cargo("build -v") .with_status(101) .with_stderr( "\ [ERROR] failed to parse lock file at: [..]Cargo.lock Caused by: missing field `name` for key `package` ", ) .run(); } #[cargo_test] fn duplicate_packages_in_cargo_lock() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" "#, ) .file("src/lib.rs", "") .file( "Cargo.lock", r#" [[package]] name = "foo" version = "0.0.1" dependencies = [ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "bar" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "bar" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" "#, ) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse lock file at: [..] 
Caused by: package `bar` is specified twice in the lockfile ", ) .run(); } #[cargo_test] fn bad_source_in_cargo_lock() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" "#, ) .file("src/lib.rs", "") .file( "Cargo.lock", r#" [[package]] name = "foo" version = "0.0.1" dependencies = [ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "bar" version = "0.1.0" source = "You shall not parse" "#, ) .build(); p.cargo("build --verbose") .with_status(101) .with_stderr( "\ [ERROR] failed to parse lock file at: [..] Caused by: invalid source `You shall not parse` for key `package.source` ", ) .run(); } #[cargo_test] fn bad_dependency_in_lockfile() { let p = project() .file("src/lib.rs", "") .file( "Cargo.lock", r#" [[package]] name = "foo" version = "0.0.1" dependencies = [ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] "#, ) .build(); p.cargo("build").run(); } #[cargo_test] fn bad_git_dependency() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [dependencies] foo = { git = "file:.." } "#, ) .file("src/lib.rs", "") .build(); p.cargo("build -v") .with_status(101) .with_stderr( "\ [UPDATING] git repository `file:///` [ERROR] failed to get `foo` as a dependency of package `foo v0.0.0 [..]` Caused by: failed to load source for dependency `foo` Caused by: Unable to update file:/// Caused by: failed to clone into: [..] Caused by: [..]'file:///' is not a valid local file URI[..] 
", ) .run(); } #[cargo_test] fn bad_crate_type() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [lib] crate-type = ["bad_type", "rlib"] "#, ) .file("src/lib.rs", "") .build(); p.cargo("build -v") .with_status(101) .with_stderr_contains( "error: failed to run `rustc` to learn about crate-type bad_type information", ) .run(); } #[cargo_test] fn malformed_override() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [target.x86_64-apple-darwin.freetype] native = { foo: "bar" } "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: could not parse input as TOML Caused by: TOML parse error at line 8, column 27 | 8 | native = { | ^ Unexpected ` ` Expected key ", ) .run(); } #[cargo_test] fn duplicate_binary_names() { let p = project() .file( "Cargo.toml", r#" [package] name = "qqq" version = "0.1.0" authors = ["A "] [[bin]] name = "e" path = "a.rs" [[bin]] name = "e" path = "b.rs" "#, ) .file("a.rs", r#"fn main() -> () {}"#) .file("b.rs", r#"fn main() -> () {}"#) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: found duplicate binary name e, but all binary targets must have a unique name ", ) .run(); } #[cargo_test] fn duplicate_example_names() { let p = project() .file( "Cargo.toml", r#" [package] name = "qqq" version = "0.1.0" authors = ["A "] [[example]] name = "ex" path = "examples/ex.rs" [[example]] name = "ex" path = "examples/ex2.rs" "#, ) .file("examples/ex.rs", r#"fn main () -> () {}"#) .file("examples/ex2.rs", r#"fn main () -> () {}"#) .build(); p.cargo("build --example ex") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: found duplicate example name ex, but all example targets must have a unique name ", ) .run(); } #[cargo_test] fn duplicate_bench_names() { let p = 
project() .file( "Cargo.toml", r#" [package] name = "qqq" version = "0.1.0" authors = ["A "] [[bench]] name = "ex" path = "benches/ex.rs" [[bench]] name = "ex" path = "benches/ex2.rs" "#, ) .file("benches/ex.rs", r#"fn main () {}"#) .file("benches/ex2.rs", r#"fn main () {}"#) .build(); p.cargo("bench") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: found duplicate bench name ex, but all bench targets must have a unique name ", ) .run(); } #[cargo_test] fn duplicate_deps() { let p = project() .file("shim-bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("shim-bar/src/lib.rs", "pub fn a() {}") .file("linux-bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("linux-bar/src/lib.rs", "pub fn a() {}") .file( "Cargo.toml", r#" [package] name = "qqq" version = "0.0.1" authors = [] [dependencies] bar = { path = "shim-bar" } [target.x86_64-unknown-linux-gnu.dependencies] bar = { path = "linux-bar" } "#, ) .file("src/main.rs", r#"fn main () {}"#) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: Dependency 'bar' has different source paths depending on the build target. Each dependency must \ have a single canonical source path irrespective of build target. 
", ) .run(); } #[cargo_test] fn duplicate_deps_diff_sources() { let p = project() .file("shim-bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("shim-bar/src/lib.rs", "pub fn a() {}") .file("linux-bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("linux-bar/src/lib.rs", "pub fn a() {}") .file( "Cargo.toml", r#" [package] name = "qqq" version = "0.0.1" authors = [] [target.i686-unknown-linux-gnu.dependencies] bar = { path = "shim-bar" } [target.x86_64-unknown-linux-gnu.dependencies] bar = { path = "linux-bar" } "#, ) .file("src/main.rs", r#"fn main () {}"#) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: Dependency 'bar' has different source paths depending on the build target. Each dependency must \ have a single canonical source path irrespective of build target. ", ) .run(); } #[cargo_test] fn unused_keys() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [target.foo] bar = "3" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_stderr( "\ warning: unused manifest key: target.foo.bar [COMPILING] foo v0.1.0 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] bulid = "foo" "#, ) .file("src/lib.rs", "pub fn foo() {}") .build(); p.cargo("build") .with_stderr( "\ warning: unused manifest key: project.bulid [COMPILING] foo [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); let p = project() .at("bar") .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [lib] build = "foo" "#, ) .file("src/lib.rs", "pub fn foo() {}") .build(); p.cargo("build") .with_stderr( "\ warning: unused manifest key: lib.build [COMPILING] foo [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn unused_keys_in_virtual_manifest() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar"] bulid = "foo" "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "") .build(); p.cargo("build --workspace") .with_stderr( "\ [WARNING] [..]/foo/Cargo.toml: unused manifest key: workspace.bulid [COMPILING] bar [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn empty_dependencies() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [dependencies] bar = {} "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("bar", "0.0.1").publish(); p.cargo("build") .with_stderr_contains( "\ warning: dependency (bar) specified without providing a local path, Git repository, or version \ to use. This will be considered an error in future versions ", ) .run(); } #[cargo_test] fn invalid_toml_historically_allowed_fails() { let p = project() .file(".cargo/config", "[bar] baz = 2") .file("src/main.rs", "fn main() {}") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: could not load Cargo configuration Caused by: could not parse TOML configuration in `[..]` Caused by: could not parse input as TOML Caused by: TOML parse error at line 1, column 7 | 1 | [bar] baz = 2 | ^ Unexpected `b` Expected newline or end of input While parsing a Table Header ", ) .run(); } #[cargo_test] fn ambiguous_git_reference() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [dependencies.bar] git = "http://127.0.0.1" branch = "master" tag = "some-tag" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build -v") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: dependency (bar) specification is ambiguous. Only one of `branch`, `tag` or `rev` is allowed. 
", ) .run(); } #[cargo_test] fn fragment_in_git_url() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [dependencies.bar] git = "http://127.0.0.1#foo" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build -v") .with_status(101) .with_stderr_contains( "\ [WARNING] URL fragment `#foo` in git URL is ignored for dependency (bar). \ If you were trying to specify a specific git revision, \ use `rev = \"foo\"` in the dependency declaration. ", ) .run(); } #[cargo_test] fn bad_source_config1() { let p = project() .file("src/lib.rs", "") .file(".cargo/config", "[source.foo]") .build(); p.cargo("build") .with_status(101) .with_stderr("error: no source location specified for `source.foo`, need [..]") .run(); } #[cargo_test] fn bad_source_config2() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [dependencies] bar = "*" "#, ) .file("src/lib.rs", "") .file( ".cargo/config", r#" [source.crates-io] registry = 'http://example.com' replace-with = 'bar' "#, ) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to get `bar` as a dependency of package `foo v0.0.0 [..]` Caused by: failed to load source for dependency `bar` Caused by: Unable to update registry `crates-io` Caused by: could not find a configured source with the name `bar` \ when attempting to lookup `crates-io` (configuration in [..]) ", ) .run(); } #[cargo_test] fn bad_source_config3() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [dependencies] bar = "*" "#, ) .file("src/lib.rs", "") .file( ".cargo/config", r#" [source.crates-io] registry = 'https://example.com' replace-with = 'crates-io' "#, ) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to get `bar` as a dependency of package `foo v0.0.0 [..]` Caused by: failed to load source for dependency `bar` Caused by: Unable to update registry `crates-io` Caused by: 
detected a cycle of `replace-with` sources, [..] ", ) .run(); } #[cargo_test] fn bad_source_config4() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [dependencies] bar = "*" "#, ) .file("src/lib.rs", "") .file( ".cargo/config", r#" [source.crates-io] replace-with = 'bar' [source.bar] registry = 'https://example.com' replace-with = 'crates-io' "#, ) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to get `bar` as a dependency of package `foo v0.0.0 ([..])` Caused by: failed to load source for dependency `bar` Caused by: Unable to update registry `crates-io` Caused by: detected a cycle of `replace-with` sources, the source `crates-io` is \ eventually replaced with itself (configuration in [..]) ", ) .run(); } #[cargo_test] fn bad_source_config5() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [dependencies] bar = "*" "#, ) .file("src/lib.rs", "") .file( ".cargo/config", r#" [source.crates-io] registry = 'https://example.com' replace-with = 'bar' [source.bar] registry = 'not a url' "#, ) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: configuration key `source.bar.registry` specified an invalid URL (in [..]) Caused by: invalid url `not a url`: [..] ", ) .run(); } #[cargo_test] fn both_git_and_path_specified() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [dependencies.bar] git = "http://127.0.0.1" path = "bar" "#, ) .file("src/lib.rs", "") .build(); foo.cargo("build -v") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[..]` Caused by: dependency (bar) specification is ambiguous. Only one of `git` or `path` is allowed. 
", ) .run(); } #[cargo_test] fn bad_source_config6() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [dependencies] bar = "*" "#, ) .file("src/lib.rs", "") .file( ".cargo/config", r#" [source.crates-io] registry = 'https://example.com' replace-with = ['not', 'a', 'string'] "#, ) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] error in [..]/foo/.cargo/config: could not load config key `source.crates-io.replace-with` Caused by: error in [..]/foo/.cargo/config: `source.crates-io.replace-with` expected a string, but found a array " ) .run(); } #[cargo_test] fn ignored_git_revision() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [dependencies.bar] path = "bar" branch = "spam" "#, ) .file("src/lib.rs", "") .build(); foo.cargo("build -v") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[..]` Caused by: key `branch` is ignored for dependency (bar). 
", ) .run(); } #[cargo_test] fn bad_source_config7() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [dependencies] bar = "*" "#, ) .file("src/lib.rs", "") .file( ".cargo/config", r#" [source.foo] registry = 'https://example.com' local-registry = 'file:///another/file' "#, ) .build(); Package::new("bar", "0.1.0").publish(); p.cargo("build") .with_status(101) .with_stderr("error: more than one source location specified for `source.foo`") .run(); } #[cargo_test] fn bad_source_config8() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [dependencies] bar = "*" "#, ) .file("src/lib.rs", "") .file( ".cargo/config", r#" [source.foo] branch = "somebranch" "#, ) .build(); p.cargo("build") .with_status(101) .with_stderr( "[ERROR] source definition `source.foo` specifies `branch`, \ but that requires a `git` key to be specified (in [..]/foo/.cargo/config)", ) .run(); } #[cargo_test] fn bad_dependency() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [dependencies] bar = 3 "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[..]` Caused by: invalid type: integer `3`, expected a version string like [..] ", ) .run(); } #[cargo_test] fn bad_debuginfo() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [profile.dev] debug = 'a' "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[..]` Caused by: expected a boolean or an integer for [..] 
", ) .run(); } #[cargo_test] fn bad_opt_level() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] build = 3 "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[..]` Caused by: expected a boolean or a string for key [..] ", ) .run(); } #[cargo_test] fn warn_semver_metadata() { Package::new("bar", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.0.0" [dependencies] bar = "1.0.0+1234" "#, ) .file("src/lib.rs", "") .build(); p.cargo("check") .with_stderr_contains("[WARNING] version requirement `1.0.0+1234` for dependency `bar`[..]") .run(); } #[cargo_test] fn bad_target_cfg() { // Invalid type in a StringList. // // The error message is a bit unfortunate here. The type here ends up // being essentially Value>, and each layer of "Value" // adds some context to the error message. Also, untagged enums provide // strange error messages. Hopefully most users will be able to untangle // the message. let p = project() .file( ".cargo/config", r#" [target.'cfg(not(target_os = "none"))'] runner = false "#, ) .file("src/lib.rs", "") .build(); p.cargo("check") .with_status(101) .with_stderr( "\ [ERROR] error in [..]/foo/.cargo/config: \ could not load config key `target.\"cfg(not(target_os = \\\"none\\\"))\".runner` Caused by: error in [..]/foo/.cargo/config: \ could not load config key `target.\"cfg(not(target_os = \\\"none\\\"))\".runner` Caused by: invalid configuration for key `target.\"cfg(not(target_os = \\\"none\\\"))\".runner` expected a string or array of strings, but found a boolean for \ `target.\"cfg(not(target_os = \\\"none\\\"))\".runner` in [..]/foo/.cargo/config ", ) .run(); } #[cargo_test] fn bad_target_links_overrides() { // Invalid parsing of links overrides. // // This error message is terrible. 
Nothing in the deserialization path is // using config::Value<>, so nothing is able to report the location. I // think this illustrates how the way things break down with how it // currently is designed with serde. let p = project() .file( ".cargo/config", &format!( r#" [target.{}.somelib] rustc-flags = 'foo' "#, rustc_host() ), ) .file("src/lib.rs", "") .build(); p.cargo("check") .with_status(101) .with_stderr( "[ERROR] Only `-l` and `-L` flags are allowed in target config \ `target.[..].rustc-flags` (in [..]foo/.cargo/config): `foo`", ) .run(); p.change_file( ".cargo/config", &format!( "[target.{}.somelib] warning = \"foo\" ", rustc_host(), ), ); p.cargo("check") .with_status(101) .with_stderr("[ERROR] `warning` is not supported in build script overrides") .run(); } #[cargo_test] fn redefined_sources() { // Cannot define a source multiple times. let p = project() .file( ".cargo/config", r#" [source.foo] registry = "https://github.com/rust-lang/crates.io-index" "#, ) .file("src/lib.rs", "") .build(); p.cargo("check") .with_status(101) .with_stderr( "\ [ERROR] source `foo` defines source registry `crates-io`, \ but that source is already defined by `crates-io` note: Sources are not allowed to be defined multiple times. ", ) .run(); p.change_file( ".cargo/config", r#" [source.one] directory = "index" [source.two] directory = "index" "#, ); // Name is `[..]` because we can't guarantee the order. p.cargo("check") .with_status(101) .with_stderr( "\ [ERROR] source `[..]` defines source dir [..]/foo/index, \ but that source is already defined by `[..]` note: Sources are not allowed to be defined multiple times. ", ) .run(); } cargo-0.66.0/tests/testsuite/bad_manifest_path.rs000066400000000000000000000216511432416201200220670ustar00rootroot00000000000000//! Tests for invalid --manifest-path arguments. 
use cargo_test_support::{basic_bin_manifest, main_file, project}; #[track_caller] fn assert_not_a_cargo_toml(command: &str, manifest_path_argument: &str) { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo(command) .arg("--manifest-path") .arg(manifest_path_argument) .cwd(p.root().parent().unwrap()) .with_status(101) .with_stderr( "[ERROR] the manifest-path must be a path \ to a Cargo.toml file", ) .run(); } #[track_caller] fn assert_cargo_toml_doesnt_exist(command: &str, manifest_path_argument: &str) { let p = project().build(); let expected_path = manifest_path_argument .split('/') .collect::>() .join("[..]"); p.cargo(command) .arg("--manifest-path") .arg(manifest_path_argument) .cwd(p.root().parent().unwrap()) .with_status(101) .with_stderr(format!( "[ERROR] manifest path `{}` does not exist", expected_path )) .run(); } #[cargo_test] fn bench_dir_containing_cargo_toml() { assert_not_a_cargo_toml("bench", "foo"); } #[cargo_test] fn bench_dir_plus_file() { assert_not_a_cargo_toml("bench", "foo/bar"); } #[cargo_test] fn bench_dir_plus_path() { assert_not_a_cargo_toml("bench", "foo/bar/baz"); } #[cargo_test] fn bench_dir_to_nonexistent_cargo_toml() { assert_cargo_toml_doesnt_exist("bench", "foo/bar/baz/Cargo.toml"); } #[cargo_test] fn build_dir_containing_cargo_toml() { assert_not_a_cargo_toml("build", "foo"); } #[cargo_test] fn build_dir_plus_file() { assert_not_a_cargo_toml("bench", "foo/bar"); } #[cargo_test] fn build_dir_plus_path() { assert_not_a_cargo_toml("bench", "foo/bar/baz"); } #[cargo_test] fn build_dir_to_nonexistent_cargo_toml() { assert_cargo_toml_doesnt_exist("build", "foo/bar/baz/Cargo.toml"); } #[cargo_test] fn clean_dir_containing_cargo_toml() { assert_not_a_cargo_toml("clean", "foo"); } #[cargo_test] fn clean_dir_plus_file() { assert_not_a_cargo_toml("clean", "foo/bar"); } #[cargo_test] fn clean_dir_plus_path() { assert_not_a_cargo_toml("clean", 
"foo/bar/baz"); } #[cargo_test] fn clean_dir_to_nonexistent_cargo_toml() { assert_cargo_toml_doesnt_exist("clean", "foo/bar/baz/Cargo.toml"); } #[cargo_test] fn doc_dir_containing_cargo_toml() { assert_not_a_cargo_toml("doc", "foo"); } #[cargo_test] fn doc_dir_plus_file() { assert_not_a_cargo_toml("doc", "foo/bar"); } #[cargo_test] fn doc_dir_plus_path() { assert_not_a_cargo_toml("doc", "foo/bar/baz"); } #[cargo_test] fn doc_dir_to_nonexistent_cargo_toml() { assert_cargo_toml_doesnt_exist("doc", "foo/bar/baz/Cargo.toml"); } #[cargo_test] fn fetch_dir_containing_cargo_toml() { assert_not_a_cargo_toml("fetch", "foo"); } #[cargo_test] fn fetch_dir_plus_file() { assert_not_a_cargo_toml("fetch", "foo/bar"); } #[cargo_test] fn fetch_dir_plus_path() { assert_not_a_cargo_toml("fetch", "foo/bar/baz"); } #[cargo_test] fn fetch_dir_to_nonexistent_cargo_toml() { assert_cargo_toml_doesnt_exist("fetch", "foo/bar/baz/Cargo.toml"); } #[cargo_test] fn generate_lockfile_dir_containing_cargo_toml() { assert_not_a_cargo_toml("generate-lockfile", "foo"); } #[cargo_test] fn generate_lockfile_dir_plus_file() { assert_not_a_cargo_toml("generate-lockfile", "foo/bar"); } #[cargo_test] fn generate_lockfile_dir_plus_path() { assert_not_a_cargo_toml("generate-lockfile", "foo/bar/baz"); } #[cargo_test] fn generate_lockfile_dir_to_nonexistent_cargo_toml() { assert_cargo_toml_doesnt_exist("generate-lockfile", "foo/bar/baz/Cargo.toml"); } #[cargo_test] fn package_dir_containing_cargo_toml() { assert_not_a_cargo_toml("package", "foo"); } #[cargo_test] fn package_dir_plus_file() { assert_not_a_cargo_toml("package", "foo/bar"); } #[cargo_test] fn package_dir_plus_path() { assert_not_a_cargo_toml("package", "foo/bar/baz"); } #[cargo_test] fn package_dir_to_nonexistent_cargo_toml() { assert_cargo_toml_doesnt_exist("package", "foo/bar/baz/Cargo.toml"); } #[cargo_test] fn pkgid_dir_containing_cargo_toml() { assert_not_a_cargo_toml("pkgid", "foo"); } #[cargo_test] fn pkgid_dir_plus_file() { 
assert_not_a_cargo_toml("pkgid", "foo/bar"); } #[cargo_test] fn pkgid_dir_plus_path() { assert_not_a_cargo_toml("pkgid", "foo/bar/baz"); } #[cargo_test] fn pkgid_dir_to_nonexistent_cargo_toml() { assert_cargo_toml_doesnt_exist("pkgid", "foo/bar/baz/Cargo.toml"); } #[cargo_test] fn publish_dir_containing_cargo_toml() { assert_not_a_cargo_toml("publish", "foo"); } #[cargo_test] fn publish_dir_plus_file() { assert_not_a_cargo_toml("publish", "foo/bar"); } #[cargo_test] fn publish_dir_plus_path() { assert_not_a_cargo_toml("publish", "foo/bar/baz"); } #[cargo_test] fn publish_dir_to_nonexistent_cargo_toml() { assert_cargo_toml_doesnt_exist("publish", "foo/bar/baz/Cargo.toml"); } #[cargo_test] fn read_manifest_dir_containing_cargo_toml() { assert_not_a_cargo_toml("read-manifest", "foo"); } #[cargo_test] fn read_manifest_dir_plus_file() { assert_not_a_cargo_toml("read-manifest", "foo/bar"); } #[cargo_test] fn read_manifest_dir_plus_path() { assert_not_a_cargo_toml("read-manifest", "foo/bar/baz"); } #[cargo_test] fn read_manifest_dir_to_nonexistent_cargo_toml() { assert_cargo_toml_doesnt_exist("read-manifest", "foo/bar/baz/Cargo.toml"); } #[cargo_test] fn run_dir_containing_cargo_toml() { assert_not_a_cargo_toml("run", "foo"); } #[cargo_test] fn run_dir_plus_file() { assert_not_a_cargo_toml("run", "foo/bar"); } #[cargo_test] fn run_dir_plus_path() { assert_not_a_cargo_toml("run", "foo/bar/baz"); } #[cargo_test] fn run_dir_to_nonexistent_cargo_toml() { assert_cargo_toml_doesnt_exist("run", "foo/bar/baz/Cargo.toml"); } #[cargo_test] fn rustc_dir_containing_cargo_toml() { assert_not_a_cargo_toml("rustc", "foo"); } #[cargo_test] fn rustc_dir_plus_file() { assert_not_a_cargo_toml("rustc", "foo/bar"); } #[cargo_test] fn rustc_dir_plus_path() { assert_not_a_cargo_toml("rustc", "foo/bar/baz"); } #[cargo_test] fn rustc_dir_to_nonexistent_cargo_toml() { assert_cargo_toml_doesnt_exist("rustc", "foo/bar/baz/Cargo.toml"); } #[cargo_test] fn test_dir_containing_cargo_toml() { 
assert_not_a_cargo_toml("test", "foo"); } #[cargo_test] fn test_dir_plus_file() { assert_not_a_cargo_toml("test", "foo/bar"); } #[cargo_test] fn test_dir_plus_path() { assert_not_a_cargo_toml("test", "foo/bar/baz"); } #[cargo_test] fn test_dir_to_nonexistent_cargo_toml() { assert_cargo_toml_doesnt_exist("test", "foo/bar/baz/Cargo.toml"); } #[cargo_test] fn update_dir_containing_cargo_toml() { assert_not_a_cargo_toml("update", "foo"); } #[cargo_test] fn update_dir_plus_file() { assert_not_a_cargo_toml("update", "foo/bar"); } #[cargo_test] fn update_dir_plus_path() { assert_not_a_cargo_toml("update", "foo/bar/baz"); } #[cargo_test] fn update_dir_to_nonexistent_cargo_toml() { assert_cargo_toml_doesnt_exist("update", "foo/bar/baz/Cargo.toml"); } #[cargo_test] fn verify_project_dir_containing_cargo_toml() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("verify-project --manifest-path foo") .cwd(p.root().parent().unwrap()) .with_status(1) .with_stdout( "{\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\ ", ) .run(); } #[cargo_test] fn verify_project_dir_plus_file() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("verify-project --manifest-path foo/bar") .cwd(p.root().parent().unwrap()) .with_status(1) .with_stdout( "{\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\ ", ) .run(); } #[cargo_test] fn verify_project_dir_plus_path() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("verify-project --manifest-path foo/bar/baz") .cwd(p.root().parent().unwrap()) .with_status(1) .with_stdout( "{\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\ ", ) .run(); } #[cargo_test] fn verify_project_dir_to_nonexistent_cargo_toml() { let p = 
project().build(); p.cargo("verify-project --manifest-path foo/bar/baz/Cargo.toml") .cwd(p.root().parent().unwrap()) .with_status(1) .with_stdout( "{\"invalid\":\"manifest path `foo[..]bar[..]baz[..]Cargo.toml` does not exist\"}\ ", ) .run(); } cargo-0.66.0/tests/testsuite/bench.rs000066400000000000000000001261611432416201200175200ustar00rootroot00000000000000//! Tests for the `cargo bench` command. use cargo_test_support::paths::CargoPathExt; use cargo_test_support::{basic_bin_manifest, basic_lib_manifest, basic_manifest, project}; #[cargo_test(nightly, reason = "bench")] fn cargo_bench_simple() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file( "src/main.rs", r#" #![feature(test)] #[cfg(test)] extern crate test; fn hello() -> &'static str { "hello" } pub fn main() { println!("{}", hello()) } #[bench] fn bench_hello(_b: &mut test::Bencher) { assert_eq!(hello(), "hello") } "#, ) .build(); p.cargo("build").run(); assert!(p.bin("foo").is_file()); p.process(&p.bin("foo")).with_stdout("hello\n").run(); p.cargo("bench") .with_stderr( "\ [COMPILING] foo v0.5.0 ([CWD]) [FINISHED] bench [optimized] target(s) in [..] [RUNNING] [..] (target/release/deps/foo-[..][EXE])", ) .with_stdout_contains("test bench_hello ... bench: [..]") .run(); } #[cargo_test(nightly, reason = "bench")] fn bench_bench_implicit() { let p = project() .file( "src/main.rs", r#" #![feature(test)] #[cfg(test)] extern crate test; #[bench] fn run1(_ben: &mut test::Bencher) { } fn main() { println!("Hello main!"); } "#, ) .file( "tests/other.rs", r#" #![feature(test)] extern crate test; #[bench] fn run3(_ben: &mut test::Bencher) { } "#, ) .file( "benches/mybench.rs", r#" #![feature(test)] extern crate test; #[bench] fn run2(_ben: &mut test::Bencher) { } "#, ) .build(); p.cargo("bench --benches") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] bench [optimized] target(s) in [..] [RUNNING] [..] (target/release/deps/foo-[..][EXE]) [RUNNING] [..] 
(target/release/deps/mybench-[..][EXE]) ", ) .with_stdout_contains("test run2 ... bench: [..]") .run(); } #[cargo_test(nightly, reason = "bench")] fn bench_bin_implicit() { let p = project() .file( "src/main.rs", r#" #![feature(test)] #[cfg(test)] extern crate test; #[bench] fn run1(_ben: &mut test::Bencher) { } fn main() { println!("Hello main!"); } "#, ) .file( "tests/other.rs", r#" #![feature(test)] extern crate test; #[bench] fn run3(_ben: &mut test::Bencher) { } "#, ) .file( "benches/mybench.rs", r#" #![feature(test)] extern crate test; #[bench] fn run2(_ben: &mut test::Bencher) { } "#, ) .build(); p.cargo("bench --bins") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] bench [optimized] target(s) in [..] [RUNNING] [..] (target/release/deps/foo-[..][EXE]) ", ) .with_stdout_contains("test run1 ... bench: [..]") .run(); } #[cargo_test(nightly, reason = "bench")] fn bench_tarname() { let p = project() .file( "benches/bin1.rs", r#" #![feature(test)] extern crate test; #[bench] fn run1(_ben: &mut test::Bencher) { } "#, ) .file( "benches/bin2.rs", r#" #![feature(test)] extern crate test; #[bench] fn run2(_ben: &mut test::Bencher) { } "#, ) .build(); p.cargo("bench --bench bin2") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] bench [optimized] target(s) in [..] [RUNNING] [..] (target/release/deps/bin2-[..][EXE]) ", ) .with_stdout_contains("test run2 ... bench: [..]") .run(); } #[cargo_test(nightly, reason = "bench")] fn bench_multiple_targets() { let p = project() .file( "benches/bin1.rs", r#" #![feature(test)] extern crate test; #[bench] fn run1(_ben: &mut test::Bencher) { } "#, ) .file( "benches/bin2.rs", r#" #![feature(test)] extern crate test; #[bench] fn run2(_ben: &mut test::Bencher) { } "#, ) .file( "benches/bin3.rs", r#" #![feature(test)] extern crate test; #[bench] fn run3(_ben: &mut test::Bencher) { } "#, ) .build(); p.cargo("bench --bench bin1 --bench bin2") .with_stdout_contains("test run1 ... 
bench: [..]") .with_stdout_contains("test run2 ... bench: [..]") .with_stdout_does_not_contain("[..]run3[..]") .run(); } #[cargo_test(nightly, reason = "bench")] fn cargo_bench_verbose() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file( "src/main.rs", r#" #![feature(test)] #[cfg(test)] extern crate test; fn main() {} #[bench] fn bench_hello(_b: &mut test::Bencher) {} "#, ) .build(); p.cargo("bench -v hello") .with_stderr( "\ [COMPILING] foo v0.5.0 ([CWD]) [RUNNING] `rustc [..] src/main.rs [..]` [FINISHED] bench [optimized] target(s) in [..] [RUNNING] `[..]target/release/deps/foo-[..][EXE] hello --bench`", ) .with_stdout_contains("test bench_hello ... bench: [..]") .run(); } #[cargo_test(nightly, reason = "bench")] fn many_similar_names() { let p = project() .file( "src/lib.rs", " #![feature(test)] #[cfg(test)] extern crate test; pub fn foo() {} #[bench] fn lib_bench(_b: &mut test::Bencher) {} ", ) .file( "src/main.rs", " #![feature(test)] #[cfg(test)] extern crate foo; #[cfg(test)] extern crate test; fn main() {} #[bench] fn bin_bench(_b: &mut test::Bencher) { foo::foo() } ", ) .file( "benches/foo.rs", r#" #![feature(test)] extern crate foo; extern crate test; #[bench] fn bench_bench(_b: &mut test::Bencher) { foo::foo() } "#, ) .build(); p.cargo("bench") .with_stdout_contains("test bin_bench ... bench: 0 ns/iter (+/- 0)") .with_stdout_contains("test lib_bench ... bench: 0 ns/iter (+/- 0)") .with_stdout_contains("test bench_bench ... 
bench: 0 ns/iter (+/- 0)") .run(); } #[cargo_test(nightly, reason = "bench")] fn cargo_bench_failing_test() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file( "src/main.rs", r#" #![feature(test)] #[cfg(test)] extern crate test; fn hello() -> &'static str { "hello" } pub fn main() { println!("{}", hello()) } #[bench] fn bench_hello(_b: &mut test::Bencher) { assert_eq!(hello(), "nope") } "#, ) .build(); p.cargo("build").run(); assert!(p.bin("foo").is_file()); p.process(&p.bin("foo")).with_stdout("hello\n").run(); // Force libtest into serial execution so that the test header will be printed. p.cargo("bench -- --test-threads=1") .with_stdout_contains("test bench_hello ...[..]") .with_stderr_contains( "\ [COMPILING] foo v0.5.0 ([CWD])[..] [FINISHED] bench [optimized] target(s) in [..] [RUNNING] [..] (target/release/deps/foo-[..][EXE])", ) .with_stdout_contains( "[..]thread '[..]' panicked at 'assertion failed: `(left == right)`[..]", ) .with_stdout_contains("[..]left: `\"hello\"`[..]") .with_stdout_contains("[..]right: `\"nope\"`[..]") .with_stdout_contains("[..]src/main.rs:15[..]") .with_status(101) .run(); } #[cargo_test(nightly, reason = "bench")] fn bench_with_lib_dep() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [[bin]] name = "baz" path = "src/main.rs" "#, ) .file( "src/lib.rs", r#" #![feature(test)] #[cfg(test)] extern crate test; /// /// ```rust /// extern crate foo; /// fn main() { /// println!("{}", foo::foo()); /// } /// ``` /// pub fn foo(){} #[bench] fn lib_bench(_b: &mut test::Bencher) {} "#, ) .file( "src/main.rs", " #![feature(test)] #[allow(unused_extern_crates)] extern crate foo; #[cfg(test)] extern crate test; fn main() {} #[bench] fn bin_bench(_b: &mut test::Bencher) {} ", ) .build(); p.cargo("bench") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] bench [optimized] target(s) in [..] [RUNNING] [..] (target/release/deps/foo-[..][EXE]) [RUNNING] [..] 
(target/release/deps/baz-[..][EXE])", ) .with_stdout_contains("test lib_bench ... bench: [..]") .with_stdout_contains("test bin_bench ... bench: [..]") .run(); } #[cargo_test(nightly, reason = "bench")] fn bench_with_deep_lib_dep() { let p = project() .at("bar") .file( "Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [dependencies.foo] path = "../foo" "#, ) .file( "src/lib.rs", " #![feature(test)] #[cfg(test)] extern crate foo; #[cfg(test)] extern crate test; #[bench] fn bar_bench(_b: &mut test::Bencher) { foo::foo(); } ", ) .build(); let _p2 = project() .file( "src/lib.rs", " #![feature(test)] #[cfg(test)] extern crate test; pub fn foo() {} #[bench] fn foo_bench(_b: &mut test::Bencher) {} ", ) .build(); p.cargo("bench") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [COMPILING] bar v0.0.1 ([CWD]) [FINISHED] bench [optimized] target(s) in [..] [RUNNING] [..] (target/release/deps/bar-[..][EXE])", ) .with_stdout_contains("test bar_bench ... bench: [..]") .run(); } #[cargo_test(nightly, reason = "bench")] fn external_bench_explicit() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [[bench]] name = "bench" path = "src/bench.rs" "#, ) .file( "src/lib.rs", r#" #![feature(test)] #[cfg(test)] extern crate test; pub fn get_hello() -> &'static str { "Hello" } #[bench] fn internal_bench(_b: &mut test::Bencher) {} "#, ) .file( "src/bench.rs", r#" #![feature(test)] #[allow(unused_extern_crates)] extern crate foo; extern crate test; #[bench] fn external_bench(_b: &mut test::Bencher) {} "#, ) .build(); p.cargo("bench") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] bench [optimized] target(s) in [..] [RUNNING] [..] (target/release/deps/foo-[..][EXE]) [RUNNING] [..] (target/release/deps/bench-[..][EXE])", ) .with_stdout_contains("test internal_bench ... bench: [..]") .with_stdout_contains("test external_bench ... 
bench: [..]") .run(); } #[cargo_test(nightly, reason = "bench")] fn external_bench_implicit() { let p = project() .file( "src/lib.rs", r#" #![feature(test)] #[cfg(test)] extern crate test; pub fn get_hello() -> &'static str { "Hello" } #[bench] fn internal_bench(_b: &mut test::Bencher) {} "#, ) .file( "benches/external.rs", r#" #![feature(test)] #[allow(unused_extern_crates)] extern crate foo; extern crate test; #[bench] fn external_bench(_b: &mut test::Bencher) {} "#, ) .build(); p.cargo("bench") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] bench [optimized] target(s) in [..] [RUNNING] [..] (target/release/deps/foo-[..][EXE]) [RUNNING] [..] (target/release/deps/external-[..][EXE])", ) .with_stdout_contains("test internal_bench ... bench: [..]") .with_stdout_contains("test external_bench ... bench: [..]") .run(); } #[cargo_test(nightly, reason = "bench")] fn bench_autodiscover_2015() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] edition = "2015" [features] magic = [] [[bench]] name = "bench_magic" required-features = ["magic"] "#, ) .file("src/lib.rs", "") .file( "benches/bench_basic.rs", r#" #![feature(test)] #[allow(unused_extern_crates)] extern crate foo; extern crate test; #[bench] fn bench_basic(_b: &mut test::Bencher) {} "#, ) .file( "benches/bench_magic.rs", r#" #![feature(test)] #[allow(unused_extern_crates)] extern crate foo; extern crate test; #[bench] fn bench_magic(_b: &mut test::Bencher) {} "#, ) .build(); p.cargo("bench bench_basic") .with_stderr( "warning: \ An explicit [[bench]] section is specified in Cargo.toml which currently disables Cargo from automatically inferring other benchmark targets. This inference behavior will change in the Rust 2018 edition and the following files will be included as a benchmark target: * [..]bench_basic.rs This is likely to break cargo build or cargo test as these files may not be ready to be compiled as a benchmark target today. 
You can future-proof yourself and disable this warning by adding `autobenches = false` to your [package] section. You may also move the files to a location where Cargo would not automatically infer them to be a target, such as in subfolders. For more information on this warning you can consult https://github.com/rust-lang/cargo/issues/5330 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] bench [optimized] target(s) in [..] [RUNNING] [..] (target/release/deps/foo-[..][EXE]) ", ) .run(); } #[cargo_test(nightly, reason = "bench")] fn dont_run_examples() { let p = project() .file("src/lib.rs", "") .file( "examples/dont-run-me-i-will-fail.rs", r#"fn main() { panic!("Examples should not be run by 'cargo test'"); }"#, ) .build(); p.cargo("bench").run(); } #[cargo_test(nightly, reason = "bench")] fn pass_through_command_line() { let p = project() .file( "src/lib.rs", " #![feature(test)] #[cfg(test)] extern crate test; #[bench] fn foo(_b: &mut test::Bencher) {} #[bench] fn bar(_b: &mut test::Bencher) {} ", ) .build(); p.cargo("bench bar") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] bench [optimized] target(s) in [..] [RUNNING] [..] (target/release/deps/foo-[..][EXE])", ) .with_stdout_contains("test bar ... bench: [..]") .run(); p.cargo("bench foo") .with_stderr( "[FINISHED] bench [optimized] target(s) in [..] [RUNNING] [..] (target/release/deps/foo-[..][EXE])", ) .with_stdout_contains("test foo ... 
bench: [..]") .run(); } // Regression test for running cargo-bench twice with // tests in an rlib #[cargo_test(nightly, reason = "bench")] fn cargo_bench_twice() { let p = project() .file("Cargo.toml", &basic_lib_manifest("foo")) .file( "src/foo.rs", r#" #![crate_type = "rlib"] #![feature(test)] #[cfg(test)] extern crate test; #[bench] fn dummy_bench(b: &mut test::Bencher) { } "#, ) .build(); for _ in 0..2 { p.cargo("bench").run(); } } #[cargo_test(nightly, reason = "bench")] fn lib_bin_same_name() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [lib] name = "foo" [[bin]] name = "foo" "#, ) .file( "src/lib.rs", " #![feature(test)] #[cfg(test)] extern crate test; #[bench] fn lib_bench(_b: &mut test::Bencher) {} ", ) .file( "src/main.rs", " #![feature(test)] #[allow(unused_extern_crates)] extern crate foo; #[cfg(test)] extern crate test; #[bench] fn bin_bench(_b: &mut test::Bencher) {} ", ) .build(); p.cargo("bench") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] bench [optimized] target(s) in [..] [RUNNING] [..] (target/release/deps/foo-[..][EXE]) [RUNNING] [..] (target/release/deps/foo-[..][EXE])", ) .with_stdout_contains_n("test [..] ... bench: [..]", 2) .run(); } #[cargo_test(nightly, reason = "bench")] fn lib_with_standard_name() { let p = project() .file("Cargo.toml", &basic_manifest("syntax", "0.0.1")) .file( "src/lib.rs", " #![feature(test)] #[cfg(test)] extern crate test; /// ``` /// syntax::foo(); /// ``` pub fn foo() {} #[bench] fn foo_bench(_b: &mut test::Bencher) {} ", ) .file( "benches/bench.rs", " #![feature(test)] extern crate syntax; extern crate test; #[bench] fn bench(_b: &mut test::Bencher) { syntax::foo() } ", ) .build(); p.cargo("bench") .with_stderr( "\ [COMPILING] syntax v0.0.1 ([CWD]) [FINISHED] bench [optimized] target(s) in [..] [RUNNING] [..] (target/release/deps/syntax-[..][EXE]) [RUNNING] [..] (target/release/deps/bench-[..][EXE])", ) .with_stdout_contains("test foo_bench ... 
bench: [..]") .with_stdout_contains("test bench ... bench: [..]") .run(); } #[cargo_test(nightly, reason = "bench")] fn lib_with_standard_name2() { let p = project() .file( "Cargo.toml", r#" [package] name = "syntax" version = "0.0.1" authors = [] [lib] name = "syntax" bench = false doctest = false "#, ) .file("src/lib.rs", "pub fn foo() {}") .file( "src/main.rs", " #![feature(test)] #[cfg(test)] extern crate syntax; #[cfg(test)] extern crate test; fn main() {} #[bench] fn bench(_b: &mut test::Bencher) { syntax::foo() } ", ) .build(); p.cargo("bench") .with_stderr( "\ [COMPILING] syntax v0.0.1 ([CWD]) [FINISHED] bench [optimized] target(s) in [..] [RUNNING] [..] (target/release/deps/syntax-[..][EXE])", ) .with_stdout_contains("test bench ... bench: [..]") .run(); } #[cargo_test(nightly, reason = "bench")] fn bench_dylib() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [lib] name = "foo" crate_type = ["dylib"] [dependencies.bar] path = "bar" "#, ) .file( "src/lib.rs", r#" #![feature(test)] extern crate bar as the_bar; #[cfg(test)] extern crate test; pub fn bar() { the_bar::baz(); } #[bench] fn foo(_b: &mut test::Bencher) {} "#, ) .file( "benches/bench.rs", r#" #![feature(test)] extern crate foo as the_foo; extern crate test; #[bench] fn foo(_b: &mut test::Bencher) { the_foo::bar(); } "#, ) .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [lib] name = "bar" crate_type = ["dylib"] "#, ) .file("bar/src/lib.rs", "pub fn baz() {}") .build(); p.cargo("bench -v") .with_stderr( "\ [COMPILING] bar v0.0.1 ([CWD]/bar) [RUNNING] [..] -C opt-level=3 [..] [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] [..] -C opt-level=3 [..] [RUNNING] [..] -C opt-level=3 [..] [RUNNING] [..] -C opt-level=3 [..] [FINISHED] bench [optimized] target(s) in [..] [RUNNING] `[..]target/release/deps/foo-[..][EXE] --bench` [RUNNING] `[..]target/release/deps/bench-[..][EXE] --bench`", ) .with_stdout_contains_n("test foo ... 
bench: [..]", 2) .run(); p.root().move_into_the_past(); p.cargo("bench -v") .with_stderr( "\ [FRESH] bar v0.0.1 ([CWD]/bar) [FRESH] foo v0.0.1 ([CWD]) [FINISHED] bench [optimized] target(s) in [..] [RUNNING] `[..]target/release/deps/foo-[..][EXE] --bench` [RUNNING] `[..]target/release/deps/bench-[..][EXE] --bench`", ) .with_stdout_contains_n("test foo ... bench: [..]", 2) .run(); } #[cargo_test(nightly, reason = "bench")] fn bench_twice_with_build_cmd() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" "#, ) .file("build.rs", "fn main() {}") .file( "src/lib.rs", " #![feature(test)] #[cfg(test)] extern crate test; #[bench] fn foo(_b: &mut test::Bencher) {} ", ) .build(); p.cargo("bench") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] bench [optimized] target(s) in [..] [RUNNING] [..] (target/release/deps/foo-[..][EXE])", ) .with_stdout_contains("test foo ... bench: [..]") .run(); p.cargo("bench") .with_stderr( "[FINISHED] bench [optimized] target(s) in [..] [RUNNING] [..] (target/release/deps/foo-[..][EXE])", ) .with_stdout_contains("test foo ... 
bench: [..]") .run(); } #[cargo_test(nightly, reason = "bench")] fn bench_with_examples() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "6.6.6" authors = [] [[example]] name = "teste1" [[bench]] name = "testb1" "#, ) .file( "src/lib.rs", r#" #![feature(test)] #[cfg(test)] extern crate test; #[cfg(test)] use test::Bencher; pub fn f1() { println!("f1"); } pub fn f2() {} #[bench] fn bench_bench1(_b: &mut Bencher) { f2(); } "#, ) .file( "benches/testb1.rs", " #![feature(test)] extern crate foo; extern crate test; use test::Bencher; #[bench] fn bench_bench2(_b: &mut Bencher) { foo::f2(); } ", ) .file( "examples/teste1.rs", r#" extern crate foo; fn main() { println!("example1"); foo::f1(); } "#, ) .build(); p.cargo("bench -v") .with_stderr( "\ [COMPILING] foo v6.6.6 ([CWD]) [RUNNING] `rustc [..]` [RUNNING] `rustc [..]` [RUNNING] `rustc [..]` [FINISHED] bench [optimized] target(s) in [..] [RUNNING] `[CWD]/target/release/deps/foo-[..][EXE] --bench` [RUNNING] `[CWD]/target/release/deps/testb1-[..][EXE] --bench`", ) .with_stdout_contains("test bench_bench1 ... bench: [..]") .with_stdout_contains("test bench_bench2 ... bench: [..]") .run(); } #[cargo_test(nightly, reason = "bench")] fn test_a_bench() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" authors = [] version = "0.1.0" [lib] name = "foo" test = false doctest = false [[bench]] name = "b" test = true "#, ) .file("src/lib.rs", "") .file("benches/b.rs", "#[test] fn foo() {}") .build(); p.cargo("test") .with_stderr( "\ [COMPILING] foo v0.1.0 ([..]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/b-[..][EXE])", ) .with_stdout_contains("test foo ... 
ok") .run(); } #[cargo_test(nightly, reason = "bench")] fn test_bench_no_run() { let p = project() .file("src/lib.rs", "") .file( "benches/bbaz.rs", r#" #![feature(test)] extern crate test; use test::Bencher; #[bench] fn bench_baz(_: &mut Bencher) {} "#, ) .build(); p.cargo("bench --no-run") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [FINISHED] bench [optimized] target(s) in [..] [EXECUTABLE] benches src/lib.rs (target/release/deps/foo-[..][EXE]) [EXECUTABLE] benches/bbaz.rs (target/release/deps/bbaz-[..][EXE]) ", ) .run(); } #[cargo_test(nightly, reason = "bench")] fn test_bench_no_run_emit_json() { let p = project() .file("src/lib.rs", "") .file( "benches/bbaz.rs", r#" #![feature(test)] extern crate test; use test::Bencher; #[bench] fn bench_baz(_: &mut Bencher) {} "#, ) .build(); p.cargo("bench --no-run --message-format json") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [FINISHED] bench [optimized] target(s) in [..] ", ) .run(); } #[cargo_test(nightly, reason = "bench")] fn test_bench_no_fail_fast() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file( "src/main.rs", r#" #![feature(test)] #[cfg(test)] extern crate test; fn hello() -> &'static str { "hello" } pub fn main() { println!("{}", hello()) } #[bench] fn bench_hello(_b: &mut test::Bencher) { assert_eq!(hello(), "hello") } #[bench] fn bench_nope(_b: &mut test::Bencher) { assert_eq!("nope", hello()) } "#, ) .file( "benches/b1.rs", r#" #![feature(test)] extern crate test; #[bench] fn b1_fail(_b: &mut test::Bencher) { assert_eq!(1, 2); } "#, ) .build(); p.cargo("bench --no-fail-fast -- --test-threads=1") .with_status(101) .with_stderr( "\ [COMPILING] foo v0.5.0 [..] [FINISHED] bench [..] 
[RUNNING] unittests src/main.rs (target/release/deps/foo[..]) [ERROR] bench failed, to rerun pass `--bin foo` [RUNNING] benches/b1.rs (target/release/deps/b1[..]) [ERROR] bench failed, to rerun pass `--bench b1` [ERROR] 2 targets failed: `--bin foo` `--bench b1` ", ) .with_stdout_contains("running 2 tests") .with_stdout_contains("test bench_hello [..]") .with_stdout_contains("test bench_nope [..]") .with_stdout_contains("test b1_fail [..]") .run(); } #[cargo_test(nightly, reason = "bench")] fn test_bench_multiple_packages() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" authors = [] version = "0.1.0" [dependencies.bar] path = "../bar" [dependencies.baz] path = "../baz" "#, ) .file("src/lib.rs", "") .build(); let _bar = project() .at("bar") .file( "Cargo.toml", r#" [project] name = "bar" authors = [] version = "0.1.0" [[bench]] name = "bbar" test = true "#, ) .file("src/lib.rs", "") .file( "benches/bbar.rs", r#" #![feature(test)] extern crate test; use test::Bencher; #[bench] fn bench_bar(_b: &mut Bencher) {} "#, ) .build(); let _baz = project() .at("baz") .file( "Cargo.toml", r#" [project] name = "baz" authors = [] version = "0.1.0" [[bench]] name = "bbaz" test = true "#, ) .file("src/lib.rs", "") .file( "benches/bbaz.rs", r#" #![feature(test)] extern crate test; use test::Bencher; #[bench] fn bench_baz(_b: &mut Bencher) {} "#, ) .build(); p.cargo("bench -p bar -p baz") .with_stderr_contains("[RUNNING] [..] (target/release/deps/bbaz-[..][EXE])") .with_stdout_contains("test bench_baz ... bench: [..]") .with_stderr_contains("[RUNNING] [..] (target/release/deps/bbar-[..][EXE])") .with_stdout_contains("test bench_bar ... 
bench: [..]") .run(); } #[cargo_test(nightly, reason = "bench")] fn bench_all_workspace() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [dependencies] bar = { path = "bar" } [workspace] "#, ) .file("src/main.rs", "fn main() {}") .file( "benches/foo.rs", r#" #![feature(test)] extern crate test; use test::Bencher; #[bench] fn bench_foo(_: &mut Bencher) -> () { () } "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .file( "bar/benches/bar.rs", r#" #![feature(test)] extern crate test; use test::Bencher; #[bench] fn bench_bar(_: &mut Bencher) -> () { () } "#, ) .build(); p.cargo("bench --workspace") .with_stderr_contains("[RUNNING] [..] (target/release/deps/bar-[..][EXE])") .with_stdout_contains("test bench_bar ... bench: [..]") .with_stderr_contains("[RUNNING] [..] (target/release/deps/foo-[..][EXE])") .with_stdout_contains("test bench_foo ... bench: [..]") .run(); } #[cargo_test(nightly, reason = "bench")] fn bench_all_exclude() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [workspace] members = ["bar", "baz"] "#, ) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file( "bar/src/lib.rs", r#" #![feature(test)] #[cfg(test)] extern crate test; #[bench] pub fn bar(b: &mut test::Bencher) { b.iter(|| {}); } "#, ) .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file( "baz/src/lib.rs", "#[test] pub fn baz() { break_the_build(); }", ) .build(); p.cargo("bench --workspace --exclude baz") .with_stdout_contains( "\ running 1 test test bar ... bench: [..] 
ns/iter (+/- [..])", ) .run(); } #[cargo_test(nightly, reason = "bench")] fn bench_all_exclude_glob() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [workspace] members = ["bar", "baz"] "#, ) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file( "bar/src/lib.rs", r#" #![feature(test)] #[cfg(test)] extern crate test; #[bench] pub fn bar(b: &mut test::Bencher) { b.iter(|| {}); } "#, ) .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file( "baz/src/lib.rs", "#[test] pub fn baz() { break_the_build(); }", ) .build(); p.cargo("bench --workspace --exclude '*z'") .with_stdout_contains( "\ running 1 test test bar ... bench: [..] ns/iter (+/- [..])", ) .run(); } #[cargo_test(nightly, reason = "bench")] fn bench_all_virtual_manifest() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar", "baz"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .file( "bar/benches/bar.rs", r#" #![feature(test)] extern crate test; use test::Bencher; #[bench] fn bench_bar(_: &mut Bencher) -> () { () } "#, ) .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/lib.rs", "pub fn baz() {}") .file( "baz/benches/baz.rs", r#" #![feature(test)] extern crate test; use test::Bencher; #[bench] fn bench_baz(_: &mut Bencher) -> () { () } "#, ) .build(); // The order in which bar and baz are built is not guaranteed p.cargo("bench --workspace") .with_stderr_contains("[RUNNING] [..] (target/release/deps/baz-[..][EXE])") .with_stdout_contains("test bench_baz ... bench: [..]") .with_stderr_contains("[RUNNING] [..] (target/release/deps/bar-[..][EXE])") .with_stdout_contains("test bench_bar ... 
bench: [..]") .run(); } #[cargo_test(nightly, reason = "bench")] fn bench_virtual_manifest_glob() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar", "baz"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() { break_the_build(); }") .file( "bar/benches/bar.rs", r#" #![feature(test)] extern crate test; use test::Bencher; #[bench] fn bench_bar(_: &mut Bencher) -> () { break_the_build(); } "#, ) .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/lib.rs", "pub fn baz() {}") .file( "baz/benches/baz.rs", r#" #![feature(test)] extern crate test; use test::Bencher; #[bench] fn bench_baz(_: &mut Bencher) -> () { () } "#, ) .build(); // The order in which bar and baz are built is not guaranteed p.cargo("bench -p '*z'") .with_stderr_contains("[RUNNING] [..] (target/release/deps/baz-[..][EXE])") .with_stdout_contains("test bench_baz ... bench: [..]") .with_stderr_does_not_contain("[RUNNING] [..] (target/release/deps/bar-[..][EXE])") .with_stdout_does_not_contain("test bench_bar ... 
bench: [..]") .run(); } // https://github.com/rust-lang/cargo/issues/4287 #[cargo_test(nightly, reason = "bench")] fn legacy_bench_name() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [[bench]] name = "bench" "#, ) .file("src/lib.rs", "pub fn foo() {}") .file( "src/bench.rs", r#" #![feature(test)] extern crate test; use test::Bencher; #[bench] fn bench_foo(_: &mut Bencher) -> () { () } "#, ) .build(); p.cargo("bench") .with_stderr_contains( "\ [WARNING] path `[..]src/bench.rs` was erroneously implicitly accepted for benchmark `bench`, please set bench.path in Cargo.toml", ) .run(); } #[cargo_test(nightly, reason = "bench")] fn bench_virtual_manifest_all_implied() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar", "baz"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn foo() {}") .file( "bar/benches/bar.rs", r#" #![feature(test)] extern crate test; use test::Bencher; #[bench] fn bench_bar(_: &mut Bencher) -> () { () } "#, ) .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/lib.rs", "pub fn baz() {}") .file( "baz/benches/baz.rs", r#" #![feature(test)] extern crate test; use test::Bencher; #[bench] fn bench_baz(_: &mut Bencher) -> () { () } "#, ) .build(); // The order in which bar and baz are built is not guaranteed p.cargo("bench") .with_stderr_contains("[RUNNING] [..] (target/release/deps/baz-[..][EXE])") .with_stdout_contains("test bench_baz ... bench: [..]") .with_stderr_contains("[RUNNING] [..] (target/release/deps/bar-[..][EXE])") .with_stdout_contains("test bench_bar ... 
bench: [..]") .run(); } #[cargo_test(nightly, reason = "bench")] fn json_artifact_includes_executable_for_benchmark() { let p = project() .file( "benches/benchmark.rs", r#" #![feature(test)] extern crate test; use test::Bencher; #[bench] fn bench_foo(_: &mut Bencher) -> () { () } "#, ) .build(); p.cargo("bench --no-run --message-format=json") .with_json( r#" { "executable": "[..]/foo/target/release/deps/benchmark-[..][EXE]", "features": [], "filenames": "{...}", "fresh": false, "package_id": "foo 0.0.1 ([..])", "manifest_path": "[..]", "profile": "{...}", "reason": "compiler-artifact", "target": { "crate_types": [ "bin" ], "kind": [ "bench" ], "doc": false, "doctest": false, "edition": "2015", "name": "benchmark", "src_path": "[..]/foo/benches/benchmark.rs", "test": false } } {"reason": "build-finished", "success": true} "#, ) .run(); } cargo-0.66.0/tests/testsuite/binary_name.rs000066400000000000000000000176661432416201200207360ustar00rootroot00000000000000use cargo_test_support::install::{ assert_has_installed_exe, assert_has_not_installed_exe, cargo_home, }; use cargo_test_support::project; #[cargo_test] fn gated() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" [[bin]] name = "foo" filename = "007bar" path = "src/main.rs" "#, ) .file("src/main.rs", "fn main() { assert!(true) }") .build(); // Run cargo build. p.cargo("build") .masquerade_as_nightly_cargo(&["different-binary-name"]) .with_status(101) .with_stderr_contains("[..]feature `different-binary-name` is required") .run(); } #[cargo_test] // This test checks if: // 1. The correct binary is produced // 2. The deps file has the correct content // 3. Fingerprinting works // 4. `cargo clean` command works fn binary_name1() { // Create the project. 
let p = project() .file( "Cargo.toml", r#" cargo-features = ["different-binary-name"] [project] name = "foo" version = "0.0.1" [[bin]] name = "foo" filename = "007bar" path = "src/main.rs" "#, ) .file("src/main.rs", "fn main() { assert!(true) }") .build(); // Run cargo build. p.cargo("build") .masquerade_as_nightly_cargo(&["different-binary-name"]) .run(); // Check the name of the binary that cargo has generated. // A binary with the name of the crate should NOT be created. let foo_path = p.bin("foo"); assert!(!foo_path.is_file()); // A binary with the name provided in `filename` parameter should be created. let bar_path = p.bin("007bar"); assert!(bar_path.is_file()); // Check if deps file exists. let deps_path = p.bin("007bar").with_extension("d"); assert!(deps_path.is_file(), "{:?}", bar_path); let depinfo = p.read_file(deps_path.to_str().unwrap()); // Prepare what content we expect to be present in deps file. let deps_exp = format!( "{}: {}", p.bin("007bar").to_str().unwrap(), p.root().join("src").join("main.rs").to_str().unwrap() ); // Compare actual deps content with expected deps content. assert!( depinfo.lines().any(|line| line == deps_exp), "Content of `{}` is incorrect", deps_path.to_string_lossy() ); // Run cargo second time, to verify fingerprint. p.cargo("build -p foo -v") .masquerade_as_nightly_cargo(&["different-binary-name"]) .with_stderr( "\ [FRESH] foo [..] [FINISHED] [..] ", ) .run(); // Run cargo clean. p.cargo("clean -p foo") .masquerade_as_nightly_cargo(&["different-binary-name"]) .run(); // Check if the appropriate file was removed. assert!( !bar_path.is_file(), "`cargo clean` did not remove the correct files" ); } #[cargo_test] // This test checks if: // 1. Check `cargo run` // 2. Check `cargo test` // 3. Check `cargo install/uninstall` fn binary_name2() { // Create the project. 
let p = project() .file( "Cargo.toml", r#" cargo-features = ["different-binary-name"] [project] name = "foo" version = "0.0.1" [[bin]] name = "foo" filename = "007bar" "#, ) .file( "src/main.rs", r#" fn hello(name: &str) -> String { format!("Hello, {}!", name) } fn main() { println!("{}", hello("crabs")); } #[cfg(test)] mod tests { use super::*; #[test] fn check_crabs() { assert_eq!(hello("crabs"), "Hello, crabs!"); } } "#, ) .build(); // Run cargo build. p.cargo("build") .masquerade_as_nightly_cargo(&["different-binary-name"]) .run(); // Check the name of the binary that cargo has generated. // A binary with the name of the crate should NOT be created. let foo_path = p.bin("foo"); assert!(!foo_path.is_file()); // A binary with the name provided in `filename` parameter should be created. let bar_path = p.bin("007bar"); assert!(bar_path.is_file()); // Check if `cargo test` works p.cargo("test") .masquerade_as_nightly_cargo(&["different-binary-name"]) .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/foo-[..][EXE])", ) .with_stdout_contains("test tests::check_crabs ... 
ok") .run(); // Check if `cargo run` is able to execute the binary p.cargo("run") .masquerade_as_nightly_cargo(&["different-binary-name"]) .with_stdout("Hello, crabs!") .run(); p.cargo("install") .masquerade_as_nightly_cargo(&["different-binary-name"]) .run(); assert_has_installed_exe(cargo_home(), "007bar"); p.cargo("uninstall") .with_stderr("[REMOVING] [ROOT]/home/.cargo/bin/007bar[EXE]") .masquerade_as_nightly_cargo(&["different-binary-name"]) .run(); assert_has_not_installed_exe(cargo_home(), "007bar"); } #[cargo_test] fn check_env_vars() { let p = project() .file( "Cargo.toml", r#" cargo-features = ["different-binary-name"] [project] name = "foo" version = "0.0.1" [[bin]] name = "foo" filename = "007bar" "#, ) .file( "src/main.rs", r#" fn main() { println!("{}", option_env!("CARGO_BIN_NAME").unwrap()); } "#, ) .file( "tests/integration.rs", r#" #[test] fn check_env_vars2() { let value = option_env!("CARGO_BIN_EXE_007bar").expect("Could not find environment variable."); assert!(value.contains("007bar")); } "# ) .build(); // Run cargo build. p.cargo("build") .masquerade_as_nightly_cargo(&["different-binary-name"]) .run(); p.cargo("run") .masquerade_as_nightly_cargo(&["different-binary-name"]) .with_stdout("007bar") .run(); p.cargo("test") .masquerade_as_nightly_cargo(&["different-binary-name"]) .with_status(0) .run(); } #[cargo_test] fn check_msg_format_json() { // Create the project. 
let p = project() .file( "Cargo.toml", r#" cargo-features = ["different-binary-name"] [project] name = "foo" version = "0.0.1" [[bin]] name = "foo" filename = "007bar" path = "src/main.rs" "#, ) .file("src/main.rs", "fn main() { assert!(true) }") .build(); let output = r#" { "reason": "compiler-artifact", "package_id": "foo 0.0.1 [..]", "manifest_path": "[CWD]/Cargo.toml", "target": "{...}", "profile": "{...}", "features": [], "filenames": "{...}", "executable": "[ROOT]/foo/target/debug/007bar[EXE]", "fresh": false } {"reason":"build-finished", "success":true} "#; // Run cargo build. p.cargo("build --message-format=json") .masquerade_as_nightly_cargo(&["different-binary-name"]) .with_json(output) .run(); } cargo-0.66.0/tests/testsuite/build.rs000066400000000000000000005102451432416201200175400ustar00rootroot00000000000000//! Tests for the `cargo build` command. use cargo::{ core::compiler::CompileMode, core::{Shell, Workspace}, ops::CompileOptions, Config, }; use cargo_test_support::compare; use cargo_test_support::paths::{root, CargoPathExt}; use cargo_test_support::registry::Package; use cargo_test_support::tools; use cargo_test_support::{ basic_bin_manifest, basic_lib_manifest, basic_manifest, cargo_exe, git, is_nightly, main_file, paths, process, project, rustc_host, sleep_ms, symlink_supported, t, Execs, ProjectBuilder, }; use cargo_util::paths::dylib_path_envvar; use std::env; use std::fs; use std::io::Read; use std::process::Stdio; #[cargo_test] fn cargo_compile_simple() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("build").run(); assert!(p.bin("foo").is_file()); p.process(&p.bin("foo")).with_stdout("i am foo\n").run(); } #[cargo_test] fn cargo_fail_with_no_stderr() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &String::from("refusal")) .build(); p.cargo("build --message-format=json") .with_status(101) 
.with_stderr_does_not_contain("--- stderr") .run(); } /// Checks that the `CARGO_INCREMENTAL` environment variable results in /// `rustc` getting `-C incremental` passed to it. #[cargo_test] fn cargo_compile_incremental() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("build -v") .env("CARGO_INCREMENTAL", "1") .with_stderr_contains( "[RUNNING] `rustc [..] -C incremental=[..]/target/debug/incremental[..]`\n", ) .run(); p.cargo("test -v") .env("CARGO_INCREMENTAL", "1") .with_stderr_contains( "[RUNNING] `rustc [..] -C incremental=[..]/target/debug/incremental[..]`\n", ) .run(); } #[cargo_test] fn incremental_profile() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [profile.dev] incremental = false [profile.release] incremental = true "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build -v") .env_remove("CARGO_INCREMENTAL") .with_stderr_does_not_contain("[..]C incremental=[..]") .run(); p.cargo("build -v") .env("CARGO_INCREMENTAL", "1") .with_stderr_contains("[..]C incremental=[..]") .run(); p.cargo("build --release -v") .env_remove("CARGO_INCREMENTAL") .with_stderr_contains("[..]C incremental=[..]") .run(); p.cargo("build --release -v") .env("CARGO_INCREMENTAL", "0") .with_stderr_does_not_contain("[..]C incremental=[..]") .run(); } #[cargo_test] fn incremental_config() { let p = project() .file("src/main.rs", "fn main() {}") .file( ".cargo/config", r#" [build] incremental = false "#, ) .build(); p.cargo("build -v") .env_remove("CARGO_INCREMENTAL") .with_stderr_does_not_contain("[..]C incremental=[..]") .run(); p.cargo("build -v") .env("CARGO_INCREMENTAL", "1") .with_stderr_contains("[..]C incremental=[..]") .run(); } #[cargo_test] fn cargo_compile_with_workspace_excluded() { let p = project().file("src/main.rs", "fn main() {}").build(); p.cargo("build --workspace --exclude foo") 
.with_stderr_does_not_contain("[..]virtual[..]") .with_stderr_contains("[..]no packages to compile") .with_status(101) .run(); } #[cargo_test] fn cargo_compile_manifest_path() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("build --manifest-path foo/Cargo.toml") .cwd(p.root().parent().unwrap()) .run(); assert!(p.bin("foo").is_file()); } #[cargo_test] fn cargo_compile_with_invalid_manifest() { let p = project().file("Cargo.toml", "").build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: virtual manifests must be configured with [workspace] ", ) .run(); } #[cargo_test] fn cargo_compile_with_invalid_manifest2() { let p = project() .file( "Cargo.toml", " [project] foo = bar ", ) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: could not parse input as TOML Caused by: TOML parse error at line 3, column 23 | 3 | foo = bar | ^ Unexpected `b` Expected quoted string ", ) .run(); } #[cargo_test] fn cargo_compile_with_invalid_manifest3() { let p = project().file("src/Cargo.toml", "a = bar").build(); p.cargo("build --manifest-path src/Cargo.toml") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: could not parse input as TOML Caused by: TOML parse error at line 1, column 5 | 1 | a = bar | ^ Unexpected `b` Expected quoted string ", ) .run(); } #[cargo_test] fn cargo_compile_duplicate_build_targets() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [lib] name = "main" path = "src/main.rs" crate-type = ["dylib"] [dependencies] "#, ) .file("src/main.rs", "#![allow(warnings)] fn main() {}") .build(); p.cargo("build") .with_stderr( "\ warning: file found to be present in multiple build targets: [..]main.rs [COMPILING] foo v0.0.1 ([..]) [FINISHED] [..] 
", ) .run(); } #[cargo_test] fn cargo_compile_with_invalid_version() { let p = project() .file("Cargo.toml", &basic_manifest("foo", "1.0")) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: unexpected end of input while parsing minor version number for key `package.version` ", ) .run(); } #[cargo_test] fn cargo_compile_with_empty_package_name() { let p = project() .file("Cargo.toml", &basic_manifest("", "0.0.0")) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: package name cannot be an empty string ", ) .run(); } #[cargo_test] fn cargo_compile_with_invalid_package_name() { let p = project() .file("Cargo.toml", &basic_manifest("foo::bar", "0.0.0")) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: invalid character `:` in package name: `foo::bar`, [..] ", ) .run(); } #[cargo_test] fn cargo_compile_with_invalid_bin_target_name() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.0" [[bin]] name = "" "#, ) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: binary target names cannot be empty ", ) .run(); } #[cargo_test] fn cargo_compile_with_forbidden_bin_target_name() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.0" [[bin]] name = "build" "#, ) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: the binary target name `build` is forbidden, it conflicts with with cargo's build directory names ", ) .run(); } #[cargo_test] fn cargo_compile_with_bin_and_crate_type() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.0" [[bin]] name = "the_foo_bin" path = "src/foo.rs" crate-type = ["cdylib", "rlib"] "#, ) 
.file("src/foo.rs", "fn main() {}") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: the target `the_foo_bin` is a binary and can't have any crate-types set \ (currently \"cdylib, rlib\")", ) .run(); } #[cargo_test] fn cargo_compile_api_exposes_artifact_paths() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.0" [[bin]] name = "the_foo_bin" path = "src/bin.rs" [lib] name = "the_foo_lib" path = "src/foo.rs" crate-type = ["cdylib", "rlib"] "#, ) .file("src/foo.rs", "pub fn bar() {}") .file("src/bin.rs", "pub fn main() {}") .build(); let shell = Shell::from_write(Box::new(Vec::new())); let config = Config::new(shell, env::current_dir().unwrap(), paths::home()); let ws = Workspace::new(&p.root().join("Cargo.toml"), &config).unwrap(); let compile_options = CompileOptions::new(ws.config(), CompileMode::Build).unwrap(); let result = cargo::ops::compile(&ws, &compile_options).unwrap(); assert_eq!(1, result.binaries.len()); assert!(result.binaries[0].path.exists()); assert!(result.binaries[0] .path .to_str() .unwrap() .contains("the_foo_bin")); assert_eq!(1, result.cdylibs.len()); // The exact library path varies by platform, but should certainly exist at least assert!(result.cdylibs[0].path.exists()); assert!(result.cdylibs[0] .path .to_str() .unwrap() .contains("the_foo_lib")); } #[cargo_test] fn cargo_compile_with_bin_and_proc() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.0" [[bin]] name = "the_foo_bin" path = "src/foo.rs" proc-macro = true "#, ) .file("src/foo.rs", "fn main() {}") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: the target `the_foo_bin` is a binary and can't have `proc-macro` set `true`", ) .run(); } #[cargo_test] fn cargo_compile_with_invalid_lib_target_name() { let p = project() .file( "Cargo.toml", r#" [package] name = 
"foo" authors = [] version = "0.0.0" [lib] name = "" "#, ) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: library target names cannot be empty ", ) .run(); } #[cargo_test] fn cargo_compile_with_invalid_non_numeric_dep_version() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [dependencies] crossbeam = "y" "#, ) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[CWD]/Cargo.toml` Caused by: failed to parse the version requirement `y` for dependency `crossbeam` Caused by: unexpected character 'y' while parsing major version number ", ) .run(); } #[cargo_test] fn cargo_compile_without_manifest() { let p = project().no_manifest().build(); p.cargo("build") .with_status(101) .with_stderr("[ERROR] could not find `Cargo.toml` in `[..]` or any parent directory") .run(); } #[cargo_test] #[cfg(target_os = "linux")] fn cargo_compile_with_lowercase_cargo_toml() { let p = project() .no_manifest() .file("cargo.toml", &basic_manifest("foo", "0.1.0")) .file("src/lib.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("build") .with_status(101) .with_stderr( "[ERROR] could not find `Cargo.toml` in `[..]` or any parent directory, \ but found cargo.toml please try to rename it to Cargo.toml", ) .run(); } #[cargo_test] fn cargo_compile_with_invalid_code() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", "invalid rust code!") .build(); p.cargo("build") .with_status(101) .with_stderr_contains("[ERROR] could not compile `foo` due to previous error\n") .run(); assert!(p.root().join("Cargo.lock").is_file()); } #[cargo_test] fn cargo_compile_with_invalid_code_in_deps() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" [dependencies.baz] path = "../baz" "#, ) .file("src/main.rs", "invalid rust code!") .build(); let 
_bar = project() .at("bar") .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "invalid rust code!") .build(); let _baz = project() .at("baz") .file("Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("src/lib.rs", "invalid rust code!") .build(); p.cargo("build") .with_status(101) .with_stderr_contains("[..]invalid rust code[..]") .with_stderr_contains("[ERROR] could not compile [..]") .run(); } #[cargo_test] fn cargo_compile_with_warnings_in_the_root_package() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", "fn main() {} fn dead() {}") .build(); p.cargo("build") .with_stderr_contains("[WARNING] [..]dead[..]") .run(); } #[cargo_test] fn cargo_compile_with_warnings_in_a_dep_package() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] path = "bar" [[bin]] name = "foo" "#, ) .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) .file("bar/Cargo.toml", &basic_lib_manifest("bar")) .file( "bar/src/bar.rs", r#" pub fn gimme() -> &'static str { "test passed" } fn dead() {} "#, ) .build(); p.cargo("build") .with_stderr_contains("[WARNING] [..]dead[..]") .run(); assert!(p.bin("foo").is_file()); p.process(&p.bin("foo")).with_stdout("test passed\n").run(); } #[cargo_test] fn cargo_compile_with_nested_deps_inferred() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] path = 'bar' [[bin]] name = "foo" "#, ) .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.baz] path = "../baz" "#, ) .file( "bar/src/lib.rs", r#" extern crate baz; pub fn gimme() -> String { baz::gimme() } "#, ) .file("baz/Cargo.toml", &basic_manifest("baz", "0.5.0")) .file( "baz/src/lib.rs", r#" pub fn gimme() -> String { "test 
passed".to_string() } "#, ) .build(); p.cargo("build").run(); assert!(p.bin("foo").is_file()); assert!(!p.bin("libbar.rlib").is_file()); assert!(!p.bin("libbaz.rlib").is_file()); p.process(&p.bin("foo")).with_stdout("test passed\n").run(); } #[cargo_test] fn cargo_compile_with_nested_deps_correct_bin() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] path = "bar" [[bin]] name = "foo" "#, ) .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.baz] path = "../baz" "#, ) .file( "bar/src/lib.rs", r#" extern crate baz; pub fn gimme() -> String { baz::gimme() } "#, ) .file("baz/Cargo.toml", &basic_manifest("baz", "0.5.0")) .file( "baz/src/lib.rs", r#" pub fn gimme() -> String { "test passed".to_string() } "#, ) .build(); p.cargo("build").run(); assert!(p.bin("foo").is_file()); assert!(!p.bin("libbar.rlib").is_file()); assert!(!p.bin("libbaz.rlib").is_file()); p.process(&p.bin("foo")).with_stdout("test passed\n").run(); } #[cargo_test] fn cargo_compile_with_nested_deps_shorthand() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] path = "bar" "#, ) .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.baz] path = "../baz" [lib] name = "bar" "#, ) .file( "bar/src/bar.rs", r#" extern crate baz; pub fn gimme() -> String { baz::gimme() } "#, ) .file("baz/Cargo.toml", &basic_lib_manifest("baz")) .file( "baz/src/baz.rs", r#" pub fn gimme() -> String { "test passed".to_string() } "#, ) .build(); p.cargo("build").run(); assert!(p.bin("foo").is_file()); assert!(!p.bin("libbar.rlib").is_file()); assert!(!p.bin("libbaz.rlib").is_file()); 
p.process(&p.bin("foo")).with_stdout("test passed\n").run(); } #[cargo_test] fn cargo_compile_with_nested_deps_longhand() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] path = "bar" version = "0.5.0" [[bin]] name = "foo" "#, ) .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.baz] path = "../baz" version = "0.5.0" [lib] name = "bar" "#, ) .file( "bar/src/bar.rs", r#" extern crate baz; pub fn gimme() -> String { baz::gimme() } "#, ) .file("baz/Cargo.toml", &basic_lib_manifest("baz")) .file( "baz/src/baz.rs", r#" pub fn gimme() -> String { "test passed".to_string() } "#, ) .build(); p.cargo("build").run(); assert!(p.bin("foo").is_file()); assert!(!p.bin("libbar.rlib").is_file()); assert!(!p.bin("libbaz.rlib").is_file()); p.process(&p.bin("foo")).with_stdout("test passed\n").run(); } // Check that Cargo gives a sensible error if a dependency can't be found // because of a name mismatch. 
#[cargo_test] fn cargo_compile_with_dep_name_mismatch() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = ["wycats@example.com"] [[bin]] name = "foo" [dependencies.notquitebar] path = "bar" "#, ) .file("src/bin/foo.rs", &main_file(r#""i am foo""#, &["bar"])) .file("bar/Cargo.toml", &basic_bin_manifest("bar")) .file("bar/src/bar.rs", &main_file(r#""i am bar""#, &[])) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: no matching package named `notquitebar` found location searched: [CWD]/bar required by package `foo v0.0.1 ([CWD])` ", ) .run(); } // Ensure that renamed deps have a valid name #[cargo_test] fn cargo_compile_with_invalid_dep_rename() { let p = project() .file( "Cargo.toml", r#" [package] name = "buggin" version = "0.1.0" [dependencies] "haha this isn't a valid name πŸ›" = { package = "libc", version = "0.1" } "#, ) .file("src/main.rs", &main_file(r#""What's good?""#, &[])) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[..]` Caused by: invalid character ` ` in dependency name: `haha this isn't a valid name πŸ›`, characters must be Unicode XID characters (numbers, `-`, `_`, or most letters) ", ) .run(); } #[cargo_test] fn cargo_compile_with_filename() { let p = project() .file("src/lib.rs", "") .file( "src/bin/a.rs", r#" extern crate foo; fn main() { println!("hello a.rs"); } "#, ) .file("examples/a.rs", r#"fn main() { println!("example"); }"#) .build(); p.cargo("build --bin bin.rs") .with_status(101) .with_stderr( "\ [ERROR] no bin target named `bin.rs`. Available bin targets: a ", ) .run(); p.cargo("build --bin a.rs") .with_status(101) .with_stderr( "\ [ERROR] no bin target named `a.rs` Did you mean `a`?", ) .run(); p.cargo("build --example example.rs") .with_status(101) .with_stderr( "\ [ERROR] no example target named `example.rs`. 
Available example targets: a ", ) .run(); p.cargo("build --example a.rs") .with_status(101) .with_stderr( "\ [ERROR] no example target named `a.rs` Did you mean `a`?", ) .run(); } #[cargo_test] fn incompatible_dependencies() { Package::new("bad", "0.1.0").publish(); Package::new("bad", "1.0.0").publish(); Package::new("bad", "1.0.1").publish(); Package::new("bad", "1.0.2").publish(); Package::new("bar", "0.1.0").dep("bad", "0.1.0").publish(); Package::new("baz", "0.1.1").dep("bad", "=1.0.0").publish(); Package::new("baz", "0.1.0").dep("bad", "=1.0.0").publish(); Package::new("qux", "0.1.2").dep("bad", ">=1.0.1").publish(); Package::new("qux", "0.1.1").dep("bad", ">=1.0.1").publish(); Package::new("qux", "0.1.0").dep("bad", ">=1.0.1").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" [dependencies] bar = "0.1.0" baz = "0.1.0" qux = "0.1.0" "#, ) .file("src/main.rs", "fn main(){}") .build(); p.cargo("build") .with_status(101) .with_stderr_contains( "\ error: failed to select a version for `bad`. ... required by package `qux v0.1.0` ... which satisfies dependency `qux = \"^0.1.0\"` of package `foo v0.0.1 ([..])` versions that meet the requirements `>=1.0.1` are: 1.0.2, 1.0.1 all possible versions conflict with previously selected packages. previously selected package `bad v1.0.0` ... which satisfies dependency `bad = \"=1.0.0\"` of package `baz v0.1.0` ... 
which satisfies dependency `baz = \"^0.1.0\"` of package `foo v0.0.1 ([..])` failed to select a version for `bad` which could resolve this conflict", ) .run(); } #[cargo_test] fn incompatible_dependencies_with_multi_semver() { Package::new("bad", "1.0.0").publish(); Package::new("bad", "1.0.1").publish(); Package::new("bad", "2.0.0").publish(); Package::new("bad", "2.0.1").publish(); Package::new("bar", "0.1.0").dep("bad", "=1.0.0").publish(); Package::new("baz", "0.1.0").dep("bad", ">=2.0.1").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" [dependencies] bar = "0.1.0" baz = "0.1.0" bad = ">=1.0.1, <=2.0.0" "#, ) .file("src/main.rs", "fn main(){}") .build(); p.cargo("build") .with_status(101) .with_stderr_contains( "\ error: failed to select a version for `bad`. ... required by package `foo v0.0.1 ([..])` versions that meet the requirements `>=1.0.1, <=2.0.0` are: 2.0.0, 1.0.1 all possible versions conflict with previously selected packages. previously selected package `bad v2.0.1` ... which satisfies dependency `bad = \">=2.0.1\"` of package `baz v0.1.0` ... which satisfies dependency `baz = \"^0.1.0\"` of package `foo v0.0.1 ([..])` previously selected package `bad v1.0.0` ... which satisfies dependency `bad = \"=1.0.0\"` of package `bar v0.1.0` ... 
which satisfies dependency `bar = \"^0.1.0\"` of package `foo v0.0.1 ([..])` failed to select a version for `bad` which could resolve this conflict", ) .run(); } #[cargo_test] fn compile_path_dep_then_change_version() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "") .build(); p.cargo("build").run(); p.change_file("bar/Cargo.toml", &basic_manifest("bar", "0.0.2")); p.cargo("build").run(); } #[cargo_test] fn ignores_carriage_return_in_lockfile() { let p = project() .file("src/main.rs", "mod a; fn main() {}") .file("src/a.rs", "") .build(); p.cargo("build").run(); let lock = p.read_lockfile(); p.change_file("Cargo.lock", &lock.replace("\n", "\r\n")); p.cargo("build").run(); } #[cargo_test] fn cargo_default_env_metadata_env_var() { // Ensure that path dep + dylib + env_var get metadata // (even though path_dep + dylib should not) let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" "#, ) .file("src/lib.rs", "// hi") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [lib] name = "bar" crate_type = ["dylib"] "#, ) .file("bar/src/lib.rs", "// hello") .build(); // No metadata on libbar since it's a dylib path dependency p.cargo("build -v") .with_stderr(&format!( "\ [COMPILING] bar v0.0.1 ([CWD]/bar) [RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type dylib \ --emit=[..]link \ -C prefer-dynamic[..]-C debuginfo=2 \ -C metadata=[..] \ --out-dir [..] \ -L dependency=[CWD]/target/debug/deps` [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \ --emit=[..]link[..]-C debuginfo=2 \ -C metadata=[..] \ -C extra-filename=[..] \ --out-dir [..] 
\ -L dependency=[CWD]/target/debug/deps \ --extern bar=[CWD]/target/debug/deps/{prefix}bar{suffix}` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", prefix = env::consts::DLL_PREFIX, suffix = env::consts::DLL_SUFFIX, )) .run(); p.cargo("clean").run(); // If you set the env-var, then we expect metadata on libbar p.cargo("build -v") .env("__CARGO_DEFAULT_LIB_METADATA", "stable") .with_stderr(&format!( "\ [COMPILING] bar v0.0.1 ([CWD]/bar) [RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type dylib \ --emit=[..]link \ -C prefer-dynamic[..]-C debuginfo=2 \ -C metadata=[..] \ --out-dir [..] \ -L dependency=[CWD]/target/debug/deps` [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \ --emit=[..]link[..]-C debuginfo=2 \ -C metadata=[..] \ -C extra-filename=[..] \ --out-dir [..] \ -L dependency=[CWD]/target/debug/deps \ --extern bar=[CWD]/target/debug/deps/{prefix}bar-[..]{suffix}` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", prefix = env::consts::DLL_PREFIX, suffix = env::consts::DLL_SUFFIX, )) .run(); } #[cargo_test] fn crate_env_vars() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.1-alpha.1" description = "This is foo" homepage = "https://example.com" repository = "https://example.com/repo.git" authors = ["wycats@example.com"] license = "MIT OR Apache-2.0" license-file = "license.txt" rust-version = "1.61.0" [[bin]] name = "foo-bar" path = "src/main.rs" "#, ) .file( "src/main.rs", r#" extern crate foo; static VERSION_MAJOR: &'static str = env!("CARGO_PKG_VERSION_MAJOR"); static VERSION_MINOR: &'static str = env!("CARGO_PKG_VERSION_MINOR"); static VERSION_PATCH: &'static str = env!("CARGO_PKG_VERSION_PATCH"); static VERSION_PRE: &'static str = env!("CARGO_PKG_VERSION_PRE"); static VERSION: &'static str = env!("CARGO_PKG_VERSION"); static CARGO_MANIFEST_DIR: &'static str = env!("CARGO_MANIFEST_DIR"); static PKG_NAME: &'static str = env!("CARGO_PKG_NAME"); static HOMEPAGE: &'static str = env!("CARGO_PKG_HOMEPAGE"); static REPOSITORY: &'static str = env!("CARGO_PKG_REPOSITORY"); static LICENSE: &'static str = env!("CARGO_PKG_LICENSE"); static LICENSE_FILE: &'static str = env!("CARGO_PKG_LICENSE_FILE"); static DESCRIPTION: &'static str = env!("CARGO_PKG_DESCRIPTION"); static RUST_VERSION: &'static str = env!("CARGO_PKG_RUST_VERSION"); static BIN_NAME: &'static str = env!("CARGO_BIN_NAME"); static CRATE_NAME: &'static str = env!("CARGO_CRATE_NAME"); fn main() { let s = format!("{}-{}-{} @ {} in {}", VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH, VERSION_PRE, CARGO_MANIFEST_DIR); assert_eq!(s, foo::version()); println!("{}", s); assert_eq!("foo", PKG_NAME); assert_eq!("foo-bar", BIN_NAME); assert_eq!("foo_bar", CRATE_NAME); assert_eq!("https://example.com", HOMEPAGE); assert_eq!("https://example.com/repo.git", REPOSITORY); assert_eq!("MIT OR Apache-2.0", LICENSE); assert_eq!("license.txt", LICENSE_FILE); assert_eq!("This is foo", DESCRIPTION); 
assert_eq!("1.61.0", RUST_VERSION); let s = format!("{}.{}.{}-{}", VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH, VERSION_PRE); assert_eq!(s, VERSION); // Verify CARGO_TARGET_TMPDIR isn't set for bins assert!(option_env!("CARGO_TARGET_TMPDIR").is_none()); } "#, ) .file( "src/lib.rs", r#" use std::env; use std::path::PathBuf; pub fn version() -> String { format!("{}-{}-{} @ {} in {}", env!("CARGO_PKG_VERSION_MAJOR"), env!("CARGO_PKG_VERSION_MINOR"), env!("CARGO_PKG_VERSION_PATCH"), env!("CARGO_PKG_VERSION_PRE"), env!("CARGO_MANIFEST_DIR")) } pub fn check_no_int_test_env() { env::var("CARGO_TARGET_DIR").unwrap_err(); } pub fn check_tmpdir(tmp: Option<&'static str>) { let tmpdir: PathBuf = tmp.unwrap().into(); let exe: PathBuf = env::current_exe().unwrap().into(); let mut expected: PathBuf = exe.parent().unwrap() .parent().unwrap() .parent().unwrap() .into(); expected.push("tmp"); assert_eq!(tmpdir, expected); // Check that CARGO_TARGET_TMPDIR isn't set for lib code assert!(option_env!("CARGO_TARGET_TMPDIR").is_none()); env::var("CARGO_TARGET_TMPDIR").unwrap_err(); } #[test] fn env() { // Check that CARGO_TARGET_TMPDIR isn't set for unit tests assert!(option_env!("CARGO_TARGET_TMPDIR").is_none()); env::var("CARGO_TARGET_TMPDIR").unwrap_err(); } "#, ) .file( "tests/env.rs", r#" #[test] fn env() { foo::check_tmpdir(option_env!("CARGO_TARGET_TMPDIR")); } "#, ); let p = if is_nightly() { p.file( "benches/env.rs", r#" #![feature(test)] extern crate test; use test::Bencher; #[bench] fn env(_: &mut Bencher) { foo::check_tmpdir(option_env!("CARGO_TARGET_TMPDIR")); } "#, ) .build() } else { p.build() }; println!("build"); p.cargo("build -v").run(); println!("bin"); p.process(&p.bin("foo-bar")) .with_stdout("0-5-1 @ alpha.1 in [CWD]") .run(); println!("test"); p.cargo("test -v").run(); if is_nightly() { println!("bench"); p.cargo("bench -v").run(); } } #[cargo_test] fn crate_authors_env_vars() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = 
"0.5.1-alpha.1" authors = ["wycats@example.com", "neikos@example.com"] "#, ) .file( "src/main.rs", r#" extern crate foo; static AUTHORS: &'static str = env!("CARGO_PKG_AUTHORS"); fn main() { let s = "wycats@example.com:neikos@example.com"; assert_eq!(AUTHORS, foo::authors()); println!("{}", AUTHORS); assert_eq!(s, AUTHORS); } "#, ) .file( "src/lib.rs", r#" pub fn authors() -> String { format!("{}", env!("CARGO_PKG_AUTHORS")) } "#, ) .build(); println!("build"); p.cargo("build -v").run(); println!("bin"); p.process(&p.bin("foo")) .with_stdout("wycats@example.com:neikos@example.com") .run(); println!("test"); p.cargo("test -v").run(); } #[cargo_test] fn vv_prints_rustc_env_vars() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = ["escape='\"@example.com"] "#, ) .file("src/main.rs", "fn main() {}") .build(); let mut b = p.cargo("build -vv"); if cfg!(windows) { b.with_stderr_contains( "[RUNNING] `[..]set CARGO_PKG_NAME=foo&& [..]rustc [..]`" ).with_stderr_contains( r#"[RUNNING] `[..]set CARGO_PKG_AUTHORS="escape='\"@example.com"&& [..]rustc [..]`"# ) } else { b.with_stderr_contains("[RUNNING] `[..]CARGO_PKG_NAME=foo [..]rustc [..]`") .with_stderr_contains( r#"[RUNNING] `[..]CARGO_PKG_AUTHORS='escape='\''"@example.com' [..]rustc [..]`"#, ) }; b.run(); } // The tester may already have LD_LIBRARY_PATH=::/foo/bar which leads to a false positive error fn setenv_for_removing_empty_component(mut execs: Execs) -> Execs { let v = dylib_path_envvar(); if let Ok(search_path) = env::var(v) { let new_search_path = env::join_paths(env::split_paths(&search_path).filter(|e| !e.as_os_str().is_empty())) .expect("join_paths"); execs.env(v, new_search_path); // build_command() will override LD_LIBRARY_PATH accordingly } execs } // Regression test for #4277 #[cargo_test] fn crate_library_path_env_var() { let p = project() .file( "src/main.rs", &format!( r#" fn main() {{ let search_path = env!("{}"); let paths = 
std::env::split_paths(&search_path).collect::>(); assert!(!paths.contains(&"".into())); }} "#, dylib_path_envvar() ), ) .build(); setenv_for_removing_empty_component(p.cargo("run")).run(); } // Regression test for #4277 #[cargo_test] fn build_with_fake_libc_not_loading() { let p = project() .file("src/main.rs", "fn main() {}") .file("src/lib.rs", r#" "#) .file("libc.so.6", r#""#) .build(); setenv_for_removing_empty_component(p.cargo("build")).run(); } // this is testing that src/.rs still works (for now) #[cargo_test] fn many_crate_types_old_style_lib_location() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [lib] name = "foo" crate_type = ["rlib", "dylib"] "#, ) .file("src/foo.rs", "pub fn foo() {}") .build(); p.cargo("build") .with_stderr_contains( "\ [WARNING] path `[..]src/foo.rs` was erroneously implicitly accepted for library `foo`, please rename the file to `src/lib.rs` or set lib.path in Cargo.toml", ) .run(); assert!(p.root().join("target/debug/libfoo.rlib").is_file()); let fname = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX); assert!(p.root().join("target/debug").join(&fname).is_file()); } #[cargo_test] fn many_crate_types_correct() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [lib] name = "foo" crate_type = ["rlib", "dylib"] "#, ) .file("src/lib.rs", "pub fn foo() {}") .build(); p.cargo("build").run(); assert!(p.root().join("target/debug/libfoo.rlib").is_file()); let fname = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX); assert!(p.root().join("target/debug").join(&fname).is_file()); } #[cargo_test] fn set_both_dylib_and_cdylib_crate_types() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [lib] name = "foo" crate_type = ["cdylib", "dylib"] "#, ) .file("src/lib.rs", "pub fn foo() {}") .build(); 
p.cargo("build") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[..]` Caused by: library `foo` cannot set the crate type of both `dylib` and `cdylib` ", ) .run(); } #[cargo_test] fn dev_dependencies_conflicting_warning() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [dev-dependencies] a = {path = "a"} [dev_dependencies] a = {path = "a"} "#, ) .file("src/lib.rs", "") .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.0.1" "#, ) .file("a/src/lib.rs", "") .build(); p.cargo("build") .with_stderr_contains( "[WARNING] conflicting between `dev-dependencies` and `dev_dependencies` in the `foo` package.\n `dev_dependencies` is ignored and not recommended for use in the future" ) .run(); } #[cargo_test] fn build_dependencies_conflicting_warning() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [build-dependencies] a = {path = "a"} [build_dependencies] a = {path = "a"} "#, ) .file("src/lib.rs", "") .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.0.1" "#, ) .file("a/src/lib.rs", "") .build(); p.cargo("build") .with_stderr_contains( "[WARNING] conflicting between `build-dependencies` and `build_dependencies` in the `foo` package.\n `build_dependencies` is ignored and not recommended for use in the future" ) .run(); } #[cargo_test] fn lib_crate_types_conflicting_warning() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [lib] name = "foo" crate-type = ["rlib", "dylib"] crate_type = ["staticlib", "dylib"] "#, ) .file("src/lib.rs", "pub fn foo() {}") .build(); p.cargo("build") .with_stderr_contains( "[WARNING] conflicting between `crate-type` and `crate_type` in the `foo` library target.\n `crate_type` is ignored and not recommended for use in the future", ) .run(); } #[cargo_test] fn examples_crate_types_conflicting_warning() { let p = project() .file( 
"Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [[example]] name = "ex" path = "examples/ex.rs" crate-type = ["rlib", "dylib"] crate_type = ["proc_macro"] [[example]] name = "goodbye" path = "examples/ex-goodbye.rs" crate-type = ["rlib", "dylib"] crate_type = ["rlib", "staticlib"] "#, ) .file("src/lib.rs", "") .file( "examples/ex.rs", r#" fn main() { println!("ex"); } "#, ) .file( "examples/ex-goodbye.rs", r#" fn main() { println!("goodbye"); } "#, ) .build(); p.cargo("build") .with_stderr_contains( "\ [WARNING] conflicting between `crate-type` and `crate_type` in the `ex` example target.\n `crate_type` is ignored and not recommended for use in the future [WARNING] conflicting between `crate-type` and `crate_type` in the `goodbye` example target.\n `crate_type` is ignored and not recommended for use in the future", ) .run(); } #[cargo_test] fn self_dependency() { let p = project() .file( "Cargo.toml", r#" [package] name = "test" version = "0.0.0" authors = [] [dependencies.test] path = "." [lib] name = "test" path = "src/test.rs" "#, ) .file("src/test.rs", "fn main() {}") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] cyclic package dependency: package `test v0.0.0 ([CWD])` depends on itself. Cycle: package `test v0.0.0 ([CWD])` ... which satisfies path dependency `test` of package `test v0.0.0 ([..])`", ) .run(); } #[cargo_test] /// Make sure broken and loop symlinks don't break the build /// /// This test requires you to be able to make symlinks. /// For windows, this may require you to enable developer mode. fn ignore_broken_symlinks() { if !symlink_supported() { return; } let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .symlink("Notafile", "bar") // To hit the symlink directory, we need a build script // to trigger a full scan of package files. 
.file("build.rs", &main_file(r#""build script""#, &[])) .symlink_dir("a/b", "a/b/c/d/foo") .build(); p.cargo("build") .with_stderr_contains( "[WARNING] File system loop found: [..]/a/b/c/d/foo points to an ancestor [..]/a/b", ) .run(); assert!(p.bin("foo").is_file()); p.process(&p.bin("foo")).with_stdout("i am foo\n").run(); } #[cargo_test] fn missing_lib_and_bin() { let p = project().build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]Cargo.toml` Caused by: no targets specified in the manifest either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] section must be present\n", ) .run(); } #[cargo_test] fn lto_build() { let p = project() .file( "Cargo.toml", r#" [package] name = "test" version = "0.0.0" authors = [] [profile.release] lto = true "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build -v --release") .with_stderr( "\ [COMPILING] test v0.0.0 ([CWD]) [RUNNING] `rustc --crate-name test src/main.rs [..]--crate-type bin \ --emit=[..]link \ -C opt-level=3 \ -C lto \ [..] [FINISHED] release [optimized] target(s) in [..] ", ) .run(); } #[cargo_test] fn verbose_build() { let p = project().file("src/lib.rs", "").build(); p.cargo("build -v") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \ --emit=[..]link[..]-C debuginfo=2 \ -C metadata=[..] \ --out-dir [..] \ -L dependency=[CWD]/target/debug/deps` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn verbose_release_build() { let p = project().file("src/lib.rs", "").build(); p.cargo("build -v --release") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \ --emit=[..]link[..]\ -C opt-level=3[..]\ -C metadata=[..] \ --out-dir [..] \ -L dependency=[CWD]/target/release/deps` [FINISHED] release [optimized] target(s) in [..] 
", ) .run(); } #[cargo_test] fn verbose_release_build_short() { let p = project().file("src/lib.rs", "").build(); p.cargo("build -v -r") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \ --emit=[..]link[..]\ -C opt-level=3[..]\ -C metadata=[..] \ --out-dir [..] \ -L dependency=[CWD]/target/release/deps` [FINISHED] release [optimized] target(s) in [..] ", ) .run(); } #[cargo_test] fn verbose_release_build_deps() { let p = project() .file( "Cargo.toml", r#" [package] name = "test" version = "0.0.0" authors = [] [dependencies.foo] path = "foo" "#, ) .file("src/lib.rs", "") .file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [lib] name = "foo" crate_type = ["dylib", "rlib"] "#, ) .file("foo/src/lib.rs", "") .build(); p.cargo("build -v --release") .with_stderr(&format!( "\ [COMPILING] foo v0.0.0 ([CWD]/foo) [RUNNING] `rustc --crate-name foo foo/src/lib.rs [..]\ --crate-type dylib --crate-type rlib \ --emit=[..]link \ -C prefer-dynamic[..]\ -C opt-level=3[..]\ -C metadata=[..] \ --out-dir [..] \ -L dependency=[CWD]/target/release/deps` [COMPILING] test v0.0.0 ([CWD]) [RUNNING] `rustc --crate-name test src/lib.rs [..]--crate-type lib \ --emit=[..]link[..]\ -C opt-level=3[..]\ -C metadata=[..] \ --out-dir [..] \ -L dependency=[CWD]/target/release/deps \ --extern foo=[CWD]/target/release/deps/{prefix}foo{suffix} \ --extern foo=[CWD]/target/release/deps/libfoo.rlib` [FINISHED] release [optimized] target(s) in [..] 
", prefix = env::consts::DLL_PREFIX, suffix = env::consts::DLL_SUFFIX )) .run(); } #[cargo_test] fn explicit_examples() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.0.0" authors = [] [lib] name = "foo" path = "src/lib.rs" [[example]] name = "hello" path = "examples/ex-hello.rs" [[example]] name = "goodbye" path = "examples/ex-goodbye.rs" "#, ) .file( "src/lib.rs", r#" pub fn get_hello() -> &'static str { "Hello" } pub fn get_goodbye() -> &'static str { "Goodbye" } pub fn get_world() -> &'static str { "World" } "#, ) .file( "examples/ex-hello.rs", r#" extern crate foo; fn main() { println!("{}, {}!", foo::get_hello(), foo::get_world()); } "#, ) .file( "examples/ex-goodbye.rs", r#" extern crate foo; fn main() { println!("{}, {}!", foo::get_goodbye(), foo::get_world()); } "#, ) .build(); p.cargo("build --examples").run(); p.process(&p.bin("examples/hello")) .with_stdout("Hello, World!\n") .run(); p.process(&p.bin("examples/goodbye")) .with_stdout("Goodbye, World!\n") .run(); } #[cargo_test] fn non_existing_test() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.0.0" [lib] name = "foo" path = "src/lib.rs" [[test]] name = "hello" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build --tests -v") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: can't find `hello` test at `tests/hello.rs` or `tests/hello/main.rs`. \ Please specify test.path if you want to use a non-default path.", ) .run(); } #[cargo_test] fn non_existing_example() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.0.0" [lib] name = "foo" path = "src/lib.rs" [[example]] name = "hello" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build --examples -v") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: can't find `hello` example at `examples/hello.rs` or `examples/hello/main.rs`. 
\ Please specify example.path if you want to use a non-default path.", ) .run(); } #[cargo_test] fn non_existing_benchmark() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.0.0" [lib] name = "foo" path = "src/lib.rs" [[bench]] name = "hello" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build --benches -v") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: can't find `hello` bench at `benches/hello.rs` or `benches/hello/main.rs`. \ Please specify bench.path if you want to use a non-default path.", ) .run(); } #[cargo_test] fn non_existing_binary() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/lib.rs", "") .file("src/bin/ehlo.rs", "") .build(); p.cargo("build -v") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: can't find `foo` bin at `src/bin/foo.rs` or `src/bin/foo/main.rs`. \ Please specify bin.path if you want to use a non-default path.", ) .run(); } #[cargo_test] fn commonly_wrong_path_of_test() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.0.0" [lib] name = "foo" path = "src/lib.rs" [[test]] name = "foo" "#, ) .file("src/lib.rs", "") .file("test/foo.rs", "") .build(); p.cargo("build --tests -v") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: can't find `foo` test at default paths, but found a file at `test/foo.rs`. 
Perhaps rename the file to `tests/foo.rs` for target auto-discovery, \ or specify test.path if you want to use a non-default path.", ) .run(); } #[cargo_test] fn commonly_wrong_path_of_example() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.0.0" [lib] name = "foo" path = "src/lib.rs" [[example]] name = "foo" "#, ) .file("src/lib.rs", "") .file("example/foo.rs", "") .build(); p.cargo("build --examples -v") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: can't find `foo` example at default paths, but found a file at `example/foo.rs`. Perhaps rename the file to `examples/foo.rs` for target auto-discovery, \ or specify example.path if you want to use a non-default path.", ) .run(); } #[cargo_test] fn commonly_wrong_path_of_benchmark() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.0.0" [lib] name = "foo" path = "src/lib.rs" [[bench]] name = "foo" "#, ) .file("src/lib.rs", "") .file("bench/foo.rs", "") .build(); p.cargo("build --benches -v") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: can't find `foo` bench at default paths, but found a file at `bench/foo.rs`. Perhaps rename the file to `benches/foo.rs` for target auto-discovery, \ or specify bench.path if you want to use a non-default path.", ) .run(); } #[cargo_test] fn commonly_wrong_path_binary() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/lib.rs", "") .file("src/bins/foo.rs", "") .build(); p.cargo("build -v") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: can't find `foo` bin at default paths, but found a file at `src/bins/foo.rs`. 
Perhaps rename the file to `src/bin/foo.rs` for target auto-discovery, \ or specify bin.path if you want to use a non-default path.", ) .run(); } #[cargo_test] fn commonly_wrong_path_subdir_binary() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/lib.rs", "") .file("src/bins/foo/main.rs", "") .build(); p.cargo("build -v") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: can't find `foo` bin at default paths, but found a file at `src/bins/foo/main.rs`. Perhaps rename the file to `src/bin/foo/main.rs` for target auto-discovery, \ or specify bin.path if you want to use a non-default path.", ) .run(); } #[cargo_test] fn found_multiple_target_files() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/lib.rs", "") .file("src/bin/foo.rs", "") .file("src/bin/foo/main.rs", "") .build(); p.cargo("build -v") .with_status(101) // Don't assert the inferred paths since the order is non-deterministic. .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: cannot infer path for `foo` bin Cargo doesn't know which to use because multiple target files found \ at `src/bin/foo[..].rs` and `src/bin/foo[..].rs`.", ) .run(); } #[cargo_test] fn legacy_binary_paths_warnings() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.0.0" authors = [] [[bin]] name = "bar" "#, ) .file("src/lib.rs", "") .file("src/main.rs", "fn main() {}") .build(); p.cargo("build -v") .with_stderr_contains( "\ [WARNING] path `[..]src/main.rs` was erroneously implicitly accepted for binary `bar`, please set bin.path in Cargo.toml", ) .run(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.0.0" authors = [] [[bin]] name = "bar" "#, ) .file("src/lib.rs", "") .file("src/bin/main.rs", "fn main() {}") .build(); p.cargo("build -v") .with_stderr_contains( "\ [WARNING] path `[..]src/bin/main.rs` was erroneously implicitly accepted for binary `bar`, 
please set bin.path in Cargo.toml", ) .run(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.0.0" authors = [] [[bin]] name = "bar" "#, ) .file("src/bar.rs", "fn main() {}") .build(); p.cargo("build -v") .with_stderr_contains( "\ [WARNING] path `[..]src/bar.rs` was erroneously implicitly accepted for binary `bar`, please set bin.path in Cargo.toml", ) .run(); } #[cargo_test] fn implicit_examples() { let p = project() .file( "src/lib.rs", r#" pub fn get_hello() -> &'static str { "Hello" } pub fn get_goodbye() -> &'static str { "Goodbye" } pub fn get_world() -> &'static str { "World" } "#, ) .file( "examples/hello.rs", r#" extern crate foo; fn main() { println!("{}, {}!", foo::get_hello(), foo::get_world()); } "#, ) .file( "examples/goodbye.rs", r#" extern crate foo; fn main() { println!("{}, {}!", foo::get_goodbye(), foo::get_world()); } "#, ) .build(); p.cargo("build --examples").run(); p.process(&p.bin("examples/hello")) .with_stdout("Hello, World!\n") .run(); p.process(&p.bin("examples/goodbye")) .with_stdout("Goodbye, World!\n") .run(); } #[cargo_test] fn standard_build_no_ndebug() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file( "src/foo.rs", r#" fn main() { if cfg!(debug_assertions) { println!("slow") } else { println!("fast") } } "#, ) .build(); p.cargo("build").run(); p.process(&p.bin("foo")).with_stdout("slow\n").run(); } #[cargo_test] fn release_build_ndebug() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file( "src/foo.rs", r#" fn main() { if cfg!(debug_assertions) { println!("slow") } else { println!("fast") } } "#, ) .build(); p.cargo("build --release").run(); p.process(&p.release_bin("foo")).with_stdout("fast\n").run(); } #[cargo_test] fn inferred_main_bin() { let p = project().file("src/main.rs", "fn main() {}").build(); p.cargo("build").run(); p.process(&p.bin("foo")).run(); } #[cargo_test] fn deletion_causes_failure() { let p = project() .file( "Cargo.toml", r#" 
[package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" "#, ) .file("src/main.rs", "extern crate bar; fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "") .build(); p.cargo("build").run(); p.change_file("Cargo.toml", &basic_manifest("foo", "0.0.1")); p.cargo("build") .with_status(101) .with_stderr_contains("[..]can't find crate for `bar`") .run(); } #[cargo_test] fn bad_cargo_toml_in_target_dir() { let p = project() .file("src/main.rs", "fn main() {}") .file("target/Cargo.toml", "bad-toml") .build(); p.cargo("build").run(); p.process(&p.bin("foo")).run(); } #[cargo_test] fn lib_with_standard_name() { let p = project() .file("Cargo.toml", &basic_manifest("syntax", "0.0.1")) .file("src/lib.rs", "pub fn foo() {}") .file( "src/main.rs", "extern crate syntax; fn main() { syntax::foo() }", ) .build(); p.cargo("build") .with_stderr( "\ [COMPILING] syntax v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn simple_staticlib() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.1" [lib] name = "foo" crate-type = ["staticlib"] "#, ) .file("src/lib.rs", "pub fn foo() {}") .build(); // env var is a test for #1381 p.cargo("build").env("CARGO_LOG", "nekoneko=trace").run(); } #[cargo_test] fn staticlib_rlib_and_bin() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.1" [lib] name = "foo" crate-type = ["staticlib", "rlib"] "#, ) .file("src/lib.rs", "pub fn foo() {}") .file("src/main.rs", "extern crate foo; fn main() { foo::foo(); }") .build(); p.cargo("build -v").run(); } #[cargo_test] fn opt_out_of_bin() { let p = project() .file( "Cargo.toml", r#" bin = [] [package] name = "foo" authors = [] version = "0.0.1" "#, ) .file("src/lib.rs", "") .file("src/main.rs", "bad syntax") .build(); p.cargo("build").run(); } #[cargo_test] fn single_lib() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.1" [lib] name = "foo" path = "src/bar.rs" "#, ) .file("src/bar.rs", "") .build(); p.cargo("build").run(); } #[cargo_test] fn freshness_ignores_excluded() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] build = "build.rs" exclude = ["src/b*.rs"] "#, ) .file("build.rs", "fn main() {}") .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") .build(); foo.root().move_into_the_past(); foo.cargo("build") .with_stderr( "\ [COMPILING] foo v0.0.0 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); // Smoke test to make sure it doesn't compile again println!("first pass"); foo.cargo("build").with_stdout("").run(); // Modify an ignored file and make sure we don't rebuild println!("second pass"); foo.change_file("src/bar.rs", ""); foo.cargo("build").with_stdout("").run(); } #[cargo_test] fn rebuild_preserves_out_dir() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] build = 'build.rs' "#, ) .file( "build.rs", r#" use std::env; use std::fs::File; use std::path::Path; fn main() { let path = Path::new(&env::var("OUT_DIR").unwrap()).join("foo"); if env::var_os("FIRST").is_some() { File::create(&path).unwrap(); } else { File::create(&path).unwrap(); } } "#, ) .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") .build(); foo.root().move_into_the_past(); foo.cargo("build") .env("FIRST", "1") .with_stderr( "\ [COMPILING] foo v0.0.0 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); foo.change_file("src/bar.rs", ""); foo.cargo("build") .with_stderr( "\ [COMPILING] foo v0.0.0 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn dep_no_libs() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [dependencies.bar] path = "bar" "#, ) .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.0")) .file("bar/src/main.rs", "") .build(); foo.cargo("build").run(); } #[cargo_test] fn recompile_space_in_name() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [lib] name = "foo" path = "src/my lib.rs" "#, ) .file("src/my lib.rs", "") .build(); foo.cargo("build").run(); foo.root().move_into_the_past(); foo.cargo("build").with_stdout("").run(); } #[cfg(unix)] #[cargo_test] fn credentials_is_unreadable() { use cargo_test_support::paths::home; use std::os::unix::prelude::*; let p = project() .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("src/lib.rs", "") .build(); let credentials = home().join(".cargo/credentials"); t!(fs::create_dir_all(credentials.parent().unwrap())); t!(fs::write( &credentials, r#" [registry] token = "api-token" "# )); let stat = fs::metadata(credentials.as_path()).unwrap(); let mut perms = stat.permissions(); perms.set_mode(0o000); fs::set_permissions(credentials, perms).unwrap(); p.cargo("build").run(); } #[cfg(unix)] #[cargo_test] fn ignore_bad_directories() { use std::os::unix::prelude::*; let foo = project() .file("Cargo.toml", &basic_manifest("foo", "0.0.0")) .file("src/lib.rs", "") .build(); let dir = foo.root().join("tmp"); fs::create_dir(&dir).unwrap(); let stat = fs::metadata(&dir).unwrap(); let mut perms = stat.permissions(); perms.set_mode(0o644); fs::set_permissions(&dir, perms.clone()).unwrap(); foo.cargo("build").run(); perms.set_mode(0o755); fs::set_permissions(&dir, perms).unwrap(); } #[cargo_test] fn bad_cargo_config() { let foo = project() .file("Cargo.toml", &basic_manifest("foo", "0.0.0")) .file("src/lib.rs", "") .file(".cargo/config", "this is not valid toml") .build(); 
foo.cargo("build -v") .with_status(101) .with_stderr( "\ [ERROR] could not load Cargo configuration Caused by: could not parse TOML configuration in `[..]` Caused by: could not parse input as TOML Caused by: TOML parse error at line 1, column 6 | 1 | this is not valid toml | ^ Unexpected `i` Expected `.` or `=` ", ) .run(); } #[cargo_test] fn cargo_platform_specific_dependency() { let host = rustc_host(); let p = project() .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] build = "build.rs" [target.{host}.dependencies] dep = {{ path = "dep" }} [target.{host}.build-dependencies] build = {{ path = "build" }} [target.{host}.dev-dependencies] dev = {{ path = "dev" }} "#, host = host ), ) .file("src/main.rs", "extern crate dep; fn main() { dep::dep() }") .file( "tests/foo.rs", "extern crate dev; #[test] fn foo() { dev::dev() }", ) .file( "build.rs", "extern crate build; fn main() { build::build(); }", ) .file("dep/Cargo.toml", &basic_manifest("dep", "0.5.0")) .file("dep/src/lib.rs", "pub fn dep() {}") .file("build/Cargo.toml", &basic_manifest("build", "0.5.0")) .file("build/src/lib.rs", "pub fn build() {}") .file("dev/Cargo.toml", &basic_manifest("dev", "0.5.0")) .file("dev/src/lib.rs", "pub fn dev() {}") .build(); p.cargo("build").run(); assert!(p.bin("foo").is_file()); p.cargo("test").run(); } #[cargo_test] fn cargo_platform_specific_dependency_build_dependencies_conflicting_warning() { let host = rustc_host(); let p = project() .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] build = "build.rs" [target.{host}.build-dependencies] build = {{ path = "build" }} [target.{host}.build_dependencies] build = {{ path = "build" }} "#, host = host ), ) .file("src/main.rs", "fn main() { }") .file( "build.rs", "extern crate build; fn main() { build::build(); }", ) .file("build/Cargo.toml", &basic_manifest("build", "0.5.0")) .file("build/src/lib.rs", "pub fn 
build() {}") .build(); p.cargo("build") .with_stderr_contains( format!("[WARNING] conflicting between `build-dependencies` and `build_dependencies` in the `{}` platform target.\n `build_dependencies` is ignored and not recommended for use in the future", host) ) .run(); assert!(p.bin("foo").is_file()); } #[cargo_test] fn cargo_platform_specific_dependency_dev_dependencies_conflicting_warning() { let host = rustc_host(); let p = project() .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [target.{host}.dev-dependencies] dev = {{ path = "dev" }} [target.{host}.dev_dependencies] dev = {{ path = "dev" }} "#, host = host ), ) .file("src/main.rs", "fn main() { }") .file( "tests/foo.rs", "extern crate dev; #[test] fn foo() { dev::dev() }", ) .file("dev/Cargo.toml", &basic_manifest("dev", "0.5.0")) .file("dev/src/lib.rs", "pub fn dev() {}") .build(); p.cargo("build") .with_stderr_contains( format!("[WARNING] conflicting between `dev-dependencies` and `dev_dependencies` in the `{}` platform target.\n `dev_dependencies` is ignored and not recommended for use in the future", host) ) .run(); assert!(p.bin("foo").is_file()); p.cargo("test").run(); } #[cargo_test] fn bad_platform_specific_dependency() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [target.wrong-target.dependencies.bar] path = "bar" "#, ) .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) .file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0")) .file( "bar/src/lib.rs", r#"pub fn gimme() -> String { format!("") }"#, ) .build(); p.cargo("build") .with_status(101) .with_stderr_contains("[..]can't find crate for `bar`") .run(); } #[cargo_test] fn cargo_platform_specific_dependency_wrong_platform() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [target.non-existing-triplet.dependencies.bar] path = 
"bar" "#, ) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0")) .file( "bar/src/lib.rs", "invalid rust file, should not be compiled", ) .build(); p.cargo("build").run(); assert!(p.bin("foo").is_file()); p.process(&p.bin("foo")).run(); let lockfile = p.read_lockfile(); assert!(lockfile.contains("bar")); } #[cargo_test] fn example_as_lib() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [[example]] name = "ex" crate-type = ["lib"] "#, ) .file("src/lib.rs", "") .file("examples/ex.rs", "") .build(); p.cargo("build --example=ex").run(); assert!(p.example_lib("ex", "lib").is_file()); } #[cargo_test] fn example_as_rlib() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [[example]] name = "ex" crate-type = ["rlib"] "#, ) .file("src/lib.rs", "") .file("examples/ex.rs", "") .build(); p.cargo("build --example=ex").run(); assert!(p.example_lib("ex", "rlib").is_file()); } #[cargo_test] fn example_as_dylib() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [[example]] name = "ex" crate-type = ["dylib"] "#, ) .file("src/lib.rs", "") .file("examples/ex.rs", "") .build(); p.cargo("build --example=ex").run(); assert!(p.example_lib("ex", "dylib").is_file()); } #[cargo_test] fn example_as_proc_macro() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [[example]] name = "ex" crate-type = ["proc-macro"] "#, ) .file("src/lib.rs", "") .file( "examples/ex.rs", r#" extern crate proc_macro; use proc_macro::TokenStream; #[proc_macro] pub fn eat(_item: TokenStream) -> TokenStream { "".parse().unwrap() } "#, ) .build(); p.cargo("build --example=ex").run(); assert!(p.example_lib("ex", "proc-macro").is_file()); } #[cargo_test] fn example_bin_same_name() { let p = project() .file("src/main.rs", "fn main() {}") .file("examples/foo.rs", "fn main() {}") .build(); 
p.cargo("build --examples").run(); assert!(!p.bin("foo").is_file()); // We expect a file of the form bin/foo-{metadata_hash} assert!(p.bin("examples/foo").is_file()); p.cargo("build --examples").run(); assert!(!p.bin("foo").is_file()); // We expect a file of the form bin/foo-{metadata_hash} assert!(p.bin("examples/foo").is_file()); } #[cargo_test] fn compile_then_delete() { let p = project().file("src/main.rs", "fn main() {}").build(); p.cargo("run -v").run(); assert!(p.bin("foo").is_file()); if cfg!(windows) { // On windows unlinking immediately after running often fails, so sleep sleep_ms(100); } fs::remove_file(&p.bin("foo")).unwrap(); p.cargo("run -v").run(); } #[cargo_test] fn transitive_dependencies_not_available() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.aaaaa] path = "a" "#, ) .file( "src/main.rs", "extern crate bbbbb; extern crate aaaaa; fn main() {}", ) .file( "a/Cargo.toml", r#" [package] name = "aaaaa" version = "0.0.1" authors = [] [dependencies.bbbbb] path = "../b" "#, ) .file("a/src/lib.rs", "extern crate bbbbb;") .file("b/Cargo.toml", &basic_manifest("bbbbb", "0.0.1")) .file("b/src/lib.rs", "") .build(); p.cargo("build -v") .with_status(101) .with_stderr_contains("[..] can't find crate for `bbbbb`[..]") .run(); } #[cargo_test] fn cyclic_deps_rejected() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.a] path = "a" "#, ) .file("src/lib.rs", "") .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] [dependencies.foo] path = ".." "#, ) .file("a/src/lib.rs", "") .build(); p.cargo("build -v") .with_status(101) .with_stderr( "[ERROR] cyclic package dependency: package `a v0.0.1 ([CWD]/a)` depends on itself. Cycle: package `a v0.0.1 ([CWD]/a)` ... which satisfies path dependency `a` of package `foo v0.0.1 ([CWD])` ... 
which satisfies path dependency `foo` of package `a v0.0.1 ([..])`", ).run(); } #[cargo_test] fn predictable_filenames() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [lib] name = "foo" crate-type = ["dylib", "rlib"] "#, ) .file("src/lib.rs", "") .build(); p.cargo("build -v").run(); assert!(p.root().join("target/debug/libfoo.rlib").is_file()); let dylib_name = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX); assert!(p.root().join("target/debug").join(dylib_name).is_file()); } #[cargo_test] fn dashes_to_underscores() { let p = project() .file("Cargo.toml", &basic_manifest("foo-bar", "0.0.1")) .file("src/lib.rs", "") .file("src/main.rs", "extern crate foo_bar; fn main() {}") .build(); p.cargo("build -v").run(); assert!(p.bin("foo-bar").is_file()); } #[cargo_test] fn dashes_in_crate_name_bad() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [lib] name = "foo-bar" "#, ) .file("src/lib.rs", "") .file("src/main.rs", "extern crate foo_bar; fn main() {}") .build(); p.cargo("build -v") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]/foo/Cargo.toml` Caused by: library target names cannot contain hyphens: foo-bar ", ) .run(); } #[cargo_test] fn rustc_env_var() { let p = project().file("src/lib.rs", "").build(); p.cargo("build -v") .env("RUSTC", "rustc-that-does-not-exist") .with_status(101) .with_stderr( "\ [ERROR] could not execute process `rustc-that-does-not-exist -vV` ([..]) Caused by: [..] 
", ) .run(); assert!(!p.bin("a").is_file()); } #[cargo_test] fn filtering() { let p = project() .file("src/lib.rs", "") .file("src/bin/a.rs", "fn main() {}") .file("src/bin/b.rs", "fn main() {}") .file("examples/a.rs", "fn main() {}") .file("examples/b.rs", "fn main() {}") .build(); p.cargo("build --lib").run(); assert!(!p.bin("a").is_file()); p.cargo("build --bin=a --example=a").run(); assert!(p.bin("a").is_file()); assert!(!p.bin("b").is_file()); assert!(p.bin("examples/a").is_file()); assert!(!p.bin("examples/b").is_file()); } #[cargo_test] fn filtering_implicit_bins() { let p = project() .file("src/lib.rs", "") .file("src/bin/a.rs", "fn main() {}") .file("src/bin/b.rs", "fn main() {}") .file("examples/a.rs", "fn main() {}") .file("examples/b.rs", "fn main() {}") .build(); p.cargo("build --bins").run(); assert!(p.bin("a").is_file()); assert!(p.bin("b").is_file()); assert!(!p.bin("examples/a").is_file()); assert!(!p.bin("examples/b").is_file()); } #[cargo_test] fn filtering_implicit_examples() { let p = project() .file("src/lib.rs", "") .file("src/bin/a.rs", "fn main() {}") .file("src/bin/b.rs", "fn main() {}") .file("examples/a.rs", "fn main() {}") .file("examples/b.rs", "fn main() {}") .build(); p.cargo("build --examples").run(); assert!(!p.bin("a").is_file()); assert!(!p.bin("b").is_file()); assert!(p.bin("examples/a").is_file()); assert!(p.bin("examples/b").is_file()); } #[cargo_test] fn ignore_dotfile() { let p = project() .file("src/bin/.a.rs", "") .file("src/bin/a.rs", "fn main() {}") .build(); p.cargo("build").run(); } #[cargo_test] fn ignore_dotdirs() { let p = project() .file("src/bin/a.rs", "fn main() {}") .file(".git/Cargo.toml", "") .file(".pc/dummy-fix.patch/Cargo.toml", "") .build(); p.cargo("build").run(); } #[cargo_test] fn dotdir_root() { let p = ProjectBuilder::new(root().join(".foo")) .file("src/bin/a.rs", "fn main() {}") .build(); p.cargo("build").run(); } #[cargo_test] fn custom_target_dir_env() { let p = project().file("src/main.rs", "fn 
main() {}").build(); let exe_name = format!("foo{}", env::consts::EXE_SUFFIX); p.cargo("build").env("CARGO_TARGET_DIR", "foo/target").run(); assert!(p.root().join("foo/target/debug").join(&exe_name).is_file()); assert!(!p.root().join("target/debug").join(&exe_name).is_file()); p.cargo("build").run(); assert!(p.root().join("foo/target/debug").join(&exe_name).is_file()); assert!(p.root().join("target/debug").join(&exe_name).is_file()); p.cargo("build") .env("CARGO_BUILD_TARGET_DIR", "foo2/target") .run(); assert!(p.root().join("foo2/target/debug").join(&exe_name).is_file()); p.change_file( ".cargo/config", r#" [build] target-dir = "foo/target" "#, ); p.cargo("build").env("CARGO_TARGET_DIR", "bar/target").run(); assert!(p.root().join("bar/target/debug").join(&exe_name).is_file()); assert!(p.root().join("foo/target/debug").join(&exe_name).is_file()); assert!(p.root().join("target/debug").join(&exe_name).is_file()); } #[cargo_test] fn custom_target_dir_line_parameter() { let p = project().file("src/main.rs", "fn main() {}").build(); let exe_name = format!("foo{}", env::consts::EXE_SUFFIX); p.cargo("build --target-dir foo/target").run(); assert!(p.root().join("foo/target/debug").join(&exe_name).is_file()); assert!(!p.root().join("target/debug").join(&exe_name).is_file()); p.cargo("build").run(); assert!(p.root().join("foo/target/debug").join(&exe_name).is_file()); assert!(p.root().join("target/debug").join(&exe_name).is_file()); p.change_file( ".cargo/config", r#" [build] target-dir = "foo/target" "#, ); p.cargo("build --target-dir bar/target").run(); assert!(p.root().join("bar/target/debug").join(&exe_name).is_file()); assert!(p.root().join("foo/target/debug").join(&exe_name).is_file()); assert!(p.root().join("target/debug").join(&exe_name).is_file()); p.cargo("build --target-dir foobar/target") .env("CARGO_TARGET_DIR", "bar/target") .run(); assert!(p .root() .join("foobar/target/debug") .join(&exe_name) .is_file()); 
assert!(p.root().join("bar/target/debug").join(&exe_name).is_file()); assert!(p.root().join("foo/target/debug").join(&exe_name).is_file()); assert!(p.root().join("target/debug").join(&exe_name).is_file()); } #[cargo_test] fn build_multiple_packages() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.d1] path = "d1" [dependencies.d2] path = "d2" [[bin]] name = "foo" "#, ) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .file("d1/Cargo.toml", &basic_bin_manifest("d1")) .file("d1/src/lib.rs", "") .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }") .file( "d2/Cargo.toml", r#" [package] name = "d2" version = "0.0.1" authors = [] [[bin]] name = "d2" doctest = false "#, ) .file("d2/src/main.rs", "fn main() { println!(\"d2\"); }") .build(); p.cargo("build -p d1 -p d2 -p foo").run(); assert!(p.bin("foo").is_file()); p.process(&p.bin("foo")).with_stdout("i am foo\n").run(); let d1_path = &p .build_dir() .join("debug") .join(format!("d1{}", env::consts::EXE_SUFFIX)); let d2_path = &p .build_dir() .join("debug") .join(format!("d2{}", env::consts::EXE_SUFFIX)); assert!(d1_path.is_file()); p.process(d1_path).with_stdout("d1").run(); assert!(d2_path.is_file()); p.process(d2_path).with_stdout("d2").run(); } #[cargo_test] fn invalid_spec() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.d1] path = "d1" [[bin]] name = "foo" "#, ) .file("src/bin/foo.rs", &main_file(r#""i am foo""#, &[])) .file("d1/Cargo.toml", &basic_bin_manifest("d1")) .file("d1/src/lib.rs", "") .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }") .build(); p.cargo("build -p notAValidDep") .with_status(101) .with_stderr("[ERROR] package ID specification `notAValidDep` did not match any packages") .run(); p.cargo("build -p d1 -p notAValidDep") .with_status(101) .with_stderr("[ERROR] package ID specification `notAValidDep` did not match any packages") .run(); } 
#[cargo_test] fn manifest_with_bom_is_ok() { let p = project() .file( "Cargo.toml", "\u{FEFF} [package] name = \"foo\" version = \"0.0.1\" authors = [] ", ) .file("src/lib.rs", "") .build(); p.cargo("build -v").run(); } #[cargo_test] fn panic_abort_compiles_with_panic_abort() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [profile.dev] panic = 'abort' "#, ) .file("src/lib.rs", "") .build(); p.cargo("build -v") .with_stderr_contains("[..] -C panic=abort [..]") .run(); } #[cargo_test] fn compiler_json_error_format() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] path = "bar" "#, ) .file( "build.rs", "fn main() { println!(\"cargo:rustc-cfg=xyz\") }", ) .file("src/main.rs", "fn main() { let unused = 92; }") .file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0")) .file("bar/src/lib.rs", r#"fn dead() {}"#) .build(); let output = |fresh| { r#" { "reason":"compiler-artifact", "package_id":"foo 0.5.0 ([..])", "manifest_path": "[..]", "target":{ "kind":["custom-build"], "crate_types":["bin"], "doc": false, "doctest": false, "edition": "2015", "name":"build-script-build", "src_path":"[..]build.rs", "test": false }, "profile": { "debug_assertions": true, "debuginfo": 2, "opt_level": "0", "overflow_checks": true, "test": false }, "executable": null, "features": [], "filenames": "{...}", "fresh": $FRESH } { "reason":"compiler-message", "package_id":"bar 0.5.0 ([..])", "manifest_path": "[..]", "target":{ "kind":["lib"], "crate_types":["lib"], "doc": true, "doctest": true, "edition": "2015", "name":"bar", "src_path":"[..]lib.rs", "test": true }, "message":"{...}" } { "reason":"compiler-artifact", "profile": { "debug_assertions": true, "debuginfo": 2, "opt_level": "0", "overflow_checks": true, "test": false }, "executable": null, "features": [], "package_id":"bar 0.5.0 ([..])", "manifest_path": "[..]", "target":{ "kind":["lib"], 
"crate_types":["lib"], "doc": true, "doctest": true, "edition": "2015", "name":"bar", "src_path":"[..]lib.rs", "test": true }, "filenames":[ "[..].rlib", "[..].rmeta" ], "fresh": $FRESH } { "reason":"build-script-executed", "package_id":"foo 0.5.0 ([..])", "linked_libs":[], "linked_paths":[], "env":[], "cfgs":["xyz"], "out_dir": "[..]target/debug/build/foo-[..]/out" } { "reason":"compiler-message", "package_id":"foo 0.5.0 ([..])", "manifest_path": "[..]", "target":{ "kind":["bin"], "crate_types":["bin"], "doc": true, "doctest": false, "edition": "2015", "name":"foo", "src_path":"[..]main.rs", "test": true }, "message":"{...}" } { "reason":"compiler-artifact", "package_id":"foo 0.5.0 ([..])", "manifest_path": "[..]", "target":{ "kind":["bin"], "crate_types":["bin"], "doc": true, "doctest": false, "edition": "2015", "name":"foo", "src_path":"[..]main.rs", "test": true }, "profile": { "debug_assertions": true, "debuginfo": 2, "opt_level": "0", "overflow_checks": true, "test": false }, "executable": "[..]/foo/target/debug/foo[EXE]", "features": [], "filenames": "{...}", "fresh": $FRESH } {"reason": "build-finished", "success": true} "# .replace("$FRESH", fresh) }; // Use `jobs=1` to ensure that the order of messages is consistent. p.cargo("build -v --message-format=json --jobs=1") .with_json_contains_unordered(&output("false")) .run(); // With fresh build, we should repeat the artifacts, // and replay the cached compiler warnings. 
p.cargo("build -v --message-format=json --jobs=1") .with_json_contains_unordered(&output("true")) .run(); } #[cargo_test] fn wrong_message_format_option() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build --message-format XML") .with_status(101) .with_stderr_contains( "\ error: invalid message format specifier: `xml` ", ) .run(); } #[cargo_test] fn message_format_json_forward_stderr() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/main.rs", "fn main() { let unused = 0; }") .build(); p.cargo("rustc --release --bin foo --message-format JSON") .with_json_contains_unordered( r#" { "reason":"compiler-message", "package_id":"foo 0.5.0 ([..])", "manifest_path": "[..]", "target":{ "kind":["bin"], "crate_types":["bin"], "doc": true, "doctest": false, "edition": "2015", "name":"foo", "src_path":"[..]", "test": true }, "message":"{...}" } { "reason":"compiler-artifact", "package_id":"foo 0.5.0 ([..])", "manifest_path": "[..]", "target":{ "kind":["bin"], "crate_types":["bin"], "doc": true, "doctest": false, "edition": "2015", "name":"foo", "src_path":"[..]", "test": true }, "profile":{ "debug_assertions":false, "debuginfo":null, "opt_level":"3", "overflow_checks": false, "test":false }, "executable": "{...}", "features":[], "filenames": "{...}", "fresh": false } {"reason": "build-finished", "success": true} "#, ) .run(); } #[cargo_test] fn no_warn_about_package_metadata() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [package.metadata] foo = "bar" a = true b = 3 [package.metadata.another] bar = 3 "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_stderr( "[..] 
foo v0.0.1 ([..])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", ) .run(); } #[cargo_test] fn no_warn_about_workspace_metadata() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["foo"] [workspace.metadata] something = "something_else" x = 1 y = 2 [workspace.metadata.another] bar = 12 "#, ) .file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.0.1" "#, ) .file("foo/src/lib.rs", "") .build(); p.cargo("build") .with_stderr( "[..] foo v0.0.1 ([..])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", ) .run(); } #[cargo_test] fn cargo_build_empty_target() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build --target") .arg("") .with_status(101) .with_stderr_contains("[..] target was empty") .run(); } #[cargo_test] fn build_all_workspace() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [dependencies] bar = { path = "bar" } [workspace] "#, ) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .build(); p.cargo("build --workspace") .with_stderr( "\ [COMPILING] bar v0.1.0 ([..]) [COMPILING] foo v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn build_all_exclude() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [workspace] members = ["bar", "baz"] "#, ) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }") .build(); p.cargo("build --workspace --exclude baz") .with_stderr_does_not_contain("[COMPILING] baz v0.1.0 [..]") .with_stderr_unordered( "\ [COMPILING] foo v0.1.0 ([..]) [COMPILING] bar v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn build_all_exclude_not_found() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [workspace] members = ["bar"] "#, ) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .build(); p.cargo("build --workspace --exclude baz") .with_stderr_does_not_contain("[COMPILING] baz v0.1.0 [..]") .with_stderr_unordered( "\ [WARNING] excluded package(s) `baz` not found in workspace [..] [COMPILING] foo v0.1.0 ([..]) [COMPILING] bar v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn build_all_exclude_glob() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [workspace] members = ["bar", "baz"] "#, ) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }") .build(); p.cargo("build --workspace --exclude '*z'") .with_stderr_does_not_contain("[COMPILING] baz v0.1.0 [..]") .with_stderr_unordered( "\ [COMPILING] foo v0.1.0 ([..]) [COMPILING] bar v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn build_all_exclude_glob_not_found() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [workspace] members = ["bar"] "#, ) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .build(); p.cargo("build --workspace --exclude '*z'") .with_stderr_does_not_contain("[COMPILING] baz v0.1.0 [..]") .with_stderr( "\ [WARNING] excluded package pattern(s) `*z` not found in workspace [..] [COMPILING] [..] v0.1.0 ([..]) [COMPILING] [..] v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn build_all_exclude_broken_glob() { let p = project().file("src/main.rs", "fn main() {}").build(); p.cargo("build --workspace --exclude '[*z'") .with_status(101) .with_stderr_contains("[ERROR] cannot build glob pattern from `[*z`") .run(); } #[cargo_test] fn build_all_workspace_implicit_examples() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [dependencies] bar = { path = "bar" } [workspace] "#, ) .file("src/lib.rs", "") .file("src/bin/a.rs", "fn main() {}") .file("src/bin/b.rs", "fn main() {}") .file("examples/c.rs", "fn main() {}") .file("examples/d.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "") .file("bar/src/bin/e.rs", "fn main() {}") .file("bar/src/bin/f.rs", "fn main() {}") .file("bar/examples/g.rs", "fn main() {}") .file("bar/examples/h.rs", "fn main() {}") .build(); p.cargo("build --workspace --examples") .with_stderr( "[..] Compiling bar v0.1.0 ([..])\n\ [..] Compiling foo v0.1.0 ([..])\n\ [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n", ) .run(); assert!(!p.bin("a").is_file()); assert!(!p.bin("b").is_file()); assert!(p.bin("examples/c").is_file()); assert!(p.bin("examples/d").is_file()); assert!(!p.bin("e").is_file()); assert!(!p.bin("f").is_file()); assert!(p.bin("examples/g").is_file()); assert!(p.bin("examples/h").is_file()); } #[cargo_test] fn build_all_virtual_manifest() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar", "baz"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/lib.rs", "pub fn baz() {}") .build(); // The order in which bar and baz are built is not guaranteed p.cargo("build --workspace") .with_stderr_unordered( "\ [COMPILING] baz v0.1.0 ([..]) [COMPILING] bar v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn build_virtual_manifest_all_implied() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar", "baz"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/lib.rs", "pub fn baz() {}") .build(); // The order in which `bar` and `baz` are built is not guaranteed. p.cargo("build") .with_stderr_unordered( "\ [COMPILING] baz v0.1.0 ([..]) [COMPILING] bar v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn build_virtual_manifest_one_project() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar", "baz"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }") .build(); p.cargo("build -p bar") .with_stderr_does_not_contain("[..]baz[..]") .with_stderr( "\ [COMPILING] bar v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn build_virtual_manifest_glob() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar", "baz"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() { break_the_build(); }") .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/lib.rs", "pub fn baz() {}") .build(); p.cargo("build -p '*z'") .with_stderr_does_not_contain("[..]bar[..]") .with_stderr( "\ [COMPILING] baz v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn build_virtual_manifest_glob_not_found() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .build(); p.cargo("build -p bar -p '*z'") .with_status(101) .with_stderr("[ERROR] package pattern(s) `*z` not found in workspace [..]") .run(); } #[cargo_test] fn build_virtual_manifest_broken_glob() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .build(); p.cargo("build -p '[*z'") .with_status(101) .with_stderr_contains("[ERROR] cannot build glob pattern from `[*z`") .run(); } #[cargo_test] fn build_all_virtual_manifest_implicit_examples() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar", "baz"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "") .file("bar/src/bin/a.rs", "fn main() {}") .file("bar/src/bin/b.rs", "fn main() {}") .file("bar/examples/c.rs", "fn main() {}") .file("bar/examples/d.rs", "fn main() {}") .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/lib.rs", "") .file("baz/src/bin/e.rs", "fn main() {}") .file("baz/src/bin/f.rs", "fn main() {}") .file("baz/examples/g.rs", "fn main() {}") .file("baz/examples/h.rs", "fn main() {}") .build(); // The order in which bar and baz are built is not guaranteed p.cargo("build --workspace --examples") .with_stderr_unordered( "\ [COMPILING] baz v0.1.0 ([..]) [COMPILING] bar v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); assert!(!p.bin("a").is_file()); assert!(!p.bin("b").is_file()); assert!(p.bin("examples/c").is_file()); assert!(p.bin("examples/d").is_file()); assert!(!p.bin("e").is_file()); assert!(!p.bin("f").is_file()); assert!(p.bin("examples/g").is_file()); assert!(p.bin("examples/h").is_file()); } #[cargo_test] fn build_all_member_dependency_same_name() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a"] "#, ) .file( "a/Cargo.toml", r#" [project] name = "a" version = "0.1.0" [dependencies] a = "0.1.0" "#, ) .file("a/src/lib.rs", "pub fn a() {}") .build(); Package::new("a", "0.1.0").publish(); p.cargo("build --workspace") .with_stderr( "[UPDATING] `[..]` index\n\ [DOWNLOADING] crates ...\n\ [DOWNLOADED] a v0.1.0 ([..])\n\ [COMPILING] a v0.1.0\n\ [COMPILING] a v0.1.0 ([..])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", ) .run(); } #[cargo_test] fn run_proper_binary() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.0" [[bin]] name = "main" [[bin]] name = "other" "#, ) .file("src/lib.rs", "") .file( "src/bin/main.rs", r#"fn main() { panic!("This should never be run."); }"#, ) .file("src/bin/other.rs", "fn main() {}") .build(); p.cargo("run --bin other").run(); } #[cargo_test] fn run_proper_binary_main_rs() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/lib.rs", "") .file("src/bin/main.rs", "fn main() {}") .build(); p.cargo("run --bin foo").run(); } #[cargo_test] fn run_proper_alias_binary_from_src() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.0" [[bin]] name = "foo" [[bin]] name = "bar" "#, ) .file("src/foo.rs", r#"fn main() { println!("foo"); }"#) .file("src/bar.rs", r#"fn main() { println!("bar"); }"#) .build(); p.cargo("build --workspace").run(); p.process(&p.bin("foo")).with_stdout("foo\n").run(); p.process(&p.bin("bar")).with_stdout("bar\n").run(); } #[cargo_test] fn 
run_proper_alias_binary_main_rs() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.0" [[bin]] name = "foo" [[bin]] name = "bar" "#, ) .file("src/main.rs", r#"fn main() { println!("main"); }"#) .build(); p.cargo("build --workspace").run(); p.process(&p.bin("foo")).with_stdout("main\n").run(); p.process(&p.bin("bar")).with_stdout("main\n").run(); } #[cargo_test] fn run_proper_binary_main_rs_as_foo() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file( "src/foo.rs", r#" fn main() { panic!("This should never be run."); }"#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("run --bin foo").run(); } #[cargo_test] fn rustc_wrapper() { let p = project().file("src/lib.rs", "").build(); let wrapper = tools::echo_wrapper(); let running = format!( "[RUNNING] `{} rustc --crate-name foo [..]", wrapper.display() ); p.cargo("build -v") .env("RUSTC_WRAPPER", &wrapper) .with_stderr_contains(&running) .run(); p.build_dir().rm_rf(); p.cargo("build -v") .env("RUSTC_WORKSPACE_WRAPPER", &wrapper) .with_stderr_contains(&running) .run(); } #[cargo_test] fn rustc_wrapper_relative() { Package::new("bar", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "1.0" "#, ) .file("src/lib.rs", "") .build(); let wrapper = tools::echo_wrapper(); let exe_name = wrapper.file_name().unwrap().to_str().unwrap(); let relative_path = format!("./{}", exe_name); fs::hard_link(&wrapper, p.root().join(exe_name)).unwrap(); let running = format!("[RUNNING] `[ROOT]/foo/./{} rustc[..]", exe_name); p.cargo("build -v") .env("RUSTC_WRAPPER", &relative_path) .with_stderr_contains(&running) .run(); p.build_dir().rm_rf(); p.cargo("build -v") .env("RUSTC_WORKSPACE_WRAPPER", &relative_path) .with_stderr_contains(&running) .run(); p.build_dir().rm_rf(); p.change_file( ".cargo/config.toml", &format!( r#" build.rustc-wrapper = "./{}" "#, exe_name ), ); p.cargo("build 
-v").with_stderr_contains(&running).run(); } #[cargo_test] fn rustc_wrapper_from_path() { let p = project().file("src/lib.rs", "").build(); p.cargo("build -v") .env("RUSTC_WRAPPER", "wannabe_sccache") .with_status(101) .with_stderr_contains("[..]`wannabe_sccache rustc [..]") .run(); p.build_dir().rm_rf(); p.cargo("build -v") .env("RUSTC_WORKSPACE_WRAPPER", "wannabe_sccache") .with_status(101) .with_stderr_contains("[..]`wannabe_sccache rustc [..]") .run(); } #[cargo_test] fn cdylib_not_lifted() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" authors = [] version = "0.1.0" [lib] crate-type = ["cdylib"] "#, ) .file("src/lib.rs", "") .build(); p.cargo("build").run(); let files = if cfg!(windows) { if cfg!(target_env = "msvc") { vec!["foo.dll.lib", "foo.dll.exp", "foo.dll"] } else { vec!["libfoo.dll.a", "foo.dll"] } } else if cfg!(target_os = "macos") { vec!["libfoo.dylib"] } else { vec!["libfoo.so"] }; for file in files { println!("checking: {}", file); assert!(p.root().join("target/debug/deps").join(&file).is_file()); } } #[cargo_test] fn cdylib_final_outputs() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo-bar" authors = [] version = "0.1.0" [lib] crate-type = ["cdylib"] "#, ) .file("src/lib.rs", "") .build(); p.cargo("build").run(); let files = if cfg!(windows) { if cfg!(target_env = "msvc") { vec!["foo_bar.dll.lib", "foo_bar.dll"] } else { vec!["foo_bar.dll", "libfoo_bar.dll.a"] } } else if cfg!(target_os = "macos") { vec!["libfoo_bar.dylib"] } else { vec!["libfoo_bar.so"] }; for file in files { println!("checking: {}", file); assert!(p.root().join("target/debug").join(&file).is_file()); } } #[cargo_test] // NOTE: Windows MSVC and wasm32-unknown-emscripten do not use metadata. Skip them. 
// See #[cfg(not(all(target_os = "windows", target_env = "msvc")))] fn no_dep_info_collision_when_cdylib_and_bin_coexist() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.0.0" [lib] crate-type = ["cdylib"] "#, ) .file("src/main.rs", "fn main() {}") .file("src/lib.rs", "") .build(); p.cargo("build -v") .with_stderr_unordered( "\ [COMPILING] foo v1.0.0 ([CWD]) [RUNNING] `rustc [..] --crate-type bin [..] -C metadata=[..]` [RUNNING] `rustc [..] --crate-type cdylib [..] -C metadata=[..]` [FINISHED] [..] ", ) .run(); let deps_dir = p.target_debug_dir().join("deps"); assert!(deps_dir.join("foo.d").exists()); let dep_info_count = deps_dir .read_dir() .unwrap() .filter(|e| { let filename = e.as_ref().unwrap().file_name(); let filename = filename.to_str().unwrap(); filename.starts_with("foo") && filename.ends_with(".d") }) .count(); // cdylib -> foo.d // bin -> foo-.d assert_eq!(dep_info_count, 2); } #[cargo_test] fn deterministic_cfg_flags() { // This bug is non-deterministic. let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] build = "build.rs" [features] default = ["f_a", "f_b", "f_c", "f_d"] f_a = [] f_b = [] f_c = [] f_d = [] "#, ) .file( "build.rs", r#" fn main() { println!("cargo:rustc-cfg=cfg_a"); println!("cargo:rustc-cfg=cfg_b"); println!("cargo:rustc-cfg=cfg_c"); println!("cargo:rustc-cfg=cfg_d"); println!("cargo:rustc-cfg=cfg_e"); } "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build -v") .with_stderr( "\ [COMPILING] foo v0.1.0 [..] [RUNNING] [..] [RUNNING] [..] [RUNNING] `rustc --crate-name foo [..] \ --cfg[..]default[..]--cfg[..]f_a[..]--cfg[..]f_b[..]\ --cfg[..]f_c[..]--cfg[..]f_d[..] 
\ --cfg cfg_a --cfg cfg_b --cfg cfg_c --cfg cfg_d --cfg cfg_e` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", ) .run(); } #[cargo_test] fn explicit_bins_without_paths() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [[bin]] name = "foo" [[bin]] name = "bar" "#, ) .file("src/lib.rs", "") .file("src/main.rs", "fn main() {}") .file("src/bin/bar.rs", "fn main() {}") .build(); p.cargo("build").run(); } #[cargo_test] fn no_bin_in_src_with_lib() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/lib.rs", "") .file("src/foo.rs", "fn main() {}") .build(); p.cargo("build") .with_status(101) .with_stderr_contains( "\ [ERROR] failed to parse manifest at `[..]` Caused by: can't find `foo` bin at `src/bin/foo.rs` or `src/bin/foo/main.rs`. [..]", ) .run(); } #[cargo_test] fn inferred_bins() { let p = project() .file("src/main.rs", "fn main() {}") .file("src/bin/bar.rs", "fn main() {}") .file("src/bin/baz/main.rs", "fn main() {}") .build(); p.cargo("build").run(); assert!(p.bin("foo").is_file()); assert!(p.bin("bar").is_file()); assert!(p.bin("baz").is_file()); } #[cargo_test] fn inferred_bins_duplicate_name() { // this should fail, because we have two binaries with the same name let p = project() .file("src/main.rs", "fn main() {}") .file("src/bin/bar.rs", "fn main() {}") .file("src/bin/bar/main.rs", "fn main() {}") .build(); p.cargo("build").with_status(101).with_stderr_contains( "[..]found duplicate binary name bar, but all binary targets must have a unique name[..]", ) .run(); } #[cargo_test] fn inferred_bin_path() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [[bin]] name = "bar" # Note, no `path` key! 
"#, ) .file("src/bin/bar/main.rs", "fn main() {}") .build(); p.cargo("build").run(); assert!(p.bin("bar").is_file()); } #[cargo_test] fn inferred_examples() { let p = project() .file("src/lib.rs", "fn main() {}") .file("examples/bar.rs", "fn main() {}") .file("examples/baz/main.rs", "fn main() {}") .build(); p.cargo("build --examples").run(); assert!(p.bin("examples/bar").is_file()); assert!(p.bin("examples/baz").is_file()); } #[cargo_test] fn inferred_tests() { let p = project() .file("src/lib.rs", "fn main() {}") .file("tests/bar.rs", "fn main() {}") .file("tests/baz/main.rs", "fn main() {}") .build(); p.cargo("test --test=bar --test=baz").run(); } #[cargo_test] fn inferred_benchmarks() { let p = project() .file("src/lib.rs", "fn main() {}") .file("benches/bar.rs", "fn main() {}") .file("benches/baz/main.rs", "fn main() {}") .build(); p.cargo("bench --bench=bar --bench=baz").run(); } #[cargo_test] fn target_edition() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [lib] edition = "2018" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build -v") .with_stderr_contains( "\ [COMPILING] foo v0.0.1 ([..]) [RUNNING] `rustc [..]--edition=2018 [..] ", ) .run(); } #[cargo_test] fn target_edition_override() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] edition = "2018" [lib] edition = "2015" "#, ) .file( "src/lib.rs", " pub fn async() {} pub fn try() {} pub fn await() {} ", ) .build(); p.cargo("build -v").run(); } #[cargo_test] fn same_metadata_different_directory() { // A top-level crate built in two different workspaces should have the // same metadata hash. 
let p = project() .at("foo1") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); let output = t!(String::from_utf8( t!(p.cargo("build -v").exec_with_output()).stderr, )); let metadata = output .split_whitespace() .find(|arg| arg.starts_with("metadata=")) .unwrap(); let p = project() .at("foo2") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("build -v") .with_stderr_contains(format!("[..]{}[..]", metadata)) .run(); } #[cargo_test] fn building_a_dependent_crate_without_bin_should_fail() { Package::new("testless", "0.1.0") .file( "Cargo.toml", r#" [project] name = "testless" version = "0.1.0" [[bin]] name = "a_bin" "#, ) .file("src/lib.rs", "") .publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [dependencies] testless = "0.1.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr_contains( "[..]can't find `a_bin` bin at `src/bin/a_bin.rs` or `src/bin/a_bin/main.rs`[..]", ) .run(); } #[cargo_test] #[cfg(any(target_os = "macos", target_os = "ios"))] fn uplift_dsym_of_bin_on_mac() { let p = project() .file("src/main.rs", "fn main() { panic!(); }") .file("src/bin/b.rs", "fn main() { panic!(); }") .file("examples/c.rs", "fn main() { panic!(); }") .file("tests/d.rs", "fn main() { panic!(); }") .build(); p.cargo("build --bins --examples --tests") .enable_mac_dsym() .run(); assert!(p.target_debug_dir().join("foo.dSYM").is_dir()); assert!(p.target_debug_dir().join("b.dSYM").is_dir()); assert!(p.target_debug_dir().join("b.dSYM").is_symlink()); assert!(p.target_debug_dir().join("examples/c.dSYM").is_dir()); assert!(!p.target_debug_dir().join("c.dSYM").exists()); assert!(!p.target_debug_dir().join("d.dSYM").exists()); } #[cargo_test] #[cfg(any(target_os = "macos", target_os = "ios"))] fn uplift_dsym_of_bin_on_mac_when_broken_link_exists() { let p = project() 
.file("src/main.rs", "fn main() { panic!(); }") .build(); let dsym = p.target_debug_dir().join("foo.dSYM"); p.cargo("build").enable_mac_dsym().run(); assert!(dsym.is_dir()); // Simulate the situation where the underlying dSYM bundle goes missing // but the uplifted symlink to it remains. This would previously cause // builds to permanently fail until the bad symlink was manually removed. dsym.rm_rf(); p.symlink( p.target_debug_dir() .join("deps") .join("foo-baaaaaadbaaaaaad.dSYM"), &dsym, ); assert!(dsym.is_symlink()); assert!(!dsym.exists()); p.cargo("build").enable_mac_dsym().run(); assert!(dsym.is_dir()); } #[cargo_test] #[cfg(all(target_os = "windows", target_env = "msvc"))] fn uplift_pdb_of_bin_on_windows() { let p = project() .file("src/main.rs", "fn main() { panic!(); }") .file("src/bin/b.rs", "fn main() { panic!(); }") .file("src/bin/foo-bar.rs", "fn main() { panic!(); }") .file("examples/c.rs", "fn main() { panic!(); }") .file("tests/d.rs", "fn main() { panic!(); }") .build(); p.cargo("build --bins --examples --tests").run(); assert!(p.target_debug_dir().join("foo.pdb").is_file()); assert!(p.target_debug_dir().join("b.pdb").is_file()); assert!(p.target_debug_dir().join("examples/c.pdb").exists()); assert!(p.target_debug_dir().join("foo-bar.exe").is_file()); assert!(p.target_debug_dir().join("foo_bar.pdb").is_file()); assert!(!p.target_debug_dir().join("c.pdb").exists()); assert!(!p.target_debug_dir().join("d.pdb").exists()); } // Ensure that `cargo build` chooses the correct profile for building // targets based on filters (assuming `--profile` is not specified). 
#[cargo_test] fn build_filter_infer_profile() { let p = project() .file("src/lib.rs", "") .file("src/main.rs", "fn main() {}") .file("tests/t1.rs", "") .file("benches/b1.rs", "") .file("examples/ex1.rs", "fn main() {}") .build(); p.cargo("build -v") .with_stderr_contains( "[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \ --emit=[..]link[..]", ) .with_stderr_contains( "[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin \ --emit=[..]link[..]", ) .run(); p.root().join("target").rm_rf(); p.cargo("build -v --test=t1") .with_stderr_contains( "[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \ --emit=[..]link[..]-C debuginfo=2 [..]", ) .with_stderr_contains( "[RUNNING] `rustc --crate-name t1 tests/t1.rs [..]--emit=[..]link[..]\ -C debuginfo=2 [..]", ) .with_stderr_contains( "[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin \ --emit=[..]link[..]-C debuginfo=2 [..]", ) .run(); p.root().join("target").rm_rf(); // Bench uses test profile without `--release`. p.cargo("build -v --bench=b1") .with_stderr_contains( "[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \ --emit=[..]link[..]-C debuginfo=2 [..]", ) .with_stderr_contains( "[RUNNING] `rustc --crate-name b1 benches/b1.rs [..]--emit=[..]link[..]\ -C debuginfo=2 [..]", ) .with_stderr_does_not_contain("opt-level") .with_stderr_contains( "[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin \ --emit=[..]link[..]-C debuginfo=2 [..]", ) .run(); } #[cargo_test] fn targets_selected_default() { let p = project().file("src/main.rs", "fn main() {}").build(); p.cargo("build -v") // Binaries. .with_stderr_contains( "[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin \ --emit=[..]link[..]", ) // Benchmarks. .with_stderr_does_not_contain( "[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=[..]link \ -C opt-level=3 --test [..]", ) // Unit tests. 
.with_stderr_does_not_contain( "[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=[..]link[..]\ -C debuginfo=2 --test [..]", ) .run(); } #[cargo_test] fn targets_selected_all() { let p = project().file("src/main.rs", "fn main() {}").build(); p.cargo("build -v --all-targets") // Binaries. .with_stderr_contains( "[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin \ --emit=[..]link[..]", ) // Unit tests. .with_stderr_contains( "[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=[..]link[..]\ -C debuginfo=2 --test [..]", ) .run(); } #[cargo_test] fn all_targets_no_lib() { let p = project().file("src/main.rs", "fn main() {}").build(); p.cargo("build -v --all-targets") // Binaries. .with_stderr_contains( "[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin \ --emit=[..]link[..]", ) // Unit tests. .with_stderr_contains( "[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=[..]link[..]\ -C debuginfo=2 --test [..]", ) .run(); } #[cargo_test] fn no_linkable_target() { // Issue 3169: this is currently not an error as per discussion in PR #4797. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [dependencies] the_lib = { path = "the_lib" } "#, ) .file("src/main.rs", "fn main() {}") .file( "the_lib/Cargo.toml", r#" [package] name = "the_lib" version = "0.1.0" [lib] name = "the_lib" crate-type = ["staticlib"] "#, ) .file("the_lib/src/lib.rs", "pub fn foo() {}") .build(); p.cargo("build") .with_stderr_contains( "[WARNING] The package `the_lib` provides no linkable [..] \ while compiling `foo`. [..] in `the_lib`'s Cargo.toml. [..]", ) .run(); } #[cargo_test] fn avoid_dev_deps() { Package::new("foo", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] [dev-dependencies] baz = "1.0.0" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [UPDATING] [..] 
[ERROR] no matching package named `baz` found location searched: registry `crates-io` required by package `bar v0.1.0 ([..]/foo)` ", ) .run(); p.cargo("build -Zavoid-dev-deps") .masquerade_as_nightly_cargo(&["avoid-dev-deps"]) .run(); } #[cargo_test] fn default_cargo_config_jobs() { let p = project() .file("src/lib.rs", "") .file( ".cargo/config", r#" [build] jobs = 1 "#, ) .build(); p.cargo("build -v").run(); } #[cargo_test] fn good_cargo_config_jobs() { let p = project() .file("src/lib.rs", "") .file( ".cargo/config", r#" [build] jobs = 4 "#, ) .build(); p.cargo("build -v").run(); } #[cargo_test] fn good_jobs() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("build --jobs 1").run(); p.cargo("build --jobs -1").run(); } #[cargo_test] fn invalid_cargo_config_jobs() { let p = project() .file("src/lib.rs", "") .file( ".cargo/config", r#" [build] jobs = 0 "#, ) .build(); p.cargo("build -v") .with_status(101) .with_stderr_contains("error: jobs may not be 0") .run(); } #[cargo_test] fn invalid_jobs() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("build --jobs 0") .with_status(101) .with_stderr_contains("error: jobs may not be 0") .run(); p.cargo("build --jobs over9000") .with_status(1) .with_stderr("error: Invalid value: could not parse `over9000` as a number") .run(); } #[cargo_test] fn target_filters_workspace() { let ws = project() .at("ws") .file( "Cargo.toml", r#" [workspace] members = ["a", "b"] "#, ) .file("a/Cargo.toml", &basic_lib_manifest("a")) .file("a/src/lib.rs", "") .file("a/examples/ex1.rs", "fn main() {}") .file("b/Cargo.toml", &basic_bin_manifest("b")) .file("b/src/lib.rs", "") .file("b/src/main.rs", "fn main() {}") .build(); ws.cargo("build -v --example ex") .with_status(101) .with_stderr( "\ [ERROR] no example target named `ex` Did you mean `ex1`?", ) .run(); 
ws.cargo("build -v --example 'ex??'") .with_status(101) .with_stderr( "\ [ERROR] no example target matches pattern `ex??` Did you mean `ex1`?", ) .run(); ws.cargo("build -v --lib") .with_stderr_contains("[RUNNING] `rustc [..]a/src/lib.rs[..]") .with_stderr_contains("[RUNNING] `rustc [..]b/src/lib.rs[..]") .run(); ws.cargo("build -v --example ex1") .with_stderr_contains("[RUNNING] `rustc [..]a/examples/ex1.rs[..]") .run(); } #[cargo_test] fn target_filters_workspace_not_found() { let ws = project() .at("ws") .file( "Cargo.toml", r#" [workspace] members = ["a", "b"] "#, ) .file("a/Cargo.toml", &basic_bin_manifest("a")) .file("a/src/main.rs", "fn main() {}") .file("b/Cargo.toml", &basic_bin_manifest("b")) .file("b/src/main.rs", "fn main() {}") .build(); ws.cargo("build -v --lib") .with_status(101) .with_stderr("[ERROR] no library targets found in packages: a, b") .run(); } #[cfg(unix)] #[cargo_test] fn signal_display() { // Cause the compiler to crash with a signal. let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] pm = { path = "pm" } "#, ) .file( "src/lib.rs", r#" #[macro_use] extern crate pm; #[derive(Foo)] pub struct S; "#, ) .file( "pm/Cargo.toml", r#" [package] name = "pm" version = "0.1.0" [lib] proc-macro = true "#, ) .file( "pm/src/lib.rs", r#" extern crate proc_macro; use proc_macro::TokenStream; #[proc_macro_derive(Foo)] pub fn derive(_input: TokenStream) -> TokenStream { std::process::abort() } "#, ) .build(); foo.cargo("build") .with_stderr( "\ [COMPILING] pm [..] [COMPILING] foo [..] 
[ERROR] could not compile `foo` Caused by: process didn't exit successfully: `rustc [..]` (signal: 6, SIGABRT: process abort signal) ", ) .with_status(101) .run(); } #[cargo_test] fn tricky_pipelining() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = { path = "bar" } "#, ) .file("src/lib.rs", "extern crate bar;") .file("bar/Cargo.toml", &basic_lib_manifest("bar")) .file("bar/src/lib.rs", "") .build(); foo.cargo("build -p bar").run(); foo.cargo("build -p foo").run(); } #[cargo_test] fn pipelining_works() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = { path = "bar" } "#, ) .file("src/lib.rs", "extern crate bar;") .file("bar/Cargo.toml", &basic_lib_manifest("bar")) .file("bar/src/lib.rs", "") .build(); foo.cargo("build") .with_stdout("") .with_stderr( "\ [COMPILING] [..] [COMPILING] [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn pipelining_big_graph() { // Create a crate graph of the form {a,b}{0..29}, where {a,b}(n) depend on {a,b}(n+1) // Then have `foo`, a binary crate, depend on the whole thing. 
let mut project = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] a1 = { path = "a1" } b1 = { path = "b1" } "#, ) .file("src/main.rs", "fn main(){}"); for n in 0..30 { for x in &["a", "b"] { project = project .file( &format!("{x}{n}/Cargo.toml", x = x, n = n), &format!( r#" [package] name = "{x}{n}" version = "0.1.0" [dependencies] a{np1} = {{ path = "../a{np1}" }} b{np1} = {{ path = "../b{np1}" }} "#, x = x, n = n, np1 = n + 1 ), ) .file(&format!("{x}{n}/src/lib.rs", x = x, n = n), ""); } } let foo = project .file("a30/Cargo.toml", &basic_lib_manifest("a30")) .file( "a30/src/lib.rs", r#"compile_error!("don't actually build me");"#, ) .file("b30/Cargo.toml", &basic_lib_manifest("b30")) .file("b30/src/lib.rs", "") .build(); foo.cargo("build -p foo") .with_status(101) .with_stderr_contains("[ERROR] could not compile `a30`[..]") .run(); } #[cargo_test] fn forward_rustc_output() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = '2018' [dependencies] bar = { path = "bar" } "#, ) .file("src/lib.rs", "bar::foo!();") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" [lib] proc-macro = true "#, ) .file( "bar/src/lib.rs", r#" extern crate proc_macro; use proc_macro::*; #[proc_macro] pub fn foo(input: TokenStream) -> TokenStream { println!("a"); println!("b"); println!("{{}}"); eprintln!("c"); eprintln!("d"); eprintln!("{{a"); // "malformed json" input } "#, ) .build(); foo.cargo("build") .with_stdout("a\nb\n{}") .with_stderr( "\ [COMPILING] [..] [COMPILING] [..] c d {a [FINISHED] [..] ", ) .run(); } #[cargo_test] fn build_lib_only() { let p = project() .file("src/main.rs", "fn main() {}") .file("src/lib.rs", r#" "#) .build(); p.cargo("build --lib -v") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \ --emit=[..]link[..]-C debuginfo=2 \ -C metadata=[..] \ --out-dir [..] 
\ -L dependency=[CWD]/target/debug/deps` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", ) .run(); } #[cargo_test] fn build_with_no_lib() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build --lib") .with_status(101) .with_stderr("[ERROR] no library targets found in package `foo`") .run(); } #[cargo_test] fn build_with_relative_cargo_home_path() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = ["wycats@example.com"] [dependencies] "test-dependency" = { path = "src/test_dependency" } "#, ) .file("src/main.rs", "fn main() {}") .file("src/test_dependency/src/lib.rs", r#" "#) .file( "src/test_dependency/Cargo.toml", &basic_manifest("test-dependency", "0.0.1"), ) .build(); p.cargo("build").env("CARGO_HOME", "./cargo_home/").run(); } #[cargo_test] fn user_specific_cfgs_are_filtered_out() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/main.rs", r#"fn main() {}"#) .file( "build.rs", r#" fn main() { assert!(std::env::var_os("CARGO_CFG_PROC_MACRO").is_none()); assert!(std::env::var_os("CARGO_CFG_DEBUG_ASSERTIONS").is_none()); } "#, ) .build(); p.cargo("rustc -- --cfg debug_assertions --cfg proc_macro") .run(); p.process(&p.bin("foo")).run(); } #[cargo_test] fn close_output() { // What happens when stdout or stderr is closed during a build. // Server to know when rustc has spawned. let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap(); let addr = listener.local_addr().unwrap(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [lib] proc-macro = true [[bin]] name = "foobar" "#, ) .file( "src/lib.rs", &r#" use proc_macro::TokenStream; use std::io::Read; #[proc_macro] pub fn repro(_input: TokenStream) -> TokenStream { println!("hello stdout!"); eprintln!("hello stderr!"); // Tell the test we have started. 
let mut socket = std::net::TcpStream::connect("__ADDR__").unwrap(); // Wait for the test to tell us to start printing. let mut buf = [0]; drop(socket.read_exact(&mut buf)); let use_stderr = std::env::var("__CARGO_REPRO_STDERR").is_ok(); // Emit at least 1MB of data. // Linux pipes can buffer up to 64KB. // This test seems to be sensitive to having other threads // calling fork. My hypothesis is that the stdout/stderr // file descriptors are duplicated into the child process, // and during the short window between fork and exec, the // file descriptor is kept alive long enough for the // build to finish. It's a half-baked theory, but this // seems to prevent the spurious errors in CI. // An alternative solution is to run this test in // a single-threaded environment. for i in 0..100000 { if use_stderr { eprintln!("0123456789{}", i); } else { println!("0123456789{}", i); } } TokenStream::new() } "# .replace("__ADDR__", &addr.to_string()), ) .file( "src/bin/foobar.rs", r#" foo::repro!(); fn main() {} "#, ) .build(); // The `stderr` flag here indicates if this should forcefully close stderr or stdout. let spawn = |stderr: bool| { let mut cmd = p.cargo("build").build_command(); cmd.stdout(Stdio::piped()).stderr(Stdio::piped()); if stderr { cmd.env("__CARGO_REPRO_STDERR", "1"); } let mut child = cmd.spawn().unwrap(); // Wait for proc macro to start. let pm_conn = listener.accept().unwrap().0; // Close stderr or stdout. if stderr { drop(child.stderr.take()); } else { drop(child.stdout.take()); } // Tell the proc-macro to continue; drop(pm_conn); // Read the output from the other channel. let out: &mut dyn Read = if stderr { child.stdout.as_mut().unwrap() } else { child.stderr.as_mut().unwrap() }; let mut result = String::new(); out.read_to_string(&mut result).unwrap(); let status = child.wait().unwrap(); assert!(!status.success()); result }; let stderr = spawn(false); compare::match_unordered( "\ [COMPILING] foo [..] hello stderr! [ERROR] [..] 
[WARNING] build failed, waiting for other jobs to finish... ", &stderr, None, ) .unwrap(); // Try again with stderr. p.build_dir().rm_rf(); let stdout = spawn(true); assert_eq!(stdout, "hello stdout!\n"); } #[cargo_test] fn close_output_during_drain() { // Test to close the output during the build phase (drain_the_queue). // There was a bug where it would hang. // Server to know when rustc has spawned. let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap(); let addr = listener.local_addr().unwrap(); // Create a wrapper so the test can know when compiling has started. let rustc_wrapper = { let p = project() .at("compiler") .file("Cargo.toml", &basic_manifest("compiler", "1.0.0")) .file( "src/main.rs", &r#" use std::process::Command; use std::env; use std::io::Read; fn main() { // Only wait on the first dependency. if matches!(env::var("CARGO_PKG_NAME").as_deref(), Ok("dep")) { let mut socket = std::net::TcpStream::connect("__ADDR__").unwrap(); // Wait for the test to tell us to start printing. let mut buf = [0]; drop(socket.read_exact(&mut buf)); } let mut cmd = Command::new("rustc"); for arg in env::args_os().skip(1) { cmd.arg(arg); } std::process::exit(cmd.status().unwrap().code().unwrap()); } "# .replace("__ADDR__", &addr.to_string()), ) .build(); p.cargo("build").run(); p.bin("compiler") }; Package::new("dep", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] dep = "1.0" "#, ) .file("src/lib.rs", "") .build(); // Spawn cargo, wait for the first rustc to start, and then close stderr. let mut cmd = process(&cargo_exe()) .arg("check") .cwd(p.root()) .env("RUSTC", rustc_wrapper) .build_command(); cmd.stdout(Stdio::piped()).stderr(Stdio::piped()); let mut child = cmd.spawn().expect("cargo should spawn"); // Wait for the rustc wrapper to start. let rustc_conn = listener.accept().unwrap().0; // Close stderr to force an error. drop(child.stderr.take()); // Tell the wrapper to continue. 
drop(rustc_conn); match child.wait() { Ok(status) => assert!(!status.success()), Err(e) => panic!("child wait failed: {}", e), } } use cargo_test_support::registry::Dependency; #[cargo_test] fn reduced_reproduction_8249() { // https://github.com/rust-lang/cargo/issues/8249 Package::new("a-src", "0.1.0").links("a").publish(); Package::new("a-src", "0.2.0").links("a").publish(); Package::new("b", "0.1.0") .add_dep(Dependency::new("a-src", "0.1").optional(true)) .publish(); Package::new("b", "0.2.0") .add_dep(Dependency::new("a-src", "0.2").optional(true)) .publish(); Package::new("c", "1.0.0") .add_dep(&Dependency::new("b", "0.1.0")) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] b = { version = "*", features = ["a-src"] } a-src = "*" "#, ) .file("src/lib.rs", "") .build(); p.cargo("generate-lockfile").run(); cargo_util::paths::append(&p.root().join("Cargo.toml"), b"c = \"*\"").unwrap(); p.cargo("check").run(); p.cargo("check").run(); } #[cargo_test] fn target_directory_backup_exclusion() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); // Newly created target/ should have CACHEDIR.TAG inside... p.cargo("build").run(); let cachedir_tag = p.build_dir().join("CACHEDIR.TAG"); assert!(cachedir_tag.is_file()); assert!(fs::read_to_string(&cachedir_tag) .unwrap() .starts_with("Signature: 8a477f597d28d172789f06886806bc55")); // ...but if target/ already exists CACHEDIR.TAG should not be created in it. 
fs::remove_file(&cachedir_tag).unwrap(); p.cargo("build").run(); assert!(!&cachedir_tag.is_file()); } #[cargo_test(>=1.64, reason = "--diagnostic-width is stabilized in 1.64")] fn simple_terminal_width() { let p = project() .file( "src/lib.rs", r#" fn main() { let _: () = 42; } "#, ) .build(); p.cargo("build -Zterminal-width=20") .masquerade_as_nightly_cargo(&["terminal-width"]) .with_status(101) .with_stderr_contains("3 | ..._: () = 42;") .run(); } #[cargo_test] fn build_script_o0_default() { let p = project() .file("src/lib.rs", "") .file("build.rs", "fn main() {}") .build(); p.cargo("build -v --release") .with_stderr_does_not_contain("[..]build_script_build[..]opt-level[..]") .run(); } #[cargo_test] fn build_script_o0_default_even_with_release() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [profile.release] opt-level = 1 "#, ) .file("src/lib.rs", "") .file("build.rs", "fn main() {}") .build(); p.cargo("build -v --release") .with_stderr_does_not_contain("[..]build_script_build[..]opt-level[..]") .run(); } #[cargo_test] fn primary_package_env_var() { // Test that CARGO_PRIMARY_PACKAGE is enabled only for "foo" and not for any dependency. 
let is_primary_package = r#" pub fn is_primary_package() -> bool {{ option_env!("CARGO_PRIMARY_PACKAGE").is_some() }} "#; Package::new("qux", "0.1.0") .file("src/lib.rs", is_primary_package) .publish(); let baz = git::new("baz", |project| { project .file("Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("src/lib.rs", is_primary_package) }); let foo = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = {{ path = "bar" }} baz = {{ git = '{}' }} qux = "0.1" "#, baz.url() ), ) .file( "src/lib.rs", &format!( r#" extern crate bar; extern crate baz; extern crate qux; {} #[test] fn verify_primary_package() {{ assert!(!bar::is_primary_package()); assert!(!baz::is_primary_package()); assert!(!qux::is_primary_package()); assert!(is_primary_package()); }} "#, is_primary_package ), ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", is_primary_package) .build(); foo.cargo("test").run(); } cargo-0.66.0/tests/testsuite/build_plan.rs000066400000000000000000000156221432416201200205510ustar00rootroot00000000000000//! Tests for --build-plan feature. 
use cargo_test_support::registry::Package; use cargo_test_support::{basic_bin_manifest, basic_manifest, main_file, project}; #[cargo_test] fn cargo_build_plan_simple() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("build --build-plan -Zunstable-options") .masquerade_as_nightly_cargo(&["build-plan"]) .with_json( r#" { "inputs": [ "[..]/foo/Cargo.toml" ], "invocations": [ { "args": "{...}", "cwd": "[..]/cit/[..]/foo", "deps": [], "env": "{...}", "kind": null, "links": "{...}", "outputs": "{...}", "package_name": "foo", "package_version": "0.5.0", "program": "rustc", "target_kind": ["bin"], "compile_mode": "build" } ] } "#, ) .run(); assert!(!p.bin("foo").is_file()); } #[cargo_test] fn cargo_build_plan_single_dep() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.5.0" [dependencies] bar = { path = "bar" } "#, ) .file( "src/lib.rs", r#" extern crate bar; pub fn foo() { bar::bar(); } #[test] fn test() { foo(); } "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "pub fn bar() {}") .build(); p.cargo("build --build-plan -Zunstable-options") .masquerade_as_nightly_cargo(&["build-plan"]) .with_json( r#" { "inputs": [ "[..]/foo/Cargo.toml", "[..]/foo/bar/Cargo.toml" ], "invocations": [ { "args": "{...}", "cwd": "[..]/cit/[..]/foo", "deps": [], "env": "{...}", "kind": null, "links": "{...}", "outputs": [ "[..]/foo/target/debug/deps/libbar-[..].rlib", "[..]/foo/target/debug/deps/libbar-[..].rmeta" ], "package_name": "bar", "package_version": "0.0.1", "program": "rustc", "target_kind": ["lib"], "compile_mode": "build" }, { "args": "{...}", "cwd": "[..]/cit/[..]/foo", "deps": [0], "env": "{...}", "kind": null, "links": "{...}", "outputs": [ "[..]/foo/target/debug/deps/libfoo-[..].rlib", "[..]/foo/target/debug/deps/libfoo-[..].rmeta" ], "package_name": "foo", "package_version": "0.5.0", "program": "rustc", 
"target_kind": ["lib"], "compile_mode": "build" } ] } "#, ) .run(); } #[cargo_test] fn cargo_build_plan_build_script() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] build = "build.rs" "#, ) .file("src/main.rs", r#"fn main() {}"#) .file("build.rs", r#"fn main() {}"#) .build(); p.cargo("build --build-plan -Zunstable-options") .masquerade_as_nightly_cargo(&["build-plan"]) .with_json( r#" { "inputs": [ "[..]/foo/Cargo.toml" ], "invocations": [ { "args": "{...}", "cwd": "[..]/cit/[..]/foo", "deps": [], "env": "{...}", "kind": null, "links": "{...}", "outputs": "{...}", "package_name": "foo", "package_version": "0.5.0", "program": "rustc", "target_kind": ["custom-build"], "compile_mode": "build" }, { "args": "{...}", "cwd": "[..]/cit/[..]/foo", "deps": [0], "env": "{...}", "kind": null, "links": "{...}", "outputs": [], "package_name": "foo", "package_version": "0.5.0", "program": "[..]/build-script-build", "target_kind": ["custom-build"], "compile_mode": "run-custom-build" }, { "args": "{...}", "cwd": "[..]/cit/[..]/foo", "deps": [1], "env": "{...}", "kind": null, "links": "{...}", "outputs": "{...}", "package_name": "foo", "package_version": "0.5.0", "program": "rustc", "target_kind": ["bin"], "compile_mode": "build" } ] } "#, ) .run(); } #[cargo_test] fn build_plan_with_dev_dep() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dev-dependencies] bar = "*" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build --build-plan -Zunstable-options") .masquerade_as_nightly_cargo(&["build-plan"]) .run(); } cargo-0.66.0/tests/testsuite/build_script.rs000066400000000000000000003701031432416201200211210ustar00rootroot00000000000000//! Tests for build.rs scripts. 
use cargo_test_support::compare::assert_match_exact; use cargo_test_support::paths::CargoPathExt; use cargo_test_support::registry::Package; use cargo_test_support::tools; use cargo_test_support::{basic_manifest, cross_compile, is_coarse_mtime, project, project_in}; use cargo_test_support::{rustc_host, sleep_ms, slow_cpu_multiplier, symlink_supported}; use cargo_util::paths::remove_dir_all; use std::env; use std::fs; use std::io; use std::thread; #[cargo_test] fn custom_build_script_failed() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] build = "build.rs" "#, ) .file("src/main.rs", "fn main() {}") .file("build.rs", "fn main() { std::process::exit(101); }") .build(); p.cargo("build -v") .with_status(101) .with_stderr( "\ [COMPILING] foo v0.5.0 ([CWD]) [RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin [..]` [RUNNING] `[..]/build-script-build` [ERROR] failed to run custom build command for `foo v0.5.0 ([CWD])` Caused by: process didn't exit successfully: `[..]/build-script-build` (exit [..]: 101)", ) .run(); } #[cargo_test] fn custom_build_env_vars() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [features] bar_feat = ["bar/foo"] [dependencies.bar] path = "bar" "#, ) .file("src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] build = "build.rs" [features] foo = [] "#, ) .file("bar/src/lib.rs", "pub fn hello() {}"); let file_content = format!( r#" use std::env; use std::path::Path; fn main() {{ let _target = env::var("TARGET").unwrap(); let _ncpus = env::var("NUM_JOBS").unwrap(); let _dir = env::var("CARGO_MANIFEST_DIR").unwrap(); let opt = env::var("OPT_LEVEL").unwrap(); assert_eq!(opt, "0"); let opt = env::var("PROFILE").unwrap(); assert_eq!(opt, "debug"); let debug = env::var("DEBUG").unwrap(); assert_eq!(debug, 
"true"); let out = env::var("OUT_DIR").unwrap(); assert!(out.starts_with(r"{0}")); assert!(Path::new(&out).is_dir()); let _host = env::var("HOST").unwrap(); let _feat = env::var("CARGO_FEATURE_FOO").unwrap(); let _cargo = env::var("CARGO").unwrap(); let rustc = env::var("RUSTC").unwrap(); assert_eq!(rustc, "rustc"); let rustdoc = env::var("RUSTDOC").unwrap(); assert_eq!(rustdoc, "rustdoc"); assert!(env::var("RUSTC_WRAPPER").is_err()); assert!(env::var("RUSTC_WORKSPACE_WRAPPER").is_err()); assert!(env::var("RUSTC_LINKER").is_err()); assert!(env::var("RUSTFLAGS").is_err()); let rustflags = env::var("CARGO_ENCODED_RUSTFLAGS").unwrap(); assert_eq!(rustflags, ""); }} "#, p.root() .join("target") .join("debug") .join("build") .display(), ); let p = p.file("bar/build.rs", &file_content).build(); p.cargo("build --features bar_feat").run(); } #[cargo_test] fn custom_build_env_var_rustflags() { let rustflags = "--cfg=special"; let rustflags_alt = "--cfg=notspecial"; let p = project() .file( ".cargo/config", &format!( r#" [build] rustflags = ["{}"] "#, rustflags ), ) .file( "build.rs", &format!( r#" use std::env; fn main() {{ // Static assertion that exactly one of the cfg paths is always taken. assert!(env::var("RUSTFLAGS").is_err()); let x; #[cfg(special)] {{ assert_eq!(env::var("CARGO_ENCODED_RUSTFLAGS").unwrap(), "{}"); x = String::new(); }} #[cfg(notspecial)] {{ assert_eq!(env::var("CARGO_ENCODED_RUSTFLAGS").unwrap(), "{}"); x = String::new(); }} let _ = x; }} "#, rustflags, rustflags_alt, ), ) .file("src/lib.rs", "") .build(); p.cargo("check").run(); // RUSTFLAGS overrides build.rustflags, so --cfg=special shouldn't be passed p.cargo("check").env("RUSTFLAGS", rustflags_alt).run(); } #[cargo_test] fn custom_build_env_var_encoded_rustflags() { // NOTE: We use "-Clink-arg=-B nope" here rather than, say, "-A missing_docs", since for the // latter it won't matter if the whitespace accidentally gets split, as rustc will do the right // thing either way. 
let p = project() .file( ".cargo/config", r#" [build] rustflags = ["-Clink-arg=-B nope", "--cfg=foo"] "#, ) .file( "build.rs", r#" use std::env; fn main() {{ assert_eq!(env::var("CARGO_ENCODED_RUSTFLAGS").unwrap(), "-Clink-arg=-B nope\x1f--cfg=foo"); }} "#, ) .file("src/lib.rs", "") .build(); p.cargo("check").run(); } #[cargo_test] fn custom_build_env_var_rustc_wrapper() { let wrapper = tools::echo_wrapper(); let p = project() .file( "build.rs", r#" use std::env; fn main() {{ assert_eq!( env::var("RUSTC_WRAPPER").unwrap(), env::var("CARGO_RUSTC_WRAPPER_CHECK").unwrap() ); }} "#, ) .file("src/lib.rs", "") .build(); p.cargo("check") .env("CARGO_BUILD_RUSTC_WRAPPER", &wrapper) .env("CARGO_RUSTC_WRAPPER_CHECK", &wrapper) .run(); } #[cargo_test] fn custom_build_env_var_rustc_workspace_wrapper() { let wrapper = tools::echo_wrapper(); // Workspace wrapper should be set for any crate we're operating directly on. let p = project() .file( "build.rs", r#" use std::env; fn main() {{ assert_eq!( env::var("RUSTC_WORKSPACE_WRAPPER").unwrap(), env::var("CARGO_RUSTC_WORKSPACE_WRAPPER_CHECK").unwrap() ); }} "#, ) .file("src/lib.rs", "") .build(); p.cargo("check") .env("CARGO_BUILD_RUSTC_WORKSPACE_WRAPPER", &wrapper) .env("CARGO_RUSTC_WORKSPACE_WRAPPER_CHECK", &wrapper) .run(); // But should not be set for a crate from the registry, as then it's not in a workspace. 
Package::new("bar", "0.1.0") .file( "Cargo.toml", r#" [package] name = "bar" version = "0.1.0" links = "a" "#, ) .file( "build.rs", r#" use std::env; fn main() {{ assert!(env::var("RUSTC_WORKSPACE_WRAPPER").is_err()); }} "#, ) .file("src/lib.rs", "") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "0.1" "#, ) .file("src/lib.rs", "") .build(); p.cargo("check") .env("CARGO_BUILD_RUSTC_WORKSPACE_WRAPPER", &wrapper) .run(); } #[cargo_test] fn custom_build_env_var_rustc_linker() { if cross_compile::disabled() { return; } let target = cross_compile::alternate(); let p = project() .file( ".cargo/config", &format!( r#" [target.{}] linker = "/path/to/linker" "#, target ), ) .file( "build.rs", r#" use std::env; fn main() { assert!(env::var("RUSTC_LINKER").unwrap().ends_with("/path/to/linker")); } "#, ) .file("src/lib.rs", "") .build(); // no crate type set => linker never called => build succeeds if and // only if build.rs succeeds, despite linker binary not existing. 
p.cargo("build --target").arg(&target).run(); } #[cargo_test] fn custom_build_env_var_rustc_linker_bad_host_target() { let target = rustc_host(); let p = project() .file( ".cargo/config", &format!( r#" [target.{}] linker = "/path/to/linker" "#, target ), ) .file("build.rs", "fn main() {}") .file("src/lib.rs", "") .build(); // build.rs should fail since host == target when no target is set p.cargo("build --verbose") .with_status(101) .with_stderr_contains( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin [..]-C linker=[..]/path/to/linker [..]` [ERROR] linker `[..]/path/to/linker` not found " ) .run(); } #[cargo_test] fn custom_build_env_var_rustc_linker_host_target() { let target = rustc_host(); let p = project() .file( ".cargo/config", &format!( r#" target-applies-to-host = false [target.{}] linker = "/path/to/linker" "#, target ), ) .file( "build.rs", r#" use std::env; fn main() { assert!(env::var("RUSTC_LINKER").unwrap().ends_with("/path/to/linker")); } "#, ) .file("src/lib.rs", "") .build(); // no crate type set => linker never called => build succeeds if and // only if build.rs succeeds, despite linker binary not existing. p.cargo("build -Z target-applies-to-host --target") .arg(&target) .masquerade_as_nightly_cargo(&["target-applies-to-host"]) .run(); } #[cargo_test] fn custom_build_env_var_rustc_linker_host_target_env() { let target = rustc_host(); let p = project() .file( ".cargo/config", &format!( r#" [target.{}] linker = "/path/to/linker" "#, target ), ) .file( "build.rs", r#" use std::env; fn main() { assert!(env::var("RUSTC_LINKER").unwrap().ends_with("/path/to/linker")); } "#, ) .file("src/lib.rs", "") .build(); // no crate type set => linker never called => build succeeds if and // only if build.rs succeeds, despite linker binary not existing. 
p.cargo("build -Z target-applies-to-host --target") .env("CARGO_TARGET_APPLIES_TO_HOST", "false") .arg(&target) .masquerade_as_nightly_cargo(&["target-applies-to-host"]) .run(); } #[cargo_test] fn custom_build_invalid_host_config_feature_flag() { let target = rustc_host(); let p = project() .file( ".cargo/config", &format!( r#" [target.{}] linker = "/path/to/linker" "#, target ), ) .file("build.rs", "fn main() {}") .file("src/lib.rs", "") .build(); // build.rs should fail due to -Zhost-config being set without -Ztarget-applies-to-host p.cargo("build -Z host-config --target") .arg(&target) .masquerade_as_nightly_cargo(&["host-config"]) .with_status(101) .with_stderr_contains( "\ error: the -Zhost-config flag requires the -Ztarget-applies-to-host flag to be set ", ) .run(); } #[cargo_test] fn custom_build_linker_host_target_with_bad_host_config() { let target = rustc_host(); let p = project() .file( ".cargo/config", &format!( r#" [host] linker = "/path/to/host/linker" [target.{}] linker = "/path/to/target/linker" "#, target ), ) .file("build.rs", "fn main() {}") .file("src/lib.rs", "") .build(); // build.rs should fail due to bad host linker being set p.cargo("build -Z target-applies-to-host -Z host-config --verbose --target") .arg(&target) .masquerade_as_nightly_cargo(&["target-applies-to-host", "host-config"]) .with_status(101) .with_stderr_contains( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin [..]-C linker=[..]/path/to/host/linker [..]` [ERROR] linker `[..]/path/to/host/linker` not found " ) .run(); } #[cargo_test] fn custom_build_linker_bad_host() { let target = rustc_host(); let p = project() .file( ".cargo/config", &format!( r#" [host] linker = "/path/to/host/linker" [target.{}] linker = "/path/to/target/linker" "#, target ), ) .file("build.rs", "fn main() {}") .file("src/lib.rs", "") .build(); // build.rs should fail due to bad host linker being set p.cargo("build -Z target-applies-to-host -Z 
host-config --verbose --target") .arg(&target) .masquerade_as_nightly_cargo(&["target-applies-to-host", "host-config"]) .with_status(101) .with_stderr_contains( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin [..]-C linker=[..]/path/to/host/linker [..]` [ERROR] linker `[..]/path/to/host/linker` not found " ) .run(); } #[cargo_test] fn custom_build_linker_bad_host_with_arch() { let target = rustc_host(); let p = project() .file( ".cargo/config", &format!( r#" [host] linker = "/path/to/host/linker" [host.{}] linker = "/path/to/host/arch/linker" [target.{}] linker = "/path/to/target/linker" "#, target, target ), ) .file("build.rs", "fn main() {}") .file("src/lib.rs", "") .build(); // build.rs should fail due to bad host linker being set p.cargo("build -Z target-applies-to-host -Z host-config --verbose --target") .arg(&target) .masquerade_as_nightly_cargo(&["target-applies-to-host", "host-config"]) .with_status(101) .with_stderr_contains( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin [..]-C linker=[..]/path/to/host/arch/linker [..]` [ERROR] linker `[..]/path/to/host/arch/linker` not found " ) .run(); } #[cargo_test] fn custom_build_env_var_rustc_linker_cross_arch_host() { let target = rustc_host(); let cross_target = cross_compile::alternate(); let p = project() .file( ".cargo/config", &format!( r#" [host.{}] linker = "/path/to/host/arch/linker" [target.{}] linker = "/path/to/target/linker" "#, cross_target, target ), ) .file( "build.rs", r#" use std::env; fn main() { assert!(env::var("RUSTC_LINKER").unwrap().ends_with("/path/to/target/linker")); } "#, ) .file("src/lib.rs", "") .build(); // build.rs should be built fine since cross target != host target. 
// assertion should succeed since it's still passed the target linker p.cargo("build -Z target-applies-to-host -Z host-config --verbose --target") .arg(&target) .masquerade_as_nightly_cargo(&["target-applies-to-host", "host-config"]) .run(); } #[cargo_test] fn custom_build_linker_bad_cross_arch_host() { let target = rustc_host(); let cross_target = cross_compile::alternate(); let p = project() .file( ".cargo/config", &format!( r#" [host] linker = "/path/to/host/linker" [host.{}] linker = "/path/to/host/arch/linker" [target.{}] linker = "/path/to/target/linker" "#, cross_target, target ), ) .file("build.rs", "fn main() {}") .file("src/lib.rs", "") .build(); // build.rs should fail due to bad host linker being set p.cargo("build -Z target-applies-to-host -Z host-config --verbose --target") .arg(&target) .masquerade_as_nightly_cargo(&["target-applies-to-host", "host-config"]) .with_status(101) .with_stderr_contains( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin [..]-C linker=[..]/path/to/host/linker [..]` [ERROR] linker `[..]/path/to/host/linker` not found " ) .run(); } #[cargo_test] fn custom_build_script_wrong_rustc_flags() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] build = "build.rs" "#, ) .file("src/main.rs", "fn main() {}") .file( "build.rs", r#"fn main() { println!("cargo:rustc-flags=-aaa -bbb"); }"#, ) .build(); p.cargo("build") .with_status(101) .with_stderr_contains( "[ERROR] Only `-l` and `-L` flags are allowed in build script of `foo v0.5.0 ([CWD])`: \ `-aaa -bbb`", ) .run(); } #[cargo_test] fn custom_build_script_rustc_flags() { let p = project() .file( "Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.foo] path = "foo" "#, ) .file("src/main.rs", "fn main() {}") .file( "foo/Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = 
["wycats@example.com"] build = "build.rs" "#, ) .file("foo/src/lib.rs", "") .file( "foo/build.rs", r#" fn main() { println!("cargo:rustc-flags=-l nonexistinglib -L /dummy/path1 -L /dummy/path2"); } "#, ) .build(); p.cargo("build --verbose") .with_stderr( "\ [COMPILING] foo [..] [RUNNING] `rustc --crate-name build_script_build foo/build.rs [..] [RUNNING] `[..]build-script-build` [RUNNING] `rustc --crate-name foo foo/src/lib.rs [..]\ -L dependency=[CWD]/target/debug/deps \ -L /dummy/path1 -L /dummy/path2 -l nonexistinglib` [COMPILING] bar [..] [RUNNING] `rustc --crate-name bar src/main.rs [..]\ -L dependency=[CWD]/target/debug/deps \ --extern foo=[..]libfoo-[..] \ -L /dummy/path1 -L /dummy/path2` [FINISHED] dev [..] ", ) .run(); } #[cargo_test] fn custom_build_script_rustc_flags_no_space() { let p = project() .file( "Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.foo] path = "foo" "#, ) .file("src/main.rs", "fn main() {}") .file( "foo/Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] build = "build.rs" "#, ) .file("foo/src/lib.rs", "") .file( "foo/build.rs", r#" fn main() { println!("cargo:rustc-flags=-lnonexistinglib -L/dummy/path1 -L/dummy/path2"); } "#, ) .build(); p.cargo("build --verbose") .with_stderr( "\ [COMPILING] foo [..] [RUNNING] `rustc --crate-name build_script_build foo/build.rs [..] [RUNNING] `[..]build-script-build` [RUNNING] `rustc --crate-name foo foo/src/lib.rs [..]\ -L dependency=[CWD]/target/debug/deps \ -L /dummy/path1 -L /dummy/path2 -l nonexistinglib` [COMPILING] bar [..] [RUNNING] `rustc --crate-name bar src/main.rs [..]\ -L dependency=[CWD]/target/debug/deps \ --extern foo=[..]libfoo-[..] \ -L /dummy/path1 -L /dummy/path2` [FINISHED] dev [..] 
", ) .run(); } #[cargo_test] fn links_no_build_cmd() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] links = "a" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]/foo/Cargo.toml` Caused by: package `foo v0.5.0 ([CWD])` specifies that it links to `a` but does \ not have a custom build script ", ) .run(); } #[cargo_test] fn links_duplicates() { // this tests that the links_duplicates are caught at resolver time let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] links = "a" build = "build.rs" [dependencies.a-sys] path = "a-sys" "#, ) .file("src/lib.rs", "") .file("build.rs", "") .file( "a-sys/Cargo.toml", r#" [project] name = "a-sys" version = "0.5.0" authors = [] links = "a" build = "build.rs" "#, ) .file("a-sys/src/lib.rs", "") .file("a-sys/build.rs", "") .build(); p.cargo("build").with_status(101) .with_stderr("\ error: failed to select a version for `a-sys`. ... required by package `foo v0.5.0 ([..])` versions that meet the requirements `*` are: 0.5.0 the package `a-sys` links to the native library `a`, but it conflicts with a previous package which links to `a` as well: package `foo v0.5.0 ([..])` Only one package in the dependency graph may specify the same links value. This helps ensure that only one copy of a native library is linked in the final binary. Try to adjust your dependencies so that only one package uses the links ='a-sys' value. For more information, see https://doc.rust-lang.org/cargo/reference/resolver.html#links. failed to select a version for `a-sys` which could resolve this conflict ").run(); } #[cargo_test] fn links_duplicates_old_registry() { // Test old links validator. See `validate_links`. 
Package::new("bar", "0.1.0") .file( "Cargo.toml", r#" [package] name = "bar" version = "0.1.0" links = "a" "#, ) .file("build.rs", "fn main() {}") .file("src/lib.rs", "") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" links = "a" [dependencies] bar = "0.1" "#, ) .file("build.rs", "fn main() {}") .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... [DOWNLOADED] bar v0.1.0 ([..]) [ERROR] multiple packages link to native library `a`, \ but a native library can be linked only once package `bar v0.1.0` ... which satisfies dependency `bar = \"^0.1\"` (locked to 0.1.0) of package `foo v0.1.0 ([..]foo)` links to native library `a` package `foo v0.1.0 ([..]foo)` also links to native library `a` ", ) .run(); } #[cargo_test] fn links_duplicates_deep_dependency() { // this tests that the links_duplicates are caught at resolver time let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] links = "a" build = "build.rs" [dependencies.a] path = "a" "#, ) .file("src/lib.rs", "") .file("build.rs", "") .file( "a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] build = "build.rs" [dependencies.a-sys] path = "a-sys" "#, ) .file("a/src/lib.rs", "") .file("a/build.rs", "") .file( "a/a-sys/Cargo.toml", r#" [project] name = "a-sys" version = "0.5.0" authors = [] links = "a" build = "build.rs" "#, ) .file("a/a-sys/src/lib.rs", "") .file("a/a-sys/build.rs", "") .build(); p.cargo("build").with_status(101) .with_stderr("\ error: failed to select a version for `a-sys`. ... required by package `a v0.5.0 ([..])` ... 
which satisfies path dependency `a` of package `foo v0.5.0 ([..])` versions that meet the requirements `*` are: 0.5.0 the package `a-sys` links to the native library `a`, but it conflicts with a previous package which links to `a` as well: package `foo v0.5.0 ([..])` Only one package in the dependency graph may specify the same links value. This helps ensure that only one copy of a native library is linked in the final binary. Try to adjust your dependencies so that only one package uses the links ='a-sys' value. For more information, see https://doc.rust-lang.org/cargo/reference/resolver.html#links. failed to select a version for `a-sys` which could resolve this conflict ").run(); } #[cargo_test] fn overrides_and_links() { let target = rustc_host(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" [dependencies.a] path = "a" "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" use std::env; fn main() { assert_eq!(env::var("DEP_FOO_FOO").ok().expect("FOO missing"), "bar"); assert_eq!(env::var("DEP_FOO_BAR").ok().expect("BAR missing"), "baz"); } "#, ) .file( ".cargo/config", &format!( r#" [target.{}.foo] rustc-flags = "-L foo -L bar" foo = "bar" bar = "baz" "#, target ), ) .file( "a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] links = "foo" build = "build.rs" "#, ) .file("a/src/lib.rs", "") .file("a/build.rs", "not valid rust code") .build(); p.cargo("build -v") .with_stderr( "\ [..] [..] [..] [..] [..] [RUNNING] `rustc --crate-name foo [..] -L foo -L bar` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn unused_overrides() { let target = rustc_host(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" "#, ) .file("src/lib.rs", "") .file("build.rs", "fn main() {}") .file( ".cargo/config", &format!( r#" [target.{}.foo] rustc-flags = "-L foo -L bar" foo = "bar" bar = "baz" "#, target ), ) .build(); p.cargo("build -v").run(); } #[cargo_test] fn links_passes_env_vars() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" [dependencies.a] path = "a" "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" use std::env; fn main() { assert_eq!(env::var("DEP_FOO_FOO").unwrap(), "bar"); assert_eq!(env::var("DEP_FOO_BAR").unwrap(), "baz"); } "#, ) .file( "a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] links = "foo" build = "build.rs" "#, ) .file("a/src/lib.rs", "") .file( "a/build.rs", r#" use std::env; fn main() { let lib = env::var("CARGO_MANIFEST_LINKS").unwrap(); assert_eq!(lib, "foo"); println!("cargo:foo=bar"); println!("cargo:bar=baz"); } "#, ) .build(); p.cargo("build -v").run(); } #[cargo_test] fn only_rerun_build_script() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" "#, ) .file("src/lib.rs", "") .file("build.rs", "fn main() {}") .build(); p.cargo("build -v").run(); p.root().move_into_the_past(); p.change_file("some-new-file", ""); p.root().move_into_the_past(); p.cargo("build -v") .with_stderr( "\ [COMPILING] foo v0.5.0 ([CWD]) [RUNNING] `[..]/build-script-build` [RUNNING] `rustc --crate-name foo [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn rebuild_continues_to_pass_env_vars() { let a = project() .at("a") .file( "Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] links = "foo" build = "build.rs" "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" use std::time::Duration; fn main() { println!("cargo:foo=bar"); println!("cargo:bar=baz"); std::thread::sleep(Duration::from_millis(500)); } "#, ) .build(); a.root().move_into_the_past(); let p = project() .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" [dependencies.a] path = '{}' "#, a.root().display() ), ) .file("src/lib.rs", "") .file( "build.rs", r#" use std::env; fn main() { assert_eq!(env::var("DEP_FOO_FOO").unwrap(), "bar"); assert_eq!(env::var("DEP_FOO_BAR").unwrap(), "baz"); } "#, ) .build(); p.cargo("build -v").run(); p.root().move_into_the_past(); p.change_file("some-new-file", ""); p.root().move_into_the_past(); p.cargo("build -v").run(); } #[cargo_test] fn testing_and_such() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" "#, ) .file("src/lib.rs", "") .file("build.rs", "fn main() {}") .build(); println!("build"); p.cargo("build -v").run(); p.root().move_into_the_past(); p.change_file("src/lib.rs", ""); p.root().move_into_the_past(); println!("test"); p.cargo("test -vj1") .with_stderr( "\ [COMPILING] foo v0.5.0 ([CWD]) [RUNNING] `[..]/build-script-build` [RUNNING] `rustc --crate-name foo [..]` [RUNNING] `rustc --crate-name foo [..]` [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] `[..]/foo-[..][EXE]` [DOCTEST] foo [RUNNING] `rustdoc [..]--test [..]`", ) .with_stdout_contains_n("running 0 tests", 2) .run(); println!("doc"); p.cargo("doc -v") .with_stderr( "\ [DOCUMENTING] foo v0.5.0 ([CWD]) [RUNNING] `rustdoc [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); p.change_file("src/main.rs", "fn main() {}"); println!("run"); p.cargo("run") .with_stderr( "\ [COMPILING] foo v0.5.0 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [RUNNING] `target/debug/foo[EXE]` ", ) .run(); } #[cargo_test] fn propagation_of_l_flags() { let target = rustc_host(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies.a] path = "a" "#, ) .file("src/lib.rs", "") .file( "a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] links = "bar" build = "build.rs" [dependencies.b] path = "../b" "#, ) .file("a/src/lib.rs", "") .file( "a/build.rs", r#"fn main() { println!("cargo:rustc-flags=-L bar"); }"#, ) .file( "b/Cargo.toml", r#" [project] name = "b" version = "0.5.0" authors = [] links = "foo" build = "build.rs" "#, ) .file("b/src/lib.rs", "") .file("b/build.rs", "bad file") .file( ".cargo/config", &format!( r#" [target.{}.foo] rustc-flags = "-L foo" "#, target ), ) .build(); p.cargo("build -v -j1") .with_stderr_contains( "\ [RUNNING] `rustc --crate-name a [..] -L bar[..]-L foo[..]` [COMPILING] foo v0.5.0 ([CWD]) [RUNNING] `rustc --crate-name foo [..] 
-L bar -L foo` ", ) .run(); } #[cargo_test] fn propagation_of_l_flags_new() { let target = rustc_host(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies.a] path = "a" "#, ) .file("src/lib.rs", "") .file( "a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] links = "bar" build = "build.rs" [dependencies.b] path = "../b" "#, ) .file("a/src/lib.rs", "") .file( "a/build.rs", r#" fn main() { println!("cargo:rustc-link-search=bar"); } "#, ) .file( "b/Cargo.toml", r#" [project] name = "b" version = "0.5.0" authors = [] links = "foo" build = "build.rs" "#, ) .file("b/src/lib.rs", "") .file("b/build.rs", "bad file") .file( ".cargo/config", &format!( r#" [target.{}.foo] rustc-link-search = ["foo"] "#, target ), ) .build(); p.cargo("build -v -j1") .with_stderr_contains( "\ [RUNNING] `rustc --crate-name a [..] -L bar[..]-L foo[..]` [COMPILING] foo v0.5.0 ([CWD]) [RUNNING] `rustc --crate-name foo [..] -L bar -L foo` ", ) .run(); } #[cargo_test] fn build_deps_simple() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" [build-dependencies.a] path = "a" "#, ) .file("src/lib.rs", "") .file( "build.rs", " #[allow(unused_extern_crates)] extern crate a; fn main() {} ", ) .file("a/Cargo.toml", &basic_manifest("a", "0.5.0")) .file("a/src/lib.rs", "") .build(); p.cargo("build -v") .with_stderr( "\ [COMPILING] a v0.5.0 ([CWD]/a) [RUNNING] `rustc --crate-name a [..]` [COMPILING] foo v0.5.0 ([CWD]) [RUNNING] `rustc [..] build.rs [..] --extern a=[..]` [RUNNING] `[..]/foo-[..]/build-script-build` [RUNNING] `rustc --crate-name foo [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn build_deps_not_for_normal() { let target = rustc_host(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" [build-dependencies.aaaaa] path = "a" "#, ) .file( "src/lib.rs", "#[allow(unused_extern_crates)] extern crate aaaaa;", ) .file( "build.rs", " #[allow(unused_extern_crates)] extern crate aaaaa; fn main() {} ", ) .file("a/Cargo.toml", &basic_manifest("aaaaa", "0.5.0")) .file("a/src/lib.rs", "") .build(); p.cargo("build -v --target") .arg(&target) .with_status(101) .with_stderr_contains("[..]can't find crate for `aaaaa`[..]") .with_stderr_contains( "\ [ERROR] could not compile `foo` due to previous error Caused by: process didn't exit successfully: [..] ", ) .run(); } #[cargo_test] fn build_cmd_with_a_build_cmd() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" [build-dependencies.a] path = "a" "#, ) .file("src/lib.rs", "") .file( "build.rs", " #[allow(unused_extern_crates)] extern crate a; fn main() {} ", ) .file( "a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] build = "build.rs" [build-dependencies.b] path = "../b" "#, ) .file("a/src/lib.rs", "") .file( "a/build.rs", "#[allow(unused_extern_crates)] extern crate b; fn main() {}", ) .file("b/Cargo.toml", &basic_manifest("b", "0.5.0")) .file("b/src/lib.rs", "") .build(); p.cargo("build -v") .with_stderr( "\ [COMPILING] b v0.5.0 ([CWD]/b) [RUNNING] `rustc --crate-name b [..]` [COMPILING] a v0.5.0 ([CWD]/a) [RUNNING] `rustc [..] a/build.rs [..] --extern b=[..]` [RUNNING] `[..]/a-[..]/build-script-build` [RUNNING] `rustc --crate-name a [..]lib.rs [..]--crate-type lib \ --emit=[..]link[..]-C debuginfo=2 \ -C metadata=[..] 
\ --out-dir [..]target/debug/deps \ -L [..]target/debug/deps` [COMPILING] foo v0.5.0 ([CWD]) [RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin \ --emit=[..]link[..]\ -C debuginfo=2 -C metadata=[..] --out-dir [..] \ -L [..]target/debug/deps \ --extern a=[..]liba[..].rlib` [RUNNING] `[..]/foo-[..]/build-script-build` [RUNNING] `rustc --crate-name foo [..]lib.rs [..]--crate-type lib \ --emit=[..]link[..]-C debuginfo=2 \ -C metadata=[..] \ --out-dir [..] \ -L [..]target/debug/deps` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn out_dir_is_preserved() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" use std::env; use std::fs::File; use std::path::Path; fn main() { let out = env::var("OUT_DIR").unwrap(); File::create(Path::new(&out).join("foo")).unwrap(); } "#, ) .build(); // Make the file p.cargo("build -v").run(); // Change to asserting that it's there p.change_file( "build.rs", r#" use std::env; use std::fs::File; use std::path::Path; fn main() { let out = env::var("OUT_DIR").unwrap(); File::open(&Path::new(&out).join("foo")).unwrap(); } "#, ); p.cargo("build -v") .with_stderr( "\ [COMPILING] foo [..] [RUNNING] `rustc --crate-name build_script_build [..] [RUNNING] `[..]/build-script-build` [RUNNING] `rustc --crate-name foo [..] [FINISHED] [..] ", ) .run(); // Run a fresh build where file should be preserved p.cargo("build -v") .with_stderr( "\ [FRESH] foo [..] [FINISHED] [..] ", ) .run(); // One last time to make sure it's still there. p.change_file("foo", ""); p.cargo("build -v") .with_stderr( "\ [COMPILING] foo [..] [RUNNING] `[..]build-script-build` [RUNNING] `rustc --crate-name foo [..] [FINISHED] [..] 
", ) .run(); } #[cargo_test] fn output_separate_lines() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" fn main() { println!("cargo:rustc-flags=-L foo"); println!("cargo:rustc-flags=-l static=foo"); } "#, ) .build(); p.cargo("build -v") .with_status(101) .with_stderr_contains( "\ [COMPILING] foo v0.5.0 ([CWD]) [RUNNING] `rustc [..] build.rs [..]` [RUNNING] `[..]/foo-[..]/build-script-build` [RUNNING] `rustc --crate-name foo [..] -L foo -l static=foo` [ERROR] could not find native static library [..] ", ) .run(); } #[cargo_test] fn output_separate_lines_new() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" fn main() { println!("cargo:rustc-link-search=foo"); println!("cargo:rustc-link-lib=static=foo"); println!("cargo:rustc-link-lib=bar"); println!("cargo:rustc-link-search=bar"); } "#, ) .build(); // The order of the arguments passed to rustc is important. p.cargo("build -v") .with_status(101) .with_stderr_contains( "\ [COMPILING] foo v0.5.0 ([CWD]) [RUNNING] `rustc [..] build.rs [..]` [RUNNING] `[..]/foo-[..]/build-script-build` [RUNNING] `rustc --crate-name foo [..] -L foo -L bar -l static=foo -l bar` [ERROR] could not find native static library [..] 
", ) .run(); } #[cfg(not(windows))] // FIXME(#867) #[cargo_test] fn code_generation() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" "#, ) .file( "src/main.rs", r#" include!(concat!(env!("OUT_DIR"), "/hello.rs")); fn main() { println!("{}", message()); } "#, ) .file( "build.rs", r#" use std::env; use std::fs; use std::path::PathBuf; fn main() { let dst = PathBuf::from(env::var("OUT_DIR").unwrap()); fs::write(dst.join("hello.rs"), " pub fn message() -> &'static str { \"Hello, World!\" } ") .unwrap(); } "#, ) .build(); p.cargo("run") .with_stderr( "\ [COMPILING] foo v0.5.0 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [RUNNING] `target/debug/foo`", ) .with_stdout("Hello, World!") .run(); p.cargo("test").run(); } #[cargo_test] fn release_with_build_script() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" fn main() {} "#, ) .build(); p.cargo("build -v --release").run(); } #[cargo_test] fn build_script_only() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.0" authors = [] build = "build.rs" "#, ) .file("build.rs", r#"fn main() {}"#) .build(); p.cargo("build -v") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: no targets specified in the manifest either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] section must be present", ) .run(); } #[cargo_test] fn shared_dep_with_a_build_script() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" [dependencies.a] path = "a" [build-dependencies.b] path = "b" "#, ) .file("src/lib.rs", "") .file("build.rs", "fn main() {}") .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.5.0" authors = [] build = "build.rs" "#, ) .file("a/build.rs", "fn main() {}") 
.file("a/src/lib.rs", "") .file( "b/Cargo.toml", r#" [package] name = "b" version = "0.5.0" authors = [] [dependencies.a] path = "../a" "#, ) .file("b/src/lib.rs", "") .build(); p.cargo("build -v").run(); } #[cargo_test] fn transitive_dep_host() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" [build-dependencies.b] path = "b" "#, ) .file("src/lib.rs", "") .file("build.rs", "fn main() {}") .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.5.0" authors = [] links = "foo" build = "build.rs" "#, ) .file("a/build.rs", "fn main() {}") .file("a/src/lib.rs", "") .file( "b/Cargo.toml", r#" [package] name = "b" version = "0.5.0" authors = [] [lib] name = "b" plugin = true [dependencies.a] path = "../a" "#, ) .file("b/src/lib.rs", "") .build(); p.cargo("build").run(); } #[cargo_test] fn test_a_lib_with_a_build_command() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" "#, ) .file( "src/lib.rs", r#" include!(concat!(env!("OUT_DIR"), "/foo.rs")); /// ``` /// foo::bar(); /// ``` pub fn bar() { assert_eq!(foo(), 1); } "#, ) .file( "build.rs", r#" use std::env; use std::fs; use std::path::PathBuf; fn main() { let out = PathBuf::from(env::var("OUT_DIR").unwrap()); fs::write(out.join("foo.rs"), "fn foo() -> i32 { 1 }").unwrap(); } "#, ) .build(); p.cargo("test").run(); } #[cargo_test] fn test_dev_dep_build_script() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dev-dependencies.a] path = "a" "#, ) .file("src/lib.rs", "") .file( "a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] build = "build.rs" "#, ) .file("a/build.rs", "fn main() {}") .file("a/src/lib.rs", "") .build(); p.cargo("test").run(); } #[cargo_test] fn build_script_with_dynamic_native_dependency() { let build = project() .at("builder") .file( "Cargo.toml", r#" [package] name = "builder" version = 
"0.0.1" authors = [] [lib] name = "builder" crate-type = ["dylib"] "#, ) .file("src/lib.rs", "#[no_mangle] pub extern fn foo() {}") .build(); let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" [build-dependencies.bar] path = "bar" "#, ) .file("build.rs", "extern crate bar; fn main() { bar::bar() }") .file("src/lib.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] build = "build.rs" "#, ) .file( "bar/build.rs", r#" use std::env; use std::fs; use std::path::PathBuf; fn main() { let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap()); let root = PathBuf::from(env::var("BUILDER_ROOT").unwrap()); let file = format!("{}builder{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX); let src = root.join(&file); let dst = out_dir.join(&file); fs::copy(src, dst).unwrap(); if cfg!(target_env = "msvc") { fs::copy(root.join("builder.dll.lib"), out_dir.join("builder.dll.lib")).unwrap(); } println!("cargo:rustc-link-search=native={}", out_dir.display()); } "#, ) .file( "bar/src/lib.rs", r#" pub fn bar() { #[cfg_attr(not(target_env = "msvc"), link(name = "builder"))] #[cfg_attr(target_env = "msvc", link(name = "builder.dll"))] extern { fn foo(); } unsafe { foo() } } "#, ) .build(); build .cargo("build -v") .env("CARGO_LOG", "cargo::ops::cargo_rustc") .run(); let root = build.root().join("target").join("debug"); foo.cargo("build -v") .env("BUILDER_ROOT", root) .env("CARGO_LOG", "cargo::ops::cargo_rustc") .run(); } #[cargo_test] fn profile_and_opt_level_set_correctly() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" use std::env; fn main() { assert_eq!(env::var("OPT_LEVEL").unwrap(), "3"); assert_eq!(env::var("PROFILE").unwrap(), "release"); assert_eq!(env::var("DEBUG").unwrap(), "false"); } "#, ) .build(); p.cargo("bench").run(); } #[cargo_test] fn 
profile_debug_0() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [profile.dev] debug = 0 "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" use std::env; fn main() { assert_eq!(env::var("OPT_LEVEL").unwrap(), "0"); assert_eq!(env::var("PROFILE").unwrap(), "debug"); assert_eq!(env::var("DEBUG").unwrap(), "false"); } "#, ) .build(); p.cargo("build").run(); } #[cargo_test] fn build_script_with_lto() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" [profile.dev] lto = true "#, ) .file("src/lib.rs", "") .file("build.rs", "fn main() {}") .build(); p.cargo("build").run(); } #[cargo_test] fn test_duplicate_deps() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] build = "build.rs" [dependencies.bar] path = "bar" [build-dependencies.bar] path = "bar" "#, ) .file( "src/main.rs", r#" extern crate bar; fn main() { bar::do_nothing() } "#, ) .file( "build.rs", r#" extern crate bar; fn main() { bar::do_nothing() } "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn do_nothing() {}") .build(); p.cargo("build").run(); } #[cargo_test] fn cfg_feedback() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" "#, ) .file("src/main.rs", "#[cfg(foo)] fn main() {}") .file( "build.rs", r#"fn main() { println!("cargo:rustc-cfg=foo"); }"#, ) .build(); p.cargo("build -v").run(); } #[cargo_test] fn cfg_override() { let target = rustc_host(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] links = "a" build = "build.rs" "#, ) .file("src/main.rs", "#[cfg(foo)] fn main() {}") .file("build.rs", "") .file( ".cargo/config", &format!( r#" [target.{}.a] rustc-cfg = ["foo"] "#, target ), ) .build(); p.cargo("build -v").run(); } #[cargo_test] fn cfg_test() { let p = project() .file( 
"Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" "#, ) .file( "build.rs", r#"fn main() { println!("cargo:rustc-cfg=foo"); }"#, ) .file( "src/lib.rs", r#" /// /// ``` /// extern crate foo; /// /// fn main() { /// foo::foo() /// } /// ``` /// #[cfg(foo)] pub fn foo() {} #[cfg(foo)] #[test] fn test_foo() { foo() } "#, ) .file("tests/test.rs", "#[cfg(foo)] #[test] fn test_bar() {}") .build(); p.cargo("test -v") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] [..] build.rs [..] [RUNNING] `[..]/build-script-build` [RUNNING] [..] --cfg foo[..] [RUNNING] [..] --cfg foo[..] [RUNNING] [..] --cfg foo[..] [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] `[..]/foo-[..][EXE]` [RUNNING] `[..]/test-[..][EXE]` [DOCTEST] foo [RUNNING] [..] --cfg foo[..]", ) .with_stdout_contains("test test_foo ... ok") .with_stdout_contains("test test_bar ... ok") .with_stdout_contains_n("test [..] ... ok", 3) .run(); } #[cargo_test] fn cfg_doc() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" [dependencies.bar] path = "bar" "#, ) .file( "build.rs", r#"fn main() { println!("cargo:rustc-cfg=foo"); }"#, ) .file("src/lib.rs", "#[cfg(foo)] pub fn foo() {}") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] build = "build.rs" "#, ) .file( "bar/build.rs", r#"fn main() { println!("cargo:rustc-cfg=bar"); }"#, ) .file("bar/src/lib.rs", "#[cfg(bar)] pub fn bar() {}") .build(); p.cargo("doc").run(); assert!(p.root().join("target/doc").is_dir()); assert!(p.root().join("target/doc/foo/fn.foo.html").is_file()); assert!(p.root().join("target/doc/bar/fn.bar.html").is_file()); } #[cargo_test] fn cfg_override_test() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" links = "a" "#, ) .file("build.rs", "") .file( ".cargo/config", &format!( r#" [target.{}.a] rustc-cfg = ["foo"] 
"#, rustc_host() ), ) .file( "src/lib.rs", r#" /// /// ``` /// extern crate foo; /// /// fn main() { /// foo::foo() /// } /// ``` /// #[cfg(foo)] pub fn foo() {} #[cfg(foo)] #[test] fn test_foo() { foo() } "#, ) .file("tests/test.rs", "#[cfg(foo)] #[test] fn test_bar() {}") .build(); p.cargo("test -v") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `[..]` [RUNNING] `[..]` [RUNNING] `[..]` [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] `[..]/foo-[..][EXE]` [RUNNING] `[..]/test-[..][EXE]` [DOCTEST] foo [RUNNING] [..] --cfg foo[..]", ) .with_stdout_contains("test test_foo ... ok") .with_stdout_contains("test test_bar ... ok") .with_stdout_contains_n("test [..] ... ok", 3) .run(); } #[cargo_test] fn cfg_override_doc() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" links = "a" [dependencies.bar] path = "bar" "#, ) .file( ".cargo/config", &format!( r#" [target.{target}.a] rustc-cfg = ["foo"] [target.{target}.b] rustc-cfg = ["bar"] "#, target = rustc_host() ), ) .file("build.rs", "") .file("src/lib.rs", "#[cfg(foo)] pub fn foo() {}") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] build = "build.rs" links = "b" "#, ) .file("bar/build.rs", "") .file("bar/src/lib.rs", "#[cfg(bar)] pub fn bar() {}") .build(); p.cargo("doc").run(); assert!(p.root().join("target/doc").is_dir()); assert!(p.root().join("target/doc/foo/fn.foo.html").is_file()); assert!(p.root().join("target/doc/bar/fn.bar.html").is_file()); } #[cargo_test] fn env_build() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" "#, ) .file( "src/main.rs", r#" const FOO: &'static str = env!("FOO"); fn main() { println!("{}", FOO); } "#, ) .file( "build.rs", r#"fn main() { println!("cargo:rustc-env=FOO=foo"); }"#, ) .build(); p.cargo("build -v").run(); p.cargo("run -v").with_stdout("foo\n").run(); } #[cargo_test] fn 
env_test() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" "#, ) .file( "build.rs", r#"fn main() { println!("cargo:rustc-env=FOO=foo"); }"#, ) .file( "src/lib.rs", r#"pub const FOO: &'static str = env!("FOO"); "#, ) .file( "tests/test.rs", r#" extern crate foo; #[test] fn test_foo() { assert_eq!("foo", foo::FOO); } "#, ) .build(); p.cargo("test -v") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] [..] build.rs [..] [RUNNING] `[..]/build-script-build` [RUNNING] [..] --crate-name foo[..] [RUNNING] [..] --crate-name foo[..] [RUNNING] [..] --crate-name test[..] [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] `[..]/foo-[..][EXE]` [RUNNING] `[..]/test-[..][EXE]` [DOCTEST] foo [RUNNING] [..] --crate-name foo[..]", ) .with_stdout_contains_n("running 0 tests", 2) .with_stdout_contains("test test_foo ... ok") .run(); } #[cargo_test] fn env_doc() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" "#, ) .file( "src/main.rs", r#" const FOO: &'static str = env!("FOO"); fn main() {} "#, ) .file( "build.rs", r#"fn main() { println!("cargo:rustc-env=FOO=foo"); }"#, ) .build(); p.cargo("doc -v").run(); } #[cargo_test] fn flags_go_into_tests() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] b = { path = "b" } "#, ) .file("src/lib.rs", "") .file("tests/foo.rs", "") .file( "b/Cargo.toml", r#" [project] name = "b" version = "0.5.0" authors = [] [dependencies] a = { path = "../a" } "#, ) .file("b/src/lib.rs", "") .file( "a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] build = "build.rs" "#, ) .file("a/src/lib.rs", "") .file( "a/build.rs", r#" fn main() { println!("cargo:rustc-link-search=test"); } "#, ) .build(); p.cargo("test -v --test=foo") .with_stderr( "\ [COMPILING] a v0.5.0 ([..] [RUNNING] `rustc [..] 
a/build.rs [..]` [RUNNING] `[..]/build-script-build` [RUNNING] `rustc [..] a/src/lib.rs [..] -L test[..]` [COMPILING] b v0.5.0 ([..] [RUNNING] `rustc [..] b/src/lib.rs [..] -L test[..]` [COMPILING] foo v0.5.0 ([..] [RUNNING] `rustc [..] src/lib.rs [..] -L test[..]` [RUNNING] `rustc [..] tests/foo.rs [..] -L test[..]` [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] `[..]/foo-[..][EXE]`", ) .with_stdout_contains("running 0 tests") .run(); p.cargo("test -v -pb --lib") .with_stderr( "\ [FRESH] a v0.5.0 ([..] [COMPILING] b v0.5.0 ([..] [RUNNING] `rustc [..] b/src/lib.rs [..] -L test[..]` [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] `[..]/b-[..][EXE]`", ) .with_stdout_contains("running 0 tests") .run(); } #[cargo_test] fn diamond_passes_args_only_once() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] a = { path = "a" } b = { path = "b" } "#, ) .file("src/lib.rs", "") .file("tests/foo.rs", "") .file( "a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] [dependencies] b = { path = "../b" } c = { path = "../c" } "#, ) .file("a/src/lib.rs", "") .file( "b/Cargo.toml", r#" [project] name = "b" version = "0.5.0" authors = [] [dependencies] c = { path = "../c" } "#, ) .file("b/src/lib.rs", "") .file( "c/Cargo.toml", r#" [project] name = "c" version = "0.5.0" authors = [] build = "build.rs" "#, ) .file( "c/build.rs", r#" fn main() { println!("cargo:rustc-link-search=native=test"); } "#, ) .file("c/src/lib.rs", "") .build(); p.cargo("build -v") .with_stderr( "\ [COMPILING] c v0.5.0 ([..] [RUNNING] `rustc [..]` [RUNNING] `[..]` [RUNNING] `rustc [..]` [COMPILING] b v0.5.0 ([..] [RUNNING] `rustc [..]` [COMPILING] a v0.5.0 ([..] [RUNNING] `rustc [..]` [COMPILING] foo v0.5.0 ([..] [RUNNING] `[..]rmeta -L native=test` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn adding_an_override_invalidates() { let target = rustc_host(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] links = "foo" build = "build.rs" "#, ) .file("src/lib.rs", "") .file(".cargo/config", "") .file( "build.rs", r#" fn main() { println!("cargo:rustc-link-search=native=foo"); } "#, ) .build(); p.cargo("build -v") .with_stderr( "\ [COMPILING] foo v0.5.0 ([..] [RUNNING] `rustc [..]` [RUNNING] `[..]` [RUNNING] `rustc [..] -L native=foo` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.change_file( ".cargo/config", &format!( " [target.{}.foo] rustc-link-search = [\"native=bar\"] ", target ), ); p.cargo("build -v") .with_stderr( "\ [COMPILING] foo v0.5.0 ([..] [RUNNING] `rustc [..] -L native=bar` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn changing_an_override_invalidates() { let target = rustc_host(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] links = "foo" build = "build.rs" "#, ) .file("src/lib.rs", "") .file( ".cargo/config", &format!( " [target.{}.foo] rustc-link-search = [\"native=foo\"] ", target ), ) .file("build.rs", "") .build(); p.cargo("build -v") .with_stderr( "\ [COMPILING] foo v0.5.0 ([..] [RUNNING] `rustc [..] -L native=foo` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.change_file( ".cargo/config", &format!( " [target.{}.foo] rustc-link-search = [\"native=bar\"] ", target ), ); p.cargo("build -v") .with_stderr( "\ [COMPILING] foo v0.5.0 ([..] [RUNNING] `rustc [..] -L native=bar` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn fresh_builds_possible_with_link_libs() { // The bug is non-deterministic. 
Sometimes you can get a fresh build let target = rustc_host(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] links = "nativefoo" build = "build.rs" "#, ) .file("src/lib.rs", "") .file( ".cargo/config", &format!( " [target.{}.nativefoo] rustc-link-lib = [\"a\"] rustc-link-search = [\"./b\"] rustc-flags = \"-l z -L ./\" ", target ), ) .file("build.rs", "") .build(); p.cargo("build -v") .with_stderr( "\ [COMPILING] foo v0.5.0 ([..] [RUNNING] `rustc [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("build -v") .with_stderr( "\ [FRESH] foo v0.5.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn fresh_builds_possible_with_multiple_metadata_overrides() { // The bug is non-deterministic. Sometimes you can get a fresh build let target = rustc_host(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] links = "foo" build = "build.rs" "#, ) .file("src/lib.rs", "") .file( ".cargo/config", &format!( " [target.{}.foo] a = \"\" b = \"\" c = \"\" d = \"\" e = \"\" ", target ), ) .file("build.rs", "") .build(); p.cargo("build -v") .with_stderr( "\ [COMPILING] foo v0.5.0 ([..] [RUNNING] `rustc [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("build -v") .env("CARGO_LOG", "cargo::ops::cargo_rustc::fingerprint=info") .with_stderr( "\ [FRESH] foo v0.5.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn generate_good_d_files() { // this is here to stop regression on an issue where build.rs rerun-if-changed paths aren't // made absolute properly, which in turn interacts poorly with the dep-info-basedir setting, // and the dep-info files have other-crate-relative paths spat out in them let p = project() .file( "awoo/Cargo.toml", r#" [project] name = "awoo" version = "0.5.0" build = "build.rs" "#, ) .file("awoo/src/lib.rs", "") .file( "awoo/build.rs", r#" fn main() { println!("cargo:rerun-if-changed=build.rs"); println!("cargo:rerun-if-changed=barkbarkbark"); } "#, ) .file( "Cargo.toml", r#" [project] name = "meow" version = "0.5.0" [dependencies] awoo = { path = "awoo" } "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build -v").run(); let dot_d_path = p.bin("meow").with_extension("d"); println!("*meow at* {:?}", dot_d_path); let dot_d = fs::read_to_string(&dot_d_path).unwrap(); println!("*.d file content*: {}", &dot_d); assert_match_exact( "[..]/target/debug/meow[EXE]: [..]/awoo/barkbarkbark [..]/awoo/build.rs[..]", &dot_d, ); // paths relative to dependency roots should not be allowed assert!(!dot_d .split_whitespace() .any(|v| v == "barkbarkbark" || v == "build.rs")); p.change_file( ".cargo/config.toml", r#" [build] dep-info-basedir="." 
"#, ); p.cargo("build -v").run(); let dot_d = fs::read_to_string(&dot_d_path).unwrap(); println!("*.d file content with dep-info-basedir*: {}", &dot_d); assert_match_exact( "target/debug/meow[EXE]: awoo/barkbarkbark awoo/build.rs[..]", &dot_d, ); // paths relative to dependency roots should not be allowed assert!(!dot_d .split_whitespace() .any(|v| v == "barkbarkbark" || v == "build.rs")); } #[cargo_test] fn generate_good_d_files_for_external_tools() { // This tests having a relative paths going out of the // project root in config's dep-info-basedir let p = project_in("rust_things") .file( "awoo/Cargo.toml", r#" [project] name = "awoo" version = "0.5.0" build = "build.rs" "#, ) .file("awoo/src/lib.rs", "") .file( "awoo/build.rs", r#" fn main() { println!("cargo:rerun-if-changed=build.rs"); println!("cargo:rerun-if-changed=barkbarkbark"); } "#, ) .file( "Cargo.toml", r#" [project] name = "meow" version = "0.5.0" [dependencies] awoo = { path = "awoo" } "#, ) .file("src/main.rs", "fn main() {}") .file( ".cargo/config.toml", r#" [build] dep-info-basedir="../.." 
"#, ) .build(); p.cargo("build -v").run(); let dot_d_path = p.bin("meow").with_extension("d"); let dot_d = fs::read_to_string(&dot_d_path).unwrap(); println!("*.d file content with dep-info-basedir*: {}", &dot_d); assert_match_exact( concat!( "rust_things/foo/target/debug/meow[EXE]:", " rust_things/foo/awoo/barkbarkbark", " rust_things/foo/awoo/build.rs", " rust_things/foo/awoo/src/lib.rs", " rust_things/foo/src/main.rs", ), &dot_d, ); } #[cargo_test] fn rebuild_only_on_explicit_paths() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" fn main() { println!("cargo:rerun-if-changed=foo"); println!("cargo:rerun-if-changed=bar"); } "#, ) .build(); p.cargo("build -v").run(); // files don't exist, so should always rerun if they don't exist println!("run without"); p.cargo("build -v") .with_stderr( "\ [COMPILING] foo v0.5.0 ([..]) [RUNNING] `[..]/build-script-build` [RUNNING] `rustc [..] src/lib.rs [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); sleep_ms(1000); p.change_file("foo", ""); p.change_file("bar", ""); sleep_ms(1000); // make sure the to-be-created outfile has a timestamp distinct from the infiles // now the exist, so run once, catch the mtime, then shouldn't run again println!("run with"); p.cargo("build -v") .with_stderr( "\ [COMPILING] foo v0.5.0 ([..]) [RUNNING] `[..]/build-script-build` [RUNNING] `rustc [..] src/lib.rs [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); println!("run with2"); p.cargo("build -v") .with_stderr( "\ [FRESH] foo v0.5.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); sleep_ms(1000); // random other files do not affect freshness println!("run baz"); p.change_file("baz", ""); p.cargo("build -v") .with_stderr( "\ [FRESH] foo v0.5.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); // but changing dependent files does println!("run foo change"); p.change_file("foo", ""); p.cargo("build -v") .with_stderr( "\ [COMPILING] foo v0.5.0 ([..]) [RUNNING] `[..]/build-script-build` [RUNNING] `rustc [..] src/lib.rs [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); // .. as does deleting a file println!("run foo delete"); fs::remove_file(p.root().join("bar")).unwrap(); p.cargo("build -v") .with_stderr( "\ [COMPILING] foo v0.5.0 ([..]) [RUNNING] `[..]/build-script-build` [RUNNING] `rustc [..] src/lib.rs [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn doctest_receives_build_link_args() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies.a] path = "a" "#, ) .file("src/lib.rs", "") .file( "a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] links = "bar" build = "build.rs" "#, ) .file("a/src/lib.rs", "") .file( "a/build.rs", r#" fn main() { println!("cargo:rustc-link-search=native=bar"); } "#, ) .build(); p.cargo("test -v") .with_stderr_contains( "[RUNNING] `rustdoc [..]--crate-name foo --test [..]-L native=bar[..]`", ) .run(); } #[cargo_test] fn please_respect_the_dag() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" [dependencies] a = { path = 'a' } "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" fn main() { println!("cargo:rustc-link-search=native=foo"); } "#, ) .file( "a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] links = "bar" build = "build.rs" "#, ) .file("a/src/lib.rs", "") .file( "a/build.rs", r#" fn main() { println!("cargo:rustc-link-search=native=bar"); } "#, ) .build(); p.cargo("build -v") .with_stderr_contains("[RUNNING] `rustc [..] 
-L native=foo -L native=bar[..]`") .run(); } #[cargo_test] fn non_utf8_output() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" "#, ) .file( "build.rs", r#" use std::io::prelude::*; fn main() { let mut out = std::io::stdout(); // print something that's not utf8 out.write_all(b"\xff\xff\n").unwrap(); // now print some cargo metadata that's utf8 println!("cargo:rustc-cfg=foo"); // now print more non-utf8 out.write_all(b"\xff\xff\n").unwrap(); } "#, ) .file("src/main.rs", "#[cfg(foo)] fn main() {}") .build(); p.cargo("build -v").run(); } #[cargo_test] fn custom_target_dir() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] a = { path = "a" } "#, ) .file("src/lib.rs", "") .file( ".cargo/config", r#" [build] target-dir = 'test' "#, ) .file( "a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] build = "build.rs" "#, ) .file("a/build.rs", "fn main() {}") .file("a/src/lib.rs", "") .build(); p.cargo("build -v").run(); } #[cargo_test] fn panic_abort_with_build_scripts() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [profile.release] panic = 'abort' [dependencies] a = { path = "a" } "#, ) .file( "src/lib.rs", "#[allow(unused_extern_crates)] extern crate a;", ) .file("build.rs", "fn main() {}") .file( "a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] build = "build.rs" [build-dependencies] b = { path = "../b" } "#, ) .file("a/src/lib.rs", "") .file( "a/build.rs", "#[allow(unused_extern_crates)] extern crate b; fn main() {}", ) .file( "b/Cargo.toml", r#" [project] name = "b" version = "0.5.0" authors = [] "#, ) .file("b/src/lib.rs", "") .build(); p.cargo("build -v --release").run(); p.root().join("target").rm_rf(); p.cargo("test --release -v") .with_stderr_does_not_contain("[..]panic[..]") .run(); } #[cargo_test] fn warnings_emitted() { let p = project() 
.file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" fn main() { println!("cargo:warning=foo"); println!("cargo:warning=bar"); } "#, ) .build(); p.cargo("build -v") .with_stderr( "\ [COMPILING] foo v0.5.0 ([..]) [RUNNING] `rustc [..]` [RUNNING] `[..]` warning: foo warning: bar [RUNNING] `rustc [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn warnings_emitted_when_build_script_panics() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" fn main() { println!("cargo:warning=foo"); println!("cargo:warning=bar"); panic!(); } "#, ) .build(); p.cargo("build") .with_status(101) .with_stdout("") .with_stderr_contains("warning: foo\nwarning: bar") .run(); } #[cargo_test] fn warnings_hidden_for_upstream() { Package::new("bar", "0.1.0") .file( "build.rs", r#" fn main() { println!("cargo:warning=foo"); println!("cargo:warning=bar"); } "#, ) .file( "Cargo.toml", r#" [project] name = "bar" version = "0.1.0" authors = [] build = "build.rs" "#, ) .file("src/lib.rs", "") .publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] bar = "*" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build -v") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... [DOWNLOADED] bar v0.1.0 ([..]) [COMPILING] bar v0.1.0 [RUNNING] `rustc [..]` [RUNNING] `[..]` [RUNNING] `rustc [..]` [COMPILING] foo v0.5.0 ([..]) [RUNNING] `rustc [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn warnings_printed_on_vv() { Package::new("bar", "0.1.0") .file( "build.rs", r#" fn main() { println!("cargo:warning=foo"); println!("cargo:warning=bar"); } "#, ) .file( "Cargo.toml", r#" [project] name = "bar" version = "0.1.0" authors = [] build = "build.rs" "#, ) .file("src/lib.rs", "") .publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] bar = "*" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build -vv") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... [DOWNLOADED] bar v0.1.0 ([..]) [COMPILING] bar v0.1.0 [RUNNING] `[..] rustc [..]` [RUNNING] `[..]` warning: foo warning: bar [RUNNING] `[..] rustc [..]` [COMPILING] foo v0.5.0 ([..]) [RUNNING] `[..] rustc [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn output_shows_on_vv() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" use std::io::prelude::*; fn main() { std::io::stderr().write_all(b"stderr\n").unwrap(); std::io::stdout().write_all(b"stdout\n").unwrap(); } "#, ) .build(); p.cargo("build -vv") .with_stdout("[foo 0.5.0] stdout") .with_stderr( "\ [COMPILING] foo v0.5.0 ([..]) [RUNNING] `[..] rustc [..]` [RUNNING] `[..]` [foo 0.5.0] stderr [RUNNING] `[..] rustc [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn links_with_dots() { let target = rustc_host(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" links = "a.b" "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" fn main() { println!("cargo:rustc-link-search=bar") } "#, ) .file( ".cargo/config", &format!( r#" [target.{}.'a.b'] rustc-link-search = ["foo"] "#, target ), ) .build(); p.cargo("build -v") .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..] [..] -L foo[..]`") .run(); } #[cargo_test] fn rustc_and_rustdoc_set_correctly() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" use std::env; fn main() { assert_eq!(env::var("RUSTC").unwrap(), "rustc"); assert_eq!(env::var("RUSTDOC").unwrap(), "rustdoc"); } "#, ) .build(); p.cargo("bench").run(); } #[cargo_test] fn cfg_env_vars_available() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" use std::env; fn main() { let fam = env::var("CARGO_CFG_TARGET_FAMILY").unwrap(); if cfg!(unix) { assert_eq!(fam, "unix"); } else { assert_eq!(fam, "windows"); } } "#, ) .build(); p.cargo("bench").run(); } #[cargo_test] fn switch_features_rerun() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" [features] foo = [] "#, ) .file( "src/main.rs", r#" fn main() { println!(include_str!(concat!(env!("OUT_DIR"), "/output"))); } "#, ) .file( "build.rs", r#" use std::env; use std::fs; use std::path::Path; fn main() { let out_dir = env::var_os("OUT_DIR").unwrap(); let output = Path::new(&out_dir).join("output"); if env::var_os("CARGO_FEATURE_FOO").is_some() { fs::write(output, "foo").unwrap(); } else { fs::write(output, "bar").unwrap(); } } "#, ) .build(); p.cargo("build -v 
--features=foo").run(); p.rename_run("foo", "with_foo").with_stdout("foo\n").run(); p.cargo("build -v").run(); p.rename_run("foo", "without_foo") .with_stdout("bar\n") .run(); p.cargo("build -v --features=foo").run(); p.rename_run("foo", "with_foo2").with_stdout("foo\n").run(); } #[cargo_test] fn assume_build_script_when_build_rs_present() { let p = project() .file( "src/main.rs", r#" fn main() { if ! cfg!(foo) { panic!("the build script was not run"); } } "#, ) .file( "build.rs", r#" fn main() { println!("cargo:rustc-cfg=foo"); } "#, ) .build(); p.cargo("run -v").run(); } #[cargo_test] fn if_build_set_to_false_dont_treat_build_rs_as_build_script() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = false "#, ) .file( "src/main.rs", r#" fn main() { if cfg!(foo) { panic!("the build script was run"); } } "#, ) .file( "build.rs", r#" fn main() { println!("cargo:rustc-cfg=foo"); } "#, ) .build(); p.cargo("run -v").run(); } #[cargo_test] fn deterministic_rustc_dependency_flags() { // This bug is non-deterministic hence the large number of dependencies // in the hopes it will have a much higher chance of triggering it. 
Package::new("dep1", "0.1.0") .file( "Cargo.toml", r#" [project] name = "dep1" version = "0.1.0" authors = [] build = "build.rs" "#, ) .file( "build.rs", r#" fn main() { println!("cargo:rustc-flags=-L native=test1"); } "#, ) .file("src/lib.rs", "") .publish(); Package::new("dep2", "0.1.0") .file( "Cargo.toml", r#" [project] name = "dep2" version = "0.1.0" authors = [] build = "build.rs" "#, ) .file( "build.rs", r#" fn main() { println!("cargo:rustc-flags=-L native=test2"); } "#, ) .file("src/lib.rs", "") .publish(); Package::new("dep3", "0.1.0") .file( "Cargo.toml", r#" [project] name = "dep3" version = "0.1.0" authors = [] build = "build.rs" "#, ) .file( "build.rs", r#" fn main() { println!("cargo:rustc-flags=-L native=test3"); } "#, ) .file("src/lib.rs", "") .publish(); Package::new("dep4", "0.1.0") .file( "Cargo.toml", r#" [project] name = "dep4" version = "0.1.0" authors = [] build = "build.rs" "#, ) .file( "build.rs", r#" fn main() { println!("cargo:rustc-flags=-L native=test4"); } "#, ) .file("src/lib.rs", "") .publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [dependencies] dep1 = "*" dep2 = "*" dep3 = "*" dep4 = "*" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build -v") .with_stderr_contains( "\ [RUNNING] `rustc --crate-name foo [..] 
-L native=test1 -L native=test2 \ -L native=test3 -L native=test4` ", ) .run(); } #[cargo_test] fn links_duplicates_with_cycle() { // this tests that the links_duplicates are caught at resolver time let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] links = "a" build = "build.rs" [dependencies.a] path = "a" [dev-dependencies] b = { path = "b" } "#, ) .file("src/lib.rs", "") .file("build.rs", "") .file( "a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] links = "a" build = "build.rs" "#, ) .file("a/src/lib.rs", "") .file("a/build.rs", "") .file( "b/Cargo.toml", r#" [project] name = "b" version = "0.5.0" authors = [] [dependencies] foo = { path = ".." } "#, ) .file("b/src/lib.rs", "") .build(); p.cargo("build").with_status(101) .with_stderr("\ error: failed to select a version for `a`. ... required by package `foo v0.5.0 ([..])` versions that meet the requirements `*` are: 0.5.0 the package `a` links to the native library `a`, but it conflicts with a previous package which links to `a` as well: package `foo v0.5.0 ([..])` Only one package in the dependency graph may specify the same links value. This helps ensure that only one copy of a native library is linked in the final binary. Try to adjust your dependencies so that only one package uses the links ='a' value. For more information, see https://doc.rust-lang.org/cargo/reference/resolver.html#links. 
failed to select a version for `a` which could resolve this conflict ").run(); } #[cargo_test] fn rename_with_link_search_path() { _rename_with_link_search_path(false); } #[cargo_test] #[cfg_attr( target_os = "macos", ignore = "don't have a cdylib cross target on macos" )] fn rename_with_link_search_path_cross() { if cross_compile::disabled() { return; } _rename_with_link_search_path(true); } fn _rename_with_link_search_path(cross: bool) { let target_arg = if cross { format!(" --target={}", cross_compile::alternate()) } else { "".to_string() }; let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [lib] crate-type = ["cdylib"] "#, ) .file( "src/lib.rs", "#[no_mangle] pub extern fn cargo_test_foo() {}", ); let p = p.build(); p.cargo(&format!("build{}", target_arg)).run(); let p2 = project() .at("bar") .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) .file( "build.rs", r#" use std::env; use std::fs; use std::path::PathBuf; fn main() { // Move the `libfoo.so` from the root of our project into the // build directory. This way Cargo should automatically manage // `LD_LIBRARY_PATH` and such. 
let root = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap()); let file = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX); let src = root.join(&file); let dst_dir = PathBuf::from(env::var_os("OUT_DIR").unwrap()); let dst = dst_dir.join(&file); fs::copy(&src, &dst).unwrap(); // handle windows, like below drop(fs::copy(root.join("foo.dll.lib"), dst_dir.join("foo.dll.lib"))); println!("cargo:rerun-if-changed=build.rs"); if cfg!(target_env = "msvc") { println!("cargo:rustc-link-lib=foo.dll"); } else { println!("cargo:rustc-link-lib=foo"); } println!("cargo:rustc-link-search=all={}", dst.parent().unwrap().display()); } "#, ) .file( "src/main.rs", r#" extern { #[link_name = "cargo_test_foo"] fn foo(); } fn main() { unsafe { foo(); } } "#, ); let p2 = p2.build(); // Move the output `libfoo.so` into the directory of `p2`, and then delete // the `p` project. On macOS, the `libfoo.dylib` artifact references the // original path in `p` so we want to make sure that it can't find it (hence // the deletion). let root = if cross { p.root() .join("target") .join(cross_compile::alternate()) .join("debug") .join("deps") } else { p.root().join("target").join("debug").join("deps") }; let file = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX); let src = root.join(&file); let dst = p2.root().join(&file); fs::copy(&src, &dst).unwrap(); // copy the import library for windows, if it exists drop(fs::copy( &root.join("foo.dll.lib"), p2.root().join("foo.dll.lib"), )); remove_dir_all(p.root()).unwrap(); // Everything should work the first time p2.cargo(&format!("run{}", target_arg)).run(); // Now rename the root directory and rerun `cargo run`. Not only should we // not build anything but we also shouldn't crash. let mut new = p2.root(); new.pop(); new.push("bar2"); // For whatever reason on Windows right after we execute a binary it's very // unlikely that we're able to successfully delete or rename that binary. 
// It's not really clear why this is the case or if it's a bug in Cargo // holding a handle open too long. In an effort to reduce the flakiness of // this test though we throw this in a loop // // For some more information see #5481 and rust-lang/rust#48775 let mut i = 0; loop { let error = match fs::rename(p2.root(), &new) { Ok(()) => break, Err(e) => e, }; i += 1; if !cfg!(windows) || error.kind() != io::ErrorKind::PermissionDenied || i > 10 { panic!("failed to rename: {}", error); } println!("assuming {} is spurious, waiting to try again", error); thread::sleep(slow_cpu_multiplier(100)); } p2.cargo(&format!("run{}", target_arg)) .cwd(&new) .with_stderr( "\ [FINISHED] [..] [RUNNING] [..] ", ) .run(); } #[cargo_test] fn optional_build_script_dep() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] bar = { path = "bar", optional = true } [build-dependencies] bar = { path = "bar", optional = true } "#, ) .file( "build.rs", r#" #[cfg(feature = "bar")] extern crate bar; fn main() { #[cfg(feature = "bar")] { println!("cargo:rustc-env=FOO={}", bar::bar()); return } println!("cargo:rustc-env=FOO=0"); } "#, ) .file( "src/main.rs", r#" #[cfg(feature = "bar")] extern crate bar; fn main() { println!("{}", env!("FOO")); } "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0")) .file("bar/src/lib.rs", "pub fn bar() -> u32 { 1 }"); let p = p.build(); p.cargo("run").with_stdout("0\n").run(); p.cargo("run --features bar").with_stdout("1\n").run(); } #[cargo_test] fn optional_build_dep_and_required_normal_dep() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [dependencies] bar = { path = "./bar", optional = true } [build-dependencies] bar = { path = "./bar" } "#, ) .file("build.rs", "extern crate bar; fn main() { bar::bar(); }") .file( "src/main.rs", r#" #[cfg(feature = "bar")] extern crate bar; fn main() { #[cfg(feature = "bar")] { println!("{}", bar::bar()); } 
#[cfg(not(feature = "bar"))] { println!("0"); } } "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0")) .file("bar/src/lib.rs", "pub fn bar() -> u32 { 1 }"); let p = p.build(); p.cargo("run") .with_stdout("0") .with_stderr( "\ [COMPILING] bar v0.5.0 ([..]) [COMPILING] foo v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [RUNNING] `[..]foo[EXE]`", ) .run(); p.cargo("run --all-features") .with_stdout("1") .with_stderr( "\ [COMPILING] foo v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [RUNNING] `[..]foo[EXE]`", ) .run(); } #[cargo_test] fn using_rerun_if_changed_does_not_rebuild() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] "#, ) .file( "build.rs", r#" fn main() { println!("cargo:rerun-if-changed=build.rs"); } "#, ) .file("src/lib.rs", "") .build(); p.cargo("build").run(); p.cargo("build").with_stderr("[FINISHED] [..]").run(); } #[cargo_test] fn links_interrupted_can_restart() { // Test for a `links` dependent build script getting canceled and then // restarted. Steps: // 1. Build to establish fingerprints. // 2. Change something (an env var in this case) that triggers the // dependent build script to run again. Kill the top-level build script // while it is running (such as hitting Ctrl-C). // 3. Run the build again, it should re-run the build script. 
let bar = project() .at("bar") .file( "Cargo.toml", r#" [package] name = "bar" version = "0.5.0" authors = [] links = "foo" build = "build.rs" "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" fn main() { println!("cargo:rerun-if-env-changed=SOMEVAR"); } "#, ) .build(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.5.0" authors = [] build = "build.rs" [dependencies.bar] path = '{}' "#, bar.root().display() ), ) .file("src/lib.rs", "") .file( "build.rs", r#" use std::env; fn main() { println!("cargo:rebuild-if-changed=build.rs"); if std::path::Path::new("abort").exists() { panic!("Crash!"); } } "#, ) .build(); p.cargo("build").run(); // Simulate the user hitting Ctrl-C during a build. p.change_file("abort", ""); // Set SOMEVAR to trigger a rebuild. p.cargo("build") .env("SOMEVAR", "1") .with_stderr_contains("[..]Crash![..]") .with_status(101) .run(); fs::remove_file(p.root().join("abort")).unwrap(); // Try again without aborting the script. // ***This is currently broken, the script does not re-run. p.cargo("build -v") .env("SOMEVAR", "1") .with_stderr_contains("[RUNNING] [..]/foo-[..]/build-script-build[..]") .run(); } #[cargo_test] fn dev_dep_with_links() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] links = "x" [dev-dependencies] bar = { path = "./bar" } "#, ) .file("build.rs", "fn main() {}") .file("src/lib.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] links = "y" [dependencies] foo = { path = ".." } "#, ) .file("bar/build.rs", "fn main() {}") .file("bar/src/lib.rs", "") .build(); p.cargo("check --tests").run() } #[cargo_test] fn rerun_if_directory() { if !symlink_supported() { return; } // rerun-if-changed of a directory should rerun if any file in the directory changes. 
let p = project() .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("src/lib.rs", "") .file( "build.rs", r#" fn main() { println!("cargo:rerun-if-changed=somedir"); } "#, ) .build(); let dirty = || { p.cargo("check") .with_stderr( "[COMPILING] foo [..]\n\ [FINISHED] [..]", ) .run(); }; let fresh = || { p.cargo("check").with_stderr("[FINISHED] [..]").run(); }; // Start with a missing directory. dirty(); // Because the directory doesn't exist, it will trigger a rebuild every time. // https://github.com/rust-lang/cargo/issues/6003 dirty(); if is_coarse_mtime() { sleep_ms(1000); } // Empty directory. fs::create_dir(p.root().join("somedir")).unwrap(); dirty(); fresh(); if is_coarse_mtime() { sleep_ms(1000); } // Add a file. p.change_file("somedir/foo", ""); p.change_file("somedir/bar", ""); dirty(); fresh(); if is_coarse_mtime() { sleep_ms(1000); } // Add a symlink. p.symlink("foo", "somedir/link"); dirty(); fresh(); if is_coarse_mtime() { sleep_ms(1000); } // Move the symlink. fs::remove_file(p.root().join("somedir/link")).unwrap(); p.symlink("bar", "somedir/link"); dirty(); fresh(); if is_coarse_mtime() { sleep_ms(1000); } // Remove a file. fs::remove_file(p.root().join("somedir/foo")).unwrap(); dirty(); fresh(); } #[cargo_test] fn test_with_dep_metadata() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = { path = 'bar' } "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" fn main() { assert_eq!(std::env::var("DEP_BAR_FOO").unwrap(), "bar"); } "#, ) .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" links = 'bar' "#, ) .file("bar/src/lib.rs", "") .file( "bar/build.rs", r#" fn main() { println!("cargo:foo=bar"); } "#, ) .build(); p.cargo("test --lib").run(); } #[cargo_test] fn duplicate_script_with_extra_env() { // Test where a build script is run twice, that emits different rustc-env // and rustc-cfg values. In this case, one is run for host, the other for // target. 
if !cross_compile::can_run_on_host() { return; } let target = cross_compile::alternate(); let p = project() .file( "Cargo.toml", r#" [workspace] members = ["foo", "pm"] "#, ) .file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] pm = { path = "../pm" } "#, ) .file( "foo/src/lib.rs", &r#" //! ```rust //! #[cfg(not(mycfg="{target}"))] //! compile_error!{"expected mycfg set"} //! assert_eq!(env!("CRATE_TARGET"), "{target}"); //! assert_eq!(std::env::var("CRATE_TARGET").unwrap(), "{target}"); //! ``` #[test] fn check_target() { #[cfg(not(mycfg="{target}"))] compile_error!{"expected mycfg set"} // Compile-time assertion. assert_eq!(env!("CRATE_TARGET"), "{target}"); // Run-time assertion. assert_eq!(std::env::var("CRATE_TARGET").unwrap(), "{target}"); } "# .replace("{target}", target), ) .file( "foo/build.rs", r#" fn main() { println!("cargo:rustc-env=CRATE_TARGET={}", std::env::var("TARGET").unwrap()); println!("cargo:rustc-cfg=mycfg=\"{}\"", std::env::var("TARGET").unwrap()); } "#, ) .file( "pm/Cargo.toml", r#" [package] name = "pm" version = "0.1.0" [lib] proc-macro = true # This is just here to speed things up. doctest = false [dev-dependencies] foo = { path = "../foo" } "#, ) .file("pm/src/lib.rs", "") .build(); p.cargo("test --workspace --target") .arg(&target) .with_stdout_contains("test check_target ... ok") .run(); if cargo_test_support::is_nightly() { p.cargo("test --workspace -Z doctest-xcompile --doc --target") .arg(&target) .masquerade_as_nightly_cargo(&["doctest-xcompile"]) .with_stdout_contains("test src/lib.rs - (line 2) ... ok") .run(); } } #[cargo_test] fn wrong_output() { let p = project() .file("src/lib.rs", "") .file( "build.rs", r#" fn main() { println!("cargo:example"); } "#, ) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [COMPILING] foo [..] 
error: invalid output in build script of `foo v0.0.1 ([ROOT]/foo)`: `cargo:example` Expected a line with `cargo:key=value` with an `=` character, but none was found. See https://doc.rust-lang.org/cargo/reference/build-scripts.html#outputs-of-the-build-script \ for more information about build script outputs. ", ) .run(); } #[cargo_test] fn custom_build_closes_stdin() { // Ensure stdin is closed to prevent deadlock. // See https://github.com/rust-lang/cargo/issues/11196 let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.5.0" build = "build.rs" "#, ) .file("src/main.rs", "fn main() {}") .file( "build.rs", r#"fn main() { let mut line = String::new(); std::io::stdin().read_line(&mut line).unwrap(); }"#, ) .build(); p.cargo("build").run(); } cargo-0.66.0/tests/testsuite/build_script_env.rs000066400000000000000000000115761432416201200217770ustar00rootroot00000000000000//! Tests for build.rs rerun-if-env-changed and rustc-env use cargo_test_support::basic_manifest; use cargo_test_support::project; use cargo_test_support::sleep_ms; #[cargo_test] fn rerun_if_env_changes() { let p = project() .file("src/main.rs", "fn main() {}") .file( "build.rs", r#" fn main() { println!("cargo:rerun-if-env-changed=FOO"); } "#, ) .build(); p.cargo("build") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [FINISHED] [..] ", ) .run(); p.cargo("build") .env("FOO", "bar") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [FINISHED] [..] ", ) .run(); p.cargo("build") .env("FOO", "baz") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [FINISHED] [..] ", ) .run(); p.cargo("build") .env("FOO", "baz") .with_stderr("[FINISHED] [..]") .run(); p.cargo("build") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [FINISHED] [..] 
", ) .run(); } #[cargo_test] fn rerun_if_env_or_file_changes() { let p = project() .file("src/main.rs", "fn main() {}") .file( "build.rs", r#" fn main() { println!("cargo:rerun-if-env-changed=FOO"); println!("cargo:rerun-if-changed=foo"); } "#, ) .file("foo", "") .build(); p.cargo("build") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [FINISHED] [..] ", ) .run(); p.cargo("build") .env("FOO", "bar") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [FINISHED] [..] ", ) .run(); p.cargo("build") .env("FOO", "bar") .with_stderr("[FINISHED] [..]") .run(); sleep_ms(1000); p.change_file("foo", ""); p.cargo("build") .env("FOO", "bar") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [FINISHED] [..] ", ) .run(); } #[cargo_test] fn rustc_bootstrap() { let build_rs = r#" fn main() { println!("cargo:rustc-env=RUSTC_BOOTSTRAP=1"); } "#; let p = project() .file("Cargo.toml", &basic_manifest("has-dashes", "0.0.1")) .file("src/lib.rs", "#![feature(rustc_attrs)]") .file("build.rs", build_rs) .build(); // RUSTC_BOOTSTRAP unset on stable should error p.cargo("build") .with_stderr_contains("error: Cannot set `RUSTC_BOOTSTRAP=1` [..]") .with_stderr_contains( "help: [..] 
set the environment variable `RUSTC_BOOTSTRAP=has_dashes` [..]", ) .with_status(101) .run(); // nightly should warn whether or not RUSTC_BOOTSTRAP is set p.cargo("build") .masquerade_as_nightly_cargo(&["RUSTC_BOOTSTRAP"]) // NOTE: uses RUSTC_BOOTSTRAP so it will be propagated to rustc // (this matters when tests are being run with a beta or stable cargo) .env("RUSTC_BOOTSTRAP", "1") .with_stderr_contains("warning: Cannot set `RUSTC_BOOTSTRAP=1` [..]") .run(); // RUSTC_BOOTSTRAP set to the name of the library should warn p.cargo("build") .env("RUSTC_BOOTSTRAP", "has_dashes") .with_stderr_contains("warning: Cannot set `RUSTC_BOOTSTRAP=1` [..]") .run(); // RUSTC_BOOTSTRAP set to some random value should error p.cargo("build") .env("RUSTC_BOOTSTRAP", "bar") .with_stderr_contains("error: Cannot set `RUSTC_BOOTSTRAP=1` [..]") .with_stderr_contains( "help: [..] set the environment variable `RUSTC_BOOTSTRAP=has_dashes` [..]", ) .with_status(101) .run(); // Tests for binaries instead of libraries let p = project() .file("Cargo.toml", &basic_manifest("foo", "0.0.1")) .file("src/main.rs", "#![feature(rustc_attrs)] fn main() {}") .file("build.rs", build_rs) .build(); // nightly should warn when there's no library whether or not RUSTC_BOOTSTRAP is set p.cargo("build") .masquerade_as_nightly_cargo(&["RUSTC_BOOTSTRAP"]) // NOTE: uses RUSTC_BOOTSTRAP so it will be propagated to rustc // (this matters when tests are being run with a beta or stable cargo) .env("RUSTC_BOOTSTRAP", "1") .with_stderr_contains("warning: Cannot set `RUSTC_BOOTSTRAP=1` [..]") .run(); // RUSTC_BOOTSTRAP conditionally set when there's no library should error (regardless of the value) p.cargo("build") .env("RUSTC_BOOTSTRAP", "foo") .with_stderr_contains("error: Cannot set `RUSTC_BOOTSTRAP=1` [..]") .with_stderr_contains("help: [..] 
set the environment variable `RUSTC_BOOTSTRAP=1` [..]") .with_status(101) .run(); } cargo-0.66.0/tests/testsuite/build_script_extra_link_arg.rs000066400000000000000000000246071432416201200241770ustar00rootroot00000000000000//! Tests for additional link arguments. // NOTE: Many of these tests use `without_status()` when passing bogus flags // because MSVC link.exe just gives a warning on unknown flags (how helpful!), // and other linkers will return an error. use cargo_test_support::registry::Package; use cargo_test_support::{basic_bin_manifest, basic_lib_manifest, basic_manifest, project}; #[cargo_test] fn build_script_extra_link_arg_bin() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/main.rs", "fn main() {}") .file( "build.rs", r#" fn main() { println!("cargo:rustc-link-arg-bins=--this-is-a-bogus-flag"); } "#, ) .build(); p.cargo("build -v") .without_status() .with_stderr_contains( "[RUNNING] `rustc --crate-name foo [..]-C link-arg=--this-is-a-bogus-flag[..]", ) .run(); } #[cargo_test] fn build_script_extra_link_arg_bin_single() { let p = project() .file( "Cargo.toml", r#" [package] name = "foobar" version = "0.5.0" authors = ["wycats@example.com"] [[bin]] name = "foo" [[bin]] name = "bar" "#, ) .file("src/main.rs", "fn main() {}") .file( "build.rs", r#" fn main() { println!("cargo:rustc-link-arg-bins=--bogus-flag-all"); println!("cargo:rustc-link-arg-bin=foo=--bogus-flag-foo"); println!("cargo:rustc-link-arg-bin=bar=--bogus-flag-bar"); } "#, ) .build(); p.cargo("build -v") .without_status() .with_stderr_contains( "[RUNNING] `rustc --crate-name foo [..]-C link-arg=--bogus-flag-all -C link-arg=--bogus-flag-foo[..]", ) .with_stderr_contains( "[RUNNING] `rustc --crate-name bar [..]-C link-arg=--bogus-flag-all -C link-arg=--bogus-flag-bar[..]", ) .run(); } #[cargo_test] fn build_script_extra_link_arg() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/main.rs", "fn main() {}") .file( "build.rs", r#" fn 
main() { println!("cargo:rustc-link-arg=--this-is-a-bogus-flag"); } "#, ) .build(); p.cargo("build -v") .without_status() .with_stderr_contains( "[RUNNING] `rustc --crate-name foo [..]-C link-arg=--this-is-a-bogus-flag[..]", ) .run(); } #[cargo_test] fn link_arg_missing_target() { // Errors when a given target doesn't exist. let p = project() .file("src/lib.rs", "") .file( "build.rs", r#"fn main() { println!("cargo:rustc-link-arg-cdylib=--bogus"); }"#, ) .build(); // TODO: Uncomment this if cdylib restriction is re-added (see // cdylib_link_arg_transitive below). // p.cargo("check") // .with_status(101) // .with_stderr("\ // [COMPILING] foo [..] // error: invalid instruction `cargo:rustc-link-arg-cdylib` from build script of `foo v0.0.1 ([ROOT]/foo)` // The package foo v0.0.1 ([ROOT]/foo) does not have a cdylib target. // ") // .run(); p.change_file( "build.rs", r#"fn main() { println!("cargo:rustc-link-arg-bins=--bogus"); }"#, ); p.cargo("check") .with_status(101) .with_stderr("\ [COMPILING] foo [..] error: invalid instruction `cargo:rustc-link-arg-bins` from build script of `foo v0.0.1 ([ROOT]/foo)` The package foo v0.0.1 ([ROOT]/foo) does not have a bin target. ") .run(); p.change_file( "build.rs", r#"fn main() { println!("cargo:rustc-link-arg-bin=abc=--bogus"); }"#, ); p.cargo("check") .with_status(101) .with_stderr( "\ [COMPILING] foo [..] error: invalid instruction `cargo:rustc-link-arg-bin` from build script of `foo v0.0.1 ([ROOT]/foo)` The package foo v0.0.1 ([ROOT]/foo) does not have a bin target with the name `abc`. ", ) .run(); p.change_file( "build.rs", r#"fn main() { println!("cargo:rustc-link-arg-bin=abc"); }"#, ); p.cargo("check") .with_status(101) .with_stderr( "\ [COMPILING] foo [..] 
error: invalid instruction `cargo:rustc-link-arg-bin=abc` from build script of `foo v0.0.1 ([ROOT]/foo)` The instruction should have the form cargo:rustc-link-arg-bin=BIN=ARG ", ) .run(); } #[cargo_test] fn cdylib_link_arg_transitive() { // There was an unintended regression in 1.50 where rustc-link-arg-cdylib // arguments from dependencies were being applied in the parent package. // Previously it was silently ignored. // See https://github.com/rust-lang/cargo/issues/9562 let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [lib] crate-type = ["cdylib"] [dependencies] bar = {path="bar"} "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "1.0.0")) .file("bar/src/lib.rs", "") .file( "bar/build.rs", r#" fn main() { println!("cargo:rustc-link-arg-cdylib=--bogus"); } "#, ) .build(); p.cargo("build -v") .without_status() .with_stderr_contains( "\ [COMPILING] bar v1.0.0 [..] [RUNNING] `rustc --crate-name build_script_build bar/build.rs [..] [RUNNING] `[..]build-script-build[..] warning: cargo:rustc-link-arg-cdylib was specified in the build script of bar v1.0.0 \ ([ROOT]/foo/bar), but that package does not contain a cdylib target Allowing this was an unintended change in the 1.50 release, and may become an error in \ the future. For more information, see . [RUNNING] `rustc --crate-name bar bar/src/lib.rs [..] [COMPILING] foo v0.1.0 [..] [RUNNING] `rustc --crate-name foo src/lib.rs [..]-C link-arg=--bogus[..]` ", ) .run(); } #[cargo_test] fn link_arg_transitive_not_allowed() { // Verify that transitive dependencies don't pass link args. // // Note that rustc-link-arg doesn't have any errors or warnings when it is // unused. Perhaps that could be more aggressive, but it is difficult // since it could be used for test binaries. 
Package::new("bar", "1.0.0") .file("src/lib.rs", "") .file( "build.rs", r#" fn main() { println!("cargo:rustc-link-arg=--bogus"); } "#, ) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [lib] crate-type = ["cdylib"] [dependencies] bar = "1.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build -v") .with_stderr( "\ [UPDATING] [..] [DOWNLOADING] [..] [DOWNLOADED] [..] [COMPILING] bar v1.0.0 [RUNNING] `rustc --crate-name build_script_build [..] [RUNNING] `[..]/build-script-build[..] [RUNNING] `rustc --crate-name bar [..] [COMPILING] foo v0.1.0 [..] [RUNNING] `rustc --crate-name foo src/lib.rs [..] [FINISHED] dev [..] ", ) .with_stderr_does_not_contain("--bogus") .run(); } #[cargo_test] fn link_arg_with_doctest() { let p = project() .file( "src/lib.rs", r#" //! ``` //! let x = 5; //! assert_eq!(x, 5); //! ``` "#, ) .file( "build.rs", r#" fn main() { println!("cargo:rustc-link-arg=--this-is-a-bogus-flag"); } "#, ) .build(); p.cargo("test --doc -v") .without_status() .with_stderr_contains( "[RUNNING] `rustdoc [..]--crate-name foo [..]-C link-arg=--this-is-a-bogus-flag[..]", ) .run(); } #[cargo_test] fn build_script_extra_link_arg_tests() { let p = project() .file("Cargo.toml", &basic_lib_manifest("foo")) .file("src/lib.rs", "") .file("tests/test_foo.rs", "") .file( "build.rs", r#" fn main() { println!("cargo:rustc-link-arg-tests=--this-is-a-bogus-flag"); } "#, ) .build(); p.cargo("test -v") .without_status() .with_stderr_contains( "[RUNNING] `rustc --crate-name test_foo [..]-C link-arg=--this-is-a-bogus-flag[..]", ) .run(); } #[cargo_test] fn build_script_extra_link_arg_benches() { let p = project() .file("Cargo.toml", &basic_lib_manifest("foo")) .file("src/lib.rs", "") .file("benches/bench_foo.rs", "") .file( "build.rs", r#" fn main() { println!("cargo:rustc-link-arg-benches=--this-is-a-bogus-flag"); } "#, ) .build(); p.cargo("bench -v") .without_status() .with_stderr_contains( "[RUNNING] `rustc --crate-name bench_foo 
[..]-C link-arg=--this-is-a-bogus-flag[..]", ) .run(); } #[cargo_test] fn build_script_extra_link_arg_examples() { let p = project() .file("Cargo.toml", &basic_lib_manifest("foo")) .file("src/lib.rs", "") .file("examples/example_foo.rs", "fn main() {}") .file( "build.rs", r#" fn main() { println!("cargo:rustc-link-arg-examples=--this-is-a-bogus-flag"); } "#, ) .build(); p.cargo("build -v --examples") .without_status() .with_stderr_contains( "[RUNNING] `rustc --crate-name example_foo [..]-C link-arg=--this-is-a-bogus-flag[..]", ) .run(); } cargo-0.66.0/tests/testsuite/cache_messages.rs000066400000000000000000000327631432416201200213770ustar00rootroot00000000000000//! Tests for caching compiler diagnostics. use super::messages::raw_rustc_output; use cargo_test_support::tools; use cargo_test_support::{basic_manifest, is_coarse_mtime, project, registry::Package, sleep_ms}; fn as_str(bytes: &[u8]) -> &str { std::str::from_utf8(bytes).expect("valid utf-8") } #[cargo_test] fn simple() { // A simple example that generates two warnings (unused functions). let p = project() .file( "src/lib.rs", " fn a() {} fn b() {} ", ) .build(); // Capture what rustc actually emits. This is done to avoid relying on the // exact message formatting in rustc. let rustc_output = raw_rustc_output(&p, "src/lib.rs", &[]); // -q so the output is the same as rustc (no "Compiling" or "Finished"). let cargo_output1 = p .cargo("check -q --color=never") .exec_with_output() .expect("cargo to run"); assert_eq!(rustc_output, as_str(&cargo_output1.stderr)); assert!(cargo_output1.stdout.is_empty()); // Check that the cached version is exactly the same. 
let cargo_output2 = p .cargo("check -q") .exec_with_output() .expect("cargo to run"); assert_eq!(rustc_output, as_str(&cargo_output2.stderr)); assert!(cargo_output2.stdout.is_empty()); } // same as `simple`, except everything is using the short format #[cargo_test] fn simple_short() { let p = project() .file( "src/lib.rs", " fn a() {} fn b() {} ", ) .build(); let rustc_output = raw_rustc_output(&p, "src/lib.rs", &["--error-format=short"]); let cargo_output1 = p .cargo("check -q --color=never --message-format=short") .exec_with_output() .expect("cargo to run"); assert_eq!(rustc_output, as_str(&cargo_output1.stderr)); // assert!(cargo_output1.stdout.is_empty()); let cargo_output2 = p .cargo("check -q --message-format=short") .exec_with_output() .expect("cargo to run"); println!("{}", String::from_utf8_lossy(&cargo_output2.stdout)); assert_eq!(rustc_output, as_str(&cargo_output2.stderr)); assert!(cargo_output2.stdout.is_empty()); } #[cargo_test] fn color() { // Check enabling/disabling color. let p = project().file("src/lib.rs", "fn a() {}").build(); // Hack for issue in fwdansi 1.1. It is squashing multiple resets // into a single reset. // https://github.com/kennytm/fwdansi/issues/2 fn normalize(s: &str) -> String { #[cfg(windows)] return s.replace("\x1b[0m\x1b[0m", "\x1b[0m"); #[cfg(not(windows))] return s.to_string(); } let compare = |a, b| { assert_eq!(normalize(a), normalize(b)); }; // Capture the original color output. let rustc_color = raw_rustc_output(&p, "src/lib.rs", &["--color=always"]); assert!(rustc_color.contains("\x1b[")); // Capture the original non-color output. let rustc_nocolor = raw_rustc_output(&p, "src/lib.rs", &[]); assert!(!rustc_nocolor.contains("\x1b[")); // First pass, non-cached, with color, should be the same. let cargo_output1 = p .cargo("check -q --color=always") .exec_with_output() .expect("cargo to run"); compare(&rustc_color, as_str(&cargo_output1.stderr)); // Replay cached, with color. 
let cargo_output2 = p .cargo("check -q --color=always") .exec_with_output() .expect("cargo to run"); compare(&rustc_color, as_str(&cargo_output2.stderr)); // Replay cached, no color. let cargo_output_nocolor = p .cargo("check -q --color=never") .exec_with_output() .expect("cargo to run"); compare(&rustc_nocolor, as_str(&cargo_output_nocolor.stderr)); } #[cargo_test] fn cached_as_json() { // Check that cached JSON output is the same. let p = project().file("src/lib.rs", "fn a() {}").build(); // Grab the non-cached output, feature disabled. // NOTE: When stabilizing, this will need to be redone. let cargo_output = p .cargo("check --message-format=json") .exec_with_output() .expect("cargo to run"); assert!(cargo_output.status.success()); let orig_cargo_out = as_str(&cargo_output.stdout); assert!(orig_cargo_out.contains("compiler-message")); p.cargo("clean").run(); // Check JSON output, not fresh. let cargo_output1 = p .cargo("check --message-format=json") .exec_with_output() .expect("cargo to run"); assert_eq!(as_str(&cargo_output1.stdout), orig_cargo_out); // Check JSON output, fresh. let cargo_output2 = p .cargo("check --message-format=json") .exec_with_output() .expect("cargo to run"); // The only difference should be this field. let fix_fresh = as_str(&cargo_output2.stdout).replace("\"fresh\":true", "\"fresh\":false"); assert_eq!(fix_fresh, orig_cargo_out); } #[cargo_test] fn clears_cache_after_fix() { // Make sure the cache is invalidated when there is no output. let p = project().file("src/lib.rs", "fn asdf() {}").build(); // Fill the cache. p.cargo("check").with_stderr_contains("[..]asdf[..]").run(); let cpath = p .glob("target/debug/.fingerprint/foo-*/output-*") .next() .unwrap() .unwrap(); assert!(std::fs::read_to_string(cpath).unwrap().contains("asdf")); // Fix it. if is_coarse_mtime() { sleep_ms(1000); } p.change_file("src/lib.rs", ""); p.cargo("check") .with_stdout("") .with_stderr( "\ [CHECKING] foo [..] [FINISHED] [..] 
", ) .run(); assert_eq!( p.glob("target/debug/.fingerprint/foo-*/output-*").count(), 0 ); // And again, check the cache is correct. p.cargo("check") .with_stdout("") .with_stderr( "\ [FINISHED] [..] ", ) .run(); } #[cargo_test] fn rustdoc() { // Create a warning in rustdoc. let p = project() .file( "src/lib.rs", " #![warn(missing_docs)] pub fn f() {} ", ) .build(); let rustdoc_output = p .cargo("doc -q --color=always") .exec_with_output() .expect("rustdoc to run"); assert!(rustdoc_output.status.success()); let rustdoc_stderr = as_str(&rustdoc_output.stderr); assert!(rustdoc_stderr.contains("missing")); assert!(rustdoc_stderr.contains("\x1b[")); assert_eq!( p.glob("target/debug/.fingerprint/foo-*/output-*").count(), 1 ); // Check the cached output. let rustdoc_output = p .cargo("doc -q --color=always") .exec_with_output() .expect("rustdoc to run"); assert_eq!(as_str(&rustdoc_output.stderr), rustdoc_stderr); } #[cargo_test] fn fix() { // Make sure `fix` is not broken by caching. let p = project().file("src/lib.rs", "pub fn try() {}").build(); p.cargo("fix --edition --allow-no-vcs").run(); assert_eq!(p.read_file("src/lib.rs"), "pub fn r#try() {}"); } #[cargo_test] fn very_verbose() { // Handle cap-lints in dependencies. Package::new("bar", "1.0.0") .file("src/lib.rs", "fn not_used() {}") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "1.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("check -vv") .with_stderr_contains("[..]not_used[..]") .run(); p.cargo("check").with_stderr("[FINISHED] [..]").run(); p.cargo("check -vv") .with_stderr_contains("[..]not_used[..]") .run(); } #[cargo_test] fn doesnt_create_extra_files() { // Ensure it doesn't create `output` files when not needed. 
Package::new("dep", "1.0.0") .file("src/lib.rs", "fn unused() {}") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] dep = "1.0" "#, ) .file("src/lib.rs", "") .file("src/main.rs", "fn main() {}") .build(); p.cargo("build").run(); assert_eq!( p.glob("target/debug/.fingerprint/foo-*/output-*").count(), 0 ); assert_eq!( p.glob("target/debug/.fingerprint/dep-*/output-*").count(), 0 ); if is_coarse_mtime() { sleep_ms(1000); } p.change_file("src/lib.rs", "fn unused() {}"); p.cargo("build").run(); assert_eq!( p.glob("target/debug/.fingerprint/foo-*/output-*").count(), 1 ); } #[cargo_test] fn replay_non_json() { // Handles non-json output. let rustc = project() .at("rustc") .file("Cargo.toml", &basic_manifest("rustc_alt", "1.0.0")) .file( "src/main.rs", r#" fn main() { eprintln!("line 1"); eprintln!("line 2"); let r = std::process::Command::new("rustc") .args(std::env::args_os().skip(1)) .status(); std::process::exit(r.unwrap().code().unwrap_or(2)); } "#, ) .build(); rustc.cargo("build").run(); let p = project().file("src/lib.rs", "").build(); p.cargo("check") .env("RUSTC", rustc.bin("rustc_alt")) .with_stderr( "\ [CHECKING] foo [..] line 1 line 2 [FINISHED] dev [..] ", ) .run(); p.cargo("check") .env("RUSTC", rustc.bin("rustc_alt")) .with_stderr( "\ line 1 line 2 [FINISHED] dev [..] ", ) .run(); } #[cargo_test] fn caching_large_output() { // Handles large number of messages. // This is an arbitrary amount that is greater than the 100 used in // job_queue. This is here to check for deadlocks or any other problems. 
const COUNT: usize = 250; let rustc = project() .at("rustc") .file("Cargo.toml", &basic_manifest("rustc_alt", "1.0.0")) .file( "src/main.rs", &format!( r#" fn main() {{ for i in 0..{} {{ eprintln!("{{{{\"message\": \"test message {{}}\", \"level\": \"warning\", \ \"spans\": [], \"children\": [], \"rendered\": \"test message {{}}\"}}}}", i, i); }} let r = std::process::Command::new("rustc") .args(std::env::args_os().skip(1)) .status(); std::process::exit(r.unwrap().code().unwrap_or(2)); }} "#, COUNT ), ) .build(); let mut expected = String::new(); for i in 0..COUNT { expected.push_str(&format!("test message {}\n", i)); } rustc.cargo("build").run(); let p = project().file("src/lib.rs", "").build(); p.cargo("check") .env("RUSTC", rustc.bin("rustc_alt")) .with_stderr(&format!( "\ [CHECKING] foo [..] {}warning: `foo` (lib) generated 250 warnings [FINISHED] dev [..] ", expected )) .run(); p.cargo("check") .env("RUSTC", rustc.bin("rustc_alt")) .with_stderr(&format!( "\ {}warning: `foo` (lib) generated 250 warnings [FINISHED] dev [..] ", expected )) .run(); } #[cargo_test] fn rustc_workspace_wrapper() { let p = project() .file( "src/lib.rs", "pub fn f() { assert!(true); }\n\ fn unused_func() {}", ) .build(); p.cargo("check -v") .env("RUSTC_WORKSPACE_WRAPPER", tools::echo_wrapper()) .with_stderr_contains("WRAPPER CALLED: rustc --crate-name foo src/lib.rs [..]") .run(); // Check without a wrapper should rebuild p.cargo("check -v") .with_stderr_contains( "\ [CHECKING] foo [..] [RUNNING] `rustc[..] [WARNING] [..]unused_func[..] ", ) .with_stdout_does_not_contain("WRAPPER CALLED: rustc --crate-name foo src/lib.rs [..]") .run(); // Again, reading from the cache. p.cargo("check -v") .env("RUSTC_WORKSPACE_WRAPPER", tools::echo_wrapper()) .with_stderr_contains("[FRESH] foo [..]") .with_stdout_does_not_contain("WRAPPER CALLED: rustc --crate-name foo src/lib.rs [..]") .run(); // And `check` should also be fresh, reading from cache. 
p.cargo("check -v") .with_stderr_contains("[FRESH] foo [..]") .with_stderr_contains("[WARNING] [..]unused_func[..]") .with_stdout_does_not_contain("WRAPPER CALLED: rustc --crate-name foo src/lib.rs [..]") .run(); } #[cargo_test] fn wacky_hashless_fingerprint() { // On Windows, executables don't have hashes. This checks for a bad // assumption that caused bad caching. let p = project() .file("src/bin/a.rs", "fn main() { let unused = 1; }") .file("src/bin/b.rs", "fn main() {}") .build(); p.cargo("build --bin b") .with_stderr_does_not_contain("[..]unused[..]") .run(); p.cargo("build --bin a") .with_stderr_contains("[..]unused[..]") .run(); // This should not pick up the cache from `a`. p.cargo("build --bin b") .with_stderr_does_not_contain("[..]unused[..]") .run(); } cargo-0.66.0/tests/testsuite/cargo_add/000077500000000000000000000000001432416201200177675ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/add-basic.in/000077500000000000000000000000001432416201200222035ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/add-basic.in/Cargo.toml000066400000000000000000000001121432416201200241250ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/add-basic.in/src/000077500000000000000000000000001432416201200227725ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/add-basic.in/src/lib.rs000066400000000000000000000000001432416201200240740ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/add_basic/000077500000000000000000000000001432416201200216605ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/add_basic/in000077700000000000000000000000001432416201200245562../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/add_basic/mod.rs000066400000000000000000000013241432416201200230050ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use 
cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn add_basic() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/add_basic/out/000077500000000000000000000000001432416201200224675ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/add_basic/out/Cargo.toml000066400000000000000000000001631432416201200244170ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package = "99999.0.0" cargo-0.66.0/tests/testsuite/cargo_add/add_basic/stderr.log000066400000000000000000000001301432416201200236600ustar00rootroot00000000000000 Updating `dummy-registry` index Adding my-package v99999.0.0 to dependencies. 
cargo-0.66.0/tests/testsuite/cargo_add/add_basic/stdout.log000066400000000000000000000000001432416201200236730ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/add_multiple/000077500000000000000000000000001432416201200224325ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/add_multiple/in000077700000000000000000000000001432416201200253302../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/add_multiple/mod.rs000066400000000000000000000013441432416201200235610ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn add_multiple() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package1 my-package2") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/add_multiple/out/000077500000000000000000000000001432416201200232415ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/add_multiple/out/Cargo.toml000066400000000000000000000002161432416201200251700ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package1 = "99999.0.0" my-package2 = "99999.0.0" cargo-0.66.0/tests/testsuite/cargo_add/add_multiple/stderr.log000066400000000000000000000002161432416201200244370ustar00rootroot00000000000000 Updating `dummy-registry` index Adding my-package1 v99999.0.0 to dependencies. Adding my-package2 v99999.0.0 to dependencies. 
cargo-0.66.0/tests/testsuite/cargo_add/add_multiple/stdout.log000066400000000000000000000000001432416201200244450ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/add_normalized_name_external/000077500000000000000000000000001432416201200256455ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/add_normalized_name_external/in000077700000000000000000000000001432416201200305432../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/add_normalized_name_external/mod.rs000066400000000000000000000013661432416201200270000ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn add_normalized_name_external() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("linked_hash_map Inflector") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/add_normalized_name_external/out/000077500000000000000000000000001432416201200264545ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/add_normalized_name_external/out/Cargo.toml000066400000000000000000000002111432416201200303760ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] inflector = "0.11.4" linked-hash-map = "0.5.4" cargo-0.66.0/tests/testsuite/cargo_add/add_normalized_name_external/stderr.log000066400000000000000000000010611432416201200276510ustar00rootroot00000000000000 Updating `dummy-registry` index warning: translating `linked_hash_map` to `linked-hash-map` warning: 
translating `Inflector` to `inflector` Adding linked-hash-map v0.5.4 to dependencies. Features: - clippy - heapsize - heapsize_impl - nightly - serde - serde_impl - serde_test Adding inflector v0.11.4 to dependencies. Features: + heavyweight + lazy_static + regex - unstable cargo-0.66.0/tests/testsuite/cargo_add/add_normalized_name_external/stdout.log000066400000000000000000000000001432416201200276600ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/build/000077500000000000000000000000001432416201200210665ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/build/in000077700000000000000000000000001432416201200237642../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/build/mod.rs000066400000000000000000000013611432416201200222140ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn build() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("--build my-build-package1 my-build-package2") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/build/out/000077500000000000000000000000001432416201200216755ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/build/out/Cargo.toml000066400000000000000000000002401432416201200236210ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [build-dependencies] my-build-package1 = "99999.0.0" my-build-package2 = "99999.0.0" 
cargo-0.66.0/tests/testsuite/cargo_add/build/stderr.log000066400000000000000000000002461432416201200230760ustar00rootroot00000000000000 Updating `dummy-registry` index Adding my-build-package1 v99999.0.0 to build-dependencies. Adding my-build-package2 v99999.0.0 to build-dependencies. cargo-0.66.0/tests/testsuite/cargo_add/build/stdout.log000066400000000000000000000000001432416201200231010ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/build_prefer_existing_version/000077500000000000000000000000001432416201200261105ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/build_prefer_existing_version/in/000077500000000000000000000000001432416201200265165ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/build_prefer_existing_version/in/Cargo.toml000066400000000000000000000004431432416201200304470ustar00rootroot00000000000000[workspace] exclude = ["dependency"] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] cargo-list-test-fixture-dependency = { version = "0.0.0", path = "dependency", optional = true, default-features = false, features = ["one", "two"], registry = "alternative" } cargo-0.66.0/tests/testsuite/cargo_add/build_prefer_existing_version/in/dependency/000077500000000000000000000000001432416201200306345ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/build_prefer_existing_version/in/dependency/Cargo.toml000066400000000000000000000001631432416201200325640ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture-dependency" version = "0.0.0" [features] one = [] two = [] 
cargo-0.66.0/tests/testsuite/cargo_add/build_prefer_existing_version/in/dependency/src/000077500000000000000000000000001432416201200314235ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/build_prefer_existing_version/in/dependency/src/lib.rs000066400000000000000000000000001432416201200325250ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/build_prefer_existing_version/in/src/000077500000000000000000000000001432416201200273055ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/build_prefer_existing_version/in/src/lib.rs000066400000000000000000000000001432416201200304070ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/build_prefer_existing_version/mod.rs000066400000000000000000000016451432416201200272430ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_alt_registry; #[cargo_test] fn build_prefer_existing_version() { init_alt_registry(); let project = Project::from_template("tests/testsuite/cargo_add/build_prefer_existing_version/in"); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("cargo-list-test-fixture-dependency --build") .current_dir(cwd) .assert() .success() .stdout_matches_path("tests/testsuite/cargo_add/build_prefer_existing_version/stdout.log") .stderr_matches_path("tests/testsuite/cargo_add/build_prefer_existing_version/stderr.log"); assert_ui().subset_matches( "tests/testsuite/cargo_add/build_prefer_existing_version/out", &project_root, ); } cargo-0.66.0/tests/testsuite/cargo_add/build_prefer_existing_version/out/000077500000000000000000000000001432416201200267175ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/build_prefer_existing_version/out/Cargo.toml000066400000000000000000000006431432416201200306520ustar00rootroot00000000000000[workspace] exclude = ["dependency"] [package] 
name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] cargo-list-test-fixture-dependency = { version = "0.0.0", path = "dependency", optional = true, default-features = false, features = ["one", "two"], registry = "alternative" } [build-dependencies] cargo-list-test-fixture-dependency = { version = "0.0.0", path = "dependency", registry = "alternative" } cargo-0.66.0/tests/testsuite/cargo_add/build_prefer_existing_version/out/dependency/000077500000000000000000000000001432416201200310355ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/build_prefer_existing_version/out/dependency/Cargo.toml000066400000000000000000000001631432416201200327650ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture-dependency" version = "0.0.0" [features] one = [] two = [] cargo-0.66.0/tests/testsuite/cargo_add/build_prefer_existing_version/stderr.log000066400000000000000000000002141432416201200301130ustar00rootroot00000000000000 Adding cargo-list-test-fixture-dependency (local) to build-dependencies. 
Features: - one - two cargo-0.66.0/tests/testsuite/cargo_add/build_prefer_existing_version/stdout.log000066400000000000000000000000001432416201200301230ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/change_rename_target/000077500000000000000000000000001432416201200241115ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/change_rename_target/in/000077500000000000000000000000001432416201200245175ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/change_rename_target/in/Cargo.toml000066400000000000000000000002511432416201200264450ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] some-package = { package = "my-package1", version = "0.1.1", optional = true } cargo-0.66.0/tests/testsuite/cargo_add/change_rename_target/in/src/000077500000000000000000000000001432416201200253065ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/change_rename_target/in/src/lib.rs000066400000000000000000000000001432416201200264100ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/change_rename_target/mod.rs000066400000000000000000000013661432416201200252440ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn change_rename_target() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package2 --rename some-package") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/change_rename_target/out/000077500000000000000000000000001432416201200247205ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/change_rename_target/out/Cargo.toml000066400000000000000000000002551432416201200266520ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] some-package = { package = "my-package2", version = "99999.0.0", optional = true } cargo-0.66.0/tests/testsuite/cargo_add/change_rename_target/stderr.log000066400000000000000000000001421432416201200261140ustar00rootroot00000000000000 Updating `dummy-registry` index Adding my-package2 v99999.0.0 to optional dependencies. cargo-0.66.0/tests/testsuite/cargo_add/change_rename_target/stdout.log000066400000000000000000000000001432416201200261240ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/default_features/000077500000000000000000000000001432416201200233115ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/default_features/in000077700000000000000000000000001432416201200262072../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/default_features/mod.rs000066400000000000000000000014011432416201200244320ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn default_features() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package1 my-package2@0.4.1 --default-features") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/default_features/out/000077500000000000000000000000001432416201200241205ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/default_features/out/Cargo.toml000066400000000000000000000002121432416201200260430ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package1 = "99999.0.0" my-package2 = "0.4.1" cargo-0.66.0/tests/testsuite/cargo_add/default_features/stderr.log000066400000000000000000000002121432416201200253120ustar00rootroot00000000000000 Updating `dummy-registry` index Adding my-package1 v99999.0.0 to dependencies. Adding my-package2 v0.4.1 to dependencies. cargo-0.66.0/tests/testsuite/cargo_add/default_features/stdout.log000066400000000000000000000000001432416201200253240ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/deprecated_default_features/000077500000000000000000000000001432416201200254715ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/deprecated_default_features/in/000077500000000000000000000000001432416201200260775ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/deprecated_default_features/in/Cargo.toml000066400000000000000000000002331432416201200300250ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package = { version = "99999.0.0", default_features = false } cargo-0.66.0/tests/testsuite/cargo_add/deprecated_default_features/in/src/000077500000000000000000000000001432416201200266665ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/deprecated_default_features/in/src/lib.rs000066400000000000000000000000001432416201200277700ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/deprecated_default_features/mod.rs000066400000000000000000000013471432416201200266230ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; 
use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn deprecated_default_features() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package") .current_dir(&cwd) .assert() .failure() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/deprecated_default_features/out/000077500000000000000000000000001432416201200263005ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/deprecated_default_features/out/Cargo.toml000066400000000000000000000002331432416201200302260ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package = { version = "99999.0.0", default_features = false } cargo-0.66.0/tests/testsuite/cargo_add/deprecated_default_features/stderr.log000066400000000000000000000001451432416201200274770ustar00rootroot00000000000000error: Use of `default_features` in `my-package` is unsupported, please switch to `default-features` cargo-0.66.0/tests/testsuite/cargo_add/deprecated_default_features/stdout.log000066400000000000000000000000001432416201200275040ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/deprecated_section/000077500000000000000000000000001432416201200236135ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/deprecated_section/in/000077500000000000000000000000001432416201200242215ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/deprecated_section/in/Cargo.toml000066400000000000000000000002461432416201200261530ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dev_dependencies] my-package = 
"99999.0.0" [build_dependencies] my-package = "99999.0.0" cargo-0.66.0/tests/testsuite/cargo_add/deprecated_section/in/src/000077500000000000000000000000001432416201200250105ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/deprecated_section/in/src/lib.rs000066400000000000000000000000001432416201200261120ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/deprecated_section/mod.rs000066400000000000000000000013361432416201200247430ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn deprecated_section() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package") .current_dir(&cwd) .assert() .failure() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/deprecated_section/out/000077500000000000000000000000001432416201200244225ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/deprecated_section/out/Cargo.toml000066400000000000000000000002461432416201200263540ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dev_dependencies] my-package = "99999.0.0" [build_dependencies] my-package = "99999.0.0" cargo-0.66.0/tests/testsuite/cargo_add/deprecated_section/stderr.log000066400000000000000000000001341432416201200256170ustar00rootroot00000000000000error: Deprecated dependency sections are unsupported: dev_dependencies, build_dependencies 
cargo-0.66.0/tests/testsuite/cargo_add/deprecated_section/stdout.log000066400000000000000000000000001432416201200256260ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit/000077500000000000000000000000001432416201200250375ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit/in/000077500000000000000000000000001432416201200254455ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit/in/Cargo.toml000066400000000000000000000001731432416201200273760ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] [workspace.dependencies] foo = { version = "0.0.0", path = "./dependency"}cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit/in/dependency/000077500000000000000000000000001432416201200275635ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit/in/dependency/Cargo.toml000066400000000000000000000000511432416201200315070ustar00rootroot00000000000000[package] name = "foo" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit/in/dependency/src/000077500000000000000000000000001432416201200303525ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit/in/dependency/src/lib.rs000066400000000000000000000000001432416201200314540ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit/in/primary/000077500000000000000000000000001432416201200271305ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit/in/primary/Cargo.toml000066400000000000000000000000501432416201200310530ustar00rootroot00000000000000[package] name = "bar" version = 
"0.0.0"cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit/in/primary/src/000077500000000000000000000000001432416201200277175ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit/in/primary/src/lib.rs000066400000000000000000000000001432416201200310210ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit/mod.rs000066400000000000000000000013471432416201200261710ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn detect_workspace_inherit() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .args(["foo", "-p", "bar"]) .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit/out/000077500000000000000000000000001432416201200256465ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit/out/Cargo.toml000066400000000000000000000001731432416201200275770ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] [workspace.dependencies] foo = { version = "0.0.0", path = "./dependency"}cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit/out/dependency/000077500000000000000000000000001432416201200277645ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit/out/dependency/Cargo.toml000066400000000000000000000000511432416201200317100ustar00rootroot00000000000000[package] name = "foo" version = "0.0.0" 
cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit/out/primary/000077500000000000000000000000001432416201200273315ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit/out/primary/Cargo.toml000066400000000000000000000001161432416201200312570ustar00rootroot00000000000000[package] name = "bar" version = "0.0.0" [dependencies] foo.workspace = true cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit/stderr.log000066400000000000000000000000561432416201200270460ustar00rootroot00000000000000 Adding foo (workspace) to dependencies. cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit/stdout.log000066400000000000000000000000001432416201200270520ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_features/000077500000000000000000000000001432416201200267355ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/000077500000000000000000000000001432416201200273435ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/Cargo.toml000066400000000000000000000002231432416201200312700ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] [workspace.dependencies] foo = { version = "0.0.0", path = "./dependency", features = ["merge"] } cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/dependency/000077500000000000000000000000001432416201200314615ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/dependency/Cargo.toml000066400000000000000000000005001432416201200334040ustar00rootroot00000000000000[package] name = "foo" version = "0.0.0" [features] default-base = [] default-test-base = [] default-merge-base = [] default = ["default-base", "default-test-base", "default-merge-base"] test-base = [] test = ["test-base", "default-test-base"] merge-base = [] merge = ["merge-base", 
"default-merge-base"] unrelated = []cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/dependency/src/000077500000000000000000000000001432416201200322505ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/dependency/src/lib.rs000066400000000000000000000000001432416201200333520ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/primary/000077500000000000000000000000001432416201200310265ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/primary/Cargo.toml000066400000000000000000000000501432416201200327510ustar00rootroot00000000000000[package] name = "bar" version = "0.0.0"cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/primary/src/000077500000000000000000000000001432416201200316155ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_features/in/primary/src/lib.rs000066400000000000000000000000001432416201200327170ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_features/mod.rs000066400000000000000000000014061432416201200300630ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn detect_workspace_inherit_features() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .args(["foo", "-p", "bar", "--features", "test"]) .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_features/out/000077500000000000000000000000001432416201200275445ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_features/out/Cargo.toml000066400000000000000000000002231432416201200314710ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] [workspace.dependencies] foo = { version = "0.0.0", path = "./dependency", features = ["merge"] } cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_features/out/dependency/000077500000000000000000000000001432416201200316625ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_features/out/dependency/Cargo.toml000066400000000000000000000005001432416201200336050ustar00rootroot00000000000000[package] name = "foo" version = "0.0.0" [features] default-base = [] default-test-base = [] default-merge-base = [] default = ["default-base", "default-test-base", "default-merge-base"] test-base = [] test = ["test-base", "default-test-base"] merge-base = [] merge = ["merge-base", "default-merge-base"] unrelated = []cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_features/out/primary/000077500000000000000000000000001432416201200312275ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_features/out/primary/Cargo.toml000066400000000000000000000001511432416201200331540ustar00rootroot00000000000000[package] name = "bar" version = "0.0.0" [dependencies] foo = { workspace = true, features = ["test"] } cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_features/stderr.log000066400000000000000000000004461432416201200307470ustar00rootroot00000000000000 Adding foo (workspace) to dependencies. 
Features as of v0.0.0: + default-base + default-merge-base + default-test-base + merge + merge-base + test + test-base - unrelated cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_features/stdout.log000066400000000000000000000000001432416201200307500ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_optional/000077500000000000000000000000001432416201200267445ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/000077500000000000000000000000001432416201200273525ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/Cargo.toml000066400000000000000000000001731432416201200313030ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] [workspace.dependencies] foo = { version = "0.0.0", path = "./dependency"}cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/dependency/000077500000000000000000000000001432416201200314705ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/dependency/Cargo.toml000066400000000000000000000000511432416201200334140ustar00rootroot00000000000000[package] name = "foo" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/dependency/src/000077500000000000000000000000001432416201200322575ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/dependency/src/lib.rs000066400000000000000000000000001432416201200333610ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/primary/000077500000000000000000000000001432416201200310355ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/primary/Cargo.toml000066400000000000000000000000501432416201200327600ustar00rootroot00000000000000[package] name = "bar" version = 
"0.0.0"cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/primary/src/000077500000000000000000000000001432416201200316245ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_optional/in/primary/src/lib.rs000066400000000000000000000000001432416201200327260ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_optional/mod.rs000066400000000000000000000013761432416201200301000ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn detect_workspace_inherit_optional() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .args(["foo", "-p", "bar", "--optional"]) .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_optional/out/000077500000000000000000000000001432416201200275535ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_optional/out/Cargo.toml000066400000000000000000000001731432416201200315040ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] [workspace.dependencies] foo = { version = "0.0.0", path = "./dependency"}cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_optional/out/dependency/000077500000000000000000000000001432416201200316715ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_optional/out/dependency/Cargo.toml000066400000000000000000000000511432416201200336150ustar00rootroot00000000000000[package] name = 
"foo" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_optional/out/primary/000077500000000000000000000000001432416201200312365ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_optional/out/primary/Cargo.toml000066400000000000000000000001451432416201200331660ustar00rootroot00000000000000[package] name = "bar" version = "0.0.0" [dependencies] foo = { workspace = true, optional = true } cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_optional/stderr.log000066400000000000000000000000671432416201200307550ustar00rootroot00000000000000 Adding foo (workspace) to optional dependencies. cargo-0.66.0/tests/testsuite/cargo_add/detect_workspace_inherit_optional/stdout.log000066400000000000000000000000001432416201200307570ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/dev/000077500000000000000000000000001432416201200205455ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/dev/in000077700000000000000000000000001432416201200234432../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/dev/mod.rs000066400000000000000000000013511432416201200216720ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn dev() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("--dev my-dev-package1 my-dev-package2") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/dev/out/000077500000000000000000000000001432416201200213545ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/dev/out/Cargo.toml000066400000000000000000000002321432416201200233010ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dev-dependencies] my-dev-package1 = "99999.0.0" my-dev-package2 = "99999.0.0" cargo-0.66.0/tests/testsuite/cargo_add/dev/stderr.log000066400000000000000000000002361432416201200225540ustar00rootroot00000000000000 Updating `dummy-registry` index Adding my-dev-package1 v99999.0.0 to dev-dependencies. Adding my-dev-package2 v99999.0.0 to dev-dependencies. cargo-0.66.0/tests/testsuite/cargo_add/dev/stdout.log000066400000000000000000000000001432416201200225600ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/dev_build_conflict/000077500000000000000000000000001432416201200236055ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/dev_build_conflict/in000077700000000000000000000000001432416201200265032../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/dev_build_conflict/mod.rs000066400000000000000000000013511432416201200247320ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn dev_build_conflict() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package --dev --build") .current_dir(cwd) .assert() .code(1) .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/dev_build_conflict/out/000077500000000000000000000000001432416201200244145ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/dev_build_conflict/out/Cargo.toml000066400000000000000000000001121432416201200263360ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/dev_build_conflict/stderr.log000066400000000000000000000003431432416201200256130ustar00rootroot00000000000000error: The argument '--dev' cannot be used with '--build' USAGE: cargo add [OPTIONS] [@] ... cargo add [OPTIONS] --path ... cargo add [OPTIONS] --git ... For more information try --help cargo-0.66.0/tests/testsuite/cargo_add/dev_build_conflict/stdout.log000066400000000000000000000000001432416201200256200ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/dev_prefer_existing_version/000077500000000000000000000000001432416201200255675ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/dev_prefer_existing_version/in/000077500000000000000000000000001432416201200261755ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/dev_prefer_existing_version/in/Cargo.toml000066400000000000000000000004431432416201200301260ustar00rootroot00000000000000[workspace] exclude = ["dependency"] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] cargo-list-test-fixture-dependency = { version = "0.0.0", path = "dependency", optional = true, default-features = false, features = ["one", "two"], registry = "alternative" } cargo-0.66.0/tests/testsuite/cargo_add/dev_prefer_existing_version/in/dependency/000077500000000000000000000000001432416201200303135ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/dev_prefer_existing_version/in/dependency/Cargo.toml000066400000000000000000000001631432416201200322430ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture-dependency" 
version = "0.0.0" [features] one = [] two = [] cargo-0.66.0/tests/testsuite/cargo_add/dev_prefer_existing_version/in/dependency/src/000077500000000000000000000000001432416201200311025ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/dev_prefer_existing_version/in/dependency/src/lib.rs000066400000000000000000000000001432416201200322040ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/dev_prefer_existing_version/in/src/000077500000000000000000000000001432416201200267645ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/dev_prefer_existing_version/in/src/lib.rs000066400000000000000000000000001432416201200300660ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/dev_prefer_existing_version/mod.rs000066400000000000000000000014141432416201200267140ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_alt_registry; use cargo_test_support::curr_dir; #[cargo_test] fn dev_prefer_existing_version() { init_alt_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("cargo-list-test-fixture-dependency --dev") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/dev_prefer_existing_version/out/000077500000000000000000000000001432416201200263765ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/dev_prefer_existing_version/out/Cargo.toml000066400000000000000000000005641432416201200303330ustar00rootroot00000000000000[workspace] exclude = ["dependency"] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] 
cargo-list-test-fixture-dependency = { version = "0.0.0", path = "dependency", optional = true, default-features = false, features = ["one", "two"], registry = "alternative" } [dev-dependencies] cargo-list-test-fixture-dependency = { path = "dependency" } cargo-0.66.0/tests/testsuite/cargo_add/dev_prefer_existing_version/out/dependency/000077500000000000000000000000001432416201200305145ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/dev_prefer_existing_version/out/dependency/Cargo.toml000066400000000000000000000001631432416201200324440ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture-dependency" version = "0.0.0" [features] one = [] two = [] cargo-0.66.0/tests/testsuite/cargo_add/dev_prefer_existing_version/stderr.log000066400000000000000000000002271432416201200275760ustar00rootroot00000000000000 Adding cargo-list-test-fixture-dependency (local) to dev-dependencies. Features as of v0.0.0: - one - two cargo-0.66.0/tests/testsuite/cargo_add/dev_prefer_existing_version/stdout.log000066400000000000000000000000001432416201200276020ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/dry_run/000077500000000000000000000000001432416201200214515ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/dry_run/in000077700000000000000000000000001432416201200243472../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/dry_run/mod.rs000066400000000000000000000013341432416201200225770ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn dry_run() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package --dry-run") .current_dir(cwd) .assert() .success() 
.stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/dry_run/out/000077500000000000000000000000001432416201200222605ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/dry_run/out/Cargo.toml000066400000000000000000000001121432416201200242020ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/dry_run/stderr.log000066400000000000000000000001751432416201200234620ustar00rootroot00000000000000 Updating `dummy-registry` index Adding my-package v99999.0.0 to dependencies. warning: aborting add due to dry run cargo-0.66.0/tests/testsuite/cargo_add/dry_run/stdout.log000066400000000000000000000000001432416201200234640ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/features/000077500000000000000000000000001432416201200216055ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/features/in000077700000000000000000000000001432416201200245032../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/features/mod.rs000066400000000000000000000013421432416201200227320ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn features() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("your-face --features eyes") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/features/out/000077500000000000000000000000001432416201200224145ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/features/out/Cargo.toml000066400000000000000000000002251432416201200243430ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] your-face = { version = "99999.0.0", features = ["eyes"] } cargo-0.66.0/tests/testsuite/cargo_add/features/stderr.log000066400000000000000000000002771432416201200236210ustar00rootroot00000000000000 Updating `dummy-registry` index Adding your-face v99999.0.0 to dependencies. Features: + eyes - ears - mouth - nose cargo-0.66.0/tests/testsuite/cargo_add/features/stdout.log000066400000000000000000000000001432416201200236200ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/features_empty/000077500000000000000000000000001432416201200230235ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/features_empty/in000077700000000000000000000000001432416201200257212../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/features_empty/mod.rs000066400000000000000000000013461432416201200241540ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn features_empty() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("your-face --features ''") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/features_empty/out/000077500000000000000000000000001432416201200236325ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/features_empty/out/Cargo.toml000066400000000000000000000001621432416201200255610ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] your-face = "99999.0.0" cargo-0.66.0/tests/testsuite/cargo_add/features_empty/stderr.log000066400000000000000000000002771432416201200250370ustar00rootroot00000000000000 Updating `dummy-registry` index Adding your-face v99999.0.0 to dependencies. Features: - ears - eyes - mouth - nose cargo-0.66.0/tests/testsuite/cargo_add/features_empty/stdout.log000066400000000000000000000000001432416201200250360ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/features_multiple_occurrences/000077500000000000000000000000001432416201200261135ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/features_multiple_occurrences/in000077700000000000000000000000001432416201200310112../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/features_multiple_occurrences/mod.rs000066400000000000000000000014071432416201200272420ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn features_multiple_occurrences() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("your-face --features eyes --features nose") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/features_multiple_occurrences/out/000077500000000000000000000000001432416201200267225ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/features_multiple_occurrences/out/Cargo.toml000066400000000000000000000002351432416201200306520ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] your-face = { version = "99999.0.0", features = ["eyes", "nose"] } cargo-0.66.0/tests/testsuite/cargo_add/features_multiple_occurrences/stderr.log000066400000000000000000000002771432416201200301270ustar00rootroot00000000000000 Updating `dummy-registry` index Adding your-face v99999.0.0 to dependencies. Features: + eyes + nose - ears - mouth cargo-0.66.0/tests/testsuite/cargo_add/features_multiple_occurrences/stdout.log000066400000000000000000000000001432416201200301260ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/features_preserve/000077500000000000000000000000001432416201200235205ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/features_preserve/in/000077500000000000000000000000001432416201200241265ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/features_preserve/in/Cargo.toml000066400000000000000000000002251432416201200260550ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] your-face = { version = "99999.0.0", features = ["eyes"] } cargo-0.66.0/tests/testsuite/cargo_add/features_preserve/in/src/000077500000000000000000000000001432416201200247155ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/features_preserve/in/src/lib.rs000066400000000000000000000000001432416201200260170ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/features_preserve/mod.rs000066400000000000000000000013331432416201200246450ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; 
use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn features_preserve() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("your-face") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/features_preserve/out/000077500000000000000000000000001432416201200243275ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/features_preserve/out/Cargo.toml000066400000000000000000000002251432416201200262560ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] your-face = { version = "99999.0.0", features = ["eyes"] } cargo-0.66.0/tests/testsuite/cargo_add/features_preserve/stderr.log000066400000000000000000000002771432416201200255340ustar00rootroot00000000000000 Updating `dummy-registry` index Adding your-face v99999.0.0 to dependencies. 
Features: + eyes - ears - mouth - nose cargo-0.66.0/tests/testsuite/cargo_add/features_preserve/stdout.log000066400000000000000000000000001432416201200255330ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/features_spaced_values/000077500000000000000000000000001432416201200245035ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/features_spaced_values/in000077700000000000000000000000001432416201200274012../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/features_spaced_values/mod.rs000066400000000000000000000013651432416201200256350ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn features_spaced_values() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("your-face --features eyes,nose") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/features_spaced_values/out/000077500000000000000000000000001432416201200253125ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/features_spaced_values/out/Cargo.toml000066400000000000000000000002351432416201200272420ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] your-face = { version = "99999.0.0", features = ["eyes", "nose"] } cargo-0.66.0/tests/testsuite/cargo_add/features_spaced_values/stderr.log000066400000000000000000000002771432416201200265170ustar00rootroot00000000000000 Updating `dummy-registry` index Adding your-face v99999.0.0 to dependencies. 
Features: + eyes + nose - ears - mouth cargo-0.66.0/tests/testsuite/cargo_add/features_spaced_values/stdout.log000066400000000000000000000000001432416201200265160ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/features_unknown/000077500000000000000000000000001432416201200233645ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/features_unknown/in000077700000000000000000000000001432416201200262622../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/features_unknown/mod.rs000066400000000000000000000013521432416201200245120ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn features_unknown() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("your-face --features noze") .current_dir(cwd) .assert() .code(101) .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/features_unknown/out/000077500000000000000000000000001432416201200241735ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/features_unknown/out/Cargo.toml000066400000000000000000000001121432416201200261150ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/features_unknown/stderr.log000066400000000000000000000003721432416201200253740ustar00rootroot00000000000000 Updating `dummy-registry` index Adding your-face v99999.0.0 to dependencies. 
Features: + noze - ears - eyes - mouth - nose error: unrecognized features: ["noze"] cargo-0.66.0/tests/testsuite/cargo_add/features_unknown/stdout.log000066400000000000000000000000001432416201200253770ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git/000077500000000000000000000000001432416201200205525ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git/in000077700000000000000000000000001432416201200234502../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git/mod.rs000066400000000000000000000020531432416201200216770ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn git() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; let git_dep = cargo_test_support::git::new("git-package", |project| { project .file( "Cargo.toml", &cargo_test_support::basic_manifest("git-package", "0.3.0+git-package"), ) .file("src/lib.rs", "") }); let git_url = git_dep.url().to_string(); snapbox::cmd::Command::cargo_ui() .arg("add") .args(["git-package", "--git", &git_url]) .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/git/out/000077500000000000000000000000001432416201200213615ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git/out/Cargo.toml000066400000000000000000000002351432416201200233110ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] git-package = { git = "[ROOTURL]/git-package", version = "0.3.0" } 
cargo-0.66.0/tests/testsuite/cargo_add/git/stderr.log000066400000000000000000000002301432416201200225530ustar00rootroot00000000000000 Updating git repository `[ROOTURL]/git-package` Adding git-package (git) to dependencies. Updating git repository `[ROOTURL]/git-package` cargo-0.66.0/tests/testsuite/cargo_add/git/stdout.log000066400000000000000000000000001432416201200225650ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_branch/000077500000000000000000000000001432416201200220675ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_branch/in000077700000000000000000000000001432416201200247652../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_branch/mod.rs000066400000000000000000000023671432416201200232240ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn git_branch() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; let (git_dep, git_repo) = cargo_test_support::git::new_repo("git-package", |project| { project .file( "Cargo.toml", &cargo_test_support::basic_manifest("git-package", "0.3.0+git-package"), ) .file("src/lib.rs", "") }); let branch = "dev"; let find_head = || (git_repo.head().unwrap().peel_to_commit().unwrap()); git_repo.branch(branch, &find_head(), false).unwrap(); let git_url = git_dep.url().to_string(); snapbox::cmd::Command::cargo_ui() .arg("add") .args(["git-package", "--git", &git_url, "--branch", branch]) .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/git_branch/out/000077500000000000000000000000001432416201200226765ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_branch/out/Cargo.toml000066400000000000000000000002551432416201200246300ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] git-package = { git = "[ROOTURL]/git-package", branch = "dev", version = "0.3.0" } cargo-0.66.0/tests/testsuite/cargo_add/git_branch/stderr.log000066400000000000000000000002301432416201200240700ustar00rootroot00000000000000 Updating git repository `[ROOTURL]/git-package` Adding git-package (git) to dependencies. Updating git repository `[ROOTURL]/git-package` cargo-0.66.0/tests/testsuite/cargo_add/git_branch/stdout.log000066400000000000000000000000001432416201200241020ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_conflicts_namever/000077500000000000000000000000001432416201200243335ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_conflicts_namever/in000077700000000000000000000000001432416201200272312../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_conflicts_namever/mod.rs000066400000000000000000000015021432416201200254560ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn git_conflicts_namever() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .args([ "my-package@0.4.3", "--git", "https://github.com/dcjanus/invalid", ]) .current_dir(cwd) .assert() .code(101) .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); 
assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/git_conflicts_namever/out/000077500000000000000000000000001432416201200251425ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_conflicts_namever/out/Cargo.toml000066400000000000000000000001121432416201200270640ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/git_conflicts_namever/stderr.log000066400000000000000000000001411432416201200263350ustar00rootroot00000000000000error: cannot specify a git URL (`https://github.com/dcjanus/invalid`) with a version (`0.4.3`). cargo-0.66.0/tests/testsuite/cargo_add/git_conflicts_namever/stdout.log000066400000000000000000000000001432416201200263460ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_dev/000077500000000000000000000000001432416201200214105ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_dev/in000077700000000000000000000000001432416201200243062../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_dev/mod.rs000066400000000000000000000020701432416201200225340ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn git_dev() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; let git_dep = cargo_test_support::git::new("git-package", |project| { project .file( "Cargo.toml", &cargo_test_support::basic_manifest("git-package", "0.3.0+git-package"), ) .file("src/lib.rs", "") }); let git_url = git_dep.url().to_string(); snapbox::cmd::Command::cargo_ui() .arg("add") .args(["git-package", "--git", &git_url, "--dev"]) .current_dir(cwd) .assert() .success() 
.stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/git_dev/out/000077500000000000000000000000001432416201200222175ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_dev/out/Cargo.toml000066400000000000000000000002161432416201200241460ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dev-dependencies] git-package = { git = "[ROOTURL]/git-package" } cargo-0.66.0/tests/testsuite/cargo_add/git_dev/stderr.log000066400000000000000000000002341432416201200234150ustar00rootroot00000000000000 Updating git repository `[ROOTURL]/git-package` Adding git-package (git) to dev-dependencies. Updating git repository `[ROOTURL]/git-package` cargo-0.66.0/tests/testsuite/cargo_add/git_dev/stdout.log000066400000000000000000000000001432416201200234230ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_inferred_name/000077500000000000000000000000001432416201200234305ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_inferred_name/in000077700000000000000000000000001432416201200263262../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_inferred_name/mod.rs000066400000000000000000000020521432416201200245540ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn git_inferred_name() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; let git_dep = cargo_test_support::git::new("git-package", |project| { project .file( "Cargo.toml", &cargo_test_support::basic_manifest("git-package", "0.3.0+git-package"), ) .file("src/lib.rs", "") 
}); let git_url = git_dep.url().to_string(); snapbox::cmd::Command::cargo_ui() .arg("add") .args(["--git", &git_url]) .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/git_inferred_name/out/000077500000000000000000000000001432416201200242375ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_inferred_name/out/Cargo.toml000066400000000000000000000002351432416201200261670ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] git-package = { git = "[ROOTURL]/git-package", version = "0.3.0" } cargo-0.66.0/tests/testsuite/cargo_add/git_inferred_name/stderr.log000066400000000000000000000003141432416201200254340ustar00rootroot00000000000000 Updating git repository `[ROOTURL]/git-package` Updating git repository `[ROOTURL]/git-package` Adding git-package (git) to dependencies. 
Updating git repository `[ROOTURL]/git-package` cargo-0.66.0/tests/testsuite/cargo_add/git_inferred_name/stdout.log000066400000000000000000000000001432416201200254430ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_inferred_name_multiple/000077500000000000000000000000001432416201200253435ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_inferred_name_multiple/in000077700000000000000000000000001432416201200302412../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_inferred_name_multiple/mod.rs000066400000000000000000000023731432416201200264750ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn git_inferred_name_multiple() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; let git_dep = cargo_test_support::git::new("git-package", |project| { project .file( "p1/Cargo.toml", &cargo_test_support::basic_manifest("my-package1", "0.3.0+my-package1"), ) .file("p1/src/lib.rs", "") .file( "p2/Cargo.toml", &cargo_test_support::basic_manifest("my-package2", "0.3.0+my-package2"), ) .file("p2/src/lib.rs", "") }); let git_url = git_dep.url().to_string(); snapbox::cmd::Command::cargo_ui() .arg("add") .args(["--git", &git_url]) .current_dir(cwd) .assert() .code(101) .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/git_inferred_name_multiple/out/000077500000000000000000000000001432416201200261525ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_inferred_name_multiple/out/Cargo.toml000066400000000000000000000001121432416201200300740ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/git_inferred_name_multiple/stderr.log000066400000000000000000000002101432416201200273420ustar00rootroot00000000000000 Updating git repository `[ROOTURL]/git-package` error: multiple packages found at `[ROOTURL]/git-package`: my-package1, my-package2 cargo-0.66.0/tests/testsuite/cargo_add/git_inferred_name_multiple/stdout.log000066400000000000000000000000001432416201200273560ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_multiple_names/000077500000000000000000000000001432416201200236505ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_multiple_names/in000077700000000000000000000000001432416201200265462../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_multiple_names/mod.rs000066400000000000000000000024211432416201200247740ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn git_multiple_names() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; let git_dep = cargo_test_support::git::new("git-package", |project| { project .file( "p1/Cargo.toml", &cargo_test_support::basic_manifest("my-package1", "0.3.0+my-package1"), ) .file("p1/src/lib.rs", "") .file( "p2/Cargo.toml", &cargo_test_support::basic_manifest("my-package2", "0.3.0+my-package2"), ) .file("p2/src/lib.rs", "") }); let git_url = 
git_dep.url().to_string(); snapbox::cmd::Command::cargo_ui() .arg("add") .args(["my-package1", "my-package2", "--git", &git_url]) .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/git_multiple_names/out/000077500000000000000000000000001432416201200244575ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_multiple_names/out/Cargo.toml000066400000000000000000000003401432416201200264040ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package1 = { git = "[ROOTURL]/git-package", version = "0.3.0" } my-package2 = { git = "[ROOTURL]/git-package", version = "0.3.0" } cargo-0.66.0/tests/testsuite/cargo_add/git_multiple_names/stderr.log000066400000000000000000000003101432416201200256500ustar00rootroot00000000000000 Updating git repository `[ROOTURL]/git-package` Adding my-package1 (git) to dependencies. Adding my-package2 (git) to dependencies. 
Updating git repository `[ROOTURL]/git-package` cargo-0.66.0/tests/testsuite/cargo_add/git_multiple_names/stdout.log000066400000000000000000000000001432416201200256630ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_normalized_name/000077500000000000000000000000001432416201200237765ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_normalized_name/in000077700000000000000000000000001432416201200266742../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_normalized_name/mod.rs000066400000000000000000000021651432416201200251270ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn git_normalized_name() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; let git_dep = cargo_test_support::git::new("git-package", |project| { project .file( "Cargo.toml", &cargo_test_support::basic_manifest("git-package", "0.3.0+git-package"), ) .file("src/lib.rs", "") }); let git_url = git_dep.url().to_string(); snapbox::cmd::Command::cargo_ui() .arg("add") .args(["git_package", "--git", &git_url]) .current_dir(cwd) .assert() .failure() // Fuzzy searching for paths isn't supported at this time .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/git_normalized_name/out/000077500000000000000000000000001432416201200246055ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_normalized_name/out/Cargo.toml000066400000000000000000000001121432416201200265270ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" 
cargo-0.66.0/tests/testsuite/cargo_add/git_normalized_name/stderr.log000066400000000000000000000002271432416201200260050ustar00rootroot00000000000000 Updating git repository `[ROOTURL]/git-package` error: the crate `git_package@[ROOTURL]/git-package` could not be found at `[ROOTURL]/git-package` cargo-0.66.0/tests/testsuite/cargo_add/git_normalized_name/stdout.log000066400000000000000000000000001432416201200260110ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_registry/000077500000000000000000000000001432416201200225025ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_registry/in/000077500000000000000000000000001432416201200231105ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_registry/in/Cargo.toml000066400000000000000000000001121432416201200250320ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/git_registry/in/src/000077500000000000000000000000001432416201200236775ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_registry/in/src/lib.rs000066400000000000000000000000001432416201200250010ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_registry/mod.rs000066400000000000000000000022701432416201200236300ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_alt_registry; use cargo_test_support::curr_dir; #[cargo_test] fn git_registry() { init_alt_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; let git_dep = cargo_test_support::git::new("versioned-package", |project| { project .file( "Cargo.toml", &cargo_test_support::basic_manifest("versioned-package", "0.3.0+versioned-package"), ) .file("src/lib.rs", "") }); let git_url = git_dep.url().to_string(); 
snapbox::cmd::Command::cargo_ui() .arg("add") .args([ "versioned-package", "--git", &git_url, "--registry", "alternative", ]) .current_dir(cwd) .assert() .failure() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/git_registry/out/000077500000000000000000000000001432416201200233115ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_registry/out/Cargo.toml000066400000000000000000000003031432416201200252350ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] versioned-package = { git = "[ROOTURL]/versioned-package", version = "0.3.0", registry = "alternative" } cargo-0.66.0/tests/testsuite/cargo_add/git_registry/stderr.log000066400000000000000000000004411432416201200245070ustar00rootroot00000000000000 Updating git repository `[ROOTURL]/versioned-package` Adding versioned-package (git) to dependencies. error: failed to parse manifest at `[ROOT]/case/Cargo.toml` Caused by: dependency (versioned-package) specification is ambiguous. Only one of `git` or `registry` is allowed. 
cargo-0.66.0/tests/testsuite/cargo_add/git_registry/stdout.log000066400000000000000000000000001432416201200245150ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_rev/000077500000000000000000000000001432416201200214265ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_rev/in000077700000000000000000000000001432416201200243242../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_rev/mod.rs000066400000000000000000000023121432416201200225510ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn git_rev() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; let (git_dep, git_repo) = cargo_test_support::git::new_repo("git-package", |project| { project .file( "Cargo.toml", &cargo_test_support::basic_manifest("git-package", "0.3.0+git-package"), ) .file("src/lib.rs", "") }); let find_head = || (git_repo.head().unwrap().peel_to_commit().unwrap()); let head = find_head().id().to_string(); let git_url = git_dep.url().to_string(); snapbox::cmd::Command::cargo_ui() .arg("add") .args(["git-package", "--git", &git_url, "--rev", &head]) .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/git_rev/out/000077500000000000000000000000001432416201200222355ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_rev/out/Cargo.toml000066400000000000000000000002531432416201200241650ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] git-package = { git = "[ROOTURL]/git-package", 
rev = "[..]", version = "0.3.0" } cargo-0.66.0/tests/testsuite/cargo_add/git_rev/stderr.log000066400000000000000000000002301432416201200234270ustar00rootroot00000000000000 Updating git repository `[ROOTURL]/git-package` Adding git-package (git) to dependencies. Updating git repository `[ROOTURL]/git-package` cargo-0.66.0/tests/testsuite/cargo_add/git_rev/stdout.log000066400000000000000000000000001432416201200234410ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_tag/000077500000000000000000000000001432416201200214055ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_tag/in000077700000000000000000000000001432416201200243032../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_tag/mod.rs000066400000000000000000000022301432416201200225270ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn git_tag() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; let (git_dep, git_repo) = cargo_test_support::git::new_repo("git-package", |project| { project .file( "Cargo.toml", &cargo_test_support::basic_manifest("git-package", "0.3.0+git-package"), ) .file("src/lib.rs", "") }); let tag = "v1.0.0"; cargo_test_support::git::tag(&git_repo, tag); let git_url = git_dep.url().to_string(); snapbox::cmd::Command::cargo_ui() .arg("add") .args(["git-package", "--git", &git_url, "--tag", tag]) .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/git_tag/out/000077500000000000000000000000001432416201200222145ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/git_tag/out/Cargo.toml000066400000000000000000000002551432416201200241460ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] git-package = { git = "[ROOTURL]/git-package", tag = "v1.0.0", version = "0.3.0" } cargo-0.66.0/tests/testsuite/cargo_add/git_tag/stderr.log000066400000000000000000000002301432416201200234060ustar00rootroot00000000000000 Updating git repository `[ROOTURL]/git-package` Adding git-package (git) to dependencies. Updating git repository `[ROOTURL]/git-package` cargo-0.66.0/tests/testsuite/cargo_add/git_tag/stdout.log000066400000000000000000000000001432416201200234200ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/infer_prerelease/000077500000000000000000000000001432416201200233015ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/infer_prerelease/in000077700000000000000000000000001432416201200261772../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/infer_prerelease/mod.rs000066400000000000000000000013401432416201200244240ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn infer_prerelease() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("prerelease_only") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/infer_prerelease/out/000077500000000000000000000000001432416201200241105ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/infer_prerelease/out/Cargo.toml000066400000000000000000000001741432416201200260420ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] prerelease_only = "0.2.0-alpha.1" cargo-0.66.0/tests/testsuite/cargo_add/infer_prerelease/stderr.log000066400000000000000000000001411432416201200253030ustar00rootroot00000000000000 Updating `dummy-registry` index Adding prerelease_only v0.2.0-alpha.1 to dependencies. cargo-0.66.0/tests/testsuite/cargo_add/infer_prerelease/stdout.log000066400000000000000000000000001432416201200253140ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_arg/000077500000000000000000000000001432416201200222465ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_arg/in000077700000000000000000000000001432416201200251442../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_arg/mod.rs000066400000000000000000000013331432416201200233730ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn invalid_arg() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package --flag") .current_dir(cwd) .assert() .code(1) .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/invalid_arg/out/000077500000000000000000000000001432416201200230555ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_arg/out/Cargo.toml000066400000000000000000000001121432416201200247770ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/invalid_arg/stderr.log000066400000000000000000000005171432416201200242570ustar00rootroot00000000000000error: Found argument '--flag' which wasn't expected, or isn't valid in this context If you tried to supply `--flag` as a value rather than a flag, use `-- --flag` USAGE: cargo add [OPTIONS] [@] ... cargo add [OPTIONS] --path ... cargo add [OPTIONS] --git ... For more information try --help cargo-0.66.0/tests/testsuite/cargo_add/invalid_arg/stdout.log000066400000000000000000000000001432416201200242610ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_git_external/000077500000000000000000000000001432416201200241625ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_git_external/in000077700000000000000000000000001432416201200270602../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_git_external/mod.rs000066400000000000000000000015421432416201200253110ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn invalid_git_external() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; let git_url = url::Url::from_directory_path(cwd.join("does-not-exist")) .unwrap() .to_string(); snapbox::cmd::Command::cargo_ui() .arg("add") .args(["fake-git", "--git", &git_url]) .current_dir(cwd) .assert() .code(101) 
.stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/invalid_git_external/out/000077500000000000000000000000001432416201200247715ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_git_external/out/Cargo.toml000066400000000000000000000001121432416201200267130ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/invalid_git_external/stderr.log000066400000000000000000000004301432416201200261650ustar00rootroot00000000000000 Updating git repository `[ROOTURL]/case/does-not-exist/` ... error: failed to load source for dependency `fake-git` Caused by: Unable to update [ROOTURL]/case/does-not-exist/ Caused by: failed to clone into: [ROOT]/home/.cargo/git/db/does-not-exist-[..] Caused by: ... cargo-0.66.0/tests/testsuite/cargo_add/invalid_git_external/stdout.log000066400000000000000000000000001432416201200261750ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_git_name/000077500000000000000000000000001432416201200232605ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_git_name/in000077700000000000000000000000001432416201200261562../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_git_name/mod.rs000066400000000000000000000020671432416201200244120ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn invalid_git_name() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; let git_dep = cargo_test_support::git::new("git-package", |project| { project .file( 
"Cargo.toml", &cargo_test_support::basic_manifest("git-package", "0.3.0+git-package"), ) .file("src/lib.rs", "") }); let git_url = git_dep.url().to_string(); snapbox::cmd::Command::cargo_ui() .arg("add") .args(["not-in-git", "--git", &git_url]) .current_dir(cwd) .assert() .code(101) .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/invalid_git_name/out/000077500000000000000000000000001432416201200240675ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_git_name/out/Cargo.toml000066400000000000000000000001121432416201200260110ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/invalid_git_name/stderr.log000066400000000000000000000002261432416201200252660ustar00rootroot00000000000000 Updating git repository `[ROOTURL]/git-package` error: the crate `not-in-git@[ROOTURL]/git-package` could not be found at `[ROOTURL]/git-package` cargo-0.66.0/tests/testsuite/cargo_add/invalid_git_name/stdout.log000066400000000000000000000000001432416201200252730ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_inherit_dependency/000077500000000000000000000000001432416201200262055ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/000077500000000000000000000000001432416201200266135ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/Cargo.toml000066400000000000000000000001741432416201200305450ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] [workspace.dependencies] foo = { version = "0.0.0", path = "./dependency"} 
cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/dependency/000077500000000000000000000000001432416201200307315ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/dependency/Cargo.toml000066400000000000000000000000511432416201200326550ustar00rootroot00000000000000[package] name = "foo" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/dependency/src/000077500000000000000000000000001432416201200315205ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/dependency/src/lib.rs000066400000000000000000000000001432416201200326220ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/primary/000077500000000000000000000000001432416201200302765ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/primary/Cargo.toml000066400000000000000000000000511432416201200322220ustar00rootroot00000000000000[package] name = "bar" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/primary/src/000077500000000000000000000000001432416201200310655ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_inherit_dependency/in/primary/src/lib.rs000066400000000000000000000000001432416201200321670ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_inherit_dependency/mod.rs000066400000000000000000000013111432416201200273260ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn invalid_key_inherit_dependency() { let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .args(["foo", "--default-features", "-p", 
"bar"]) .current_dir(cwd) .assert() .failure() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_inherit_dependency/out/000077500000000000000000000000001432416201200270145ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_inherit_dependency/out/Cargo.toml000066400000000000000000000001741432416201200307460ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] [workspace.dependencies] foo = { version = "0.0.0", path = "./dependency"} cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_inherit_dependency/out/dependency/000077500000000000000000000000001432416201200311325ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_inherit_dependency/out/dependency/Cargo.toml000066400000000000000000000000511432416201200330560ustar00rootroot00000000000000[package] name = "foo" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_inherit_dependency/out/primary/000077500000000000000000000000001432416201200304775ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_inherit_dependency/out/primary/Cargo.toml000066400000000000000000000000511432416201200324230ustar00rootroot00000000000000[package] name = "bar" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_inherit_dependency/stderr.log000066400000000000000000000003021432416201200302060ustar00rootroot00000000000000error: cannot override workspace dependency with `--default-features`, either change `workspace.dependencies.foo.default-features` or define the dependency exclusively in the package's manifest 
cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_inherit_dependency/stdout.log000066400000000000000000000000001432416201200302200ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/000077500000000000000000000000001432416201200303135ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/000077500000000000000000000000001432416201200307215ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/Cargo.toml000066400000000000000000000001741432416201200326530ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] [workspace.dependencies] foo = { version = "0.0.0", path = "./dependency"} cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/dependency/000077500000000000000000000000001432416201200330375ustar00rootroot00000000000000Cargo.toml000066400000000000000000000000511432416201200347040ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/dependency[package] name = "foo" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/dependency/src/000077500000000000000000000000001432416201200336265ustar00rootroot00000000000000lib.rs000066400000000000000000000000001432416201200346510ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/dependency/srccargo-0.66.0/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/primary/000077500000000000000000000000001432416201200324045ustar00rootroot00000000000000Cargo.toml000066400000000000000000000001161432416201200342530ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/primary[package] name = "bar" version = "0.0.0" [dependencies] foo.workspace = true 
cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/primary/src/000077500000000000000000000000001432416201200331735ustar00rootroot00000000000000lib.rs000066400000000000000000000000001432416201200342160ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/in/primary/srccargo-0.66.0/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/mod.rs000066400000000000000000000013231432416201200314370ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn invalid_key_overwrite_inherit_dependency() { let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .args(["foo", "--default-features", "-p", "bar"]) .current_dir(cwd) .assert() .failure() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/out/000077500000000000000000000000001432416201200311225ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/out/Cargo.toml000066400000000000000000000001741432416201200330540ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] [workspace.dependencies] foo = { version = "0.0.0", path = "./dependency"} cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/out/dependency/000077500000000000000000000000001432416201200332405ustar00rootroot00000000000000Cargo.toml000066400000000000000000000000511432416201200351050ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/out/dependency[package] name 
= "foo" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/out/primary/000077500000000000000000000000001432416201200326055ustar00rootroot00000000000000Cargo.toml000066400000000000000000000001161432416201200344540ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/out/primary[package] name = "bar" version = "0.0.0" [dependencies] foo.workspace = true cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/stderr.log000066400000000000000000000003021432416201200323140ustar00rootroot00000000000000error: cannot override workspace dependency with `--default-features`, either change `workspace.dependencies.foo.default-features` or define the dependency exclusively in the package's manifest cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_overwrite_inherit_dependency/stdout.log000066400000000000000000000000001432416201200323260ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/000077500000000000000000000000001432416201200275345ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/000077500000000000000000000000001432416201200301425ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/Cargo.toml000066400000000000000000000002161432416201200320710ustar00rootroot00000000000000[workspace] members = ["primary", "dependency", "dependency-alt"] [workspace.dependencies] foo = { version = "0.0.0", path = "./dependency"} cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/dependency-alt/000077500000000000000000000000001432416201200330365ustar00rootroot00000000000000Cargo.toml000066400000000000000000000000551432416201200347070ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/dependency-alt[package] name = 
"foo-alt" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/dependency-alt/src/000077500000000000000000000000001432416201200336255ustar00rootroot00000000000000lib.rs000066400000000000000000000000001432416201200346500ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/dependency-alt/srccargo-0.66.0/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/dependency/000077500000000000000000000000001432416201200322605ustar00rootroot00000000000000Cargo.toml000066400000000000000000000000511432416201200341250ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/dependency[package] name = "foo" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/dependency/src/000077500000000000000000000000001432416201200330475ustar00rootroot00000000000000lib.rs000066400000000000000000000000001432416201200340720ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/dependency/srccargo-0.66.0/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/primary/000077500000000000000000000000001432416201200316255ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/primary/Cargo.toml000066400000000000000000000000511432416201200335510ustar00rootroot00000000000000[package] name = "bar" version = "0.0.0" 
cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/primary/src/000077500000000000000000000000001432416201200324145ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/in/primary/src/lib.rs000066400000000000000000000000001432416201200335160ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/mod.rs000066400000000000000000000013211432416201200306560ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn invalid_key_rename_inherit_dependency() { let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .args(["--rename", "foo", "foo-alt", "-p", "bar"]) .current_dir(cwd) .assert() .failure() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/out/000077500000000000000000000000001432416201200303435ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/out/Cargo.toml000066400000000000000000000002161432416201200322720ustar00rootroot00000000000000[workspace] members = ["primary", "dependency", "dependency-alt"] [workspace.dependencies] foo = { version = "0.0.0", path = "./dependency"} cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/out/dependency-alt/000077500000000000000000000000001432416201200332375ustar00rootroot00000000000000Cargo.toml000066400000000000000000000000551432416201200351100ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/out/dependency-alt[package] 
name = "foo-alt" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/out/dependency/000077500000000000000000000000001432416201200324615ustar00rootroot00000000000000Cargo.toml000066400000000000000000000000511432416201200343260ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/out/dependency[package] name = "foo" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/out/primary/000077500000000000000000000000001432416201200320265ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/out/primary/Cargo.toml000066400000000000000000000000511432416201200337520ustar00rootroot00000000000000[package] name = "bar" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/stderr.log000066400000000000000000000002571432416201200315460ustar00rootroot00000000000000error: cannot override workspace dependency with `--rename`, either change `workspace.dependencies.foo.package` or define the dependency exclusively in the package's manifest cargo-0.66.0/tests/testsuite/cargo_add/invalid_key_rename_inherit_dependency/stdout.log000066400000000000000000000000001432416201200315470ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_manifest/000077500000000000000000000000001432416201200233035ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_manifest/in/000077500000000000000000000000001432416201200237115ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_manifest/in/Cargo.toml000066400000000000000000000001671432416201200256450ustar00rootroot00000000000000[workspace] [package] name = "manifest-invalid-test-fixture" version = "0.1.0" [invalid-section] key = invalid-value 
cargo-0.66.0/tests/testsuite/cargo_add/invalid_manifest/in/src/000077500000000000000000000000001432416201200245005ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_manifest/in/src/lib.rs000066400000000000000000000000001432416201200256020ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_manifest/mod.rs000066400000000000000000000013331432416201200244300ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn invalid_manifest() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package") .current_dir(cwd) .assert() .code(101) .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/invalid_manifest/out/000077500000000000000000000000001432416201200241125ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_manifest/out/Cargo.toml000066400000000000000000000001671432416201200260460ustar00rootroot00000000000000[workspace] [package] name = "manifest-invalid-test-fixture" version = "0.1.0" [invalid-section] key = invalid-value cargo-0.66.0/tests/testsuite/cargo_add/invalid_manifest/stderr.log000066400000000000000000000003321432416201200253070ustar00rootroot00000000000000error: failed to parse manifest at `[ROOT]/case/Cargo.toml` Caused by: could not parse input as TOML Caused by: TOML parse error at line 8, column 7 | 8 | key = invalid-value | ^ Unexpected `v` 
cargo-0.66.0/tests/testsuite/cargo_add/invalid_manifest/stdout.log000066400000000000000000000000001432416201200253160ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_name_external/000077500000000000000000000000001432416201200243175ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_name_external/in000077700000000000000000000000001432416201200272152../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_name_external/mod.rs000066400000000000000000000014001432416201200254370ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn invalid_name_external() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("lets_hope_nobody_ever_publishes_this_crate") .current_dir(cwd) .assert() .code(101) .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/invalid_name_external/out/000077500000000000000000000000001432416201200251265ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_name_external/out/Cargo.toml000066400000000000000000000001121432416201200270500ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/invalid_name_external/stderr.log000066400000000000000000000002101432416201200263160ustar00rootroot00000000000000 Updating `dummy-registry` index error: the crate `lets_hope_nobody_ever_publishes_this_crate` could not be found in registry index. 
cargo-0.66.0/tests/testsuite/cargo_add/invalid_name_external/stdout.log000066400000000000000000000000001432416201200263320ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_path/000077500000000000000000000000001432416201200224315ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_path/in000077700000000000000000000000001432416201200253272../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_path/mod.rs000066400000000000000000000014021432416201200235530ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn invalid_path() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("cargo-list-test-fixture --path ./tests/fixtures/local") .current_dir(cwd) .assert() .code(101) .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/invalid_path/out/000077500000000000000000000000001432416201200232405ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_path/out/Cargo.toml000066400000000000000000000001121432416201200251620ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/invalid_path/stderr.log000066400000000000000000000003441432416201200244400ustar00rootroot00000000000000error: failed to load source for dependency `cargo-list-test-fixture` Caused by: Unable to update [ROOT]/case/tests/fixtures/local Caused by: failed to read `[ROOT]/case/tests/fixtures/local/Cargo.toml` Caused by: [..] 
cargo-0.66.0/tests/testsuite/cargo_add/invalid_path/stdout.log000066400000000000000000000000001432416201200244440ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_path_name/000077500000000000000000000000001432416201200234315ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_path_name/in/000077500000000000000000000000001432416201200240375ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_path_name/in/dependency/000077500000000000000000000000001432416201200261555ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_path_name/in/dependency/Cargo.toml000066400000000000000000000001251432416201200301030ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture-dependency" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/invalid_path_name/in/dependency/src/000077500000000000000000000000001432416201200267445ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_path_name/in/dependency/src/lib.rs000066400000000000000000000000001432416201200300460ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_path_name/in/primary/000077500000000000000000000000001432416201200255225ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_path_name/in/primary/Cargo.toml000066400000000000000000000001121432416201200274440ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/invalid_path_name/in/primary/src/000077500000000000000000000000001432416201200263115ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_path_name/in/primary/src/lib.rs000066400000000000000000000000001432416201200274130ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_path_name/mod.rs000066400000000000000000000014021432416201200245530ustar00rootroot00000000000000use 
cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn invalid_path_name() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = project_root.join("primary"); snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("not-at-path --path ../dependency") .current_dir(&cwd) .assert() .code(101) .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/invalid_path_name/out/000077500000000000000000000000001432416201200242405ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_path_name/out/dependency/000077500000000000000000000000001432416201200263565ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_path_name/out/dependency/Cargo.toml000066400000000000000000000001251432416201200303040ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture-dependency" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/invalid_path_name/out/primary/000077500000000000000000000000001432416201200257235ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_path_name/out/primary/Cargo.toml000066400000000000000000000001121432416201200276450ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/invalid_path_name/stderr.log000066400000000000000000000001451432416201200254370ustar00rootroot00000000000000error: the crate `not-at-path@[ROOT]/case/dependency` could not be found at `[ROOT]/case/dependency` 
cargo-0.66.0/tests/testsuite/cargo_add/invalid_path_name/stdout.log000066400000000000000000000000001432416201200254440ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_path_self/000077500000000000000000000000001432416201200234425ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_path_self/in000077700000000000000000000000001432416201200263402../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_path_self/mod.rs000066400000000000000000000013621432416201200245710ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn invalid_path_self() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("cargo-list-test-fixture --path .") .current_dir(cwd) .assert() .code(101) .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/invalid_path_self/out/000077500000000000000000000000001432416201200242515ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_path_self/out/Cargo.toml000066400000000000000000000001121432416201200261730ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/invalid_path_self/stderr.log000066400000000000000000000002041432416201200254440ustar00rootroot00000000000000 Adding cargo-list-test-fixture (local) to dependencies. 
error: cannot add `cargo-list-test-fixture` as a dependency to itself cargo-0.66.0/tests/testsuite/cargo_add/invalid_path_self/stdout.log000066400000000000000000000000001432416201200254550ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_target_empty/000077500000000000000000000000001432416201200242015ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_target_empty/in000077700000000000000000000000001432416201200270772../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_target_empty/mod.rs000066400000000000000000000013511432416201200253260ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn invalid_target_empty() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package --target ''") .current_dir(cwd) .assert() .code(1) .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/invalid_target_empty/out/000077500000000000000000000000001432416201200250105ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_target_empty/out/Cargo.toml000066400000000000000000000001121432416201200267320ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/invalid_target_empty/stderr.log000066400000000000000000000001601432416201200262040ustar00rootroot00000000000000error: The argument '--target ' requires a value but none was supplied For more information try --help 
cargo-0.66.0/tests/testsuite/cargo_add/invalid_target_empty/stdout.log000066400000000000000000000000001432416201200262140ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_vers/000077500000000000000000000000001432416201200224545ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_vers/in000077700000000000000000000000001432416201200253522../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_vers/mod.rs000066400000000000000000000013561432416201200236060ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn invalid_vers() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package@invalid-version-string") .current_dir(cwd) .assert() .code(101) .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/invalid_vers/out/000077500000000000000000000000001432416201200232635ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/invalid_vers/out/Cargo.toml000066400000000000000000000001121432416201200252050ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/invalid_vers/stderr.log000066400000000000000000000002061432416201200244600ustar00rootroot00000000000000error: invalid version requirement `invalid-version-string` Caused by: unexpected character 'i' while parsing major version number 
cargo-0.66.0/tests/testsuite/cargo_add/invalid_vers/stdout.log000066400000000000000000000000001432416201200244670ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features/000077500000000000000000000000001432416201200226405ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features/in000077700000000000000000000000001432416201200255362../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features/mod.rs000066400000000000000000000013251432416201200237660ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn list_features() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .args(["your-face"]) .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/list_features/out/000077500000000000000000000000001432416201200234475ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features/out/Cargo.toml000066400000000000000000000001621432416201200253760ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] your-face = "99999.0.0" cargo-0.66.0/tests/testsuite/cargo_add/list_features/stderr.log000066400000000000000000000002771432416201200246540ustar00rootroot00000000000000 Updating `dummy-registry` index Adding your-face v99999.0.0 to dependencies. 
Features: - ears - eyes - mouth - nose cargo-0.66.0/tests/testsuite/cargo_add/list_features/stdout.log000066400000000000000000000000001432416201200246530ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path/000077500000000000000000000000001432416201200236545ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path/in/000077500000000000000000000000001432416201200242625ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path/in/Cargo.toml000066400000000000000000000000741432416201200262130ustar00rootroot00000000000000[workspace] members = ["primary", "dependency", "optional"] cargo-0.66.0/tests/testsuite/cargo_add/list_features_path/in/dependency/000077500000000000000000000000001432416201200264005ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path/in/dependency/Cargo.toml000066400000000000000000000003311432416201200303250ustar00rootroot00000000000000[package] name = "your-face" version = "0.1.3" [dependencies] my-package = "0.1.1" optional-dependency = { path = "../optional", optional = true } [features] default = ["mouth"] nose = [] mouth = ["nose"] eyes = [] cargo-0.66.0/tests/testsuite/cargo_add/list_features_path/in/dependency/src/000077500000000000000000000000001432416201200271675ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path/in/dependency/src/lib.rs000066400000000000000000000000001432416201200302710ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path/in/optional/000077500000000000000000000000001432416201200261075ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path/in/optional/Cargo.toml000066400000000000000000000001361432416201200300370ustar00rootroot00000000000000[package] name = "optional-dependency" version = "0.1.3" [dependencies] my-package = "0.1.1" 
cargo-0.66.0/tests/testsuite/cargo_add/list_features_path/in/optional/src/000077500000000000000000000000001432416201200266765ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path/in/optional/src/lib.rs000066400000000000000000000000001432416201200300000ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path/in/primary/000077500000000000000000000000001432416201200257455ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path/in/primary/Cargo.toml000066400000000000000000000000751432416201200276770ustar00rootroot00000000000000[package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/list_features_path/in/primary/src/000077500000000000000000000000001432416201200265345ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path/in/primary/src/lib.rs000066400000000000000000000000001432416201200276360ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path/mod.rs000066400000000000000000000014011432416201200247750ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn list_features_path() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = project_root.join("primary"); snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("your-face --path ../dependency") .current_dir(&cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/list_features_path/out/000077500000000000000000000000001432416201200244635ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path/out/Cargo.toml000066400000000000000000000000741432416201200264140ustar00rootroot00000000000000[workspace] members = ["primary", "dependency", "optional"] cargo-0.66.0/tests/testsuite/cargo_add/list_features_path/out/dependency/000077500000000000000000000000001432416201200266015ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path/out/dependency/Cargo.toml000066400000000000000000000003311432416201200305260ustar00rootroot00000000000000[package] name = "your-face" version = "0.1.3" [dependencies] my-package = "0.1.1" optional-dependency = { path = "../optional", optional = true } [features] default = ["mouth"] nose = [] mouth = ["nose"] eyes = [] cargo-0.66.0/tests/testsuite/cargo_add/list_features_path/out/primary/000077500000000000000000000000001432416201200261465ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path/out/primary/Cargo.toml000066400000000000000000000002071432416201200300750ustar00rootroot00000000000000[package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] your-face = { version = "0.1.3", path = "../dependency" } cargo-0.66.0/tests/testsuite/cargo_add/list_features_path/stderr.log000066400000000000000000000003131432416201200256570ustar00rootroot00000000000000 Adding your-face (local) to dependencies. 
Features: + mouth + nose - eyes - optional-dependency Updating `dummy-registry` index cargo-0.66.0/tests/testsuite/cargo_add/list_features_path/stdout.log000066400000000000000000000000001432416201200256670ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path_no_default/000077500000000000000000000000001432416201200260545ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path_no_default/in/000077500000000000000000000000001432416201200264625ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path_no_default/in/Cargo.toml000066400000000000000000000000741432416201200304130ustar00rootroot00000000000000[workspace] members = ["primary", "dependency", "optional"] cargo-0.66.0/tests/testsuite/cargo_add/list_features_path_no_default/in/dependency/000077500000000000000000000000001432416201200306005ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path_no_default/in/dependency/Cargo.toml000066400000000000000000000003311432416201200325250ustar00rootroot00000000000000[package] name = "your-face" version = "0.1.3" [dependencies] my-package = "0.1.1" optional-dependency = { path = "../optional", optional = true } [features] default = ["mouth"] nose = [] mouth = ["nose"] eyes = [] cargo-0.66.0/tests/testsuite/cargo_add/list_features_path_no_default/in/dependency/src/000077500000000000000000000000001432416201200313675ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path_no_default/in/dependency/src/lib.rs000066400000000000000000000000001432416201200324710ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path_no_default/in/optional/000077500000000000000000000000001432416201200303075ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path_no_default/in/optional/Cargo.toml000066400000000000000000000001361432416201200322370ustar00rootroot00000000000000[package] name = 
"optional-dependency" version = "0.1.3" [dependencies] my-package = "0.1.1" cargo-0.66.0/tests/testsuite/cargo_add/list_features_path_no_default/in/optional/src/000077500000000000000000000000001432416201200310765ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path_no_default/in/optional/src/lib.rs000066400000000000000000000000001432416201200322000ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path_no_default/in/primary/000077500000000000000000000000001432416201200301455ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path_no_default/in/primary/Cargo.toml000066400000000000000000000000751432416201200320770ustar00rootroot00000000000000[package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/list_features_path_no_default/in/primary/src/000077500000000000000000000000001432416201200307345ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path_no_default/in/primary/src/lib.rs000066400000000000000000000000001432416201200320360ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path_no_default/mod.rs000066400000000000000000000015441432416201200272050ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn list_features_path_no_default() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = project_root.join("primary"); snapbox::cmd::Command::cargo_ui() .arg("add") .args([ "your-face", "--path", "../dependency", "--no-default-features", ]) .current_dir(&cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); 
assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/list_features_path_no_default/out/000077500000000000000000000000001432416201200266635ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path_no_default/out/Cargo.toml000066400000000000000000000000741432416201200306140ustar00rootroot00000000000000[workspace] members = ["primary", "dependency", "optional"] cargo-0.66.0/tests/testsuite/cargo_add/list_features_path_no_default/out/dependency/000077500000000000000000000000001432416201200310015ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path_no_default/out/dependency/Cargo.toml000066400000000000000000000003311432416201200327260ustar00rootroot00000000000000[package] name = "your-face" version = "0.1.3" [dependencies] my-package = "0.1.1" optional-dependency = { path = "../optional", optional = true } [features] default = ["mouth"] nose = [] mouth = ["nose"] eyes = [] cargo-0.66.0/tests/testsuite/cargo_add/list_features_path_no_default/out/primary/000077500000000000000000000000001432416201200303465ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/list_features_path_no_default/out/primary/Cargo.toml000066400000000000000000000002411432416201200322730ustar00rootroot00000000000000[package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] your-face = { version = "0.1.3", path = "../dependency", default-features = false } cargo-0.66.0/tests/testsuite/cargo_add/list_features_path_no_default/stderr.log000066400000000000000000000003131432416201200300570ustar00rootroot00000000000000 Adding your-face (local) to dependencies. 
Features: - eyes - mouth - nose - optional-dependency Updating `dummy-registry` index cargo-0.66.0/tests/testsuite/cargo_add/list_features_path_no_default/stdout.log000066400000000000000000000000001432416201200300670ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/locked_changed/000077500000000000000000000000001432416201200227015ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/locked_changed/in000077700000000000000000000000001432416201200255772../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/locked_changed/mod.rs000066400000000000000000000013421432416201200240260ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn locked_changed() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package --locked") .current_dir(cwd) .assert() .failure() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/locked_changed/out/000077500000000000000000000000001432416201200235105ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/locked_changed/out/Cargo.toml000066400000000000000000000001121432416201200254320ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/locked_changed/stderr.log000066400000000000000000000003041432416201200247040ustar00rootroot00000000000000 Updating `dummy-registry` index Adding my-package v99999.0.0 to dependencies. 
error: the manifest file [ROOT]/case/Cargo.toml needs to be updated but --locked was passed to prevent this cargo-0.66.0/tests/testsuite/cargo_add/locked_changed/stdout.log000066400000000000000000000000001432416201200247140ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/locked_unchanged/000077500000000000000000000000001432416201200232445ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/locked_unchanged/in/000077500000000000000000000000001432416201200236525ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/locked_unchanged/in/Cargo.lock000066400000000000000000000006321432416201200255600ustar00rootroot00000000000000# This file is automatically @generated by Cargo. # It is not intended for manual editing. version = 3 [[package]] name = "cargo-list-test-fixture" version = "0.0.0" dependencies = [ "my-package", ] [[package]] name = "my-package" version = "99999.0.0+my-package" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62c45acf9e11d2f97f5b318143219c0b4102eafef1c22a4b545b47104691d915" cargo-0.66.0/tests/testsuite/cargo_add/locked_unchanged/in/Cargo.toml000066400000000000000000000001631432416201200256020ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package = "99999.0.0" cargo-0.66.0/tests/testsuite/cargo_add/locked_unchanged/in/src/000077500000000000000000000000001432416201200244415ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/locked_unchanged/in/src/lib.rs000066400000000000000000000000001432416201200255430ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/locked_unchanged/mod.rs000066400000000000000000000013441432416201200243730ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn locked_unchanged() 
{ init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package --locked") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/locked_unchanged/out/000077500000000000000000000000001432416201200240535ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/locked_unchanged/out/Cargo.toml000066400000000000000000000001631432416201200260030ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package = "99999.0.0" cargo-0.66.0/tests/testsuite/cargo_add/locked_unchanged/stderr.log000066400000000000000000000001301432416201200252440ustar00rootroot00000000000000 Updating `dummy-registry` index Adding my-package v99999.0.0 to dependencies. cargo-0.66.0/tests/testsuite/cargo_add/locked_unchanged/stdout.log000066400000000000000000000000001432416201200252570ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/lockfile_updated/000077500000000000000000000000001432416201200232655ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/lockfile_updated/in/000077500000000000000000000000001432416201200236735ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/lockfile_updated/in/Cargo.lock000066400000000000000000000006611432416201200256030ustar00rootroot00000000000000# This file is automatically @generated by Cargo. # It is not intended for manual editing. 
version = 3 [[package]] name = "cargo-list-test-fixture" version = "0.0.0" dependencies = [ "my-package", "unrelateed-crate", ] [[package]] name = "unrelateed-crate" version = "0.2.0+my-package" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "266de4849a570b5dfda5e8e082a2aff885e9d2d4965dae8f8b6c8535e1ec731f" cargo-0.66.0/tests/testsuite/cargo_add/lockfile_updated/in/Cargo.toml000066400000000000000000000001651432416201200256250ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] unrelateed-crate = "0.2.0" cargo-0.66.0/tests/testsuite/cargo_add/lockfile_updated/in/src/000077500000000000000000000000001432416201200244625ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/lockfile_updated/in/src/lib.rs000066400000000000000000000000001432416201200255640ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/lockfile_updated/mod.rs000066400000000000000000000013331432416201200244120ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn lockfile_updated() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/lockfile_updated/out/000077500000000000000000000000001432416201200240745ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/lockfile_updated/out/Cargo.lock000066400000000000000000000012021432416201200257740ustar00rootroot00000000000000# This file is 
automatically @generated by Cargo. # It is not intended for manual editing. version = 3 [[package]] name = "cargo-list-test-fixture" version = "0.0.0" dependencies = [ "my-package", "unrelateed-crate", ] [[package]] name = "my-package" version = "99999.0.0+my-package" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62c45acf9e11d2f97f5b318143219c0b4102eafef1c22a4b545b47104691d915" [[package]] name = "unrelateed-crate" version = "0.2.0+my-package" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "266de4849a570b5dfda5e8e082a2aff885e9d2d4965dae8f8b6c8535e1ec731f" cargo-0.66.0/tests/testsuite/cargo_add/lockfile_updated/out/Cargo.toml000066400000000000000000000002161432416201200260230ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package = "99999.0.0" unrelateed-crate = "0.2.0" cargo-0.66.0/tests/testsuite/cargo_add/lockfile_updated/stderr.log000066400000000000000000000001301432416201200252650ustar00rootroot00000000000000 Updating `dummy-registry` index Adding my-package v99999.0.0 to dependencies. 
cargo-0.66.0/tests/testsuite/cargo_add/lockfile_updated/stdout.log000066400000000000000000000000001432416201200253000ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/manifest_path_package/000077500000000000000000000000001432416201200242645ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/manifest_path_package/in/000077500000000000000000000000001432416201200246725ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/manifest_path_package/in/Cargo.toml000066400000000000000000000000601432416201200266160ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] cargo-0.66.0/tests/testsuite/cargo_add/manifest_path_package/in/dependency/000077500000000000000000000000001432416201200270105ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/manifest_path_package/in/dependency/Cargo.toml000066400000000000000000000001101432416201200307300ustar00rootroot00000000000000[package] name = "cargo-list-test-fixture-dependency" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/manifest_path_package/in/dependency/src/000077500000000000000000000000001432416201200275775ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/manifest_path_package/in/dependency/src/lib.rs000066400000000000000000000000001432416201200307010ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/manifest_path_package/in/primary/000077500000000000000000000000001432416201200263555ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/manifest_path_package/in/primary/Cargo.toml000066400000000000000000000000751432416201200303070ustar00rootroot00000000000000[package] name = "cargo-list-test-fixture" version = "0.0.0" 
cargo-0.66.0/tests/testsuite/cargo_add/manifest_path_package/in/primary/src/000077500000000000000000000000001432416201200271445ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/manifest_path_package/in/primary/src/lib.rs000066400000000000000000000000001432416201200302460ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/manifest_path_package/mod.rs000066400000000000000000000016061432416201200254140ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn manifest_path_package() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .args([ "--manifest-path", "Cargo.toml", "--package", "cargo-list-test-fixture", "cargo-list-test-fixture-dependency", ]) .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/manifest_path_package/out/000077500000000000000000000000001432416201200250735ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/manifest_path_package/out/Cargo.toml000066400000000000000000000000601432416201200270170ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] cargo-0.66.0/tests/testsuite/cargo_add/manifest_path_package/out/dependency/000077500000000000000000000000001432416201200272115ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/manifest_path_package/out/dependency/Cargo.toml000066400000000000000000000001101432416201200311310ustar00rootroot00000000000000[package] name = "cargo-list-test-fixture-dependency" version = "0.0.0" 
cargo-0.66.0/tests/testsuite/cargo_add/manifest_path_package/out/primary/000077500000000000000000000000001432416201200265565ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/manifest_path_package/out/primary/Cargo.toml000066400000000000000000000002401432416201200305020ustar00rootroot00000000000000[package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] cargo-list-test-fixture-dependency = { version = "0.0.0", path = "../dependency" } cargo-0.66.0/tests/testsuite/cargo_add/manifest_path_package/stderr.log000066400000000000000000000001111432416201200262630ustar00rootroot00000000000000 Adding cargo-list-test-fixture-dependency (local) to dependencies. cargo-0.66.0/tests/testsuite/cargo_add/manifest_path_package/stdout.log000066400000000000000000000000001432416201200262770ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/merge_activated_features/000077500000000000000000000000001432416201200250105ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/merge_activated_features/in/000077500000000000000000000000001432416201200254165ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/merge_activated_features/in/Cargo.toml000066400000000000000000000002231432416201200273430ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] [workspace.dependencies] foo = { version = "0.0.0", path = "./dependency", features = ["merge"] } cargo-0.66.0/tests/testsuite/cargo_add/merge_activated_features/in/dependency/000077500000000000000000000000001432416201200275345ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/merge_activated_features/in/dependency/Cargo.toml000066400000000000000000000005001432416201200314570ustar00rootroot00000000000000[package] name = "foo" version = "0.0.0" [features] default-base = [] default-test-base = [] default-merge-base = [] default = ["default-base", "default-test-base", "default-merge-base"] test-base = [] test = ["test-base", 
"default-test-base"] merge-base = [] merge = ["merge-base", "default-merge-base"] unrelated = []cargo-0.66.0/tests/testsuite/cargo_add/merge_activated_features/in/dependency/src/000077500000000000000000000000001432416201200303235ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/merge_activated_features/in/dependency/src/lib.rs000066400000000000000000000000001432416201200314250ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/merge_activated_features/in/primary/000077500000000000000000000000001432416201200271015ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/merge_activated_features/in/primary/Cargo.toml000066400000000000000000000001501432416201200310250ustar00rootroot00000000000000[package] name = "bar" version = "0.0.0" [dependencies] foo = { workspace = true, features = ["test"] }cargo-0.66.0/tests/testsuite/cargo_add/merge_activated_features/in/primary/src/000077500000000000000000000000001432416201200276705ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/merge_activated_features/in/primary/src/lib.rs000066400000000000000000000000001432416201200307720ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/merge_activated_features/mod.rs000066400000000000000000000012551432416201200261400ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn merge_activated_features() { let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .args(["foo", "-p", "bar"]) .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/merge_activated_features/out/000077500000000000000000000000001432416201200256175ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/merge_activated_features/out/Cargo.toml000066400000000000000000000002231432416201200275440ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] [workspace.dependencies] foo = { version = "0.0.0", path = "./dependency", features = ["merge"] } cargo-0.66.0/tests/testsuite/cargo_add/merge_activated_features/out/dependency/000077500000000000000000000000001432416201200277355ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/merge_activated_features/out/dependency/Cargo.toml000066400000000000000000000005001432416201200316600ustar00rootroot00000000000000[package] name = "foo" version = "0.0.0" [features] default-base = [] default-test-base = [] default-merge-base = [] default = ["default-base", "default-test-base", "default-merge-base"] test-base = [] test = ["test-base", "default-test-base"] merge-base = [] merge = ["merge-base", "default-merge-base"] unrelated = []cargo-0.66.0/tests/testsuite/cargo_add/merge_activated_features/out/primary/000077500000000000000000000000001432416201200273025ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/merge_activated_features/out/primary/Cargo.toml000066400000000000000000000001511432416201200312270ustar00rootroot00000000000000[package] name = "bar" version = "0.0.0" [dependencies] foo = { workspace = true, features = ["test"] } cargo-0.66.0/tests/testsuite/cargo_add/merge_activated_features/stderr.log000066400000000000000000000004461432416201200270220ustar00rootroot00000000000000 Adding foo (workspace) to dependencies. 
Features as of v0.0.0: + default-base + default-merge-base + default-test-base + merge + merge-base + test + test-base - unrelated cargo-0.66.0/tests/testsuite/cargo_add/merge_activated_features/stdout.log000066400000000000000000000000001432416201200270230ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/mod.rs000066400000000000000000000127721432416201200211250ustar00rootroot00000000000000mod add_basic; mod add_multiple; mod add_normalized_name_external; mod build; mod build_prefer_existing_version; mod change_rename_target; mod default_features; mod deprecated_default_features; mod deprecated_section; mod detect_workspace_inherit; mod detect_workspace_inherit_features; mod detect_workspace_inherit_optional; mod dev; mod dev_build_conflict; mod dev_prefer_existing_version; mod dry_run; mod features; mod features_empty; mod features_multiple_occurrences; mod features_preserve; mod features_spaced_values; mod features_unknown; mod git; mod git_branch; mod git_conflicts_namever; mod git_dev; mod git_inferred_name; mod git_inferred_name_multiple; mod git_multiple_names; mod git_normalized_name; mod git_registry; mod git_rev; mod git_tag; mod infer_prerelease; mod invalid_arg; mod invalid_git_external; mod invalid_git_name; mod invalid_key_inherit_dependency; mod invalid_key_overwrite_inherit_dependency; mod invalid_key_rename_inherit_dependency; mod invalid_manifest; mod invalid_name_external; mod invalid_path; mod invalid_path_name; mod invalid_path_self; mod invalid_target_empty; mod invalid_vers; mod list_features; mod list_features_path; mod list_features_path_no_default; mod locked_changed; mod locked_unchanged; mod lockfile_updated; mod manifest_path_package; mod merge_activated_features; mod multiple_conflicts_with_features; mod multiple_conflicts_with_rename; mod namever; mod no_args; mod no_default_features; mod no_optional; mod offline_empty_cache; mod optional; mod overwrite_default_features; mod 
overwrite_default_features_with_no_default_features; mod overwrite_features; mod overwrite_git_with_path; mod overwrite_inherit_features_noop; mod overwrite_inherit_noop; mod overwrite_inherit_optional_noop; mod overwrite_inline_features; mod overwrite_name_dev_noop; mod overwrite_name_noop; mod overwrite_no_default_features; mod overwrite_no_default_features_with_default_features; mod overwrite_no_optional; mod overwrite_no_optional_with_optional; mod overwrite_optional; mod overwrite_optional_with_no_optional; mod overwrite_path_noop; mod overwrite_path_with_version; mod overwrite_preserves_inline_table; mod overwrite_rename_with_no_rename; mod overwrite_rename_with_rename; mod overwrite_rename_with_rename_noop; mod overwrite_version_with_git; mod overwrite_version_with_path; mod overwrite_with_rename; mod overwrite_workspace_dep; mod overwrite_workspace_dep_features; mod path; mod path_dev; mod path_inferred_name; mod path_inferred_name_conflicts_full_feature; mod path_normalized_name; mod preserve_sorted; mod preserve_unsorted; mod quiet; mod registry; mod rename; mod require_weak; mod target; mod target_cfg; mod unknown_inherited_feature; mod vers; mod workspace_name; mod workspace_path; mod workspace_path_dev; fn init_registry() { cargo_test_support::registry::init(); add_registry_packages(false); } fn init_alt_registry() { cargo_test_support::registry::alt_init(); add_registry_packages(true); } fn add_registry_packages(alt: bool) { for name in [ "my-package", "my-package1", "my-package2", "my-dev-package1", "my-dev-package2", "my-build-package1", "my-build-package2", "toml", "versioned-package", "cargo-list-test-fixture-dependency", "unrelateed-crate", ] { cargo_test_support::registry::Package::new(name, "0.1.1+my-package") .alternative(alt) .publish(); cargo_test_support::registry::Package::new(name, "0.2.0+my-package") .alternative(alt) .publish(); cargo_test_support::registry::Package::new(name, "0.2.3+my-package") .alternative(alt) .publish(); 
cargo_test_support::registry::Package::new(name, "0.4.1+my-package") .alternative(alt) .publish(); cargo_test_support::registry::Package::new(name, "20.0.0+my-package") .alternative(alt) .publish(); cargo_test_support::registry::Package::new(name, "99999.0.0+my-package") .alternative(alt) .publish(); cargo_test_support::registry::Package::new(name, "99999.0.0-alpha.1+my-package") .alternative(alt) .publish(); } cargo_test_support::registry::Package::new("prerelease_only", "0.2.0-alpha.1") .alternative(alt) .publish(); cargo_test_support::registry::Package::new("test_breaking", "0.2.0") .alternative(alt) .publish(); cargo_test_support::registry::Package::new("test_nonbreaking", "0.1.1") .alternative(alt) .publish(); // Normalization cargo_test_support::registry::Package::new("linked-hash-map", "0.5.4") .alternative(alt) .feature("clippy", &[]) .feature("heapsize", &[]) .feature("heapsize_impl", &[]) .feature("nightly", &[]) .feature("serde", &[]) .feature("serde_impl", &[]) .feature("serde_test", &[]) .publish(); cargo_test_support::registry::Package::new("inflector", "0.11.4") .alternative(alt) .feature("default", &["heavyweight", "lazy_static", "regex"]) .feature("heavyweight", &[]) .feature("lazy_static", &[]) .feature("regex", &[]) .feature("unstable", &[]) .publish(); cargo_test_support::registry::Package::new("your-face", "99999.0.0+my-package") .alternative(alt) .feature("nose", &[]) .feature("mouth", &[]) .feature("eyes", &[]) .feature("ears", &[]) .publish(); } cargo-0.66.0/tests/testsuite/cargo_add/multiple_conflicts_with_features/000077500000000000000000000000001432416201200266175ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/multiple_conflicts_with_features/in000077700000000000000000000000001432416201200315152../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/multiple_conflicts_with_features/mod.rs000066400000000000000000000014061432416201200277450ustar00rootroot00000000000000use 
cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn multiple_conflicts_with_features() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package1 your-face --features nose") .current_dir(cwd) .assert() .code(101) .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/multiple_conflicts_with_features/out/000077500000000000000000000000001432416201200274265ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/multiple_conflicts_with_features/out/Cargo.toml000066400000000000000000000001121432416201200313500ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/multiple_conflicts_with_features/stderr.log000066400000000000000000000001751432416201200306300ustar00rootroot00000000000000error: feature `nose` must be qualified by the dependency its being activated for, like `my-package1/nose`, `your-face/nose` 
cargo-0.66.0/tests/testsuite/cargo_add/multiple_conflicts_with_features/stdout.log000066400000000000000000000000001432416201200306320ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/multiple_conflicts_with_rename/000077500000000000000000000000001432416201200262505ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/multiple_conflicts_with_rename/in000077700000000000000000000000001432416201200311462../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/multiple_conflicts_with_rename/mod.rs000066400000000000000000000014071432416201200273770ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn multiple_conflicts_with_rename() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package1 my-package2 --rename renamed") .current_dir(cwd) .assert() .code(101) .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/multiple_conflicts_with_rename/out/000077500000000000000000000000001432416201200270575ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/multiple_conflicts_with_rename/out/Cargo.toml000066400000000000000000000001121432416201200310010ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/multiple_conflicts_with_rename/stderr.log000066400000000000000000000000661432416201200302600ustar00rootroot00000000000000error: cannot specify multiple crates with `--rename` 
cargo-0.66.0/tests/testsuite/cargo_add/multiple_conflicts_with_rename/stdout.log000066400000000000000000000000001432416201200302630ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/namever/000077500000000000000000000000001432416201200214245ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/namever/in000077700000000000000000000000001432416201200243222../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/namever/mod.rs000066400000000000000000000013701432416201200225520ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn namever() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package1@>=0.1.1 my-package2@0.2.3 my-package") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/namever/out/000077500000000000000000000000001432416201200222335ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/namever/out/Cargo.toml000066400000000000000000000002411432416201200241600ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package = "99999.0.0" my-package1 = ">=0.1.1" my-package2 = "0.2.3" cargo-0.66.0/tests/testsuite/cargo_add/namever/stderr.log000066400000000000000000000002731432416201200234340ustar00rootroot00000000000000 Updating `dummy-registry` index Adding my-package1 >=0.1.1 to dependencies. Adding my-package2 v0.2.3 to dependencies. Adding my-package v99999.0.0 to dependencies. 
cargo-0.66.0/tests/testsuite/cargo_add/namever/stdout.log000066400000000000000000000000001432416201200234370ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/no_args/000077500000000000000000000000001432416201200214175ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/no_args/in000077700000000000000000000000001432416201200243152../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/no_args/mod.rs000066400000000000000000000012601432416201200225430ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn no_args() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .current_dir(cwd) .assert() .code(1) .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/no_args/out/000077500000000000000000000000001432416201200222265ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/no_args/out/Cargo.toml000066400000000000000000000001121432416201200241500ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/no_args/stderr.log000066400000000000000000000004131432416201200234230ustar00rootroot00000000000000error: The following required arguments were not provided: |--git > USAGE: cargo add [OPTIONS] [@] ... cargo add [OPTIONS] --path ... cargo add [OPTIONS] --git ... 
For more information try --help cargo-0.66.0/tests/testsuite/cargo_add/no_args/stdout.log000066400000000000000000000000001432416201200234320ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/no_default_features/000077500000000000000000000000001432416201200240055ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/no_default_features/in000077700000000000000000000000001432416201200267032../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/no_default_features/mod.rs000066400000000000000000000014071432416201200251340ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn no_default_features() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package1 my-package2@0.4.1 --no-default-features") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/no_default_features/out/000077500000000000000000000000001432416201200246145ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/no_default_features/out/Cargo.toml000066400000000000000000000003321432416201200265420ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package1 = { version = "99999.0.0", default-features = false } my-package2 = { version = "0.4.1", default-features = false } cargo-0.66.0/tests/testsuite/cargo_add/no_default_features/stderr.log000066400000000000000000000002121432416201200260060ustar00rootroot00000000000000 Updating `dummy-registry` index 
Adding my-package1 v99999.0.0 to dependencies. Adding my-package2 v0.4.1 to dependencies. cargo-0.66.0/tests/testsuite/cargo_add/no_default_features/stdout.log000066400000000000000000000000001432416201200260200ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/no_optional/000077500000000000000000000000001432416201200223105ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/no_optional/in000077700000000000000000000000001432416201200252062../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/no_optional/mod.rs000066400000000000000000000013671432416201200234440ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn no_optional() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package1 my-package2@0.4.1 --no-optional") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/no_optional/out/000077500000000000000000000000001432416201200231175ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/no_optional/out/Cargo.toml000066400000000000000000000002121432416201200250420ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package1 = "99999.0.0" my-package2 = "0.4.1" cargo-0.66.0/tests/testsuite/cargo_add/no_optional/stderr.log000066400000000000000000000002121432416201200243110ustar00rootroot00000000000000 Updating `dummy-registry` index Adding my-package1 v99999.0.0 to dependencies. 
Adding my-package2 v0.4.1 to dependencies. cargo-0.66.0/tests/testsuite/cargo_add/no_optional/stdout.log000066400000000000000000000000001432416201200243230ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/offline_empty_cache/000077500000000000000000000000001432416201200237525ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/offline_empty_cache/in000077700000000000000000000000001432416201200266502../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/offline_empty_cache/mod.rs000066400000000000000000000013501432416201200250760ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn offline_empty_cache() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("--offline my-package") .current_dir(cwd) .assert() .code(101) .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/offline_empty_cache/out/000077500000000000000000000000001432416201200245615ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/offline_empty_cache/out/Cargo.toml000066400000000000000000000001121432416201200265030ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/offline_empty_cache/stderr.log000066400000000000000000000001041432416201200257530ustar00rootroot00000000000000error: the crate `my-package` could not be found in registry index. 
cargo-0.66.0/tests/testsuite/cargo_add/offline_empty_cache/stdout.log000066400000000000000000000000001432416201200257650ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/optional/000077500000000000000000000000001432416201200216145ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/optional/in000077700000000000000000000000001432416201200245122../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/optional/mod.rs000066400000000000000000000013611432416201200227420ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn optional() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package1 my-package2@0.4.1 --optional") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/optional/out/000077500000000000000000000000001432416201200224235ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/optional/out/Cargo.toml000066400000000000000000000003101432416201200243450ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package1 = { version = "99999.0.0", optional = true } my-package2 = { version = "0.4.1", optional = true } cargo-0.66.0/tests/testsuite/cargo_add/optional/stderr.log000066400000000000000000000002341432416201200236210ustar00rootroot00000000000000 Updating `dummy-registry` index Adding my-package1 v99999.0.0 to optional dependencies. Adding my-package2 v0.4.1 to optional dependencies. 
cargo-0.66.0/tests/testsuite/cargo_add/optional/stdout.log000066400000000000000000000000001432416201200236270ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_default_features/000077500000000000000000000000001432416201200254175ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_default_features/in/000077500000000000000000000000001432416201200260255ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_default_features/in/Cargo.toml000066400000000000000000000002121432416201200277500ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package1 = "99999.0.0" my-package2 = "0.4.1" cargo-0.66.0/tests/testsuite/cargo_add/overwrite_default_features/in/src/000077500000000000000000000000001432416201200266145ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_default_features/in/src/lib.rs000066400000000000000000000000001432416201200277160ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_default_features/mod.rs000066400000000000000000000014131432416201200265430ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn overwrite_default_features() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package1 my-package2@0.4.1 --default-features") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_default_features/out/000077500000000000000000000000001432416201200262265ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_default_features/out/Cargo.toml000066400000000000000000000002121432416201200301510ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package1 = "99999.0.0" my-package2 = "0.4.1" cargo-0.66.0/tests/testsuite/cargo_add/overwrite_default_features/stderr.log000066400000000000000000000002121432416201200274200ustar00rootroot00000000000000 Updating `dummy-registry` index Adding my-package1 v99999.0.0 to dependencies. Adding my-package2 v0.4.1 to dependencies. cargo-0.66.0/tests/testsuite/cargo_add/overwrite_default_features/stdout.log000066400000000000000000000000001432416201200274320ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/000077500000000000000000000000001432416201200325505ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/in/000077500000000000000000000000001432416201200331565ustar00rootroot00000000000000Cargo.toml000066400000000000000000000003301432416201200350230ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/in[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package1 = { version = "99999.0.0", default-features = true } my-package2 = { version = "0.4.1", default-features = true } 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/in/src/000077500000000000000000000000001432416201200337455ustar00rootroot00000000000000lib.rs000066400000000000000000000000001432416201200347700ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/in/srccargo-0.66.0/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/mod.rs000066400000000000000000000014471432416201200337030ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn overwrite_default_features_with_no_default_features() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package1 my-package2@0.4.1 --no-default-features") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/out/000077500000000000000000000000001432416201200333575ustar00rootroot00000000000000Cargo.toml000066400000000000000000000003321432416201200352260ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features/out[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package1 = { version = "99999.0.0", default-features = false } my-package2 = { version = "0.4.1", default-features = false } 
stderr.log000066400000000000000000000002121432416201200344720ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_features Updating `dummy-registry` index Adding my-package1 v99999.0.0 to dependencies. Adding my-package2 v0.4.1 to dependencies. stdout.log000066400000000000000000000000001432416201200345040ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_default_features_with_no_default_featurescargo-0.66.0/tests/testsuite/cargo_add/overwrite_features/000077500000000000000000000000001432416201200237135ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_features/in/000077500000000000000000000000001432416201200243215ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_features/in/Cargo.toml000066400000000000000000000002251432416201200262500ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] your-face = { version = "99999.0.0", features = ["eyes"] } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_features/in/src/000077500000000000000000000000001432416201200251105ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_features/in/src/lib.rs000066400000000000000000000000001432416201200262120ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_features/mod.rs000066400000000000000000000013541432416201200250430ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn overwrite_features() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("your-face --features nose") .current_dir(cwd) .assert() .success() 
.stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_features/out/000077500000000000000000000000001432416201200245225ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_features/out/Cargo.toml000066400000000000000000000002351432416201200264520ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] your-face = { version = "99999.0.0", features = ["eyes", "nose"] } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_features/stderr.log000066400000000000000000000002771432416201200257270ustar00rootroot00000000000000 Updating `dummy-registry` index Adding your-face v99999.0.0 to dependencies. Features: + eyes + nose - ears - mouth cargo-0.66.0/tests/testsuite/cargo_add/overwrite_features/stdout.log000066400000000000000000000000001432416201200257260ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_git_with_path/000077500000000000000000000000001432416201200247275ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_git_with_path/in/000077500000000000000000000000001432416201200253355ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_git_with_path/in/dependency/000077500000000000000000000000001432416201200274535ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_git_with_path/in/dependency/Cargo.toml000066400000000000000000000001251432416201200314010ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture-dependency" version = "0.0.0" 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_git_with_path/in/dependency/src/000077500000000000000000000000001432416201200302425ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_git_with_path/in/dependency/src/lib.rs000066400000000000000000000000001432416201200313440ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_git_with_path/in/primary/000077500000000000000000000000001432416201200270205ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_git_with_path/in/primary/Cargo.toml000066400000000000000000000002731432416201200307520ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] cargo-list-test-fixture-dependency = { git = "git://git.git", branch = "main", optional = true } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_git_with_path/in/primary/src/000077500000000000000000000000001432416201200276075ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_git_with_path/in/primary/src/lib.rs000066400000000000000000000000001432416201200307110ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_git_with_path/mod.rs000066400000000000000000000014371432416201200260610ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn overwrite_git_with_path() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = project_root.join("primary"); snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("cargo-list-test-fixture-dependency --path ../dependency") .current_dir(&cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), 
&project_root); } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_git_with_path/out/000077500000000000000000000000001432416201200255365ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_git_with_path/out/dependency/000077500000000000000000000000001432416201200276545ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_git_with_path/out/dependency/Cargo.toml000066400000000000000000000001251432416201200316020ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture-dependency" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/overwrite_git_with_path/out/primary/000077500000000000000000000000001432416201200272215ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_git_with_path/out/primary/Cargo.toml000066400000000000000000000002761432416201200311560ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] cargo-list-test-fixture-dependency = { optional = true, path = "../dependency", version = "0.0.0" } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_git_with_path/stderr.log000066400000000000000000000001221432416201200267300ustar00rootroot00000000000000 Adding cargo-list-test-fixture-dependency (local) to optional dependencies. 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_git_with_path/stdout.log000066400000000000000000000000001432416201200267420ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_features_noop/000077500000000000000000000000001432416201200264705ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/000077500000000000000000000000001432416201200270765ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/Cargo.toml000066400000000000000000000001731432416201200310270ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] [workspace.dependencies] foo = { version = "0.0.0", path = "./dependency"}cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/dependency/000077500000000000000000000000001432416201200312145ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/dependency/Cargo.toml000066400000000000000000000000761432416201200331470ustar00rootroot00000000000000[package] name = "foo" version = "0.0.0" [features] test = []cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/dependency/src/000077500000000000000000000000001432416201200320035ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/dependency/src/lib.rs000066400000000000000000000000001432416201200331050ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/primary/000077500000000000000000000000001432416201200305615ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/primary/Cargo.toml000066400000000000000000000001501432416201200325050ustar00rootroot00000000000000[package] name = "bar" version = "0.0.0" [dependencies] foo = { workspace = true, features = ["test"] 
}cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/primary/src/000077500000000000000000000000001432416201200313505ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_features_noop/in/primary/src/lib.rs000066400000000000000000000000001432416201200324520ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_features_noop/mod.rs000066400000000000000000000012641432416201200276200ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn overwrite_inherit_features_noop() { let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .args(["foo", "-p", "bar"]) .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_features_noop/out/000077500000000000000000000000001432416201200272775ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_features_noop/out/Cargo.toml000066400000000000000000000001731432416201200312300ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] [workspace.dependencies] foo = { version = "0.0.0", path = "./dependency"}cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_features_noop/out/dependency/000077500000000000000000000000001432416201200314155ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_features_noop/out/dependency/Cargo.toml000066400000000000000000000000761432416201200333500ustar00rootroot00000000000000[package] name = "foo" version = "0.0.0" [features] test = 
[]cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_features_noop/out/primary/000077500000000000000000000000001432416201200307625ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_features_noop/out/primary/Cargo.toml000066400000000000000000000001511432416201200327070ustar00rootroot00000000000000[package] name = "bar" version = "0.0.0" [dependencies] foo = { workspace = true, features = ["test"] } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_features_noop/stderr.log000066400000000000000000000001461432416201200304770ustar00rootroot00000000000000 Adding foo (workspace) to dependencies. Features as of v0.0.0: + test cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_features_noop/stdout.log000066400000000000000000000000001432416201200305030ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_noop/000077500000000000000000000000001432416201200245725ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_noop/in/000077500000000000000000000000001432416201200252005ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_noop/in/Cargo.toml000066400000000000000000000001731432416201200271310ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] [workspace.dependencies] foo = { version = "0.0.0", path = "./dependency"}cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_noop/in/dependency/000077500000000000000000000000001432416201200273165ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_noop/in/dependency/Cargo.toml000066400000000000000000000000511432416201200312420ustar00rootroot00000000000000[package] name = "foo" version = "0.0.0" 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_noop/in/dependency/src/000077500000000000000000000000001432416201200301055ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_noop/in/dependency/src/lib.rs000066400000000000000000000000001432416201200312070ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_noop/in/primary/000077500000000000000000000000001432416201200266635ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_noop/in/primary/Cargo.toml000066400000000000000000000001151432416201200306100ustar00rootroot00000000000000[package] name = "bar" version = "0.0.0" [dependencies] foo.workspace = truecargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_noop/in/primary/src/000077500000000000000000000000001432416201200274525ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_noop/in/primary/src/lib.rs000066400000000000000000000000001432416201200305540ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_noop/mod.rs000066400000000000000000000013451432416201200257220ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn overwrite_inherit_noop() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .args(["foo", "-p", "bar"]) .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_noop/out/000077500000000000000000000000001432416201200254015ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_noop/out/Cargo.toml000066400000000000000000000001731432416201200273320ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] [workspace.dependencies] foo = { version = "0.0.0", path = "./dependency"}cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_noop/out/dependency/000077500000000000000000000000001432416201200275175ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_noop/out/dependency/Cargo.toml000066400000000000000000000000511432416201200314430ustar00rootroot00000000000000[package] name = "foo" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_noop/out/primary/000077500000000000000000000000001432416201200270645ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_noop/out/primary/Cargo.toml000066400000000000000000000001161432416201200310120ustar00rootroot00000000000000[package] name = "bar" version = "0.0.0" [dependencies] foo.workspace = true cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_noop/stderr.log000066400000000000000000000000561432416201200266010ustar00rootroot00000000000000 Adding foo (workspace) to dependencies. 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_noop/stdout.log000066400000000000000000000000001432416201200266050ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/000077500000000000000000000000001432416201200264775ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/000077500000000000000000000000001432416201200271055ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/Cargo.toml000066400000000000000000000001731432416201200310360ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] [workspace.dependencies] foo = { version = "0.0.0", path = "./dependency"}cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/dependency/000077500000000000000000000000001432416201200312235ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/dependency/Cargo.toml000066400000000000000000000000511432416201200331470ustar00rootroot00000000000000[package] name = "foo" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/dependency/src/000077500000000000000000000000001432416201200320125ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/dependency/src/lib.rs000066400000000000000000000000001432416201200331140ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/primary/000077500000000000000000000000001432416201200305705ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/primary/Cargo.toml000066400000000000000000000001441432416201200325170ustar00rootroot00000000000000[package] name = "bar" version = "0.0.0" [dependencies] foo = { workspace = true, optional = true 
}cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/primary/src/000077500000000000000000000000001432416201200313575ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/in/primary/src/lib.rs000066400000000000000000000000001432416201200324610ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/mod.rs000066400000000000000000000013561432416201200276310ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn overwrite_inherit_optional_noop() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .args(["foo", "-p", "bar"]) .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/out/000077500000000000000000000000001432416201200273065ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/out/Cargo.toml000066400000000000000000000001731432416201200312370ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] [workspace.dependencies] foo = { version = "0.0.0", path = "./dependency"}cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/out/dependency/000077500000000000000000000000001432416201200314245ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/out/dependency/Cargo.toml000066400000000000000000000000511432416201200333500ustar00rootroot00000000000000[package] name = "foo" version = "0.0.0" 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/out/primary/000077500000000000000000000000001432416201200307715ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/out/primary/Cargo.toml000066400000000000000000000001451432416201200327210ustar00rootroot00000000000000[package] name = "bar" version = "0.0.0" [dependencies] foo = { workspace = true, optional = true } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/stderr.log000066400000000000000000000000671432416201200305100ustar00rootroot00000000000000 Adding foo (workspace) to optional dependencies. cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/stdout.log000066400000000000000000000000001432416201200305120ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inline_features/000077500000000000000000000000001432416201200252515ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inline_features/in/000077500000000000000000000000001432416201200256575ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inline_features/in/Cargo.toml000066400000000000000000000002251432416201200276060ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] your-face = { version = "99999.0.0", features = ["eyes"] } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inline_features/in/src/000077500000000000000000000000001432416201200264465ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inline_features/in/src/lib.rs000066400000000000000000000000001432416201200275500ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inline_features/mod.rs000066400000000000000000000015061432416201200264000ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use 
crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn overwrite_inline_features() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line( "unrelateed-crate your-face --features your-face/nose,your-face/mouth -Fyour-face/ears", ) .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inline_features/out/000077500000000000000000000000001432416201200260605ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inline_features/out/Cargo.toml000066400000000000000000000003151432416201200300070ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] unrelateed-crate = "99999.0.0" your-face = { version = "99999.0.0", features = ["eyes", "nose", "mouth", "ears"] } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inline_features/stderr.log000066400000000000000000000003711432416201200272600ustar00rootroot00000000000000 Updating `dummy-registry` index Adding unrelateed-crate v99999.0.0 to dependencies. Adding your-face v99999.0.0 to dependencies. 
Features: + ears + eyes + mouth + nose cargo-0.66.0/tests/testsuite/cargo_add/overwrite_inline_features/stdout.log000066400000000000000000000000001432416201200272640ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_dev_noop/000077500000000000000000000000001432416201200247065ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_dev_noop/in/000077500000000000000000000000001432416201200253145ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_dev_noop/in/Cargo.toml000066400000000000000000000004001432416201200272360ustar00rootroot00000000000000[workspace] exclude = ["dependency"] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dev-dependencies] your-face = { version = "0.0.0", path = "dependency", default-features = false, features = ["nose", "mouth"], registry = "alternative" } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_dev_noop/in/dependency/000077500000000000000000000000001432416201200274325ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_dev_noop/in/dependency/Cargo.toml000066400000000000000000000001351432416201200313610ustar00rootroot00000000000000[workspace] [package] name = "your-face" version = "0.0.0" [features] mouth = [] nose = [] 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_dev_noop/in/dependency/src/000077500000000000000000000000001432416201200302215ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_dev_noop/in/dependency/src/lib.rs000066400000000000000000000000001432416201200313230ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_dev_noop/in/src/000077500000000000000000000000001432416201200261035ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_dev_noop/in/src/lib.rs000066400000000000000000000000001432416201200272050ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_dev_noop/mod.rs000066400000000000000000000013571432416201200260410ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_alt_registry; use cargo_test_support::curr_dir; #[cargo_test] fn overwrite_name_dev_noop() { init_alt_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("your-face --dev") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_dev_noop/out/000077500000000000000000000000001432416201200255155ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_dev_noop/out/Cargo.toml000066400000000000000000000004001432416201200274370ustar00rootroot00000000000000[workspace] exclude = ["dependency"] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dev-dependencies] your-face = { version = "0.0.0", path = "dependency", default-features = false, features = ["nose", "mouth"], registry = 
"alternative" } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_dev_noop/out/dependency/000077500000000000000000000000001432416201200276335ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_dev_noop/out/dependency/Cargo.toml000066400000000000000000000001351432416201200315620ustar00rootroot00000000000000[workspace] [package] name = "your-face" version = "0.0.0" [features] mouth = [] nose = [] cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_dev_noop/stderr.log000066400000000000000000000001641432416201200267150ustar00rootroot00000000000000 Adding your-face (local) to dev-dependencies. Features: + mouth + nose cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_dev_noop/stdout.log000066400000000000000000000000001432416201200267210ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_noop/000077500000000000000000000000001432416201200240505ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_noop/in/000077500000000000000000000000001432416201200244565ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_noop/in/Cargo.toml000066400000000000000000000004151432416201200264060ustar00rootroot00000000000000[workspace] exclude = ["dependency"] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] your-face = { version = "0.0.0", path = "dependency", optional = true, default-features = false, features = ["nose", "mouth"], registry = "alternative" } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_noop/in/dependency/000077500000000000000000000000001432416201200265745ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_noop/in/dependency/Cargo.toml000066400000000000000000000001351432416201200305230ustar00rootroot00000000000000[workspace] [package] name = "your-face" version = "0.0.0" [features] mouth = [] nose = [] 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_noop/in/dependency/src/000077500000000000000000000000001432416201200273635ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_noop/in/dependency/src/lib.rs000066400000000000000000000000001432416201200304650ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_noop/in/src/000077500000000000000000000000001432416201200252455ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_noop/in/src/lib.rs000066400000000000000000000000001432416201200263470ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_noop/mod.rs000066400000000000000000000013451432416201200252000ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_alt_registry; use cargo_test_support::curr_dir; #[cargo_test] fn overwrite_name_noop() { init_alt_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("your-face") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_noop/out/000077500000000000000000000000001432416201200246575ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_noop/out/Cargo.toml000066400000000000000000000004151432416201200266070ustar00rootroot00000000000000[workspace] exclude = ["dependency"] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] your-face = { version = "0.0.0", path = "dependency", optional = true, default-features = false, features = ["nose", "mouth"], registry = "alternative" } 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_noop/out/dependency/000077500000000000000000000000001432416201200267755ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_noop/out/dependency/Cargo.toml000066400000000000000000000001351432416201200307240ustar00rootroot00000000000000[workspace] [package] name = "your-face" version = "0.0.0" [features] mouth = [] nose = [] cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_noop/stderr.log000066400000000000000000000001711432416201200260550ustar00rootroot00000000000000 Adding your-face (local) to optional dependencies. Features: + mouth + nose cargo-0.66.0/tests/testsuite/cargo_add/overwrite_name_noop/stdout.log000066400000000000000000000000001432416201200260630ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_default_features/000077500000000000000000000000001432416201200261135ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_default_features/in/000077500000000000000000000000001432416201200265215ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_default_features/in/Cargo.toml000066400000000000000000000002121432416201200304440ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package1 = "99999.0.0" my-package2 = "0.4.1" cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_default_features/in/src/000077500000000000000000000000001432416201200273105ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_default_features/in/src/lib.rs000066400000000000000000000000001432416201200304120ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_default_features/mod.rs000066400000000000000000000014211432416201200272360ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use 
crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn overwrite_no_default_features() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package1 my-package2@0.4.1 --no-default-features") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_default_features/out/000077500000000000000000000000001432416201200267225ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_default_features/out/Cargo.toml000066400000000000000000000003321432416201200306500ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package1 = { version = "99999.0.0", default-features = false } my-package2 = { version = "0.4.1", default-features = false } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_default_features/stderr.log000066400000000000000000000002121432416201200301140ustar00rootroot00000000000000 Updating `dummy-registry` index Adding my-package1 v99999.0.0 to dependencies. Adding my-package2 v0.4.1 to dependencies. 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_default_features/stdout.log000066400000000000000000000000001432416201200301260ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/000077500000000000000000000000001432416201200325505ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/in/000077500000000000000000000000001432416201200331565ustar00rootroot00000000000000Cargo.toml000066400000000000000000000003321432416201200350250ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/in[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package1 = { version = "99999.0.0", default-features = false } my-package2 = { version = "0.4.1", default-features = false } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/in/src/000077500000000000000000000000001432416201200337455ustar00rootroot00000000000000lib.rs000066400000000000000000000000001432416201200347700ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/in/srccargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/mod.rs000066400000000000000000000014441432416201200337000ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn overwrite_no_default_features_with_default_features() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package1 my-package2@0.4.1 --default-features") .current_dir(cwd) .assert() .success() 
.stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/out/000077500000000000000000000000001432416201200333575ustar00rootroot00000000000000Cargo.toml000066400000000000000000000002461432416201200352320ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features/out[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package1 = { version = "99999.0.0" } my-package2 = { version = "0.4.1" } stderr.log000066400000000000000000000002121432416201200344720ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_features Updating `dummy-registry` index Adding my-package1 v99999.0.0 to dependencies. Adding my-package2 v0.4.1 to dependencies. 
stdout.log000066400000000000000000000000001432416201200345040ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_default_features_with_default_featurescargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_optional/000077500000000000000000000000001432416201200244165ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_optional/in/000077500000000000000000000000001432416201200250245ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_optional/in/Cargo.toml000066400000000000000000000002121432416201200267470ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package1 = "99999.0.0" my-package2 = "0.4.1" cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_optional/in/src/000077500000000000000000000000001432416201200256135ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_optional/in/src/lib.rs000066400000000000000000000000001432416201200267150ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_optional/mod.rs000066400000000000000000000014011432416201200255370ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn overwrite_no_optional() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package1 my-package2@0.4.1 --no-optional") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_optional/out/000077500000000000000000000000001432416201200252255ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_optional/out/Cargo.toml000066400000000000000000000002121432416201200271500ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package1 = "99999.0.0" my-package2 = "0.4.1" cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_optional/stderr.log000066400000000000000000000002121432416201200264170ustar00rootroot00000000000000 Updating `dummy-registry` index Adding my-package1 v99999.0.0 to dependencies. Adding my-package2 v0.4.1 to dependencies. cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_optional/stdout.log000066400000000000000000000000001432416201200264310ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/000077500000000000000000000000001432416201200273565ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/in/000077500000000000000000000000001432416201200277645ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/in/Cargo.toml000066400000000000000000000003121432416201200317100ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package1 = { version = "99999.0.0", optional = false } my-package2 = { version = "0.4.1", optional = false } 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/in/src/000077500000000000000000000000001432416201200305535ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/in/src/lib.rs000066400000000000000000000000001432416201200316550ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/mod.rs000066400000000000000000000014141432416201200305030ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn overwrite_no_optional_with_optional() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package1 my-package2@0.4.1 --optional") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/out/000077500000000000000000000000001432416201200301655ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/out/Cargo.toml000066400000000000000000000003101432416201200321070ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package1 = { version = "99999.0.0", optional = true } my-package2 = { version = "0.4.1", optional = true } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/stderr.log000066400000000000000000000002341432416201200313630ustar00rootroot00000000000000 Updating `dummy-registry` index Adding my-package1 v99999.0.0 to optional dependencies. 
Adding my-package2 v0.4.1 to optional dependencies. cargo-0.66.0/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/stdout.log000066400000000000000000000000001432416201200313710ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_optional/000077500000000000000000000000001432416201200237225ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_optional/in/000077500000000000000000000000001432416201200243305ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_optional/in/Cargo.toml000066400000000000000000000002121432416201200262530ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package1 = "99999.0.0" my-package2 = "0.4.1" cargo-0.66.0/tests/testsuite/cargo_add/overwrite_optional/in/src/000077500000000000000000000000001432416201200251175ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_optional/in/src/lib.rs000066400000000000000000000000001432416201200262210ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_optional/mod.rs000066400000000000000000000013731432416201200250530ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn overwrite_optional() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package1 my-package2@0.4.1 --optional") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_optional/out/000077500000000000000000000000001432416201200245315ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_optional/out/Cargo.toml000066400000000000000000000003101432416201200264530ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package1 = { version = "99999.0.0", optional = true } my-package2 = { version = "0.4.1", optional = true } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_optional/stderr.log000066400000000000000000000002341432416201200257270ustar00rootroot00000000000000 Updating `dummy-registry` index Adding my-package1 v99999.0.0 to optional dependencies. Adding my-package2 v0.4.1 to optional dependencies. cargo-0.66.0/tests/testsuite/cargo_add/overwrite_optional/stdout.log000066400000000000000000000000001432416201200257350ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/000077500000000000000000000000001432416201200273565ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/in/000077500000000000000000000000001432416201200277645ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/in/Cargo.toml000066400000000000000000000004051432416201200317130ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [features] default = ["your-face"] other = ["your-face/nose"] [dependencies] your-face = { version = "99999.0.0", optional = true } my-package2 = { version = "0.4.1", optional = true } 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/in/src/000077500000000000000000000000001432416201200305535ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/in/src/lib.rs000066400000000000000000000000001432416201200316550ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/mod.rs000066400000000000000000000014151432416201200305040ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn overwrite_optional_with_no_optional() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("your-face my-package2@0.4.1 --no-optional") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/out/000077500000000000000000000000001432416201200301655ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/out/Cargo.toml000066400000000000000000000003301432416201200321110ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [features] default = [] other = ["your-face/nose"] [dependencies] your-face = { version = "99999.0.0" } my-package2 = { version = "0.4.1" } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/stderr.log000066400000000000000000000003601432416201200313630ustar00rootroot00000000000000 Updating `dummy-registry` index Adding your-face v99999.0.0 to dependencies. 
Features: - ears - eyes - mouth - nose Adding my-package2 v0.4.1 to dependencies. cargo-0.66.0/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/stdout.log000066400000000000000000000000001432416201200313710ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_noop/000077500000000000000000000000001432416201200240645ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_noop/in/000077500000000000000000000000001432416201200244725ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_noop/in/Cargo.toml000066400000000000000000000004151432416201200264220ustar00rootroot00000000000000[workspace] exclude = ["dependency"] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] your-face = { version = "0.0.0", path = "dependency", optional = true, default-features = false, features = ["nose", "mouth"], registry = "alternative" } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_noop/in/dependency/000077500000000000000000000000001432416201200266105ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_noop/in/dependency/Cargo.toml000066400000000000000000000001351432416201200305370ustar00rootroot00000000000000[workspace] [package] name = "your-face" version = "0.0.0" [features] mouth = [] nose = [] 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_noop/in/dependency/src/000077500000000000000000000000001432416201200273775ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_noop/in/dependency/src/lib.rs000066400000000000000000000000001432416201200305010ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_noop/in/src/000077500000000000000000000000001432416201200252615ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_noop/in/src/lib.rs000066400000000000000000000000001432416201200263630ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_noop/mod.rs000066400000000000000000000013711432416201200252130ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_alt_registry; use cargo_test_support::curr_dir; #[cargo_test] fn overwrite_path_noop() { init_alt_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("your-face --path ./dependency") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_noop/out/000077500000000000000000000000001432416201200246735ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_noop/out/Cargo.toml000066400000000000000000000004151432416201200266230ustar00rootroot00000000000000[workspace] exclude = ["dependency"] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] your-face = { version = "0.0.0", path = "dependency", optional = true, default-features = false, features = ["nose", "mouth"], registry = 
"alternative" } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_noop/out/dependency/000077500000000000000000000000001432416201200270115ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_noop/out/dependency/Cargo.toml000066400000000000000000000001351432416201200307400ustar00rootroot00000000000000[workspace] [package] name = "your-face" version = "0.0.0" [features] mouth = [] nose = [] cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_noop/stderr.log000066400000000000000000000001711432416201200260710ustar00rootroot00000000000000 Adding your-face (local) to optional dependencies. Features: + mouth + nose cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_noop/stdout.log000066400000000000000000000000001432416201200260770ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_with_version/000077500000000000000000000000001432416201200256315ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_with_version/in/000077500000000000000000000000001432416201200262375ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_with_version/in/dependency/000077500000000000000000000000001432416201200303555ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_with_version/in/dependency/Cargo.toml000066400000000000000000000001251432416201200323030ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture-dependency" version = "0.0.0" 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_with_version/in/dependency/src/000077500000000000000000000000001432416201200311445ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_with_version/in/dependency/src/lib.rs000066400000000000000000000000001432416201200322460ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_with_version/in/primary/000077500000000000000000000000001432416201200277225ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_with_version/in/primary/Cargo.toml000066400000000000000000000002531432416201200316520ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] cargo-list-test-fixture-dependency = { optional = true, path = "../dependency" } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_with_version/in/primary/src/000077500000000000000000000000001432416201200305115ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_with_version/in/primary/src/lib.rs000066400000000000000000000000001432416201200316130ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_with_version/mod.rs000066400000000000000000000014231432416201200267560ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn overwrite_path_with_version() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = project_root.join("primary"); snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("cargo-list-test-fixture-dependency@20.0") .current_dir(&cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), 
&project_root); } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_with_version/out/000077500000000000000000000000001432416201200264405ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_with_version/out/dependency/000077500000000000000000000000001432416201200305565ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_with_version/out/dependency/Cargo.toml000066400000000000000000000001251432416201200325040ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture-dependency" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_with_version/out/primary/000077500000000000000000000000001432416201200301235ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_with_version/out/primary/Cargo.toml000066400000000000000000000002451432416201200320540ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] cargo-list-test-fixture-dependency = { optional = true, version = "20.0" } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_with_version/stderr.log000066400000000000000000000001641432416201200276400ustar00rootroot00000000000000 Updating `dummy-registry` index Adding cargo-list-test-fixture-dependency v20.0 to optional dependencies. 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_path_with_version/stdout.log000066400000000000000000000000001432416201200276440ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_preserves_inline_table/000077500000000000000000000000001432416201200266205ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_preserves_inline_table/in/000077500000000000000000000000001432416201200272265ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_preserves_inline_table/in/Cargo.toml000066400000000000000000000002331432416201200311540ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] your-face={version="99999.0.0",features=["eyes"]} # Hello world cargo-0.66.0/tests/testsuite/cargo_add/overwrite_preserves_inline_table/in/src/000077500000000000000000000000001432416201200300155ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_preserves_inline_table/in/src/lib.rs000066400000000000000000000000001432416201200311170ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_preserves_inline_table/mod.rs000066400000000000000000000013721432416201200277500ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn overwrite_preserves_inline_table() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("your-face --features nose") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_preserves_inline_table/out/000077500000000000000000000000001432416201200274275ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_preserves_inline_table/out/Cargo.toml000066400000000000000000000002521432416201200313560ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] your-face={ version = "99999.0.0", features = ["eyes", "nose"] } # Hello world cargo-0.66.0/tests/testsuite/cargo_add/overwrite_preserves_inline_table/stderr.log000066400000000000000000000002771432416201200306340ustar00rootroot00000000000000 Updating `dummy-registry` index Adding your-face v99999.0.0 to dependencies. Features: + eyes + nose - ears - mouth cargo-0.66.0/tests/testsuite/cargo_add/overwrite_preserves_inline_table/stdout.log000066400000000000000000000000001432416201200306330ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/000077500000000000000000000000001432416201200264225ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/in/000077500000000000000000000000001432416201200270305ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/in/Cargo.toml000066400000000000000000000002451432416201200307610ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] a1 = { package = "versioned-package", version = "0.1.1", optional = true } 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/in/src/000077500000000000000000000000001432416201200276175ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/in/src/lib.rs000066400000000000000000000000001432416201200307210ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/mod.rs000066400000000000000000000013611432416201200275500ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn overwrite_rename_with_no_rename() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("versioned-package") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/out/000077500000000000000000000000001432416201200272315ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/out/Cargo.toml000066400000000000000000000003051432416201200311570ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] a1 = { package = "versioned-package", version = "0.1.1", optional = true } versioned-package = "99999.0.0" cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/stderr.log000066400000000000000000000001371432416201200304310ustar00rootroot00000000000000 Updating `dummy-registry` index Adding versioned-package v99999.0.0 to dependencies. 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_no_rename/stdout.log000066400000000000000000000000001432416201200304350ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_rename/000077500000000000000000000000001432416201200257265ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_rename/in/000077500000000000000000000000001432416201200263345ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_rename/in/Cargo.toml000066400000000000000000000002451432416201200302650ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] a1 = { package = "versioned-package", version = "0.1.1", optional = true } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_rename/in/src/000077500000000000000000000000001432416201200271235ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_rename/in/src/lib.rs000066400000000000000000000000001432416201200302250ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_rename/mod.rs000066400000000000000000000013721432416201200270560ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn overwrite_rename_with_rename() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("versioned-package --rename a2") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_rename/out/000077500000000000000000000000001432416201200265355ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_rename/out/Cargo.toml000066400000000000000000000003431432416201200304650ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] a1 = { package = "versioned-package", version = "0.1.1", optional = true } a2 = { version = "99999.0.0", package = "versioned-package" } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_rename/stderr.log000066400000000000000000000001371432416201200277350ustar00rootroot00000000000000 Updating `dummy-registry` index Adding versioned-package v99999.0.0 to dependencies. cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_rename/stdout.log000066400000000000000000000000001432416201200277410ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/000077500000000000000000000000001432416201200267615ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/in/000077500000000000000000000000001432416201200273675ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/in/Cargo.toml000066400000000000000000000002451432416201200313200ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] a1 = { package = "versioned-package", version = "0.1.1", optional = true } 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/in/src/000077500000000000000000000000001432416201200301565ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/in/src/lib.rs000066400000000000000000000000001432416201200312600ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/mod.rs000066400000000000000000000013771432416201200301160ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn overwrite_rename_with_rename_noop() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("versioned-package --rename a1") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/out/000077500000000000000000000000001432416201200275705ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/out/Cargo.toml000066400000000000000000000002451432416201200315210ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] a1 = { package = "versioned-package", version = "0.1.1", optional = true } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/stderr.log000066400000000000000000000001441432416201200307660ustar00rootroot00000000000000 Updating `dummy-registry` index Adding versioned-package v0.1.1 to optional dependencies. 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/stdout.log000066400000000000000000000000001432416201200307740ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_git/000077500000000000000000000000001432416201200254605ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_git/in/000077500000000000000000000000001432416201200260665ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_git/in/Cargo.toml000066400000000000000000000002251432416201200300150ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] versioned-package = { version = "0.1.1", optional = true } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_git/in/src/000077500000000000000000000000001432416201200266555ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_git/in/src/lib.rs000066400000000000000000000000001432416201200277570ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_git/mod.rs000066400000000000000000000021321432416201200266030ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn overwrite_version_with_git() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; let git_dep = cargo_test_support::git::new("versioned-package", |project| { project .file( "Cargo.toml", &cargo_test_support::basic_manifest("versioned-package", "0.3.0+versioned-package"), ) .file("src/lib.rs", "") }); let git_url = git_dep.url().to_string(); snapbox::cmd::Command::cargo_ui() .arg("add") .args(["versioned-package", "--git", &git_url]) .current_dir(cwd) .assert() .success() 
.stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_git/out/000077500000000000000000000000001432416201200262675ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_git/out/Cargo.toml000066400000000000000000000002721432416201200302200ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] versioned-package = { version = "0.3.0", optional = true, git = "[ROOTURL]/versioned-package" } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_git/stderr.log000066400000000000000000000002631432416201200274670ustar00rootroot00000000000000 Updating git repository `[ROOTURL]/versioned-package` Adding versioned-package (git) to optional dependencies. Updating git repository `[ROOTURL]/versioned-package` cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_git/stdout.log000066400000000000000000000000001432416201200274730ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_path/000077500000000000000000000000001432416201200256315ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_path/in/000077500000000000000000000000001432416201200262375ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_path/in/dependency/000077500000000000000000000000001432416201200303555ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_path/in/dependency/Cargo.toml000066400000000000000000000001251432416201200323030ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture-dependency" version = "0.0.0" 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_path/in/dependency/src/000077500000000000000000000000001432416201200311445ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_path/in/dependency/src/lib.rs000066400000000000000000000000001432416201200322460ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_path/in/primary/000077500000000000000000000000001432416201200277225ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_path/in/primary/Cargo.toml000066400000000000000000000002461432416201200316540ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] cargo-list-test-fixture-dependency = { version = "0.1.1", optional = true } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_path/in/primary/src/000077500000000000000000000000001432416201200305115ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_path/in/primary/src/lib.rs000066400000000000000000000000001432416201200316130ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_path/mod.rs000066400000000000000000000014431432416201200267600ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn overwrite_version_with_path() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = project_root.join("primary"); snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("cargo-list-test-fixture-dependency --path ../dependency") .current_dir(&cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); 
assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_path/out/000077500000000000000000000000001432416201200264405ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_path/out/dependency/000077500000000000000000000000001432416201200305565ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_path/out/dependency/Cargo.toml000066400000000000000000000001251432416201200325040ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture-dependency" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_path/out/primary/000077500000000000000000000000001432416201200301235ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_path/out/primary/Cargo.toml000066400000000000000000000002761432416201200320600ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] cargo-list-test-fixture-dependency = { version = "0.0.0", optional = true, path = "../dependency" } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_path/stderr.log000066400000000000000000000001221432416201200276320ustar00rootroot00000000000000 Adding cargo-list-test-fixture-dependency (local) to optional dependencies. 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_version_with_path/stdout.log000066400000000000000000000000001432416201200276440ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_with_rename/000077500000000000000000000000001432416201200243775ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_with_rename/in/000077500000000000000000000000001432416201200250055ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_with_rename/in/Cargo.toml000066400000000000000000000002251432416201200267340ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] versioned-package = { version = "0.1.1", optional = true } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_with_rename/in/src/000077500000000000000000000000001432416201200255745ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_with_rename/in/src/lib.rs000066400000000000000000000000001432416201200266760ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_with_rename/mod.rs000066400000000000000000000013701432416201200255250ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn overwrite_with_rename() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("versioned-package --rename renamed") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_with_rename/out/000077500000000000000000000000001432416201200252065ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_with_rename/out/Cargo.toml000066400000000000000000000003301432416201200271320ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] renamed = { version = "99999.0.0", package = "versioned-package" } versioned-package = { version = "0.1.1", optional = true } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_with_rename/stderr.log000066400000000000000000000001371432416201200264060ustar00rootroot00000000000000 Updating `dummy-registry` index Adding versioned-package v99999.0.0 to dependencies. cargo-0.66.0/tests/testsuite/cargo_add/overwrite_with_rename/stdout.log000066400000000000000000000000001432416201200264120ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep/000077500000000000000000000000001432416201200247235ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep/in/000077500000000000000000000000001432416201200253315ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep/in/Cargo.toml000066400000000000000000000001741432416201200272630ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] [workspace.dependencies] foo = { version = "0.0.0", path = "./dependency" }cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep/in/dependency/000077500000000000000000000000001432416201200274475ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep/in/dependency/Cargo.toml000066400000000000000000000000511432416201200313730ustar00rootroot00000000000000[package] name = "foo" version = "0.0.0" 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep/in/dependency/src/000077500000000000000000000000001432416201200302365ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep/in/dependency/src/lib.rs000066400000000000000000000000001432416201200313400ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep/in/primary/000077500000000000000000000000001432416201200270145ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep/in/primary/Cargo.toml000066400000000000000000000001151432416201200307410ustar00rootroot00000000000000[package] name = "bar" version = "0.0.0" [dependencies] foo.workspace = truecargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep/in/primary/src/000077500000000000000000000000001432416201200276035ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep/in/primary/src/lib.rs000066400000000000000000000000001432416201200307050ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep/mod.rs000066400000000000000000000014001432416201200260430ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn overwrite_workspace_dep() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .args(["foo", "--path", "./dependency", "-p", "bar"]) .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep/out/000077500000000000000000000000001432416201200255325ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep/out/Cargo.toml000066400000000000000000000001741432416201200274640ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] [workspace.dependencies] foo = { version = "0.0.0", path = "./dependency" }cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep/out/dependency/000077500000000000000000000000001432416201200276505ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep/out/dependency/Cargo.toml000066400000000000000000000000511432416201200315740ustar00rootroot00000000000000[package] name = "foo" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep/out/primary/000077500000000000000000000000001432416201200272155ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep/out/primary/Cargo.toml000066400000000000000000000001551432416201200311460ustar00rootroot00000000000000[package] name = "bar" version = "0.0.0" [dependencies] foo = { version = "0.0.0", path = "../dependency" } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep/stderr.log000066400000000000000000000000521432416201200267260ustar00rootroot00000000000000 Adding foo (local) to dependencies. 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep/stdout.log000066400000000000000000000000001432416201200267360ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep_features/000077500000000000000000000000001432416201200266215ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/000077500000000000000000000000001432416201200272275ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/Cargo.toml000066400000000000000000000001741432416201200311610ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] [workspace.dependencies] foo = { version = "0.0.0", path = "./dependency" }cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/dependency/000077500000000000000000000000001432416201200313455ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/dependency/Cargo.toml000066400000000000000000000005011432416201200332710ustar00rootroot00000000000000[package] name = "foo" version = "0.0.0" [features] default-base = [] default-test-base = [] default-merge-base = [] default = ["default-base", "default-test-base", "default-merge-base"] test-base = [] test = ["test-base", "default-test-base"] merge-base = [] merge = ["merge-base", "default-merge-base"] unrelated = [] 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/dependency/src/000077500000000000000000000000001432416201200321345ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/dependency/src/lib.rs000066400000000000000000000000001432416201200332360ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/primary/000077500000000000000000000000001432416201200307125ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/primary/Cargo.toml000066400000000000000000000001501432416201200326360ustar00rootroot00000000000000[package] name = "bar" version = "0.0.0" [dependencies] foo = { workspace = true, features = ["test"] }cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/primary/src/000077500000000000000000000000001432416201200315015ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep_features/in/primary/src/lib.rs000066400000000000000000000000001432416201200326030ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep_features/mod.rs000066400000000000000000000014111432416201200277430ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn overwrite_workspace_dep_features() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .args(["foo", "--path", "./dependency", "-p", "bar"]) .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep_features/out/000077500000000000000000000000001432416201200274305ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep_features/out/Cargo.toml000066400000000000000000000001741432416201200313620ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] [workspace.dependencies] foo = { version = "0.0.0", path = "./dependency" }cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep_features/out/dependency/000077500000000000000000000000001432416201200315465ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep_features/out/dependency/Cargo.toml000066400000000000000000000005011432416201200334720ustar00rootroot00000000000000[package] name = "foo" version = "0.0.0" [features] default-base = [] default-test-base = [] default-merge-base = [] default = ["default-base", "default-test-base", "default-merge-base"] test-base = [] test = ["test-base", "default-test-base"] merge-base = [] merge = ["merge-base", "default-merge-base"] unrelated = [] cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep_features/out/primary/000077500000000000000000000000001432416201200311135ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep_features/out/primary/Cargo.toml000066400000000000000000000002021432416201200330350ustar00rootroot00000000000000[package] name = "bar" version = "0.0.0" [dependencies] foo = { features = ["test"], path = "../dependency", version = "0.0.0" } cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep_features/stderr.log000066400000000000000000000004251432416201200306300ustar00rootroot00000000000000 Adding foo (local) to dependencies. 
Features: + default-base + default-merge-base + default-test-base + test + test-base - merge - merge-base - unrelated cargo-0.66.0/tests/testsuite/cargo_add/overwrite_workspace_dep_features/stdout.log000066400000000000000000000000001432416201200306340ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path/000077500000000000000000000000001432416201200207235ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path/in/000077500000000000000000000000001432416201200213315ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path/in/dependency/000077500000000000000000000000001432416201200234475ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path/in/dependency/Cargo.toml000066400000000000000000000001251432416201200253750ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture-dependency" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/path/in/dependency/src/000077500000000000000000000000001432416201200242365ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path/in/dependency/src/lib.rs000066400000000000000000000000001432416201200253400ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path/in/primary/000077500000000000000000000000001432416201200230145ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path/in/primary/Cargo.toml000066400000000000000000000001121432416201200247360ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/path/in/primary/src/000077500000000000000000000000001432416201200236035ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path/in/primary/src/lib.rs000066400000000000000000000000001432416201200247050ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path/mod.rs000066400000000000000000000014141432416201200220500ustar00rootroot00000000000000use 
cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn path() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = project_root.join("primary"); snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("cargo-list-test-fixture-dependency --path ../dependency") .current_dir(&cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/path/out/000077500000000000000000000000001432416201200215325ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path/out/dependency/000077500000000000000000000000001432416201200236505ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path/out/dependency/Cargo.toml000066400000000000000000000001251432416201200255760ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture-dependency" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/path/out/primary/000077500000000000000000000000001432416201200232155ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path/out/primary/Cargo.toml000066400000000000000000000002551432416201200251470ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] cargo-list-test-fixture-dependency = { version = "0.0.0", path = "../dependency" } cargo-0.66.0/tests/testsuite/cargo_add/path/stderr.log000066400000000000000000000001111432416201200227220ustar00rootroot00000000000000 Adding cargo-list-test-fixture-dependency (local) to dependencies. 
cargo-0.66.0/tests/testsuite/cargo_add/path/stdout.log000066400000000000000000000000001432416201200227360ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_dev/000077500000000000000000000000001432416201200215615ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_dev/in/000077500000000000000000000000001432416201200221675ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_dev/in/dependency/000077500000000000000000000000001432416201200243055ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_dev/in/dependency/Cargo.toml000066400000000000000000000001251432416201200262330ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture-dependency" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/path_dev/in/dependency/src/000077500000000000000000000000001432416201200250745ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_dev/in/dependency/src/lib.rs000066400000000000000000000000001432416201200261760ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_dev/in/primary/000077500000000000000000000000001432416201200236525ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_dev/in/primary/Cargo.toml000066400000000000000000000001121432416201200255740ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/path_dev/in/primary/src/000077500000000000000000000000001432416201200244415ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_dev/in/primary/src/lib.rs000066400000000000000000000000001432416201200255430ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_dev/mod.rs000066400000000000000000000014261432416201200227110ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use 
crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn path_dev() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = project_root.join("primary"); snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("cargo-list-test-fixture-dependency --path ../dependency --dev") .current_dir(&cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/path_dev/out/000077500000000000000000000000001432416201200223705ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_dev/out/dependency/000077500000000000000000000000001432416201200245065ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_dev/out/dependency/Cargo.toml000066400000000000000000000001251432416201200264340ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture-dependency" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/path_dev/out/primary/000077500000000000000000000000001432416201200240535ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_dev/out/primary/Cargo.toml000066400000000000000000000002361432416201200260040ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dev-dependencies] cargo-list-test-fixture-dependency = { path = "../dependency" } cargo-0.66.0/tests/testsuite/cargo_add/path_dev/stderr.log000066400000000000000000000001151432416201200235640ustar00rootroot00000000000000 Adding cargo-list-test-fixture-dependency (local) to dev-dependencies. 
cargo-0.66.0/tests/testsuite/cargo_add/path_dev/stdout.log000066400000000000000000000000001432416201200235740ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name/000077500000000000000000000000001432416201200236015ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name/in/000077500000000000000000000000001432416201200242075ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name/in/dependency/000077500000000000000000000000001432416201200263255ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name/in/dependency/Cargo.toml000066400000000000000000000001251432416201200302530ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture-dependency" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name/in/dependency/src/000077500000000000000000000000001432416201200271145ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name/in/dependency/src/lib.rs000066400000000000000000000000001432416201200302160ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name/in/primary/000077500000000000000000000000001432416201200256725ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name/in/primary/Cargo.toml000066400000000000000000000001121432416201200276140ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name/in/primary/src/000077500000000000000000000000001432416201200264615ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name/in/primary/src/lib.rs000066400000000000000000000000001432416201200275630ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name/mod.rs000066400000000000000000000014321432416201200247260ustar00rootroot00000000000000use 
cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn path_inferred_name() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = project_root.join("primary"); snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("cargo-list-test-fixture-dependency --path ../dependency") .current_dir(&cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name/out/000077500000000000000000000000001432416201200244105ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name/out/dependency/000077500000000000000000000000001432416201200265265ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name/out/dependency/Cargo.toml000066400000000000000000000001251432416201200304540ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture-dependency" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name/out/primary/000077500000000000000000000000001432416201200260735ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name/out/primary/Cargo.toml000066400000000000000000000002551432416201200300250ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] cargo-list-test-fixture-dependency = { version = "0.0.0", path = "../dependency" } cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name/stderr.log000066400000000000000000000001111432416201200256000ustar00rootroot00000000000000 Adding cargo-list-test-fixture-dependency (local) to dependencies. 
cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name/stdout.log000066400000000000000000000000001432416201200256140ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/000077500000000000000000000000001432416201200304025ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/000077500000000000000000000000001432416201200310105ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/Cargo.toml000066400000000000000000000000741432416201200327410ustar00rootroot00000000000000[workspace] members = ["primary", "dependency", "optional"] cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/dependency/000077500000000000000000000000001432416201200331265ustar00rootroot00000000000000Cargo.toml000066400000000000000000000003501432416201200347750ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/dependency[package] name = "your-face" version = "0.1.3" [dependencies] toml_edit = "0.1.5" atty = "0.2.13" optional-dependency = { path = "../optional", optional = true } [features] default = ["mouth"] nose = [] mouth = ["nose"] eyes = [] 
cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/dependency/src/000077500000000000000000000000001432416201200337155ustar00rootroot00000000000000lib.rs000066400000000000000000000000001432416201200347400ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/dependency/srccargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/optional/000077500000000000000000000000001432416201200326355ustar00rootroot00000000000000Cargo.toml000066400000000000000000000001461432416201200345070ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/optional[package] name = "optional-dep" version = "0.1.3" [dependencies] toml_edit = "0.1.5" atty = "0.2.13" cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/optional/src/000077500000000000000000000000001432416201200334245ustar00rootroot00000000000000lib.rs000066400000000000000000000000001432416201200344470ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/optional/srccargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/primary/000077500000000000000000000000001432416201200324735ustar00rootroot00000000000000Cargo.toml000066400000000000000000000000751432416201200343460ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/primary[package] name = "cargo-list-test-fixture" version = "0.0.0" 
cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/primary/src/000077500000000000000000000000001432416201200332625ustar00rootroot00000000000000lib.rs000066400000000000000000000000001432416201200343050ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/in/primary/srccargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/mod.rs000066400000000000000000000014501432416201200315270ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn path_inferred_name_conflicts_full_feature() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = project_root.join("primary"); snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("--path ../dependency --features your-face/nose") .current_dir(&cwd) .assert() .code(101) .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/out/000077500000000000000000000000001432416201200312115ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/out/Cargo.toml000066400000000000000000000000741432416201200331420ustar00rootroot00000000000000[workspace] members = ["primary", "dependency", "optional"] 
cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/out/dependency/000077500000000000000000000000001432416201200333275ustar00rootroot00000000000000Cargo.toml000066400000000000000000000003501432416201200351760ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/out/dependency[package] name = "your-face" version = "0.1.3" [dependencies] toml_edit = "0.1.5" atty = "0.2.13" optional-dependency = { path = "../optional", optional = true } [features] default = ["mouth"] nose = [] mouth = ["nose"] eyes = [] cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/out/primary/000077500000000000000000000000001432416201200326745ustar00rootroot00000000000000Cargo.toml000066400000000000000000000000751432416201200345470ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/out/primary[package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/stderr.log000066400000000000000000000001211432416201200324020ustar00rootroot00000000000000error: `your-face/nose` is unsupported when inferring the crate name, use `nose` 
cargo-0.66.0/tests/testsuite/cargo_add/path_inferred_name_conflicts_full_feature/stdout.log000066400000000000000000000000001432416201200324150ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_normalized_name/000077500000000000000000000000001432416201200241475ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_normalized_name/in/000077500000000000000000000000001432416201200245555ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_normalized_name/in/dependency/000077500000000000000000000000001432416201200266735ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_normalized_name/in/dependency/Cargo.toml000066400000000000000000000001251432416201200306210ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture-dependency" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/path_normalized_name/in/dependency/src/000077500000000000000000000000001432416201200274625ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_normalized_name/in/dependency/src/lib.rs000066400000000000000000000000001432416201200305640ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_normalized_name/in/primary/000077500000000000000000000000001432416201200262405ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_normalized_name/in/primary/Cargo.toml000066400000000000000000000001121432416201200301620ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" 
cargo-0.66.0/tests/testsuite/cargo_add/path_normalized_name/in/primary/src/000077500000000000000000000000001432416201200270275ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_normalized_name/in/primary/src/lib.rs000066400000000000000000000000001432416201200301310ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_normalized_name/mod.rs000066400000000000000000000015261432416201200253000ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn path_normalized_name() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = project_root.join("primary"); snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("cargo_list_test_fixture_dependency --path ../dependency") .current_dir(&cwd) .assert() .failure() // Fuzzy searching for paths isn't supported at this time .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/path_normalized_name/out/000077500000000000000000000000001432416201200247565ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_normalized_name/out/dependency/000077500000000000000000000000001432416201200270745ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_normalized_name/out/dependency/Cargo.toml000066400000000000000000000001251432416201200310220ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture-dependency" version = "0.0.0" 
cargo-0.66.0/tests/testsuite/cargo_add/path_normalized_name/out/primary/000077500000000000000000000000001432416201200264415ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/path_normalized_name/out/primary/Cargo.toml000066400000000000000000000001121432416201200303630ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/path_normalized_name/stderr.log000066400000000000000000000001741432416201200261570ustar00rootroot00000000000000error: the crate `cargo_list_test_fixture_dependency@[ROOT]/case/dependency` could not be found at `[ROOT]/case/dependency` cargo-0.66.0/tests/testsuite/cargo_add/path_normalized_name/stdout.log000066400000000000000000000000001432416201200261620ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/preserve_sorted/000077500000000000000000000000001432416201200232025ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/preserve_sorted/in/000077500000000000000000000000001432416201200236105ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/preserve_sorted/in/Cargo.toml000066400000000000000000000002131432416201200255340ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package = "0.1.1" versioned-package = "0.1.1" cargo-0.66.0/tests/testsuite/cargo_add/preserve_sorted/in/src/000077500000000000000000000000001432416201200243775ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/preserve_sorted/in/src/lib.rs000066400000000000000000000000001432416201200255010ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/preserve_sorted/mod.rs000066400000000000000000000013241432416201200243270ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] 
fn preserve_sorted() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("toml") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/preserve_sorted/out/000077500000000000000000000000001432416201200240115ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/preserve_sorted/out/Cargo.toml000066400000000000000000000002361432416201200257420ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package = "0.1.1" toml = "99999.0.0" versioned-package = "0.1.1" cargo-0.66.0/tests/testsuite/cargo_add/preserve_sorted/stderr.log000066400000000000000000000001221432416201200252030ustar00rootroot00000000000000 Updating `dummy-registry` index Adding toml v99999.0.0 to dependencies. 
cargo-0.66.0/tests/testsuite/cargo_add/preserve_sorted/stdout.log000066400000000000000000000000001432416201200252150ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/preserve_unsorted/000077500000000000000000000000001432416201200235455ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/preserve_unsorted/in/000077500000000000000000000000001432416201200241535ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/preserve_unsorted/in/Cargo.toml000066400000000000000000000002131432416201200260770ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] versioned-package = "0.1.1" my-package = "0.1.1" cargo-0.66.0/tests/testsuite/cargo_add/preserve_unsorted/in/src/000077500000000000000000000000001432416201200247425ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/preserve_unsorted/in/src/lib.rs000066400000000000000000000000001432416201200260440ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/preserve_unsorted/mod.rs000066400000000000000000000013261432416201200246740ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn preserve_unsorted() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("toml") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/preserve_unsorted/out/000077500000000000000000000000001432416201200243545ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/preserve_unsorted/out/Cargo.toml000066400000000000000000000002361432416201200263050ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] versioned-package = "0.1.1" my-package = "0.1.1" toml = "99999.0.0" cargo-0.66.0/tests/testsuite/cargo_add/preserve_unsorted/stderr.log000066400000000000000000000001221432416201200255460ustar00rootroot00000000000000 Updating `dummy-registry` index Adding toml v99999.0.0 to dependencies. cargo-0.66.0/tests/testsuite/cargo_add/preserve_unsorted/stdout.log000066400000000000000000000000001432416201200255600ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/quiet/000077500000000000000000000000001432416201200211165ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/quiet/in000077700000000000000000000000001432416201200240142../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/quiet/mod.rs000066400000000000000000000013271432416201200222460ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn quiet() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("--quiet your-face") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/quiet/out/000077500000000000000000000000001432416201200217255ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/quiet/out/Cargo.toml000066400000000000000000000001621432416201200236540ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] your-face = "99999.0.0" cargo-0.66.0/tests/testsuite/cargo_add/quiet/stderr.log000066400000000000000000000000001432416201200231120ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/quiet/stdout.log000066400000000000000000000000001432416201200231310ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/registry/000077500000000000000000000000001432416201200216375ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/registry/in/000077500000000000000000000000001432416201200222455ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/registry/in/Cargo.toml000066400000000000000000000001121432416201200241670ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/registry/in/src/000077500000000000000000000000001432416201200230345ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/registry/in/src/lib.rs000066400000000000000000000000001432416201200241360ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/registry/mod.rs000066400000000000000000000013771432416201200227740ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_alt_registry; use cargo_test_support::curr_dir; #[cargo_test] fn registry() { init_alt_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package1 my-package2 --registry alternative") 
.current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/registry/out/000077500000000000000000000000001432416201200224465ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/registry/out/Cargo.toml000066400000000000000000000003361432416201200244000ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package1 = { version = "99999.0.0", registry = "alternative" } my-package2 = { version = "99999.0.0", registry = "alternative" } cargo-0.66.0/tests/testsuite/cargo_add/registry/stderr.log000066400000000000000000000002131432416201200236410ustar00rootroot00000000000000 Updating `alternative` index Adding my-package1 v99999.0.0 to dependencies. Adding my-package2 v99999.0.0 to dependencies. cargo-0.66.0/tests/testsuite/cargo_add/registry/stdout.log000066400000000000000000000000001432416201200236520ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/rename/000077500000000000000000000000001432416201200212365ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/rename/in000077700000000000000000000000001432416201200241342../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/rename/mod.rs000066400000000000000000000013421432416201200223630ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn rename() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package --rename renamed") .current_dir(cwd) .assert() .success() 
.stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/rename/out/000077500000000000000000000000001432416201200220455ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/rename/out/Cargo.toml000066400000000000000000000002261432416201200237750ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] renamed = { version = "99999.0.0", package = "my-package" } cargo-0.66.0/tests/testsuite/cargo_add/rename/stderr.log000066400000000000000000000001301432416201200232360ustar00rootroot00000000000000 Updating `dummy-registry` index Adding my-package v99999.0.0 to dependencies. cargo-0.66.0/tests/testsuite/cargo_add/rename/stdout.log000066400000000000000000000000001432416201200232510ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/require_weak/000077500000000000000000000000001432416201200224525ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/require_weak/in/000077500000000000000000000000001432416201200230605ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/require_weak/in/Cargo.toml000066400000000000000000000002701432416201200250070ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [features] eyes = ["your-face?/eyes"] [dependencies] your-face = { version = "99999.0.0", optional = true } cargo-0.66.0/tests/testsuite/cargo_add/require_weak/in/src/000077500000000000000000000000001432416201200236475ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/require_weak/in/src/lib.rs000066400000000000000000000000001432416201200247510ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/require_weak/mod.rs000066400000000000000000000013441432416201200236010ustar00rootroot00000000000000use 
cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn require_weak() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("your-face --no-optional") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/require_weak/out/000077500000000000000000000000001432416201200232615ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/require_weak/out/Cargo.toml000066400000000000000000000002461432416201200252130ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [features] eyes = ["your-face/eyes"] [dependencies] your-face = { version = "99999.0.0" } cargo-0.66.0/tests/testsuite/cargo_add/require_weak/stderr.log000066400000000000000000000002771432416201200244660ustar00rootroot00000000000000 Updating `dummy-registry` index Adding your-face v99999.0.0 to dependencies. 
Features: - ears - eyes - mouth - nose cargo-0.66.0/tests/testsuite/cargo_add/require_weak/stdout.log000066400000000000000000000000001432416201200244650ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/target/000077500000000000000000000000001432416201200212555ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/target/in000077700000000000000000000000001432416201200241532../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/target/mod.rs000066400000000000000000000013761432416201200224110ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn target() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package1 my-package2 --target i686-unknown-linux-gnu") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/target/out/000077500000000000000000000000001432416201200220645ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/target/out/Cargo.toml000066400000000000000000000002541432416201200240150ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [target.i686-unknown-linux-gnu.dependencies] my-package1 = "99999.0.0" my-package2 = "99999.0.0" cargo-0.66.0/tests/testsuite/cargo_add/target/stderr.log000066400000000000000000000003261432416201200232640ustar00rootroot00000000000000 Updating `dummy-registry` index Adding my-package1 v99999.0.0 to dependencies for target `i686-unknown-linux-gnu`. 
Adding my-package2 v99999.0.0 to dependencies for target `i686-unknown-linux-gnu`. cargo-0.66.0/tests/testsuite/cargo_add/target/stdout.log000066400000000000000000000000001432416201200232700ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/target_cfg/000077500000000000000000000000001432416201200220745ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/target_cfg/in000077700000000000000000000000001432416201200247722../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/target_cfg/mod.rs000066400000000000000000000013651432416201200232260ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn target_cfg() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package1 my-package2 --target cfg(unix)") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/target_cfg/out/000077500000000000000000000000001432416201200227035ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/target_cfg/out/Cargo.toml000066400000000000000000000002411432416201200246300ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [target."cfg(unix)".dependencies] my-package1 = "99999.0.0" my-package2 = "99999.0.0" cargo-0.66.0/tests/testsuite/cargo_add/target_cfg/stderr.log000066400000000000000000000002741432416201200241050ustar00rootroot00000000000000 Updating `dummy-registry` index Adding my-package1 v99999.0.0 to dependencies for target `cfg(unix)`. 
Adding my-package2 v99999.0.0 to dependencies for target `cfg(unix)`. cargo-0.66.0/tests/testsuite/cargo_add/target_cfg/stdout.log000066400000000000000000000000001432416201200241070ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/unknown_inherited_feature/000077500000000000000000000000001432416201200252345ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/unknown_inherited_feature/in/000077500000000000000000000000001432416201200256425ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/unknown_inherited_feature/in/Cargo.toml000066400000000000000000000002341432416201200275710ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] [workspace.dependencies] foo = { version = "0.0.0", path = "./dependency", features = ["not_recognized"] } cargo-0.66.0/tests/testsuite/cargo_add/unknown_inherited_feature/in/dependency/000077500000000000000000000000001432416201200277605ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/unknown_inherited_feature/in/dependency/Cargo.toml000066400000000000000000000005001432416201200317030ustar00rootroot00000000000000[package] name = "foo" version = "0.0.0" [features] default-base = [] default-test-base = [] default-merge-base = [] default = ["default-base", "default-test-base", "default-merge-base"] test-base = [] test = ["test-base", "default-test-base"] merge-base = [] merge = ["merge-base", "default-merge-base"] unrelated = 
[]cargo-0.66.0/tests/testsuite/cargo_add/unknown_inherited_feature/in/dependency/src/000077500000000000000000000000001432416201200305475ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/unknown_inherited_feature/in/dependency/src/lib.rs000066400000000000000000000000001432416201200316510ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/unknown_inherited_feature/in/primary/000077500000000000000000000000001432416201200273255ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/unknown_inherited_feature/in/primary/Cargo.toml000066400000000000000000000001511432416201200312520ustar00rootroot00000000000000[package] name = "bar" version = "0.0.0" [dependencies] foo = { workspace = true, features = ["test"] } cargo-0.66.0/tests/testsuite/cargo_add/unknown_inherited_feature/in/primary/src/000077500000000000000000000000001432416201200301145ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/unknown_inherited_feature/in/primary/src/lib.rs000066400000000000000000000000001432416201200312160ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/unknown_inherited_feature/mod.rs000066400000000000000000000012561432416201200263650ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn unknown_inherited_feature() { let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .args(["foo", "-p", "bar"]) .current_dir(cwd) .assert() .failure() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/unknown_inherited_feature/out/000077500000000000000000000000001432416201200260435ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/unknown_inherited_feature/out/Cargo.toml000066400000000000000000000002341432416201200277720ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] [workspace.dependencies] foo = { version = "0.0.0", path = "./dependency", features = ["not_recognized"] } cargo-0.66.0/tests/testsuite/cargo_add/unknown_inherited_feature/out/dependency/000077500000000000000000000000001432416201200301615ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/unknown_inherited_feature/out/dependency/Cargo.toml000066400000000000000000000005001432416201200321040ustar00rootroot00000000000000[package] name = "foo" version = "0.0.0" [features] default-base = [] default-test-base = [] default-merge-base = [] default = ["default-base", "default-test-base", "default-merge-base"] test-base = [] test = ["test-base", "default-test-base"] merge-base = [] merge = ["merge-base", "default-merge-base"] unrelated = []cargo-0.66.0/tests/testsuite/cargo_add/unknown_inherited_feature/out/primary/000077500000000000000000000000001432416201200275265ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/unknown_inherited_feature/out/primary/Cargo.toml000066400000000000000000000001511432416201200314530ustar00rootroot00000000000000[package] name = "bar" version = "0.0.0" [dependencies] foo = { workspace = true, features = ["test"] } cargo-0.66.0/tests/testsuite/cargo_add/unknown_inherited_feature/stderr.log000066400000000000000000000005651432416201200272500ustar00rootroot00000000000000 Adding foo (workspace) to dependencies. 
Features as of v0.0.0: + default-base + default-merge-base + default-test-base + not_recognized + test + test-base - merge - merge-base - unrelated error: unrecognized features: ["not_recognized"] cargo-0.66.0/tests/testsuite/cargo_add/unknown_inherited_feature/stdout.log000066400000000000000000000000001432416201200272470ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/vers/000077500000000000000000000000001432416201200207465ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/vers/in000077700000000000000000000000001432416201200236442../add-basic.inustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/vers/mod.rs000066400000000000000000000013271432416201200220760ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn vers() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = &project_root; snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("my-package@>=0.1.1") .current_dir(cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/vers/out/000077500000000000000000000000001432416201200215555ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/vers/out/Cargo.toml000066400000000000000000000001611432416201200235030ustar00rootroot00000000000000[workspace] [package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] my-package = ">=0.1.1" cargo-0.66.0/tests/testsuite/cargo_add/vers/stderr.log000066400000000000000000000001251432416201200227520ustar00rootroot00000000000000 Updating `dummy-registry` index Adding my-package >=0.1.1 to dependencies. 
cargo-0.66.0/tests/testsuite/cargo_add/vers/stdout.log000066400000000000000000000000001432416201200227610ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_name/000077500000000000000000000000001432416201200227655ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_name/in/000077500000000000000000000000001432416201200233735ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_name/in/Cargo.toml000066400000000000000000000000601432416201200253170ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] cargo-0.66.0/tests/testsuite/cargo_add/workspace_name/in/dependency/000077500000000000000000000000001432416201200255115ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_name/in/dependency/Cargo.toml000066400000000000000000000001101432416201200274310ustar00rootroot00000000000000[package] name = "cargo-list-test-fixture-dependency" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/workspace_name/in/dependency/src/000077500000000000000000000000001432416201200263005ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_name/in/dependency/src/lib.rs000066400000000000000000000000001432416201200274020ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_name/in/primary/000077500000000000000000000000001432416201200250565ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_name/in/primary/Cargo.toml000066400000000000000000000000751432416201200270100ustar00rootroot00000000000000[package] name = "cargo-list-test-fixture" version = "0.0.0" 
cargo-0.66.0/tests/testsuite/cargo_add/workspace_name/in/primary/src/000077500000000000000000000000001432416201200256455ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_name/in/primary/src/lib.rs000066400000000000000000000000001432416201200267470ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_name/mod.rs000066400000000000000000000014011432416201200241060ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn workspace_name() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = project_root.join("primary"); snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("cargo-list-test-fixture-dependency") .current_dir(&cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/workspace_name/out/000077500000000000000000000000001432416201200235745ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_name/out/Cargo.toml000066400000000000000000000000601432416201200255200ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] cargo-0.66.0/tests/testsuite/cargo_add/workspace_name/out/dependency/000077500000000000000000000000001432416201200257125ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_name/out/dependency/Cargo.toml000066400000000000000000000001101432416201200276320ustar00rootroot00000000000000[package] name = "cargo-list-test-fixture-dependency" version = "0.0.0" 
cargo-0.66.0/tests/testsuite/cargo_add/workspace_name/out/primary/000077500000000000000000000000001432416201200252575ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_name/out/primary/Cargo.toml000066400000000000000000000002401432416201200272030ustar00rootroot00000000000000[package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] cargo-list-test-fixture-dependency = { version = "0.0.0", path = "../dependency" } cargo-0.66.0/tests/testsuite/cargo_add/workspace_name/stderr.log000066400000000000000000000001111432416201200247640ustar00rootroot00000000000000 Adding cargo-list-test-fixture-dependency (local) to dependencies. cargo-0.66.0/tests/testsuite/cargo_add/workspace_name/stdout.log000066400000000000000000000000001432416201200250000ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_path/000077500000000000000000000000001432416201200230015ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_path/in/000077500000000000000000000000001432416201200234075ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_path/in/Cargo.toml000066400000000000000000000000601432416201200253330ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] cargo-0.66.0/tests/testsuite/cargo_add/workspace_path/in/dependency/000077500000000000000000000000001432416201200255255ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_path/in/dependency/Cargo.toml000066400000000000000000000001101432416201200274450ustar00rootroot00000000000000[package] name = "cargo-list-test-fixture-dependency" version = "0.0.0" 
cargo-0.66.0/tests/testsuite/cargo_add/workspace_path/in/dependency/src/000077500000000000000000000000001432416201200263145ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_path/in/dependency/src/lib.rs000066400000000000000000000000001432416201200274160ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_path/in/primary/000077500000000000000000000000001432416201200250725ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_path/in/primary/Cargo.toml000066400000000000000000000000751432416201200270240ustar00rootroot00000000000000[package] name = "cargo-list-test-fixture" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/workspace_path/in/primary/src/000077500000000000000000000000001432416201200256615ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_path/in/primary/src/lib.rs000066400000000000000000000000001432416201200267630ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_path/mod.rs000066400000000000000000000014261432416201200241310ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn workspace_path() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = project_root.join("primary"); snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("cargo-list-test-fixture-dependency --path ../dependency") .current_dir(&cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } 
cargo-0.66.0/tests/testsuite/cargo_add/workspace_path/out/000077500000000000000000000000001432416201200236105ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_path/out/Cargo.toml000066400000000000000000000000601432416201200255340ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] cargo-0.66.0/tests/testsuite/cargo_add/workspace_path/out/dependency/000077500000000000000000000000001432416201200257265ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_path/out/dependency/Cargo.toml000066400000000000000000000001101432416201200276460ustar00rootroot00000000000000[package] name = "cargo-list-test-fixture-dependency" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/workspace_path/out/primary/000077500000000000000000000000001432416201200252735ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_path/out/primary/Cargo.toml000066400000000000000000000002401432416201200272170ustar00rootroot00000000000000[package] name = "cargo-list-test-fixture" version = "0.0.0" [dependencies] cargo-list-test-fixture-dependency = { version = "0.0.0", path = "../dependency" } cargo-0.66.0/tests/testsuite/cargo_add/workspace_path/stderr.log000066400000000000000000000001111432416201200250000ustar00rootroot00000000000000 Adding cargo-list-test-fixture-dependency (local) to dependencies. 
cargo-0.66.0/tests/testsuite/cargo_add/workspace_path/stdout.log000066400000000000000000000000001432416201200250140ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_path_dev/000077500000000000000000000000001432416201200236375ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_path_dev/in/000077500000000000000000000000001432416201200242455ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_path_dev/in/Cargo.toml000066400000000000000000000000601432416201200261710ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] cargo-0.66.0/tests/testsuite/cargo_add/workspace_path_dev/in/dependency/000077500000000000000000000000001432416201200263635ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_path_dev/in/dependency/Cargo.toml000066400000000000000000000001101432416201200303030ustar00rootroot00000000000000[package] name = "cargo-list-test-fixture-dependency" version = "0.0.0" cargo-0.66.0/tests/testsuite/cargo_add/workspace_path_dev/in/dependency/src/000077500000000000000000000000001432416201200271525ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_path_dev/in/dependency/src/lib.rs000066400000000000000000000000001432416201200302540ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_path_dev/in/primary/000077500000000000000000000000001432416201200257305ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_path_dev/in/primary/Cargo.toml000066400000000000000000000000751432416201200276620ustar00rootroot00000000000000[package] name = "cargo-list-test-fixture" version = "0.0.0" 
cargo-0.66.0/tests/testsuite/cargo_add/workspace_path_dev/in/primary/src/000077500000000000000000000000001432416201200265175ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_path_dev/in/primary/src/lib.rs000066400000000000000000000000001432416201200276210ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_path_dev/mod.rs000066400000000000000000000014401432416201200247630ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use crate::cargo_add::init_registry; use cargo_test_support::curr_dir; #[cargo_test] fn workspace_path_dev() { init_registry(); let project = Project::from_template(curr_dir!().join("in")); let project_root = project.root(); let cwd = project_root.join("primary"); snapbox::cmd::Command::cargo_ui() .arg("add") .arg_line("cargo-list-test-fixture-dependency --path ../dependency --dev") .current_dir(&cwd) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); } cargo-0.66.0/tests/testsuite/cargo_add/workspace_path_dev/out/000077500000000000000000000000001432416201200244465ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_path_dev/out/Cargo.toml000066400000000000000000000000601432416201200263720ustar00rootroot00000000000000[workspace] members = ["primary", "dependency"] cargo-0.66.0/tests/testsuite/cargo_add/workspace_path_dev/out/dependency/000077500000000000000000000000001432416201200265645ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_path_dev/out/dependency/Cargo.toml000066400000000000000000000001101432416201200305040ustar00rootroot00000000000000[package] name = "cargo-list-test-fixture-dependency" version = "0.0.0" 
cargo-0.66.0/tests/testsuite/cargo_add/workspace_path_dev/out/primary/000077500000000000000000000000001432416201200261315ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_add/workspace_path_dev/out/primary/Cargo.toml000066400000000000000000000002211432416201200300540ustar00rootroot00000000000000[package] name = "cargo-list-test-fixture" version = "0.0.0" [dev-dependencies] cargo-list-test-fixture-dependency = { path = "../dependency" } cargo-0.66.0/tests/testsuite/cargo_add/workspace_path_dev/stderr.log000066400000000000000000000001151432416201200256420ustar00rootroot00000000000000 Adding cargo-list-test-fixture-dependency (local) to dev-dependencies. cargo-0.66.0/tests/testsuite/cargo_add/workspace_path_dev/stdout.log000066400000000000000000000000001432416201200256520ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/cargo_alias_config.rs000066400000000000000000000215601432416201200222270ustar00rootroot00000000000000//! Tests for `[alias]` config command aliases. use std::env; use cargo_test_support::tools::echo_subcommand; use cargo_test_support::{basic_bin_manifest, project}; #[cargo_test] fn alias_incorrect_config_type() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/main.rs", "fn main() {}") .file( ".cargo/config", r#" [alias] b-cargo-test = 5 "#, ) .build(); p.cargo("b-cargo-test -v") .with_status(101) .with_stderr_contains( "\ [ERROR] invalid configuration for key `alias.b-cargo-test` expected a list, but found a integer for [..]", ) .run(); } #[cargo_test] fn alias_config() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/main.rs", "fn main() {}") .file( ".cargo/config", r#" [alias] b-cargo-test = "build" "#, ) .build(); p.cargo("b-cargo-test -v") .with_stderr_contains( "\ [COMPILING] foo v0.5.0 [..] 
[RUNNING] `rustc --crate-name foo [..]", ) .run(); } #[cargo_test] fn dependent_alias() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/main.rs", "fn main() {}") .file( ".cargo/config", r#" [alias] b-cargo-test = "build" a-cargo-test = ["b-cargo-test", "-v"] "#, ) .build(); p.cargo("a-cargo-test") .with_stderr_contains( "\ [COMPILING] foo v0.5.0 [..] [RUNNING] `rustc --crate-name foo [..]", ) .run(); } #[cargo_test] fn alias_shadowing_external_subcommand() { let echo = echo_subcommand(); let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/main.rs", "fn main() {}") .file( ".cargo/config", r#" [alias] echo = "build" "#, ) .build(); let mut paths: Vec<_> = env::split_paths(&env::var_os("PATH").unwrap_or_default()).collect(); paths.push(echo.target_debug_dir()); let path = env::join_paths(paths).unwrap(); p.cargo("echo") .env("PATH", &path) .with_stderr("\ [WARNING] user-defined alias `echo` is shadowing an external subcommand found at: `[ROOT]/cargo-echo/target/debug/cargo-echo[EXE]` This was previously accepted but is being phased out; it will become a hard error in a future release. For more information, see issue #10049 . [COMPILING] foo v0.5.0 [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn default_args_alias() { let echo = echo_subcommand(); let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/main.rs", "fn main() {}") .file( ".cargo/config", r#" [alias] echo = "echo --flag1 --flag2" test-1 = "echo" build = "build --verbose" "#, ) .build(); let mut paths: Vec<_> = env::split_paths(&env::var_os("PATH").unwrap_or_default()).collect(); paths.push(echo.target_debug_dir()); let path = env::join_paths(paths).unwrap(); p.cargo("echo") .env("PATH", &path) .with_status(101) .with_stderr("\ [WARNING] user-defined alias `echo` is shadowing an external subcommand found at: `[ROOT]/cargo-echo/target/debug/cargo-echo[EXE]` This was previously accepted but is being phased out; it will become a hard error in a future release. For more information, see issue #10049 . error: alias echo has unresolvable recursive definition: echo -> echo ", ) .run(); p.cargo("test-1") .env("PATH", &path) .with_status(101) .with_stderr("\ [WARNING] user-defined alias `echo` is shadowing an external subcommand found at: `[ROOT]/cargo-echo/target/debug/cargo-echo[EXE]` This was previously accepted but is being phased out; it will become a hard error in a future release. For more information, see issue #10049 . error: alias test-1 has unresolvable recursive definition: test-1 -> echo -> echo ", ) .run(); // Builtins are not expanded by rule p.cargo("build") .with_stderr( "\ [WARNING] user-defined alias `build` is ignored, because it is shadowed by a built-in command [COMPILING] foo v0.5.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn corecursive_alias() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/main.rs", "fn main() {}") .file( ".cargo/config", r#" [alias] test-1 = "test-2 --flag1" test-2 = "test-3 --flag2" test-3 = "test-1 --flag3" "#, ) .build(); p.cargo("test-1") .with_status(101) .with_stderr( "error: alias test-1 has unresolvable recursive definition: test-1 -> test-2 -> test-3 -> test-1", ) .run(); p.cargo("test-2") .with_status(101) .with_stderr( "error: alias test-2 has unresolvable recursive definition: test-2 -> test-3 -> test-1 -> test-2", ) .run(); } #[cargo_test] fn alias_list_test() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/main.rs", "fn main() {}") .file( ".cargo/config", r#" [alias] b-cargo-test = ["build", "--release"] "#, ) .build(); p.cargo("b-cargo-test -v") .with_stderr_contains("[COMPILING] foo v0.5.0 [..]") .with_stderr_contains("[RUNNING] `rustc --crate-name [..]") .run(); } #[cargo_test] fn alias_with_flags_config() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/main.rs", "fn main() {}") .file( ".cargo/config", r#" [alias] b-cargo-test = "build --release" "#, ) .build(); p.cargo("b-cargo-test -v") .with_stderr_contains("[COMPILING] foo v0.5.0 [..]") .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]") .run(); } #[cargo_test] fn alias_cannot_shadow_builtin_command() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/main.rs", "fn main() {}") .file( ".cargo/config", r#" [alias] build = "fetch" "#, ) .build(); p.cargo("build") .with_stderr( "\ [WARNING] user-defined alias `build` is ignored, because it is shadowed by a built-in command [COMPILING] foo v0.5.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn alias_override_builtin_alias() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/main.rs", "fn main() {}") .file( ".cargo/config", r#" [alias] b = "run" "#, ) .build(); p.cargo("b") .with_stderr( "\ [COMPILING] foo v0.5.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [RUNNING] `target/debug/foo[EXE]` ", ) .run(); } #[cargo_test] fn builtin_alias_takes_options() { // #6381 let p = project() .file("src/lib.rs", "") .file( "examples/ex1.rs", r#"fn main() { println!("{}", std::env::args().skip(1).next().unwrap()) }"#, ) .build(); p.cargo("r --example ex1 -- asdf").with_stdout("asdf").run(); } #[cargo_test] fn global_options_with_alias() { // Check that global options are passed through. let p = project().file("src/lib.rs", "").build(); p.cargo("-v c") .with_stderr( "\ [CHECKING] foo [..] [RUNNING] `rustc [..] [FINISHED] dev [..] ", ) .run(); } #[cargo_test] fn weird_check() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/main.rs", "fn main() {}") .build(); p.cargo("-- check --invalid_argument -some-other-argument") .with_stderr( "\ [WARNING] trailing arguments after built-in command `check` are ignored: `--invalid_argument -some-other-argument` [CHECKING] foo v0.5.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } cargo-0.66.0/tests/testsuite/cargo_command.rs000066400000000000000000000316301432416201200212260ustar00rootroot00000000000000//! Tests for custom cargo commands and other global command features. 
use std::env; use std::fs; use std::io::Read; use std::path::{Path, PathBuf}; use std::process::Stdio; use std::str; use cargo_test_support::basic_manifest; use cargo_test_support::paths::CargoPathExt; use cargo_test_support::registry::Package; use cargo_test_support::tools::echo_subcommand; use cargo_test_support::{ basic_bin_manifest, cargo_exe, cargo_process, paths, project, project_in_home, }; use cargo_util::paths::join_paths; fn path() -> Vec { env::split_paths(&env::var_os("PATH").unwrap_or_default()).collect() } #[cargo_test] fn list_commands_with_descriptions() { let p = project().build(); p.cargo("--list") .with_stdout_contains( " build Compile a local package and all of its dependencies", ) // Assert that `read-manifest` prints the right one-line description followed by another // command, indented. .with_stdout_contains( " read-manifest Print a JSON representation of a Cargo.toml manifest.", ) .run(); } #[cargo_test] fn list_builtin_aliases_with_descriptions() { let p = project().build(); p.cargo("--list") .with_stdout_contains(" b alias: build") .with_stdout_contains(" c alias: check") .with_stdout_contains(" r alias: run") .with_stdout_contains(" t alias: test") .run(); } #[cargo_test] fn list_custom_aliases_with_descriptions() { let p = project_in_home("proj") .file( &paths::home().join(".cargo").join("config"), r#" [alias] myaliasstr = "foo --bar" myaliasvec = ["foo", "--bar"] "#, ) .build(); p.cargo("--list") .with_stdout_contains(" myaliasstr alias: foo --bar") .with_stdout_contains(" myaliasvec alias: foo --bar") .run(); } #[cargo_test] fn list_dedupe() { let p = project() .executable(Path::new("path-test-1").join("cargo-dupe"), "") .executable(Path::new("path-test-2").join("cargo-dupe"), "") .build(); let mut path = path(); path.push(p.root().join("path-test-1")); path.push(p.root().join("path-test-2")); let path = env::join_paths(path.iter()).unwrap(); p.cargo("--list") .env("PATH", &path) .with_stdout_contains_n(" dupe", 1) .run(); } 
#[cargo_test] fn list_command_looks_at_path() { let proj = project() .executable(Path::new("path-test").join("cargo-1"), "") .build(); let mut path = path(); path.push(proj.root().join("path-test")); let path = env::join_paths(path.iter()).unwrap(); let output = cargo_process("-v --list") .env("PATH", &path) .exec_with_output() .unwrap(); let output = str::from_utf8(&output.stdout).unwrap(); assert!( output.contains("\n 1 "), "missing 1: {}", output ); } #[cargo_test] fn list_command_handles_known_external_commands() { let p = project() .executable(Path::new("path-test").join("cargo-fmt"), "") .build(); let fmt_desc = " fmt Formats all bin and lib files of the current crate using rustfmt."; // Without path - fmt isn't there p.cargo("--list") .env("PATH", "") .with_stdout_does_not_contain(fmt_desc) .run(); // With path - fmt is there with known description let mut path = path(); path.push(p.root().join("path-test")); let path = env::join_paths(path.iter()).unwrap(); p.cargo("--list") .env("PATH", &path) .with_stdout_contains(fmt_desc) .run(); } #[cargo_test] fn list_command_resolves_symlinks() { let proj = project() .symlink(cargo_exe(), Path::new("path-test").join("cargo-2")) .build(); let mut path = path(); path.push(proj.root().join("path-test")); let path = env::join_paths(path.iter()).unwrap(); let output = cargo_process("-v --list") .env("PATH", &path) .exec_with_output() .unwrap(); let output = str::from_utf8(&output.stdout).unwrap(); assert!( output.contains("\n 2 "), "missing 2: {}", output ); } #[cargo_test] fn find_closest_capital_c_to_c() { cargo_process("C") .with_status(101) .with_stderr_contains( "\ error: no such subcommand: `C` Did you mean `c`? ", ) .run(); } #[cargo_test] fn find_closest_capital_b_to_b() { cargo_process("B") .with_status(101) .with_stderr_contains( "\ error: no such subcommand: `B` Did you mean `b`? 
", ) .run(); } #[cargo_test] fn find_closest_biuld_to_build() { cargo_process("biuld") .with_status(101) .with_stderr_contains( "\ error: no such subcommand: `biuld` Did you mean `build`? ", ) .run(); // But, if we actually have `biuld`, it must work! // https://github.com/rust-lang/cargo/issues/5201 Package::new("cargo-biuld", "1.0.0") .file( "src/main.rs", r#" fn main() { println!("Similar, but not identical to, build"); } "#, ) .publish(); cargo_process("install cargo-biuld").run(); cargo_process("biuld") .with_stdout("Similar, but not identical to, build\n") .run(); cargo_process("--list") .with_stdout_contains( " build Compile a local package and all of its dependencies\n", ) .with_stdout_contains(" biuld\n") .run(); } #[cargo_test] fn find_closest_alias() { let root = paths::root(); let my_home = root.join("my_home"); fs::create_dir(&my_home).unwrap(); fs::write( &my_home.join("config"), r#" [alias] myalias = "build" "#, ) .unwrap(); cargo_process("myalais") .env("CARGO_HOME", &my_home) .with_status(101) .with_stderr_contains( "\ error: no such subcommand: `myalais` Did you mean `myalias`? ", ) .run(); // But, if no alias is defined, it must not suggest one! cargo_process("myalais") .with_status(101) .with_stderr_contains( "\ error: no such subcommand: `myalais` ", ) .with_stderr_does_not_contain( "\ Did you mean `myalias`? ", ) .run(); } // If a subcommand is more than an edit distance of 3 away, we don't make a suggestion. 
#[cargo_test] fn find_closest_dont_correct_nonsense() { cargo_process("there-is-no-way-that-there-is-a-command-close-to-this") .cwd(&paths::root()) .with_status(101) .with_stderr( "\ [ERROR] no such subcommand: `there-is-no-way-that-there-is-a-command-close-to-this` View all installed commands with `cargo --list`", ) .run(); } #[cargo_test] fn displays_subcommand_on_error() { cargo_process("invalid-command") .with_status(101) .with_stderr( "\ [ERROR] no such subcommand: `invalid-command` View all installed commands with `cargo --list`", ) .run(); } #[cargo_test] fn override_cargo_home() { let root = paths::root(); let my_home = root.join("my_home"); fs::create_dir(&my_home).unwrap(); fs::write( &my_home.join("config"), r#" [cargo-new] vcs = "none" "#, ) .unwrap(); cargo_process("new foo").env("CARGO_HOME", &my_home).run(); assert!(!paths::root().join("foo/.git").is_dir()); cargo_process("new foo2").run(); assert!(paths::root().join("foo2/.git").is_dir()); } #[cargo_test] fn cargo_subcommand_env() { let src = format!( r#" use std::env; fn main() {{ println!("{{}}", env::var("{}").unwrap()); }} "#, cargo::CARGO_ENV ); let p = project() .at("cargo-envtest") .file("Cargo.toml", &basic_bin_manifest("cargo-envtest")) .file("src/main.rs", &src) .build(); let target_dir = p.target_debug_dir(); p.cargo("build").run(); assert!(p.bin("cargo-envtest").is_file()); let cargo = cargo_exe().canonicalize().unwrap(); let mut path = path(); path.push(target_dir); let path = env::join_paths(path.iter()).unwrap(); cargo_process("envtest") .env("PATH", &path) .with_stdout(cargo.to_str().unwrap()) .run(); } #[cargo_test] fn cargo_cmd_bins_vs_explicit_path() { // Set up `cargo-foo` binary in two places: inside `$HOME/.cargo/bin` and outside of it // // Return paths to both places fn set_up_cargo_foo() -> (PathBuf, PathBuf) { let p = project() .at("cargo-foo") .file("Cargo.toml", &basic_manifest("cargo-foo", "1.0.0")) .file( "src/bin/cargo-foo.rs", r#"fn main() { println!("INSIDE"); }"#, ) 
.file( "src/bin/cargo-foo2.rs", r#"fn main() { println!("OUTSIDE"); }"#, ) .build(); p.cargo("build").run(); let cargo_bin_dir = paths::home().join(".cargo/bin"); cargo_bin_dir.mkdir_p(); let root_bin_dir = paths::root().join("bin"); root_bin_dir.mkdir_p(); let exe_name = format!("cargo-foo{}", env::consts::EXE_SUFFIX); fs::rename(p.bin("cargo-foo"), cargo_bin_dir.join(&exe_name)).unwrap(); fs::rename(p.bin("cargo-foo2"), root_bin_dir.join(&exe_name)).unwrap(); (root_bin_dir, cargo_bin_dir) } let (outside_dir, inside_dir) = set_up_cargo_foo(); // If `$CARGO_HOME/bin` is not in a path, prefer it over anything in `$PATH`. // // This is the historical behavior we don't want to break. cargo_process("foo").with_stdout_contains("INSIDE").run(); // When `$CARGO_HOME/bin` is in the `$PATH` // use only `$PATH` so the user-defined ordering is respected. { cargo_process("foo") .env( "PATH", join_paths(&[&inside_dir, &outside_dir], "PATH").unwrap(), ) .with_stdout_contains("INSIDE") .run(); cargo_process("foo") // Note: trailing slash .env( "PATH", join_paths(&[inside_dir.join(""), outside_dir.join("")], "PATH").unwrap(), ) .with_stdout_contains("INSIDE") .run(); cargo_process("foo") .env( "PATH", join_paths(&[&outside_dir, &inside_dir], "PATH").unwrap(), ) .with_stdout_contains("OUTSIDE") .run(); cargo_process("foo") // Note: trailing slash .env( "PATH", join_paths(&[outside_dir.join(""), inside_dir.join("")], "PATH").unwrap(), ) .with_stdout_contains("OUTSIDE") .run(); } } #[test] #[cargo_test] fn cargo_subcommand_args() { let p = echo_subcommand(); let cargo_foo_bin = p.bin("cargo-echo"); assert!(cargo_foo_bin.is_file()); let mut path = path(); path.push(p.target_debug_dir()); let path = env::join_paths(path.iter()).unwrap(); cargo_process("echo bar -v --help") .env("PATH", &path) .with_stdout("echo bar -v --help") .run(); } #[cargo_test] fn explain() { cargo_process("--explain E0001") .with_stdout_contains( "This error suggests that the expression arm corresponding to the 
noted pattern", ) .run(); } #[cargo_test] fn closed_output_ok() { // Checks that closed output doesn't cause an error. let mut p = cargo_process("--list").build_command(); p.stdout(Stdio::piped()).stderr(Stdio::piped()); let mut child = p.spawn().unwrap(); // Close stdout drop(child.stdout.take()); // Read stderr let mut s = String::new(); child .stderr .as_mut() .unwrap() .read_to_string(&mut s) .unwrap(); let status = child.wait().unwrap(); assert!(status.success()); assert!(s.is_empty(), "{}", s); } #[cargo_test] fn subcommand_leading_plus_output_contains() { cargo_process("+nightly") .with_status(101) .with_stderr( "\ error: no such subcommand: `+nightly` Cargo does not handle `+toolchain` directives. Did you mean to invoke `cargo` through `rustup` instead?", ) .run(); } #[cargo_test] fn full_did_you_mean() { cargo_process("bluid") .with_status(101) .with_stderr( "\ error: no such subcommand: `bluid` Did you mean `build`? View all installed commands with `cargo --list`", ) .run(); } cargo-0.66.0/tests/testsuite/cargo_config.rs000066400000000000000000000373251432416201200210640ustar00rootroot00000000000000//! Tests for the `cargo config` command. use super::config::write_config_at; use cargo_test_support::paths; use std::fs; use std::path::PathBuf; fn cargo_process(s: &str) -> cargo_test_support::Execs { let mut p = cargo_test_support::cargo_process(s); // Clear out some of the environment added by the default cargo_process so // the tests don't need to deal with it. 
p.env_remove("CARGO_PROFILE_DEV_SPLIT_DEBUGINFO") .env_remove("CARGO_PROFILE_TEST_SPLIT_DEBUGINFO") .env_remove("CARGO_PROFILE_RELEASE_SPLIT_DEBUGINFO") .env_remove("CARGO_PROFILE_BENCH_SPLIT_DEBUGINFO") .env_remove("CARGO_INCREMENTAL"); p } #[cargo_test] fn gated() { cargo_process("config get") .masquerade_as_nightly_cargo(&["cargo-config"]) .with_status(101) .with_stderr("\ error: the `cargo config` command is unstable, pass `-Z unstable-options` to enable it See https://github.com/rust-lang/cargo/issues/9301 for more information about the `cargo config` command. ") .run(); } fn common_setup() -> PathBuf { write_config_at( paths::home().join(".cargo/config.toml"), " [alias] foo = \"abc --xyz\" [build] jobs = 99 rustflags = [\"--flag-global\"] [profile.dev] opt-level = 3 [profile.dev.package.foo] opt-level = 1 [target.'cfg(target_os = \"linux\")'] runner = \"runme\" # How unknown keys are handled. [extra-table] somekey = \"somevalue\" ", ); let sub_folder = paths::root().join("foo/.cargo"); write_config_at( sub_folder.join("config.toml"), " [alias] sub-example = [\"sub\", \"example\"] [build] rustflags = [\"--flag-directory\"] ", ); sub_folder } #[cargo_test] fn get_toml() { // Notes: // - The "extra-table" is shown without a warning. I'm not sure how that // should be handled, since displaying warnings could cause problems // with ingesting the output. // - Environment variables aren't loaded. :( let sub_folder = common_setup(); cargo_process("config get -Zunstable-options") .cwd(&sub_folder.parent().unwrap()) .masquerade_as_nightly_cargo(&["cargo-config"]) .env("CARGO_ALIAS_BAR", "cat dog") .env("CARGO_BUILD_JOBS", "100") // The weird forward slash in the linux line is due to testsuite normalization. 
.with_stdout( "\ alias.foo = \"abc --xyz\" alias.sub-example = [\"sub\", \"example\"] build.jobs = 99 build.rustflags = [\"--flag-directory\", \"--flag-global\"] extra-table.somekey = \"somevalue\" profile.dev.opt-level = 3 profile.dev.package.foo.opt-level = 1 target.\"cfg(target_os = \\\"linux\\\")\".runner = \"runme\" # The following environment variables may affect the loaded values. # CARGO_ALIAS_BAR=[..]cat dog[..] # CARGO_BUILD_JOBS=100 # CARGO_HOME=[ROOT]/home/.cargo ", ) .with_stderr("") .run(); // Env keys work if they are specific. cargo_process("config get build.jobs -Zunstable-options") .cwd(&sub_folder.parent().unwrap()) .masquerade_as_nightly_cargo(&["cargo-config"]) .env("CARGO_BUILD_JOBS", "100") .with_stdout("build.jobs = 100") .with_stderr("") .run(); // Array value. cargo_process("config get build.rustflags -Zunstable-options") .cwd(&sub_folder.parent().unwrap()) .masquerade_as_nightly_cargo(&["cargo-config"]) .with_stdout("build.rustflags = [\"--flag-directory\", \"--flag-global\"]") .with_stderr("") .run(); // Sub-table cargo_process("config get profile -Zunstable-options") .cwd(&sub_folder.parent().unwrap()) .masquerade_as_nightly_cargo(&["cargo-config"]) .with_stdout( "\ profile.dev.opt-level = 3 profile.dev.package.foo.opt-level = 1 ", ) .with_stderr("") .run(); // Specific profile entry. cargo_process("config get profile.dev.opt-level -Zunstable-options") .cwd(&sub_folder.parent().unwrap()) .masquerade_as_nightly_cargo(&["cargo-config"]) .with_stdout("profile.dev.opt-level = 3") .with_stderr("") .run(); // A key that isn't set. cargo_process("config get build.rustc -Zunstable-options") .cwd(&sub_folder.parent().unwrap()) .masquerade_as_nightly_cargo(&["cargo-config"]) .with_status(101) .with_stdout("") .with_stderr("error: config value `build.rustc` is not set") .run(); // A key that is not part of Cargo's config schema. 
cargo_process("config get not.set -Zunstable-options") .cwd(&sub_folder.parent().unwrap()) .masquerade_as_nightly_cargo(&["cargo-config"]) .with_status(101) .with_stdout("") .with_stderr("error: config value `not.set` is not set") .run(); } #[cargo_test] fn get_json() { // Notes: // - This does not show env vars at all. :( let all_json = r#" { "alias": { "foo": "abc --xyz", "sub-example": [ "sub", "example" ] }, "build": { "jobs": 99, "rustflags": [ "--flag-directory", "--flag-global" ] }, "extra-table": { "somekey": "somevalue" }, "profile": { "dev": { "opt-level": 3, "package": { "foo": { "opt-level": 1 } } } }, "target": { "cfg(target_os = \"linux\")": { "runner": "runme" } } } "#; let sub_folder = common_setup(); cargo_process("config get --format=json -Zunstable-options") .cwd(&sub_folder.parent().unwrap()) .masquerade_as_nightly_cargo(&["cargo-config"]) .env("CARGO_ALIAS_BAR", "cat dog") .env("CARGO_BUILD_JOBS", "100") .with_json(all_json) .with_stderr( "\ note: The following environment variables may affect the loaded values. CARGO_ALIAS_BAR=[..]cat dog[..] CARGO_BUILD_JOBS=100 CARGO_HOME=[ROOT]/home/.cargo ", ) .run(); // json-value is the same for the entire root table cargo_process("config get --format=json-value -Zunstable-options") .cwd(&sub_folder.parent().unwrap()) .masquerade_as_nightly_cargo(&["cargo-config"]) .with_json(all_json) .with_stderr( "\ note: The following environment variables may affect the loaded values. 
CARGO_HOME=[ROOT]/home/.cargo ", ) .run(); cargo_process("config get --format=json build.jobs -Zunstable-options") .cwd(&sub_folder.parent().unwrap()) .masquerade_as_nightly_cargo(&["cargo-config"]) .with_json( r#" {"build": {"jobs": 99}} "#, ) .with_stderr("") .run(); cargo_process("config get --format=json-value build.jobs -Zunstable-options") .cwd(&sub_folder.parent().unwrap()) .masquerade_as_nightly_cargo(&["cargo-config"]) .with_stdout("99") .with_stderr("") .run(); } #[cargo_test] fn show_origin_toml() { let sub_folder = common_setup(); cargo_process("config get --show-origin -Zunstable-options") .cwd(&sub_folder.parent().unwrap()) .masquerade_as_nightly_cargo(&["cargo-config"]) .with_stdout( "\ alias.foo = \"abc --xyz\" # [ROOT]/home/.cargo/config.toml alias.sub-example = [ \"sub\", # [ROOT]/foo/.cargo/config.toml \"example\", # [ROOT]/foo/.cargo/config.toml ] build.jobs = 99 # [ROOT]/home/.cargo/config.toml build.rustflags = [ \"--flag-directory\", # [ROOT]/foo/.cargo/config.toml \"--flag-global\", # [ROOT]/home/.cargo/config.toml ] extra-table.somekey = \"somevalue\" # [ROOT]/home/.cargo/config.toml profile.dev.opt-level = 3 # [ROOT]/home/.cargo/config.toml profile.dev.package.foo.opt-level = 1 # [ROOT]/home/.cargo/config.toml target.\"cfg(target_os = \\\"linux\\\")\".runner = \"runme\" # [ROOT]/home/.cargo/config.toml # The following environment variables may affect the loaded values. 
# CARGO_HOME=[ROOT]/home/.cargo ", ) .with_stderr("") .run(); cargo_process("config get --show-origin build.rustflags -Zunstable-options") .cwd(&sub_folder.parent().unwrap()) .masquerade_as_nightly_cargo(&["cargo-config"]) .env("CARGO_BUILD_RUSTFLAGS", "env1 env2") .with_stdout( "\ build.rustflags = [ \"--flag-directory\", # [ROOT]/foo/.cargo/config.toml \"--flag-global\", # [ROOT]/home/.cargo/config.toml \"env1\", # environment variable `CARGO_BUILD_RUSTFLAGS` \"env2\", # environment variable `CARGO_BUILD_RUSTFLAGS` ] ", ) .with_stderr("") .run(); } #[cargo_test] fn show_origin_toml_cli() { let sub_folder = common_setup(); cargo_process("config get --show-origin build.jobs -Zunstable-options --config build.jobs=123") .cwd(&sub_folder.parent().unwrap()) .masquerade_as_nightly_cargo(&["cargo-config"]) .env("CARGO_BUILD_JOBS", "1") .with_stdout("build.jobs = 123 # --config cli option") .with_stderr("") .run(); cargo_process("config get --show-origin build.rustflags -Zunstable-options --config") .arg("build.rustflags=[\"cli1\",\"cli2\"]") .cwd(&sub_folder.parent().unwrap()) .masquerade_as_nightly_cargo(&["cargo-config"]) .env("CARGO_BUILD_RUSTFLAGS", "env1 env2") .with_stdout( "\ build.rustflags = [ \"--flag-directory\", # [ROOT]/foo/.cargo/config.toml \"--flag-global\", # [ROOT]/home/.cargo/config.toml \"cli1\", # --config cli option \"cli2\", # --config cli option \"env1\", # environment variable `CARGO_BUILD_RUSTFLAGS` \"env2\", # environment variable `CARGO_BUILD_RUSTFLAGS` ] ", ) .with_stderr("") .run(); } #[cargo_test] fn show_origin_json() { let sub_folder = common_setup(); cargo_process("config get --show-origin --format=json -Zunstable-options") .cwd(&sub_folder.parent().unwrap()) .masquerade_as_nightly_cargo(&["cargo-config"]) .with_status(101) .with_stderr("error: the `json` format does not support --show-origin, try the `toml` format instead") .run(); } #[cargo_test] fn unmerged_toml() { let sub_folder = common_setup(); cargo_process("config get 
--merged=no -Zunstable-options") .cwd(&sub_folder.parent().unwrap()) .masquerade_as_nightly_cargo(&["cargo-config"]) .env("CARGO_ALIAS_BAR", "cat dog") .env("CARGO_BUILD_JOBS", "100") .with_stdout( "\ # Environment variables # CARGO=[..] # CARGO_ALIAS_BAR=[..]cat dog[..] # CARGO_BUILD_JOBS=100 # CARGO_HOME=[ROOT]/home/.cargo # [ROOT]/foo/.cargo/config.toml alias.sub-example = [\"sub\", \"example\"] build.rustflags = [\"--flag-directory\"] # [ROOT]/home/.cargo/config.toml alias.foo = \"abc --xyz\" build.jobs = 99 build.rustflags = [\"--flag-global\"] extra-table.somekey = \"somevalue\" profile.dev.opt-level = 3 profile.dev.package.foo.opt-level = 1 target.\"cfg(target_os = \\\"linux\\\")\".runner = \"runme\" ", ) .with_stderr("") .run(); cargo_process("config get --merged=no build.rustflags -Zunstable-options") .cwd(&sub_folder.parent().unwrap()) .masquerade_as_nightly_cargo(&["cargo-config"]) .env("CARGO_BUILD_RUSTFLAGS", "env1 env2") .with_stdout( "\ # Environment variables # CARGO_BUILD_RUSTFLAGS=[..]env1 env2[..] 
# [ROOT]/foo/.cargo/config.toml build.rustflags = [\"--flag-directory\"] # [ROOT]/home/.cargo/config.toml build.rustflags = [\"--flag-global\"] ", ) .with_stderr("") .run(); cargo_process("config get --merged=no does.not.exist -Zunstable-options") .cwd(&sub_folder.parent().unwrap()) .masquerade_as_nightly_cargo(&["cargo-config"]) .with_stderr("") .with_stderr("") .run(); cargo_process("config get --merged=no build.rustflags.extra -Zunstable-options") .cwd(&sub_folder.parent().unwrap()) .masquerade_as_nightly_cargo(&["cargo-config"]) .with_status(101) .with_stderr( "error: expected table for configuration key `build.rustflags`, \ but found array in [ROOT]/foo/.cargo/config.toml", ) .run(); } #[cargo_test] fn unmerged_toml_cli() { let sub_folder = common_setup(); cargo_process("config get --merged=no build.rustflags -Zunstable-options --config") .arg("build.rustflags=[\"cli1\",\"cli2\"]") .cwd(&sub_folder.parent().unwrap()) .masquerade_as_nightly_cargo(&["cargo-config"]) .env("CARGO_BUILD_RUSTFLAGS", "env1 env2") .with_stdout( "\ # --config cli option build.rustflags = [\"cli1\", \"cli2\"] # Environment variables # CARGO_BUILD_RUSTFLAGS=[..]env1 env2[..] 
# [ROOT]/foo/.cargo/config.toml build.rustflags = [\"--flag-directory\"] # [ROOT]/home/.cargo/config.toml build.rustflags = [\"--flag-global\"] ", ) .with_stderr("") .run(); } #[cargo_test] fn unmerged_json() { let sub_folder = common_setup(); cargo_process("config get --merged=no --format=json -Zunstable-options") .cwd(&sub_folder.parent().unwrap()) .masquerade_as_nightly_cargo(&["cargo-config"]) .with_status(101) .with_stderr( "error: the `json` format does not support --merged=no, try the `toml` format instead", ) .run(); } #[cargo_test] fn includes() { let sub_folder = common_setup(); fs::write( sub_folder.join("config.toml"), " include = 'other.toml' [build] rustflags = [\"--flag-directory\"] ", ) .unwrap(); fs::write( sub_folder.join("other.toml"), " [build] rustflags = [\"--flag-other\"] ", ) .unwrap(); cargo_process("config get build.rustflags -Zunstable-options -Zconfig-include") .cwd(&sub_folder.parent().unwrap()) .masquerade_as_nightly_cargo(&["cargo-config", "config-include"]) .with_stdout(r#"build.rustflags = ["--flag-other", "--flag-directory", "--flag-global"]"#) .with_stderr("") .run(); cargo_process("config get build.rustflags --show-origin -Zunstable-options -Zconfig-include") .cwd(&sub_folder.parent().unwrap()) .masquerade_as_nightly_cargo(&["cargo-config", "config-include"]) .with_stdout( "\ build.rustflags = [ \"--flag-other\", # [ROOT]/foo/.cargo/other.toml \"--flag-directory\", # [ROOT]/foo/.cargo/config.toml \"--flag-global\", # [ROOT]/home/.cargo/config.toml ] ", ) .with_stderr("") .run(); cargo_process("config get --merged=no -Zunstable-options -Zconfig-include") .cwd(&sub_folder.parent().unwrap()) .masquerade_as_nightly_cargo(&["cargo-config", "config-include"]) .with_stdout( "\ # Environment variables # CARGO=[..] 
# CARGO_HOME=[ROOT]/home/.cargo # [ROOT]/foo/.cargo/other.toml build.rustflags = [\"--flag-other\"] # [ROOT]/foo/.cargo/config.toml build.rustflags = [\"--flag-directory\"] include = \"other.toml\" # [ROOT]/home/.cargo/config.toml alias.foo = \"abc --xyz\" build.jobs = 99 build.rustflags = [\"--flag-global\"] extra-table.somekey = \"somevalue\" profile.dev.opt-level = 3 profile.dev.package.foo.opt-level = 1 target.\"cfg(target_os = \\\"linux\\\")\".runner = \"runme\" ", ) .with_stderr("") .run(); } cargo-0.66.0/tests/testsuite/cargo_env_config.rs000066400000000000000000000101771432416201200217300ustar00rootroot00000000000000//! Tests for `[env]` config. use cargo_test_support::{basic_bin_manifest, project}; #[cargo_test] fn env_basic() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file( "src/main.rs", r#" use std::env; fn main() { println!( "compile-time:{}", env!("ENV_TEST_1233") ); println!( "run-time:{}", env::var("ENV_TEST_1233").unwrap()); } "#, ) .file( ".cargo/config", r#" [env] ENV_TEST_1233 = "Hello" "#, ) .build(); p.cargo("run") .with_stdout_contains("compile-time:Hello") .with_stdout_contains("run-time:Hello") .run(); } #[cargo_test] fn env_invalid() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file( "src/main.rs", r#" fn main() { } "#, ) .file( ".cargo/config", r#" [env] ENV_TEST_BOOL = false "#, ) .build(); p.cargo("build") .with_status(101) .with_stderr_contains("[..]could not load config key `env.ENV_TEST_BOOL`") .run(); } #[cargo_test] fn env_force() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file( "src/main.rs", r#" use std::env; fn main() { println!( "ENV_TEST_FORCED:{}", env!("ENV_TEST_FORCED") ); println!( "ENV_TEST_UNFORCED:{}", env!("ENV_TEST_UNFORCED") ); println!( "ENV_TEST_UNFORCED_DEFAULT:{}", env!("ENV_TEST_UNFORCED_DEFAULT") ); } "#, ) .file( ".cargo/config", r#" [env] ENV_TEST_UNFORCED_DEFAULT = "from-config" ENV_TEST_UNFORCED = { value = "from-config", force = 
false } ENV_TEST_FORCED = { value = "from-config", force = true } "#, ) .build(); p.cargo("run") .env("ENV_TEST_FORCED", "from-env") .env("ENV_TEST_UNFORCED", "from-env") .env("ENV_TEST_UNFORCED_DEFAULT", "from-env") .with_stdout_contains("ENV_TEST_FORCED:from-config") .with_stdout_contains("ENV_TEST_UNFORCED:from-env") .with_stdout_contains("ENV_TEST_UNFORCED_DEFAULT:from-env") .run(); } #[cargo_test] fn env_relative() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo2")) .file( "src/main.rs", r#" use std::env; use std::path::Path; fn main() { println!( "ENV_TEST_REGULAR:{}", env!("ENV_TEST_REGULAR") ); println!( "ENV_TEST_REGULAR_DEFAULT:{}", env!("ENV_TEST_REGULAR_DEFAULT") ); println!( "ENV_TEST_RELATIVE:{}", env!("ENV_TEST_RELATIVE") ); assert!( Path::new(env!("ENV_TEST_RELATIVE")).is_absolute() ); assert!( !Path::new(env!("ENV_TEST_REGULAR")).is_absolute() ); assert!( !Path::new(env!("ENV_TEST_REGULAR_DEFAULT")).is_absolute() ); } "#, ) .file( ".cargo/config", r#" [env] ENV_TEST_REGULAR = { value = "Cargo.toml", relative = false } ENV_TEST_REGULAR_DEFAULT = "Cargo.toml" ENV_TEST_RELATIVE = { value = "Cargo.toml", relative = true } "#, ) .build(); p.cargo("run").run(); } #[cargo_test] fn env_no_override() { let p = project() .file("Cargo.toml", &basic_bin_manifest("unchanged")) .file( "src/main.rs", r#" use std::env; fn main() { println!( "CARGO_PKG_NAME:{}", env!("CARGO_PKG_NAME") ); } "#, ) .file( ".cargo/config", r#" [env] CARGO_PKG_NAME = { value = "from-config", force = true } "#, ) .build(); p.cargo("run") .with_stdout_contains("CARGO_PKG_NAME:unchanged") .run(); } cargo-0.66.0/tests/testsuite/cargo_features.rs000066400000000000000000000423301432416201200214250ustar00rootroot00000000000000//! Tests for `cargo-features` definitions. 
use cargo_test_support::registry::Package; use cargo_test_support::{project, registry}; #[cargo_test] fn feature_required() { let p = project() .file( "Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] im-a-teapot = true "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .masquerade_as_nightly_cargo(&["test-dummy-unstable"]) .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[..]` Caused by: the `im-a-teapot` manifest key is unstable and may not work properly in England Caused by: feature `test-dummy-unstable` is required The package requires the Cargo feature called `test-dummy-unstable`, \ but that feature is not stabilized in this version of Cargo (1.[..]). Consider adding `cargo-features = [\"test-dummy-unstable\"]` to the top of Cargo.toml \ (above the [package] table) to tell Cargo you are opting in to use this unstable feature. See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html for more information \ about the status of this feature. ", ) .run(); // Same, but stable. p.cargo("build") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[..]` Caused by: the `im-a-teapot` manifest key is unstable and may not work properly in England Caused by: feature `test-dummy-unstable` is required The package requires the Cargo feature called `test-dummy-unstable`, \ but that feature is not stabilized in this version of Cargo (1.[..]). Consider trying a newer version of Cargo (this may require the nightly release). See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html \ for more information about the status of this feature. ", ) .run(); } #[cargo_test] fn feature_required_dependency() { // The feature has been stabilized by a future version of Cargo, and // someone published something uses it, but this version of Cargo has not // yet stabilized it. Don't suggest editing Cargo.toml, since published // packages shouldn't be edited. 
Package::new("bar", "1.0.0") .file( "Cargo.toml", r#" [package] name = "bar" version = "0.1.0" im-a-teapot = true "#, ) .file("src/lib.rs", "") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "1.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .masquerade_as_nightly_cargo(&["test-dummy-unstable"]) .with_status(101) .with_stderr( "\ [UPDATING] [..] [DOWNLOADING] [..] [DOWNLOADED] bar v1.0.0 [..] error: failed to download replaced source registry `crates-io` Caused by: failed to parse manifest at `[..]/bar-1.0.0/Cargo.toml` Caused by: the `im-a-teapot` manifest key is unstable and may not work properly in England Caused by: feature `test-dummy-unstable` is required The package requires the Cargo feature called `test-dummy-unstable`, \ but that feature is not stabilized in this version of Cargo (1.[..]). Consider trying a more recent nightly release. See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html \ for more information about the status of this feature. ", ) .run(); // Same, but stable. p.cargo("build") .with_status(101) .with_stderr( "\ error: failed to download `bar v1.0.0` Caused by: unable to get packages from source Caused by: failed to download replaced source registry `crates-io` Caused by: failed to parse manifest at `[..]/bar-1.0.0/Cargo.toml` Caused by: the `im-a-teapot` manifest key is unstable and may not work properly in England Caused by: feature `test-dummy-unstable` is required The package requires the Cargo feature called `test-dummy-unstable`, \ but that feature is not stabilized in this version of Cargo (1.[..]). Consider trying a newer version of Cargo (this may require the nightly release). See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html \ for more information about the status of this feature. 
", ) .run(); } #[cargo_test] fn unknown_feature() { let p = project() .file( "Cargo.toml", r#" cargo-features = ["foo"] [package] name = "a" version = "0.0.1" authors = [] "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[..]` Caused by: unknown cargo feature `foo` ", ) .run(); } #[cargo_test] fn stable_feature_warns() { let p = project() .file( "Cargo.toml", r#" cargo-features = ["test-dummy-stable"] [package] name = "a" version = "0.0.1" authors = [] "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_stderr( "\ warning: the cargo feature `test-dummy-stable` has been stabilized in the 1.0 \ release and is no longer necessary to be listed in the manifest See https://doc.rust-lang.org/[..]cargo/ for more information about using this feature. [COMPILING] a [..] [FINISHED] [..] ", ) .run(); } #[cargo_test(nightly, reason = "-Zallow-features is unstable")] fn allow_features() { let p = project() .file( "Cargo.toml", r#" cargo-features = ["test-dummy-unstable"] [package] name = "a" version = "0.0.1" authors = [] im-a-teapot = true "#, ) .file("src/lib.rs", "") .build(); p.cargo("-Zallow-features=test-dummy-unstable build") .masquerade_as_nightly_cargo(&["allow-features", "test-dummy-unstable"]) .with_stderr( "\ [COMPILING] a [..] [FINISHED] [..] 
", ) .run(); p.cargo("-Zallow-features=test-dummy-unstable,print-im-a-teapot -Zprint-im-a-teapot build") .masquerade_as_nightly_cargo(&[ "allow-features", "test-dummy-unstable", "print-im-a-teapot", ]) .with_stdout("im-a-teapot = true") .run(); p.cargo("-Zallow-features=test-dummy-unstable -Zprint-im-a-teapot build") .masquerade_as_nightly_cargo(&[ "allow-features", "test-dummy-unstable", "print-im-a-teapot", ]) .with_status(101) .with_stderr( "\ error: the feature `print-im-a-teapot` is not in the list of allowed features: [test-dummy-unstable] ", ) .run(); p.cargo("-Zallow-features= build") .masquerade_as_nightly_cargo(&["allow-features", "test-dummy-unstable"]) .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[..]` Caused by: the feature `test-dummy-unstable` is not in the list of allowed features: [] ", ) .run(); } #[cargo_test(nightly, reason = "-Zallow-features is unstable")] fn allow_features_to_rustc() { let p = project() .file( "Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] "#, ) .file( "src/lib.rs", r#" #![feature(test_2018_feature)] "#, ) .build(); p.cargo("-Zallow-features= build") .masquerade_as_nightly_cargo(&["allow-features"]) .with_status(101) .with_stderr_contains("[..]E0725[..]") .run(); p.cargo("-Zallow-features=test_2018_feature build") .masquerade_as_nightly_cargo(&["allow-features"]) .with_stderr( "\ [COMPILING] a [..] [FINISHED] [..] ", ) .run(); } #[cargo_test(nightly, reason = "-Zallow-features is unstable")] fn allow_features_in_cfg() { let p = project() .file( "Cargo.toml", r#" cargo-features = ["test-dummy-unstable"] [package] name = "a" version = "0.0.1" authors = [] im-a-teapot = true "#, ) .file( ".cargo/config.toml", r#" [unstable] allow-features = ["test-dummy-unstable", "print-im-a-teapot"] "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .masquerade_as_nightly_cargo(&[ "allow-features", "test-dummy-unstable", "print-im-a-teapot", ]) .with_stderr( "\ [COMPILING] a [..] 
[FINISHED] [..] ", ) .run(); p.cargo("-Zprint-im-a-teapot build") .masquerade_as_nightly_cargo(&[ "allow-features", "test-dummy-unstable", "print-im-a-teapot", ]) .with_stdout("im-a-teapot = true") .with_stderr("[FINISHED] [..]") .run(); p.cargo("-Zunstable-options build") .masquerade_as_nightly_cargo(&["allow-features", "test-dummy-unstable", "print-im-a-teapot"]) .with_status(101) .with_stderr( "\ error: the feature `unstable-options` is not in the list of allowed features: [print-im-a-teapot, test-dummy-unstable] ", ) .run(); // -Zallow-features overrides .cargo/config p.cargo("-Zallow-features=test-dummy-unstable -Zprint-im-a-teapot build") .masquerade_as_nightly_cargo(&[ "allow-features", "test-dummy-unstable", "print-im-a-teapot", ]) .with_status(101) .with_stderr( "\ error: the feature `print-im-a-teapot` is not in the list of allowed features: [test-dummy-unstable] ", ) .run(); p.cargo("-Zallow-features= build") .masquerade_as_nightly_cargo(&[ "allow-features", "test-dummy-unstable", "print-im-a-teapot", ]) .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[..]` Caused by: the feature `test-dummy-unstable` is not in the list of allowed features: [] ", ) .run(); } #[cargo_test] fn nightly_feature_requires_nightly() { let p = project() .file( "Cargo.toml", r#" cargo-features = ["test-dummy-unstable"] [package] name = "a" version = "0.0.1" authors = [] im-a-teapot = true "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .masquerade_as_nightly_cargo(&["test-dummy-unstable"]) .with_stderr( "\ [COMPILING] a [..] [FINISHED] [..] ", ) .run(); p.cargo("build") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[..]` Caused by: the cargo feature `test-dummy-unstable` requires a nightly version of Cargo, \ but this is the `stable` channel See [..] See https://doc.rust-lang.org/[..]cargo/reference/unstable.html for more \ information about using this feature. 
", ) .run(); } #[cargo_test] fn nightly_feature_requires_nightly_in_dep() { let p = project() .file( "Cargo.toml", r#" [package] name = "b" version = "0.0.1" authors = [] [dependencies] a = { path = "a" } "#, ) .file("src/lib.rs", "") .file( "a/Cargo.toml", r#" cargo-features = ["test-dummy-unstable"] [package] name = "a" version = "0.0.1" authors = [] im-a-teapot = true "#, ) .file("a/src/lib.rs", "") .build(); p.cargo("build") .masquerade_as_nightly_cargo(&["test-dummy-unstable"]) .with_stderr( "\ [COMPILING] a [..] [COMPILING] b [..] [FINISHED] [..] ", ) .run(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to get `a` as a dependency of package `b v0.0.1 ([..])` Caused by: failed to load source for dependency `a` Caused by: Unable to update [..] Caused by: failed to parse manifest at `[..]` Caused by: the cargo feature `test-dummy-unstable` requires a nightly version of Cargo, \ but this is the `stable` channel See [..] See https://doc.rust-lang.org/[..]cargo/reference/unstable.html for more \ information about using this feature. ", ) .run(); } #[cargo_test] fn cant_publish() { let p = project() .file( "Cargo.toml", r#" cargo-features = ["test-dummy-unstable"] [package] name = "a" version = "0.0.1" authors = [] im-a-teapot = true "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .masquerade_as_nightly_cargo(&["test-dummy-unstable"]) .with_stderr( "\ [COMPILING] a [..] [FINISHED] [..] ", ) .run(); p.cargo("build") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[..]` Caused by: the cargo feature `test-dummy-unstable` requires a nightly version of Cargo, \ but this is the `stable` channel See [..] See https://doc.rust-lang.org/[..]cargo/reference/unstable.html for more \ information about using this feature. 
", ) .run(); } #[cargo_test] fn z_flags_rejected() { let p = project() .file( "Cargo.toml", r#" cargo-features = ["test-dummy-unstable"] [package] name = "a" version = "0.0.1" authors = [] im-a-teapot = true "#, ) .file("src/lib.rs", "") .build(); p.cargo("build -Zprint-im-a-teapot") .with_status(101) .with_stderr( "error: the `-Z` flag is only accepted on the nightly \ channel of Cargo, but this is the `stable` channel\n\ See [..]", ) .run(); p.cargo("build -Zarg") .masquerade_as_nightly_cargo(&["test-dummy-unstable"]) .with_status(101) .with_stderr("error: unknown `-Z` flag specified: arg") .run(); p.cargo("build -Zprint-im-a-teapot") .masquerade_as_nightly_cargo(&["test-dummy-unstable"]) .with_stdout("im-a-teapot = true\n") .with_stderr( "\ [COMPILING] a [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn publish_allowed() { registry::init(); let p = project() .file( "Cargo.toml", r#" cargo-features = ["test-dummy-unstable"] [package] name = "a" version = "0.0.1" authors = [] "#, ) .file("src/lib.rs", "") .build(); p.cargo("publish --token sekrit") .masquerade_as_nightly_cargo(&["test-dummy-unstable"]) .run(); } #[cargo_test] fn wrong_position() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" cargo-features = ["test-dummy-unstable"] "#, ) .file("src/lib.rs", "") .build(); p.cargo("check") .masquerade_as_nightly_cargo(&["test-dummy-unstable"]) .with_status(101) .with_stderr( "\ error: failed to parse manifest at [..] Caused by: cargo-features = [\"test-dummy-unstable\"] was found in the wrong location: it \ should be set at the top of Cargo.toml before any tables ", ) .run(); } #[cargo_test] fn z_stabilized() { let p = project().file("src/lib.rs", "").build(); p.cargo("check -Z cache-messages") .masquerade_as_nightly_cargo(&["always_nightly"]) .with_stderr( "\ warning: flag `-Z cache-messages` has been stabilized in the 1.40 release, \ and is no longer necessary Message caching is now always enabled. [CHECKING] foo [..] 
[FINISHED] [..] ", ) .run(); p.cargo("check -Z offline") .masquerade_as_nightly_cargo(&["always_nightly"]) .with_status(101) .with_stderr( "\ error: flag `-Z offline` has been stabilized in the 1.36 release Offline mode is now available via the --offline CLI option ", ) .run(); } cargo-0.66.0/tests/testsuite/cargo_targets.rs000066400000000000000000000027361432416201200212660ustar00rootroot00000000000000//! Tests specifically related to target handling (lib, bins, examples, tests, benches). use cargo_test_support::project; #[cargo_test] fn warn_unmatched_target_filters() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [lib] test = false bench = false "#, ) .file("src/lib.rs", r#"fn main() {}"#) .build(); p.cargo("check --tests --bins --examples --benches") .with_stderr( "\ [WARNING] Target filters `bins`, `tests`, `examples`, `benches` specified, \ but no targets matched. This is a no-op [FINISHED][..] ", ) .run(); } #[cargo_test] fn reserved_windows_target_name() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [[bin]] name = "con" path = "src/main.rs" "#, ) .file("src/main.rs", "fn main() {}") .build(); if cfg!(windows) { p.cargo("check") .with_stderr( "\ [WARNING] binary target `con` is a reserved Windows filename, \ this target will not work on Windows platforms [CHECKING] foo[..] [FINISHED][..] ", ) .run(); } else { p.cargo("check") .with_stderr("[CHECKING] foo[..]\n[FINISHED][..]") .run(); } } cargo-0.66.0/tests/testsuite/cfg.rs000066400000000000000000000277001432416201200171770ustar00rootroot00000000000000//! Tests for cfg() expressions. 
use cargo_test_support::registry::Package; use cargo_test_support::rustc_host; use cargo_test_support::{basic_manifest, project}; #[cargo_test] fn cfg_easy() { let p = project() .file( "Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] [target.'cfg(unix)'.dependencies] b = { path = 'b' } [target."cfg(windows)".dependencies] b = { path = 'b' } "#, ) .file("src/lib.rs", "extern crate b;") .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) .file("b/src/lib.rs", "") .build(); p.cargo("build -v").run(); } #[cargo_test] fn dont_include() { let other_family = if cfg!(unix) { "windows" } else { "unix" }; let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "a" version = "0.0.1" authors = [] [target.'cfg({})'.dependencies] b = {{ path = 'b' }} "#, other_family ), ) .file("src/lib.rs", "") .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) .file("b/src/lib.rs", "") .build(); p.cargo("build") .with_stderr( "\ [COMPILING] a v0.0.1 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn works_through_the_registry() { Package::new("baz", "0.1.0").publish(); Package::new("bar", "0.1.0") .target_dep("baz", "0.1.0", "cfg(unix)") .target_dep("baz", "0.1.0", "cfg(windows)") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" "#, ) .file( "src/lib.rs", "#[allow(unused_extern_crates)] extern crate bar;", ) .build(); p.cargo("build") .with_stderr( "\ [UPDATING] [..] index [DOWNLOADING] crates ... [DOWNLOADED] [..] [DOWNLOADED] [..] [COMPILING] baz v0.1.0 [COMPILING] bar v0.1.0 [COMPILING] foo v0.0.1 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn ignore_version_from_other_platform() { let this_family = if cfg!(unix) { "unix" } else { "windows" }; let other_family = if cfg!(unix) { "windows" } else { "unix" }; Package::new("bar", "0.1.0").publish(); Package::new("bar", "0.2.0").publish(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [target.'cfg({})'.dependencies] bar = "0.1.0" [target.'cfg({})'.dependencies] bar = "0.2.0" "#, this_family, other_family ), ) .file( "src/lib.rs", "#[allow(unused_extern_crates)] extern crate bar;", ) .build(); p.cargo("build") .with_stderr( "\ [UPDATING] [..] index [DOWNLOADING] crates ... [DOWNLOADED] [..] [COMPILING] bar v0.1.0 [COMPILING] foo v0.0.1 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn bad_target_spec() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [target.'cfg(4)'.dependencies] bar = "0.1.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: failed to parse `4` as a cfg expression: unexpected character `4` in cfg, [..] 
", ) .run(); } #[cargo_test] fn bad_target_spec2() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [target.'cfg(bar =)'.dependencies] baz = "0.1.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: failed to parse `bar =` as a cfg expression: expected a string, but cfg expression ended ", ) .run(); } #[cargo_test] fn multiple_match_ok() { let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "a" version = "0.0.1" authors = [] [target.'cfg(unix)'.dependencies] b = {{ path = 'b' }} [target.'cfg(target_family = "unix")'.dependencies] b = {{ path = 'b' }} [target."cfg(windows)".dependencies] b = {{ path = 'b' }} [target.'cfg(target_family = "windows")'.dependencies] b = {{ path = 'b' }} [target."cfg(any(windows, unix))".dependencies] b = {{ path = 'b' }} [target.{}.dependencies] b = {{ path = 'b' }} "#, rustc_host() ), ) .file("src/lib.rs", "extern crate b;") .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) .file("b/src/lib.rs", "") .build(); p.cargo("build -v").run(); } #[cargo_test] fn any_ok() { let p = project() .file( "Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] [target."cfg(any(windows, unix))".dependencies] b = { path = 'b' } "#, ) .file("src/lib.rs", "extern crate b;") .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) .file("b/src/lib.rs", "") .build(); p.cargo("build -v").run(); } // https://github.com/rust-lang/cargo/issues/5313 #[cargo_test] #[cfg(all(target_arch = "x86_64", target_os = "linux", target_env = "gnu"))] fn cfg_looks_at_rustflags_for_target() { let p = project() .file( "Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] [target.'cfg(with_b)'.dependencies] b = { path = 'b' } "#, ) .file( "src/main.rs", r#" #[cfg(with_b)] extern crate b; fn main() { b::foo(); } "#, ) .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) 
.file("b/src/lib.rs", "pub fn foo() {}") .build(); p.cargo("build --target x86_64-unknown-linux-gnu") .env("RUSTFLAGS", "--cfg with_b") .run(); } #[cargo_test] fn bad_cfg_discovery() { // Check error messages when `rustc -v` and `rustc --print=*` parsing fails. // // This is a `rustc` replacement which behaves differently based on an // environment variable. let p = project() .at("compiler") .file("Cargo.toml", &basic_manifest("compiler", "0.1.0")) .file( "src/main.rs", r#" fn run_rustc() -> String { let mut cmd = std::process::Command::new("rustc"); for arg in std::env::args_os().skip(1) { cmd.arg(arg); } String::from_utf8(cmd.output().unwrap().stdout).unwrap() } fn main() { let mode = std::env::var("FUNKY_MODE").unwrap(); if mode == "bad-version" { println!("foo"); return; } if std::env::args_os().any(|a| a == "-vV") { print!("{}", run_rustc()); return; } if mode == "no-crate-types" { return; } if mode == "bad-crate-type" { println!("foo"); return; } let output = run_rustc(); let mut lines = output.lines(); let sysroot = loop { let line = lines.next().unwrap(); if line.contains("___") { println!("{}", line); } else { break line; } }; if mode == "no-sysroot" { return; } println!("{}", sysroot); if mode != "bad-cfg" { panic!("unexpected"); } println!("123"); } "#, ) .build(); p.cargo("build").run(); let funky_rustc = p.bin("compiler"); let p = project().file("src/lib.rs", "").build(); p.cargo("build") .env("RUSTC", &funky_rustc) .env("FUNKY_MODE", "bad-version") .with_status(101) .with_stderr( "\ [ERROR] `rustc -vV` didn't have a line for `host:`, got: foo ", ) .run(); p.cargo("build") .env("RUSTC", &funky_rustc) .env("FUNKY_MODE", "no-crate-types") .with_status(101) .with_stderr( "\ [ERROR] malformed output when learning about crate-type bin information command was: `[..]compiler[..] 
--crate-name ___ [..]` (no output received) ", ) .run(); p.cargo("build") .env("RUSTC", &funky_rustc) .env("FUNKY_MODE", "no-sysroot") .with_status(101) .with_stderr( "\ [ERROR] output of --print=sysroot missing when learning about target-specific information from rustc command was: `[..]compiler[..]--crate-type [..]` --- stdout [..]___[..] [..]___[..] [..]___[..] [..]___[..] [..]___[..] [..]___[..] ", ) .run(); p.cargo("build") .env("RUSTC", &funky_rustc) .env("FUNKY_MODE", "bad-cfg") .with_status(101) .with_stderr( "\ [ERROR] failed to parse the cfg from `rustc --print=cfg`, got: [..]___[..] [..]___[..] [..]___[..] [..]___[..] [..]___[..] [..]___[..] [..] 123 Caused by: failed to parse `123` as a cfg expression: unexpected character `1` in cfg, \ expected parens, a comma, an identifier, or a string ", ) .run(); } #[cargo_test] fn exclusive_dep_kinds() { // Checks for a bug where the same package with different cfg expressions // was not being filtered correctly. Package::new("bar", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [target.'cfg(abc)'.dependencies] bar = "1.0" [target.'cfg(not(abc))'.build-dependencies] bar = "1.0" "#, ) .file("src/lib.rs", "") .file("build.rs", "extern crate bar; fn main() {}") .build(); p.cargo("check").run(); p.change_file("src/lib.rs", "extern crate bar;"); p.cargo("check") .with_status(101) // can't find crate for `bar` .with_stderr_contains("[..]E0463[..]") .run(); } cargo-0.66.0/tests/testsuite/check.rs000066400000000000000000000726601432416201200175220ustar00rootroot00000000000000//! Tests for the `cargo check` command. 
use std::fmt::{self, Write}; use cargo_test_support::install::exe; use cargo_test_support::paths::CargoPathExt; use cargo_test_support::registry::Package; use cargo_test_support::tools; use cargo_test_support::{basic_manifest, project}; #[cargo_test] fn check_success() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" "#, ) .file( "src/main.rs", "extern crate bar; fn main() { ::bar::baz(); }", ) .build(); let _bar = project() .at("bar") .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "pub fn baz() {}") .build(); foo.cargo("check").run(); } #[cargo_test] fn check_fail() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" "#, ) .file( "src/main.rs", "extern crate bar; fn main() { ::bar::baz(42); }", ) .build(); let _bar = project() .at("bar") .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "pub fn baz() {}") .build(); foo.cargo("check") .with_status(101) .with_stderr_contains("[..]this function takes 0[..]") .run(); } #[cargo_test] fn custom_derive() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" "#, ) .file( "src/main.rs", r#" #[macro_use] extern crate bar; trait B { fn b(&self); } #[derive(B)] struct A; fn main() { let a = A; a.b(); } "#, ) .build(); let _bar = project() .at("bar") .file( "Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] [lib] proc-macro = true "#, ) .file( "src/lib.rs", r#" extern crate proc_macro; use proc_macro::TokenStream; #[proc_macro_derive(B)] pub fn derive(_input: TokenStream) -> TokenStream { format!("impl B for A {{ fn b(&self) {{}} }}").parse().unwrap() } "#, ) .build(); foo.cargo("check").run(); } #[cargo_test] fn check_build() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = 
"0.0.1" authors = [] [dependencies.bar] path = "../bar" "#, ) .file( "src/main.rs", "extern crate bar; fn main() { ::bar::baz(); }", ) .build(); let _bar = project() .at("bar") .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "pub fn baz() {}") .build(); foo.cargo("check").run(); foo.cargo("build").run(); } #[cargo_test] fn build_check() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" "#, ) .file( "src/main.rs", "extern crate bar; fn main() { ::bar::baz(); }", ) .build(); let _bar = project() .at("bar") .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "pub fn baz() {}") .build(); foo.cargo("build -v").run(); foo.cargo("check -v").run(); } // Checks that where a project has both a lib and a bin, the lib is only checked // not built. #[cargo_test] fn issue_3418() { let foo = project() .file("src/lib.rs", "") .file("src/main.rs", "fn main() {}") .build(); foo.cargo("check -v") .with_stderr_contains("[..] --emit=[..]metadata [..]") .run(); } // Some weirdness that seems to be caused by a crate being built as well as // checked, but in this case with a proc macro too. 
#[cargo_test] fn issue_3419() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] rustc-serialize = "*" "#, ) .file( "src/lib.rs", r#" extern crate rustc_serialize; use rustc_serialize::Decodable; pub fn take() {} "#, ) .file( "src/main.rs", r#" extern crate rustc_serialize; extern crate foo; #[derive(RustcDecodable)] pub struct Foo; fn main() { foo::take::(); } "#, ) .build(); Package::new("rustc-serialize", "1.0.0") .file( "src/lib.rs", r#" pub trait Decodable: Sized { fn decode(d: &mut D) -> Result; } pub trait Decoder { type Error; fn read_struct(&mut self, s_name: &str, len: usize, f: F) -> Result where F: FnOnce(&mut Self) -> Result; } "#, ) .publish(); p.cargo("check").run(); } // Check on a dylib should have a different metadata hash than build. #[cargo_test] fn dylib_check_preserves_build_cache() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [lib] crate-type = ["dylib"] [dependencies] "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_stderr( "\ [..]Compiling foo v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); p.cargo("check").run(); p.cargo("build") .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") .run(); } // test `cargo rustc --profile check` #[cargo_test] fn rustc_check() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" "#, ) .file( "src/main.rs", "extern crate bar; fn main() { ::bar::baz(); }", ) .build(); let _bar = project() .at("bar") .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "pub fn baz() {}") .build(); foo.cargo("rustc --profile check -- --emit=metadata").run(); // Verify compatible usage of --profile with --release, issue #7488 foo.cargo("rustc --profile check --release -- --emit=metadata") .run(); foo.cargo("rustc --profile test --release -- --emit=metadata") .run(); } #[cargo_test] fn rustc_check_err() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" "#, ) .file( "src/main.rs", "extern crate bar; fn main() { ::bar::qux(); }", ) .build(); let _bar = project() .at("bar") .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "pub fn baz() {}") .build(); foo.cargo("rustc --profile check -- --emit=metadata") .with_status(101) .with_stderr_contains("[CHECKING] bar [..]") .with_stderr_contains("[CHECKING] foo [..]") .with_stderr_contains("[..]cannot find function `qux` in [..] `bar`") .run(); } #[cargo_test] fn check_all() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [workspace] [dependencies] b = { path = "b" } "#, ) .file("src/main.rs", "fn main() {}") .file("examples/a.rs", "fn main() {}") .file("tests/a.rs", "") .file("src/lib.rs", "") .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) .file("b/src/main.rs", "fn main() {}") .file("b/src/lib.rs", "") .build(); p.cargo("check --workspace -v") .with_stderr_contains("[..] 
--crate-name foo src/lib.rs [..]") .with_stderr_contains("[..] --crate-name foo src/main.rs [..]") .with_stderr_contains("[..] --crate-name b b/src/lib.rs [..]") .with_stderr_contains("[..] --crate-name b b/src/main.rs [..]") .run(); } #[cargo_test] fn check_all_exclude() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar", "baz"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }") .build(); p.cargo("check --workspace --exclude baz") .with_stderr_does_not_contain("[CHECKING] baz v0.1.0 [..]") .with_stderr( "\ [CHECKING] bar v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn check_all_exclude_glob() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar", "baz"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }") .build(); p.cargo("check --workspace --exclude '*z'") .with_stderr_does_not_contain("[CHECKING] baz v0.1.0 [..]") .with_stderr( "\ [CHECKING] bar v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn check_virtual_all_implied() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar", "baz"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/lib.rs", "pub fn baz() {}") .build(); p.cargo("check -v") .with_stderr_contains("[..] --crate-name bar bar/src/lib.rs [..]") .with_stderr_contains("[..] 
--crate-name baz baz/src/lib.rs [..]") .run(); } #[cargo_test] fn check_virtual_manifest_one_project() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar", "baz"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }") .build(); p.cargo("check -p bar") .with_stderr_does_not_contain("[CHECKING] baz v0.1.0 [..]") .with_stderr( "\ [CHECKING] bar v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn check_virtual_manifest_glob() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar", "baz"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() { break_the_build(); }") .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/lib.rs", "pub fn baz() {}") .build(); p.cargo("check -p '*z'") .with_stderr_does_not_contain("[CHECKING] bar v0.1.0 [..]") .with_stderr( "\ [CHECKING] baz v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn exclude_warns_on_non_existing_package() { let p = project().file("src/lib.rs", "").build(); p.cargo("check --workspace --exclude bar") .with_stdout("") .with_stderr( "\ [WARNING] excluded package(s) `bar` not found in workspace `[CWD]` [CHECKING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn targets_selected_default() { let foo = project() .file("src/main.rs", "fn main() {}") .file("src/lib.rs", "pub fn smth() {}") .file("examples/example1.rs", "fn main() {}") .file("tests/test2.rs", "#[test] fn t() {}") .file("benches/bench3.rs", "") .build(); foo.cargo("check -v") .with_stderr_contains("[..] --crate-name foo src/lib.rs [..]") .with_stderr_contains("[..] 
--crate-name foo src/main.rs [..]") .with_stderr_does_not_contain("[..] --crate-name example1 examples/example1.rs [..]") .with_stderr_does_not_contain("[..] --crate-name test2 tests/test2.rs [..]") .with_stderr_does_not_contain("[..] --crate-name bench3 benches/bench3.rs [..]") .run(); } #[cargo_test] fn targets_selected_all() { let foo = project() .file("src/main.rs", "fn main() {}") .file("src/lib.rs", "pub fn smth() {}") .file("examples/example1.rs", "fn main() {}") .file("tests/test2.rs", "#[test] fn t() {}") .file("benches/bench3.rs", "") .build(); foo.cargo("check --all-targets -v") .with_stderr_contains("[..] --crate-name foo src/lib.rs [..]") .with_stderr_contains("[..] --crate-name foo src/main.rs [..]") .with_stderr_contains("[..] --crate-name example1 examples/example1.rs [..]") .with_stderr_contains("[..] --crate-name test2 tests/test2.rs [..]") .with_stderr_contains("[..] --crate-name bench3 benches/bench3.rs [..]") .run(); } #[cargo_test] fn check_unit_test_profile() { let foo = project() .file( "src/lib.rs", r#" #[cfg(test)] mod tests { #[test] fn it_works() { badtext } } "#, ) .build(); foo.cargo("check").run(); foo.cargo("check --profile test") .with_status(101) .with_stderr_contains("[..]badtext[..]") .run(); } // Verify what is checked with various command-line filters. 
#[cargo_test] fn check_filters() { let p = project() .file( "src/lib.rs", r#" fn unused_normal_lib() {} #[cfg(test)] mod tests { fn unused_unit_lib() {} } "#, ) .file( "src/main.rs", r#" fn main() {} fn unused_normal_bin() {} #[cfg(test)] mod tests { fn unused_unit_bin() {} } "#, ) .file( "tests/t1.rs", r#" fn unused_normal_t1() {} #[cfg(test)] mod tests { fn unused_unit_t1() {} } "#, ) .file( "examples/ex1.rs", r#" fn main() {} fn unused_normal_ex1() {} #[cfg(test)] mod tests { fn unused_unit_ex1() {} } "#, ) .file( "benches/b1.rs", r#" fn unused_normal_b1() {} #[cfg(test)] mod tests { fn unused_unit_b1() {} } "#, ) .build(); p.cargo("check") .with_stderr_contains("[..]unused_normal_lib[..]") .with_stderr_contains("[..]unused_normal_bin[..]") .with_stderr_does_not_contain("[..]unused_normal_t1[..]") .with_stderr_does_not_contain("[..]unused_normal_ex1[..]") .with_stderr_does_not_contain("[..]unused_normal_b1[..]") .with_stderr_does_not_contain("[..]unused_unit_[..]") .run(); p.root().join("target").rm_rf(); p.cargo("check --tests -v") .with_stderr_contains("[..] --crate-name foo src/lib.rs [..] --test [..]") .with_stderr_contains("[..] --crate-name foo src/lib.rs [..] --crate-type lib [..]") .with_stderr_contains("[..] --crate-name foo src/main.rs [..] 
--test [..]") .with_stderr_contains("[..]unused_unit_lib[..]") .with_stderr_contains("[..]unused_unit_bin[..]") .with_stderr_contains("[..]unused_normal_lib[..]") .with_stderr_contains("[..]unused_normal_bin[..]") .with_stderr_contains("[..]unused_unit_t1[..]") .with_stderr_does_not_contain("[..]unused_normal_ex1[..]") .with_stderr_does_not_contain("[..]unused_unit_ex1[..]") .with_stderr_does_not_contain("[..]unused_normal_b1[..]") .with_stderr_does_not_contain("[..]unused_unit_b1[..]") .with_stderr_does_not_contain("[..]--crate-type bin[..]") .run(); p.root().join("target").rm_rf(); p.cargo("check --test t1 -v") .with_stderr_contains("[..]unused_normal_lib[..]") .with_stderr_contains("[..]unused_unit_t1[..]") .with_stderr_does_not_contain("[..]unused_unit_lib[..]") .with_stderr_does_not_contain("[..]unused_normal_bin[..]") .with_stderr_does_not_contain("[..]unused_unit_bin[..]") .with_stderr_does_not_contain("[..]unused_normal_ex1[..]") .with_stderr_does_not_contain("[..]unused_normal_b1[..]") .with_stderr_does_not_contain("[..]unused_unit_ex1[..]") .with_stderr_does_not_contain("[..]unused_unit_b1[..]") .run(); p.root().join("target").rm_rf(); p.cargo("check --all-targets -v") .with_stderr_contains("[..]unused_normal_lib[..]") .with_stderr_contains("[..]unused_normal_bin[..]") .with_stderr_contains("[..]unused_normal_t1[..]") .with_stderr_contains("[..]unused_normal_ex1[..]") .with_stderr_contains("[..]unused_normal_b1[..]") .with_stderr_contains("[..]unused_unit_b1[..]") .with_stderr_contains("[..]unused_unit_t1[..]") .with_stderr_contains("[..]unused_unit_lib[..]") .with_stderr_contains("[..]unused_unit_bin[..]") .with_stderr_does_not_contain("[..]unused_unit_ex1[..]") .run(); } #[cargo_test] fn check_artifacts() { // Verify which artifacts are created when running check (#4059). 
let p = project() .file("src/lib.rs", "") .file("src/main.rs", "fn main() {}") .file("tests/t1.rs", "") .file("examples/ex1.rs", "fn main() {}") .file("benches/b1.rs", "") .build(); p.cargo("check").run(); assert!(!p.root().join("target/debug/libfoo.rmeta").is_file()); assert!(!p.root().join("target/debug/libfoo.rlib").is_file()); assert!(!p.root().join("target/debug").join(exe("foo")).is_file()); assert_eq!(p.glob("target/debug/deps/libfoo-*.rmeta").count(), 2); p.root().join("target").rm_rf(); p.cargo("check --lib").run(); assert!(!p.root().join("target/debug/libfoo.rmeta").is_file()); assert!(!p.root().join("target/debug/libfoo.rlib").is_file()); assert!(!p.root().join("target/debug").join(exe("foo")).is_file()); assert_eq!(p.glob("target/debug/deps/libfoo-*.rmeta").count(), 1); p.root().join("target").rm_rf(); p.cargo("check --bin foo").run(); assert!(!p.root().join("target/debug/libfoo.rmeta").is_file()); assert!(!p.root().join("target/debug/libfoo.rlib").is_file()); assert!(!p.root().join("target/debug").join(exe("foo")).is_file()); assert_eq!(p.glob("target/debug/deps/libfoo-*.rmeta").count(), 2); p.root().join("target").rm_rf(); p.cargo("check --test t1").run(); assert!(!p.root().join("target/debug/libfoo.rmeta").is_file()); assert!(!p.root().join("target/debug/libfoo.rlib").is_file()); assert!(!p.root().join("target/debug").join(exe("foo")).is_file()); assert_eq!(p.glob("target/debug/t1-*").count(), 0); assert_eq!(p.glob("target/debug/deps/libfoo-*.rmeta").count(), 1); assert_eq!(p.glob("target/debug/deps/libt1-*.rmeta").count(), 1); p.root().join("target").rm_rf(); p.cargo("check --example ex1").run(); assert!(!p.root().join("target/debug/libfoo.rmeta").is_file()); assert!(!p.root().join("target/debug/libfoo.rlib").is_file()); assert!(!p .root() .join("target/debug/examples") .join(exe("ex1")) .is_file()); assert_eq!(p.glob("target/debug/deps/libfoo-*.rmeta").count(), 1); assert_eq!(p.glob("target/debug/examples/libex1-*.rmeta").count(), 1); 
p.root().join("target").rm_rf(); p.cargo("check --bench b1").run(); assert!(!p.root().join("target/debug/libfoo.rmeta").is_file()); assert!(!p.root().join("target/debug/libfoo.rlib").is_file()); assert!(!p.root().join("target/debug").join(exe("foo")).is_file()); assert_eq!(p.glob("target/debug/b1-*").count(), 0); assert_eq!(p.glob("target/debug/deps/libfoo-*.rmeta").count(), 1); assert_eq!(p.glob("target/debug/deps/libb1-*.rmeta").count(), 1); } #[cargo_test] fn short_message_format() { let foo = project() .file("src/lib.rs", "fn foo() { let _x: bool = 'a'; }") .build(); foo.cargo("check --message-format=short") .with_status(101) .with_stderr_contains( "\ src/lib.rs:1:27: error[E0308]: mismatched types error: could not compile `foo` due to previous error ", ) .run(); } #[cargo_test] fn proc_macro() { let p = project() .file( "Cargo.toml", r#" [package] name = "demo" version = "0.0.1" [lib] proc-macro = true "#, ) .file( "src/lib.rs", r#" extern crate proc_macro; use proc_macro::TokenStream; #[proc_macro_derive(Foo)] pub fn demo(_input: TokenStream) -> TokenStream { "".parse().unwrap() } "#, ) .file( "src/main.rs", r#" #[macro_use] extern crate demo; #[derive(Foo)] struct A; fn main() {} "#, ) .build(); p.cargo("check -v").env("CARGO_LOG", "cargo=trace").run(); } #[cargo_test] fn check_keep_going() { let foo = project() .file("src/bin/one.rs", "compile_error!(\"ONE\"); fn main() {}") .file("src/bin/two.rs", "compile_error!(\"TWO\"); fn main() {}") .build(); // Due to -j1, without --keep-going only one of the two bins would be built. foo.cargo("check -j1 --keep-going -Zunstable-options") .masquerade_as_nightly_cargo(&["keep-going"]) .with_status(101) .with_stderr_contains("error: ONE") .with_stderr_contains("error: TWO") .run(); } #[cargo_test] fn does_not_use_empty_rustc_wrapper() { // An empty RUSTC_WRAPPER environment variable won't be used. // The env var will also override the config, essentially unsetting it. 
let p = project() .file("src/lib.rs", "") .file( ".cargo/config.toml", r#" [build] rustc-wrapper = "do-not-execute-me" "#, ) .build(); p.cargo("check").env("RUSTC_WRAPPER", "").run(); } #[cargo_test] fn does_not_use_empty_rustc_workspace_wrapper() { let p = project().file("src/lib.rs", "").build(); p.cargo("check").env("RUSTC_WORKSPACE_WRAPPER", "").run(); } #[cargo_test] fn error_from_deep_recursion() -> Result<(), fmt::Error> { let mut big_macro = String::new(); writeln!(big_macro, "macro_rules! m {{")?; for i in 0..130 { writeln!(big_macro, "({}) => {{ m!({}); }};", i, i + 1)?; } writeln!(big_macro, "}}")?; writeln!(big_macro, "m!(0);")?; let p = project().file("src/lib.rs", &big_macro).build(); p.cargo("check --message-format=json") .with_status(101) .with_stdout_contains( "[..]\"message\":\"recursion limit reached while expanding [..]`m[..]`\"[..]", ) .run(); Ok(()) } #[cargo_test] fn rustc_workspace_wrapper_affects_all_workspace_members() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar", "baz"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/lib.rs", "pub fn baz() {}") .build(); p.cargo("check") .env("RUSTC_WORKSPACE_WRAPPER", tools::echo_wrapper()) .with_stderr_contains("WRAPPER CALLED: rustc --crate-name bar [..]") .with_stderr_contains("WRAPPER CALLED: rustc --crate-name baz [..]") .run(); } #[cargo_test] fn rustc_workspace_wrapper_includes_path_deps() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [workspace] members = ["bar"] [dependencies] baz = { path = "baz" } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/lib.rs", "pub fn baz() {}") .build(); p.cargo("check --workspace") 
.env("RUSTC_WORKSPACE_WRAPPER", tools::echo_wrapper()) .with_stderr_contains("WRAPPER CALLED: rustc --crate-name foo [..]") .with_stderr_contains("WRAPPER CALLED: rustc --crate-name bar [..]") .with_stderr_contains("WRAPPER CALLED: rustc --crate-name baz [..]") .run(); } #[cargo_test] fn rustc_workspace_wrapper_respects_primary_units() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar", "baz"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/lib.rs", "pub fn baz() {}") .build(); p.cargo("check -p bar") .env("RUSTC_WORKSPACE_WRAPPER", tools::echo_wrapper()) .with_stderr_contains("WRAPPER CALLED: rustc --crate-name bar [..]") .with_stdout_does_not_contain("WRAPPER CALLED: rustc --crate-name baz [..]") .run(); } #[cargo_test] fn rustc_workspace_wrapper_excludes_published_deps() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [workspace] members = ["bar"] [dependencies] baz = "1.0.0" "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .build(); Package::new("baz", "1.0.0").publish(); p.cargo("check --workspace -v") .env("RUSTC_WORKSPACE_WRAPPER", tools::echo_wrapper()) .with_stderr_contains("WRAPPER CALLED: rustc --crate-name foo [..]") .with_stderr_contains("WRAPPER CALLED: rustc --crate-name bar [..]") .with_stderr_contains("[CHECKING] baz [..]") .with_stdout_does_not_contain("WRAPPER CALLED: rustc --crate-name baz [..]") .run(); } cargo-0.66.0/tests/testsuite/check_cfg.rs000066400000000000000000000412711432416201200203330ustar00rootroot00000000000000//! Tests for -Zcheck-cfg. use cargo_test_support::{basic_manifest, project}; macro_rules! x { ($tool:tt => $what:tt $(of $who:tt)?) => {{ #[cfg(windows)] { concat!("[RUNNING] [..]", $tool, "[..] 
--check-cfg ", $what, '(', $($who,)* ')', "[..]") } #[cfg(not(windows))] { concat!("[RUNNING] [..]", $tool, "[..] --check-cfg '", $what, '(', $($who,)* ')', "'", "[..]") } }}; ($tool:tt => $what:tt of $who:tt with $($values:tt)*) => {{ #[cfg(windows)] { concat!("[RUNNING] [..]", $tool, "[..] --check-cfg \"", $what, '(', $who, $(", ", "/\"", $values, "/\"",)* ")", '"', "[..]") } #[cfg(not(windows))] { concat!("[RUNNING] [..]", $tool, "[..] --check-cfg '", $what, '(', $who, $(", ", "\"", $values, "\"",)* ")", "'", "[..]") } }}; } #[cargo_test(nightly, reason = "--check-cfg is unstable")] fn features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [features] f_a = [] f_b = [] "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build -v -Zcheck-cfg=features") .masquerade_as_nightly_cargo(&["check-cfg"]) .with_stderr_contains(x!("rustc" => "values" of "feature" with "f_a" "f_b")) .run(); } #[cargo_test(nightly, reason = "--check-cfg is unstable")] fn features_with_deps() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [dependencies] bar = { path = "bar/" } [features] f_a = [] f_b = [] "#, ) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "#[allow(dead_code)] fn bar() {}") .build(); p.cargo("build -v -Zcheck-cfg=features") .masquerade_as_nightly_cargo(&["check-cfg"]) .with_stderr_contains(x!("rustc" => "values" of "feature")) .with_stderr_contains(x!("rustc" => "values" of "feature" with "f_a" "f_b")) .run(); } #[cargo_test(nightly, reason = "--check-cfg is unstable")] fn features_with_opt_deps() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [dependencies] bar = { path = "bar/", optional = true } [features] default = ["bar"] f_a = [] f_b = [] "#, ) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", 
"#[allow(dead_code)] fn bar() {}") .build(); p.cargo("build -v -Zcheck-cfg=features") .masquerade_as_nightly_cargo(&["check-cfg"]) .with_stderr_contains(x!("rustc" => "values" of "feature")) .with_stderr_contains(x!("rustc" => "values" of "feature" with "bar" "default" "f_a" "f_b")) .run(); } #[cargo_test(nightly, reason = "--check-cfg is unstable")] fn features_with_namespaced_features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [dependencies] bar = { path = "bar/", optional = true } [features] f_a = ["dep:bar"] f_b = [] "#, ) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "#[allow(dead_code)] fn bar() {}") .build(); p.cargo("build -v -Zcheck-cfg=features") .masquerade_as_nightly_cargo(&["check-cfg"]) .with_stderr_contains(x!("rustc" => "values" of "feature" with "f_a" "f_b")) .run(); } #[cargo_test(nightly, reason = "--check-cfg is unstable")] fn well_known_names() { let p = project() .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build -v -Zcheck-cfg=names") .masquerade_as_nightly_cargo(&["check-cfg"]) .with_stderr_contains(x!("rustc" => "names")) .run(); } #[cargo_test(nightly, reason = "--check-cfg is unstable")] fn well_known_values() { let p = project() .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build -v -Zcheck-cfg=values") .masquerade_as_nightly_cargo(&["check-cfg"]) .with_stderr_contains(x!("rustc" => "values")) .run(); } #[cargo_test(nightly, reason = "--check-cfg is unstable")] fn cli_all_options() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [features] f_a = [] f_b = [] "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build -v -Zcheck-cfg=features,names,values") .masquerade_as_nightly_cargo(&["check-cfg"]) .with_stderr_contains(x!("rustc" => "names")) 
.with_stderr_contains(x!("rustc" => "values")) .with_stderr_contains(x!("rustc" => "values" of "feature" with "f_a" "f_b")) .run(); } #[cargo_test(nightly, reason = "--check-cfg is unstable")] fn features_with_cargo_check() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [features] f_a = [] f_b = [] "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("check -v -Zcheck-cfg=features") .masquerade_as_nightly_cargo(&["check-cfg"]) .with_stderr_contains(x!("rustc" => "values" of "feature" with "f_a" "f_b")) .run(); } #[cargo_test(nightly, reason = "--check-cfg is unstable")] fn well_known_names_with_check() { let p = project() .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("src/main.rs", "fn main() {}") .build(); p.cargo("check -v -Zcheck-cfg=names") .masquerade_as_nightly_cargo(&["check-cfg"]) .with_stderr_contains(x!("rustc" => "names")) .run(); } #[cargo_test(nightly, reason = "--check-cfg is unstable")] fn well_known_values_with_check() { let p = project() .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("src/main.rs", "fn main() {}") .build(); p.cargo("check -v -Zcheck-cfg=values") .masquerade_as_nightly_cargo(&["check-cfg"]) .with_stderr_contains(x!("rustc" => "values")) .run(); } #[cargo_test(nightly, reason = "--check-cfg is unstable")] fn features_test() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [features] f_a = [] f_b = [] "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("test -v -Zcheck-cfg=features") .masquerade_as_nightly_cargo(&["check-cfg"]) .with_stderr_contains(x!("rustc" => "values" of "feature" with "f_a" "f_b")) .run(); } #[cargo_test(nightly, reason = "--check-cfg is unstable")] fn features_doctest() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [features] default = ["f_a"] f_a = [] f_b = [] "#, ) .file("src/lib.rs", "#[allow(dead_code)] fn foo() {}") .build(); p.cargo("test -v --doc 
-Zcheck-cfg=features") .masquerade_as_nightly_cargo(&["check-cfg"]) .with_stderr_contains(x!("rustc" => "values" of "feature" with "default" "f_a" "f_b")) .with_stderr_contains(x!("rustdoc" => "values" of "feature" with "default" "f_a" "f_b")) .run(); } #[cargo_test(nightly, reason = "--check-cfg is unstable")] fn well_known_names_test() { let p = project() .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("src/main.rs", "fn main() {}") .build(); p.cargo("test -v -Zcheck-cfg=names") .masquerade_as_nightly_cargo(&["check-cfg"]) .with_stderr_contains(x!("rustc" => "names")) .run(); } #[cargo_test(nightly, reason = "--check-cfg is unstable")] fn well_known_values_test() { let p = project() .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("src/main.rs", "fn main() {}") .build(); p.cargo("test -v -Zcheck-cfg=values") .masquerade_as_nightly_cargo(&["check-cfg"]) .with_stderr_contains(x!("rustc" => "values")) .run(); } #[cargo_test(nightly, reason = "--check-cfg is unstable")] fn well_known_names_doctest() { let p = project() .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("src/lib.rs", "#[allow(dead_code)] fn foo() {}") .build(); p.cargo("test -v --doc -Zcheck-cfg=names") .masquerade_as_nightly_cargo(&["check-cfg"]) .with_stderr_contains(x!("rustc" => "names")) .with_stderr_contains(x!("rustdoc" => "names")) .run(); } #[cargo_test(nightly, reason = "--check-cfg is unstable")] fn well_known_values_doctest() { let p = project() .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("src/lib.rs", "#[allow(dead_code)] fn foo() {}") .build(); p.cargo("test -v --doc -Zcheck-cfg=values") .masquerade_as_nightly_cargo(&["check-cfg"]) .with_stderr_contains(x!("rustc" => "values")) .with_stderr_contains(x!("rustdoc" => "values")) .run(); } #[cargo_test(nightly, reason = "--check-cfg is unstable")] fn features_doc() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [features] default = ["f_a"] f_a = [] f_b = [] 
"#, ) .file("src/lib.rs", "#[allow(dead_code)] fn foo() {}") .build(); p.cargo("doc -v -Zcheck-cfg=features") .masquerade_as_nightly_cargo(&["check-cfg"]) .with_stderr_contains(x!("rustdoc" => "values" of "feature" with "default" "f_a" "f_b")) .run(); } #[cargo_test(nightly, reason = "--check-cfg is unstable")] fn build_script_feedback() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" "#, ) .file("src/main.rs", "fn main() {}") .file( "build.rs", r#"fn main() { println!("cargo:rustc-check-cfg=names(foo)"); }"#, ) .build(); p.cargo("build -v -Zcheck-cfg=output") .masquerade_as_nightly_cargo(&["check-cfg"]) .with_stderr_contains(x!("rustc" => "names" of "foo")) .run(); } #[cargo_test(nightly, reason = "--check-cfg is unstable")] fn build_script_doc() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" "#, ) .file("src/main.rs", "fn main() {}") .file( "build.rs", r#"fn main() { println!("cargo:rustc-check-cfg=names(foo)"); }"#, ) .build(); p.cargo("doc -v -Zcheck-cfg=output") .with_stderr_does_not_contain("rustc [..] --check-cfg [..]") .with_stderr_contains(x!("rustdoc" => "names" of "foo")) .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc [..] build.rs [..]` [RUNNING] `[..]/build-script-build` [DOCUMENTING] foo [..] [RUNNING] `rustdoc [..] src/main.rs [..] 
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", ) .masquerade_as_nightly_cargo(&["check-cfg"]) .run(); } #[cargo_test(nightly, reason = "--check-cfg is unstable")] fn build_script_override() { let target = cargo_test_support::rustc_host(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] links = "a" build = "build.rs" "#, ) .file("src/main.rs", "fn main() {}") .file("build.rs", "") .file( ".cargo/config", &format!( r#" [target.{}.a] rustc-check-cfg = ["names(foo)"] "#, target ), ) .build(); p.cargo("build -v -Zcheck-cfg=output") .with_stderr_contains(x!("rustc" => "names" of "foo")) .masquerade_as_nightly_cargo(&["check-cfg"]) .run(); } #[cargo_test(nightly, reason = "--check-cfg is unstable")] fn build_script_test() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" "#, ) .file( "build.rs", r#"fn main() { println!("cargo:rustc-check-cfg=names(foo)"); println!("cargo:rustc-cfg=foo"); }"#, ) .file( "src/lib.rs", r#" /// /// ``` /// extern crate foo; /// /// fn main() { /// foo::foo() /// } /// ``` /// #[cfg(foo)] pub fn foo() {} #[cfg(foo)] #[test] fn test_foo() { foo() } "#, ) .file("tests/test.rs", "#[cfg(foo)] #[test] fn test_bar() {}") .build(); p.cargo("test -v -Zcheck-cfg=output") .with_stderr_contains(x!("rustc" => "names" of "foo")) .with_stderr_contains(x!("rustdoc" => "names" of "foo")) .with_stdout_contains("test test_foo ... ok") .with_stdout_contains("test test_bar ... ok") .with_stdout_contains_n("test [..] ... 
ok", 3) .masquerade_as_nightly_cargo(&["check-cfg"]) .run(); } #[cargo_test(nightly, reason = "--check-cfg is unstable")] fn config_valid() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [features] f_a = [] f_b = [] "#, ) .file("src/main.rs", "fn main() {}") .file( ".cargo/config.toml", r#" [unstable] check-cfg = ["features", "names", "values"] "#, ) .build(); p.cargo("build -v -Zcheck-cfg=features,names,values") .masquerade_as_nightly_cargo(&["check-cfg"]) .with_stderr_contains(x!("rustc" => "names")) .with_stderr_contains(x!("rustc" => "values")) .with_stderr_contains(x!("rustc" => "values" of "feature" with "f_a" "f_b")) .run(); } #[cargo_test(nightly, reason = "--check-cfg is unstable")] fn config_invalid() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" "#, ) .file("src/main.rs", "fn main() {}") .file( ".cargo/config.toml", r#" [unstable] check-cfg = ["va"] "#, ) .build(); p.cargo("build") .masquerade_as_nightly_cargo(&["check-cfg"]) .with_stderr_contains("error: unstable check-cfg only takes `features`, `names`, `values` or `output` as valid inputs") .with_status(101) .run(); } cargo-0.66.0/tests/testsuite/clean.rs000066400000000000000000000362411432416201200175220ustar00rootroot00000000000000//! Tests for the `cargo clean` command. 
use cargo_test_support::registry::Package; use cargo_test_support::{ basic_bin_manifest, basic_manifest, git, main_file, project, project_in, rustc_host, }; use glob::GlobError; use std::env; use std::path::{Path, PathBuf}; #[cargo_test] fn cargo_clean_simple() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("build").run(); assert!(p.build_dir().is_dir()); p.cargo("clean").run(); assert!(!p.build_dir().is_dir()); } #[cargo_test] fn different_dir() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .file("src/bar/a.rs", "") .build(); p.cargo("build").run(); assert!(p.build_dir().is_dir()); p.cargo("clean").cwd("src").with_stdout("").run(); assert!(!p.build_dir().is_dir()); } #[cargo_test] fn clean_multiple_packages() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.d1] path = "d1" [dependencies.d2] path = "d2" [[bin]] name = "foo" "#, ) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .file("d1/Cargo.toml", &basic_bin_manifest("d1")) .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }") .file("d2/Cargo.toml", &basic_bin_manifest("d2")) .file("d2/src/main.rs", "fn main() { println!(\"d2\"); }") .build(); p.cargo("build -p d1 -p d2 -p foo").run(); let d1_path = &p .build_dir() .join("debug") .join(format!("d1{}", env::consts::EXE_SUFFIX)); let d2_path = &p .build_dir() .join("debug") .join(format!("d2{}", env::consts::EXE_SUFFIX)); assert!(p.bin("foo").is_file()); assert!(d1_path.is_file()); assert!(d2_path.is_file()); p.cargo("clean -p d1 -p d2") .cwd("src") .with_stdout("") .run(); assert!(p.bin("foo").is_file()); assert!(!d1_path.is_file()); assert!(!d2_path.is_file()); } #[cargo_test] fn clean_multiple_packages_in_glob_char_path() { let p = project_in("[d1]") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", 
&main_file(r#""i am foo""#, &[])) .build(); let foo_path = &p.build_dir().join("debug").join("deps"); // Assert that build artifacts are produced p.cargo("build").run(); assert_ne!(get_build_artifacts(foo_path).len(), 0); // Assert that build artifacts are destroyed p.cargo("clean -p foo").run(); assert_eq!(get_build_artifacts(foo_path).len(), 0); } fn get_build_artifacts(path: &PathBuf) -> Vec> { let pattern = path.to_str().expect("expected utf-8 path"); let pattern = glob::Pattern::escape(pattern); #[cfg(not(target_env = "msvc"))] const FILE: &str = "foo-*"; #[cfg(target_env = "msvc")] const FILE: &str = "foo.pdb"; let path = PathBuf::from(pattern).join(FILE); let path = path.to_str().expect("expected utf-8 path"); glob::glob(path) .expect("expected glob to run") .into_iter() .collect::>>() } #[cargo_test] fn clean_release() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] a = { path = "a" } "#, ) .file("src/main.rs", "fn main() {}") .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) .file("a/src/lib.rs", "") .build(); p.cargo("build --release").run(); p.cargo("clean -p foo").run(); p.cargo("build --release").with_stdout("").run(); p.cargo("clean -p foo --release").run(); p.cargo("build --release") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [FINISHED] release [optimized] target(s) in [..] 
", ) .run(); p.cargo("build").run(); p.cargo("clean").arg("--release").run(); assert!(p.build_dir().is_dir()); assert!(p.build_dir().join("debug").is_dir()); assert!(!p.build_dir().join("release").is_dir()); } #[cargo_test] fn clean_doc() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] a = { path = "a" } "#, ) .file("src/main.rs", "fn main() {}") .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) .file("a/src/lib.rs", "") .build(); p.cargo("doc").run(); let doc_path = &p.build_dir().join("doc"); assert!(doc_path.is_dir()); p.cargo("clean --doc").run(); assert!(!doc_path.is_dir()); assert!(p.build_dir().is_dir()); } #[cargo_test] fn build_script() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" "#, ) .file("src/main.rs", "fn main() {}") .file( "build.rs", r#" use std::path::PathBuf; use std::env; fn main() { let out = PathBuf::from(env::var_os("OUT_DIR").unwrap()); if env::var("FIRST").is_ok() { std::fs::File::create(out.join("out")).unwrap(); } else { assert!(!out.join("out").exists()); } } "#, ) .file("a/src/lib.rs", "") .build(); p.cargo("build").env("FIRST", "1").run(); p.cargo("clean -p foo").run(); p.cargo("build -v") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [RUNNING] `rustc [..] build.rs [..]` [RUNNING] `[..]build-script-build` [RUNNING] `rustc [..] src/main.rs [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn clean_git() { let git = git::new("dep", |project| { project .file("Cargo.toml", &basic_manifest("dep", "0.5.0")) .file("src/lib.rs", "") }); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] dep = {{ git = '{}' }} "#, git.url() ), ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build").run(); p.cargo("clean -p dep").with_stdout("").run(); p.cargo("build").run(); } #[cargo_test] fn registry() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("bar", "0.1.0").publish(); p.cargo("build").run(); p.cargo("clean -p bar").with_stdout("").run(); p.cargo("build").run(); } #[cargo_test] fn clean_verbose() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [dependencies] bar = "0.1" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("bar", "0.1.0").publish(); p.cargo("build").run(); p.cargo("clean -p bar --verbose") .with_stderr( "\ [REMOVING] [..] [REMOVING] [..] [REMOVING] [..] [REMOVING] [..] ", ) .run(); p.cargo("build").run(); } #[cargo_test] fn clean_remove_rlib_rmeta() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build").run(); assert!(p.target_debug_dir().join("libfoo.rlib").exists()); let rmeta = p.glob("target/debug/deps/*.rmeta").next().unwrap().unwrap(); assert!(rmeta.exists()); p.cargo("clean -p foo").run(); assert!(!p.target_debug_dir().join("libfoo.rlib").exists()); assert!(!rmeta.exists()); } #[cargo_test] fn package_cleans_all_the_things() { // -p cleans everything // Use dashes everywhere to make sure dash/underscore stuff is handled. 
for crate_type in &["rlib", "dylib", "cdylib", "staticlib", "proc-macro"] { // Try each crate type individually since the behavior changes when // they are combined. let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo-bar" version = "0.1.0" [lib] crate-type = ["{}"] "#, crate_type ), ) .file("src/lib.rs", "") .build(); p.cargo("build").run(); p.cargo("clean -p foo-bar").run(); assert_all_clean(&p.build_dir()); } let p = project() .file( "Cargo.toml", r#" [package] name = "foo-bar" version = "0.1.0" edition = "2018" [lib] crate-type = ["rlib", "dylib", "staticlib"] [[example]] name = "foo-ex-rlib" crate-type = ["rlib"] test = true [[example]] name = "foo-ex-cdylib" crate-type = ["cdylib"] test = true [[example]] name = "foo-ex-bin" test = true "#, ) .file("src/lib.rs", "") .file("src/lib/some-main.rs", "fn main() {}") .file("src/bin/other-main.rs", "fn main() {}") .file("examples/foo-ex-rlib.rs", "") .file("examples/foo-ex-cdylib.rs", "") .file("examples/foo-ex-bin.rs", "fn main() {}") .file("tests/foo-test.rs", "") .file("benches/foo-bench.rs", "") .file("build.rs", "fn main() {}") .build(); p.cargo("build --all-targets") .env("CARGO_INCREMENTAL", "1") .run(); p.cargo("test --all-targets") .env("CARGO_INCREMENTAL", "1") .run(); p.cargo("check --all-targets") .env("CARGO_INCREMENTAL", "1") .run(); p.cargo("clean -p foo-bar").run(); assert_all_clean(&p.build_dir()); // Try some targets. p.cargo("build --all-targets --target") .arg(rustc_host()) .run(); p.cargo("clean -p foo-bar --target").arg(rustc_host()).run(); assert_all_clean(&p.build_dir()); } // Ensures that all files for the package have been deleted. #[track_caller] fn assert_all_clean(build_dir: &Path) { let walker = walkdir::WalkDir::new(build_dir).into_iter(); for entry in walker.filter_entry(|e| { let path = e.path(); // This is a known limitation, clean can't differentiate between // the different build scripts from different packages. 
!(path .file_name() .unwrap() .to_str() .unwrap() .starts_with("build_script_build") && path .parent() .unwrap() .file_name() .unwrap() .to_str() .unwrap() == "incremental") }) { let entry = entry.unwrap(); let path = entry.path(); if let ".rustc_info.json" | ".cargo-lock" | "CACHEDIR.TAG" = path.file_name().unwrap().to_str().unwrap() { continue; } if path.is_symlink() || path.is_file() { panic!("{:?} was not cleaned", path); } } } #[cargo_test] fn clean_spec_multiple() { // clean -p foo where foo matches multiple versions Package::new("bar", "1.0.0").publish(); Package::new("bar", "2.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar1 = {version="1.0", package="bar"} bar2 = {version="2.0", package="bar"} "#, ) .file("src/lib.rs", "") .build(); p.cargo("build").run(); // Check suggestion for bad pkgid. p.cargo("clean -p baz") .with_status(101) .with_stderr( "\ error: package ID specification `baz` did not match any packages Did you mean `bar`? ", ) .run(); p.cargo("clean -p bar:1.0.0") .with_stderr( "warning: version qualifier in `-p bar:1.0.0` is ignored, \ cleaning all versions of `bar` found", ) .run(); let mut walker = walkdir::WalkDir::new(p.build_dir()) .into_iter() .filter_map(|e| e.ok()) .filter(|e| { let n = e.file_name().to_str().unwrap(); n.starts_with("bar") || n.starts_with("libbar") }); if let Some(e) = walker.next() { panic!("{:?} was not cleaned", e.path()); } } #[cargo_test] fn clean_spec_reserved() { // Clean when a target (like a test) has a reserved name. In this case, // make sure `clean -p` doesn't delete the reserved directory `build` when // there is a test named `build`. 
Package::new("bar", "1.0.0") .file("src/lib.rs", "") .file("build.rs", "fn main() {}") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "1.0" "#, ) .file("src/lib.rs", "") .file("tests/build.rs", "") .build(); p.cargo("build --all-targets").run(); assert!(p.target_debug_dir().join("build").is_dir()); let build_test = p.glob("target/debug/deps/build-*").next().unwrap().unwrap(); assert!(build_test.exists()); // Tests are never "uplifted". assert!(p.glob("target/debug/build-*").next().is_none()); p.cargo("clean -p foo").run(); // Should not delete this. assert!(p.target_debug_dir().join("build").is_dir()); // This should not rebuild bar. p.cargo("build -v --all-targets") .with_stderr( "\ [FRESH] bar v1.0.0 [COMPILING] foo v0.1.0 [..] [RUNNING] `rustc [..] [RUNNING] `rustc [..] [RUNNING] `rustc [..] [FINISHED] [..] ", ) .run(); } cargo-0.66.0/tests/testsuite/collisions.rs000066400000000000000000000364421432416201200206210ustar00rootroot00000000000000//! Tests for when multiple artifacts have the same output filename. //! See https://github.com/rust-lang/cargo/issues/6313 for more details. //! Ideally these should never happen, but I don't think we'll ever be able to //! prevent all collisions. use cargo_test_support::registry::Package; use cargo_test_support::{basic_manifest, cross_compile, project}; use std::env; #[cargo_test] fn collision_dylib() { // Path dependencies don't include metadata hash in filename for dylibs. let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a", "b"] "#, ) .file( "a/Cargo.toml", r#" [package] name = "a" version = "1.0.0" [lib] crate-type = ["dylib"] "#, ) .file("a/src/lib.rs", "") .file( "b/Cargo.toml", r#" [package] name = "b" version = "1.0.0" [lib] crate-type = ["dylib"] name = "a" "#, ) .file("b/src/lib.rs", "") .build(); // `j=1` is required because on Windows you'll get an error due to // two processes writing to the file at the same time. 
p.cargo("build -j=1") .with_stderr_contains(&format!("\ [WARNING] output filename collision. The lib target `a` in package `b v1.0.0 ([..]/foo/b)` has the same output filename as the lib target `a` in package `a v1.0.0 ([..]/foo/a)`. Colliding filename is: [..]/foo/target/debug/deps/{}a{} The targets should have unique names. Consider changing their names to be unique or compiling them separately. This may become a hard error in the future; see . ", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX)) .run(); } #[cargo_test] fn collision_example() { // Examples in a workspace can easily collide. let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a", "b"] "#, ) .file("a/Cargo.toml", &basic_manifest("a", "1.0.0")) .file("a/examples/ex1.rs", "fn main() {}") .file("b/Cargo.toml", &basic_manifest("b", "1.0.0")) .file("b/examples/ex1.rs", "fn main() {}") .build(); // `j=1` is required because on Windows you'll get an error due to // two processes writing to the file at the same time. p.cargo("build --examples -j=1") .with_stderr_contains("\ [WARNING] output filename collision. The example target `ex1` in package `b v1.0.0 ([..]/foo/b)` has the same output filename as the example target `ex1` in package `a v1.0.0 ([..]/foo/a)`. Colliding filename is: [..]/foo/target/debug/examples/ex1[EXE] The targets should have unique names. Consider changing their names to be unique or compiling them separately. This may become a hard error in the future; see . ") .run(); } #[cargo_test] // See https://github.com/rust-lang/cargo/issues/7493 #[cfg_attr( any(target_env = "msvc", target_vendor = "apple"), ignore = "--out-dir and examples are currently broken on MSVC and apple" )] fn collision_export() { // `--out-dir` combines some things which can cause conflicts. 
let p = project() .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) .file("examples/foo.rs", "fn main() {}") .file("src/main.rs", "fn main() {}") .build(); // -j1 to avoid issues with two processes writing to the same file at the // same time. p.cargo("build -j1 --out-dir=out -Z unstable-options --bins --examples") .masquerade_as_nightly_cargo(&["out-dir"]) .with_stderr_contains("\ [WARNING] `--out-dir` filename collision. The example target `foo` in package `foo v1.0.0 ([..]/foo)` has the same output filename as the bin target `foo` in package `foo v1.0.0 ([..]/foo)`. Colliding filename is: [..]/foo/out/foo[EXE] The exported filenames should be unique. Consider changing their names to be unique or compiling them separately. This may become a hard error in the future; see . ") .run(); } #[cargo_test] fn collision_doc() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] foo2 = { path = "foo2" } "#, ) .file("src/lib.rs", "") .file( "foo2/Cargo.toml", r#" [package] name = "foo2" version = "0.1.0" [lib] name = "foo" "#, ) .file("foo2/src/lib.rs", "") .build(); p.cargo("doc -j=1") .with_stderr_contains( "\ [WARNING] output filename collision. The lib target `foo` in package `foo2 v0.1.0 ([..]/foo/foo2)` has the same output \ filename as the lib target `foo` in package `foo v0.1.0 ([..]/foo)`. Colliding filename is: [..]/foo/target/doc/foo/index.html The targets should have unique names. This is a known bug where multiple crates with the same name use the same path; see . ", ) .run(); } #[cargo_test] fn collision_doc_multiple_versions() { // Multiple versions of the same package. Package::new("old-dep", "1.0.0").publish(); Package::new("bar", "1.0.0").dep("old-dep", "1.0").publish(); // Note that this removes "old-dep". Just checking what happens when there // are orphans. 
Package::new("bar", "2.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "1.0" bar2 = { package="bar", version="2.0" } "#, ) .file("src/lib.rs", "") .build(); // Should only document bar 2.0, should not document old-dep. p.cargo("doc") .with_stderr_unordered( "\ [UPDATING] [..] [DOWNLOADING] crates ... [DOWNLOADED] bar v2.0.0 [..] [DOWNLOADED] bar v1.0.0 [..] [DOWNLOADED] old-dep v1.0.0 [..] [CHECKING] old-dep v1.0.0 [CHECKING] bar v2.0.0 [CHECKING] bar v1.0.0 [DOCUMENTING] bar v2.0.0 [FINISHED] [..] [DOCUMENTING] foo v0.1.0 [..] ", ) .run(); } #[cargo_test] fn collision_doc_host_target_feature_split() { // Same dependency built twice due to different features. // // foo v0.1.0 // β”œβ”€β”€ common v1.0.0 // β”‚ └── common-dep v1.0.0 // └── pm v0.1.0 (proc-macro) // └── common v1.0.0 // └── common-dep v1.0.0 // [build-dependencies] // └── common-dep v1.0.0 // // Here `common` and `common-dep` are built twice. `common-dep` has // different features for host versus target. 
Package::new("common-dep", "1.0.0") .feature("bdep-feat", &[]) .file( "src/lib.rs", r#" /// Some doc pub fn f() {} /// Another doc #[cfg(feature = "bdep-feat")] pub fn bdep_func() {} "#, ) .publish(); Package::new("common", "1.0.0") .dep("common-dep", "1.0") .file( "src/lib.rs", r#" /// Some doc pub fn f() {} "#, ) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" resolver = "2" [dependencies] pm = { path = "pm" } common = "1.0" [build-dependencies] common-dep = { version = "1.0", features = ["bdep-feat"] } "#, ) .file( "src/lib.rs", r#" /// Some doc pub fn f() {} "#, ) .file("build.rs", "fn main() {}") .file( "pm/Cargo.toml", r#" [package] name = "pm" version = "0.1.0" edition = "2018" [lib] proc-macro = true [dependencies] common = "1.0" "#, ) .file( "pm/src/lib.rs", r#" use proc_macro::TokenStream; /// Some doc #[proc_macro] pub fn pm(_input: TokenStream) -> TokenStream { "".parse().unwrap() } "#, ) .build(); // No warnings, no duplicates, common and common-dep only documented once. p.cargo("doc") // Cannot check full output due to https://github.com/rust-lang/cargo/issues/9076 .with_stderr_does_not_contain("[WARNING][..]") .run(); assert!(p.build_dir().join("doc/common_dep/fn.f.html").exists()); assert!(!p .build_dir() .join("doc/common_dep/fn.bdep_func.html") .exists()); assert!(p.build_dir().join("doc/common/fn.f.html").exists()); assert!(p.build_dir().join("doc/pm/macro.pm.html").exists()); assert!(p.build_dir().join("doc/foo/fn.f.html").exists()); } #[cargo_test] fn collision_doc_profile_split() { // Same dependency built twice due to different profile settings. 
Package::new("common", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] pm = { path = "pm" } common = "1.0" [profile.dev] opt-level = 2 "#, ) .file("src/lib.rs", "") .file( "pm/Cargo.toml", r#" [package] name = "pm" version = "0.1.0" [dependencies] common = "1.0" [lib] proc-macro = true "#, ) .file("pm/src/lib.rs", "") .build(); // Just to verify that common is normally built twice. p.cargo("build -v") .with_stderr( "\ [UPDATING] [..] [DOWNLOADING] crates ... [DOWNLOADED] common v1.0.0 [..] [COMPILING] common v1.0.0 [RUNNING] `rustc --crate-name common [..] [RUNNING] `rustc --crate-name common [..] [COMPILING] pm v0.1.0 [..] [RUNNING] `rustc --crate-name pm [..] [COMPILING] foo v0.1.0 [..] [RUNNING] `rustc --crate-name foo [..] [FINISHED] [..] ", ) .run(); // Should only document common once, no warnings. p.cargo("doc") .with_stderr_unordered( "\ [CHECKING] common v1.0.0 [DOCUMENTING] common v1.0.0 [DOCUMENTING] pm v0.1.0 [..] [DOCUMENTING] foo v0.1.0 [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn collision_doc_sources() { // Different sources with the same package. Package::new("bar", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "1.0" bar2 = { path = "bar", package = "bar" } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "1.0.0")) .file("bar/src/lib.rs", "") .build(); p.cargo("doc -j=1") .with_stderr_unordered( "\ [UPDATING] [..] [DOWNLOADING] crates ... [DOWNLOADED] bar v1.0.0 [..] [WARNING] output filename collision. The lib target `bar` in package `bar v1.0.0` has the same output filename as \ the lib target `bar` in package `bar v1.0.0 ([..]/foo/bar)`. Colliding filename is: [..]/foo/target/doc/bar/index.html The targets should have unique names. This is a known bug where multiple crates with the same name use the same path; see . [CHECKING] bar v1.0.0 [..] 
[DOCUMENTING] bar v1.0.0 [..] [DOCUMENTING] bar v1.0.0 [CHECKING] bar v1.0.0 [DOCUMENTING] foo v0.1.0 [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn collision_doc_target() { // collision in doc with --target, doesn't fail due to orphans if cross_compile::disabled() { return; } Package::new("orphaned", "1.0.0").publish(); Package::new("bar", "1.0.0") .dep("orphaned", "1.0") .publish(); Package::new("bar", "2.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar2 = { version = "2.0", package="bar" } bar = "1.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("doc --target") .arg(cross_compile::alternate()) .with_stderr_unordered( "\ [UPDATING] [..] [DOWNLOADING] crates ... [DOWNLOADED] orphaned v1.0.0 [..] [DOWNLOADED] bar v2.0.0 [..] [DOWNLOADED] bar v1.0.0 [..] [CHECKING] orphaned v1.0.0 [DOCUMENTING] bar v2.0.0 [CHECKING] bar v2.0.0 [CHECKING] bar v1.0.0 [DOCUMENTING] foo v0.1.0 [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn collision_with_root() { // Check for a doc collision between a root package and a dependency. // In this case, `foo-macro` comes from both the workspace and crates.io. // This checks that the duplicate correction code doesn't choke on this // by removing the root unit. Package::new("foo-macro", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [workspace] members = ["abc", "foo-macro"] "#, ) .file( "abc/Cargo.toml", r#" [package] name = "abc" version = "1.0.0" [dependencies] foo-macro = "1.0" "#, ) .file("abc/src/lib.rs", "") .file( "foo-macro/Cargo.toml", r#" [package] name = "foo-macro" version = "1.0.0" [lib] proc-macro = true [dependencies] abc = {path="../abc"} "#, ) .file("foo-macro/src/lib.rs", "") .build(); p.cargo("doc -j=1") .with_stderr_unordered("\ [UPDATING] [..] [DOWNLOADING] crates ... [DOWNLOADED] foo-macro v1.0.0 [..] warning: output filename collision. 
The lib target `foo-macro` in package `foo-macro v1.0.0` has the same output filename as the lib target `foo-macro` in package `foo-macro v1.0.0 [..]`. Colliding filename is: [CWD]/target/doc/foo_macro/index.html The targets should have unique names. This is a known bug where multiple crates with the same name use the same path; see . [CHECKING] foo-macro v1.0.0 [DOCUMENTING] foo-macro v1.0.0 [CHECKING] abc v1.0.0 [..] [DOCUMENTING] foo-macro v1.0.0 [..] [DOCUMENTING] abc v1.0.0 [..] [FINISHED] [..] ") .run(); } cargo-0.66.0/tests/testsuite/concurrent.rs000066400000000000000000000345401432416201200206220ustar00rootroot00000000000000//! Tests for running multiple `cargo` processes at the same time. use std::fs; use std::net::TcpListener; use std::process::Stdio; use std::sync::mpsc::channel; use std::thread; use std::{env, str}; use cargo_test_support::cargo_process; use cargo_test_support::git; use cargo_test_support::install::{assert_has_installed_exe, cargo_home}; use cargo_test_support::registry::Package; use cargo_test_support::{basic_manifest, execs, project, slow_cpu_multiplier}; fn pkg(name: &str, vers: &str) { Package::new(name, vers) .file("src/main.rs", "fn main() {{}}") .publish(); } #[cargo_test] fn multiple_installs() { let p = project() .no_manifest() .file("a/Cargo.toml", &basic_manifest("foo", "0.0.0")) .file("a/src/main.rs", "fn main() {}") .file("b/Cargo.toml", &basic_manifest("bar", "0.0.0")) .file("b/src/main.rs", "fn main() {}"); let p = p.build(); let mut a = p.cargo("install").cwd("a").build_command(); let mut b = p.cargo("install").cwd("b").build_command(); a.stdout(Stdio::piped()).stderr(Stdio::piped()); b.stdout(Stdio::piped()).stderr(Stdio::piped()); let a = a.spawn().unwrap(); let b = b.spawn().unwrap(); let a = thread::spawn(move || a.wait_with_output().unwrap()); let b = b.wait_with_output().unwrap(); let a = a.join().unwrap(); execs().run_output(&a); execs().run_output(&b); assert_has_installed_exe(cargo_home(), "foo"); 
assert_has_installed_exe(cargo_home(), "bar"); } #[cargo_test] fn concurrent_installs() { const LOCKED_BUILD: &str = "waiting for file lock on build directory"; pkg("foo", "0.0.1"); pkg("bar", "0.0.1"); let mut a = cargo_process("install foo").build_command(); let mut b = cargo_process("install bar").build_command(); a.stdout(Stdio::piped()).stderr(Stdio::piped()); b.stdout(Stdio::piped()).stderr(Stdio::piped()); let a = a.spawn().unwrap(); let b = b.spawn().unwrap(); let a = thread::spawn(move || a.wait_with_output().unwrap()); let b = b.wait_with_output().unwrap(); let a = a.join().unwrap(); assert!(!str::from_utf8(&a.stderr).unwrap().contains(LOCKED_BUILD)); assert!(!str::from_utf8(&b.stderr).unwrap().contains(LOCKED_BUILD)); execs().run_output(&a); execs().run_output(&b); assert_has_installed_exe(cargo_home(), "foo"); assert_has_installed_exe(cargo_home(), "bar"); } #[cargo_test] fn one_install_should_be_bad() { let p = project() .no_manifest() .file("a/Cargo.toml", &basic_manifest("foo", "0.0.0")) .file("a/src/main.rs", "fn main() {}") .file("b/Cargo.toml", &basic_manifest("foo", "0.0.0")) .file("b/src/main.rs", "fn main() {}"); let p = p.build(); let mut a = p.cargo("install").cwd("a").build_command(); let mut b = p.cargo("install").cwd("b").build_command(); a.stdout(Stdio::piped()).stderr(Stdio::piped()); b.stdout(Stdio::piped()).stderr(Stdio::piped()); let a = a.spawn().unwrap(); let b = b.spawn().unwrap(); let a = thread::spawn(move || a.wait_with_output().unwrap()); let b = b.wait_with_output().unwrap(); let a = a.join().unwrap(); execs().run_output(&a); execs().run_output(&b); assert_has_installed_exe(cargo_home(), "foo"); } #[cargo_test] fn multiple_registry_fetches() { let mut pkg = Package::new("bar", "1.0.2"); for i in 0..10 { let name = format!("foo{}", i); Package::new(&name, "1.0.0").publish(); pkg.dep(&name, "*"); } pkg.publish(); let p = project() .no_manifest() .file( "a/Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.0" 
[dependencies] bar = "*" "#, ) .file("a/src/main.rs", "fn main() {}") .file( "b/Cargo.toml", r#" [package] name = "bar" authors = [] version = "0.0.0" [dependencies] bar = "*" "#, ) .file("b/src/main.rs", "fn main() {}"); let p = p.build(); let mut a = p.cargo("build").cwd("a").build_command(); let mut b = p.cargo("build").cwd("b").build_command(); a.stdout(Stdio::piped()).stderr(Stdio::piped()); b.stdout(Stdio::piped()).stderr(Stdio::piped()); let a = a.spawn().unwrap(); let b = b.spawn().unwrap(); let a = thread::spawn(move || a.wait_with_output().unwrap()); let b = b.wait_with_output().unwrap(); let a = a.join().unwrap(); execs().run_output(&a); execs().run_output(&b); let suffix = env::consts::EXE_SUFFIX; assert!(p .root() .join("a/target/debug") .join(format!("foo{}", suffix)) .is_file()); assert!(p .root() .join("b/target/debug") .join(format!("bar{}", suffix)) .is_file()); } #[cargo_test] fn git_same_repo_different_tags() { let a = git::new("dep", |project| { project .file("Cargo.toml", &basic_manifest("dep", "0.5.0")) .file("src/lib.rs", "pub fn tag1() {}") }); let repo = git2::Repository::open(&a.root()).unwrap(); git::tag(&repo, "tag1"); a.change_file("src/lib.rs", "pub fn tag2() {}"); git::add(&repo); git::commit(&repo); git::tag(&repo, "tag2"); let p = project() .no_manifest() .file( "a/Cargo.toml", &format!( r#" [package] name = "foo" authors = [] version = "0.0.0" [dependencies] dep = {{ git = '{}', tag = 'tag1' }} "#, a.url() ), ) .file( "a/src/main.rs", "extern crate dep; fn main() { dep::tag1(); }", ) .file( "b/Cargo.toml", &format!( r#" [package] name = "bar" authors = [] version = "0.0.0" [dependencies] dep = {{ git = '{}', tag = 'tag2' }} "#, a.url() ), ) .file( "b/src/main.rs", "extern crate dep; fn main() { dep::tag2(); }", ); let p = p.build(); let mut a = p.cargo("build -v").cwd("a").build_command(); let mut b = p.cargo("build -v").cwd("b").build_command(); a.stdout(Stdio::piped()).stderr(Stdio::piped()); 
b.stdout(Stdio::piped()).stderr(Stdio::piped()); let a = a.spawn().unwrap(); let b = b.spawn().unwrap(); let a = thread::spawn(move || a.wait_with_output().unwrap()); let b = b.wait_with_output().unwrap(); let a = a.join().unwrap(); execs().run_output(&a); execs().run_output(&b); } #[cargo_test] fn git_same_branch_different_revs() { let a = git::new("dep", |project| { project .file("Cargo.toml", &basic_manifest("dep", "0.5.0")) .file("src/lib.rs", "pub fn f1() {}") }); let p = project() .no_manifest() .file( "a/Cargo.toml", &format!( r#" [package] name = "foo" authors = [] version = "0.0.0" [dependencies] dep = {{ git = '{}' }} "#, a.url() ), ) .file( "a/src/main.rs", "extern crate dep; fn main() { dep::f1(); }", ) .file( "b/Cargo.toml", &format!( r#" [package] name = "bar" authors = [] version = "0.0.0" [dependencies] dep = {{ git = '{}' }} "#, a.url() ), ) .file( "b/src/main.rs", "extern crate dep; fn main() { dep::f2(); }", ); let p = p.build(); // Generate a Cargo.lock pointing at the current rev, then clear out the // target directory p.cargo("build").cwd("a").run(); fs::remove_dir_all(p.root().join("a/target")).unwrap(); // Make a new commit on the master branch let repo = git2::Repository::open(&a.root()).unwrap(); a.change_file("src/lib.rs", "pub fn f2() {}"); git::add(&repo); git::commit(&repo); // Now run both builds in parallel. The build of `b` should pick up the // newest commit while the build of `a` should use the locked old commit. 
let mut a = p.cargo("build").cwd("a").build_command(); let mut b = p.cargo("build").cwd("b").build_command(); a.stdout(Stdio::piped()).stderr(Stdio::piped()); b.stdout(Stdio::piped()).stderr(Stdio::piped()); let a = a.spawn().unwrap(); let b = b.spawn().unwrap(); let a = thread::spawn(move || a.wait_with_output().unwrap()); let b = b.wait_with_output().unwrap(); let a = a.join().unwrap(); execs().run_output(&a); execs().run_output(&b); } #[cargo_test] fn same_project() { let p = project() .file("src/main.rs", "fn main() {}") .file("src/lib.rs", ""); let p = p.build(); let mut a = p.cargo("build").build_command(); let mut b = p.cargo("build").build_command(); a.stdout(Stdio::piped()).stderr(Stdio::piped()); b.stdout(Stdio::piped()).stderr(Stdio::piped()); let a = a.spawn().unwrap(); let b = b.spawn().unwrap(); let a = thread::spawn(move || a.wait_with_output().unwrap()); let b = b.wait_with_output().unwrap(); let a = a.join().unwrap(); execs().run_output(&a); execs().run_output(&b); } // Make sure that if Cargo dies while holding a lock that it's released and the // next Cargo to come in will take over cleanly. #[cargo_test] fn killing_cargo_releases_the_lock() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.0" build = "build.rs" "#, ) .file("src/main.rs", "fn main() {}") .file( "build.rs", r#" use std::net::TcpStream; fn main() { if std::env::var("A").is_ok() { TcpStream::connect(&std::env::var("ADDR").unwrap()[..]) .unwrap(); std::thread::sleep(std::time::Duration::new(10, 0)); } } "#, ); let p = p.build(); // Our build script will connect to our local TCP socket to inform us that // it's started and that's how we know that `a` will have the lock // when we kill it. 
let l = TcpListener::bind("127.0.0.1:0").unwrap(); let mut a = p.cargo("build").build_command(); let mut b = p.cargo("build").build_command(); a.stdout(Stdio::piped()).stderr(Stdio::piped()); b.stdout(Stdio::piped()).stderr(Stdio::piped()); a.env("ADDR", l.local_addr().unwrap().to_string()) .env("A", "a"); b.env("ADDR", l.local_addr().unwrap().to_string()) .env_remove("A"); // Spawn `a`, wait for it to get to the build script (at which point the // lock is held), then kill it. let mut a = a.spawn().unwrap(); l.accept().unwrap(); a.kill().unwrap(); // Spawn `b`, then just finish the output of a/b the same way the above // tests does. let b = b.spawn().unwrap(); let a = thread::spawn(move || a.wait_with_output().unwrap()); let b = b.wait_with_output().unwrap(); let a = a.join().unwrap(); // We killed `a`, so it shouldn't succeed, but `b` should have succeeded. assert!(!a.status.success()); execs().run_output(&b); } #[cargo_test] fn debug_release_ok() { let p = project().file("src/main.rs", "fn main() {}"); let p = p.build(); p.cargo("build").run(); fs::remove_dir_all(p.root().join("target")).unwrap(); let mut a = p.cargo("build").build_command(); let mut b = p.cargo("build --release").build_command(); a.stdout(Stdio::piped()).stderr(Stdio::piped()); b.stdout(Stdio::piped()).stderr(Stdio::piped()); let a = a.spawn().unwrap(); let b = b.spawn().unwrap(); let a = thread::spawn(move || a.wait_with_output().unwrap()); let b = b.wait_with_output().unwrap(); let a = a.join().unwrap(); execs() .with_stderr_contains( "\ [COMPILING] foo v0.0.1 [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run_output(&a); execs() .with_stderr_contains( "\ [COMPILING] foo v0.0.1 [..] [FINISHED] release [optimized] target(s) in [..] 
", ) .run_output(&b); } #[cargo_test] fn no_deadlock_with_git_dependencies() { let dep1 = git::new("dep1", |project| { project .file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) .file("src/lib.rs", "") }); let dep2 = git::new("dep2", |project| { project .file("Cargo.toml", &basic_manifest("dep2", "0.5.0")) .file("src/lib.rs", "") }); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" authors = [] version = "0.0.0" [dependencies] dep1 = {{ git = '{}' }} dep2 = {{ git = '{}' }} "#, dep1.url(), dep2.url() ), ) .file("src/main.rs", "fn main() { }"); let p = p.build(); let n_concurrent_builds = 5; let (tx, rx) = channel(); for _ in 0..n_concurrent_builds { let cmd = p .cargo("build") .build_command() .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn(); let tx = tx.clone(); thread::spawn(move || { let result = cmd.unwrap().wait_with_output().unwrap(); tx.send(result).unwrap() }); } for _ in 0..n_concurrent_builds { let result = rx.recv_timeout(slow_cpu_multiplier(30)).expect("Deadlock!"); execs().run_output(&result); } } cargo-0.66.0/tests/testsuite/config.rs000066400000000000000000001204161432416201200177030ustar00rootroot00000000000000//! Tests for config settings. use cargo::core::{PackageIdSpec, Shell}; use cargo::util::config::{self, Config, SslVersionConfig, StringList}; use cargo::util::interning::InternedString; use cargo::util::toml::{self, VecStringOrBool as VSOB}; use cargo::CargoResult; use cargo_test_support::compare; use cargo_test_support::{panic_error, paths, project, symlink_supported, t}; use serde::Deserialize; use std::borrow::Borrow; use std::collections::{BTreeMap, HashMap}; use std::fs; use std::io; use std::os; use std::path::{Path, PathBuf}; /// Helper for constructing a `Config` object. 
pub struct ConfigBuilder { env: HashMap, unstable: Vec, config_args: Vec, cwd: Option, enable_nightly_features: bool, } impl ConfigBuilder { pub fn new() -> ConfigBuilder { ConfigBuilder { env: HashMap::new(), unstable: Vec::new(), config_args: Vec::new(), cwd: None, enable_nightly_features: false, } } /// Passes a `-Z` flag. pub fn unstable_flag(&mut self, s: impl Into) -> &mut Self { self.unstable.push(s.into()); self } /// Sets an environment variable. pub fn env(&mut self, key: impl Into, val: impl Into) -> &mut Self { self.env.insert(key.into(), val.into()); self } /// Unconditionally enable nightly features, even on stable channels. pub fn nightly_features_allowed(&mut self, allowed: bool) -> &mut Self { self.enable_nightly_features = allowed; self } /// Passes a `--config` flag. pub fn config_arg(&mut self, arg: impl Into) -> &mut Self { self.config_args.push(arg.into()); self } /// Sets the current working directory where config files will be loaded. pub fn cwd(&mut self, path: impl AsRef) -> &mut Self { self.cwd = Some(paths::root().join(path.as_ref())); self } /// Creates the `Config`. pub fn build(&self) -> Config { self.build_err().unwrap() } /// Creates the `Config`, returning a Result. pub fn build_err(&self) -> CargoResult { let output = Box::new(fs::File::create(paths::root().join("shell.out")).unwrap()); let shell = Shell::from_write(output); let cwd = self.cwd.clone().unwrap_or_else(|| paths::root()); let homedir = paths::home(); let mut config = Config::new(shell, cwd, homedir); config.nightly_features_allowed = self.enable_nightly_features || !self.unstable.is_empty(); config.set_env(self.env.clone()); config.set_search_stop_path(paths::root()); config.configure( 0, false, None, false, false, false, &None, &self.unstable, &self.config_args, )?; Ok(config) } } fn new_config() -> Config { ConfigBuilder::new().build() } /// Read the output from Config. pub fn read_output(config: Config) -> String { drop(config); // Paranoid about flushing the file. 
let path = paths::root().join("shell.out"); fs::read_to_string(path).unwrap() } #[cargo_test] fn read_env_vars_for_config() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.0" build = "build.rs" "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" use std::env; fn main() { assert_eq!(env::var("NUM_JOBS").unwrap(), "100"); } "#, ) .build(); p.cargo("build").env("CARGO_BUILD_JOBS", "100").run(); } pub fn write_config(config: &str) { write_config_at(paths::root().join(".cargo/config"), config); } pub fn write_config_at(path: impl AsRef, contents: &str) { let path = paths::root().join(path.as_ref()); fs::create_dir_all(path.parent().unwrap()).unwrap(); fs::write(path, contents).unwrap(); } pub fn write_config_toml(config: &str) { write_config_at(paths::root().join(".cargo/config.toml"), config); } #[cfg(unix)] fn symlink_file(target: &Path, link: &Path) -> io::Result<()> { os::unix::fs::symlink(target, link) } #[cfg(windows)] fn symlink_file(target: &Path, link: &Path) -> io::Result<()> { os::windows::fs::symlink_file(target, link) } fn symlink_config_to_config_toml() { let toml_path = paths::root().join(".cargo/config.toml"); let symlink_path = paths::root().join(".cargo/config"); t!(symlink_file(&toml_path, &symlink_path)); } #[track_caller] pub fn assert_error>(error: E, msgs: &str) { let causes = error .borrow() .chain() .enumerate() .map(|(i, e)| { if i == 0 { e.to_string() } else { format!("Caused by:\n {}", e) } }) .collect::>() .join("\n\n"); assert_match(msgs, &causes); } #[track_caller] pub fn assert_match(expected: &str, actual: &str) { if let Err(e) = compare::match_exact(expected, actual, "output", "", None) { panic_error("", e); } } #[cargo_test] fn get_config() { write_config( "\ [S] f1 = 123 ", ); let config = new_config(); #[derive(Debug, Deserialize, Eq, PartialEq)] struct S { f1: Option, } let s: S = config.get("S").unwrap(); assert_eq!(s, S { f1: Some(123) }); let config = 
ConfigBuilder::new().env("CARGO_S_F1", "456").build(); let s: S = config.get("S").unwrap(); assert_eq!(s, S { f1: Some(456) }); } #[cargo_test] fn config_works_with_extension() { write_config_toml( "\ [foo] f1 = 1 ", ); let config = new_config(); assert_eq!(config.get::>("foo.f1").unwrap(), Some(1)); } #[cargo_test] fn config_ambiguous_filename_symlink_doesnt_warn() { // Windows requires special permissions to create symlinks. // If we don't have permission, just skip this test. if !symlink_supported() { return; }; write_config_toml( "\ [foo] f1 = 1 ", ); symlink_config_to_config_toml(); let config = new_config(); assert_eq!(config.get::>("foo.f1").unwrap(), Some(1)); // It should NOT have warned for the symlink. let output = read_output(config); assert_eq!(output, ""); } #[cargo_test] fn config_ambiguous_filename() { write_config( "\ [foo] f1 = 1 ", ); write_config_toml( "\ [foo] f1 = 2 ", ); let config = new_config(); // It should use the value from the one without the extension for // backwards compatibility. assert_eq!(config.get::>("foo.f1").unwrap(), Some(1)); // But it also should have warned. let output = read_output(config); let expected = "\ warning: Both `[..]/.cargo/config` and `[..]/.cargo/config.toml` exist. Using `[..]/.cargo/config` "; assert_match(expected, &output); } #[cargo_test] fn config_unused_fields() { write_config( "\ [S] unused = 456 ", ); let config = ConfigBuilder::new() .env("CARGO_S_UNUSED2", "1") .env("CARGO_S2_UNUSED", "2") .build(); #[derive(Debug, Deserialize, Eq, PartialEq)] struct S { f1: Option, } // This prints a warning (verified below). let s: S = config.get("S").unwrap(); assert_eq!(s, S { f1: None }); // This does not print anything, we cannot easily/reliably warn for // environment variables. let s: S = config.get("S2").unwrap(); assert_eq!(s, S { f1: None }); // Verify the warnings. 
let output = read_output(config); let expected = "\ warning: unused config key `S.unused` in `[..]/.cargo/config` "; assert_match(expected, &output); } #[cargo_test] fn config_load_toml_profile() { write_config( "\ [profile.dev] opt-level = 's' lto = true codegen-units=4 debug = true debug-assertions = true rpath = true panic = 'abort' overflow-checks = true incremental = true [profile.dev.build-override] opt-level = 1 [profile.dev.package.bar] codegen-units = 9 [profile.no-lto] inherits = 'dev' dir-name = 'without-lto' lto = false ", ); let config = ConfigBuilder::new() .unstable_flag("advanced-env") .env("CARGO_PROFILE_DEV_CODEGEN_UNITS", "5") .env("CARGO_PROFILE_DEV_BUILD_OVERRIDE_CODEGEN_UNITS", "11") .env("CARGO_PROFILE_DEV_PACKAGE_env_CODEGEN_UNITS", "13") .env("CARGO_PROFILE_DEV_PACKAGE_bar_OPT_LEVEL", "2") .build(); // TODO: don't use actual `tomlprofile`. let p: toml::TomlProfile = config.get("profile.dev").unwrap(); let mut packages = BTreeMap::new(); let key = toml::ProfilePackageSpec::Spec(::cargo::core::PackageIdSpec::parse("bar").unwrap()); let o_profile = toml::TomlProfile { opt_level: Some(toml::TomlOptLevel("2".to_string())), codegen_units: Some(9), ..Default::default() }; packages.insert(key, o_profile); let key = toml::ProfilePackageSpec::Spec(::cargo::core::PackageIdSpec::parse("env").unwrap()); let o_profile = toml::TomlProfile { codegen_units: Some(13), ..Default::default() }; packages.insert(key, o_profile); assert_eq!( p, toml::TomlProfile { opt_level: Some(toml::TomlOptLevel("s".to_string())), lto: Some(toml::StringOrBool::Bool(true)), codegen_units: Some(5), debug: Some(toml::U32OrBool::Bool(true)), debug_assertions: Some(true), rpath: Some(true), panic: Some("abort".to_string()), overflow_checks: Some(true), incremental: Some(true), package: Some(packages), build_override: Some(Box::new(toml::TomlProfile { opt_level: Some(toml::TomlOptLevel("1".to_string())), codegen_units: Some(11), ..Default::default() })), ..Default::default() } ); let 
p: toml::TomlProfile = config.get("profile.no-lto").unwrap(); assert_eq!( p, toml::TomlProfile { lto: Some(toml::StringOrBool::Bool(false)), dir_name: Some(InternedString::new("without-lto")), inherits: Some(InternedString::new("dev")), ..Default::default() } ); } #[cargo_test] fn profile_env_var_prefix() { // Check for a bug with collision on DEBUG vs DEBUG_ASSERTIONS. let config = ConfigBuilder::new() .env("CARGO_PROFILE_DEV_DEBUG_ASSERTIONS", "false") .build(); let p: toml::TomlProfile = config.get("profile.dev").unwrap(); assert_eq!(p.debug_assertions, Some(false)); assert_eq!(p.debug, None); let config = ConfigBuilder::new() .env("CARGO_PROFILE_DEV_DEBUG", "1") .build(); let p: toml::TomlProfile = config.get("profile.dev").unwrap(); assert_eq!(p.debug_assertions, None); assert_eq!(p.debug, Some(toml::U32OrBool::U32(1))); let config = ConfigBuilder::new() .env("CARGO_PROFILE_DEV_DEBUG_ASSERTIONS", "false") .env("CARGO_PROFILE_DEV_DEBUG", "1") .build(); let p: toml::TomlProfile = config.get("profile.dev").unwrap(); assert_eq!(p.debug_assertions, Some(false)); assert_eq!(p.debug, Some(toml::U32OrBool::U32(1))); } #[cargo_test] fn config_deserialize_any() { // Some tests to exercise deserialize_any for deserializers that need to // be told the format. write_config( "\ a = true b = ['b'] c = ['c'] ", ); // advanced-env let config = ConfigBuilder::new() .unstable_flag("advanced-env") .env("CARGO_ENVB", "false") .env("CARGO_C", "['d']") .env("CARGO_ENVL", "['a', 'b']") .build(); assert_eq!(config.get::("a").unwrap(), VSOB::Bool(true)); assert_eq!( config.get::("b").unwrap(), VSOB::VecString(vec!["b".to_string()]) ); assert_eq!( config.get::("c").unwrap(), VSOB::VecString(vec!["c".to_string(), "d".to_string()]) ); assert_eq!(config.get::("envb").unwrap(), VSOB::Bool(false)); assert_eq!( config.get::("envl").unwrap(), VSOB::VecString(vec!["a".to_string(), "b".to_string()]) ); // Demonstrate where merging logic isn't very smart. This could be improved. 
let config = ConfigBuilder::new().env("CARGO_A", "x y").build(); assert_error( config.get::("a").unwrap_err(), "\ error in environment variable `CARGO_A`: could not load config key `a` Caused by: invalid type: string \"x y\", expected a boolean or vector of strings", ); // Normal env. let config = ConfigBuilder::new() .unstable_flag("advanced-env") .env("CARGO_B", "d e") .env("CARGO_C", "f g") .build(); assert_eq!( config.get::("b").unwrap(), VSOB::VecString(vec!["b".to_string(), "d".to_string(), "e".to_string()]) ); assert_eq!( config.get::("c").unwrap(), VSOB::VecString(vec!["c".to_string(), "f".to_string(), "g".to_string()]) ); // config-cli // This test demonstrates that ConfigValue::merge isn't very smart. // It would be nice if it was smarter. let config = ConfigBuilder::new().config_arg("a = ['a']").build_err(); assert_error( config.unwrap_err(), "\ failed to merge --config key `a` into `[..]/.cargo/config` Caused by: failed to merge config value from `--config cli option` into `[..]/.cargo/config`: \ expected boolean, but found array", ); // config-cli and advanced-env let config = ConfigBuilder::new() .unstable_flag("advanced-env") .config_arg("b=['clib']") .config_arg("c=['clic']") .env("CARGO_B", "env1 env2") .env("CARGO_C", "['e1', 'e2']") .build(); assert_eq!( config.get::("b").unwrap(), VSOB::VecString(vec![ "b".to_string(), "clib".to_string(), "env1".to_string(), "env2".to_string() ]) ); assert_eq!( config.get::("c").unwrap(), VSOB::VecString(vec![ "c".to_string(), "clic".to_string(), "e1".to_string(), "e2".to_string() ]) ); } #[cargo_test] fn config_toml_errors() { write_config( "\ [profile.dev] opt-level = 'foo' ", ); let config = new_config(); assert_error( config.get::("profile.dev").unwrap_err(), "\ error in [..]/.cargo/config: could not load config key `profile.dev.opt-level` Caused by: must be `0`, `1`, `2`, `3`, `s` or `z`, but found the string: \"foo\"", ); let config = ConfigBuilder::new() .env("CARGO_PROFILE_DEV_OPT_LEVEL", "asdf") 
.build(); assert_error( config.get::("profile.dev").unwrap_err(), "\ error in environment variable `CARGO_PROFILE_DEV_OPT_LEVEL`: could not load config key `profile.dev.opt-level` Caused by: must be `0`, `1`, `2`, `3`, `s` or `z`, but found the string: \"asdf\"", ); } #[cargo_test] fn load_nested() { write_config( "\ [nest.foo] f1 = 1 f2 = 2 [nest.bar] asdf = 3 ", ); let config = ConfigBuilder::new() .unstable_flag("advanced-env") .env("CARGO_NEST_foo_f2", "3") .env("CARGO_NESTE_foo_f1", "1") .env("CARGO_NESTE_foo_f2", "3") .env("CARGO_NESTE_bar_asdf", "3") .build(); type Nested = HashMap>; let n: Nested = config.get("nest").unwrap(); let mut expected = HashMap::new(); let mut foo = HashMap::new(); foo.insert("f1".to_string(), 1); foo.insert("f2".to_string(), 3); expected.insert("foo".to_string(), foo); let mut bar = HashMap::new(); bar.insert("asdf".to_string(), 3); expected.insert("bar".to_string(), bar); assert_eq!(n, expected); let n: Nested = config.get("neste").unwrap(); assert_eq!(n, expected); } #[cargo_test] fn get_errors() { write_config( "\ [S] f1 = 123 f2 = 'asdf' big = 123456789 ", ); let config = ConfigBuilder::new() .env("CARGO_E_S", "asdf") .env("CARGO_E_BIG", "123456789") .build(); assert_error( config.get::("foo").unwrap_err(), "missing config key `foo`", ); assert_error( config.get::("foo.bar").unwrap_err(), "missing config key `foo.bar`", ); assert_error( config.get::("S.f2").unwrap_err(), "error in [..]/.cargo/config: `S.f2` expected an integer, but found a string", ); assert_error( config.get::("S.big").unwrap_err(), "\ error in [..].cargo/config: could not load config key `S.big` Caused by: invalid value: integer `123456789`, expected u8", ); // Environment variable type errors. 
assert_error( config.get::("e.s").unwrap_err(), "error in environment variable `CARGO_E_S`: invalid digit found in string", ); assert_error( config.get::("e.big").unwrap_err(), "\ error in environment variable `CARGO_E_BIG`: could not load config key `e.big` Caused by: invalid value: integer `123456789`, expected i8", ); #[derive(Debug, Deserialize)] #[allow(dead_code)] struct S { f1: i64, f2: String, f3: i64, big: i64, } assert_error(config.get::("S").unwrap_err(), "missing field `f3`"); } #[cargo_test] fn config_get_option() { write_config( "\ [foo] f1 = 1 ", ); let config = ConfigBuilder::new().env("CARGO_BAR_ASDF", "3").build(); assert_eq!(config.get::>("a").unwrap(), None); assert_eq!(config.get::>("a.b").unwrap(), None); assert_eq!(config.get::>("foo.f1").unwrap(), Some(1)); assert_eq!(config.get::>("bar.asdf").unwrap(), Some(3)); assert_eq!(config.get::>("bar.zzzz").unwrap(), None); } #[cargo_test] fn config_bad_toml() { write_config("asdf"); let config = new_config(); assert_error( config.get::("foo").unwrap_err(), "\ could not load Cargo configuration Caused by: could not parse TOML configuration in `[..]/.cargo/config` Caused by: could not parse input as TOML Caused by: TOML parse error at line 1, column 5 | 1 | asdf | ^ Unexpected end of input Expected `.` or `=`", ); } #[cargo_test] fn config_get_list() { write_config( "\ l1 = [] l2 = ['one', 'two'] l3 = 123 l4 = ['one', 'two'] [nested] l = ['x'] [nested2] l = ['y'] [nested-empty] ", ); type L = Vec; let config = ConfigBuilder::new() .unstable_flag("advanced-env") .env("CARGO_L4", "['three', 'four']") .env("CARGO_L5", "['a']") .env("CARGO_ENV_EMPTY", "[]") .env("CARGO_ENV_BLANK", "") .env("CARGO_ENV_NUM", "1") .env("CARGO_ENV_NUM_LIST", "[1]") .env("CARGO_ENV_TEXT", "asdf") .env("CARGO_LEPAIR", "['a', 'b']") .env("CARGO_NESTED2_L", "['z']") .env("CARGO_NESTEDE_L", "['env']") .env("CARGO_BAD_ENV", "[zzz]") .build(); assert_eq!(config.get::("unset").unwrap(), vec![] as Vec); 
assert_eq!(config.get::("l1").unwrap(), vec![] as Vec); assert_eq!(config.get::("l2").unwrap(), vec!["one", "two"]); assert_error( config.get::("l3").unwrap_err(), "\ invalid configuration for key `l3` expected a list, but found a integer for `l3` in [..]/.cargo/config", ); assert_eq!( config.get::("l4").unwrap(), vec!["one", "two", "three", "four"] ); assert_eq!(config.get::("l5").unwrap(), vec!["a"]); assert_eq!(config.get::("env-empty").unwrap(), vec![] as Vec); assert_eq!(config.get::("env-blank").unwrap(), vec![] as Vec); assert_eq!(config.get::("env-num").unwrap(), vec!["1".to_string()]); assert_error( config.get::("env-num-list").unwrap_err(), "error in environment variable `CARGO_ENV_NUM_LIST`: \ expected string, found integer", ); assert_eq!( config.get::("env-text").unwrap(), vec!["asdf".to_string()] ); // "invalid number" here isn't the best error, but I think it's just toml.rs. assert_error( config.get::("bad-env").unwrap_err(), "\ error in environment variable `CARGO_BAD_ENV`: could not parse TOML list: TOML parse error at line 1, column 8 | 1 | value=[zzz] | ^ Unexpected `z` Expected newline or `#` ", ); // Try some other sequence-like types. assert_eq!( config .get::<(String, String, String, String)>("l4") .unwrap(), ( "one".to_string(), "two".to_string(), "three".to_string(), "four".to_string() ) ); assert_eq!(config.get::<(String,)>("l5").unwrap(), ("a".to_string(),)); // Tuple struct #[derive(Debug, Deserialize, Eq, PartialEq)] struct TupS(String, String); assert_eq!( config.get::("lepair").unwrap(), TupS("a".to_string(), "b".to_string()) ); // Nested with an option. 
#[derive(Debug, Deserialize, Eq, PartialEq)] struct S { l: Option>, } assert_eq!(config.get::("nested-empty").unwrap(), S { l: None }); assert_eq!( config.get::("nested").unwrap(), S { l: Some(vec!["x".to_string()]), } ); assert_eq!( config.get::("nested2").unwrap(), S { l: Some(vec!["y".to_string(), "z".to_string()]), } ); assert_eq!( config.get::("nestede").unwrap(), S { l: Some(vec!["env".to_string()]), } ); } #[cargo_test] fn config_get_other_types() { write_config( "\ ns = 123 ns2 = 456 ", ); let config = ConfigBuilder::new() .env("CARGO_NSE", "987") .env("CARGO_NS2", "654") .build(); #[derive(Debug, Deserialize, Eq, PartialEq)] #[serde(transparent)] struct NewS(i32); assert_eq!(config.get::("ns").unwrap(), NewS(123)); assert_eq!(config.get::("ns2").unwrap(), NewS(654)); assert_eq!(config.get::("nse").unwrap(), NewS(987)); assert_error( config.get::("unset").unwrap_err(), "missing config key `unset`", ); } #[cargo_test] fn config_relative_path() { write_config(&format!( "\ p1 = 'foo/bar' p2 = '../abc' p3 = 'b/c' abs = '{}' ", paths::home().display(), )); let config = ConfigBuilder::new() .env("CARGO_EPATH", "a/b") .env("CARGO_P3", "d/e") .build(); assert_eq!( config .get::("p1") .unwrap() .resolve_path(&config), paths::root().join("foo/bar") ); assert_eq!( config .get::("p2") .unwrap() .resolve_path(&config), paths::root().join("../abc") ); assert_eq!( config .get::("p3") .unwrap() .resolve_path(&config), paths::root().join("d/e") ); assert_eq!( config .get::("abs") .unwrap() .resolve_path(&config), paths::home() ); assert_eq!( config .get::("epath") .unwrap() .resolve_path(&config), paths::root().join("a/b") ); } #[cargo_test] fn config_get_integers() { write_config( "\ npos = 123456789 nneg = -123456789 i64max = 9223372036854775807 ", ); let config = ConfigBuilder::new() .env("CARGO_EPOS", "123456789") .env("CARGO_ENEG", "-1") .env("CARGO_EI64MAX", "9223372036854775807") .build(); assert_eq!( config.get::("i64max").unwrap(), 9_223_372_036_854_775_807 ); 
assert_eq!( config.get::("i64max").unwrap(), 9_223_372_036_854_775_807 ); assert_eq!( config.get::("ei64max").unwrap(), 9_223_372_036_854_775_807 ); assert_eq!( config.get::("ei64max").unwrap(), 9_223_372_036_854_775_807 ); assert_error( config.get::("nneg").unwrap_err(), "\ error in [..].cargo/config: could not load config key `nneg` Caused by: invalid value: integer `-123456789`, expected u32", ); assert_error( config.get::("eneg").unwrap_err(), "\ error in environment variable `CARGO_ENEG`: could not load config key `eneg` Caused by: invalid value: integer `-1`, expected u32", ); assert_error( config.get::("npos").unwrap_err(), "\ error in [..].cargo/config: could not load config key `npos` Caused by: invalid value: integer `123456789`, expected i8", ); assert_error( config.get::("epos").unwrap_err(), "\ error in environment variable `CARGO_EPOS`: could not load config key `epos` Caused by: invalid value: integer `123456789`, expected i8", ); } #[cargo_test] fn config_get_ssl_version_missing() { write_config( "\ [http] hello = 'world' ", ); let config = new_config(); assert!(config .get::>("http.ssl-version") .unwrap() .is_none()); } #[cargo_test] fn config_get_ssl_version_single() { write_config( "\ [http] ssl-version = 'tlsv1.2' ", ); let config = new_config(); let a = config .get::>("http.ssl-version") .unwrap() .unwrap(); match a { SslVersionConfig::Single(v) => assert_eq!(&v, "tlsv1.2"), SslVersionConfig::Range(_) => panic!("Did not expect ssl version min/max."), }; } #[cargo_test] fn config_get_ssl_version_min_max() { write_config( "\ [http] ssl-version.min = 'tlsv1.2' ssl-version.max = 'tlsv1.3' ", ); let config = new_config(); let a = config .get::>("http.ssl-version") .unwrap() .unwrap(); match a { SslVersionConfig::Single(_) => panic!("Did not expect exact ssl version."), SslVersionConfig::Range(range) => { assert_eq!(range.min, Some(String::from("tlsv1.2"))); assert_eq!(range.max, Some(String::from("tlsv1.3"))); } }; } #[cargo_test] fn 
config_get_ssl_version_both_forms_configured() { // this is not allowed write_config( "\ [http] ssl-version = 'tlsv1.1' ssl-version.min = 'tlsv1.2' ssl-version.max = 'tlsv1.3' ", ); let config = new_config(); assert_error( config .get::("http.ssl-version") .unwrap_err(), "\ could not load Cargo configuration Caused by: could not parse TOML configuration in `[..]/.cargo/config` Caused by: could not parse input as TOML Caused by: TOML parse error at line 3, column 1 | 3 | ssl-version.min = 'tlsv1.2' | ^ Dotted key `ssl-version` attempted to extend non-table type (string) ", ); assert!(config .get::>("http.ssl-version") .unwrap() .is_none()); } #[cargo_test] /// Assert that unstable options can be configured with the `unstable` table in /// cargo config files fn unstable_table_notation() { write_config( "\ [unstable] print-im-a-teapot = true ", ); let config = ConfigBuilder::new().nightly_features_allowed(true).build(); assert_eq!(config.cli_unstable().print_im_a_teapot, true); } #[cargo_test] /// Assert that dotted notation works for configuring unstable options fn unstable_dotted_notation() { write_config( "\ unstable.print-im-a-teapot = true ", ); let config = ConfigBuilder::new().nightly_features_allowed(true).build(); assert_eq!(config.cli_unstable().print_im_a_teapot, true); } #[cargo_test] /// Assert that Zflags on the CLI take precedence over those from config fn unstable_cli_precedence() { write_config( "\ unstable.print-im-a-teapot = true ", ); let config = ConfigBuilder::new().nightly_features_allowed(true).build(); assert_eq!(config.cli_unstable().print_im_a_teapot, true); let config = ConfigBuilder::new() .unstable_flag("print-im-a-teapot=no") .build(); assert_eq!(config.cli_unstable().print_im_a_teapot, false); } #[cargo_test] /// Assert that attempting to set an unstable flag that doesn't exist via config /// is ignored on stable fn unstable_invalid_flag_ignored_on_stable() { write_config( "\ unstable.an-invalid-flag = 'yes' ", ); 
assert!(ConfigBuilder::new().build_err().is_ok()); } #[cargo_test] /// Assert that unstable options can be configured with the `unstable` table in /// cargo config files fn unstable_flags_ignored_on_stable() { write_config( "\ [unstable] print-im-a-teapot = true ", ); // Enforce stable channel even when testing on nightly. let config = ConfigBuilder::new().nightly_features_allowed(false).build(); assert_eq!(config.cli_unstable().print_im_a_teapot, false); } #[cargo_test] fn table_merge_failure() { // Config::merge fails to merge entries in two tables. write_config_at( "foo/.cargo/config", " [table] key = ['foo'] ", ); write_config_at( ".cargo/config", " [table] key = 'bar' ", ); #[derive(Debug, Deserialize)] #[allow(dead_code)] struct Table { key: StringList, } let config = ConfigBuilder::new().cwd("foo").build(); assert_error( config.get::("table").unwrap_err(), "\ could not load Cargo configuration Caused by: failed to merge configuration at `[..]/.cargo/config` Caused by: failed to merge key `table` between [..]/foo/.cargo/config and [..]/.cargo/config Caused by: failed to merge key `key` between [..]/foo/.cargo/config and [..]/.cargo/config Caused by: failed to merge config value from `[..]/.cargo/config` into `[..]/foo/.cargo/config`: \ expected array, but found string", ); } #[cargo_test] fn non_string_in_array() { // Currently only strings are supported. write_config("foo = [1, 2, 3]"); let config = new_config(); assert_error( config.get::>("foo").unwrap_err(), "\ could not load Cargo configuration Caused by: failed to load TOML configuration from `[..]/.cargo/config` Caused by: failed to parse key `foo` Caused by: expected string but found integer in list", ); } #[cargo_test] fn struct_with_opt_inner_struct() { // Struct with a key that is Option of another struct. // Check that can be defined with environment variable. 
#[derive(Deserialize)] struct Inner { value: Option, } #[derive(Deserialize)] struct Foo { inner: Option, } let config = ConfigBuilder::new() .env("CARGO_FOO_INNER_VALUE", "12") .build(); let f: Foo = config.get("foo").unwrap(); assert_eq!(f.inner.unwrap().value.unwrap(), 12); } #[cargo_test] fn struct_with_default_inner_struct() { // Struct with serde defaults. // Check that can be defined with environment variable. #[derive(Deserialize, Default)] #[serde(default)] struct Inner { value: i32, } #[derive(Deserialize, Default)] #[serde(default)] struct Foo { inner: Inner, } let config = ConfigBuilder::new() .env("CARGO_FOO_INNER_VALUE", "12") .build(); let f: Foo = config.get("foo").unwrap(); assert_eq!(f.inner.value, 12); } #[cargo_test] fn overlapping_env_config() { // Issue where one key is a prefix of another. #[derive(Deserialize)] #[serde(rename_all = "kebab-case")] struct Ambig { debug: Option, debug_assertions: Option, } let config = ConfigBuilder::new() .env("CARGO_AMBIG_DEBUG_ASSERTIONS", "true") .build(); let s: Ambig = config.get("ambig").unwrap(); assert_eq!(s.debug_assertions, Some(true)); assert_eq!(s.debug, None); let config = ConfigBuilder::new().env("CARGO_AMBIG_DEBUG", "0").build(); let s: Ambig = config.get("ambig").unwrap(); assert_eq!(s.debug_assertions, None); assert_eq!(s.debug, Some(0)); let config = ConfigBuilder::new() .env("CARGO_AMBIG_DEBUG", "1") .env("CARGO_AMBIG_DEBUG_ASSERTIONS", "true") .build(); let s: Ambig = config.get("ambig").unwrap(); assert_eq!(s.debug_assertions, Some(true)); assert_eq!(s.debug, Some(1)); } #[cargo_test] fn overlapping_env_with_defaults_errors_out() { // Issue where one key is a prefix of another. // This is a limitation of mapping environment variables on to a hierarchy. // Check that we error out when we hit ambiguity in this way, rather than // the more-surprising defaulting through. // If, in the future, we can handle this more correctly, feel free to delete // this test. 
#[derive(Deserialize, Default)] #[serde(default, rename_all = "kebab-case")] struct Ambig { debug: u32, debug_assertions: bool, } let config = ConfigBuilder::new() .env("CARGO_AMBIG_DEBUG_ASSERTIONS", "true") .build(); let err = config.get::("ambig").err().unwrap(); assert!(format!("{}", err).contains("missing config key `ambig.debug`")); let config = ConfigBuilder::new().env("CARGO_AMBIG_DEBUG", "5").build(); let s: Ambig = config.get("ambig").unwrap(); assert_eq!(s.debug_assertions, bool::default()); assert_eq!(s.debug, 5); let config = ConfigBuilder::new() .env("CARGO_AMBIG_DEBUG", "1") .env("CARGO_AMBIG_DEBUG_ASSERTIONS", "true") .build(); let s: Ambig = config.get("ambig").unwrap(); assert_eq!(s.debug_assertions, true); assert_eq!(s.debug, 1); } #[cargo_test] fn struct_with_overlapping_inner_struct_and_defaults() { // Struct with serde defaults. // Check that can be defined with environment variable. #[derive(Deserialize, Default)] #[serde(default)] struct Inner { value: i32, } // Containing struct with a prefix of inner // // This is a limitation of mapping environment variables on to a hierarchy. // Check that we error out when we hit ambiguity in this way, rather than // the more-surprising defaulting through. // If, in the future, we can handle this more correctly, feel free to delete // this case. 
#[derive(Deserialize, Default)] #[serde(default)] struct PrefixContainer { inn: bool, inner: Inner, } let config = ConfigBuilder::new() .env("CARGO_PREFIXCONTAINER_INNER_VALUE", "12") .build(); let err = config .get::("prefixcontainer") .err() .unwrap(); assert!(format!("{}", err).contains("missing config key `prefixcontainer.inn`")); let config = ConfigBuilder::new() .env("CARGO_PREFIXCONTAINER_INNER_VALUE", "12") .env("CARGO_PREFIXCONTAINER_INN", "true") .build(); let f: PrefixContainer = config.get("prefixcontainer").unwrap(); assert_eq!(f.inner.value, 12); assert_eq!(f.inn, true); // Containing struct where the inner value's field is a prefix of another // // This is a limitation of mapping environment variables on to a hierarchy. // Check that we error out when we hit ambiguity in this way, rather than // the more-surprising defaulting through. // If, in the future, we can handle this more correctly, feel free to delete // this case. #[derive(Deserialize, Default)] #[serde(default)] struct InversePrefixContainer { inner_field: bool, inner: Inner, } let config = ConfigBuilder::new() .env("CARGO_INVERSEPREFIXCONTAINER_INNER_VALUE", "12") .build(); let f: InversePrefixContainer = config.get("inverseprefixcontainer").unwrap(); assert_eq!(f.inner_field, bool::default()); assert_eq!(f.inner.value, 12); } #[cargo_test] fn string_list_tricky_env() { // Make sure StringList handles typed env values. let config = ConfigBuilder::new() .env("CARGO_KEY1", "123") .env("CARGO_KEY2", "true") .env("CARGO_KEY3", "1 2") .build(); let x = config.get::("key1").unwrap(); assert_eq!(x.as_slice(), &["123".to_string()]); let x = config.get::("key2").unwrap(); assert_eq!(x.as_slice(), &["true".to_string()]); let x = config.get::("key3").unwrap(); assert_eq!(x.as_slice(), &["1".to_string(), "2".to_string()]); } #[cargo_test] fn string_list_wrong_type() { // What happens if StringList is given then wrong type. 
write_config("some_list = 123"); let config = ConfigBuilder::new().build(); assert_error( config.get::("some_list").unwrap_err(), "\ invalid configuration for key `some_list` expected a string or array of strings, but found a integer for `some_list` in [..]/.cargo/config", ); write_config("some_list = \"1 2\""); let config = ConfigBuilder::new().build(); let x = config.get::("some_list").unwrap(); assert_eq!(x.as_slice(), &["1".to_string(), "2".to_string()]); } #[cargo_test] fn string_list_advanced_env() { // StringList with advanced env. let config = ConfigBuilder::new() .unstable_flag("advanced-env") .env("CARGO_KEY1", "[]") .env("CARGO_KEY2", "['1 2', '3']") .env("CARGO_KEY3", "[123]") .build(); let x = config.get::("key1").unwrap(); assert_eq!(x.as_slice(), &[] as &[String]); let x = config.get::("key2").unwrap(); assert_eq!(x.as_slice(), &["1 2".to_string(), "3".to_string()]); assert_error( config.get::("key3").unwrap_err(), "error in environment variable `CARGO_KEY3`: expected string, found integer", ); } #[cargo_test] fn parse_strip_with_string() { write_config( "\ [profile.release] strip = 'debuginfo' ", ); let config = new_config(); let p: toml::TomlProfile = config.get("profile.release").unwrap(); let strip = p.strip.unwrap(); assert_eq!(strip, toml::StringOrBool::String("debuginfo".to_string())); } #[cargo_test] fn cargo_target_empty_cfg() { write_config( "\ [build] target-dir = '' ", ); let config = new_config(); assert_error( config.target_dir().unwrap_err(), "the target directory is set to an empty string in [..]/.cargo/config", ); } #[cargo_test] fn cargo_target_empty_env() { let project = project().build(); project.cargo("build") .env("CARGO_TARGET_DIR", "") .with_stderr("error: the target directory is set to an empty string in the `CARGO_TARGET_DIR` environment variable") .with_status(101) .run() } #[cargo_test] fn all_profile_options() { // Check that all profile options can be serialized/deserialized. 
let base_settings = toml::TomlProfile { opt_level: Some(toml::TomlOptLevel("0".to_string())), lto: Some(toml::StringOrBool::String("thin".to_string())), codegen_backend: Some(InternedString::new("example")), codegen_units: Some(123), debug: Some(toml::U32OrBool::U32(1)), split_debuginfo: Some("packed".to_string()), debug_assertions: Some(true), rpath: Some(true), panic: Some("abort".to_string()), overflow_checks: Some(true), incremental: Some(true), dir_name: Some(InternedString::new("dir_name")), inherits: Some(InternedString::new("debug")), strip: Some(toml::StringOrBool::String("symbols".to_string())), package: None, build_override: None, rustflags: None, }; let mut overrides = BTreeMap::new(); let key = toml::ProfilePackageSpec::Spec(PackageIdSpec::parse("foo").unwrap()); overrides.insert(key, base_settings.clone()); let profile = toml::TomlProfile { build_override: Some(Box::new(base_settings.clone())), package: Some(overrides), ..base_settings }; let profile_toml = toml_edit::easy::to_string(&profile).unwrap(); let roundtrip: toml::TomlProfile = toml_edit::easy::from_str(&profile_toml).unwrap(); let roundtrip_toml = toml_edit::easy::to_string(&roundtrip).unwrap(); compare::assert_match_exact(&profile_toml, &roundtrip_toml); } cargo-0.66.0/tests/testsuite/config_cli.rs000066400000000000000000000337271432416201200205420ustar00rootroot00000000000000//! Tests for the --config CLI option. use super::config::{assert_error, assert_match, read_output, write_config, ConfigBuilder}; use cargo::util::config::Definition; use cargo_test_support::paths; use std::{collections::HashMap, fs}; #[cargo_test] fn basic() { // Simple example. let config = ConfigBuilder::new().config_arg("foo='bar'").build(); assert_eq!(config.get::("foo").unwrap(), "bar"); } #[cargo_test] fn cli_priority() { // Command line takes priority over files and env vars. 
write_config( " demo_list = ['a'] [build] jobs = 3 rustc = 'file' [term] quiet = false verbose = false ", ); let config = ConfigBuilder::new().build(); assert_eq!(config.get::("build.jobs").unwrap(), 3); assert_eq!(config.get::("build.rustc").unwrap(), "file"); assert_eq!(config.get::("term.quiet").unwrap(), false); assert_eq!(config.get::("term.verbose").unwrap(), false); let config = ConfigBuilder::new() .env("CARGO_BUILD_JOBS", "2") .env("CARGO_BUILD_RUSTC", "env") .env("CARGO_TERM_VERBOSE", "false") .config_arg("build.jobs=1") .config_arg("build.rustc='cli'") .config_arg("term.verbose=true") .build(); assert_eq!(config.get::("build.jobs").unwrap(), 1); assert_eq!(config.get::("build.rustc").unwrap(), "cli"); assert_eq!(config.get::("term.verbose").unwrap(), true); // Setting both term.verbose and term.quiet is invalid and is tested // in the run test suite. let config = ConfigBuilder::new() .env("CARGO_TERM_QUIET", "false") .config_arg("term.quiet=true") .build(); assert_eq!(config.get::("term.quiet").unwrap(), true); } #[cargo_test] fn merges_array() { // Array entries are appended. write_config( " [build] rustflags = ['--file'] ", ); let config = ConfigBuilder::new() .config_arg("build.rustflags = ['--cli']") .build(); assert_eq!( config.get::>("build.rustflags").unwrap(), ["--file", "--cli"] ); // With normal env. let config = ConfigBuilder::new() .env("CARGO_BUILD_RUSTFLAGS", "--env1 --env2") .config_arg("build.rustflags = ['--cli']") .build(); // The order of cli/env is a little questionable here, but would require // much more complex merging logic. assert_eq!( config.get::>("build.rustflags").unwrap(), ["--file", "--cli", "--env1", "--env2"] ); // With advanced-env. let config = ConfigBuilder::new() .unstable_flag("advanced-env") .env("CARGO_BUILD_RUSTFLAGS", "--env") .config_arg("build.rustflags = ['--cli']") .build(); assert_eq!( config.get::>("build.rustflags").unwrap(), ["--file", "--cli", "--env"] ); // Merges multiple instances. 
let config = ConfigBuilder::new() .config_arg("build.rustflags=['--one']") .config_arg("build.rustflags=['--two']") .build(); assert_eq!( config.get::>("build.rustflags").unwrap(), ["--file", "--one", "--two"] ); } #[cargo_test] fn string_list_array() { // Using the StringList type. write_config( " [build] rustflags = ['--file'] ", ); let config = ConfigBuilder::new() .config_arg("build.rustflags = ['--cli']") .build(); assert_eq!( config .get::("build.rustflags") .unwrap() .as_slice(), ["--file", "--cli"] ); // With normal env. let config = ConfigBuilder::new() .env("CARGO_BUILD_RUSTFLAGS", "--env1 --env2") .config_arg("build.rustflags = ['--cli']") .build(); assert_eq!( config .get::("build.rustflags") .unwrap() .as_slice(), ["--file", "--cli", "--env1", "--env2"] ); // With advanced-env. let config = ConfigBuilder::new() .unstable_flag("advanced-env") .env("CARGO_BUILD_RUSTFLAGS", "['--env']") .config_arg("build.rustflags = ['--cli']") .build(); assert_eq!( config .get::("build.rustflags") .unwrap() .as_slice(), ["--file", "--cli", "--env"] ); } #[cargo_test] fn merges_table() { // Tables are merged. write_config( " [foo] key1 = 1 key2 = 2 key3 = 3 ", ); let config = ConfigBuilder::new() .config_arg("foo.key2 = 4") .config_arg("foo.key3 = 5") .config_arg("foo.key4 = 6") .build(); assert_eq!(config.get::("foo.key1").unwrap(), 1); assert_eq!(config.get::("foo.key2").unwrap(), 4); assert_eq!(config.get::("foo.key3").unwrap(), 5); assert_eq!(config.get::("foo.key4").unwrap(), 6); // With env. 
let config = ConfigBuilder::new() .env("CARGO_FOO_KEY3", "7") .env("CARGO_FOO_KEY4", "8") .env("CARGO_FOO_KEY5", "9") .config_arg("foo.key2 = 4") .config_arg("foo.key3 = 5") .config_arg("foo.key4 = 6") .build(); assert_eq!(config.get::("foo.key1").unwrap(), 1); assert_eq!(config.get::("foo.key2").unwrap(), 4); assert_eq!(config.get::("foo.key3").unwrap(), 5); assert_eq!(config.get::("foo.key4").unwrap(), 6); assert_eq!(config.get::("foo.key5").unwrap(), 9); } #[cargo_test] fn merge_array_mixed_def_paths() { // Merging of arrays with different def sites. write_config( " paths = ['file'] ", ); // Create a directory for CWD to differentiate the paths. let somedir = paths::root().join("somedir"); fs::create_dir(&somedir).unwrap(); let config = ConfigBuilder::new() .cwd(&somedir) .config_arg("paths=['cli']") // env is currently ignored for get_list() .env("CARGO_PATHS", "env") .build(); let paths = config.get_list("paths").unwrap().unwrap(); // The definition for the root value is somewhat arbitrary, but currently starts with the file because that is what is loaded first. assert_eq!(paths.definition, Definition::Path(paths::root())); assert_eq!(paths.val.len(), 2); assert_eq!(paths.val[0].0, "file"); assert_eq!(paths.val[0].1.root(&config), paths::root()); assert_eq!(paths.val[1].0, "cli"); assert_eq!(paths.val[1].1.root(&config), somedir); } #[cargo_test] fn enforces_format() { // These dotted key expressions should all be fine. 
let config = ConfigBuilder::new() .config_arg("a=true") .config_arg(" b.a = true ") .config_arg("c.\"b\".'a'=true") .config_arg("d.\"=\".'='=true") .config_arg("e.\"'\".'\"'=true") .build(); assert_eq!(config.get::("a").unwrap(), true); assert_eq!( config.get::>("b").unwrap(), HashMap::from([("a".to_string(), true)]) ); assert_eq!( config .get::>>("c") .unwrap(), HashMap::from([("b".to_string(), HashMap::from([("a".to_string(), true)]))]) ); assert_eq!( config .get::>>("d") .unwrap(), HashMap::from([("=".to_string(), HashMap::from([("=".to_string(), true)]))]) ); assert_eq!( config .get::>>("e") .unwrap(), HashMap::from([("'".to_string(), HashMap::from([("\"".to_string(), true)]))]) ); // But anything that's not a dotted key expression should be disallowed. let _ = ConfigBuilder::new() .config_arg("[a] foo=true") .build_err() .unwrap_err(); let _ = ConfigBuilder::new() .config_arg("a = true\nb = true") .build_err() .unwrap_err(); // We also disallow overwriting with tables since it makes merging unclear. let _ = ConfigBuilder::new() .config_arg("a = { first = true, second = false }") .build_err() .unwrap_err(); let _ = ConfigBuilder::new() .config_arg("a = { first = true }") .build_err() .unwrap_err(); } #[cargo_test] fn unused_key() { // Unused key passed on command line. let config = ConfigBuilder::new().config_arg("build.unused = 2").build(); config.build_config().unwrap(); let output = read_output(config); let expected = "\ warning: unused config key `build.unused` in `--config cli option` "; assert_match(expected, &output); } #[cargo_test] fn rerooted_remains() { // Re-rooting keeps cli args. 
let somedir = paths::root().join("somedir"); fs::create_dir_all(somedir.join(".cargo")).unwrap(); fs::write( somedir.join(".cargo").join("config"), " a = 'file1' b = 'file2' ", ) .unwrap(); let mut config = ConfigBuilder::new() .cwd(&somedir) .config_arg("b='cli1'") .config_arg("c='cli2'") .build(); assert_eq!(config.get::("a").unwrap(), "file1"); assert_eq!(config.get::("b").unwrap(), "cli1"); assert_eq!(config.get::("c").unwrap(), "cli2"); config.reload_rooted_at(paths::root()).unwrap(); assert_eq!(config.get::>("a").unwrap(), None); assert_eq!(config.get::("b").unwrap(), "cli1"); assert_eq!(config.get::("c").unwrap(), "cli2"); } #[cargo_test] fn bad_parse() { // Fail to TOML parse. let config = ConfigBuilder::new().config_arg("abc").build_err(); assert_error( config.unwrap_err(), "\ failed to parse value from --config argument `abc` as a dotted key expression Caused by: TOML parse error at line 1, column 4 | 1 | abc | ^ Unexpected end of input Expected `.` or `=` ", ); let config = ConfigBuilder::new().config_arg("").build_err(); assert_error( config.unwrap_err(), "--config argument `` was not a TOML dotted key expression (such as `build.jobs = 2`)", ); } #[cargo_test] fn too_many_values() { // Currently restricted to only 1 value. 
let config = ConfigBuilder::new().config_arg("a=1\nb=2").build_err(); assert_error( config.unwrap_err(), "\ --config argument `a=1 b=2` was not a TOML dotted key expression (such as `build.jobs = 2`)", ); } #[cargo_test] fn no_disallowed_values() { let config = ConfigBuilder::new() .config_arg("registry.token=\"hello\"") .build_err(); assert_error( config.unwrap_err(), "registry.token cannot be set through --config for security reasons", ); let config = ConfigBuilder::new() .config_arg("registries.crates-io.token=\"hello\"") .build_err(); assert_error( config.unwrap_err(), "registries.crates-io.token cannot be set through --config for security reasons", ); } #[cargo_test] fn no_inline_table_value() { // Disallow inline tables let config = ConfigBuilder::new() .config_arg("a.b={c = \"d\"}") .build_err(); assert_error( config.unwrap_err(), "--config argument `a.b={c = \"d\"}` sets a value to an inline table, which is not accepted" ); } #[cargo_test] fn no_array_of_tables_values() { // Disallow array-of-tables when not in dotted form let config = ConfigBuilder::new() .config_arg("[[a.b]]\nc = \"d\"") .build_err(); assert_error( config.unwrap_err(), "\ --config argument `[[a.b]] c = \"d\"` was not a TOML dotted key expression (such as `build.jobs = 2`)", ); } #[cargo_test] fn no_comments() { // Disallow comments in dotted form. let config = ConfigBuilder::new() .config_arg("a.b = \"c\" # exactly") .build_err(); assert_error( config.unwrap_err(), "\ --config argument `a.b = \"c\" # exactly` includes non-whitespace decoration", ); let config = ConfigBuilder::new() .config_arg("# exactly\na.b = \"c\"") .build_err(); assert_error( config.unwrap_err(), "\ --config argument `# exactly\na.b = \"c\"` includes non-whitespace decoration", ); } #[cargo_test] fn bad_cv_convert() { // ConfigValue does not support all TOML types. 
let config = ConfigBuilder::new().config_arg("a=2019-12-01").build_err(); assert_error( config.unwrap_err(), "\ failed to convert --config argument `a=2019-12-01` Caused by: failed to parse key `a` Caused by: found TOML configuration value of unknown type `datetime`", ); } #[cargo_test] fn fail_to_merge_multiple_args() { // Error message when multiple args fail to merge. let config = ConfigBuilder::new() .config_arg("foo='a'") .config_arg("foo=['a']") .build_err(); // This is a little repetitive, but hopefully the user can figure it out. assert_error( config.unwrap_err(), "\ failed to merge --config argument `foo=['a']` Caused by: failed to merge key `foo` between --config cli option and --config cli option Caused by: failed to merge config value from `--config cli option` into `--config cli option`: \ expected string, but found array", ); } #[cargo_test] fn cli_path() { // --config path_to_file fs::write(paths::root().join("myconfig.toml"), "key = 123").unwrap(); let config = ConfigBuilder::new() .cwd(paths::root()) .config_arg("myconfig.toml") .build(); assert_eq!(config.get::("key").unwrap(), 123); let config = ConfigBuilder::new().config_arg("missing.toml").build_err(); assert_error( config.unwrap_err(), "\ failed to parse value from --config argument `missing.toml` as a dotted key expression Caused by: TOML parse error at line 1, column 13 | 1 | missing.toml | ^ Unexpected end of input Expected `.` or `=` ", ); } cargo-0.66.0/tests/testsuite/config_include.rs000066400000000000000000000137161432416201200214120ustar00rootroot00000000000000//! Tests for `include` config field. use super::config::{assert_error, write_config, write_config_at, ConfigBuilder}; use cargo_test_support::{no_such_file_err_msg, project}; #[cargo_test] fn gated() { // Requires -Z flag. 
write_config("include='other'"); write_config_at( ".cargo/other", " othervalue = 1 ", ); let config = ConfigBuilder::new().build(); assert_eq!(config.get::>("othervalue").unwrap(), None); let config = ConfigBuilder::new().unstable_flag("config-include").build(); assert_eq!(config.get::("othervalue").unwrap(), 1); } #[cargo_test] fn simple() { // Simple test. write_config_at( ".cargo/config", " include = 'other' key1 = 1 key2 = 2 ", ); write_config_at( ".cargo/other", " key2 = 3 key3 = 4 ", ); let config = ConfigBuilder::new().unstable_flag("config-include").build(); assert_eq!(config.get::("key1").unwrap(), 1); assert_eq!(config.get::("key2").unwrap(), 2); assert_eq!(config.get::("key3").unwrap(), 4); } #[cargo_test] fn works_with_cli() { write_config_at( ".cargo/config.toml", " include = 'other.toml' [build] rustflags = ['-W', 'unused'] ", ); write_config_at( ".cargo/other.toml", " [build] rustflags = ['-W', 'unsafe-code'] ", ); let p = project().file("src/lib.rs", "").build(); p.cargo("build -v") .with_stderr( "\ [COMPILING] foo v0.0.1 [..] [RUNNING] `rustc [..]-W unused` [FINISHED] [..] ", ) .run(); p.cargo("build -v -Z config-include") .masquerade_as_nightly_cargo(&["config-include"]) .with_stderr( "\ [COMPILING] foo v0.0.1 [..] [RUNNING] `rustc [..]-W unsafe-code -W unused` [FINISHED] [..] ", ) .run(); } #[cargo_test] fn left_to_right() { // How it merges multiple includes. write_config_at( ".cargo/config", " include = ['one', 'two'] primary = 1 ", ); write_config_at( ".cargo/one", " one = 1 primary = 2 ", ); write_config_at( ".cargo/two", " two = 2 primary = 3 ", ); let config = ConfigBuilder::new().unstable_flag("config-include").build(); assert_eq!(config.get::("primary").unwrap(), 1); assert_eq!(config.get::("one").unwrap(), 1); assert_eq!(config.get::("two").unwrap(), 2); } #[cargo_test] fn missing_file() { // Error when there's a missing file. 
write_config("include='missing'"); let config = ConfigBuilder::new() .unstable_flag("config-include") .build_err(); assert_error( config.unwrap_err(), &format!( "\ could not load Cargo configuration Caused by: failed to load config include `missing` from `[..]/.cargo/config` Caused by: failed to read configuration file `[..]/.cargo/missing` Caused by: {}", no_such_file_err_msg() ), ); } #[cargo_test] fn cycle() { // Detects a cycle. write_config_at(".cargo/config", "include='one'"); write_config_at(".cargo/one", "include='two'"); write_config_at(".cargo/two", "include='config'"); let config = ConfigBuilder::new() .unstable_flag("config-include") .build_err(); assert_error( config.unwrap_err(), "\ could not load Cargo configuration Caused by: failed to load config include `one` from `[..]/.cargo/config` Caused by: failed to load config include `two` from `[..]/.cargo/one` Caused by: failed to load config include `config` from `[..]/.cargo/two` Caused by: config `include` cycle detected with path `[..]/.cargo/config`", ); } #[cargo_test] fn cli_include() { // Using --config with include. // CLI takes priority over files. write_config_at( ".cargo/config", " foo = 1 bar = 2 ", ); write_config_at(".cargo/config-foo", "foo = 2"); let config = ConfigBuilder::new() .unstable_flag("config-include") .config_arg("include='.cargo/config-foo'") .build(); assert_eq!(config.get::("foo").unwrap(), 2); assert_eq!(config.get::("bar").unwrap(), 2); } #[cargo_test] fn bad_format() { // Not a valid format. write_config("include = 1"); let config = ConfigBuilder::new() .unstable_flag("config-include") .build_err(); assert_error( config.unwrap_err(), "\ could not load Cargo configuration Caused by: `include` expected a string or list, but found integer in `[..]/.cargo/config`", ); } #[cargo_test] fn cli_include_failed() { // Error message when CLI include fails to load. 
let config = ConfigBuilder::new() .unstable_flag("config-include") .config_arg("include='foobar'") .build_err(); assert_error( config.unwrap_err(), &format!( "\ failed to load --config include Caused by: failed to load config include `foobar` from `--config cli option` Caused by: failed to read configuration file `[..]/foobar` Caused by: {}", no_such_file_err_msg() ), ); } #[cargo_test] fn cli_merge_failed() { // Error message when CLI include merge fails. write_config("foo = ['a']"); write_config_at( ".cargo/other", " foo = 'b' ", ); let config = ConfigBuilder::new() .unstable_flag("config-include") .config_arg("include='.cargo/other'") .build_err(); // Maybe this error message should mention it was from an include file? assert_error( config.unwrap_err(), "\ failed to merge --config key `foo` into `[..]/.cargo/config` Caused by: failed to merge config value from `[..]/.cargo/other` into `[..]/.cargo/config`: \ expected array, but found string", ); } cargo-0.66.0/tests/testsuite/corrupt_git.rs000066400000000000000000000101031432416201200207660ustar00rootroot00000000000000//! Tests for corrupt git repos. 
use cargo_test_support::paths; use cargo_test_support::{basic_manifest, git, project}; use cargo_util::paths as cargopaths; use std::fs; use std::path::{Path, PathBuf}; #[cargo_test] fn deleting_database_files() { let project = project(); let git_project = git::new("bar", |project| { project .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) .file("src/lib.rs", "") }); let project = project .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] bar = {{ git = '{}' }} "#, git_project.url() ), ) .file("src/lib.rs", "") .build(); project.cargo("build").run(); let mut files = Vec::new(); find_files(&paths::home().join(".cargo/git/db"), &mut files); assert!(!files.is_empty()); let log = "cargo::sources::git=trace"; for file in files { if !file.exists() { continue; } println!("deleting {}", file.display()); cargopaths::remove_file(&file).unwrap(); project.cargo("build -v").env("CARGO_LOG", log).run(); if !file.exists() { continue; } println!("truncating {}", file.display()); make_writable(&file); fs::OpenOptions::new() .write(true) .open(&file) .unwrap() .set_len(2) .unwrap(); project.cargo("build -v").env("CARGO_LOG", log).run(); } } #[cargo_test] fn deleting_checkout_files() { let project = project(); let git_project = git::new("bar", |project| { project .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) .file("src/lib.rs", "") }); let project = project .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] bar = {{ git = '{}' }} "#, git_project.url() ), ) .file("src/lib.rs", "") .build(); project.cargo("build").run(); let dir = paths::home() .join(".cargo/git/checkouts") // get the first entry in the checkouts dir for the package's location .read_dir() .unwrap() .next() .unwrap() .unwrap() .path() // get the first child of that checkout dir for our checkout .read_dir() .unwrap() .next() .unwrap() .unwrap() .path() // and throw on .git to corrupt things .join(".git"); 
let mut files = Vec::new(); find_files(&dir, &mut files); assert!(!files.is_empty()); let log = "cargo::sources::git=trace"; for file in files { if !file.exists() { continue; } println!("deleting {}", file.display()); cargopaths::remove_file(&file).unwrap(); project.cargo("build -v").env("CARGO_LOG", log).run(); if !file.exists() { continue; } println!("truncating {}", file.display()); make_writable(&file); fs::OpenOptions::new() .write(true) .open(&file) .unwrap() .set_len(2) .unwrap(); project.cargo("build -v").env("CARGO_LOG", log).run(); } } fn make_writable(path: &Path) { let mut p = path.metadata().unwrap().permissions(); p.set_readonly(false); fs::set_permissions(path, p).unwrap(); } fn find_files(path: &Path, dst: &mut Vec) { for e in path.read_dir().unwrap() { let e = e.unwrap(); let path = e.path(); if e.file_type().unwrap().is_dir() { find_files(&path, dst); } else { dst.push(path); } } } cargo-0.66.0/tests/testsuite/credential_process.rs000066400000000000000000000315231432416201200223060ustar00rootroot00000000000000//! Tests for credential-process. use cargo_test_support::registry::TestRegistry; use cargo_test_support::{basic_manifest, cargo_process, paths, project, registry, Project}; use std::fs; fn toml_bin(proj: &Project, name: &str) -> String { proj.bin(name).display().to_string().replace('\\', "\\\\") } #[cargo_test] fn gated() { let _alternative = registry::RegistryBuilder::new() .alternative() .no_configure_token() .build(); let _cratesio = registry::RegistryBuilder::new() .no_configure_token() .build(); let p = project() .file( ".cargo/config", r#" [registry] credential-process = "false" "#, ) .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) .file("src/lib.rs", "") .build(); p.cargo("publish --no-verify") .masquerade_as_nightly_cargo(&["credential-process"]) .with_status(101) .with_stderr( "\ [UPDATING] [..] 
[ERROR] no upload token found, please run `cargo login` or pass `--token` ", ) .run(); p.change_file( ".cargo/config", r#" [registry.alternative] credential-process = "false" "#, ); p.cargo("publish --no-verify --registry alternative") .masquerade_as_nightly_cargo(&["credential-process"]) .with_status(101) .with_stderr( "\ [UPDATING] [..] [ERROR] no upload token found, please run `cargo login` or pass `--token` ", ) .run(); } #[cargo_test] fn warn_both_token_and_process() { // Specifying both credential-process and a token in config should issue a warning. let _server = registry::RegistryBuilder::new() .alternative() .no_configure_token() .build(); let p = project() .file( ".cargo/config", r#" [registries.alternative] token = "sekrit" credential-process = "false" "#, ) .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" description = "foo" authors = [] license = "MIT" homepage = "https://example.com/" "#, ) .file("src/lib.rs", "") .build(); p.cargo("publish --no-verify --registry alternative -Z credential-process") .masquerade_as_nightly_cargo(&["credential-process"]) .with_status(101) .with_stderr( "\ [ERROR] both `registries.alternative.token` and `registries.alternative.credential-process` \ were specified in the config\n\ Only one of these values may be set, remove one or the other to proceed. ", ) .run(); // Try with global credential-process, and registry-specific `token`. // This should silently use the config token, and not run the "false" exe. p.change_file( ".cargo/config", r#" [registry] credential-process = "false" [registries.alternative] token = "sekrit" "#, ); p.cargo("publish --no-verify --registry alternative -Z credential-process") .masquerade_as_nightly_cargo(&["credential-process"]) .with_stderr( "\ [UPDATING] [..] [PACKAGING] foo v0.1.0 [..] [UPLOADING] foo v0.1.0 [..] ", ) .run(); } /// Setup for a test that will issue a command that needs to fetch a token. 
/// /// This does the following: /// /// * Spawn a thread that will act as an API server. /// * Create a simple credential-process that will generate a fake token. /// * Create a simple `foo` project to run the test against. /// * Configure the credential-process config. /// /// Returns returns the simple `foo` project to test against and the API server handle. fn get_token_test() -> (Project, TestRegistry) { // API server that checks that the token is included correctly. let server = registry::RegistryBuilder::new() .no_configure_token() .token("sekrit") .alternative() .http_api() .build(); // The credential process to use. let cred_proj = project() .at("cred_proj") .file("Cargo.toml", &basic_manifest("test-cred", "1.0.0")) .file("src/main.rs", r#"fn main() { println!("sekrit"); } "#) .build(); cred_proj.cargo("build").run(); let p = project() .file( ".cargo/config", &format!( r#" [registries.alternative] index = "{}" credential-process = ["{}"] "#, server.index_url(), toml_bin(&cred_proj, "test-cred") ), ) .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" description = "foo" authors = [] license = "MIT" homepage = "https://example.com/" "#, ) .file("src/lib.rs", "") .build(); (p, server) } #[cargo_test] fn publish() { // Checks that credential-process is used for `cargo publish`. let (p, _t) = get_token_test(); p.cargo("publish --no-verify --registry alternative -Z credential-process") .masquerade_as_nightly_cargo(&["credential-process"]) .with_stderr( "\ [UPDATING] [..] [PACKAGING] foo v0.1.0 [..] [UPLOADING] foo v0.1.0 [..] ", ) .run(); } #[cargo_test] fn basic_unsupported() { // Non-action commands don't support login/logout. 
let _server = registry::RegistryBuilder::new() .no_configure_token() .build(); cargo_util::paths::append( &paths::home().join(".cargo/config"), br#" [registry] credential-process = "false" "#, ) .unwrap(); cargo_process("login -Z credential-process abcdefg") .masquerade_as_nightly_cargo(&["credential-process"]) .with_status(101) .with_stderr( "\ [UPDATING] [..] [ERROR] credential process `false` cannot be used to log in, \ the credential-process configuration value must pass the \ `{action}` argument in the config to support this command ", ) .run(); cargo_process("logout -Z credential-process") .masquerade_as_nightly_cargo(&["credential-process", "cargo-logout"]) .with_status(101) .with_stderr( "\ [ERROR] credential process `false` cannot be used to log out, \ the credential-process configuration value must pass the \ `{action}` argument in the config to support this command ", ) .run(); } #[cargo_test] fn login() { let server = registry::RegistryBuilder::new() .no_configure_token() .build(); // The credential process to use. 
let cred_proj = project() .at("cred_proj") .file("Cargo.toml", &basic_manifest("test-cred", "1.0.0")) .file( "src/main.rs", &r#" use std::io::Read; fn main() { assert_eq!(std::env::var("CARGO_REGISTRY_NAME").unwrap(), "crates-io"); assert_eq!(std::env::var("CARGO_REGISTRY_API_URL").unwrap(), "__API__"); assert_eq!(std::env::args().skip(1).next().unwrap(), "store"); let mut buffer = String::new(); std::io::stdin().read_to_string(&mut buffer).unwrap(); assert_eq!(buffer, "abcdefg\n"); std::fs::write("token-store", buffer).unwrap(); } "# .replace("__API__", server.api_url().as_str()), ) .build(); cred_proj.cargo("build").run(); cargo_util::paths::append( &paths::home().join(".cargo/config"), format!( r#" [registry] credential-process = ["{}", "{{action}}"] "#, toml_bin(&cred_proj, "test-cred") ) .as_bytes(), ) .unwrap(); cargo_process("login -Z credential-process abcdefg") .masquerade_as_nightly_cargo(&["credential-process"]) .with_stderr( "\ [UPDATING] [..] [LOGIN] token for `crates.io` saved ", ) .run(); assert_eq!( fs::read_to_string(paths::root().join("token-store")).unwrap(), "abcdefg\n" ); } #[cargo_test] fn logout() { let _server = registry::RegistryBuilder::new() .no_configure_token() .build(); // The credential process to use. 
let cred_proj = project() .at("cred_proj") .file("Cargo.toml", &basic_manifest("test-cred", "1.0.0")) .file( "src/main.rs", r#" use std::io::Read; fn main() { assert_eq!(std::env::var("CARGO_REGISTRY_NAME").unwrap(), "crates-io"); assert_eq!(std::env::args().skip(1).next().unwrap(), "erase"); std::fs::write("token-store", "").unwrap(); eprintln!("token for `{}` has been erased!", std::env::var("CARGO_REGISTRY_NAME").unwrap()); } "#, ) .build(); cred_proj.cargo("build").run(); cargo_util::paths::append( &paths::home().join(".cargo/config"), format!( r#" [registry] credential-process = ["{}", "{{action}}"] "#, toml_bin(&cred_proj, "test-cred") ) .as_bytes(), ) .unwrap(); cargo_process("logout -Z credential-process") .masquerade_as_nightly_cargo(&["credential-process", "cargo-logout"]) .with_stderr( "\ [UPDATING] [..] token for `crates-io` has been erased! [LOGOUT] token for `crates.io` has been removed from local storage ", ) .run(); assert_eq!( fs::read_to_string(paths::root().join("token-store")).unwrap(), "" ); } #[cargo_test] fn yank() { let (p, _t) = get_token_test(); p.cargo("yank --version 0.1.0 --registry alternative -Z credential-process") .masquerade_as_nightly_cargo(&["credential-process"]) .with_stderr( "\ [UPDATING] [..] [YANK] foo@0.1.0 ", ) .run(); } #[cargo_test] fn owner() { let (p, _t) = get_token_test(); p.cargo("owner --add username --registry alternative -Z credential-process") .masquerade_as_nightly_cargo(&["credential-process"]) .with_stderr( "\ [UPDATING] [..] [OWNER] completed! 
", ) .run(); } #[cargo_test] fn libexec_path() { // cargo: prefixed names use the sysroot let _server = registry::RegistryBuilder::new() .no_configure_token() .build(); cargo_util::paths::append( &paths::home().join(".cargo/config"), br#" [registry] credential-process = "cargo:doesnotexist" "#, ) .unwrap(); cargo_process("login -Z credential-process abcdefg") .masquerade_as_nightly_cargo(&["credential-process"]) .with_status(101) .with_stderr( // FIXME: Update "Caused by" error message once rust/pull/87704 is merged. // On Windows, changing to a custom executable resolver has changed the // error messages. &format!("\ [UPDATING] [..] [ERROR] failed to execute `[..]libexec/cargo-credential-doesnotexist[EXE]` to store authentication token for registry `crates-io` Caused by: [..] "), ) .run(); } #[cargo_test] fn invalid_token_output() { // Error when credential process does not output the expected format for a token. let _server = registry::RegistryBuilder::new() .alternative() .no_configure_token() .build(); let cred_proj = project() .at("cred_proj") .file("Cargo.toml", &basic_manifest("test-cred", "1.0.0")) .file("src/main.rs", r#"fn main() { print!("a\nb\n"); } "#) .build(); cred_proj.cargo("build").run(); cargo_util::paths::append( &paths::home().join(".cargo/config"), format!( r#" [registry] credential-process = ["{}"] "#, toml_bin(&cred_proj, "test-cred") ) .as_bytes(), ) .unwrap(); let p = project() .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) .file("src/lib.rs", "") .build(); p.cargo("publish --no-verify --registry alternative -Z credential-process") .masquerade_as_nightly_cargo(&["credential-process"]) .with_status(101) .with_stderr( "\ [UPDATING] [..] [ERROR] credential process `[..]test-cred[EXE]` returned more than one line of output; expected a single token ", ) .run(); } cargo-0.66.0/tests/testsuite/cross_compile.rs000066400000000000000000001053731432416201200213040ustar00rootroot00000000000000//! Tests for cross compiling with --target. //! //! 
See `cargo_test_support::cross_compile` for more detail. use cargo_test_support::rustc_host; use cargo_test_support::{basic_bin_manifest, basic_manifest, cross_compile, project}; #[cargo_test] fn simple_cross() { if cross_compile::disabled() { return; } let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] build = "build.rs" "#, ) .file( "build.rs", &format!( r#" fn main() {{ assert_eq!(std::env::var("TARGET").unwrap(), "{}"); }} "#, cross_compile::alternate() ), ) .file( "src/main.rs", &format!( r#" use std::env; fn main() {{ assert_eq!(env::consts::ARCH, "{}"); }} "#, cross_compile::alternate_arch() ), ) .build(); let target = cross_compile::alternate(); p.cargo("build -v --target").arg(&target).run(); assert!(p.target_bin(target, "foo").is_file()); if cross_compile::can_run_on_host() { p.process(&p.target_bin(target, "foo")).run(); } } #[cargo_test] fn simple_cross_config() { if cross_compile::disabled() { return; } let p = project() .file( ".cargo/config", &format!( r#" [build] target = "{}" "#, cross_compile::alternate() ), ) .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] build = "build.rs" "#, ) .file( "build.rs", &format!( r#" fn main() {{ assert_eq!(std::env::var("TARGET").unwrap(), "{}"); }} "#, cross_compile::alternate() ), ) .file( "src/main.rs", &format!( r#" use std::env; fn main() {{ assert_eq!(env::consts::ARCH, "{}"); }} "#, cross_compile::alternate_arch() ), ) .build(); let target = cross_compile::alternate(); p.cargo("build -v").run(); assert!(p.target_bin(target, "foo").is_file()); if cross_compile::can_run_on_host() { p.process(&p.target_bin(target, "foo")).run(); } } #[cargo_test] fn simple_deps() { if cross_compile::disabled() { return; } let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" "#, ) .file("src/main.rs", "extern crate bar; fn main() { bar::bar(); }") .build(); let _p2 = project() 
.at("bar") .file("Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("src/lib.rs", "pub fn bar() {}") .build(); let target = cross_compile::alternate(); p.cargo("build --target").arg(&target).run(); assert!(p.target_bin(target, "foo").is_file()); if cross_compile::can_run_on_host() { p.process(&p.target_bin(target, "foo")).run(); } } /// Always take care of setting these so that /// `cross_compile::alternate()` is the actually-picked target fn per_crate_target_test( default_target: Option<&'static str>, forced_target: Option<&'static str>, arg_target: Option<&'static str>, ) { if cross_compile::disabled() { return; } let p = project() .file( "Cargo.toml", &format!( r#" cargo-features = ["per-package-target"] [package] name = "foo" version = "0.0.0" authors = [] build = "build.rs" {} {} "#, default_target .map(|t| format!(r#"default-target = "{}""#, t)) .unwrap_or(String::new()), forced_target .map(|t| format!(r#"forced-target = "{}""#, t)) .unwrap_or(String::new()), ), ) .file( "build.rs", &format!( r#" fn main() {{ assert_eq!(std::env::var("TARGET").unwrap(), "{}"); }} "#, cross_compile::alternate() ), ) .file( "src/main.rs", &format!( r#" use std::env; fn main() {{ assert_eq!(env::consts::ARCH, "{}"); }} "#, cross_compile::alternate_arch() ), ) .build(); let mut cmd = p.cargo("build -v"); if let Some(t) = arg_target { cmd.arg("--target").arg(&t); } cmd.masquerade_as_nightly_cargo(&["per-package-target"]) .run(); assert!(p.target_bin(cross_compile::alternate(), "foo").is_file()); if cross_compile::can_run_on_host() { p.process(&p.target_bin(cross_compile::alternate(), "foo")) .run(); } } #[cargo_test] fn per_crate_default_target_is_default() { per_crate_target_test(Some(cross_compile::alternate()), None, None); } #[cargo_test] fn per_crate_default_target_gets_overridden() { per_crate_target_test( Some(cross_compile::unused()), None, Some(cross_compile::alternate()), ); } #[cargo_test] fn per_crate_forced_target_is_default() { per_crate_target_test(None, 
Some(cross_compile::alternate()), None); } #[cargo_test] fn per_crate_forced_target_does_not_get_overridden() { per_crate_target_test( None, Some(cross_compile::alternate()), Some(cross_compile::unused()), ); } #[cargo_test] fn workspace_with_multiple_targets() { if cross_compile::disabled() { return; } let p = project() .file( "Cargo.toml", r#" [workspace] members = ["native", "cross"] "#, ) .file( "native/Cargo.toml", r#" cargo-features = ["per-package-target"] [package] name = "native" version = "0.0.0" authors = [] build = "build.rs" "#, ) .file( "native/build.rs", &format!( r#" fn main() {{ assert_eq!(std::env::var("TARGET").unwrap(), "{}"); }} "#, cross_compile::native() ), ) .file( "native/src/main.rs", &format!( r#" use std::env; fn main() {{ assert_eq!(env::consts::ARCH, "{}"); }} "#, cross_compile::native_arch() ), ) .file( "cross/Cargo.toml", &format!( r#" cargo-features = ["per-package-target"] [package] name = "cross" version = "0.0.0" authors = [] build = "build.rs" default-target = "{}" "#, cross_compile::alternate(), ), ) .file( "cross/build.rs", &format!( r#" fn main() {{ assert_eq!(std::env::var("TARGET").unwrap(), "{}"); }} "#, cross_compile::alternate() ), ) .file( "cross/src/main.rs", &format!( r#" use std::env; fn main() {{ assert_eq!(env::consts::ARCH, "{}"); }} "#, cross_compile::alternate_arch() ), ) .build(); let mut cmd = p.cargo("build -v"); cmd.masquerade_as_nightly_cargo(&["per-package-target"]) .run(); assert!(p.bin("native").is_file()); assert!(p.target_bin(cross_compile::alternate(), "cross").is_file()); p.process(&p.bin("native")).run(); if cross_compile::can_run_on_host() { p.process(&p.target_bin(cross_compile::alternate(), "cross")) .run(); } } #[cargo_test] fn linker() { if cross_compile::disabled() { return; } let target = cross_compile::alternate(); let p = project() .file( ".cargo/config", &format!( r#" [target.{}] linker = "my-linker-tool" "#, target ), ) .file("Cargo.toml", &basic_bin_manifest("foo")) .file( "src/foo.rs", 
&format!( r#" use std::env; fn main() {{ assert_eq!(env::consts::ARCH, "{}"); }} "#, cross_compile::alternate_arch() ), ) .build(); p.cargo("build -v --target") .arg(&target) .with_status(101) .with_stderr_contains(&format!( "\ [COMPILING] foo v0.5.0 ([CWD]) [RUNNING] `rustc --crate-name foo src/foo.rs [..]--crate-type bin \ --emit=[..]link[..]-C debuginfo=2 \ -C metadata=[..] \ --out-dir [CWD]/target/{target}/debug/deps \ --target {target} \ -C linker=my-linker-tool \ -L dependency=[CWD]/target/{target}/debug/deps \ -L dependency=[CWD]/target/debug/deps` ", target = target, )) .run(); } #[cargo_test(nightly, reason = "plugins are unstable")] fn plugin_with_extra_dylib_dep() { if cross_compile::disabled() { return; } let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" "#, ) .file( "src/main.rs", r#" #![feature(plugin)] #![plugin(bar)] fn main() {} "#, ) .build(); let _bar = project() .at("bar") .file( "Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [lib] name = "bar" plugin = true [dependencies.baz] path = "../baz" "#, ) .file( "src/lib.rs", r#" #![feature(rustc_private)] extern crate baz; extern crate rustc_driver; use rustc_driver::plugin::Registry; #[no_mangle] pub fn __rustc_plugin_registrar(reg: &mut Registry) { println!("{}", baz::baz()); } "#, ) .build(); let _baz = project() .at("baz") .file( "Cargo.toml", r#" [package] name = "baz" version = "0.0.1" authors = [] [lib] name = "baz" crate_type = ["dylib"] "#, ) .file("src/lib.rs", "pub fn baz() -> i32 { 1 }") .build(); let target = cross_compile::alternate(); foo.cargo("build --target").arg(&target).run(); } #[cargo_test] fn cross_tests() { if !cross_compile::can_run_on_host() { return; } let p = project() .file( "Cargo.toml", r#" [project] name = "foo" authors = [] version = "0.0.0" [[bin]] name = "bar" "#, ) .file( "src/bin/bar.rs", &format!( r#" #[allow(unused_extern_crates)] extern crate foo; use 
std::env; fn main() {{ assert_eq!(env::consts::ARCH, "{}"); }} #[test] fn test() {{ main() }} "#, cross_compile::alternate_arch() ), ) .file( "src/lib.rs", &format!( r#" use std::env; pub fn foo() {{ assert_eq!(env::consts::ARCH, "{}"); }} #[test] fn test_foo() {{ foo() }} "#, cross_compile::alternate_arch() ), ) .build(); let target = cross_compile::alternate(); p.cargo("test --target") .arg(&target) .with_stderr(&format!( "\ [COMPILING] foo v0.0.0 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/{triple}/debug/deps/foo-[..][EXE]) [RUNNING] [..] (target/{triple}/debug/deps/bar-[..][EXE])", triple = target )) .with_stdout_contains("test test_foo ... ok") .with_stdout_contains("test test ... ok") .run(); } #[cargo_test] fn no_cross_doctests() { if cross_compile::disabled() { return; } let p = project() .file( "src/lib.rs", r#" //! ``` //! extern crate foo; //! assert!(true); //! ``` "#, ) .build(); let host_output = "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/foo-[..][EXE]) [DOCTEST] foo "; println!("a"); p.cargo("test").with_stderr(&host_output).run(); println!("b"); let target = rustc_host(); p.cargo("test -v --target") .arg(&target) // Unordered since the two `rustc` invocations happen concurrently. .with_stderr_unordered(&format!( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name foo [..]--crate-type lib[..] [RUNNING] `rustc --crate-name foo [..]--test[..] [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] `[CWD]/target/{target}/debug/deps/foo-[..][EXE]` [DOCTEST] foo [RUNNING] `rustdoc [..]--target {target}[..]` ", )) .with_stdout( " running 0 tests test result: ok. 0 passed[..] running 1 test test src/lib.rs - (line 2) ... ok test result: ok. 1 passed[..] ", ) .run(); println!("c"); let target = cross_compile::alternate(); // This will build the library, but does not build or run doc tests. 
// This should probably be a warning or error. p.cargo("test -v --doc --target") .arg(&target) .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name foo [..] [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [NOTE] skipping doctests for foo v0.0.1 ([ROOT]/foo) (lib), \ cross-compilation doctests are not yet supported See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#doctest-xcompile \ for more information. ", ) .run(); if !cross_compile::can_run_on_host() { return; } // This tests the library, but does not run the doc tests. p.cargo("test -v --target") .arg(&target) .with_stderr(&format!( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name foo [..]--test[..] [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] `[CWD]/target/{triple}/debug/deps/foo-[..][EXE]` [NOTE] skipping doctests for foo v0.0.1 ([ROOT]/foo) (lib), \ cross-compilation doctests are not yet supported See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#doctest-xcompile \ for more information. 
", triple = target )) .run(); } #[cargo_test] fn simple_cargo_run() { if !cross_compile::can_run_on_host() { return; } let p = project() .file( "src/main.rs", &format!( r#" use std::env; fn main() {{ assert_eq!(env::consts::ARCH, "{}"); }} "#, cross_compile::alternate_arch() ), ) .build(); let target = cross_compile::alternate(); p.cargo("run --target").arg(&target).run(); } #[cargo_test] fn cross_with_a_build_script() { if cross_compile::disabled() { return; } let target = cross_compile::alternate(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] build = 'build.rs' "#, ) .file( "build.rs", &format!( r#" use std::env; use std::path::PathBuf; fn main() {{ assert_eq!(env::var("TARGET").unwrap(), "{0}"); let mut path = PathBuf::from(env::var_os("OUT_DIR").unwrap()); assert_eq!(path.file_name().unwrap().to_str().unwrap(), "out"); path.pop(); assert!(path.file_name().unwrap().to_str().unwrap() .starts_with("foo-")); path.pop(); assert_eq!(path.file_name().unwrap().to_str().unwrap(), "build"); path.pop(); assert_eq!(path.file_name().unwrap().to_str().unwrap(), "debug"); path.pop(); assert_eq!(path.file_name().unwrap().to_str().unwrap(), "{0}"); path.pop(); assert_eq!(path.file_name().unwrap().to_str().unwrap(), "target"); }} "#, target ), ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build -v --target") .arg(&target) .with_stderr(&format!( "\ [COMPILING] foo v0.0.0 ([CWD]) [RUNNING] `rustc [..] build.rs [..] --out-dir [CWD]/target/debug/build/foo-[..]` [RUNNING] `[CWD]/target/debug/build/foo-[..]/build-script-build` [RUNNING] `rustc [..] src/main.rs [..] --target {target} [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", target = target, )) .run(); } #[cargo_test] fn build_script_needed_for_host_and_target() { if cross_compile::disabled() { return; } let target = cross_compile::alternate(); let host = rustc_host(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] build = 'build.rs' [dependencies.d1] path = "d1" [build-dependencies.d2] path = "d2" "#, ) .file( "build.rs", r#" #[allow(unused_extern_crates)] extern crate d2; fn main() { d2::d2(); } "#, ) .file( "src/main.rs", " #[allow(unused_extern_crates)] extern crate d1; fn main() { d1::d1(); } ", ) .file( "d1/Cargo.toml", r#" [package] name = "d1" version = "0.0.0" authors = [] build = 'build.rs' "#, ) .file("d1/src/lib.rs", "pub fn d1() {}") .file( "d1/build.rs", r#" use std::env; fn main() { let target = env::var("TARGET").unwrap(); println!("cargo:rustc-flags=-L /path/to/{}", target); } "#, ) .file( "d2/Cargo.toml", r#" [package] name = "d2" version = "0.0.0" authors = [] [dependencies.d1] path = "../d1" "#, ) .file( "d2/src/lib.rs", " #[allow(unused_extern_crates)] extern crate d1; pub fn d2() { d1::d1(); } ", ) .build(); p.cargo("build -v --target") .arg(&target) .with_stderr_contains(&"[COMPILING] d1 v0.0.0 ([CWD]/d1)") .with_stderr_contains( "[RUNNING] `rustc [..] d1/build.rs [..] --out-dir [CWD]/target/debug/build/d1-[..]`", ) .with_stderr_contains("[RUNNING] `[CWD]/target/debug/build/d1-[..]/build-script-build`") .with_stderr_contains("[RUNNING] `rustc [..] d1/src/lib.rs [..]`") .with_stderr_contains("[COMPILING] d2 v0.0.0 ([CWD]/d2)") .with_stderr_contains(&format!( "[RUNNING] `rustc [..] d2/src/lib.rs [..] -L /path/to/{host}`", host = host )) .with_stderr_contains("[COMPILING] foo v0.0.0 ([CWD])") .with_stderr_contains(&format!( "[RUNNING] `rustc [..] build.rs [..] --out-dir [CWD]/target/debug/build/foo-[..] \ -L /path/to/{host}`", host = host )) .with_stderr_contains(&format!( "[RUNNING] `rustc [..] src/main.rs [..] --target {target} [..] 
\ -L /path/to/{target}`", target = target )) .run(); } #[cargo_test] fn build_deps_for_the_right_arch() { if cross_compile::disabled() { return; } let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [dependencies.d2] path = "d2" "#, ) .file("src/main.rs", "extern crate d2; fn main() {}") .file("d1/Cargo.toml", &basic_manifest("d1", "0.0.0")) .file("d1/src/lib.rs", "pub fn d1() {}") .file( "d2/Cargo.toml", r#" [package] name = "d2" version = "0.0.0" authors = [] build = "build.rs" [build-dependencies.d1] path = "../d1" "#, ) .file("d2/build.rs", "extern crate d1; fn main() {}") .file("d2/src/lib.rs", "") .build(); let target = cross_compile::alternate(); p.cargo("build -v --target").arg(&target).run(); } #[cargo_test] fn build_script_only_host() { if cross_compile::disabled() { return; } let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] build = "build.rs" [build-dependencies.d1] path = "d1" "#, ) .file("src/main.rs", "fn main() {}") .file("build.rs", "extern crate d1; fn main() {}") .file( "d1/Cargo.toml", r#" [package] name = "d1" version = "0.0.0" authors = [] build = "build.rs" "#, ) .file("d1/src/lib.rs", "pub fn d1() {}") .file( "d1/build.rs", r#" use std::env; fn main() { assert!(env::var("OUT_DIR").unwrap().replace("\\", "/") .contains("target/debug/build/d1-"), "bad: {:?}", env::var("OUT_DIR")); } "#, ) .build(); let target = cross_compile::alternate(); p.cargo("build -v --target").arg(&target).run(); } #[cargo_test] fn plugin_build_script_right_arch() { if cross_compile::disabled() { return; } let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" [lib] name = "foo" plugin = true "#, ) .file("build.rs", "fn main() {}") .file("src/lib.rs", "") .build(); p.cargo("build -v --target") .arg(cross_compile::alternate()) .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [RUNNING] `rustc [..] 
build.rs [..]` [RUNNING] `[..]/build-script-build` [RUNNING] `rustc [..] src/lib.rs [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn build_script_with_platform_specific_dependencies() { if cross_compile::disabled() { return; } let target = cross_compile::alternate(); let host = rustc_host(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" [build-dependencies.d1] path = "d1" "#, ) .file( "build.rs", " #[allow(unused_extern_crates)] extern crate d1; fn main() {} ", ) .file("src/lib.rs", "") .file( "d1/Cargo.toml", &format!( r#" [package] name = "d1" version = "0.0.0" authors = [] [target.{}.dependencies] d2 = {{ path = "../d2" }} "#, host ), ) .file( "d1/src/lib.rs", "#[allow(unused_extern_crates)] extern crate d2;", ) .file("d2/Cargo.toml", &basic_manifest("d2", "0.0.0")) .file("d2/src/lib.rs", "") .build(); p.cargo("build -v --target") .arg(&target) .with_stderr(&format!( "\ [COMPILING] d2 v0.0.0 ([..]) [RUNNING] `rustc [..] d2/src/lib.rs [..]` [COMPILING] d1 v0.0.0 ([..]) [RUNNING] `rustc [..] d1/src/lib.rs [..]` [COMPILING] foo v0.0.1 ([..]) [RUNNING] `rustc [..] build.rs [..]` [RUNNING] `[CWD]/target/debug/build/foo-[..]/build-script-build` [RUNNING] `rustc [..] src/lib.rs [..] --target {target} [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", target = target )) .run(); } #[cargo_test] fn platform_specific_dependencies_do_not_leak() { if cross_compile::disabled() { return; } let target = cross_compile::alternate(); let host = rustc_host(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" [dependencies.d1] path = "d1" [build-dependencies.d1] path = "d1" "#, ) .file("build.rs", "extern crate d1; fn main() {}") .file("src/lib.rs", "") .file( "d1/Cargo.toml", &format!( r#" [package] name = "d1" version = "0.0.0" authors = [] [target.{}.dependencies] d2 = {{ path = "../d2" }} "#, host ), ) .file("d1/src/lib.rs", "extern crate d2;") .file("d1/Cargo.toml", &basic_manifest("d1", "0.0.0")) .file("d2/src/lib.rs", "") .build(); p.cargo("build -v --target") .arg(&target) .with_status(101) .with_stderr_contains("[..] can't find crate for `d2`[..]") .run(); } #[cargo_test] fn platform_specific_variables_reflected_in_build_scripts() { if cross_compile::disabled() { return; } let target = cross_compile::alternate(); let host = rustc_host(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" [target.{host}.dependencies] d1 = {{ path = "d1" }} [target.{target}.dependencies] d2 = {{ path = "d2" }} "#, host = host, target = target ), ) .file( "build.rs", &format!( r#" use std::env; fn main() {{ let platform = env::var("TARGET").unwrap(); let (expected, not_expected) = match &platform[..] 
{{ "{host}" => ("DEP_D1_VAL", "DEP_D2_VAL"), "{target}" => ("DEP_D2_VAL", "DEP_D1_VAL"), _ => panic!("unknown platform") }}; env::var(expected).ok() .expect(&format!("missing {{}}", expected)); env::var(not_expected).err() .expect(&format!("found {{}}", not_expected)); }} "#, host = host, target = target ), ) .file("src/lib.rs", "") .file( "d1/Cargo.toml", r#" [package] name = "d1" version = "0.0.0" authors = [] links = "d1" build = "build.rs" "#, ) .file("d1/build.rs", r#"fn main() { println!("cargo:val=1") }"#) .file("d1/src/lib.rs", "") .file( "d2/Cargo.toml", r#" [package] name = "d2" version = "0.0.0" authors = [] links = "d2" build = "build.rs" "#, ) .file("d2/build.rs", r#"fn main() { println!("cargo:val=1") }"#) .file("d2/src/lib.rs", "") .build(); p.cargo("build -v").run(); p.cargo("build -v --target").arg(&target).run(); } #[cargo_test] #[cfg_attr( target_os = "macos", ignore = "don't have a dylib cross target on macos" )] fn cross_test_dylib() { if cross_compile::disabled() { return; } let target = cross_compile::alternate(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [lib] name = "foo" crate_type = ["dylib"] [dependencies.bar] path = "bar" "#, ) .file( "src/lib.rs", r#" extern crate bar as the_bar; pub fn bar() { the_bar::baz(); } #[test] fn foo() { bar(); } "#, ) .file( "tests/test.rs", r#" extern crate foo as the_foo; #[test] fn foo() { the_foo::bar(); } "#, ) .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [lib] name = "bar" crate_type = ["dylib"] "#, ) .file( "bar/src/lib.rs", &format!( r#" use std::env; pub fn baz() {{ assert_eq!(env::consts::ARCH, "{}"); }} "#, cross_compile::alternate_arch() ), ) .build(); p.cargo("test --target") .arg(&target) .with_stderr(&format!( "\ [COMPILING] bar v0.0.1 ([CWD]/bar) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] 
(target/{arch}/debug/deps/foo-[..][EXE]) [RUNNING] [..] (target/{arch}/debug/deps/test-[..][EXE])", arch = cross_compile::alternate() )) .with_stdout_contains_n("test foo ... ok", 2) .run(); } #[cargo_test(nightly, reason = "-Zdoctest-xcompile is unstable")] fn doctest_xcompile_linker() { if cross_compile::disabled() { return; } let target = cross_compile::alternate(); let p = project() .file( ".cargo/config", &format!( r#" [target.{}] linker = "my-linker-tool" "#, target ), ) .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) .file( "src/lib.rs", r#" /// ``` /// assert_eq!(1, 1); /// ``` pub fn foo() {} "#, ) .build(); // Fails because `my-linker-tool` doesn't actually exist. p.cargo("test --doc -v -Zdoctest-xcompile --target") .arg(&target) .with_status(101) .masquerade_as_nightly_cargo(&["doctest-xcompile"]) .with_stderr_contains(&format!( "\ [RUNNING] `rustdoc --crate-type lib --crate-name foo --test [..]\ --target {target} [..] -C linker=my-linker-tool[..] ", target = target, )) .run(); } cargo-0.66.0/tests/testsuite/cross_publish.rs000066400000000000000000000053571432416201200213230ustar00rootroot00000000000000//! Tests for publishing using the `--target` flag. use std::fs::File; use cargo_test_support::{cross_compile, project, publish, registry}; #[cargo_test] fn simple_cross_package() { if cross_compile::disabled() { return; } let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] license = "MIT" description = "foo" repository = "bar" "#, ) .file( "src/main.rs", &format!( r#" use std::env; fn main() {{ assert_eq!(env::consts::ARCH, "{}"); }} "#, cross_compile::alternate_arch() ), ) .build(); let target = cross_compile::alternate(); p.cargo("package --target") .arg(&target) .with_stderr( "\ [PACKAGING] foo v0.0.0 ([CWD]) [VERIFYING] foo v0.0.0 ([CWD]) [COMPILING] foo v0.0.0 ([CWD]/target/package/foo-0.0.0) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); // Check that the tarball contains the files let f = File::open(&p.root().join("target/package/foo-0.0.0.crate")).unwrap(); publish::validate_crate_contents( f, "foo-0.0.0.crate", &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], &[], ); } #[cargo_test] fn publish_with_target() { if cross_compile::disabled() { return; } registry::init(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] license = "MIT" description = "foo" repository = "bar" "#, ) .file( "src/main.rs", &format!( r#" use std::env; fn main() {{ assert_eq!(env::consts::ARCH, "{}"); }} "#, cross_compile::alternate_arch() ), ) .build(); let target = cross_compile::alternate(); p.cargo("publish --token sekrit") .arg("--target") .arg(&target) .with_stderr( "\ [UPDATING] `dummy-registry` index [PACKAGING] foo v0.0.0 ([CWD]) [VERIFYING] foo v0.0.0 ([CWD]) [COMPILING] foo v0.0.0 ([CWD]/target/package/foo-0.0.0) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [UPLOADING] foo v0.0.0 ([CWD]) ", ) .run(); } cargo-0.66.0/tests/testsuite/custom_target.rs000066400000000000000000000152411432416201200213150ustar00rootroot00000000000000//! Tests for custom json target specifications. use cargo_test_support::{basic_manifest, project}; use std::fs; const MINIMAL_LIB: &str = r#" #![feature(no_core)] #![feature(lang_items)] #![no_core] #[lang = "sized"] pub trait Sized { // Empty. } #[lang = "copy"] pub trait Copy { // Empty. 
} "#; const SIMPLE_SPEC: &str = r#" { "llvm-target": "x86_64-unknown-none-gnu", "data-layout": "e-m:e-i64:64-f80:128-n8:16:32:64-S128", "arch": "x86_64", "target-endian": "little", "target-pointer-width": "64", "target-c-int-width": "32", "os": "none", "linker-flavor": "ld.lld", "linker": "rust-lld", "executables": true } "#; #[cargo_test(nightly, reason = "requires features no_core, lang_items")] fn custom_target_minimal() { let p = project() .file( "src/lib.rs", &" __MINIMAL_LIB__ pub fn foo() -> u32 { 42 } " .replace("__MINIMAL_LIB__", MINIMAL_LIB), ) .file("custom-target.json", SIMPLE_SPEC) .build(); p.cargo("build --lib --target custom-target.json -v").run(); p.cargo("build --lib --target src/../custom-target.json -v") .run(); // Ensure that the correct style of flag is passed to --target with doc tests. p.cargo("test --doc --target src/../custom-target.json -v -Zdoctest-xcompile") .masquerade_as_nightly_cargo(&["doctest-xcompile", "no_core", "lang_items"]) .with_stderr_contains("[RUNNING] `rustdoc [..]--target [..]foo/custom-target.json[..]") .run(); } #[cargo_test(nightly, reason = "requires features no_core, lang_items, auto_traits")] fn custom_target_dependency() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = ["author@example.com"] [dependencies] bar = { path = "bar" } "#, ) .file( "src/lib.rs", r#" #![feature(no_core)] #![feature(lang_items)] #![feature(auto_traits)] #![no_core] extern crate bar; pub fn foo() -> u32 { bar::bar() } #[lang = "freeze"] unsafe auto trait Freeze {} "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file( "bar/src/lib.rs", &" __MINIMAL_LIB__ pub fn bar() -> u32 { 42 } " .replace("__MINIMAL_LIB__", MINIMAL_LIB), ) .file("custom-target.json", SIMPLE_SPEC) .build(); p.cargo("build --lib --target custom-target.json -v").run(); } #[cargo_test(nightly, reason = "requires features no_core, lang_items")] fn custom_bin_target() { let p = project() .file( "src/main.rs", &" 
#![no_main] __MINIMAL_LIB__ " .replace("__MINIMAL_LIB__", MINIMAL_LIB), ) .file("custom-bin-target.json", SIMPLE_SPEC) .build(); p.cargo("build --target custom-bin-target.json -v").run(); } #[cargo_test(nightly, reason = "requires features no_core, lang_items")] fn changing_spec_rebuilds() { // Changing the .json file will trigger a rebuild. let p = project() .file( "src/lib.rs", &" __MINIMAL_LIB__ pub fn foo() -> u32 { 42 } " .replace("__MINIMAL_LIB__", MINIMAL_LIB), ) .file("custom-target.json", SIMPLE_SPEC) .build(); p.cargo("build --lib --target custom-target.json -v").run(); p.cargo("build --lib --target custom-target.json -v") .with_stderr( "\ [FRESH] foo [..] [FINISHED] [..] ", ) .run(); let spec_path = p.root().join("custom-target.json"); let spec = fs::read_to_string(&spec_path).unwrap(); // Some arbitrary change that I hope is safe. let spec = spec.replace('{', "{\n\"vendor\": \"unknown\",\n"); fs::write(&spec_path, spec).unwrap(); p.cargo("build --lib --target custom-target.json -v") .with_stderr( "\ [COMPILING] foo v0.0.1 [..] [RUNNING] `rustc [..] [FINISHED] [..] ", ) .run(); } #[cargo_test(nightly, reason = "requires features no_core, lang_items")] fn changing_spec_relearns_crate_types() { // Changing the .json file will invalidate the cache of crate types. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [lib] crate-type = ["cdylib"] "#, ) .file("src/lib.rs", MINIMAL_LIB) .file("custom-target.json", SIMPLE_SPEC) .build(); p.cargo("build --lib --target custom-target.json -v") .with_status(101) .with_stderr("error: cannot produce cdylib for `foo [..]") .run(); // Enable dynamic linking. let spec_path = p.root().join("custom-target.json"); let spec = fs::read_to_string(&spec_path).unwrap(); let spec = spec.replace('{', "{\n\"dynamic-linking\": true,\n"); fs::write(&spec_path, spec).unwrap(); p.cargo("build --lib --target custom-target.json -v") .with_stderr( "\ [COMPILING] foo [..] [RUNNING] `rustc [..] [FINISHED] [..] 
", ) .run(); } #[cargo_test(nightly, reason = "requires features no_core, lang_items")] fn custom_target_ignores_filepath() { // Changing the path of the .json file will not trigger a rebuild. let p = project() .file( "src/lib.rs", &" __MINIMAL_LIB__ pub fn foo() -> u32 { 42 } " .replace("__MINIMAL_LIB__", MINIMAL_LIB), ) .file("b/custom-target.json", SIMPLE_SPEC) .file("a/custom-target.json", SIMPLE_SPEC) .build(); // Should build the library the first time. p.cargo("build --lib --target a/custom-target.json") .with_stderr( "\ [..]Compiling foo v0.0.1 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); // But not the second time, even though the path to the custom target is dfferent. p.cargo("build --lib --target b/custom-target.json") .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") .run(); } cargo-0.66.0/tests/testsuite/death.rs000066400000000000000000000062261432416201200175250ustar00rootroot00000000000000//! Tests for ctrl-C handling. 
use std::fs; use std::io::{self, Read}; use std::net::TcpListener; use std::process::{Child, Stdio}; use std::thread; use cargo_test_support::{project, slow_cpu_multiplier}; #[cargo_test] fn ctrl_c_kills_everyone() { let listener = TcpListener::bind("127.0.0.1:0").unwrap(); let addr = listener.local_addr().unwrap(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" "#, ) .file("src/lib.rs", "") .file( "build.rs", &format!( r#" use std::net::TcpStream; use std::io::Read; fn main() {{ let mut socket = TcpStream::connect("{}").unwrap(); let _ = socket.read(&mut [0; 10]); panic!("that read should never return"); }} "#, addr ), ) .build(); let mut cargo = p.cargo("build").build_command(); cargo .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .env("__CARGO_TEST_SETSID_PLEASE_DONT_USE_ELSEWHERE", "1"); let mut child = cargo.spawn().unwrap(); let mut sock = listener.accept().unwrap().0; ctrl_c(&mut child); assert!(!child.wait().unwrap().success()); match sock.read(&mut [0; 10]) { Ok(n) => assert_eq!(n, 0), Err(e) => assert_eq!(e.kind(), io::ErrorKind::ConnectionReset), } // Ok so what we just did was spawn cargo that spawned a build script, then // we killed cargo in hopes of it killing the build script as well. If all // went well the build script is now dead. On Windows, however, this is // enforced with job objects which means that it may actually be in the // *process* of being torn down at this point. // // Now on Windows we can't completely remove a file until all handles to it // have been closed. Including those that represent running processes. So if // we were to return here then there may still be an open reference to some // file in the build directory. What we want to actually do is wait for the // build script to *complete* exit. Take care of that by blowing away the // build directory here, and panicking if we eventually spin too long // without being able to. 
for i in 0..10 { match fs::remove_dir_all(&p.root().join("target")) { Ok(()) => return, Err(e) => println!("attempt {}: {}", i, e), } thread::sleep(slow_cpu_multiplier(100)); } panic!( "couldn't remove build directory after a few tries, seems like \ we won't be able to!" ); } #[cfg(unix)] pub fn ctrl_c(child: &mut Child) { let r = unsafe { libc::kill(-(child.id() as i32), libc::SIGINT) }; if r < 0 { panic!("failed to kill: {}", io::Error::last_os_error()); } } #[cfg(windows)] pub fn ctrl_c(child: &mut Child) { child.kill().unwrap(); } cargo-0.66.0/tests/testsuite/dep_info.rs000066400000000000000000000400651432416201200202220ustar00rootroot00000000000000//! Tests for dep-info files. This includes the dep-info file Cargo creates in //! the output directory, and the ones stored in the fingerprint. use cargo_test_support::compare::assert_match_exact; use cargo_test_support::paths::{self, CargoPathExt}; use cargo_test_support::registry::Package; use cargo_test_support::{ basic_bin_manifest, basic_manifest, main_file, project, rustc_host, Project, }; use filetime::FileTime; use std::fs; use std::path::Path; use std::str; // Helper for testing dep-info files in the fingerprint dir. #[track_caller] fn assert_deps(project: &Project, fingerprint: &str, test_cb: impl Fn(&Path, &[(u8, &str)])) { let mut files = project .glob(fingerprint) .map(|f| f.expect("unwrap glob result")) // Filter out `.json` entries. 
.filter(|f| f.extension().is_none()); let info_path = files .next() .unwrap_or_else(|| panic!("expected 1 dep-info file at {}, found 0", fingerprint)); assert!(files.next().is_none(), "expected only 1 dep-info file"); let dep_info = fs::read(&info_path).unwrap(); let dep_info = &mut &dep_info[..]; let deps = (0..read_usize(dep_info)) .map(|_| { ( read_u8(dep_info), str::from_utf8(read_bytes(dep_info)).unwrap(), ) }) .collect::>(); test_cb(&info_path, &deps); fn read_usize(bytes: &mut &[u8]) -> usize { let ret = &bytes[..4]; *bytes = &bytes[4..]; u32::from_le_bytes(ret.try_into().unwrap()) as usize } fn read_u8(bytes: &mut &[u8]) -> u8 { let ret = bytes[0]; *bytes = &bytes[1..]; ret } fn read_bytes<'a>(bytes: &mut &'a [u8]) -> &'a [u8] { let n = read_usize(bytes); let ret = &bytes[..n]; *bytes = &bytes[n..]; ret } } fn assert_deps_contains(project: &Project, fingerprint: &str, expected: &[(u8, &str)]) { assert_deps(project, fingerprint, |info_path, entries| { for (e_kind, e_path) in expected { let pattern = glob::Pattern::new(e_path).unwrap(); let count = entries .iter() .filter(|(kind, path)| kind == e_kind && pattern.matches(path)) .count(); if count != 1 { panic!( "Expected 1 match of {} {} in {:?}, got {}:\n{:#?}", e_kind, e_path, info_path, count, entries ); } } }) } #[cargo_test] fn build_dep_info() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("build").run(); let depinfo_bin_path = &p.bin("foo").with_extension("d"); assert!(depinfo_bin_path.is_file()); let depinfo = p.read_file(depinfo_bin_path.to_str().unwrap()); let bin_path = p.bin("foo"); let src_path = p.root().join("src").join("foo.rs"); if !depinfo.lines().any(|line| { line.starts_with(&format!("{}:", bin_path.display())) && line.contains(src_path.to_str().unwrap()) }) { panic!( "Could not find {:?}: {:?} in {:?}", bin_path, src_path, depinfo_bin_path ); } } #[cargo_test] fn build_dep_info_lib() { let p = 
project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [[example]] name = "ex" crate-type = ["lib"] "#, ) .file("build.rs", "fn main() {}") .file("src/lib.rs", "") .file("examples/ex.rs", "") .build(); p.cargo("build --example=ex").run(); assert!(p.example_lib("ex", "lib").with_extension("d").is_file()); } #[cargo_test] fn build_dep_info_rlib() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [[example]] name = "ex" crate-type = ["rlib"] "#, ) .file("src/lib.rs", "") .file("examples/ex.rs", "") .build(); p.cargo("build --example=ex").run(); assert!(p.example_lib("ex", "rlib").with_extension("d").is_file()); } #[cargo_test] fn build_dep_info_dylib() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [[example]] name = "ex" crate-type = ["dylib"] "#, ) .file("src/lib.rs", "") .file("examples/ex.rs", "") .build(); p.cargo("build --example=ex").run(); assert!(p.example_lib("ex", "dylib").with_extension("d").is_file()); } #[cargo_test] fn dep_path_inside_target_has_correct_path() { let p = project() .file("Cargo.toml", &basic_bin_manifest("a")) .file("target/debug/blah", "") .file( "src/main.rs", r#" fn main() { let x = include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/target/debug/blah")); } "#, ) .build(); p.cargo("build").run(); let depinfo_path = &p.bin("a").with_extension("d"); assert!(depinfo_path.is_file(), "{:?}", depinfo_path); let depinfo = p.read_file(depinfo_path.to_str().unwrap()); let bin_path = p.bin("a"); let target_debug_blah = Path::new("target").join("debug").join("blah"); if !depinfo.lines().any(|line| { line.starts_with(&format!("{}:", bin_path.display())) && line.contains(target_debug_blah.to_str().unwrap()) }) { panic!( "Could not find {:?}: {:?} in {:?}", bin_path, target_debug_blah, depinfo_path ); } } #[cargo_test] fn no_rewrite_if_no_change() { let p = project().file("src/lib.rs", "").build(); 
p.cargo("build").run(); let dep_info = p.root().join("target/debug/libfoo.d"); let metadata1 = dep_info.metadata().unwrap(); p.cargo("build").run(); let metadata2 = dep_info.metadata().unwrap(); assert_eq!( FileTime::from_last_modification_time(&metadata1), FileTime::from_last_modification_time(&metadata2), ); } #[cargo_test(nightly, reason = "-Z binary-dep-depinfo is unstable")] fn relative_depinfo_paths_ws() { // Test relative dep-info paths in a workspace with --target with // proc-macros and other dependency kinds. Package::new("regdep", "0.1.0") .file("src/lib.rs", "pub fn f() {}") .publish(); Package::new("pmdep", "0.1.0") .file("src/lib.rs", "pub fn f() {}") .publish(); Package::new("bdep", "0.1.0") .file("src/lib.rs", "pub fn f() {}") .publish(); let p = project() /*********** Workspace ***********/ .file( "Cargo.toml", r#" [workspace] members = ["foo"] "#, ) /*********** Main Project ***********/ .file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [dependencies] pm = {path = "../pm"} bar = {path = "../bar"} regdep = "0.1" [build-dependencies] bdep = "0.1" bar = {path = "../bar"} "#, ) .file( "foo/src/main.rs", r#" pm::noop!{} fn main() { bar::f(); regdep::f(); } "#, ) .file("foo/build.rs", "fn main() { bdep::f(); }") /*********** Proc Macro ***********/ .file( "pm/Cargo.toml", r#" [package] name = "pm" version = "0.1.0" edition = "2018" [lib] proc-macro = true [dependencies] pmdep = "0.1" "#, ) .file( "pm/src/lib.rs", r#" extern crate proc_macro; use proc_macro::TokenStream; #[proc_macro] pub fn noop(_item: TokenStream) -> TokenStream { pmdep::f(); "".parse().unwrap() } "#, ) /*********** Path Dependency `bar` ***********/ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn f() {}") .build(); let host = rustc_host(); p.cargo("build -Z binary-dep-depinfo --target") .arg(&host) .masquerade_as_nightly_cargo(&["binary-dep-depinfo"]) .with_stderr_contains("[COMPILING] foo [..]") .run(); 
assert_deps_contains( &p, "target/debug/.fingerprint/pm-*/dep-lib-pm", &[(0, "src/lib.rs"), (1, "debug/deps/libpmdep-*.rlib")], ); assert_deps_contains( &p, &format!("target/{}/debug/.fingerprint/foo-*/dep-bin-foo", host), &[ (0, "src/main.rs"), ( 1, &format!( "debug/deps/{}pm-*.{}", paths::get_lib_prefix("proc-macro"), paths::get_lib_extension("proc-macro") ), ), (1, &format!("{}/debug/deps/libbar-*.rlib", host)), (1, &format!("{}/debug/deps/libregdep-*.rlib", host)), ], ); assert_deps_contains( &p, "target/debug/.fingerprint/foo-*/dep-build-script-build-script-build", &[(0, "build.rs"), (1, "debug/deps/libbdep-*.rlib")], ); // Make sure it stays fresh. p.cargo("build -Z binary-dep-depinfo --target") .arg(&host) .masquerade_as_nightly_cargo(&["binary-dep-depinfo"]) .with_stderr("[FINISHED] dev [..]") .run(); } #[cargo_test(nightly, reason = "-Z binary-dep-depinfo is unstable")] fn relative_depinfo_paths_no_ws() { // Test relative dep-info paths without a workspace with proc-macros and // other dependency kinds. 
Package::new("regdep", "0.1.0") .file("src/lib.rs", "pub fn f() {}") .publish(); Package::new("pmdep", "0.1.0") .file("src/lib.rs", "pub fn f() {}") .publish(); Package::new("bdep", "0.1.0") .file("src/lib.rs", "pub fn f() {}") .publish(); let p = project() /*********** Main Project ***********/ .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [dependencies] pm = {path = "pm"} bar = {path = "bar"} regdep = "0.1" [build-dependencies] bdep = "0.1" bar = {path = "bar"} "#, ) .file( "src/main.rs", r#" pm::noop!{} fn main() { bar::f(); regdep::f(); } "#, ) .file("build.rs", "fn main() { bdep::f(); }") /*********** Proc Macro ***********/ .file( "pm/Cargo.toml", r#" [package] name = "pm" version = "0.1.0" edition = "2018" [lib] proc-macro = true [dependencies] pmdep = "0.1" "#, ) .file( "pm/src/lib.rs", r#" extern crate proc_macro; use proc_macro::TokenStream; #[proc_macro] pub fn noop(_item: TokenStream) -> TokenStream { pmdep::f(); "".parse().unwrap() } "#, ) /*********** Path Dependency `bar` ***********/ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn f() {}") .build(); p.cargo("build -Z binary-dep-depinfo") .masquerade_as_nightly_cargo(&["binary-dep-depinfo"]) .with_stderr_contains("[COMPILING] foo [..]") .run(); assert_deps_contains( &p, "target/debug/.fingerprint/pm-*/dep-lib-pm", &[(0, "src/lib.rs"), (1, "debug/deps/libpmdep-*.rlib")], ); assert_deps_contains( &p, "target/debug/.fingerprint/foo-*/dep-bin-foo", &[ (0, "src/main.rs"), ( 1, &format!( "debug/deps/{}pm-*.{}", paths::get_lib_prefix("proc-macro"), paths::get_lib_extension("proc-macro") ), ), (1, "debug/deps/libbar-*.rlib"), (1, "debug/deps/libregdep-*.rlib"), ], ); assert_deps_contains( &p, "target/debug/.fingerprint/foo-*/dep-build-script-build-script-build", &[(0, "build.rs"), (1, "debug/deps/libbdep-*.rlib")], ); // Make sure it stays fresh. 
p.cargo("build -Z binary-dep-depinfo") .masquerade_as_nightly_cargo(&["binary-dep-depinfo"]) .with_stderr("[FINISHED] dev [..]") .run(); } #[cargo_test] fn reg_dep_source_not_tracked() { // Make sure source files in dep-info file are not tracked for registry dependencies. Package::new("regdep", "0.1.0") .file("src/lib.rs", "pub fn f() {}") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] regdep = "0.1" "#, ) .file("src/lib.rs", "pub fn f() { regdep::f(); }") .build(); p.cargo("build").run(); assert_deps( &p, "target/debug/.fingerprint/regdep-*/dep-lib-regdep", |info_path, entries| { for (kind, path) in entries { if *kind == 1 { panic!( "Did not expect package root relative path type: {:?} in {:?}", path, info_path ); } } }, ); } #[cargo_test(nightly, reason = "-Z binary-dep-depinfo is unstable")] fn canonical_path() { if !cargo_test_support::symlink_supported() { return; } Package::new("regdep", "0.1.0") .file("src/lib.rs", "pub fn f() {}") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] regdep = "0.1" "#, ) .file("src/lib.rs", "pub fn f() { regdep::f(); }") .build(); let real = p.root().join("real_target"); real.mkdir_p(); p.symlink(real, "target"); p.cargo("build -Z binary-dep-depinfo") .masquerade_as_nightly_cargo(&["binary-dep-depinfo"]) .run(); assert_deps_contains( &p, "target/debug/.fingerprint/foo-*/dep-lib-foo", &[(0, "src/lib.rs"), (1, "debug/deps/libregdep-*.rmeta")], ); } #[cargo_test] fn non_local_build_script() { // Non-local build script information is not included. 
Package::new("bar", "1.0.0") .file( "build.rs", r#" fn main() { println!("cargo:rerun-if-changed=build.rs"); } "#, ) .file("src/lib.rs", "") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "1.0" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build").run(); let contents = p.read_file("target/debug/foo.d"); assert_match_exact( "[ROOT]/foo/target/debug/foo[EXE]: [ROOT]/foo/src/main.rs", &contents, ); } cargo-0.66.0/tests/testsuite/directory.rs000066400000000000000000000422261432416201200204440ustar00rootroot00000000000000//! Tests for directory sources. use std::collections::HashMap; use std::fs; use std::str; use serde::Serialize; use cargo_test_support::cargo_process; use cargo_test_support::git; use cargo_test_support::paths; use cargo_test_support::registry::{cksum, Package}; use cargo_test_support::{basic_manifest, project, t, ProjectBuilder}; fn setup() { let root = paths::root(); t!(fs::create_dir(&root.join(".cargo"))); t!(fs::write( root.join(".cargo/config"), r#" [source.crates-io] replace-with = 'my-awesome-local-registry' [source.my-awesome-local-registry] directory = 'index' "# )); } struct VendorPackage { p: Option, cksum: Checksum, } #[derive(Serialize)] struct Checksum { package: Option, files: HashMap, } impl VendorPackage { fn new(name: &str) -> VendorPackage { VendorPackage { p: Some(project().at(&format!("index/{}", name))), cksum: Checksum { package: Some(String::new()), files: HashMap::new(), }, } } fn file(&mut self, name: &str, contents: &str) -> &mut VendorPackage { self.p = Some(self.p.take().unwrap().file(name, contents)); self.cksum .files .insert(name.to_string(), cksum(contents.as_bytes())); self } fn disable_checksum(&mut self) -> &mut VendorPackage { self.cksum.package = None; self } fn no_manifest(mut self) -> Self { self.p = self.p.map(|pb| pb.no_manifest()); self } fn build(&mut self) { let p = self.p.take().unwrap(); let json = 
serde_json::to_string(&self.cksum).unwrap(); let p = p.file(".cargo-checksum.json", &json); let _ = p.build(); } } #[cargo_test] fn simple() { setup(); VendorPackage::new("bar") .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "pub fn bar() {}") .build(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [dependencies] bar = "0.1.0" "#, ) .file( "src/lib.rs", "extern crate bar; pub fn foo() { bar::bar(); }", ) .build(); p.cargo("build") .with_stderr( "\ [COMPILING] bar v0.1.0 [COMPILING] foo v0.1.0 ([CWD]) [FINISHED] [..] ", ) .run(); } #[cargo_test] fn simple_install() { setup(); VendorPackage::new("foo") .file("src/lib.rs", "pub fn foo() {}") .build(); VendorPackage::new("bar") .file( "Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] [dependencies] foo = "0.0.1" "#, ) .file( "src/main.rs", "extern crate foo; pub fn main() { foo::foo(); }", ) .build(); cargo_process("install bar") .with_stderr( "\ [INSTALLING] bar v0.1.0 [COMPILING] foo v0.0.1 [COMPILING] bar v0.1.0 [FINISHED] release [optimized] target(s) in [..]s [INSTALLING] [..]bar[..] 
[INSTALLED] package `bar v0.1.0` (executable `bar[EXE]`) [WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries ", ) .run(); } #[cargo_test] fn simple_install_fail() { setup(); VendorPackage::new("foo") .file("src/lib.rs", "pub fn foo() {}") .build(); VendorPackage::new("bar") .file( "Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] [dependencies] foo = "0.1.0" baz = "9.8.7" "#, ) .file( "src/main.rs", "extern crate foo; pub fn main() { foo::foo(); }", ) .build(); cargo_process("install bar") .with_status(101) .with_stderr( " Installing bar v0.1.0 error: failed to compile `bar v0.1.0`, intermediate artifacts can be found at `[..]` Caused by: no matching package found searched package name: `baz` perhaps you meant: bar or foo location searched: registry `crates-io` required by package `bar v0.1.0` ", ) .run(); } #[cargo_test] fn install_without_feature_dep() { setup(); VendorPackage::new("foo") .file("src/lib.rs", "pub fn foo() {}") .build(); VendorPackage::new("bar") .file( "Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] [dependencies] foo = "0.0.1" baz = { version = "9.8.7", optional = true } [features] wantbaz = ["baz"] "#, ) .file( "src/main.rs", "extern crate foo; pub fn main() { foo::foo(); }", ) .build(); cargo_process("install bar") .with_stderr( "\ [INSTALLING] bar v0.1.0 [COMPILING] foo v0.0.1 [COMPILING] bar v0.1.0 [FINISHED] release [optimized] target(s) in [..]s [INSTALLING] [..]bar[..] 
[INSTALLED] package `bar v0.1.0` (executable `bar[EXE]`) [WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries ", ) .run(); } #[cargo_test] fn not_there() { setup(); let _ = project().at("index").build(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [dependencies] bar = "0.1.0" "#, ) .file( "src/lib.rs", "extern crate bar; pub fn foo() { bar::bar(); }", ) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: no matching package named `bar` found location searched: [..] required by package `foo v0.1.0 ([..])` ", ) .run(); } #[cargo_test] fn multiple() { setup(); VendorPackage::new("bar-0.1.0") .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "pub fn bar() {}") .file(".cargo-checksum", "") .build(); VendorPackage::new("bar-0.2.0") .file("Cargo.toml", &basic_manifest("bar", "0.2.0")) .file("src/lib.rs", "pub fn bar() {}") .file(".cargo-checksum", "") .build(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [dependencies] bar = "0.1.0" "#, ) .file( "src/lib.rs", "extern crate bar; pub fn foo() { bar::bar(); }", ) .build(); p.cargo("build") .with_stderr( "\ [COMPILING] bar v0.1.0 [COMPILING] foo v0.1.0 ([CWD]) [FINISHED] [..] ", ) .run(); } #[cargo_test] fn crates_io_then_directory() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [dependencies] bar = "0.1.0" "#, ) .file( "src/lib.rs", "extern crate bar; pub fn foo() { bar::bar(); }", ) .build(); let cksum = Package::new("bar", "0.1.0") .file("src/lib.rs", "pub fn bar() -> u32 { 0 }") .publish(); p.cargo("build") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... [DOWNLOADED] bar v0.1.0 ([..]) [COMPILING] bar v0.1.0 [COMPILING] foo v0.1.0 ([CWD]) [FINISHED] [..] 
", ) .run(); setup(); let mut v = VendorPackage::new("bar"); v.file("Cargo.toml", &basic_manifest("bar", "0.1.0")); v.file("src/lib.rs", "pub fn bar() -> u32 { 1 }"); v.cksum.package = Some(cksum); v.build(); p.cargo("build") .with_stderr( "\ [COMPILING] bar v0.1.0 [COMPILING] foo v0.1.0 ([CWD]) [FINISHED] [..] ", ) .run(); } #[cargo_test] fn crates_io_then_bad_checksum() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [dependencies] bar = "0.1.0" "#, ) .file("src/lib.rs", "") .build(); Package::new("bar", "0.1.0").publish(); p.cargo("build").run(); setup(); VendorPackage::new("bar") .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: checksum for `bar v0.1.0` changed between lock files this could be indicative of a few possible errors: * the lock file is corrupt * a replacement source in use (e.g., a mirror) returned a different checksum * the source itself may be corrupt in one way or another unable to verify that `bar v0.1.0` is the same as when the lockfile was generated ", ) .run(); } #[cargo_test] fn bad_file_checksum() { setup(); VendorPackage::new("bar") .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "") .build(); t!(fs::write( paths::root().join("index/bar/src/lib.rs"), "fn bar() -> u32 { 0 }" )); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [dependencies] bar = "0.1.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: the listed checksum of `[..]lib.rs` has changed: expected: [..] actual: [..] 
directory sources are not intended to be edited, if modifications are \ required then it is recommended that `[patch]` is used with a forked copy of \ the source ", ) .run(); } #[cargo_test] fn only_dot_files_ok() { setup(); VendorPackage::new("bar") .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "") .build(); VendorPackage::new("foo") .no_manifest() .file(".bar", "") .build(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [dependencies] bar = "0.1.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build").run(); } #[cargo_test] fn random_files_ok() { setup(); VendorPackage::new("bar") .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "") .build(); VendorPackage::new("foo") .no_manifest() .file("bar", "") .file("../test", "") .build(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [dependencies] bar = "0.1.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build").run(); } #[cargo_test] fn git_lock_file_doesnt_change() { let git = git::new("git", |p| { p.file("Cargo.toml", &basic_manifest("git", "0.5.0")) .file("src/lib.rs", "") }); VendorPackage::new("git") .file("Cargo.toml", &basic_manifest("git", "0.5.0")) .file("src/lib.rs", "") .disable_checksum() .build(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] git = {{ git = '{0}' }} "#, git.url() ), ) .file("src/lib.rs", "") .build(); p.cargo("build").run(); let lock1 = p.read_lockfile(); let root = paths::root(); t!(fs::create_dir(&root.join(".cargo"))); t!(fs::write( root.join(".cargo/config"), format!( r#" [source.my-git-repo] git = '{}' replace-with = 'my-awesome-local-registry' [source.my-awesome-local-registry] directory = 'index' "#, git.url() ) )); p.cargo("build") .with_stderr( "\ [COMPILING] [..] [COMPILING] [..] [FINISHED] [..] 
", ) .run(); let lock2 = p.read_lockfile(); assert_eq!(lock1, lock2, "lock files changed"); } #[cargo_test] fn git_override_requires_lockfile() { VendorPackage::new("git") .file("Cargo.toml", &basic_manifest("git", "0.5.0")) .file("src/lib.rs", "") .disable_checksum() .build(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] git = { git = 'https://example.com/' } "#, ) .file("src/lib.rs", "") .build(); let root = paths::root(); t!(fs::create_dir(&root.join(".cargo"))); t!(fs::write( root.join(".cargo/config"), r#" [source.my-git-repo] git = 'https://example.com/' replace-with = 'my-awesome-local-registry' [source.my-awesome-local-registry] directory = 'index' "# )); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to get `git` as a dependency of package `foo v0.0.1 ([..])` Caused by: failed to load source for dependency `git` Caused by: Unable to update [..] Caused by: the source my-git-repo requires a lock file to be present first before it can be used against vendored source code remove the source replacement configuration, generate a lock file, and then restore the source replacement configuration to continue the build ", ) .run(); } #[cargo_test] fn workspace_different_locations() { let p = project() .no_manifest() .file( "foo/Cargo.toml", r#" [package] name = 'foo' version = '0.1.0' [dependencies] baz = "*" "#, ) .file("foo/src/lib.rs", "") .file("foo/vendor/baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("foo/vendor/baz/src/lib.rs", "") .file("foo/vendor/baz/.cargo-checksum.json", "{\"files\":{}}") .file( "bar/Cargo.toml", r#" [package] name = 'bar' version = '0.1.0' [dependencies] baz = "*" "#, ) .file("bar/src/lib.rs", "") .file( ".cargo/config", r#" [build] target-dir = './target' [source.crates-io] replace-with = 'my-awesome-local-registry' [source.my-awesome-local-registry] directory = 'foo/vendor' "#, ) .build(); p.cargo("build").cwd("foo").run(); p.cargo("build") 
.cwd("bar") .with_stderr( "\ [COMPILING] bar [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn version_missing() { setup(); VendorPackage::new("foo") .file("src/lib.rs", "pub fn foo() {}") .build(); VendorPackage::new("bar") .file( "Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] [dependencies] foo = "2" "#, ) .file("src/main.rs", "fn main() {}") .build(); cargo_process("install bar") .with_stderr( "\ [INSTALLING] bar v0.1.0 error: failed to compile [..] Caused by: failed to select a version for the requirement `foo = \"^2\"` candidate versions found which didn't match: 0.0.1 location searched: directory source `[..] (which is replacing registry `[..]`) required by package `bar v0.1.0` perhaps a crate was updated and forgotten to be re-vendored? ", ) .with_status(101) .run(); } cargo-0.66.0/tests/testsuite/doc.rs000066400000000000000000002173711432416201200172120ustar00rootroot00000000000000//! Tests for the `cargo doc` command. use cargo::core::compiler::RustDocFingerprint; use cargo_test_support::paths::CargoPathExt; use cargo_test_support::registry::Package; use cargo_test_support::{basic_lib_manifest, basic_manifest, git, project}; use cargo_test_support::{rustc_host, symlink_supported, tools}; use std::fs; use std::str; #[cargo_test] fn simple() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" "#, ) .file("build.rs", "fn main() {}") .file("src/lib.rs", "pub fn foo() {}") .build(); p.cargo("doc") .with_stderr( "\ [..] foo v0.0.1 ([CWD]) [..] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); assert!(p.root().join("target/doc").is_dir()); assert!(p.root().join("target/doc/foo/index.html").is_file()); } #[cargo_test] fn doc_no_libs() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [[bin]] name = "foo" doc = false "#, ) .file("src/main.rs", "bad code") .build(); p.cargo("doc").run(); } #[cargo_test] fn doc_twice() { let p = project().file("src/lib.rs", "pub fn foo() {}").build(); p.cargo("doc") .with_stderr( "\ [DOCUMENTING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("doc").with_stdout("").run(); } #[cargo_test] fn doc_deps() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" "#, ) .file("src/lib.rs", "extern crate bar; pub fn foo() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "pub fn bar() {}") .build(); p.cargo("doc") .with_stderr( "\ [..] bar v0.0.1 ([CWD]/bar) [..] bar v0.0.1 ([CWD]/bar) [DOCUMENTING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); assert!(p.root().join("target/doc").is_dir()); assert!(p.root().join("target/doc/foo/index.html").is_file()); assert!(p.root().join("target/doc/bar/index.html").is_file()); // Verify that it only emits rmeta for the dependency. 
assert_eq!(p.glob("target/debug/**/*.rlib").count(), 0); assert_eq!(p.glob("target/debug/deps/libbar-*.rmeta").count(), 1); p.cargo("doc") .env("CARGO_LOG", "cargo::ops::cargo_rustc::fingerprint") .with_stdout("") .run(); assert!(p.root().join("target/doc").is_dir()); assert!(p.root().join("target/doc/foo/index.html").is_file()); assert!(p.root().join("target/doc/bar/index.html").is_file()); } #[cargo_test] fn doc_no_deps() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" "#, ) .file("src/lib.rs", "extern crate bar; pub fn foo() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "pub fn bar() {}") .build(); p.cargo("doc --no-deps") .with_stderr( "\ [CHECKING] bar v0.0.1 ([CWD]/bar) [DOCUMENTING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); assert!(p.root().join("target/doc").is_dir()); assert!(p.root().join("target/doc/foo/index.html").is_file()); assert!(!p.root().join("target/doc/bar/index.html").is_file()); } #[cargo_test] fn doc_only_bin() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" "#, ) .file("src/main.rs", "extern crate bar; pub fn foo() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "pub fn bar() {}") .build(); p.cargo("doc -v").run(); assert!(p.root().join("target/doc").is_dir()); assert!(p.root().join("target/doc/bar/index.html").is_file()); assert!(p.root().join("target/doc/foo/index.html").is_file()); } #[cargo_test] fn doc_multiple_targets_same_name_lib() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["foo", "bar"] "#, ) .file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [lib] name = "foo_lib" "#, ) .file("foo/src/lib.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" [lib] name = "foo_lib" "#, ) 
.file("bar/src/lib.rs", "") .build(); p.cargo("doc --workspace") .with_status(101) .with_stderr( "\ error: document output filename collision The lib `foo_lib` in package `foo v0.1.0 ([ROOT]/foo/foo)` has the same name as \ the lib `foo_lib` in package `bar v0.1.0 ([ROOT]/foo/bar)`. Only one may be documented at once since they output to the same path. Consider documenting only one, renaming one, or marking one with `doc = false` in Cargo.toml. ", ) .run(); } #[cargo_test] fn doc_multiple_targets_same_name() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["foo", "bar"] "#, ) .file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [[bin]] name = "foo_lib" path = "src/foo_lib.rs" "#, ) .file("foo/src/foo_lib.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" [lib] name = "foo_lib" "#, ) .file("bar/src/lib.rs", "") .build(); p.cargo("doc --workspace") .with_stderr_unordered( "\ warning: output filename collision. The bin target `foo_lib` in package `foo v0.1.0 ([ROOT]/foo/foo)` \ has the same output filename as the lib target `foo_lib` in package \ `bar v0.1.0 ([ROOT]/foo/bar)`. Colliding filename is: [ROOT]/foo/target/doc/foo_lib/index.html The targets should have unique names. This is a known bug where multiple crates with the same name use the same path; see . [DOCUMENTING] bar v0.1.0 ([ROOT]/foo/bar) [DOCUMENTING] foo v0.1.0 ([ROOT]/foo/foo) [FINISHED] [..] 
", ) .run(); } #[cargo_test] fn doc_multiple_targets_same_name_bin() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["foo", "bar"] "#, ) .file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.1.0" "#, ) .file("foo/src/bin/foo-cli.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" "#, ) .file("bar/src/bin/foo-cli.rs", "") .build(); p.cargo("doc --workspace") .with_status(101) .with_stderr( "\ error: document output filename collision The bin `foo-cli` in package `foo v0.1.0 ([ROOT]/foo/foo)` has the same name as \ the bin `foo-cli` in package `bar v0.1.0 ([ROOT]/foo/bar)`. Only one may be documented at once since they output to the same path. Consider documenting only one, renaming one, or marking one with `doc = false` in Cargo.toml. ", ) .run(); } #[cargo_test] fn doc_multiple_targets_same_name_undoced() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["foo", "bar"] "#, ) .file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [[bin]] name = "foo-cli" "#, ) .file("foo/src/foo-cli.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" [[bin]] name = "foo-cli" doc = false "#, ) .file("bar/src/foo-cli.rs", "") .build(); p.cargo("doc --workspace").run(); } #[cargo_test] fn doc_lib_bin_same_name_documents_lib() { let p = project() .file( "src/main.rs", r#" //! Binary documentation extern crate foo; fn main() { foo::foo(); } "#, ) .file( "src/lib.rs", r#" //! Library documentation pub fn foo() {} "#, ) .build(); p.cargo("doc") .with_stderr( "\ [DOCUMENTING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); let doc_html = p.read_file("target/doc/foo/index.html"); assert!(doc_html.contains("Library")); assert!(!doc_html.contains("Binary")); } #[cargo_test] fn doc_lib_bin_same_name_documents_lib_when_requested() { let p = project() .file( "src/main.rs", r#" //! 
Binary documentation extern crate foo; fn main() { foo::foo(); } "#, ) .file( "src/lib.rs", r#" //! Library documentation pub fn foo() {} "#, ) .build(); p.cargo("doc --lib") .with_stderr( "\ [DOCUMENTING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); let doc_html = p.read_file("target/doc/foo/index.html"); assert!(doc_html.contains("Library")); assert!(!doc_html.contains("Binary")); } #[cargo_test] fn doc_lib_bin_same_name_documents_named_bin_when_requested() { let p = project() .file( "src/main.rs", r#" //! Binary documentation extern crate foo; fn main() { foo::foo(); } "#, ) .file( "src/lib.rs", r#" //! Library documentation pub fn foo() {} "#, ) .build(); p.cargo("doc --bin foo") // The checking/documenting lines are sometimes swapped since they run // concurrently. .with_stderr_unordered( "\ warning: output filename collision. The bin target `foo` in package `foo v0.0.1 ([ROOT]/foo)` \ has the same output filename as the lib target `foo` in package `foo v0.0.1 ([ROOT]/foo)`. Colliding filename is: [ROOT]/foo/target/doc/foo/index.html The targets should have unique names. This is a known bug where multiple crates with the same name use the same path; see . [CHECKING] foo v0.0.1 ([CWD]) [DOCUMENTING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); let doc_html = p.read_file("target/doc/foo/index.html"); assert!(!doc_html.contains("Library")); assert!(doc_html.contains("Binary")); } #[cargo_test] fn doc_lib_bin_same_name_documents_bins_when_requested() { let p = project() .file( "src/main.rs", r#" //! Binary documentation extern crate foo; fn main() { foo::foo(); } "#, ) .file( "src/lib.rs", r#" //! Library documentation pub fn foo() {} "#, ) .build(); p.cargo("doc --bins") // The checking/documenting lines are sometimes swapped since they run // concurrently. .with_stderr_unordered( "\ warning: output filename collision. 
The bin target `foo` in package `foo v0.0.1 ([ROOT]/foo)` \ has the same output filename as the lib target `foo` in package `foo v0.0.1 ([ROOT]/foo)`. Colliding filename is: [ROOT]/foo/target/doc/foo/index.html The targets should have unique names. This is a known bug where multiple crates with the same name use the same path; see . [CHECKING] foo v0.0.1 ([CWD]) [DOCUMENTING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); let doc_html = p.read_file("target/doc/foo/index.html"); assert!(!doc_html.contains("Library")); assert!(doc_html.contains("Binary")); } #[cargo_test] fn doc_lib_bin_example_same_name_documents_named_example_when_requested() { let p = project() .file( "src/main.rs", r#" //! Binary documentation extern crate foo; fn main() { foo::foo(); } "#, ) .file( "src/lib.rs", r#" //! Library documentation pub fn foo() {} "#, ) .file( "examples/ex1.rs", r#" //! Example1 documentation pub fn x() { f(); } "#, ) .build(); p.cargo("doc --example ex1") // The checking/documenting lines are sometimes swapped since they run // concurrently. .with_stderr_unordered( "\ [CHECKING] foo v0.0.1 ([CWD]) [DOCUMENTING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", ) .run(); let doc_html = p.read_file("target/doc/ex1/index.html"); assert!(!doc_html.contains("Library")); assert!(!doc_html.contains("Binary")); assert!(doc_html.contains("Example1")); } #[cargo_test] fn doc_lib_bin_example_same_name_documents_examples_when_requested() { let p = project() .file( "src/main.rs", r#" //! Binary documentation extern crate foo; fn main() { foo::foo(); } "#, ) .file( "src/lib.rs", r#" //! Library documentation pub fn foo() {} "#, ) .file( "examples/ex1.rs", r#" //! Example1 documentation pub fn example1() { f(); } "#, ) .file( "examples/ex2.rs", r#" //! 
Example2 documentation pub fn example2() { f(); } "#, ) .build(); p.cargo("doc --examples") // The checking/documenting lines are sometimes swapped since they run // concurrently. .with_stderr_unordered( "\ [CHECKING] foo v0.0.1 ([CWD]) [DOCUMENTING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", ) .run(); let example_doc_html_1 = p.read_file("target/doc/ex1/index.html"); let example_doc_html_2 = p.read_file("target/doc/ex2/index.html"); assert!(!example_doc_html_1.contains("Library")); assert!(!example_doc_html_1.contains("Binary")); assert!(!example_doc_html_2.contains("Library")); assert!(!example_doc_html_2.contains("Binary")); assert!(example_doc_html_1.contains("Example1")); assert!(example_doc_html_2.contains("Example2")); } #[cargo_test] fn doc_dash_p() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.a] path = "a" "#, ) .file("src/lib.rs", "extern crate a;") .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] [dependencies.b] path = "../b" "#, ) .file("a/src/lib.rs", "extern crate b;") .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) .file("b/src/lib.rs", "") .build(); p.cargo("doc -p a") .with_stderr( "\ [..] b v0.0.1 ([CWD]/b) [..] b v0.0.1 ([CWD]/b) [DOCUMENTING] a v0.0.1 ([CWD]/a) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn doc_all_exclude() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar", "baz"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }") .build(); p.cargo("doc --workspace --exclude baz") .with_stderr_does_not_contain("[DOCUMENTING] baz v0.1.0 [..]") .with_stderr( "\ [DOCUMENTING] bar v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn doc_all_exclude_glob() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar", "baz"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }") .build(); p.cargo("doc --workspace --exclude '*z'") .with_stderr_does_not_contain("[DOCUMENTING] baz v0.1.0 [..]") .with_stderr( "\ [DOCUMENTING] bar v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn doc_same_name() { let p = project() .file("src/lib.rs", "") .file("src/bin/main.rs", "fn main() {}") .file("examples/main.rs", "fn main() {}") .file("tests/main.rs", "fn main() {}") .build(); p.cargo("doc").run(); } #[cargo_test(nightly, reason = "no_core, lang_items requires nightly")] fn doc_target() { const TARGET: &str = "arm-unknown-linux-gnueabihf"; let p = project() .file( "src/lib.rs", r#" #![feature(no_core, lang_items)] #![no_core] #[lang = "sized"] trait Sized {} extern { pub static A: u32; } "#, ) .build(); p.cargo("doc --verbose --target").arg(TARGET).run(); assert!(p.root().join(&format!("target/{}/doc", TARGET)).is_dir()); assert!(p .root() .join(&format!("target/{}/doc/foo/index.html", TARGET)) .is_file()); } #[cargo_test] fn target_specific_not_documented() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [target.foo.dependencies] a = { path = "a" } "#, ) .file("src/lib.rs", "") .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) .file("a/src/lib.rs", "not rust") .build(); p.cargo("doc").run(); } #[cargo_test] fn output_not_captured() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] a = { path = "a" } "#, ) .file("src/lib.rs", "") .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) .file( "a/src/lib.rs", " /// ``` /// ` /// ``` 
pub fn foo() {} ", ) .build(); p.cargo("doc") .with_stderr_contains("[..]unknown start of token: `") .run(); } #[cargo_test] fn target_specific_documented() { let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [target.foo.dependencies] a = {{ path = "a" }} [target.{}.dependencies] a = {{ path = "a" }} "#, rustc_host() ), ) .file( "src/lib.rs", " extern crate a; /// test pub fn foo() {} ", ) .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) .file( "a/src/lib.rs", " /// test pub fn foo() {} ", ) .build(); p.cargo("doc").run(); } #[cargo_test] fn no_document_build_deps() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [build-dependencies] a = { path = "a" } "#, ) .file("src/lib.rs", "pub fn foo() {}") .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) .file( "a/src/lib.rs", " /// ``` /// β˜ƒ /// ``` pub fn foo() {} ", ) .build(); p.cargo("doc").run(); } #[cargo_test] fn doc_release() { let p = project().file("src/lib.rs", "").build(); p.cargo("build --release").run(); p.cargo("doc --release -v") .with_stderr( "\ [DOCUMENTING] foo v0.0.1 ([..]) [RUNNING] `rustdoc [..] src/lib.rs [..]` [FINISHED] release [optimized] target(s) in [..] 
", ) .run(); } #[cargo_test] fn doc_multiple_deps() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" [dependencies.baz] path = "baz" "#, ) .file("src/lib.rs", "extern crate bar; pub fn foo() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "pub fn bar() {}") .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1")) .file("baz/src/lib.rs", "pub fn baz() {}") .build(); p.cargo("doc -p bar -p baz -v").run(); assert!(p.root().join("target/doc").is_dir()); assert!(p.root().join("target/doc/bar/index.html").is_file()); assert!(p.root().join("target/doc/baz/index.html").is_file()); } #[cargo_test] fn features() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" [features] foo = ["bar/bar"] "#, ) .file("src/lib.rs", r#"#[cfg(feature = "foo")] pub fn foo() {}"#) .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [features] bar = [] "#, ) .file( "bar/build.rs", r#" fn main() { println!("cargo:rustc-cfg=bar"); } "#, ) .file( "bar/src/lib.rs", r#"#[cfg(feature = "bar")] pub fn bar() {}"#, ) .build(); p.cargo("doc --features foo") .with_stderr( "\ [COMPILING] bar v0.0.1 [..] [DOCUMENTING] bar v0.0.1 [..] [DOCUMENTING] foo v0.0.1 [..] [FINISHED] [..] ", ) .run(); assert!(p.root().join("target/doc").is_dir()); assert!(p.root().join("target/doc/foo/fn.foo.html").is_file()); assert!(p.root().join("target/doc/bar/fn.bar.html").is_file()); // Check that turning the feature off will remove the files. p.cargo("doc") .with_stderr( "\ [COMPILING] bar v0.0.1 [..] [DOCUMENTING] bar v0.0.1 [..] [DOCUMENTING] foo v0.0.1 [..] [FINISHED] [..] ", ) .run(); assert!(!p.root().join("target/doc/foo/fn.foo.html").is_file()); assert!(!p.root().join("target/doc/bar/fn.bar.html").is_file()); // And switching back will rebuild and bring them back. 
p.cargo("doc --features foo") .with_stderr( "\ [DOCUMENTING] bar v0.0.1 [..] [DOCUMENTING] foo v0.0.1 [..] [FINISHED] [..] ", ) .run(); assert!(p.root().join("target/doc/foo/fn.foo.html").is_file()); assert!(p.root().join("target/doc/bar/fn.bar.html").is_file()); } #[cargo_test] fn rerun_when_dir_removed() { let p = project() .file( "src/lib.rs", r#" /// dox pub fn foo() {} "#, ) .build(); p.cargo("doc").run(); assert!(p.root().join("target/doc/foo/index.html").is_file()); fs::remove_dir_all(p.root().join("target/doc/foo")).unwrap(); p.cargo("doc").run(); assert!(p.root().join("target/doc/foo/index.html").is_file()); } #[cargo_test] fn document_only_lib() { let p = project() .file( "src/lib.rs", r#" /// dox pub fn foo() {} "#, ) .file( "src/bin/bar.rs", r#" /// ``` /// β˜ƒ /// ``` pub fn foo() {} fn main() { foo(); } "#, ) .build(); p.cargo("doc --lib").run(); assert!(p.root().join("target/doc/foo/index.html").is_file()); } #[cargo_test] fn plugins_no_use_target() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [lib] proc-macro = true "#, ) .file("src/lib.rs", "") .build(); p.cargo("doc --target=x86_64-unknown-openbsd -v").run(); } #[cargo_test] fn doc_all_workspace() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [dependencies] bar = { path = "bar" } [workspace] "#, ) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .build(); // The order in which bar is compiled or documented is not deterministic p.cargo("doc --workspace") .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])") .with_stderr_contains("[..] Checking bar v0.1.0 ([..])") .with_stderr_contains("[..] 
Documenting foo v0.1.0 ([..])") .run(); } #[cargo_test] fn doc_all_virtual_manifest() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar", "baz"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/lib.rs", "pub fn baz() {}") .build(); // The order in which bar and baz are documented is not guaranteed p.cargo("doc --workspace") .with_stderr_contains("[..] Documenting baz v0.1.0 ([..])") .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])") .run(); } #[cargo_test] fn doc_virtual_manifest_all_implied() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar", "baz"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/lib.rs", "pub fn baz() {}") .build(); // The order in which bar and baz are documented is not guaranteed p.cargo("doc") .with_stderr_contains("[..] Documenting baz v0.1.0 ([..])") .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])") .run(); } #[cargo_test] fn doc_virtual_manifest_one_project() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar", "baz"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }") .build(); p.cargo("doc -p bar") .with_stderr_does_not_contain("[DOCUMENTING] baz v0.1.0 [..]") .with_stderr( "\ [DOCUMENTING] bar v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn doc_virtual_manifest_glob() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar", "baz"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() { break_the_build(); }") .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/lib.rs", "pub fn baz() {}") .build(); p.cargo("doc -p '*z'") .with_stderr_does_not_contain("[DOCUMENTING] bar v0.1.0 [..]") .with_stderr( "\ [DOCUMENTING] baz v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn doc_all_member_dependency_same_name() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar"] "#, ) .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.1.0" [dependencies] bar = "0.1.0" "#, ) .file("bar/src/lib.rs", "pub fn bar() {}") .build(); Package::new("bar", "0.1.0").publish(); p.cargo("doc --workspace") .with_stderr_unordered( "\ [UPDATING] [..] [DOWNLOADING] crates ... [DOWNLOADED] bar v0.1.0 (registry `dummy-registry`) warning: output filename collision. The lib target `bar` in package `bar v0.1.0` has the same output filename as \ the lib target `bar` in package `bar v0.1.0 ([ROOT]/foo/bar)`. Colliding filename is: [ROOT]/foo/target/doc/bar/index.html The targets should have unique names. This is a known bug where multiple crates with the same name use the same path; see . [DOCUMENTING] bar v0.1.0 [CHECKING] bar v0.1.0 [DOCUMENTING] bar v0.1.0 [..] [FINISHED] [..] 
", ) .run(); } #[cargo_test] fn doc_workspace_open_help_message() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["foo", "bar"] "#, ) .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("foo/src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "") .build(); // The order in which bar is compiled or documented is not deterministic p.cargo("doc --workspace --open") .env("BROWSER", tools::echo()) .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])") .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])") .with_stderr_contains("[..] Opening [..]/bar/index.html") .run(); } #[cargo_test(nightly, reason = "-Zextern-html-root-url is unstable")] fn doc_extern_map_local() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" "#, ) .file("src/lib.rs", "") .file(".cargo/config.toml", "doc.extern-map.std = 'local'") .build(); p.cargo("doc -v --no-deps -Zrustdoc-map --open") .env("BROWSER", tools::echo()) .masquerade_as_nightly_cargo(&["rustdoc-map"]) .with_stderr( "\ [DOCUMENTING] foo v0.1.0 [..] [RUNNING] `rustdoc --crate-type lib --crate-name foo src/lib.rs [..]--crate-version 0.1.0` [FINISHED] [..] 
Opening [CWD]/target/doc/foo/index.html ", ) .run(); } #[cargo_test] fn open_no_doc_crate() { let p = project() .file( "Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] [lib] doc = false "#, ) .file("src/lib.rs", "#[cfg(feature)] pub fn f();") .build(); p.cargo("doc --open") .env("BROWSER", "do_not_run_me") .with_status(101) .with_stderr_contains("error: no crates with documentation") .run(); } #[cargo_test] fn doc_workspace_open_different_library_and_package_names() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["foo"] "#, ) .file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [lib] name = "foolib" "#, ) .file("foo/src/lib.rs", "") .build(); p.cargo("doc --open") .env("BROWSER", tools::echo()) .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])") .with_stderr_contains("[..] [CWD]/target/doc/foolib/index.html") .with_stdout_contains("[CWD]/target/doc/foolib/index.html") .run(); p.change_file( ".cargo/config.toml", &format!( r#" [doc] browser = ["{}", "a"] "#, tools::echo().display().to_string().replace('\\', "\\\\") ), ); // check that the cargo config overrides the browser env var p.cargo("doc --open") .env("BROWSER", "do_not_run_me") .with_stdout_contains("a [CWD]/target/doc/foolib/index.html") .run(); } #[cargo_test] fn doc_workspace_open_binary() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["foo"] "#, ) .file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [[bin]] name = "foobin" path = "src/main.rs" "#, ) .file("foo/src/main.rs", "") .build(); p.cargo("doc --open") .env("BROWSER", tools::echo()) .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])") .with_stderr_contains("[..] 
Opening [CWD]/target/doc/foobin/index.html") .run(); } #[cargo_test] fn doc_workspace_open_binary_and_library() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["foo"] "#, ) .file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [lib] name = "foolib" [[bin]] name = "foobin" path = "src/main.rs" "#, ) .file("foo/src/lib.rs", "") .file("foo/src/main.rs", "") .build(); p.cargo("doc --open") .env("BROWSER", tools::echo()) .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])") .with_stderr_contains("[..] Opening [CWD]/target/doc/foolib/index.html") .run(); } #[cargo_test] fn doc_edition() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] edition = "2018" "#, ) .file("src/lib.rs", "") .build(); p.cargo("doc -v") .with_stderr_contains("[RUNNING] `rustdoc [..]--edition=2018[..]") .run(); p.cargo("test -v") .with_stderr_contains("[RUNNING] `rustdoc [..]--edition=2018[..]") .run(); } #[cargo_test] fn doc_target_edition() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [lib] edition = "2018" "#, ) .file("src/lib.rs", "") .build(); p.cargo("doc -v") .with_stderr_contains("[RUNNING] `rustdoc [..]--edition=2018[..]") .run(); p.cargo("test -v") .with_stderr_contains("[RUNNING] `rustdoc [..]--edition=2018[..]") .run(); } // Tests an issue where depending on different versions of the same crate depending on `cfg`s // caused `cargo doc` to fail. 
#[cargo_test] fn issue_5345() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [target.'cfg(all(windows, target_arch = "x86"))'.dependencies] bar = "0.1" [target.'cfg(not(all(windows, target_arch = "x86")))'.dependencies] bar = "0.2" "#, ) .file("src/lib.rs", "extern crate bar;") .build(); Package::new("bar", "0.1.0").publish(); Package::new("bar", "0.2.0").publish(); foo.cargo("build").run(); foo.cargo("doc").run(); } #[cargo_test] fn doc_private_items() { let foo = project() .file("src/lib.rs", "mod private { fn private_item() {} }") .build(); foo.cargo("doc --document-private-items").run(); assert!(foo.root().join("target/doc").is_dir()); assert!(foo .root() .join("target/doc/foo/private/index.html") .is_file()); } #[cargo_test] fn doc_private_ws() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a", "b"] "#, ) .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) .file("a/src/lib.rs", "fn p() {}") .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) .file("b/src/lib.rs", "fn p2() {}") .file("b/src/bin/b-cli.rs", "fn main() {}") .build(); p.cargo("doc --workspace --bins --lib --document-private-items -v") .with_stderr_contains( "[RUNNING] `rustdoc [..] a/src/lib.rs [..]--document-private-items[..]", ) .with_stderr_contains( "[RUNNING] `rustdoc [..] b/src/lib.rs [..]--document-private-items[..]", ) .with_stderr_contains( "[RUNNING] `rustdoc [..] 
b/src/bin/b-cli.rs [..]--document-private-items[..]", ) .run(); } const BAD_INTRA_LINK_LIB: &str = r#" #![deny(broken_intra_doc_links)] /// [bad_link] pub fn foo() {} "#; #[cargo_test] fn doc_cap_lints() { let a = git::new("a", |p| { p.file("Cargo.toml", &basic_lib_manifest("a")) .file("src/lib.rs", BAD_INTRA_LINK_LIB) }); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] a = {{ git = '{}' }} "#, a.url() ), ) .file("src/lib.rs", "") .build(); p.cargo("doc") .with_stderr_unordered( "\ [UPDATING] git repository `[..]` [DOCUMENTING] a v0.5.0 ([..]) [CHECKING] a v0.5.0 ([..]) [DOCUMENTING] foo v0.0.1 ([..]) [FINISHED] dev [..] ", ) .run(); p.root().join("target").rm_rf(); p.cargo("doc -vv") .with_stderr_contains("[WARNING] [..]`bad_link`[..]") .run(); } #[cargo_test] fn doc_message_format() { let p = project().file("src/lib.rs", BAD_INTRA_LINK_LIB).build(); p.cargo("doc --message-format=json") .with_status(101) .with_json_contains_unordered( r#" { "message": { "children": "{...}", "code": "{...}", "level": "error", "message": "{...}", "rendered": "{...}", "spans": "{...}" }, "package_id": "foo [..]", "manifest_path": "[..]", "reason": "compiler-message", "target": "{...}" } "#, ) .run(); } #[cargo_test] fn doc_json_artifacts() { // Checks the output of json artifact messages. 
let p = project() .file("src/lib.rs", "") .file("src/bin/somebin.rs", "fn main() {}") .build(); p.cargo("doc --message-format=json") .with_json_contains_unordered( r#" { "reason": "compiler-artifact", "package_id": "foo 0.0.1 [..]", "manifest_path": "[ROOT]/foo/Cargo.toml", "target": { "kind": ["lib"], "crate_types": ["lib"], "name": "foo", "src_path": "[ROOT]/foo/src/lib.rs", "edition": "2015", "doc": true, "doctest": true, "test": true }, "profile": "{...}", "features": [], "filenames": ["[ROOT]/foo/target/debug/deps/libfoo-[..].rmeta"], "executable": null, "fresh": false } { "reason": "compiler-artifact", "package_id": "foo 0.0.1 [..]", "manifest_path": "[ROOT]/foo/Cargo.toml", "target": { "kind": ["lib"], "crate_types": ["lib"], "name": "foo", "src_path": "[ROOT]/foo/src/lib.rs", "edition": "2015", "doc": true, "doctest": true, "test": true }, "profile": "{...}", "features": [], "filenames": ["[ROOT]/foo/target/doc/foo/index.html"], "executable": null, "fresh": false } { "reason": "compiler-artifact", "package_id": "foo 0.0.1 [..]", "manifest_path": "[ROOT]/foo/Cargo.toml", "target": { "kind": ["bin"], "crate_types": ["bin"], "name": "somebin", "src_path": "[ROOT]/foo/src/bin/somebin.rs", "edition": "2015", "doc": true, "doctest": false, "test": true }, "profile": "{...}", "features": [], "filenames": ["[ROOT]/foo/target/doc/somebin/index.html"], "executable": null, "fresh": false } {"reason":"build-finished","success":true} "#, ) .run(); } #[cargo_test] fn short_message_format() { let p = project().file("src/lib.rs", BAD_INTRA_LINK_LIB).build(); p.cargo("doc --message-format=short") .with_status(101) .with_stderr_contains("src/lib.rs:4:6: error: [..]`bad_link`[..]") .run(); } #[cargo_test] fn doc_example() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [[example]] crate-type = ["lib"] name = "ex1" doc = true "#, ) .file("src/lib.rs", "pub fn f() {}") .file( "examples/ex1.rs", r#" use foo::f; /// Example 
pub fn x() { f(); } "#, ) .build(); p.cargo("doc").run(); assert!(p .build_dir() .join("doc") .join("ex1") .join("fn.x.html") .exists()); } #[cargo_test] fn doc_example_with_deps() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [[example]] crate-type = ["lib"] name = "ex" doc = true [dev-dependencies] a = {path = "a"} b = {path = "b"} "#, ) .file("src/lib.rs", "") .file( "examples/ex.rs", r#" use a::fun; /// Example pub fn x() { fun(); } "#, ) .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.0.1" [dependencies] b = {path = "../b"} "#, ) .file("a/src/fun.rs", "pub fn fun() {}") .file("a/src/lib.rs", "pub mod fun;") .file( "b/Cargo.toml", r#" [package] name = "b" version = "0.0.1" "#, ) .file("b/src/lib.rs", "") .build(); p.cargo("doc --examples").run(); assert!(p .build_dir() .join("doc") .join("ex") .join("fn.x.html") .exists()); } #[cargo_test] fn bin_private_items() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#, ) .file( "src/main.rs", " pub fn foo_pub() {} fn foo_priv() {} struct FooStruct; enum FooEnum {} trait FooTrait {} type FooType = u32; mod foo_mod {} ", ) .build(); p.cargo("doc") .with_stderr( "\ [DOCUMENTING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); assert!(p.root().join("target/doc/foo/index.html").is_file()); assert!(p.root().join("target/doc/foo/fn.foo_pub.html").is_file()); assert!(p.root().join("target/doc/foo/fn.foo_priv.html").is_file()); assert!(p .root() .join("target/doc/foo/struct.FooStruct.html") .is_file()); assert!(p.root().join("target/doc/foo/enum.FooEnum.html").is_file()); assert!(p .root() .join("target/doc/foo/trait.FooTrait.html") .is_file()); assert!(p.root().join("target/doc/foo/type.FooType.html").is_file()); assert!(p.root().join("target/doc/foo/foo_mod/index.html").is_file()); } #[cargo_test] fn bin_private_items_deps() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" "#, ) .file( "src/main.rs", " fn foo_priv() {} pub fn foo_pub() {} ", ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file( "bar/src/lib.rs", " #[allow(dead_code)] fn bar_priv() {} pub fn bar_pub() {} ", ) .build(); p.cargo("doc") .with_stderr_unordered( "\ [DOCUMENTING] bar v0.0.1 ([..]) [CHECKING] bar v0.0.1 ([..]) [DOCUMENTING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); assert!(p.root().join("target/doc/foo/index.html").is_file()); assert!(p.root().join("target/doc/foo/fn.foo_pub.html").is_file()); assert!(p.root().join("target/doc/foo/fn.foo_priv.html").is_file()); assert!(p.root().join("target/doc/bar/index.html").is_file()); assert!(p.root().join("target/doc/bar/fn.bar_pub.html").is_file()); assert!(!p.root().join("target/doc/bar/fn.bar_priv.html").exists()); } #[cargo_test] fn crate_versions() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.2.4" authors = [] "#, ) .file("src/lib.rs", "") .build(); p.cargo("doc -v") .with_stderr( "\ [DOCUMENTING] foo v1.2.4 [..] [RUNNING] `rustdoc --crate-type lib --crate-name foo src/lib.rs [..]--crate-version 1.2.4` [FINISHED] [..] 
", ) .run(); let output_path = p.root().join("target/doc/foo/index.html"); let output_documentation = fs::read_to_string(&output_path).unwrap(); assert!(output_documentation.contains("Version 1.2.4")); } #[cargo_test] fn crate_versions_flag_is_overridden() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.2.4" authors = [] "#, ) .file("src/lib.rs", "") .build(); let output_documentation = || { let output_path = p.root().join("target/doc/foo/index.html"); fs::read_to_string(&output_path).unwrap() }; let asserts = |html: String| { assert!(!html.contains("1.2.4")); assert!(html.contains("Version 2.0.3")); }; p.cargo("doc") .env("RUSTDOCFLAGS", "--crate-version 2.0.3") .run(); asserts(output_documentation()); p.build_dir().rm_rf(); p.cargo("rustdoc -- --crate-version 2.0.3").run(); asserts(output_documentation()); } #[cargo_test(nightly, reason = "-Zdoctest-in-workspace is unstable")] fn doc_test_in_workspace() { let p = project() .file( "Cargo.toml", r#" [workspace] members = [ "crate-a", "crate-b", ] "#, ) .file( "crate-a/Cargo.toml", r#" [project] name = "crate-a" version = "0.1.0" "#, ) .file( "crate-a/src/lib.rs", "\ //! ``` //! assert_eq!(1, 1); //! ``` ", ) .file( "crate-b/Cargo.toml", r#" [project] name = "crate-b" version = "0.1.0" "#, ) .file( "crate-b/src/lib.rs", "\ //! ``` //! assert_eq!(1, 1); //! ``` ", ) .build(); p.cargo("test -Zdoctest-in-workspace --doc -vv") .masquerade_as_nightly_cargo(&["doctest-in-workspace"]) .with_stderr_contains("[DOCTEST] crate-a") .with_stdout_contains( " running 1 test test crate-a/src/lib.rs - (line 1) ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..] ", ) .with_stderr_contains("[DOCTEST] crate-b") .with_stdout_contains( " running 1 test test crate-b/src/lib.rs - (line 1) ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..] 
", ) .run(); } #[cargo_test] fn doc_fingerprint_is_versioning_consistent() { // Random rustc verbose version let old_rustc_verbose_version = format!( "\ rustc 1.41.1 (f3e1a954d 2020-02-24) binary: rustc commit-hash: f3e1a954d2ead4e2fc197c7da7d71e6c61bad196 commit-date: 2020-02-24 host: {} release: 1.41.1 LLVM version: 9.0 ", rustc_host() ); // Create the dummy project. let dummy_project = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.2.4" authors = [] "#, ) .file("src/lib.rs", "//! These are the docs!") .build(); dummy_project.cargo("doc").run(); let fingerprint: RustDocFingerprint = serde_json::from_str(&dummy_project.read_file("target/.rustdoc_fingerprint.json")) .expect("JSON Serde fail"); // Check that the fingerprint contains the actual rustc version // which has been used to compile the docs. let output = std::process::Command::new("rustc") .arg("-vV") .output() .expect("Failed to get actual rustc verbose version"); assert_eq!( fingerprint.rustc_vv, (String::from_utf8_lossy(&output.stdout).as_ref()) ); // As the test shows above. Now we have generated the `doc/` folder and inside // the rustdoc fingerprint file is located with the correct rustc version. // So we will remove it and create a new fingerprint with an old rustc version // inside it. We will also place a bogus file inside of the `doc/` folder to ensure // it gets removed as we expect on the next doc compilation. dummy_project.change_file( "target/.rustdoc_fingerprint.json", &old_rustc_verbose_version, ); fs::write( dummy_project.build_dir().join("doc/bogus_file"), String::from("This is a bogus file and should be removed!"), ) .expect("Error writing test bogus file"); // Now if we trigger another compilation, since the fingerprint contains an old version // of rustc, cargo should remove the entire `/doc` folder (including the fingerprint) // and generating another one with the actual version. // It should also remove the bogus file we created above. 
dummy_project.cargo("doc").run(); assert!(!dummy_project.build_dir().join("doc/bogus_file").exists()); let fingerprint: RustDocFingerprint = serde_json::from_str(&dummy_project.read_file("target/.rustdoc_fingerprint.json")) .expect("JSON Serde fail"); // Check that the fingerprint contains the actual rustc version // which has been used to compile the docs. assert_eq!( fingerprint.rustc_vv, (String::from_utf8_lossy(&output.stdout).as_ref()) ); } #[cargo_test] fn doc_fingerprint_respects_target_paths() { // Random rustc verbose version let old_rustc_verbose_version = format!( "\ rustc 1.41.1 (f3e1a954d 2020-02-24) binary: rustc commit-hash: f3e1a954d2ead4e2fc197c7da7d71e6c61bad196 commit-date: 2020-02-24 host: {} release: 1.41.1 LLVM version: 9.0 ", rustc_host() ); // Create the dummy project. let dummy_project = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.2.4" authors = [] "#, ) .file("src/lib.rs", "//! These are the docs!") .build(); dummy_project.cargo("doc --target").arg(rustc_host()).run(); let fingerprint: RustDocFingerprint = serde_json::from_str(&dummy_project.read_file("target/.rustdoc_fingerprint.json")) .expect("JSON Serde fail"); // Check that the fingerprint contains the actual rustc version // which has been used to compile the docs. let output = std::process::Command::new("rustc") .arg("-vV") .output() .expect("Failed to get actual rustc verbose version"); assert_eq!( fingerprint.rustc_vv, (String::from_utf8_lossy(&output.stdout).as_ref()) ); // As the test shows above. Now we have generated the `doc/` folder and inside // the rustdoc fingerprint file is located with the correct rustc version. // So we will remove it and create a new fingerprint with an old rustc version // inside it. We will also place a bogus file inside of the `doc/` folder to ensure // it gets removed as we expect on the next doc compilation. 
dummy_project.change_file( "target/.rustdoc_fingerprint.json", &old_rustc_verbose_version, ); fs::write( dummy_project .build_dir() .join(rustc_host()) .join("doc/bogus_file"), String::from("This is a bogus file and should be removed!"), ) .expect("Error writing test bogus file"); // Now if we trigger another compilation, since the fingerprint contains an old version // of rustc, cargo should remove the entire `/doc` folder (including the fingerprint) // and generating another one with the actual version. // It should also remove the bogus file we created above. dummy_project.cargo("doc --target").arg(rustc_host()).run(); assert!(!dummy_project .build_dir() .join(rustc_host()) .join("doc/bogus_file") .exists()); let fingerprint: RustDocFingerprint = serde_json::from_str(&dummy_project.read_file("target/.rustdoc_fingerprint.json")) .expect("JSON Serde fail"); // Check that the fingerprint contains the actual rustc version // which has been used to compile the docs. assert_eq!( fingerprint.rustc_vv, (String::from_utf8_lossy(&output.stdout).as_ref()) ); } #[cargo_test] fn doc_fingerprint_unusual_behavior() { // Checks for some unusual circumstances with clearing the doc directory. if !symlink_supported() { return; } let p = project().file("src/lib.rs", "").build(); p.build_dir().mkdir_p(); let real_doc = p.root().join("doc"); real_doc.mkdir_p(); let build_doc = p.build_dir().join("doc"); p.symlink(&real_doc, &build_doc); fs::write(real_doc.join("somefile"), "test").unwrap(); fs::write(real_doc.join(".hidden"), "test").unwrap(); p.cargo("doc").run(); // Make sure for the first run, it does not delete any files and does not // break the symlink. assert!(build_doc.join("somefile").exists()); assert!(real_doc.join("somefile").exists()); assert!(real_doc.join(".hidden").exists()); assert!(real_doc.join("foo/index.html").exists()); // Pretend that the last build was generated by an older version. 
p.change_file( "target/.rustdoc_fingerprint.json", "{\"rustc_vv\": \"I am old\"}", ); // Change file to trigger a new build. p.change_file("src/lib.rs", "// changed"); p.cargo("doc") .with_stderr( "[DOCUMENTING] foo [..]\n\ [FINISHED] [..]", ) .run(); // This will delete somefile, but not .hidden. assert!(!real_doc.join("somefile").exists()); assert!(real_doc.join(".hidden").exists()); assert!(real_doc.join("foo/index.html").exists()); // And also check the -Z flag behavior. p.change_file( "target/.rustdoc_fingerprint.json", "{\"rustc_vv\": \"I am old\"}", ); // Change file to trigger a new build. p.change_file("src/lib.rs", "// changed2"); fs::write(real_doc.join("somefile"), "test").unwrap(); p.cargo("doc -Z skip-rustdoc-fingerprint") .masquerade_as_nightly_cargo(&["skip-rustdoc-fingerprint"]) .with_stderr( "[DOCUMENTING] foo [..]\n\ [FINISHED] [..]", ) .run(); // Should not have deleted anything. assert!(build_doc.join("somefile").exists()); assert!(real_doc.join("somefile").exists()); } #[cargo_test(nightly, reason = "rustdoc scrape examples flags are unstable")] fn scrape_examples_basic() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#, ) .file("examples/ex.rs", "fn main() { foo::foo(); }") .file("src/lib.rs", "pub fn foo() {}\npub fn bar() { foo(); }") .build(); p.cargo("doc -Zunstable-options -Z rustdoc-scrape-examples=all") .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"]) .with_stderr( "\ [..] foo v0.0.1 ([CWD]) [..] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); let doc_html = p.read_file("target/doc/foo/fn.foo.html"); assert!(doc_html.contains("Examples found in repository")); assert!(doc_html.contains("More examples")); // Ensure that the reverse-dependency has its sources generated assert!(p.build_dir().join("doc/src/ex/ex.rs.html").exists()); } #[cargo_test(nightly, reason = "rustdoc scrape examples flags are unstable")] fn scrape_examples_avoid_build_script_cycle() { let p = project() // package with build dependency .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] links = "foo" [workspace] members = ["bar"] [build-dependencies] bar = {path = "bar"} "#, ) .file("src/lib.rs", "") .file("build.rs", "fn main(){}") // dependency .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] links = "bar" "#, ) .file("bar/src/lib.rs", "") .file("bar/build.rs", "fn main(){}") .build(); p.cargo("doc --all -Zunstable-options -Z rustdoc-scrape-examples=all") .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"]) .run(); } // FIXME: This test is broken with latest nightly 2022-08-02. // The example is calling a function from a proc-macro, but proc-macros don't // export functions. It is not clear what this test is trying to exercise. 
// #[cargo_test(nightly, reason = "rustdoc scrape examples flags are unstable")] #[ignore = "broken, needs fixing"] #[cargo_test] fn scrape_examples_complex_reverse_dependencies() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dev-dependencies] a = {path = "a", features = ["feature"]} b = {path = "b"} [workspace] members = ["b"] "#, ) .file("src/lib.rs", "") .file("examples/ex.rs", "fn main() { a::f(); }") .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] [lib] proc-macro = true [dependencies] b = {path = "../b"} [features] feature = [] "#, ) .file("a/src/lib.rs", "#[cfg(feature)] pub fn f();") .file( "b/Cargo.toml", r#" [package] name = "b" version = "0.0.1" authors = [] "#, ) .file("b/src/lib.rs", "") .build(); p.cargo("doc -Zunstable-options -Z rustdoc-scrape-examples=all") .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"]) .run(); } #[cargo_test(nightly, reason = "rustdoc scrape examples flags are unstable")] fn scrape_examples_crate_with_dash() { let p = project() .file( "Cargo.toml", r#" [package] name = "da-sh" version = "0.0.1" authors = [] "#, ) .file("src/lib.rs", "pub fn foo() {}") .file("examples/a.rs", "fn main() { da_sh::foo(); }") .build(); p.cargo("doc -Zunstable-options -Z rustdoc-scrape-examples=all") .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"]) .run(); let doc_html = p.read_file("target/doc/da_sh/fn.foo.html"); assert!(doc_html.contains("Examples found in repository")); } #[cargo_test(nightly, reason = "rustdoc scrape examples flags are unstable")] fn scrape_examples_missing_flag() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.2.4" authors = [] "#, ) .file("src/lib.rs", "//! These are the docs!") .build(); p.cargo("doc -Zrustdoc-scrape-examples") .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"]) .with_status(101) .with_stderr("error: -Z rustdoc-scrape-examples must take [..] 
an argument") .run(); } #[cargo_test(nightly, reason = "rustdoc scrape examples flags are unstable")] fn scrape_examples_configure_profile() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [profile.dev] panic = "abort" "#, ) .file("examples/ex.rs", "fn main() { foo::foo(); }") .file("src/lib.rs", "pub fn foo() {}\npub fn bar() { foo(); }") .build(); p.cargo("doc -Zunstable-options -Z rustdoc-scrape-examples=all") .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"]) .run(); let doc_html = p.read_file("target/doc/foo/fn.foo.html"); assert!(doc_html.contains("Examples found in repository")); assert!(doc_html.contains("More examples")); } #[cargo_test(nightly, reason = "rustdoc scrape examples flags are unstable")] fn scrape_examples_issue_10545() { let p = project() .file( "Cargo.toml", r#" [workspace] resolver = "2" members = ["a", "b"] "#, ) .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] edition = "2021" [features] default = ["foo"] foo = [] "#, ) .file("a/src/lib.rs", "") .file( "b/Cargo.toml", r#" [package] name = "b" version = "0.0.1" authors = [] edition = "2021" [lib] proc-macro = true "#, ) .file("b/src/lib.rs", "") .build(); p.cargo("doc -Zunstable-options -Z rustdoc-scrape-examples=all") .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"]) .run(); } #[cargo_test] fn lib_before_bin() { // Checks that the library is documented before the binary. // Previously they were built concurrently, which can cause issues // if the bin has intra-doc links to the lib. let p = project() .file( "src/lib.rs", r#" /// Hi pub fn abc() {} "#, ) .file( "src/bin/somebin.rs", r#" //! See [`foo::abc`] fn main() {} "#, ) .build(); // Run check first. This just helps ensure that the test clearly shows the // order of the rustdoc commands. p.cargo("check").run(); // The order of output here should be deterministic. p.cargo("doc -v") .with_stderr( "\ [DOCUMENTING] foo [..] 
[RUNNING] `rustdoc --crate-type lib --crate-name foo src/lib.rs [..] [RUNNING] `rustdoc --crate-type bin --crate-name somebin src/bin/somebin.rs [..] [FINISHED] [..] ", ) .run(); // And the link should exist. let bin_html = p.read_file("target/doc/somebin/index.html"); assert!(bin_html.contains("../foo/fn.abc.html")); } #[cargo_test] fn doc_lib_false() { // doc = false for a library let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [lib] doc = false [dependencies] bar = {path = "bar"} "#, ) .file("src/lib.rs", "extern crate bar;") .file("src/bin/some-bin.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" [lib] doc = false "#, ) .file("bar/src/lib.rs", "") .build(); p.cargo("doc") .with_stderr( "\ [CHECKING] bar v0.1.0 [..] [CHECKING] foo v0.1.0 [..] [DOCUMENTING] foo v0.1.0 [..] [FINISHED] [..] ", ) .run(); assert!(!p.build_dir().join("doc/foo").exists()); assert!(!p.build_dir().join("doc/bar").exists()); assert!(p.build_dir().join("doc/some_bin").exists()); } #[cargo_test] fn doc_lib_false_dep() { // doc = false for a dependency // Ensures that the rmeta gets produced let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = { path = "bar" } "#, ) .file("src/lib.rs", "extern crate bar;") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" [lib] doc = false "#, ) .file("bar/src/lib.rs", "") .build(); p.cargo("doc") .with_stderr( "\ [CHECKING] bar v0.1.0 [..] [DOCUMENTING] foo v0.1.0 [..] [FINISHED] [..] ", ) .run(); assert!(p.build_dir().join("doc/foo").exists()); assert!(!p.build_dir().join("doc/bar").exists()); } #[cargo_test] fn link_to_private_item() { let main = r#" //! [bar] #[allow(dead_code)] fn bar() {} "#; let p = project().file("src/lib.rs", main).build(); p.cargo("doc") .with_stderr_contains("[..] 
documentation for `foo` links to private item `bar`") .run(); // Check that binaries don't emit a private_intra_doc_links warning. fs::rename(p.root().join("src/lib.rs"), p.root().join("src/main.rs")).unwrap(); p.cargo("doc") .with_stderr( "[DOCUMENTING] foo [..]\n\ [FINISHED] [..]", ) .run(); } cargo-0.66.0/tests/testsuite/edition.rs000066400000000000000000000063651432416201200200770ustar00rootroot00000000000000//! Tests for edition setting. use cargo::core::Edition; use cargo_test_support::{basic_lib_manifest, project}; #[cargo_test] fn edition_works_for_build_script() { let p = project() .file( "Cargo.toml", r#" [package] name = 'foo' version = '0.1.0' edition = '2018' [build-dependencies] a = { path = 'a' } "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" fn main() { a::foo(); } "#, ) .file("a/Cargo.toml", &basic_lib_manifest("a")) .file("a/src/lib.rs", "pub fn foo() {}") .build(); p.cargo("build -v").run(); } #[cargo_test] fn edition_unstable_gated() { // During the period where a new edition is coming up, but not yet stable, // this test will verify that it cannot be used on stable. If there is no // next edition, it does nothing. let next = match Edition::LATEST_UNSTABLE { Some(next) => next, None => { eprintln!("Next edition is currently not available, skipping test."); return; } }; let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.1.0" edition = "{}" "#, next ), ) .file("src/lib.rs", "") .build(); p.cargo("check") .with_status(101) .with_stderr(&format!( "\ [ERROR] failed to parse manifest at `[..]/foo/Cargo.toml` Caused by: feature `edition{next}` is required The package requires the Cargo feature called `edition{next}`, \ but that feature is not stabilized in this version of Cargo (1.[..]). Consider trying a newer version of Cargo (this may require the nightly release). See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#edition-{next} \ for more information about the status of this feature. 
", next = next )) .run(); } #[cargo_test(nightly, reason = "fundamentally always nightly")] fn edition_unstable() { // During the period where a new edition is coming up, but not yet stable, // this test will verify that it can be used with `cargo-features`. If // there is no next edition, it does nothing. let next = match Edition::LATEST_UNSTABLE { Some(next) => next, None => { eprintln!("Next edition is currently not available, skipping test."); return; } }; let p = project() .file( "Cargo.toml", &format!( r#" cargo-features = ["edition{next}"] [package] name = "foo" version = "0.1.0" edition = "{next}" "#, next = next ), ) .file("src/lib.rs", "") .build(); p.cargo("check") .masquerade_as_nightly_cargo(&["always_nightly"]) .with_stderr( "\ [CHECKING] foo [..] [FINISHED] [..] ", ) .run(); } cargo-0.66.0/tests/testsuite/error.rs000066400000000000000000000007431432416201200175670ustar00rootroot00000000000000//! General error tests that don't belong anywhere else. use cargo_test_support::cargo_process; #[cargo_test] fn internal_error() { cargo_process("init") .env("__CARGO_TEST_INTERNAL_ERROR", "1") .with_status(101) .with_stderr( "\ [ERROR] internal error test [NOTE] this is an unexpected cargo internal error [NOTE] we would appreciate a bug report: https://github.com/rust-lang/cargo/issues/ [NOTE] cargo [..] ", ) .run(); } cargo-0.66.0/tests/testsuite/features.rs000066400000000000000000001534551432416201200202650ustar00rootroot00000000000000//! Tests for `[features]` table. 
use cargo_test_support::paths::CargoPathExt; use cargo_test_support::registry::{Dependency, Package}; use cargo_test_support::{basic_manifest, project}; #[cargo_test] fn invalid1() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] bar = ["baz"] "#, ) .file("src/main.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: feature `bar` includes `baz` which is neither a dependency nor another feature ", ) .run(); } #[cargo_test] fn same_name() { // Feature with the same name as a dependency. let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] bar = ["baz"] baz = [] [dependencies.bar] path = "bar" "#, ) .file("src/main.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "1.0.0")) .file("bar/src/lib.rs", "") .build(); p.cargo("tree -f") .arg("{p} [{f}]") .with_stderr("") .with_stdout( "\ foo v0.0.1 ([..]) [] └── bar v1.0.0 ([..]) [] ", ) .run(); p.cargo("tree --features bar -f") .arg("{p} [{f}]") .with_stderr("") .with_stdout( "\ foo v0.0.1 ([..]) [bar,baz] └── bar v1.0.0 ([..]) [] ", ) .run(); } #[cargo_test] fn invalid3() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] bar = ["baz"] [dependencies.baz] path = "foo" "#, ) .file("src/main.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: feature `bar` includes `baz`, but `baz` is not an optional dependency A non-optional dependency of the same name is defined; consider adding `optional = true` to its definition. 
", ) .run(); } #[cargo_test] fn invalid4() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" features = ["bar"] "#, ) .file("src/main.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: failed to select a version for `bar`. ... required by package `foo v0.0.1 ([..])` versions that meet the requirements `*` are: 0.0.1 the package `foo` depends on `bar`, with features: `bar` but `bar` does not have these features. failed to select a version for `bar` which could resolve this conflict", ) .run(); p.change_file("Cargo.toml", &basic_manifest("foo", "0.0.1")); p.cargo("build --features test") .with_status(101) .with_stderr("error: Package `foo v0.0.1 ([..])` does not have the feature `test`") .run(); } #[cargo_test] fn invalid5() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dev-dependencies.bar] path = "bar" optional = true "#, ) .file("src/main.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: dev-dependencies are not allowed to be optional: `bar` ", ) .run(); } #[cargo_test] fn invalid6() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] foo = ["bar/baz"] "#, ) .file("src/main.rs", "") .build(); p.cargo("build --features foo") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: feature `foo` includes `bar/baz`, but `bar` is not a dependency ", ) .run(); } #[cargo_test] fn invalid7() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] foo = ["bar/baz"] bar = [] "#, ) .file("src/main.rs", "") .build(); p.cargo("build --features foo") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at 
`[..]` Caused by: feature `foo` includes `bar/baz`, but `bar` is not a dependency ", ) .run(); } #[cargo_test] fn invalid8() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" features = ["foo/bar"] "#, ) .file("src/main.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "") .build(); p.cargo("build --features foo") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[CWD]/Cargo.toml` Caused by: feature `foo/bar` in dependency `bar` is not allowed to contain slashes If you want to enable features [..] ", ) .run(); } #[cargo_test] fn invalid9() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" "#, ) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "") .build(); p.cargo("build --features bar") .with_stderr( "\ error: Package `foo v0.0.1 ([..])` does not have feature `bar`. It has a required dependency with that name, but only optional dependencies can be used as features. ", ).with_status(101).run(); } #[cargo_test] fn invalid10() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" features = ["baz"] "#, ) .file("src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [dependencies.baz] path = "baz" "#, ) .file("bar/src/lib.rs", "") .file("bar/baz/Cargo.toml", &basic_manifest("baz", "0.0.1")) .file("bar/baz/src/lib.rs", "") .build(); p.cargo("build").with_stderr("\ error: failed to select a version for `bar`. ... required by package `foo v0.0.1 ([..])` versions that meet the requirements `*` are: 0.0.1 the package `foo` depends on `bar`, with features: `baz` but `bar` does not have these features. 
It has a required dependency with that name, but only optional dependencies can be used as features. failed to select a version for `bar` which could resolve this conflict ").with_status(101) .run(); } #[cargo_test] fn no_transitive_dep_feature_requirement() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.derived] path = "derived" [features] default = ["derived/bar/qux"] "#, ) .file( "src/main.rs", r#" extern crate derived; fn main() { derived::test(); } "#, ) .file( "derived/Cargo.toml", r#" [package] name = "derived" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" "#, ) .file("derived/src/lib.rs", "extern crate bar; pub use bar::test;") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [features] qux = [] "#, ) .file( "bar/src/lib.rs", r#" #[cfg(feature = "qux")] pub fn test() { print!("test"); } "#, ) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[CWD]/Cargo.toml` Caused by: multiple slashes in feature `derived/bar/qux` (included by feature `default`) are not allowed ", ) .run(); } #[cargo_test] fn no_feature_doesnt_build() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" optional = true "#, ) .file( "src/main.rs", r#" #[cfg(feature = "bar")] extern crate bar; #[cfg(feature = "bar")] fn main() { bar::bar(); println!("bar") } #[cfg(not(feature = "bar"))] fn main() {} "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "pub fn bar() {}") .build(); p.cargo("build") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); p.process(&p.bin("foo")).with_stdout("").run(); p.cargo("build --features bar") .with_stderr( "\ [COMPILING] bar v0.0.1 ([CWD]/bar) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.process(&p.bin("foo")).with_stdout("bar\n").run(); } #[cargo_test] fn default_feature_pulled_in() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] default = ["bar"] [dependencies.bar] path = "bar" optional = true "#, ) .file( "src/main.rs", r#" #[cfg(feature = "bar")] extern crate bar; #[cfg(feature = "bar")] fn main() { bar::bar(); println!("bar") } #[cfg(not(feature = "bar"))] fn main() {} "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "pub fn bar() {}") .build(); p.cargo("build") .with_stderr( "\ [COMPILING] bar v0.0.1 ([CWD]/bar) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.process(&p.bin("foo")).with_stdout("bar\n").run(); p.cargo("build --no-default-features") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); p.process(&p.bin("foo")).with_stdout("").run(); } #[cargo_test] fn cyclic_feature() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] default = ["default"] "#, ) .file("src/main.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr("[ERROR] cyclic feature dependency: feature `default` depends on itself") .run(); } #[cargo_test] fn cyclic_feature2() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] foo = ["bar"] bar = ["foo"] "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build").with_stdout("").run(); } #[cargo_test] fn groups_on_groups_on_groups() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] default = ["f1"] f1 = ["f2", "bar"] f2 = ["f3", "f4"] f3 = ["f5", "f6", "baz"] f4 = ["f5", "f7"] f5 = ["f6"] f6 = ["f7"] f7 = ["bar"] [dependencies.bar] path = "bar" optional = true [dependencies.baz] path = "baz" optional = true "#, ) .file( "src/main.rs", r#" #[allow(unused_extern_crates)] extern crate bar; #[allow(unused_extern_crates)] extern crate baz; fn main() {} "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "pub fn bar() {}") .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1")) .file("baz/src/lib.rs", "pub fn baz() {}") .build(); p.cargo("build") .with_stderr( "\ [COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) [COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn many_cli_features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" optional = true [dependencies.baz] path = "baz" optional = true "#, ) .file( "src/main.rs", r#" #[allow(unused_extern_crates)] extern crate bar; #[allow(unused_extern_crates)] extern crate baz; fn main() {} "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "pub fn bar() {}") .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1")) .file("baz/src/lib.rs", "pub fn baz() {}") .build(); p.cargo("build --features") .arg("bar baz") .with_stderr( "\ [COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) [COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn union_features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.d1] path = "d1" features = ["f1"] [dependencies.d2] path = "d2" features = ["f2"] "#, ) .file( "src/main.rs", r#" #[allow(unused_extern_crates)] extern crate d1; extern crate d2; fn main() { d2::f1(); d2::f2(); } "#, ) .file( "d1/Cargo.toml", r#" [package] name = "d1" version = "0.0.1" authors = [] [features] f1 = ["d2"] [dependencies.d2] path = "../d2" features = ["f1"] optional = true "#, ) .file("d1/src/lib.rs", "") .file( "d2/Cargo.toml", r#" [package] name = "d2" version = "0.0.1" authors = [] [features] f1 = [] f2 = [] "#, ) .file( "d2/src/lib.rs", r#" #[cfg(feature = "f1")] pub fn f1() {} #[cfg(feature = "f2")] pub fn f2() {} "#, ) .build(); p.cargo("build") .with_stderr( "\ [COMPILING] d2 v0.0.1 ([CWD]/d2) [COMPILING] d1 v0.0.1 ([CWD]/d1) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn many_features_no_rebuilds() { let p = project() .file( "Cargo.toml", r#" [package] name = "b" version = "0.1.0" authors = [] [dependencies.a] path = "a" features = ["fall"] "#, ) .file("src/main.rs", "fn main() {}") .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.1.0" authors = [] [features] ftest = [] ftest2 = [] fall = ["ftest", "ftest2"] "#, ) .file("a/src/lib.rs", "") .build(); p.cargo("build") .with_stderr( "\ [COMPILING] a v0.1.0 ([CWD]/a) [COMPILING] b v0.1.0 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.root().move_into_the_past(); p.cargo("build -v") .with_stderr( "\ [FRESH] a v0.1.0 ([..]/a) [FRESH] b v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } // Tests that all cmd lines work with `--features ""` #[cargo_test] fn empty_features() { let p = project().file("src/main.rs", "fn main() {}").build(); p.cargo("build --features").arg("").run(); } // Tests that all cmd lines work with `--features ""` #[cargo_test] fn transitive_features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] foo = ["bar/baz"] [dependencies.bar] path = "bar" "#, ) .file("src/main.rs", "extern crate bar; fn main() { bar::baz(); }") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [features] baz = [] "#, ) .file( "bar/src/lib.rs", r#"#[cfg(feature = "baz")] pub fn baz() {}"#, ) .build(); p.cargo("build --features foo").run(); } #[cargo_test] fn everything_in_the_lockfile() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] f1 = ["d1/f1"] f2 = ["d2"] [dependencies.d1] path = "d1" [dependencies.d2] path = "d2" optional = true [dependencies.d3] path = "d3" optional = true "#, ) .file("src/main.rs", "fn main() {}") .file( "d1/Cargo.toml", r#" [package] name = "d1" version = "0.0.1" authors = [] [features] f1 = [] "#, ) 
.file("d1/src/lib.rs", "") .file("d2/Cargo.toml", &basic_manifest("d2", "0.0.2")) .file("d2/src/lib.rs", "") .file( "d3/Cargo.toml", r#" [package] name = "d3" version = "0.0.3" authors = [] [features] f3 = [] "#, ) .file("d3/src/lib.rs", "") .build(); p.cargo("fetch").run(); let lockfile = p.read_lockfile(); assert!( lockfile.contains(r#"name = "d1""#), "d1 not found\n{}", lockfile ); assert!( lockfile.contains(r#"name = "d2""#), "d2 not found\n{}", lockfile ); assert!( lockfile.contains(r#"name = "d3""#), "d3 not found\n{}", lockfile ); } #[cargo_test] fn no_rebuild_when_frobbing_default_feature() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [dependencies] a = { path = "a" } b = { path = "b" } "#, ) .file("src/lib.rs", "") .file( "b/Cargo.toml", r#" [package] name = "b" version = "0.1.0" authors = [] [dependencies] a = { path = "../a", features = ["f1"], default-features = false } "#, ) .file("b/src/lib.rs", "") .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.1.0" authors = [] [features] default = ["f1"] f1 = [] "#, ) .file("a/src/lib.rs", "") .build(); p.cargo("build").run(); p.cargo("build").with_stdout("").run(); p.cargo("build").with_stdout("").run(); } #[cargo_test] fn unions_work_with_no_default_features() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [dependencies] a = { path = "a" } b = { path = "b" } "#, ) .file("src/lib.rs", "extern crate a; pub fn foo() { a::a(); }") .file( "b/Cargo.toml", r#" [package] name = "b" version = "0.1.0" authors = [] [dependencies] a = { path = "../a", features = [], default-features = false } "#, ) .file("b/src/lib.rs", "") .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.1.0" authors = [] [features] default = ["f1"] f1 = [] "#, ) .file("a/src/lib.rs", r#"#[cfg(feature = "f1")] pub fn a() {}"#) .build(); p.cargo("build").run(); p.cargo("build").with_stdout("").run(); 
p.cargo("build").with_stdout("").run(); } #[cargo_test] fn optional_and_dev_dep() { let p = project() .file( "Cargo.toml", r#" [package] name = "test" version = "0.1.0" authors = [] [dependencies] foo = { path = "foo", optional = true } [dev-dependencies] foo = { path = "foo" } "#, ) .file("src/lib.rs", "") .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("foo/src/lib.rs", "") .build(); p.cargo("build") .with_stderr( "\ [COMPILING] test v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn activating_feature_activates_dep() { let p = project() .file( "Cargo.toml", r#" [package] name = "test" version = "0.1.0" authors = [] [dependencies] foo = { path = "foo", optional = true } [features] a = ["foo/a"] "#, ) .file( "src/lib.rs", "extern crate foo; pub fn bar() { foo::bar(); }", ) .file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [features] a = [] "#, ) .file("foo/src/lib.rs", r#"#[cfg(feature = "a")] pub fn bar() {}"#) .build(); p.cargo("build --features a -v").run(); } #[cargo_test] fn dep_feature_in_cmd_line() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.derived] path = "derived" "#, ) .file( "src/main.rs", r#" extern crate derived; fn main() { derived::test(); } "#, ) .file( "derived/Cargo.toml", r#" [package] name = "derived" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" [features] default = [] derived-feat = ["bar/some-feat"] "#, ) .file("derived/src/lib.rs", "extern crate bar; pub use bar::test;") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [features] some-feat = [] "#, ) .file( "bar/src/lib.rs", r#" #[cfg(feature = "some-feat")] pub fn test() { print!("test"); } "#, ) .build(); // The foo project requires that feature "some-feat" in "bar" is enabled. 
// Building without any features enabled should fail: p.cargo("build") .with_status(101) .with_stderr_contains("[..]unresolved import `bar::test`") .run(); // We should be able to enable the feature "derived-feat", which enables "some-feat", // on the command line. The feature is enabled, thus building should be successful: p.cargo("build --features derived/derived-feat").run(); // Trying to enable features of transitive dependencies is an error p.cargo("build --features bar/some-feat") .with_status(101) .with_stderr("error: package `foo v0.0.1 ([..])` does not have a dependency named `bar`") .run(); // Hierarchical feature specification should still be disallowed p.cargo("build --features derived/bar/some-feat") .with_status(101) .with_stderr("[ERROR] multiple slashes in feature `derived/bar/some-feat` is not allowed") .run(); } #[cargo_test] fn all_features_flag_enables_all_features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] foo = [] bar = [] [dependencies.baz] path = "baz" optional = true "#, ) .file( "src/main.rs", r#" #[cfg(feature = "foo")] pub fn foo() {} #[cfg(feature = "bar")] pub fn bar() { extern crate baz; baz::baz(); } fn main() { foo(); bar(); } "#, ) .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1")) .file("baz/src/lib.rs", "pub fn baz() {}") .build(); p.cargo("build --all-features").run(); } #[cargo_test] fn many_cli_features_comma_delimited() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" optional = true [dependencies.baz] path = "baz" optional = true "#, ) .file( "src/main.rs", r#" #[allow(unused_extern_crates)] extern crate bar; #[allow(unused_extern_crates)] extern crate baz; fn main() {} "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "pub fn bar() {}") .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1")) .file("baz/src/lib.rs", "pub fn baz() 
{}") .build(); p.cargo("build --features bar,baz") .with_stderr( "\ [COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) [COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn many_cli_features_comma_and_space_delimited() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" optional = true [dependencies.baz] path = "baz" optional = true [dependencies.bam] path = "bam" optional = true [dependencies.bap] path = "bap" optional = true "#, ) .file( "src/main.rs", r#" #[allow(unused_extern_crates)] extern crate bar; #[allow(unused_extern_crates)] extern crate baz; #[allow(unused_extern_crates)] extern crate bam; #[allow(unused_extern_crates)] extern crate bap; fn main() {} "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "pub fn bar() {}") .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1")) .file("baz/src/lib.rs", "pub fn baz() {}") .file("bam/Cargo.toml", &basic_manifest("bam", "0.0.1")) .file("bam/src/lib.rs", "pub fn bam() {}") .file("bap/Cargo.toml", &basic_manifest("bap", "0.0.1")) .file("bap/src/lib.rs", "pub fn bap() {}") .build(); p.cargo("build --features") .arg("bar,baz bam bap") .with_stderr( "\ [COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) [COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) [COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) [COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn only_dep_is_optional() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] foo = ['bar'] [dependencies] bar = { version = "0.1", optional = true } [dev-dependencies] bar = "0.1" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build").run(); } #[cargo_test] fn all_features_all_crates() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [workspace] members = ['bar'] "#, ) .file("src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.0.1" authors = [] [features] foo = [] "#, ) .file("bar/src/main.rs", "#[cfg(feature = \"foo\")] fn main() {}") .build(); p.cargo("build --all-features --workspace").run(); } #[cargo_test] fn feature_off_dylib() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar"] [package] name = "foo" version = "0.0.1" [lib] crate-type = ["dylib"] [features] f1 = [] "#, ) .file( "src/lib.rs", r#" pub fn hello() -> &'static str { if cfg!(feature = "f1") { "f1" } else { "no f1" } } "#, ) .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" [dependencies] foo = { path = ".." } "#, ) .file( "bar/src/main.rs", r#" extern crate foo; fn main() { assert_eq!(foo::hello(), "no f1"); } "#, ) .build(); // Build the dylib with `f1` feature. p.cargo("build --features f1").run(); // Check that building without `f1` uses a dylib without `f1`. 
p.cargo("run -p bar").run(); } #[cargo_test] fn warn_if_default_features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" optional = true [features] default-features = ["bar"] "#, ) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "pub fn bar() {}") .build(); p.cargo("build") .with_stderr( r#" [WARNING] `default-features = [".."]` was found in [features]. Did you mean to use `default = [".."]`? [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] "#.trim(), ).run(); } #[cargo_test] fn no_feature_for_non_optional_dep() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = { path = "bar" } "#, ) .file( "src/main.rs", r#" #[cfg(not(feature = "bar"))] fn main() { } "#, ) .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.0.1" authors = [] [features] a = [] "#, ) .file("bar/src/lib.rs", "pub fn bar() {}") .build(); p.cargo("build --features bar/a").run(); } #[cargo_test] fn features_option_given_twice() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] a = [] b = [] "#, ) .file( "src/main.rs", r#" #[cfg(all(feature = "a", feature = "b"))] fn main() {} "#, ) .build(); p.cargo("build --features a --features b").run(); } #[cargo_test] fn multi_multi_features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] a = [] b = [] c = [] "#, ) .file( "src/main.rs", r#" #[cfg(all(feature = "a", feature = "b", feature = "c"))] fn main() {} "#, ) .build(); p.cargo("build --features a --features").arg("b c").run(); } #[cargo_test] fn cli_parse_ok() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] a = [] "#, ) .file( "src/main.rs", r#" 
#[cfg(feature = "a")] fn main() { assert_eq!(std::env::args().nth(1).unwrap(), "b"); } "#, ) .build(); p.cargo("run --features a b").run(); } #[cargo_test] fn all_features_virtual_ws() { // What happens with `--all-features` in the root of a virtual workspace. // Some of this behavior is a little strange (member dependencies also // have all features enabled, one might expect `f4` to be disabled). let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a", "b"] "#, ) .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.1.0" edition = "2018" [dependencies] b = {path="../b", optional=true} [features] default = ["f1"] f1 = [] f2 = [] "#, ) .file( "a/src/main.rs", r#" fn main() { if cfg!(feature="f1") { println!("f1"); } if cfg!(feature="f2") { println!("f2"); } #[cfg(feature="b")] b::f(); } "#, ) .file( "b/Cargo.toml", r#" [package] name = "b" version = "0.1.0" [features] default = ["f3"] f3 = [] f4 = [] "#, ) .file( "b/src/lib.rs", r#" pub fn f() { if cfg!(feature="f3") { println!("f3"); } if cfg!(feature="f4") { println!("f4"); } } "#, ) .build(); p.cargo("run").with_stdout("f1\n").run(); p.cargo("run --all-features") .with_stdout("f1\nf2\nf3\nf4\n") .run(); // In `a`, it behaves differently. :( p.cargo("run --all-features") .cwd("a") .with_stdout("f1\nf2\nf3\n") .run(); } #[cargo_test] fn slash_optional_enables() { // --features dep/feat will enable `dep` and set its feature. 
let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] dep = {path="dep", optional=true} "#, ) .file( "src/lib.rs", r#" #[cfg(not(feature="dep"))] compile_error!("dep not set"); "#, ) .file( "dep/Cargo.toml", r#" [package] name = "dep" version = "0.1.0" [features] feat = [] "#, ) .file( "dep/src/lib.rs", r#" #[cfg(not(feature="feat"))] compile_error!("feat not set"); "#, ) .build(); p.cargo("check") .with_status(101) .with_stderr_contains("[..]dep not set[..]") .run(); p.cargo("check --features dep/feat").run(); } #[cargo_test] fn registry_summary_order_doesnt_matter() { // Checks for an issue where the resolver depended on the order of entries // in the registry summary. If there was a non-optional dev-dependency // that appeared before an optional normal dependency, then the resolver // would not activate the optional dependency with a pkg/featname feature // syntax. Package::new("dep", "0.1.0") .feature("feat1", &[]) .file( "src/lib.rs", r#" #[cfg(feature="feat1")] pub fn work() { println!("it works"); } "#, ) .publish(); Package::new("bar", "0.1.0") .feature("bar_feat", &["dep/feat1"]) .add_dep(Dependency::new("dep", "0.1.0").dev()) .add_dep(Dependency::new("dep", "0.1.0").optional(true)) .file( "src/lib.rs", r#" // This will fail to compile without `dep` optional dep activated. extern crate dep; pub fn doit() { dep::work(); } "#, ) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [dependencies] bar = { version="0.1", features = ["bar_feat"] } "#, ) .file( "src/main.rs", r#" fn main() { bar::doit(); } "#, ) .build(); p.cargo("run") .with_stderr( "\ [UPDATING] [..] [DOWNLOADING] crates ... [DOWNLOADED] [..] [DOWNLOADED] [..] [COMPILING] dep v0.1.0 [COMPILING] bar v0.1.0 [COMPILING] foo v0.1.0 [..] [FINISHED] [..] 
[RUNNING] `target/debug/foo[EXE]` ", ) .with_stdout("it works") .run(); } #[cargo_test] fn nonexistent_required_features() { Package::new("required_dependency", "0.1.0") .feature("simple", &[]) .publish(); Package::new("optional_dependency", "0.2.0") .feature("optional", &[]) .publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [features] existing = [] fancy = ["optional_dependency"] [dependencies] required_dependency = { version = "0.1", optional = false} optional_dependency = { version = "0.2", optional = true} [[example]] name = "ololo" required-features = ["not_present", "existing", "fancy", "required_dependency/not_existing", "required_dependency/simple", "optional_dependency/optional", "not_specified_dependency/some_feature"] "#, ) .file("src/main.rs", "fn main() {}") .file("examples/ololo.rs", "fn main() {}") .build(); p.cargo("build --examples") .with_stderr_contains( "\ [WARNING] invalid feature `not_present` in required-features of target `ololo`: \ `not_present` is not present in [features] section [WARNING] invalid feature `required_dependency/not_existing` in required-features \ of target `ololo`: feature `not_existing` does not exist in package \ `required_dependency v0.1.0` [WARNING] invalid feature `not_specified_dependency/some_feature` in required-features \ of target `ololo`: dependency `not_specified_dependency` does not exist ", ) .run(); } #[cargo_test] fn invalid_feature_names_warning() { // Warnings for more restricted feature syntax. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [features] # Some valid, but unusual names, shouldn't warn. "c++17" = [] "128bit" = [] "_foo" = [] "feat-name" = [] "feat_name" = [] "foo.bar" = [] # Invalid names. "+foo" = [] "-foo" = [] ".foo" = [] "foo:bar" = [] "foo?" 
= [] "?foo" = [] "β’Άβ’·β’Έ" = [] "aΒΌ" = [] "#, ) .file("src/lib.rs", "") .build(); // Unfortunately the warnings are duplicated due to the Summary being // loaded twice (once in the Workspace, and once in PackageRegistry) and // Cargo does not have a de-duplication system. This should probably be // OK, since I'm not expecting this to affect anyone. p.cargo("check") .with_stderr("\ [WARNING] invalid character `+` in feature `+foo` in package foo v0.1.0 ([ROOT]/foo), the first character must be a Unicode XID start character or digit (most letters or `_` or `0` to `9`) This was previously accepted but is being phased out; it will become a hard error in a future release. For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. [WARNING] invalid character `-` in feature `-foo` in package foo v0.1.0 ([ROOT]/foo), the first character must be a Unicode XID start character or digit (most letters or `_` or `0` to `9`) This was previously accepted but is being phased out; it will become a hard error in a future release. For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. [WARNING] invalid character `.` in feature `.foo` in package foo v0.1.0 ([ROOT]/foo), the first character must be a Unicode XID start character or digit (most letters or `_` or `0` to `9`) This was previously accepted but is being phased out; it will become a hard error in a future release. For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. [WARNING] invalid character `?` in feature `?foo` in package foo v0.1.0 ([ROOT]/foo), the first character must be a Unicode XID start character or digit (most letters or `_` or `0` to `9`) This was previously accepted but is being phased out; it will become a hard error in a future release. For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. 
[WARNING] invalid character `ΒΌ` in feature `aΒΌ` in package foo v0.1.0 ([ROOT]/foo), characters must be Unicode XID characters, `+`, or `.` (numbers, `+`, `-`, `_`, `.`, or most letters) This was previously accepted but is being phased out; it will become a hard error in a future release. For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. [WARNING] invalid character `:` in feature `foo:bar` in package foo v0.1.0 ([ROOT]/foo), characters must be Unicode XID characters, `+`, or `.` (numbers, `+`, `-`, `_`, `.`, or most letters) This was previously accepted but is being phased out; it will become a hard error in a future release. For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. [WARNING] invalid character `?` in feature `foo?` in package foo v0.1.0 ([ROOT]/foo), characters must be Unicode XID characters, `+`, or `.` (numbers, `+`, `-`, `_`, `.`, or most letters) This was previously accepted but is being phased out; it will become a hard error in a future release. For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. [WARNING] invalid character `β’Ά` in feature `β’Άβ’·β’Έ` in package foo v0.1.0 ([ROOT]/foo), the first character must be a Unicode XID start character or digit (most letters or `_` or `0` to `9`) This was previously accepted but is being phased out; it will become a hard error in a future release. For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. [WARNING] invalid character `β’·` in feature `β’Άβ’·β’Έ` in package foo v0.1.0 ([ROOT]/foo), characters must be Unicode XID characters, `+`, or `.` (numbers, `+`, `-`, `_`, `.`, or most letters) This was previously accepted but is being phased out; it will become a hard error in a future release. 
For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. [WARNING] invalid character `β’Έ` in feature `β’Άβ’·β’Έ` in package foo v0.1.0 ([ROOT]/foo), characters must be Unicode XID characters, `+`, or `.` (numbers, `+`, `-`, `_`, `.`, or most letters) This was previously accepted but is being phased out; it will become a hard error in a future release. For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. [CHECKING] foo v0.1.0 [..] [FINISHED] [..] ") .run(); } #[cargo_test] fn invalid_feature_names_error() { // Errors for more restricted feature syntax. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [features] "foo/bar" = [] "#, ) .file("src/lib.rs", "") .build(); p.cargo("check") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[CWD]/Cargo.toml` Caused by: feature named `foo/bar` is not allowed to contain slashes ", ) .run(); } #[cargo_test] fn default_features_conflicting_warning() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [dependencies] a = { path = "a", features = ["f1"], default-features = false, default_features = false } "#, ) .file("src/lib.rs", "") .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.1.0" authors = [] [features] default = ["f1"] f1 = [] "#, ) .file("a/src/lib.rs", "") .build(); p.cargo("build") .with_stderr_contains( "[WARNING] conflicting between `default-features` and `default_features` in the `a` dependency.\n `default_features` is ignored and not recommended for use in the future" ) .run(); } cargo-0.66.0/tests/testsuite/features2.rs000066400000000000000000002004251432416201200203350ustar00rootroot00000000000000//! Tests for the new feature resolver. 
use cargo_test_support::cross_compile::{self, alternate}; use cargo_test_support::install::cargo_home; use cargo_test_support::paths::CargoPathExt; use cargo_test_support::publish::validate_crate_contents; use cargo_test_support::registry::{Dependency, Package}; use cargo_test_support::{basic_manifest, cargo_process, project, rustc_host, Project}; use std::fs::File; /// Switches Cargo.toml to use `resolver = "2"`. pub fn switch_to_resolver_2(p: &Project) { let mut manifest = p.read_file("Cargo.toml"); if manifest.contains("resolver =") { panic!("did not expect manifest to already contain a resolver setting"); } if let Some(index) = manifest.find("[workspace]\n") { manifest.insert_str(index + 12, "resolver = \"2\"\n"); } else if let Some(index) = manifest.find("[package]\n") { manifest.insert_str(index + 10, "resolver = \"2\"\n"); } else { panic!("expected [package] or [workspace] in manifest"); } p.change_file("Cargo.toml", &manifest); } #[cargo_test] fn inactivate_targets() { // Basic test of `itarget`. A shared dependency where an inactive [target] // changes the features. Package::new("common", "1.0.0") .feature("f1", &[]) .file( "src/lib.rs", r#" #[cfg(feature = "f1")] compile_error!("f1 should not activate"); "#, ) .publish(); Package::new("bar", "1.0.0") .add_dep( Dependency::new("common", "1.0") .target("cfg(whatever)") .enable_features(&["f1"]), ) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] common = "1.0" bar = "1.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("check") .with_status(101) .with_stderr_contains("[..]f1 should not activate[..]") .run(); switch_to_resolver_2(&p); p.cargo("check").run(); } #[cargo_test] fn inactive_target_optional() { // Activating optional [target] dependencies for inactivate target. 
Package::new("common", "1.0.0") .feature("f1", &[]) .feature("f2", &[]) .feature("f3", &[]) .feature("f4", &[]) .file( "src/lib.rs", r#" pub fn f() { if cfg!(feature="f1") { println!("f1"); } if cfg!(feature="f2") { println!("f2"); } if cfg!(feature="f3") { println!("f3"); } if cfg!(feature="f4") { println!("f4"); } } "#, ) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [dependencies] common = "1.0" [target.'cfg(whatever)'.dependencies] dep1 = {path='dep1', optional=true} dep2 = {path='dep2', optional=true, features=["f3"]} common = {version="1.0", optional=true, features=["f4"]} [features] foo1 = ["dep1/f2"] foo2 = ["dep2"] "#, ) .file( "src/main.rs", r#" fn main() { if cfg!(feature="foo1") { println!("foo1"); } if cfg!(feature="foo2") { println!("foo2"); } if cfg!(feature="dep1") { println!("dep1"); } if cfg!(feature="dep2") { println!("dep2"); } if cfg!(feature="common") { println!("common"); } common::f(); } "#, ) .file( "dep1/Cargo.toml", r#" [package] name = "dep1" version = "0.1.0" [dependencies] common = {version="1.0", features=["f1"]} [features] f2 = ["common/f2"] "#, ) .file( "dep1/src/lib.rs", r#"compile_error!("dep1 should not build");"#, ) .file( "dep2/Cargo.toml", r#" [package] name = "dep2" version = "0.1.0" [dependencies] common = "1.0" [features] f3 = ["common/f3"] "#, ) .file( "dep2/src/lib.rs", r#"compile_error!("dep2 should not build");"#, ) .build(); p.cargo("run --all-features") .with_stdout("foo1\nfoo2\ndep1\ndep2\ncommon\nf1\nf2\nf3\nf4\n") .run(); p.cargo("run --features dep1") .with_stdout("dep1\nf1\n") .run(); p.cargo("run --features foo1") .with_stdout("foo1\ndep1\nf1\nf2\n") .run(); p.cargo("run --features dep2") .with_stdout("dep2\nf3\n") .run(); p.cargo("run --features common") .with_stdout("common\nf4\n") .run(); switch_to_resolver_2(&p); p.cargo("run --all-features") .with_stdout("foo1\nfoo2\ndep1\ndep2\ncommon") .run(); p.cargo("run --features 
dep1").with_stdout("dep1\n").run(); p.cargo("run --features foo1").with_stdout("foo1\n").run(); p.cargo("run --features dep2").with_stdout("dep2\n").run(); p.cargo("run --features common").with_stdout("common").run(); } #[cargo_test] fn itarget_proc_macro() { // itarget inside a proc-macro while cross-compiling if cross_compile::disabled() { return; } Package::new("hostdep", "1.0.0").publish(); Package::new("pm", "1.0.0") .proc_macro(true) .target_dep("hostdep", "1.0", rustc_host()) .file("src/lib.rs", "extern crate hostdep;") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] pm = "1.0" "#, ) .file("src/lib.rs", "") .build(); // Old behavior p.cargo("check").run(); p.cargo("check --target").arg(alternate()).run(); // New behavior switch_to_resolver_2(&p); p.cargo("check").run(); p.cargo("check --target").arg(alternate()).run(); // For good measure, just make sure things don't break. p.cargo("check --target").arg(alternate()).run(); } #[cargo_test] fn decouple_host_deps() { // Basic test for `host_dep` decouple. Package::new("common", "1.0.0") .feature("f1", &[]) .file( "src/lib.rs", r#" #[cfg(feature = "f1")] pub fn foo() {} #[cfg(not(feature = "f1"))] pub fn bar() {} "#, ) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [build-dependencies] common = {version="1.0", features=["f1"]} [dependencies] common = "1.0" "#, ) .file( "build.rs", r#" use common::foo; fn main() {} "#, ) .file("src/lib.rs", "use common::bar;") .build(); p.cargo("check") .with_status(101) .with_stderr_contains("[..]unresolved import `common::bar`[..]") .run(); switch_to_resolver_2(&p); p.cargo("check").run(); } #[cargo_test] fn decouple_host_deps_nested() { // `host_dep` decouple of transitive dependencies. 
Package::new("common", "1.0.0") .feature("f1", &[]) .file( "src/lib.rs", r#" #[cfg(feature = "f1")] pub fn foo() {} #[cfg(not(feature = "f1"))] pub fn bar() {} "#, ) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [build-dependencies] bdep = {path="bdep"} [dependencies] common = "1.0" "#, ) .file( "build.rs", r#" use bdep::foo; fn main() {} "#, ) .file("src/lib.rs", "use common::bar;") .file( "bdep/Cargo.toml", r#" [package] name = "bdep" version = "0.1.0" edition = "2018" [dependencies] common = {version="1.0", features=["f1"]} "#, ) .file("bdep/src/lib.rs", "pub use common::foo;") .build(); p.cargo("check") .with_status(101) .with_stderr_contains("[..]unresolved import `common::bar`[..]") .run(); switch_to_resolver_2(&p); p.cargo("check").run(); } #[cargo_test] fn decouple_dev_deps() { // Basic test for `dev_dep` decouple. Package::new("common", "1.0.0") .feature("f1", &[]) .feature("f2", &[]) .file( "src/lib.rs", r#" // const ensures it uses the correct dependency at *build time* // compared to *link time*. 
#[cfg(all(feature="f1", not(feature="f2")))] pub const X: u32 = 1; #[cfg(all(feature="f1", feature="f2"))] pub const X: u32 = 3; pub fn foo() -> u32 { let mut res = 0; if cfg!(feature = "f1") { res |= 1; } if cfg!(feature = "f2") { res |= 2; } res } "#, ) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [dependencies] common = {version="1.0", features=["f1"]} [dev-dependencies] common = {version="1.0", features=["f2"]} "#, ) .file( "src/main.rs", r#" fn main() { let expected: u32 = std::env::args().skip(1).next().unwrap().parse().unwrap(); assert_eq!(foo::foo(), expected); assert_eq!(foo::build_time(), expected); assert_eq!(common::foo(), expected); assert_eq!(common::X, expected); } #[test] fn test_bin() { assert_eq!(foo::foo(), 3); assert_eq!(common::foo(), 3); assert_eq!(common::X, 3); assert_eq!(foo::build_time(), 3); } "#, ) .file( "src/lib.rs", r#" pub fn foo() -> u32 { common::foo() } pub fn build_time() -> u32 { common::X } #[test] fn test_lib() { assert_eq!(foo(), 3); assert_eq!(common::foo(), 3); assert_eq!(common::X, 3); } "#, ) .file( "tests/t1.rs", r#" #[test] fn test_t1() { assert_eq!(foo::foo(), 3); assert_eq!(common::foo(), 3); assert_eq!(common::X, 3); assert_eq!(foo::build_time(), 3); } #[test] fn test_main() { // Features are unified for main when run with `cargo test`, // even with the new resolver. let s = std::process::Command::new("target/debug/foo") .arg("3") .status().unwrap(); assert!(s.success()); } "#, ) .build(); // Old behavior p.cargo("run 3").run(); p.cargo("test").run(); // New behavior switch_to_resolver_2(&p); p.cargo("run 1").run(); p.cargo("test").run(); } #[cargo_test] fn build_script_runtime_features() { // Check that the CARGO_FEATURE_* environment variable is set correctly. // // This has a common dependency between build/normal/dev-deps, and it // queries which features it was built with in different circumstances. 
Package::new("common", "1.0.0") .feature("normal", &[]) .feature("dev", &[]) .feature("build", &[]) .file( "build.rs", r#" fn is_set(name: &str) -> bool { std::env::var(name) == Ok("1".to_string()) } fn main() { let mut res = 0; if is_set("CARGO_FEATURE_NORMAL") { res |= 1; } if is_set("CARGO_FEATURE_DEV") { res |= 2; } if is_set("CARGO_FEATURE_BUILD") { res |= 4; } println!("cargo:rustc-cfg=RunCustomBuild=\"{}\"", res); let mut res = 0; if cfg!(feature = "normal") { res |= 1; } if cfg!(feature = "dev") { res |= 2; } if cfg!(feature = "build") { res |= 4; } println!("cargo:rustc-cfg=CustomBuild=\"{}\"", res); } "#, ) .file( "src/lib.rs", r#" pub fn foo() -> u32 { let mut res = 0; if cfg!(feature = "normal") { res |= 1; } if cfg!(feature = "dev") { res |= 2; } if cfg!(feature = "build") { res |= 4; } res } pub fn build_time() -> u32 { #[cfg(RunCustomBuild="1")] return 1; #[cfg(RunCustomBuild="3")] return 3; #[cfg(RunCustomBuild="4")] return 4; #[cfg(RunCustomBuild="5")] return 5; #[cfg(RunCustomBuild="7")] return 7; } "#, ) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [build-dependencies] common = {version="1.0", features=["build"]} [dependencies] common = {version="1.0", features=["normal"]} [dev-dependencies] common = {version="1.0", features=["dev"]} "#, ) .file( "build.rs", r#" fn main() { assert_eq!(common::foo(), common::build_time()); println!("cargo:rustc-cfg=from_build=\"{}\"", common::foo()); } "#, ) .file( "src/lib.rs", r#" pub fn foo() -> u32 { common::foo() } pub fn build_time() -> u32 { common::build_time() } #[test] fn test_lib() { assert_eq!(common::foo(), common::build_time()); assert_eq!(common::foo(), std::env::var("CARGO_FEATURE_EXPECT").unwrap().parse().unwrap()); } "#, ) .file( "src/main.rs", r#" fn main() { assert_eq!(common::foo(), common::build_time()); assert_eq!(common::foo(), std::env::var("CARGO_FEATURE_EXPECT").unwrap().parse().unwrap()); } #[test] fn test_bin() { 
assert_eq!(common::foo(), common::build_time()); assert_eq!(common::foo(), std::env::var("CARGO_FEATURE_EXPECT").unwrap().parse().unwrap()); } "#, ) .file( "tests/t1.rs", r#" #[test] fn test_t1() { assert_eq!(common::foo(), common::build_time()); assert_eq!(common::foo(), std::env::var("CARGO_FEATURE_EXPECT").unwrap().parse().unwrap()); } #[test] fn test_main() { // Features are unified for main when run with `cargo test`, // even with the new resolver. let s = std::process::Command::new("target/debug/foo") .status().unwrap(); assert!(s.success()); } "#, ) .build(); // Old way, unifies all 3. p.cargo("run").env("CARGO_FEATURE_EXPECT", "7").run(); p.cargo("test").env("CARGO_FEATURE_EXPECT", "7").run(); // New behavior. switch_to_resolver_2(&p); // normal + build unify p.cargo("run").env("CARGO_FEATURE_EXPECT", "1").run(); // dev_deps are still unified with `cargo test` p.cargo("test").env("CARGO_FEATURE_EXPECT", "3").run(); } #[cargo_test] fn cyclical_dev_dep() { // Check how a cyclical dev-dependency will work. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [features] dev = [] [dev-dependencies] foo = { path = '.', features = ["dev"] } "#, ) .file( "src/lib.rs", r#" pub fn assert_dev(enabled: bool) { assert_eq!(enabled, cfg!(feature="dev")); } #[test] fn test_in_lib() { assert_dev(true); } "#, ) .file( "src/main.rs", r#" fn main() { let expected: bool = std::env::args().skip(1).next().unwrap().parse().unwrap(); foo::assert_dev(expected); } "#, ) .file( "tests/t1.rs", r#" #[test] fn integration_links() { foo::assert_dev(true); // The lib linked with main.rs will also be unified. let s = std::process::Command::new("target/debug/foo") .arg("true") .status().unwrap(); assert!(s.success()); } "#, ) .build(); // Old way unifies features. p.cargo("run true").run(); // dev feature should always be enabled in tests. p.cargo("test").run(); // New behavior. switch_to_resolver_2(&p); // Should decouple main. 
p.cargo("run false").run(); // And this should be no different. p.cargo("test").run(); } #[cargo_test] fn all_feature_opts() { // All feature options at once. Package::new("common", "1.0.0") .feature("normal", &[]) .feature("build", &[]) .feature("dev", &[]) .feature("itarget", &[]) .file( "src/lib.rs", r#" pub fn feats() -> u32 { let mut res = 0; if cfg!(feature="normal") { res |= 1; } if cfg!(feature="build") { res |= 2; } if cfg!(feature="dev") { res |= 4; } if cfg!(feature="itarget") { res |= 8; } res } "#, ) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [dependencies] common = {version = "1.0", features=["normal"]} [dev-dependencies] common = {version = "1.0", features=["dev"]} [build-dependencies] common = {version = "1.0", features=["build"]} [target.'cfg(whatever)'.dependencies] common = {version = "1.0", features=["itarget"]} "#, ) .file( "src/main.rs", r#" fn main() { expect(); } fn expect() { let expected: u32 = std::env::var("EXPECTED_FEATS").unwrap().parse().unwrap(); assert_eq!(expected, common::feats()); } #[test] fn from_test() { expect(); } "#, ) .build(); p.cargo("run").env("EXPECTED_FEATS", "15").run(); p.cargo("test").env("EXPECTED_FEATS", "15").run(); // New behavior. switch_to_resolver_2(&p); // Only normal feature. p.cargo("run").env("EXPECTED_FEATS", "1").run(); // only normal+dev p.cargo("test").env("EXPECTED_FEATS", "5").run(); } #[cargo_test] fn required_features_host_dep() { // Check that required-features handles build-dependencies correctly. 
let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [[bin]] name = "x" required-features = ["bdep/f1"] [build-dependencies] bdep = {path="bdep"} "#, ) .file("build.rs", "fn main() {}") .file( "src/bin/x.rs", r#" fn main() {} "#, ) .file( "bdep/Cargo.toml", r#" [package] name = "bdep" version = "0.1.0" [features] f1 = [] "#, ) .file("bdep/src/lib.rs", "") .build(); p.cargo("run") .with_status(101) .with_stderr( "\ [ERROR] target `x` in package `foo` requires the features: `bdep/f1` Consider enabling them by passing, e.g., `--features=\"bdep/f1\"` ", ) .run(); // New behavior. switch_to_resolver_2(&p); p.cargo("run --features bdep/f1").run(); } #[cargo_test] fn disabled_shared_host_dep() { // Check for situation where an optional dep of a shared dep is enabled in // a normal dependency, but disabled in an optional one. The unit tree is: // foo // β”œβ”€β”€ foo build.rs // | └── common (BUILD dependency, NO FEATURES) // └── common (Normal dependency, default features) // └── somedep Package::new("somedep", "1.0.0") .file( "src/lib.rs", r#" pub fn f() { println!("hello from somedep"); } "#, ) .publish(); Package::new("common", "1.0.0") .feature("default", &["somedep"]) .add_dep(Dependency::new("somedep", "1.0").optional(true)) .file( "src/lib.rs", r#" pub fn check_somedep() -> bool { #[cfg(feature="somedep")] { extern crate somedep; somedep::f(); true } #[cfg(not(feature="somedep"))] { println!("no somedep"); false } } "#, ) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.0.0" edition = "2018" resolver = "2" [dependencies] common = "1.0" [build-dependencies] common = {version = "1.0", default-features = false} "#, ) .file( "src/main.rs", "fn main() { assert!(common::check_somedep()); }", ) .file( "build.rs", "fn main() { assert!(!common::check_somedep()); }", ) .build(); p.cargo("run -v").with_stdout("hello from somedep").run(); } #[cargo_test] fn 
required_features_inactive_dep() { // required-features with an inactivated dep. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" resolver = "2" [target.'cfg(whatever)'.dependencies] bar = {path="bar"} [[bin]] name = "foo" required-features = ["feat1"] [features] feat1 = [] "#, ) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "") .build(); p.cargo("check").with_stderr("[FINISHED] [..]").run(); p.cargo("check --features=feat1") .with_stderr("[CHECKING] foo[..]\n[FINISHED] [..]") .run(); } #[cargo_test] fn decouple_proc_macro() { // proc macro features are not shared Package::new("common", "1.0.0") .feature("somefeat", &[]) .file( "src/lib.rs", r#" pub const fn foo() -> bool { cfg!(feature="somefeat") } #[cfg(feature="somefeat")] pub const FEAT_ONLY_CONST: bool = true; "#, ) .publish(); Package::new("pm", "1.0.0") .proc_macro(true) .feature_dep("common", "1.0", &["somefeat"]) .file( "src/lib.rs", r#" extern crate proc_macro; extern crate common; #[proc_macro] pub fn foo(input: proc_macro::TokenStream) -> proc_macro::TokenStream { assert!(common::foo()); "".parse().unwrap() } "#, ) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.0.0" edition = "2018" [dependencies] pm = "1.0" common = "1.0" "#, ) .file( "src/lib.rs", r#" //! Test with docs. //! //! ```rust //! pm::foo!{} //! fn main() { //! let expected = std::env::var_os("TEST_EXPECTS_ENABLED").is_some(); //! assert_eq!(expected, common::foo(), "common is wrong"); //! } //! ``` "#, ) .file( "src/main.rs", r#" pm::foo!{} fn main() { println!("it is {}", common::foo()); } "#, ) .build(); p.cargo("run") .env("TEST_EXPECTS_ENABLED", "1") .with_stdout("it is true") .run(); // Make sure the test is fallible. 
p.cargo("test --doc") .with_status(101) .with_stdout_contains("[..]common is wrong[..]") .run(); p.cargo("test --doc").env("TEST_EXPECTS_ENABLED", "1").run(); p.cargo("doc").run(); assert!(p .build_dir() .join("doc/common/constant.FEAT_ONLY_CONST.html") .exists()); // cargo doc should clean in-between runs, but it doesn't, and leaves stale files. // https://github.com/rust-lang/cargo/issues/6783 (same for removed items) p.build_dir().join("doc").rm_rf(); // New behavior. switch_to_resolver_2(&p); p.cargo("run").with_stdout("it is false").run(); p.cargo("test --doc").run(); p.cargo("doc").run(); assert!(!p .build_dir() .join("doc/common/constant.FEAT_ONLY_CONST.html") .exists()); } #[cargo_test] fn proc_macro_ws() { // Checks for bug with proc-macro in a workspace with dependency (shouldn't panic). let p = project() .file( "Cargo.toml", r#" [workspace] members = ["foo", "pm"] resolver = "2" "#, ) .file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [features] feat1 = [] "#, ) .file("foo/src/lib.rs", "") .file( "pm/Cargo.toml", r#" [package] name = "pm" version = "0.1.0" [lib] proc-macro = true [dependencies] foo = { path = "../foo", features=["feat1"] } "#, ) .file("pm/src/lib.rs", "") .build(); p.cargo("check -p pm -v") .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]--cfg[..]feat1[..]") .run(); // This may be surprising that `foo` doesn't get built separately. It is // because pm might have other units (binaries, tests, etc.), and so the // feature resolver must assume that normal deps get unified with it. This // is related to the bigger issue where the features selected in a // workspace depend on which packages are selected. p.cargo("check --workspace -v") .with_stderr( "\ [FRESH] foo v0.1.0 [..] [FRESH] pm v0.1.0 [..] [FINISHED] dev [..] ", ) .run(); // Selecting just foo will build without unification. 
p.cargo("check -p foo -v") // Make sure `foo` is built without feat1 .with_stderr_line_without(&["[RUNNING] `rustc --crate-name foo"], &["--cfg[..]feat1"]) .run(); } #[cargo_test] fn has_dev_dep_for_test() { // Check for a bug where the decision on whether or not "dev dependencies" // should be used did not consider `check --profile=test`. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dev-dependencies] dep = { path = 'dep', features = ['f1'] } "#, ) .file( "src/lib.rs", r#" #[test] fn t1() { dep::f(); } "#, ) .file( "dep/Cargo.toml", r#" [package] name = "dep" version = "0.1.0" [features] f1 = [] "#, ) .file( "dep/src/lib.rs", r#" #[cfg(feature = "f1")] pub fn f() {} "#, ) .build(); p.cargo("check -v") .with_stderr( "\ [CHECKING] foo v0.1.0 [..] [RUNNING] `rustc --crate-name foo [..] [FINISHED] [..] ", ) .run(); p.cargo("check -v --profile=test") .with_stderr( "\ [CHECKING] dep v0.1.0 [..] [RUNNING] `rustc --crate-name dep [..] [CHECKING] foo v0.1.0 [..] [RUNNING] `rustc --crate-name foo [..] [FINISHED] [..] ", ) .run(); // New resolver should not be any different. switch_to_resolver_2(&p); p.cargo("check -v --profile=test") .with_stderr( "\ [FRESH] dep [..] [FRESH] foo [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn build_dep_activated() { // Build dependencies always match the host for [target.*.build-dependencies]. if cross_compile::disabled() { return; } Package::new("somedep", "1.0.0") .file("src/lib.rs", "") .publish(); Package::new("targetdep", "1.0.0").publish(); Package::new("hostdep", "1.0.0") // Check that "for_host" is sticky. .target_dep("somedep", "1.0", rustc_host()) .feature("feat1", &[]) .file( "src/lib.rs", r#" extern crate somedep; #[cfg(not(feature="feat1"))] compile_error!{"feat1 missing"} "#, ) .publish(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.1.0" # This should never be selected. 
[target.'{}'.build-dependencies] targetdep = "1.0" [target.'{}'.build-dependencies] hostdep = {{version="1.0", features=["feat1"]}} "#, alternate(), rustc_host() ), ) .file("src/lib.rs", "") .file("build.rs", "fn main() {}") .build(); p.cargo("check").run(); p.cargo("check --target").arg(alternate()).run(); // New behavior. switch_to_resolver_2(&p); p.cargo("check").run(); p.cargo("check --target").arg(alternate()).run(); } #[cargo_test] fn resolver_bad_setting() { // Unknown setting in `resolver` let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" resolver = "foo" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[..]/foo/Cargo.toml` Caused by: `resolver` setting `foo` is not valid, valid options are \"1\" or \"2\" ", ) .run(); } #[cargo_test] fn resolver_original() { // resolver="1" uses old unification behavior. Package::new("common", "1.0.0") .feature("f1", &[]) .file( "src/lib.rs", r#" #[cfg(feature = "f1")] compile_error!("f1 should not activate"); "#, ) .publish(); Package::new("bar", "1.0.0") .add_dep( Dependency::new("common", "1.0") .target("cfg(whatever)") .enable_features(&["f1"]), ) .publish(); let manifest = |resolver| { format!( r#" [package] name = "foo" version = "0.1.0" resolver = "{}" [dependencies] common = "1.0" bar = "1.0" "#, resolver ) }; let p = project() .file("Cargo.toml", &manifest("1")) .file("src/lib.rs", "") .build(); p.cargo("check") .with_status(101) .with_stderr_contains("[..]f1 should not activate[..]") .run(); p.change_file("Cargo.toml", &manifest("2")); p.cargo("check").run(); } #[cargo_test] fn resolver_not_both() { // Can't specify resolver in both workspace and package. 
let p = project() .file( "Cargo.toml", r#" [workspace] resolver = "2" [package] name = "foo" version = "0.1.0" resolver = "2" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[..]/foo/Cargo.toml` Caused by: cannot specify `resolver` field in both `[workspace]` and `[package]` ", ) .run(); } #[cargo_test] fn resolver_ws_member() { // Can't specify `resolver` in a ws member. let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a"] "#, ) .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.1.0" resolver = "2" "#, ) .file("a/src/lib.rs", "") .build(); p.cargo("check") .with_stderr( "\ warning: resolver for the non root package will be ignored, specify resolver at the workspace root: package: [..]/foo/a/Cargo.toml workspace: [..]/foo/Cargo.toml [CHECKING] a v0.1.0 [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn resolver_ws_root_and_member() { // Check when specified in both ws root and member. let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a"] resolver = "2" "#, ) .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.1.0" resolver = "2" "#, ) .file("a/src/lib.rs", "") .build(); // Ignores if they are the same. p.cargo("check") .with_stderr( "\ [CHECKING] a v0.1.0 [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn resolver_enables_new_features() { // resolver="2" enables all the things. 
Package::new("common", "1.0.0") .feature("normal", &[]) .feature("build", &[]) .feature("dev", &[]) .feature("itarget", &[]) .file( "src/lib.rs", r#" pub fn feats() -> u32 { let mut res = 0; if cfg!(feature="normal") { res |= 1; } if cfg!(feature="build") { res |= 2; } if cfg!(feature="dev") { res |= 4; } if cfg!(feature="itarget") { res |= 8; } res } "#, ) .publish(); let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a", "b"] resolver = "2" "#, ) .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.1.0" edition = "2018" [dependencies] common = {version = "1.0", features=["normal"]} [dev-dependencies] common = {version = "1.0", features=["dev"]} [build-dependencies] common = {version = "1.0", features=["build"]} [target.'cfg(whatever)'.dependencies] common = {version = "1.0", features=["itarget"]} "#, ) .file( "a/src/main.rs", r#" fn main() { expect(); } fn expect() { let expected: u32 = std::env::var("EXPECTED_FEATS").unwrap().parse().unwrap(); assert_eq!(expected, common::feats()); } #[test] fn from_test() { expect(); } "#, ) .file( "b/Cargo.toml", r#" [package] name = "b" version = "0.1.0" [features] ping = [] "#, ) .file( "b/src/main.rs", r#" fn main() { if cfg!(feature="ping") { println!("pong"); } } "#, ) .build(); // Only normal. p.cargo("run --bin a") .env("EXPECTED_FEATS", "1") .with_stderr( "\ [UPDATING] [..] [DOWNLOADING] crates ... [DOWNLOADED] common [..] [COMPILING] common v1.0.0 [COMPILING] a v0.1.0 [..] [FINISHED] [..] [RUNNING] `target/debug/a[EXE]` ", ) .run(); // only normal+dev p.cargo("test").cwd("a").env("EXPECTED_FEATS", "5").run(); // Can specify features of packages from a different directory. p.cargo("run -p b --features=ping") .cwd("a") .with_stdout("pong") .run(); } #[cargo_test] fn install_resolve_behavior() { // install honors the resolver behavior. 
Package::new("common", "1.0.0") .feature("f1", &[]) .file( "src/lib.rs", r#" #[cfg(feature = "f1")] compile_error!("f1 should not activate"); "#, ) .publish(); Package::new("bar", "1.0.0").dep("common", "1.0").publish(); Package::new("foo", "1.0.0") .file( "Cargo.toml", r#" [package] name = "foo" version = "1.0.0" resolver = "2" [target.'cfg(whatever)'.dependencies] common = {version="1.0", features=["f1"]} [dependencies] bar = "1.0" "#, ) .file("src/main.rs", "fn main() {}") .publish(); cargo_process("install foo").run(); } #[cargo_test] fn package_includes_resolve_behavior() { // `cargo package` will inherit the correct resolve behavior. let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a"] resolver = "2" "#, ) .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.1.0" authors = ["Zzz"] description = "foo" license = "MIT" homepage = "https://example.com/" "#, ) .file("a/src/lib.rs", "") .build(); p.cargo("package").cwd("a").run(); let rewritten_toml = format!( r#"{} [package] name = "a" version = "0.1.0" authors = ["Zzz"] description = "foo" homepage = "https://example.com/" license = "MIT" resolver = "2" "#, cargo::core::package::MANIFEST_PREAMBLE ); let f = File::open(&p.root().join("target/package/a-0.1.0.crate")).unwrap(); validate_crate_contents( f, "a-0.1.0.crate", &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"], &[("Cargo.toml", &rewritten_toml)], ); } #[cargo_test] fn tree_all() { // `cargo tree` with the new feature resolver. Package::new("log", "0.4.8").feature("serde", &[]).publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" resolver = "2" [target.'cfg(whatever)'.dependencies] log = {version="*", features=["serde"]} "#, ) .file("src/lib.rs", "") .build(); p.cargo("tree --target=all") .with_stdout( "\ foo v0.1.0 ([..]/foo) └── log v0.4.8 ", ) .run(); } #[cargo_test] fn shared_dep_same_but_dependencies() { // Checks for a bug of nondeterminism. 
This scenario creates a shared // dependency `dep` which needs to be built twice (once as normal, and // once as a build dep). However, in both cases the flags to `dep` are the // same, the only difference is what it links to. The normal dependency // should link to `subdep` with the feature disabled, and the build // dependency should link to it with it enabled. Crucially, the `--target` // flag should not be specified, otherwise Unit.kind would be different // and avoid the collision, and this bug won't manifest. let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bin1", "bin2"] resolver = "2" "#, ) .file( "bin1/Cargo.toml", r#" [package] name = "bin1" version = "0.1.0" [dependencies] dep = { path = "../dep" } "#, ) .file("bin1/src/main.rs", "fn main() { dep::feat_func(); }") .file( "bin2/Cargo.toml", r#" [package] name = "bin2" version = "0.1.0" [build-dependencies] dep = { path = "../dep" } subdep = { path = "../subdep", features = ["feat"] } "#, ) .file("bin2/build.rs", "fn main() { dep::feat_func(); }") .file("bin2/src/main.rs", "fn main() {}") .file( "dep/Cargo.toml", r#" [package] name = "dep" version = "0.1.0" [dependencies] subdep = { path = "../subdep" } "#, ) .file( "dep/src/lib.rs", "pub fn feat_func() { subdep::feat_func(); }", ) .file( "subdep/Cargo.toml", r#" [package] name = "subdep" version = "0.1.0" [features] feat = [] "#, ) .file( "subdep/src/lib.rs", r#" pub fn feat_func() { #[cfg(feature = "feat")] println!("cargo:warning=feat: enabled"); #[cfg(not(feature = "feat"))] println!("cargo:warning=feat: not enabled"); } "#, ) .build(); p.cargo("build --bin bin1 --bin bin2") // unordered because bin1 and bin2 build at the same time .with_stderr_unordered( "\ [COMPILING] subdep [..] [COMPILING] dep [..] [COMPILING] bin2 [..] [COMPILING] bin1 [..] warning: feat: enabled [FINISHED] [..] ", ) .run(); p.process(p.bin("bin1")) .with_stdout("cargo:warning=feat: not enabled") .run(); // Make sure everything stays cached. 
p.cargo("build -v --bin bin1 --bin bin2") .with_stderr_unordered( "\ [FRESH] subdep [..] [FRESH] dep [..] [FRESH] bin1 [..] warning: feat: enabled [FRESH] bin2 [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn test_proc_macro() { // Running `cargo test` on a proc-macro, with a shared dependency that has // different features. // // There was a bug where `shared` was built twice (once with feature "B" // and once without), and both copies linked into the unit test. This // would cause a type failure when used in an intermediate dependency // (the-macro-support). let p = project() .file( "Cargo.toml", r#" [package] name = "runtime" version = "0.1.0" resolver = "2" [dependencies] the-macro = { path = "the-macro", features = ['a'] } [build-dependencies] shared = { path = "shared", features = ['b'] } "#, ) .file("src/lib.rs", "") .file( "the-macro/Cargo.toml", r#" [package] name = "the-macro" version = "0.1.0" [lib] proc-macro = true test = false [dependencies] the-macro-support = { path = "../the-macro-support" } shared = { path = "../shared" } [dev-dependencies] runtime = { path = ".." } [features] a = [] "#, ) .file( "the-macro/src/lib.rs", " fn _test() { the_macro_support::foo(shared::Foo); } ", ) .file( "the-macro-support/Cargo.toml", r#" [package] name = "the-macro-support" version = "0.1.0" [dependencies] shared = { path = "../shared" } "#, ) .file( "the-macro-support/src/lib.rs", " pub fn foo(_: shared::Foo) {} ", ) .file( "shared/Cargo.toml", r#" [package] name = "shared" version = "0.1.0" [features] b = [] "#, ) .file("shared/src/lib.rs", "pub struct Foo;") .build(); p.cargo("test --manifest-path the-macro/Cargo.toml").run(); } #[cargo_test] fn doc_optional() { // Checks for a bug where `cargo doc` was failing with an inactive target // that enables a shared optional dependency. 
Package::new("spin", "1.0.0").publish(); Package::new("bar", "1.0.0") .add_dep(Dependency::new("spin", "1.0").optional(true)) .publish(); // The enabler package enables the `spin` feature, which we don't want. Package::new("enabler", "1.0.0") .feature_dep("bar", "1.0", &["spin"]) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" resolver = "2" [target.'cfg(whatever)'.dependencies] enabler = "1.0" [dependencies] bar = "1.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("doc") .with_stderr_unordered( "\ [UPDATING] [..] [DOWNLOADING] crates ... [DOWNLOADED] spin v1.0.0 [..] [DOWNLOADED] bar v1.0.0 [..] [DOCUMENTING] bar v1.0.0 [CHECKING] bar v1.0.0 [DOCUMENTING] foo v0.1.0 [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn minimal_download() { // Various checks that it only downloads the minimum set of dependencies // needed in various situations. // // This checks several permutations of the different // host_dep/dev_dep/itarget settings. These 3 are planned to be stabilized // together, so there isn't much need to be concerned about how the behave // independently. However, there are some cases where they do behave // independently. Specifically: // // * `cargo test` forces dev_dep decoupling to be disabled. // * `cargo tree --target=all` forces ignore_inactive_targets off and decouple_dev_deps off. // * `cargo tree --target=all -e normal` forces ignore_inactive_targets off. // // However, `cargo tree` is a little weird because it downloads everything // anyways. // // So to summarize the different permutations: // // dev_dep | host_dep | itarget | Notes // --------|----------|---------|---------------------------- // | | | -Zfeatures=compare (new resolver should behave same as old) // | | βœ“ | This scenario should not happen. // | βœ“ | | `cargo tree --target=all -Zfeatures=all`† // | βœ“ | βœ“ | `cargo test` // βœ“ | | | This scenario should not happen. // βœ“ | | βœ“ | This scenario should not happen. 
// βœ“ | βœ“ | | `cargo tree --target=all -e normal -Z features=all`† // βœ“ | βœ“ | βœ“ | A normal build. // // † β€” However, `cargo tree` downloads everything. Package::new("normal", "1.0.0").publish(); Package::new("normal_pm", "1.0.0").publish(); Package::new("normal_opt", "1.0.0").publish(); Package::new("dev_dep", "1.0.0").publish(); Package::new("dev_dep_pm", "1.0.0").publish(); Package::new("build_dep", "1.0.0").publish(); Package::new("build_dep_pm", "1.0.0").publish(); Package::new("build_dep_opt", "1.0.0").publish(); Package::new("itarget_normal", "1.0.0").publish(); Package::new("itarget_normal_pm", "1.0.0").publish(); Package::new("itarget_dev_dep", "1.0.0").publish(); Package::new("itarget_dev_dep_pm", "1.0.0").publish(); Package::new("itarget_build_dep", "1.0.0").publish(); Package::new("itarget_build_dep_pm", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] normal = "1.0" normal_pm = "1.0" normal_opt = { version = "1.0", optional = true } [dev-dependencies] dev_dep = "1.0" dev_dep_pm = "1.0" [build-dependencies] build_dep = "1.0" build_dep_pm = "1.0" build_dep_opt = { version = "1.0", optional = true } [target.'cfg(whatever)'.dependencies] itarget_normal = "1.0" itarget_normal_pm = "1.0" [target.'cfg(whatever)'.dev-dependencies] itarget_dev_dep = "1.0" itarget_dev_dep_pm = "1.0" [target.'cfg(whatever)'.build-dependencies] itarget_build_dep = "1.0" itarget_build_dep_pm = "1.0" "#, ) .file("src/lib.rs", "") .file("build.rs", "fn main() {}") .build(); let clear = || { cargo_home().join("registry/cache").rm_rf(); cargo_home().join("registry/src").rm_rf(); p.build_dir().rm_rf(); }; // none // Should be the same as `-Zfeatures=all` p.cargo("check -Zfeatures=compare") .masquerade_as_nightly_cargo(&["features=compare"]) .with_stderr_unordered( "\ [UPDATING] [..] [DOWNLOADING] crates ... [DOWNLOADED] normal_pm v1.0.0 [..] [DOWNLOADED] normal v1.0.0 [..] 
[DOWNLOADED] build_dep_pm v1.0.0 [..] [DOWNLOADED] build_dep v1.0.0 [..] [COMPILING] build_dep v1.0.0 [COMPILING] build_dep_pm v1.0.0 [CHECKING] normal_pm v1.0.0 [CHECKING] normal v1.0.0 [COMPILING] foo v0.1.0 [..] [FINISHED] [..] ", ) .run(); clear(); // New behavior switch_to_resolver_2(&p); // all p.cargo("check") .with_stderr_unordered( "\ [DOWNLOADING] crates ... [DOWNLOADED] normal_pm v1.0.0 [..] [DOWNLOADED] normal v1.0.0 [..] [DOWNLOADED] build_dep_pm v1.0.0 [..] [DOWNLOADED] build_dep v1.0.0 [..] [COMPILING] build_dep v1.0.0 [COMPILING] build_dep_pm v1.0.0 [CHECKING] normal v1.0.0 [CHECKING] normal_pm v1.0.0 [COMPILING] foo v0.1.0 [..] [FINISHED] [..] ", ) .run(); clear(); // This disables decouple_dev_deps. p.cargo("test --no-run") .with_stderr_unordered( "\ [DOWNLOADING] crates ... [DOWNLOADED] normal_pm v1.0.0 [..] [DOWNLOADED] normal v1.0.0 [..] [DOWNLOADED] dev_dep_pm v1.0.0 [..] [DOWNLOADED] dev_dep v1.0.0 [..] [DOWNLOADED] build_dep_pm v1.0.0 [..] [DOWNLOADED] build_dep v1.0.0 [..] [COMPILING] build_dep v1.0.0 [COMPILING] build_dep_pm v1.0.0 [COMPILING] normal_pm v1.0.0 [COMPILING] normal v1.0.0 [COMPILING] dev_dep_pm v1.0.0 [COMPILING] dev_dep v1.0.0 [COMPILING] foo v0.1.0 [..] [FINISHED] [..] [EXECUTABLE] unittests src/lib.rs (target/debug/deps/foo-[..][EXE]) ", ) .run(); clear(); // This disables itarget, but leaves decouple_dev_deps enabled. p.cargo("tree -e normal --target=all") .with_stderr_unordered( "\ [DOWNLOADING] crates ... [DOWNLOADED] normal v1.0.0 [..] [DOWNLOADED] normal_pm v1.0.0 [..] [DOWNLOADED] build_dep v1.0.0 [..] [DOWNLOADED] build_dep_pm v1.0.0 [..] [DOWNLOADED] itarget_normal v1.0.0 [..] [DOWNLOADED] itarget_normal_pm v1.0.0 [..] [DOWNLOADED] itarget_build_dep v1.0.0 [..] [DOWNLOADED] itarget_build_dep_pm v1.0.0 [..] 
", ) .with_stdout( "\ foo v0.1.0 ([ROOT]/foo) β”œβ”€β”€ itarget_normal v1.0.0 β”œβ”€β”€ itarget_normal_pm v1.0.0 β”œβ”€β”€ normal v1.0.0 └── normal_pm v1.0.0 ", ) .run(); clear(); // This disables itarget and decouple_dev_deps. p.cargo("tree --target=all") .with_stderr_unordered( "\ [DOWNLOADING] crates ... [DOWNLOADED] normal_pm v1.0.0 [..] [DOWNLOADED] normal v1.0.0 [..] [DOWNLOADED] itarget_normal_pm v1.0.0 [..] [DOWNLOADED] itarget_normal v1.0.0 [..] [DOWNLOADED] itarget_dev_dep_pm v1.0.0 [..] [DOWNLOADED] itarget_dev_dep v1.0.0 [..] [DOWNLOADED] itarget_build_dep_pm v1.0.0 [..] [DOWNLOADED] itarget_build_dep v1.0.0 [..] [DOWNLOADED] dev_dep_pm v1.0.0 [..] [DOWNLOADED] dev_dep v1.0.0 [..] [DOWNLOADED] build_dep_pm v1.0.0 [..] [DOWNLOADED] build_dep v1.0.0 [..] ", ) .with_stdout( "\ foo v0.1.0 ([ROOT]/foo) β”œβ”€β”€ itarget_normal v1.0.0 β”œβ”€β”€ itarget_normal_pm v1.0.0 β”œβ”€β”€ normal v1.0.0 └── normal_pm v1.0.0 [build-dependencies] β”œβ”€β”€ build_dep v1.0.0 β”œβ”€β”€ build_dep_pm v1.0.0 β”œβ”€β”€ itarget_build_dep v1.0.0 └── itarget_build_dep_pm v1.0.0 [dev-dependencies] β”œβ”€β”€ dev_dep v1.0.0 β”œβ”€β”€ dev_dep_pm v1.0.0 β”œβ”€β”€ itarget_dev_dep v1.0.0 └── itarget_dev_dep_pm v1.0.0 ", ) .run(); clear(); } #[cargo_test] fn pm_with_int_shared() { // This is a somewhat complex scenario of a proc-macro in a workspace with // an integration test where the proc-macro is used for other things, and // *everything* is built at once (`--workspace --all-targets // --all-features`). There was a bug where the UnitFor settings were being // incorrectly computed based on the order that the graph was traversed. // // There are some uncertainties about exactly how proc-macros should behave // with `--workspace`, see https://github.com/rust-lang/cargo/issues/8312. // // This uses a const-eval hack to do compile-time feature checking. 
let p = project() .file( "Cargo.toml", r#" [workspace] members = ["foo", "pm", "shared"] resolver = "2" "#, ) .file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [dependencies] pm = { path = "../pm" } shared = { path = "../shared", features = ["norm-feat"] } "#, ) .file( "foo/src/lib.rs", r#" // foo->shared always has both features set const _CHECK: [(); 0] = [(); 0-!(shared::FEATS==3) as usize]; "#, ) .file( "pm/Cargo.toml", r#" [package] name = "pm" version = "0.1.0" [lib] proc-macro = true [dependencies] shared = { path = "../shared", features = ["host-feat"] } "#, ) .file( "pm/src/lib.rs", r#" // pm->shared always has just host const _CHECK: [(); 0] = [(); 0-!(shared::FEATS==1) as usize]; "#, ) .file( "pm/tests/pm_test.rs", r#" // integration test gets both set const _CHECK: [(); 0] = [(); 0-!(shared::FEATS==3) as usize]; "#, ) .file( "shared/Cargo.toml", r#" [package] name = "shared" version = "0.1.0" [features] norm-feat = [] host-feat = [] "#, ) .file( "shared/src/lib.rs", r#" pub const FEATS: u32 = { if cfg!(feature="norm-feat") && cfg!(feature="host-feat") { 3 } else if cfg!(feature="norm-feat") { 2 } else if cfg!(feature="host-feat") { 1 } else { 0 } }; "#, ) .build(); p.cargo("build --workspace --all-targets --all-features -v") .with_stderr_unordered( "\ [COMPILING] shared [..] [RUNNING] `rustc --crate-name shared [..]--crate-type lib [..] [RUNNING] `rustc --crate-name shared [..]--crate-type lib [..] [RUNNING] `rustc --crate-name shared [..]--test[..] [COMPILING] pm [..] [RUNNING] `rustc --crate-name pm [..]--crate-type proc-macro[..] [RUNNING] `rustc --crate-name pm [..]--test[..] [COMPILING] foo [..] [RUNNING] `rustc --crate-name foo [..]--test[..] [RUNNING] `rustc --crate-name pm_test [..]--test[..] [RUNNING] `rustc --crate-name foo [..]--crate-type lib[..] [FINISHED] [..] ", ) .run(); // And again, should stay fresh. 
p.cargo("build --workspace --all-targets --all-features -v") .with_stderr_unordered( "\ [FRESH] shared [..] [FRESH] pm [..] [FRESH] foo [..] [FINISHED] [..]", ) .run(); } #[cargo_test] fn doc_proc_macro() { // Checks for a bug when documenting a proc-macro with a dependency. The // doc unit builder was not carrying the "for host" setting through the // dependencies, and the `pm-dep` dependency was causing a panic because // it was looking for target features instead of host features. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" resolver = "2" [dependencies] pm = { path = "pm" } "#, ) .file("src/lib.rs", "") .file( "pm/Cargo.toml", r#" [package] name = "pm" version = "0.1.0" [lib] proc-macro = true [dependencies] pm-dep = { path = "../pm-dep" } "#, ) .file("pm/src/lib.rs", "") .file("pm-dep/Cargo.toml", &basic_manifest("pm-dep", "0.1.0")) .file("pm-dep/src/lib.rs", "") .build(); // Unfortunately this cannot check the output because what it prints is // nondeterministic. Sometimes it says "Compiling pm-dep" and sometimes // "Checking pm-dep". This is because it is both building it and checking // it in parallel (building so it can build the proc-macro, and checking // so rustdoc can load it). p.cargo("doc").run(); } #[cargo_test] fn edition_2021_default_2() { // edition = 2021 defaults to v2 resolver. Package::new("common", "1.0.0") .feature("f1", &[]) .file("src/lib.rs", "") .publish(); Package::new("bar", "1.0.0") .add_dep( Dependency::new("common", "1.0") .target("cfg(whatever)") .enable_features(&["f1"]), ) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] common = "1.0" bar = "1.0" "#, ) .file("src/lib.rs", "") .build(); // First without edition. p.cargo("tree -f") .arg("{p} feats:{f}") .with_stdout( "\ foo v0.1.0 [..] 
β”œβ”€β”€ bar v1.0.0 feats: └── common v1.0.0 feats:f1 ", ) .run(); p.change_file( "Cargo.toml", r#" cargo-features = ["edition2021"] [package] name = "foo" version = "0.1.0" edition = "2021" [dependencies] common = "1.0" bar = "1.0" "#, ); // Importantly, this does not include `f1` on `common`. p.cargo("tree -f") .arg("{p} feats:{f}") .with_stdout( "\ foo v0.1.0 [..] β”œβ”€β”€ bar v1.0.0 feats: └── common v1.0.0 feats: ", ) .run(); } #[cargo_test] fn all_features_merges_with_features() { Package::new("dep", "0.1.0") .feature("feat1", &[]) .file( "src/lib.rs", r#" #[cfg(feature="feat1")] pub fn work() { println!("it works"); } "#, ) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [features] a = [] [dependencies] dep = "0.1" [[example]] name = "ex" required-features = ["a", "dep/feat1"] "#, ) .file( "examples/ex.rs", r#" fn main() { dep::work(); } "#, ) .file("src/lib.rs", "") .build(); p.cargo("run --example ex --all-features --features dep/feat1") .with_stderr( "\ [UPDATING] [..] [DOWNLOADING] crates ... [DOWNLOADED] [..] [COMPILING] dep v0.1.0 [COMPILING] foo v0.1.0 [..] [FINISHED] [..] [RUNNING] `target/debug/examples/ex[EXE]` ", ) .with_stdout("it works") .run(); switch_to_resolver_2(&p); p.cargo("run --example ex --all-features --features dep/feat1") .with_stderr( "\ [FINISHED] [..] [RUNNING] `target/debug/examples/ex[EXE]` ", ) .with_stdout("it works") .run(); } cargo-0.66.0/tests/testsuite/features_namespaced.rs000066400000000000000000000627721432416201200224460ustar00rootroot00000000000000//! Tests for namespaced features. use super::features2::switch_to_resolver_2; use cargo_test_support::registry::{Dependency, Package}; use cargo_test_support::{project, publish}; #[cargo_test] fn dependency_with_crate_syntax() { // Registry dependency uses dep: syntax. 
Package::new("baz", "1.0.0").publish(); Package::new("bar", "1.0.0") .add_dep(Dependency::new("baz", "1.0").optional(true)) .feature("feat", &["dep:baz"]) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = {version="1.0", features=["feat"]} "#, ) .file("src/lib.rs", "") .build(); p.cargo("check") .with_stderr( "\ [UPDATING] [..] [DOWNLOADING] crates ... [DOWNLOADED] [..] [DOWNLOADED] [..] [CHECKING] baz v1.0.0 [CHECKING] bar v1.0.0 [CHECKING] foo v0.1.0 [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn namespaced_invalid_feature() { // Specifies a feature that doesn't exist. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [features] bar = ["baz"] "#, ) .file("src/main.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: feature `bar` includes `baz` which is neither a dependency nor another feature ", ) .run(); } #[cargo_test] fn namespaced_invalid_dependency() { // Specifies a dep:name that doesn't exist. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [features] bar = ["dep:baz"] "#, ) .file("src/main.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: feature `bar` includes `dep:baz`, but `baz` is not listed as a dependency ", ) .run(); } #[cargo_test] fn namespaced_non_optional_dependency() { // Specifies a dep:name for a dependency that is not optional. 
let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [features] bar = ["dep:baz"] [dependencies] baz = "0.1" "#, ) .file("src/main.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: feature `bar` includes `dep:baz`, but `baz` is not an optional dependency A non-optional dependency of the same name is defined; consider adding `optional = true` to its definition. ", ) .run(); } #[cargo_test] fn namespaced_implicit_feature() { // Backwards-compatible with old syntax. Package::new("baz", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [features] bar = ["baz"] [dependencies] baz = { version = "0.1", optional = true } "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("check") .with_stderr( "\ [UPDATING] [..] [CHECKING] foo v0.0.1 [..] [FINISHED] [..] ", ) .run(); p.cargo("check --features baz") .with_stderr( "\ [DOWNLOADING] crates ... [DOWNLOADED] baz v0.1.0 [..] [CHECKING] baz v0.1.0 [CHECKING] foo v0.0.1 [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn namespaced_shadowed_dep() { // An optional dependency is not listed in the features table, and its // implicit feature is overridden. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [features] baz = [] [dependencies] baz = { version = "0.1", optional = true } "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: optional dependency `baz` is not included in any feature Make sure that `dep:baz` is included in one of features in the [features] table. ", ) .run(); } #[cargo_test] fn namespaced_shadowed_non_optional() { // Able to specify a feature with the same name as a required dependency. 
Package::new("baz", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [features] baz = [] [dependencies] baz = "0.1" "#, ) .file("src/lib.rs", "") .build(); p.cargo("check").run(); } #[cargo_test] fn namespaced_implicit_non_optional() { // Includes a non-optional dependency in [features] table. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [features] bar = ["baz"] [dependencies] baz = "0.1" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build").with_status(101).with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: feature `bar` includes `baz`, but `baz` is not an optional dependency A non-optional dependency of the same name is defined; consider adding `optional = true` to its definition. ", ).run(); } #[cargo_test] fn namespaced_same_name() { // Explicitly listing an optional dependency in the [features] table. Package::new("baz", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [features] baz = ["dep:baz"] [dependencies] baz = { version = "0.1", optional = true } "#, ) .file( "src/main.rs", r#" fn main() { if cfg!(feature="baz") { println!("baz"); } } "#, ) .build(); p.cargo("run") .with_stderr( "\ [UPDATING] [..] [COMPILING] foo v0.0.1 [..] [FINISHED] [..] [RUNNING] [..] ", ) .with_stdout("") .run(); p.cargo("run --features baz") .with_stderr( "\ [DOWNLOADING] crates ... [DOWNLOADED] baz v0.1.0 [..] [COMPILING] baz v0.1.0 [COMPILING] foo v0.0.1 [..] [FINISHED] [..] [RUNNING] [..] ", ) .with_stdout("baz") .run(); } #[cargo_test] fn no_implicit_feature() { // Using `dep:` will not create an implicit feature. 
Package::new("regex", "1.0.0").publish(); Package::new("lazy_static", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] regex = { version = "1.0", optional = true } lazy_static = { version = "1.0", optional = true } [features] regex = ["dep:regex", "dep:lazy_static"] "#, ) .file( "src/main.rs", r#" fn main() { if cfg!(feature = "regex") { println!("regex"); } if cfg!(feature = "lazy_static") { println!("lazy_static"); } } "#, ) .build(); p.cargo("run") .with_stderr( "\ [UPDATING] [..] [COMPILING] foo v0.1.0 [..] [FINISHED] [..] [RUNNING] `target/debug/foo[EXE]` ", ) .with_stdout("") .run(); p.cargo("run --features regex") .with_stderr_unordered( "\ [DOWNLOADING] crates ... [DOWNLOADED] regex v1.0.0 [..] [DOWNLOADED] lazy_static v1.0.0 [..] [COMPILING] regex v1.0.0 [COMPILING] lazy_static v1.0.0 [COMPILING] foo v0.1.0 [..] [FINISHED] [..] [RUNNING] `target/debug/foo[EXE]` ", ) .with_stdout("regex") .run(); p.cargo("run --features lazy_static") .with_stderr( "\ [ERROR] Package `foo v0.1.0 [..]` does not have feature `lazy_static`. \ It has an optional dependency with that name, but that dependency uses the \"dep:\" \ syntax in the features table, so it does not have an implicit feature with that name. 
", ) .with_status(101) .run(); } #[cargo_test] fn crate_syntax_bad_name() { // "dep:bar" = [] Package::new("bar", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = { version="1.0", optional=true } [features] "dep:bar" = [] "#, ) .file("src/lib.rs", "") .build(); p.cargo("check --features dep:bar") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at [..]/foo/Cargo.toml` Caused by: feature named `dep:bar` is not allowed to start with `dep:` ", ) .run(); } #[cargo_test] fn crate_syntax_in_dep() { // features = ["dep:baz"] Package::new("baz", "1.0.0").publish(); Package::new("bar", "1.0.0") .add_dep(Dependency::new("baz", "1.0").optional(true)) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = { version = "1.0", features = ["dep:baz"] } "#, ) .file("src/lib.rs", "") .build(); p.cargo("check") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[CWD]/Cargo.toml` Caused by: feature `dep:baz` in dependency `bar` is not allowed to use explicit `dep:` syntax If you want to enable [..] 
", ) .run(); } #[cargo_test] fn crate_syntax_cli() { // --features dep:bar Package::new("bar", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = { version = "1.0", optional=true } "#, ) .file("src/lib.rs", "") .build(); p.cargo("check --features dep:bar") .with_status(101) .with_stderr( "\ [ERROR] feature `dep:bar` is not allowed to use explicit `dep:` syntax ", ) .run(); switch_to_resolver_2(&p); p.cargo("check --features dep:bar") .with_status(101) .with_stderr( "\ [ERROR] feature `dep:bar` is not allowed to use explicit `dep:` syntax ", ) .run(); } #[cargo_test] fn crate_required_features() { // required-features = ["dep:bar"] Package::new("bar", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = { version = "1.0", optional=true } [[bin]] name = "foo" required-features = ["dep:bar"] "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("check") .with_status(101) .with_stderr( "\ [UPDATING] [..] [ERROR] invalid feature `dep:bar` in required-features of target `foo`: \ `dep:` prefixed feature values are not allowed in required-features ", ) .run(); } #[cargo_test] fn json_exposed() { // Checks that the implicit dep: values are exposed in JSON. 
Package::new("bar", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = { version = "1.0", optional=true } "#, ) .file("src/lib.rs", "") .build(); p.cargo("metadata --no-deps") .with_json( r#" { "packages": [ { "name": "foo", "version": "0.1.0", "id": "foo 0.1.0 [..]", "license": null, "license_file": null, "description": null, "homepage": null, "documentation": null, "source": null, "dependencies": "{...}", "targets": "{...}", "features": { "bar": ["dep:bar"] }, "manifest_path": "[..]foo/Cargo.toml", "metadata": null, "publish": null, "authors": [], "categories": [], "default_run": null, "keywords": [], "readme": null, "repository": null, "rust_version": null, "edition": "2015", "links": null } ], "workspace_members": "{...}", "resolve": null, "target_directory": "[..]foo/target", "version": 1, "workspace_root": "[..]foo", "metadata": null } "#, ) .run(); } #[cargo_test] fn crate_feature_with_explicit() { // crate_name/feat_name syntax where crate_name already has a feature defined. // NOTE: I don't know if this is actually ideal behavior. Package::new("bar", "1.0.0") .feature("bar_feat", &[]) .file( "src/lib.rs", r#" #[cfg(not(feature="bar_feat"))] compile_error!("bar_feat is not enabled"); "#, ) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = { version="1.0", optional = true } [features] f1 = ["bar/bar_feat"] bar = ["dep:bar", "f2"] f2 = [] "#, ) .file( "src/lib.rs", r#" #[cfg(not(feature="bar"))] compile_error!("bar should be enabled"); #[cfg(not(feature="f2"))] compile_error!("f2 should be enabled"); "#, ) .build(); p.cargo("check --features f1") .with_stderr( "\ [UPDATING] [..] [DOWNLOADING] crates ... [DOWNLOADED] bar v1.0.0 [..] [CHECKING] bar v1.0.0 [CHECKING] foo v0.1.0 [..] [FINISHED] [..] 
", ) .run(); } #[cargo_test] fn optional_explicit_without_crate() { // "feat" syntax when there is no implicit "feat" feature because it is // explicitly listed elsewhere. Package::new("bar", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = { version = "1.0", optional = true } [features] feat1 = ["dep:bar"] feat2 = ["bar"] "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at [..] Caused by: feature `feat2` includes `bar`, but `bar` is an optional dependency without an implicit feature Use `dep:bar` to enable the dependency. ", ) .run(); } #[cargo_test] fn tree() { Package::new("baz", "1.0.0").publish(); Package::new("bar", "1.0.0") .add_dep(Dependency::new("baz", "1.0").optional(true)) .feature("feat1", &["dep:baz"]) .feature("feat2", &[]) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = { version = "1.0", features = ["feat1"], optional=true } [features] a = ["bar/feat2"] bar = ["dep:bar"] "#, ) .file("src/lib.rs", "") .build(); p.cargo("tree -e features") .with_stdout("foo v0.1.0 ([ROOT]/foo)") .run(); p.cargo("tree -e features --features a") .with_stdout( "\ foo v0.1.0 ([ROOT]/foo) β”œβ”€β”€ bar feature \"default\" β”‚ └── bar v1.0.0 β”‚ └── baz feature \"default\" β”‚ └── baz v1.0.0 └── bar feature \"feat1\" └── bar v1.0.0 (*) ", ) .run(); p.cargo("tree -e features --features a -i bar") .with_stdout( "\ bar v1.0.0 β”œβ”€β”€ bar feature \"default\" β”‚ └── foo v0.1.0 ([ROOT]/foo) β”‚ β”œβ”€β”€ foo feature \"a\" (command-line) β”‚ β”œβ”€β”€ foo feature \"bar\" β”‚ β”‚ └── foo feature \"a\" (command-line) β”‚ └── foo feature \"default\" (command-line) β”œβ”€β”€ bar feature \"feat1\" β”‚ └── foo v0.1.0 ([ROOT]/foo) (*) └── bar feature \"feat2\" └── foo feature \"a\" (command-line) ", ) .run(); p.cargo("tree -e features --features bar") .with_stdout( 
"\ foo v0.1.0 ([ROOT]/foo) β”œβ”€β”€ bar feature \"default\" β”‚ └── bar v1.0.0 β”‚ └── baz feature \"default\" β”‚ └── baz v1.0.0 └── bar feature \"feat1\" └── bar v1.0.0 (*) ", ) .run(); p.cargo("tree -e features --features bar -i bar") .with_stdout( "\ bar v1.0.0 β”œβ”€β”€ bar feature \"default\" β”‚ └── foo v0.1.0 ([ROOT]/foo) β”‚ β”œβ”€β”€ foo feature \"bar\" (command-line) β”‚ └── foo feature \"default\" (command-line) └── bar feature \"feat1\" └── foo v0.1.0 ([ROOT]/foo) (*) ", ) .run(); } #[cargo_test] fn tree_no_implicit() { // tree without an implicit feature Package::new("bar", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = { version = "1.0", optional=true } [features] a = ["dep:bar"] "#, ) .file("src/lib.rs", "") .build(); p.cargo("tree -e features") .with_stdout("foo v0.1.0 ([ROOT]/foo)") .run(); p.cargo("tree -e features --all-features") .with_stdout( "\ foo v0.1.0 ([ROOT]/foo) └── bar feature \"default\" └── bar v1.0.0 ", ) .run(); p.cargo("tree -e features -i bar --all-features") .with_stdout( "\ bar v1.0.0 └── bar feature \"default\" └── foo v0.1.0 ([ROOT]/foo) β”œβ”€β”€ foo feature \"a\" (command-line) └── foo feature \"default\" (command-line) ", ) .run(); } #[cargo_test] fn publish_no_implicit() { // Does not include implicit features or dep: syntax on publish. Package::new("opt-dep1", "1.0.0").publish(); Package::new("opt-dep2", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" description = "foo" license = "MIT" homepage = "https://example.com/" [dependencies] opt-dep1 = { version = "1.0", optional = true } opt-dep2 = { version = "1.0", optional = true } [features] feat = ["opt-dep1"] "#, ) .file("src/lib.rs", "") .build(); p.cargo("publish --no-verify --token sekrit") .with_stderr( "\ [UPDATING] [..] [PACKAGING] foo v0.1.0 [..] [UPLOADING] foo v0.1.0 [..] 
", ) .run(); publish::validate_upload_with_contents( r#" { "authors": [], "badges": {}, "categories": [], "deps": [ { "default_features": true, "features": [], "kind": "normal", "name": "opt-dep1", "optional": true, "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^1.0" }, { "default_features": true, "features": [], "kind": "normal", "name": "opt-dep2", "optional": true, "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^1.0" } ], "description": "foo", "documentation": null, "features": { "feat": ["opt-dep1"] }, "homepage": "https://example.com/", "keywords": [], "license": "MIT", "license_file": null, "links": null, "name": "foo", "readme": null, "readme_file": null, "repository": null, "vers": "0.1.0" } "#, "foo-0.1.0.crate", &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"], &[( "Cargo.toml", &format!( r#"{} [package] name = "foo" version = "0.1.0" description = "foo" homepage = "https://example.com/" license = "MIT" [dependencies.opt-dep1] version = "1.0" optional = true [dependencies.opt-dep2] version = "1.0" optional = true [features] feat = ["opt-dep1"] "#, cargo::core::package::MANIFEST_PREAMBLE ), )], ); } #[cargo_test] fn publish() { // Publish behavior with explicit dep: syntax. Package::new("bar", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" description = "foo" license = "MIT" homepage = "https://example.com/" [dependencies] bar = { version = "1.0", optional = true } [features] feat1 = [] feat2 = ["dep:bar"] feat3 = ["feat2"] "#, ) .file("src/lib.rs", "") .build(); p.cargo("publish --token sekrit") .with_stderr( "\ [UPDATING] [..] [PACKAGING] foo v0.1.0 [..] [VERIFYING] foo v0.1.0 [..] [COMPILING] foo v0.1.0 [..] [FINISHED] [..] [UPLOADING] foo v0.1.0 [..] 
", ) .run(); publish::validate_upload_with_contents( r#" { "authors": [], "badges": {}, "categories": [], "deps": [ { "default_features": true, "features": [], "kind": "normal", "name": "bar", "optional": true, "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^1.0" } ], "description": "foo", "documentation": null, "features": { "feat1": [], "feat2": ["dep:bar"], "feat3": ["feat2"] }, "homepage": "https://example.com/", "keywords": [], "license": "MIT", "license_file": null, "links": null, "name": "foo", "readme": null, "readme_file": null, "repository": null, "vers": "0.1.0" } "#, "foo-0.1.0.crate", &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"], &[( "Cargo.toml", &format!( r#"{} [package] name = "foo" version = "0.1.0" description = "foo" homepage = "https://example.com/" license = "MIT" [dependencies.bar] version = "1.0" optional = true [features] feat1 = [] feat2 = ["dep:bar"] feat3 = ["feat2"] "#, cargo::core::package::MANIFEST_PREAMBLE ), )], ); } cargo-0.66.0/tests/testsuite/fetch.rs000066400000000000000000000066101432416201200175260ustar00rootroot00000000000000//! Tests for the `cargo fetch` command. 
use cargo_test_support::registry::Package; use cargo_test_support::rustc_host; use cargo_test_support::{basic_manifest, cross_compile, project}; #[cargo_test] fn no_deps() { let p = project() .file("src/main.rs", "mod a; fn main() {}") .file("src/a.rs", "") .build(); p.cargo("fetch").with_stdout("").run(); } #[cargo_test] fn fetch_all_platform_dependencies_when_no_target_is_given() { if cross_compile::disabled() { return; } Package::new("d1", "1.2.3") .file("Cargo.toml", &basic_manifest("d1", "1.2.3")) .file("src/lib.rs", "") .publish(); Package::new("d2", "0.1.2") .file("Cargo.toml", &basic_manifest("d2", "0.1.2")) .file("src/lib.rs", "") .publish(); let target = cross_compile::alternate(); let host = rustc_host(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [target.{host}.dependencies] d1 = "1.2.3" [target.{target}.dependencies] d2 = "0.1.2" "#, host = host, target = target ), ) .file("src/lib.rs", "") .build(); p.cargo("fetch") .with_stderr_contains("[DOWNLOADED] d1 v1.2.3 [..]") .with_stderr_contains("[DOWNLOADED] d2 v0.1.2 [..]") .run(); } #[cargo_test] fn fetch_platform_specific_dependencies() { if cross_compile::disabled() { return; } Package::new("d1", "1.2.3") .file("Cargo.toml", &basic_manifest("d1", "1.2.3")) .file("src/lib.rs", "") .publish(); Package::new("d2", "0.1.2") .file("Cargo.toml", &basic_manifest("d2", "0.1.2")) .file("src/lib.rs", "") .publish(); let target = cross_compile::alternate(); let host = rustc_host(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [target.{host}.dependencies] d1 = "1.2.3" [target.{target}.dependencies] d2 = "0.1.2" "#, host = host, target = target ), ) .file("src/lib.rs", "") .build(); p.cargo("fetch --target") .arg(&host) .with_stderr_contains("[DOWNLOADED] d1 v1.2.3 [..]") .with_stderr_does_not_contain("[DOWNLOADED] d2 v0.1.2 [..]") .run(); p.cargo("fetch --target") .arg(&target) 
.with_stderr_contains("[DOWNLOADED] d2 v0.1.2[..]") .with_stderr_does_not_contain("[DOWNLOADED] d1 v1.2.3 [..]") .run(); } #[cargo_test] fn fetch_warning() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.0.0" misspelled = "wut" "#, ) .file("src/lib.rs", "") .build(); p.cargo("fetch") .with_stderr("[WARNING] unused manifest key: package.misspelled") .run(); } cargo-0.66.0/tests/testsuite/fix.rs000066400000000000000000001422161432416201200172260ustar00rootroot00000000000000//! Tests for the `cargo fix` command. use cargo::core::Edition; use cargo_test_support::compare::assert_match_exact; use cargo_test_support::git; use cargo_test_support::paths::{self, CargoPathExt}; use cargo_test_support::registry::{Dependency, Package}; use cargo_test_support::tools; use cargo_test_support::{basic_manifest, is_nightly, project}; #[cargo_test] fn do_not_fix_broken_builds() { let p = project() .file( "src/lib.rs", r#" pub fn foo() { let mut x = 3; drop(x); } pub fn foo2() { let _x: u32 = "a"; } "#, ) .build(); p.cargo("fix --allow-no-vcs") .env("__CARGO_FIX_YOLO", "1") .with_status(101) .with_stderr_contains("[ERROR] could not compile `foo` due to previous error") .run(); assert!(p.read_file("src/lib.rs").contains("let mut x = 3;")); } #[cargo_test] fn fix_broken_if_requested() { let p = project() .file( "src/lib.rs", r#" fn foo(a: &u32) -> u32 { a + 1 } pub fn bar() { foo(1); } "#, ) .build(); p.cargo("fix --allow-no-vcs --broken-code") .env("__CARGO_FIX_YOLO", "1") .run(); } #[cargo_test] fn broken_fixes_backed_out() { // This works as follows: // - Create a `rustc` shim (the "foo" project) which will pretend that the // verification step fails. // - There is an empty build script so `foo` has `OUT_DIR` to track the steps. // - The first "check", `foo` creates a file in OUT_DIR, and it completes // successfully with a warning diagnostic to remove unused `mut`. // - rustfix removes the `mut`. 
// - The second "check" to verify the changes, `foo` swaps out the content // with something that fails to compile. It creates a second file so it // won't do anything in the third check. // - cargo fix discovers that the fix failed, and it backs out the changes. // - The third "check" is done to display the original diagnostics of the // original code. let p = project() .file( "foo/Cargo.toml", r#" [package] name = 'foo' version = '0.1.0' [workspace] "#, ) .file( "foo/src/main.rs", r#" use std::env; use std::fs; use std::io::Write; use std::path::{Path, PathBuf}; use std::process::{self, Command}; fn main() { // Ignore calls to things like --print=file-names and compiling build.rs. // Also compatible for rustc invocations with `@path` argfile. let is_lib_rs = env::args_os() .map(PathBuf::from) .flat_map(|p| if let Some(p) = p.to_str().unwrap_or_default().strip_prefix("@") { fs::read_to_string(p).unwrap().lines().map(PathBuf::from).collect() } else { vec![p] }) .any(|l| l == Path::new("src/lib.rs")); if is_lib_rs { let path = PathBuf::from(env::var_os("OUT_DIR").unwrap()); let first = path.join("first"); let second = path.join("second"); if first.exists() && !second.exists() { fs::write("src/lib.rs", b"not rust code").unwrap(); fs::File::create(&second).unwrap(); } else { fs::File::create(&first).unwrap(); } } let status = Command::new("rustc") .args(env::args().skip(1)) .status() .expect("failed to run rustc"); process::exit(status.code().unwrap_or(2)); } "#, ) .file( "bar/Cargo.toml", r#" [package] name = 'bar' version = '0.1.0' [workspace] "#, ) .file("bar/build.rs", "fn main() {}") .file( "bar/src/lib.rs", r#" pub fn foo() { let mut x = 3; drop(x); } "#, ) .build(); // Build our rustc shim p.cargo("build").cwd("foo").run(); // Attempt to fix code, but our shim will always fail the second compile p.cargo("fix --allow-no-vcs --lib") .cwd("bar") .env("__CARGO_FIX_YOLO", "1") .env("RUSTC", p.root().join("foo/target/debug/foo")) .with_stderr_contains( "warning: 
failed to automatically apply fixes suggested by rustc \ to crate `bar`\n\ \n\ after fixes were automatically applied the compiler reported \ errors within these files:\n\ \n \ * src/lib.rs\n\ \n\ This likely indicates a bug in either rustc or cargo itself,\n\ and we would appreciate a bug report! You're likely to see \n\ a number of compiler warnings after this message which cargo\n\ attempted to fix but failed. If you could open an issue at\n\ [..]\n\ quoting the full output of this command we'd be very appreciative!\n\ Note that you may be able to make some more progress in the near-term\n\ fixing code with the `--broken-code` flag\n\ \n\ The following errors were reported:\n\ error: expected one of `!` or `::`, found `rust`\n\ ", ) .with_stderr_contains("Original diagnostics will follow.") .with_stderr_contains("[WARNING] variable does not need to be mutable") .with_stderr_does_not_contain("[..][FIXED][..]") .run(); // Make sure the fix which should have been applied was backed out assert!(p.read_file("bar/src/lib.rs").contains("let mut x = 3;")); } #[cargo_test] fn fix_path_deps() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = { path = 'bar' } [workspace] "#, ) .file( "src/lib.rs", r#" extern crate bar; pub fn foo() -> u32 { let mut x = 3; x } "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file( "bar/src/lib.rs", r#" pub fn foo() -> u32 { let mut x = 3; x } "#, ) .build(); p.cargo("fix --allow-no-vcs -p foo -p bar") .env("__CARGO_FIX_YOLO", "1") .with_stdout("") .with_stderr_unordered( "\ [CHECKING] bar v0.1.0 ([..]) [FIXED] bar/src/lib.rs (1 fix) [CHECKING] foo v0.1.0 ([..]) [FIXED] src/lib.rs (1 fix) [FINISHED] [..] 
", ) .run(); } #[cargo_test] fn do_not_fix_non_relevant_deps() { let p = project() .no_manifest() .file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = { path = '../bar' } [workspace] "#, ) .file("foo/src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file( "bar/src/lib.rs", r#" pub fn foo() -> u32 { let mut x = 3; x } "#, ) .build(); p.cargo("fix --allow-no-vcs") .env("__CARGO_FIX_YOLO", "1") .cwd("foo") .run(); assert!(p.read_file("bar/src/lib.rs").contains("mut")); } #[cargo_test] fn prepare_for_2018() { let p = project() .file( "src/lib.rs", r#" #![allow(unused)] mod foo { pub const FOO: &str = "fooo"; } mod bar { use ::foo::FOO; } fn main() { let x = ::foo::FOO; } "#, ) .build(); let stderr = "\ [CHECKING] foo v0.0.1 ([..]) [MIGRATING] src/lib.rs from 2015 edition to 2018 [FIXED] src/lib.rs (2 fixes) [FINISHED] [..] "; p.cargo("fix --edition --allow-no-vcs") .with_stderr(stderr) .with_stdout("") .run(); println!("{}", p.read_file("src/lib.rs")); assert!(p.read_file("src/lib.rs").contains("use crate::foo::FOO;")); assert!(p .read_file("src/lib.rs") .contains("let x = crate::foo::FOO;")); } #[cargo_test] fn local_paths() { let p = project() .file( "src/lib.rs", r#" use test::foo; mod test { pub fn foo() {} } pub fn f() { foo(); } "#, ) .build(); p.cargo("fix --edition --allow-no-vcs") .with_stderr( "\ [CHECKING] foo v0.0.1 ([..]) [MIGRATING] src/lib.rs from 2015 edition to 2018 [FIXED] src/lib.rs (1 fix) [FINISHED] [..] 
", ) .with_stdout("") .run(); println!("{}", p.read_file("src/lib.rs")); assert!(p.read_file("src/lib.rs").contains("use crate::test::foo;")); } #[cargo_test] fn upgrade_extern_crate() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = '2018' [workspace] [dependencies] bar = { path = 'bar' } "#, ) .file( "src/lib.rs", r#" #![warn(rust_2018_idioms)] extern crate bar; use bar::bar; pub fn foo() { ::bar::bar(); bar(); } "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .build(); let stderr = "\ [CHECKING] bar v0.1.0 ([..]) [CHECKING] foo v0.1.0 ([..]) [FIXED] src/lib.rs (1 fix) [FINISHED] [..] "; p.cargo("fix --allow-no-vcs") .env("__CARGO_FIX_YOLO", "1") .with_stderr(stderr) .with_stdout("") .run(); println!("{}", p.read_file("src/lib.rs")); assert!(!p.read_file("src/lib.rs").contains("extern crate")); } #[cargo_test] fn specify_rustflags() { let p = project() .file( "src/lib.rs", r#" #![allow(unused)] mod foo { pub const FOO: &str = "fooo"; } fn main() { let x = ::foo::FOO; } "#, ) .build(); p.cargo("fix --edition --allow-no-vcs") .env("RUSTFLAGS", "-C linker=cc") .with_stderr( "\ [CHECKING] foo v0.0.1 ([..]) [MIGRATING] src/lib.rs from 2015 edition to 2018 [FIXED] src/lib.rs (1 fix) [FINISHED] [..] ", ) .with_stdout("") .run(); } #[cargo_test] fn no_changes_necessary() { let p = project().file("src/lib.rs", "").build(); let stderr = "\ [CHECKING] foo v0.0.1 ([..]) [FINISHED] [..] "; p.cargo("fix --allow-no-vcs") .with_stderr(stderr) .with_stdout("") .run(); } #[cargo_test] fn fixes_extra_mut() { let p = project() .file( "src/lib.rs", r#" pub fn foo() -> u32 { let mut x = 3; x } "#, ) .build(); let stderr = "\ [CHECKING] foo v0.0.1 ([..]) [FIXED] src/lib.rs (1 fix) [FINISHED] [..] 
"; p.cargo("fix --allow-no-vcs") .env("__CARGO_FIX_YOLO", "1") .with_stderr(stderr) .with_stdout("") .run(); } #[cargo_test] fn fixes_two_missing_ampersands() { let p = project() .file( "src/lib.rs", r#" pub fn foo() -> u32 { let mut x = 3; let mut y = 3; x + y } "#, ) .build(); let stderr = "\ [CHECKING] foo v0.0.1 ([..]) [FIXED] src/lib.rs (2 fixes) [FINISHED] [..] "; p.cargo("fix --allow-no-vcs") .env("__CARGO_FIX_YOLO", "1") .with_stderr(stderr) .with_stdout("") .run(); } #[cargo_test] fn tricky() { let p = project() .file( "src/lib.rs", r#" pub fn foo() -> u32 { let mut x = 3; let mut y = 3; x + y } "#, ) .build(); let stderr = "\ [CHECKING] foo v0.0.1 ([..]) [FIXED] src/lib.rs (2 fixes) [FINISHED] [..] "; p.cargo("fix --allow-no-vcs") .env("__CARGO_FIX_YOLO", "1") .with_stderr(stderr) .with_stdout("") .run(); } #[cargo_test] fn preserve_line_endings() { let p = project() .file( "src/lib.rs", "fn add(a: &u32) -> u32 { a + 1 }\r\n\ pub fn foo() -> u32 { let mut x = 3; add(&x) }\r\n\ ", ) .build(); p.cargo("fix --allow-no-vcs") .env("__CARGO_FIX_YOLO", "1") .run(); assert!(p.read_file("src/lib.rs").contains("\r\n")); } #[cargo_test] fn fix_deny_warnings() { let p = project() .file( "src/lib.rs", "#![deny(warnings)] pub fn foo() { let mut x = 3; drop(x); } ", ) .build(); p.cargo("fix --allow-no-vcs") .env("__CARGO_FIX_YOLO", "1") .run(); } #[cargo_test] fn fix_deny_warnings_but_not_others() { let p = project() .file( "src/lib.rs", " #![deny(unused_mut)] pub fn foo() -> u32 { let mut x = 3; x } pub fn bar() { #[allow(unused_mut)] let mut _y = 4; } ", ) .build(); p.cargo("fix --allow-no-vcs") .env("__CARGO_FIX_YOLO", "1") .run(); assert!(!p.read_file("src/lib.rs").contains("let mut x = 3;")); assert!(p.read_file("src/lib.rs").contains("let mut _y = 4;")); } #[cargo_test] fn fix_two_files() { let p = project() .file( "src/lib.rs", " pub mod bar; pub fn foo() -> u32 { let mut x = 3; x } ", ) .file( "src/bar.rs", " pub fn foo() -> u32 { let mut x = 3; x } ", ) 
.build(); p.cargo("fix --allow-no-vcs") .env("__CARGO_FIX_YOLO", "1") .with_stderr_contains("[FIXED] src/bar.rs (1 fix)") .with_stderr_contains("[FIXED] src/lib.rs (1 fix)") .run(); assert!(!p.read_file("src/lib.rs").contains("let mut x = 3;")); assert!(!p.read_file("src/bar.rs").contains("let mut x = 3;")); } #[cargo_test] fn fixes_missing_ampersand() { let p = project() .file("src/main.rs", "fn main() { let mut x = 3; drop(x); }") .file( "src/lib.rs", r#" pub fn foo() { let mut x = 3; drop(x); } #[test] pub fn foo2() { let mut x = 3; drop(x); } "#, ) .file( "tests/a.rs", r#" #[test] pub fn foo() { let mut x = 3; drop(x); } "#, ) .file("examples/foo.rs", "fn main() { let mut x = 3; drop(x); }") .file("build.rs", "fn main() { let mut x = 3; drop(x); }") .build(); p.cargo("fix --all-targets --allow-no-vcs") .env("__CARGO_FIX_YOLO", "1") .with_stdout("") .with_stderr_contains("[COMPILING] foo v0.0.1 ([..])") .with_stderr_contains("[FIXED] build.rs (1 fix)") // Don't assert number of fixes for this one, as we don't know if we're // fixing it once or twice! We run this all concurrently, and if we // compile (and fix) in `--test` mode first, we get two fixes. Otherwise // we'll fix one non-test thing, and then fix another one later in // test mode. 
.with_stderr_contains("[FIXED] src/lib.rs[..]") .with_stderr_contains("[FIXED] src/main.rs (1 fix)") .with_stderr_contains("[FIXED] examples/foo.rs (1 fix)") .with_stderr_contains("[FIXED] tests/a.rs (1 fix)") .with_stderr_contains("[FINISHED] [..]") .run(); p.cargo("build").run(); p.cargo("test").run(); } #[cargo_test] fn fix_features() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [features] bar = [] [workspace] "#, ) .file( "src/lib.rs", r#" #[cfg(feature = "bar")] pub fn foo() -> u32 { let mut x = 3; x } "#, ) .build(); p.cargo("fix --allow-no-vcs").run(); p.cargo("build").run(); p.cargo("fix --features bar --allow-no-vcs").run(); p.cargo("build --features bar").run(); } #[cargo_test] fn shows_warnings() { let p = project() .file( "src/lib.rs", "#[deprecated] fn bar() {} pub fn foo() { let _ = bar(); }", ) .build(); p.cargo("fix --allow-no-vcs") .with_stderr_contains("[..]warning: use of deprecated[..]") .run(); } #[cargo_test] fn warns_if_no_vcs_detected() { let p = project().file("src/lib.rs", "pub fn foo() {}").build(); p.cargo("fix") .with_status(101) .with_stderr( "error: no VCS found for this package and `cargo fix` can potentially perform \ destructive changes; if you'd like to suppress this error pass `--allow-no-vcs`\ ", ) .run(); p.cargo("fix --allow-no-vcs").run(); } #[cargo_test] fn warns_about_dirty_working_directory() { let p = git::new("foo", |p| p.file("src/lib.rs", "pub fn foo() {}")); p.change_file("src/lib.rs", ""); p.cargo("fix") .with_status(101) .with_stderr( "\ error: the working directory of this package has uncommitted changes, \ and `cargo fix` can potentially perform destructive changes; if you'd \ like to suppress this error pass `--allow-dirty`, `--allow-staged`, or \ commit the changes to these files: * src/lib.rs (dirty) ", ) .run(); p.cargo("fix --allow-dirty").run(); } #[cargo_test] fn warns_about_staged_working_directory() { let (p, repo) = git::new_repo("foo", |p| p.file("src/lib.rs", 
"pub fn foo() {}")); p.change_file("src/lib.rs", "pub fn bar() {}"); git::add(&repo); p.cargo("fix") .with_status(101) .with_stderr( "\ error: the working directory of this package has uncommitted changes, \ and `cargo fix` can potentially perform destructive changes; if you'd \ like to suppress this error pass `--allow-dirty`, `--allow-staged`, or \ commit the changes to these files: * src/lib.rs (staged) ", ) .run(); p.cargo("fix --allow-staged").run(); } #[cargo_test] fn does_not_warn_about_clean_working_directory() { let p = git::new("foo", |p| p.file("src/lib.rs", "pub fn foo() {}")); p.cargo("fix").run(); } #[cargo_test] fn does_not_warn_about_dirty_ignored_files() { let p = git::new("foo", |p| { p.file("src/lib.rs", "pub fn foo() {}") .file(".gitignore", "bar\n") }); p.change_file("bar", ""); p.cargo("fix").run(); } #[cargo_test] fn fix_all_targets_by_default() { let p = project() .file("src/lib.rs", "pub fn foo() { let mut x = 3; drop(x); }") .file("tests/foo.rs", "pub fn foo() { let mut x = 3; drop(x); }") .build(); p.cargo("fix --allow-no-vcs") .env("__CARGO_FIX_YOLO", "1") .run(); assert!(!p.read_file("src/lib.rs").contains("let mut x")); assert!(!p.read_file("tests/foo.rs").contains("let mut x")); } #[cargo_test] fn prepare_for_unstable() { // During the period where a new edition is coming up, but not yet stable, // this test will verify that it cannot be migrated to on stable. If there // is no next edition, it does nothing. let next = match Edition::LATEST_UNSTABLE { Some(next) => next, None => { eprintln!("Next edition is currently not available, skipping test."); return; } }; let latest_stable = Edition::LATEST_STABLE; let prev = latest_stable.previous().unwrap(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.1.0" edition = "{}" "#, latest_stable ), ) .file("src/lib.rs", "") .build(); // -j1 to make the error more deterministic (otherwise there can be // multiple errors since they run in parallel). 
p.cargo("fix --edition --allow-no-vcs -j1") .with_stderr(&format_args!("\ [CHECKING] foo [..] [WARNING] `src/lib.rs` is on the latest edition, but trying to migrate to edition {next}. Edition {next} is unstable and not allowed in this release, consider trying the nightly release channel. If you are trying to migrate from the previous edition ({prev}), the process requires following these steps: 1. Start with `edition = \"{prev}\"` in `Cargo.toml` 2. Run `cargo fix --edition` 3. Modify `Cargo.toml` to set `edition = \"{latest_stable}\"` 4. Run `cargo build` or `cargo test` to verify the fixes worked More details may be found at https://doc.rust-lang.org/edition-guide/editions/transitioning-an-existing-project-to-a-new-edition.html [FINISHED] [..] ", next=next, latest_stable=latest_stable, prev=prev)) .run(); if !is_nightly() { // The rest of this test is fundamentally always nightly. return; } p.cargo("fix --edition --allow-no-vcs") .masquerade_as_nightly_cargo(&["always_nightly"]) .with_stderr(&format!( "\ [CHECKING] foo [..] [MIGRATING] src/lib.rs from {latest_stable} edition to {next} [FINISHED] [..] ", latest_stable = latest_stable, next = next, )) .run(); } #[cargo_test] fn prepare_for_latest_stable() { // This is the stable counterpart of prepare_for_unstable. let latest_stable = Edition::LATEST_STABLE; let previous = latest_stable.previous().unwrap(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = 'foo' version = '0.1.0' edition = '{}' "#, previous ), ) .file("src/lib.rs", "") .build(); p.cargo("fix --edition --allow-no-vcs") .with_stderr(&format!( "\ [CHECKING] foo [..] [MIGRATING] src/lib.rs from {} edition to {} [FINISHED] [..] ", previous, latest_stable )) .run(); } #[cargo_test(nightly, reason = "fundamentally always nightly")] fn prepare_for_already_on_latest_unstable() { // During the period where a new edition is coming up, but not yet stable, // this test will check what happens if you are already on the latest. 
If // there is no next edition, it does nothing. let next_edition = match Edition::LATEST_UNSTABLE { Some(next) => next, None => { eprintln!("Next edition is currently not available, skipping test."); return; } }; let p = project() .file( "Cargo.toml", &format!( r#" cargo-features = ["edition{}"] [package] name = 'foo' version = '0.1.0' edition = '{}' "#, next_edition, next_edition ), ) .file("src/lib.rs", "") .build(); p.cargo("fix --edition --allow-no-vcs") .masquerade_as_nightly_cargo(&["always_nightly"]) .with_stderr_contains("[CHECKING] foo [..]") .with_stderr_contains(&format!( "\ [WARNING] `src/lib.rs` is already on the latest edition ({next_edition}), unable to migrate further ", next_edition = next_edition )) .run(); } #[cargo_test] fn prepare_for_already_on_latest_stable() { // Stable counterpart of prepare_for_already_on_latest_unstable. if Edition::LATEST_UNSTABLE.is_some() { eprintln!("This test cannot run while the latest edition is unstable, skipping."); return; } let latest_stable = Edition::LATEST_STABLE; let p = project() .file( "Cargo.toml", &format!( r#" [package] name = 'foo' version = '0.1.0' edition = '{}' "#, latest_stable ), ) .file("src/lib.rs", "") .build(); p.cargo("fix --edition --allow-no-vcs") .with_stderr_contains("[CHECKING] foo [..]") .with_stderr_contains(&format!( "\ [WARNING] `src/lib.rs` is already on the latest edition ({latest_stable}), unable to migrate further ", latest_stable = latest_stable )) .run(); } #[cargo_test] fn fix_overlapping() { let p = project() .file( "src/lib.rs", r#" pub fn foo() {} pub struct A; pub mod bar { pub fn baz() { ::foo::<::A>(); } } "#, ) .build(); p.cargo("fix --allow-no-vcs --edition --lib") .with_stderr( "\ [CHECKING] foo [..] [MIGRATING] src/lib.rs from 2015 edition to 2018 [FIXED] src/lib.rs (2 fixes) [FINISHED] dev [..] 
", ) .run(); let contents = p.read_file("src/lib.rs"); println!("{}", contents); assert!(contents.contains("crate::foo::()")); } #[cargo_test] fn fix_idioms() { let p = project() .file( "Cargo.toml", r#" [package] name = 'foo' version = '0.1.0' edition = '2018' "#, ) .file( "src/lib.rs", r#" use std::any::Any; pub fn foo() { let _x: Box = Box::new(3); } "#, ) .build(); let stderr = "\ [CHECKING] foo [..] [FIXED] src/lib.rs (1 fix) [FINISHED] [..] "; p.cargo("fix --edition-idioms --allow-no-vcs") .with_stderr(stderr) .run(); assert!(p.read_file("src/lib.rs").contains("Box")); } #[cargo_test] fn idioms_2015_ok() { let p = project().file("src/lib.rs", "").build(); p.cargo("fix --edition-idioms --allow-no-vcs").run(); } #[cargo_test] fn shows_warnings_on_second_run_without_changes() { let p = project() .file( "src/lib.rs", r#" #[deprecated] fn bar() {} pub fn foo() { let _ = bar(); } "#, ) .build(); p.cargo("fix --allow-no-vcs") .with_stderr_contains("[..]warning: use of deprecated[..]") .run(); p.cargo("fix --allow-no-vcs") .with_stderr_contains("[..]warning: use of deprecated[..]") .run(); } #[cargo_test] fn shows_warnings_on_second_run_without_changes_on_multiple_targets() { let p = project() .file( "src/lib.rs", r#" #[deprecated] fn bar() {} pub fn foo() { let _ = bar(); } "#, ) .file( "src/main.rs", r#" #[deprecated] fn bar() {} fn main() { let _ = bar(); } "#, ) .file( "tests/foo.rs", r#" #[deprecated] fn bar() {} #[test] fn foo_test() { let _ = bar(); } "#, ) .file( "tests/bar.rs", r#" #[deprecated] fn bar() {} #[test] fn foo_test() { let _ = bar(); } "#, ) .file( "examples/fooxample.rs", r#" #[deprecated] fn bar() {} fn main() { let _ = bar(); } "#, ) .build(); p.cargo("fix --allow-no-vcs --all-targets") .with_stderr_contains(" --> examples/fooxample.rs:6:29") .with_stderr_contains(" --> src/lib.rs:6:29") .with_stderr_contains(" --> src/main.rs:6:29") .with_stderr_contains(" --> tests/bar.rs:7:29") .with_stderr_contains(" --> tests/foo.rs:7:29") .run(); 
p.cargo("fix --allow-no-vcs --all-targets") .with_stderr_contains(" --> examples/fooxample.rs:6:29") .with_stderr_contains(" --> src/lib.rs:6:29") .with_stderr_contains(" --> src/main.rs:6:29") .with_stderr_contains(" --> tests/bar.rs:7:29") .with_stderr_contains(" --> tests/foo.rs:7:29") .run(); } #[cargo_test] fn doesnt_rebuild_dependencies() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = { path = 'bar' } [workspace] "#, ) .file("src/lib.rs", "extern crate bar;") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "") .build(); p.cargo("fix --allow-no-vcs -p foo") .env("__CARGO_FIX_YOLO", "1") .with_stdout("") .with_stderr( "\ [CHECKING] bar v0.1.0 ([..]) [CHECKING] foo v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("fix --allow-no-vcs -p foo") .env("__CARGO_FIX_YOLO", "1") .with_stdout("") .with_stderr( "\ [CHECKING] foo v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn does_not_crash_with_rustc_wrapper() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("fix --allow-no-vcs") .env("RUSTC_WRAPPER", tools::echo_wrapper()) .run(); p.build_dir().rm_rf(); p.cargo("fix --allow-no-vcs --verbose") .env("RUSTC_WORKSPACE_WRAPPER", tools::echo_wrapper()) .run(); } #[cargo_test] fn uses_workspace_wrapper_and_primary_wrapper_override() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("fix --allow-no-vcs --verbose") .env("RUSTC_WORKSPACE_WRAPPER", tools::echo_wrapper()) .with_stderr_contains("WRAPPER CALLED: rustc src/lib.rs --crate-name foo [..]") .run(); } #[cargo_test] fn only_warn_for_relevant_crates() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] a = { path = 'a' } "#, ) .file("src/lib.rs", "") .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.1.0" "#, ) .file( "a/src/lib.rs", " pub fn foo() {} pub mod bar { use foo; pub fn baz() { foo() } } ", ) .build(); p.cargo("fix --allow-no-vcs --edition") .with_stderr( "\ [CHECKING] a v0.1.0 ([..]) [CHECKING] foo v0.1.0 ([..]) [MIGRATING] src/lib.rs from 2015 edition to 2018 [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn fix_to_broken_code() { let p = project() .file( "foo/Cargo.toml", r#" [package] name = 'foo' version = '0.1.0' [workspace] "#, ) .file( "foo/src/main.rs", r#" use std::env; use std::fs; use std::io::Write; use std::path::{Path, PathBuf}; use std::process::{self, Command}; fn main() { // Ignore calls to things like --print=file-names and compiling build.rs. // Also compatible for rustc invocations with `@path` argfile. 
let is_lib_rs = env::args_os() .map(PathBuf::from) .flat_map(|p| if let Some(p) = p.to_str().unwrap_or_default().strip_prefix("@") { fs::read_to_string(p).unwrap().lines().map(PathBuf::from).collect() } else { vec![p] }) .any(|l| l == Path::new("src/lib.rs")); if is_lib_rs { let path = PathBuf::from(env::var_os("OUT_DIR").unwrap()); let path = path.join("foo"); if path.exists() { panic!() } else { fs::File::create(&path).unwrap(); } } let status = Command::new("rustc") .args(env::args().skip(1)) .status() .expect("failed to run rustc"); process::exit(status.code().unwrap_or(2)); } "#, ) .file( "bar/Cargo.toml", r#" [package] name = 'bar' version = '0.1.0' [workspace] "#, ) .file("bar/build.rs", "fn main() {}") .file("bar/src/lib.rs", "pub fn foo() { let mut x = 3; drop(x); }") .build(); // Build our rustc shim p.cargo("build").cwd("foo").run(); // Attempt to fix code, but our shim will always fail the second compile p.cargo("fix --allow-no-vcs --broken-code") .cwd("bar") .env("RUSTC", p.root().join("foo/target/debug/foo")) .with_status(101) .with_stderr_contains("[WARNING] failed to automatically apply fixes [..]") .run(); assert_eq!( p.read_file("bar/src/lib.rs"), "pub fn foo() { let x = 3; drop(x); }" ); } #[cargo_test] fn fix_with_common() { let p = project() .file("src/lib.rs", "") .file( "tests/t1.rs", "mod common; #[test] fn t1() { common::try(); }", ) .file( "tests/t2.rs", "mod common; #[test] fn t2() { common::try(); }", ) .file("tests/common/mod.rs", "pub fn try() {}") .build(); p.cargo("fix --edition --allow-no-vcs").run(); assert_eq!(p.read_file("tests/common/mod.rs"), "pub fn r#try() {}"); } #[cargo_test] fn fix_in_existing_repo_weird_ignore() { // Check that ignore doesn't ignore the repo itself. let p = git::new("foo", |project| { project .file("src/lib.rs", "") .file(".gitignore", "foo\ninner\n") .file("inner/file", "") }); p.cargo("fix").run(); // This is questionable about whether it is the right behavior. 
It should // probably be checking if any source file for the current project is // ignored. p.cargo("fix") .cwd("inner") .with_stderr_contains("[ERROR] no VCS found[..]") .with_status(101) .run(); p.cargo("fix").cwd("src").run(); } #[cargo_test] fn fix_color_message() { // Check that color appears in diagnostics. let p = project() .file("src/lib.rs", "std::compile_error!{\"color test\"}") .build(); p.cargo("fix --allow-no-vcs --color=always") .with_stderr_contains("[..]\x1b[[..]") .with_status(101) .run(); p.cargo("fix --allow-no-vcs --color=never") .with_stderr_contains("error: color test") .with_stderr_does_not_contain("[..]\x1b[[..]") .with_status(101) .run(); } #[cargo_test] fn edition_v2_resolver_report() { // Show a report if the V2 resolver shows differences. Package::new("common", "1.0.0") .feature("f1", &[]) .feature("dev-feat", &[]) .add_dep(Dependency::new("opt_dep", "1.0").optional(true)) .publish(); Package::new("opt_dep", "1.0.0").publish(); Package::new("bar", "1.0.0") .add_dep( Dependency::new("common", "1.0") .target("cfg(whatever)") .enable_features(&["f1"]), ) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [dependencies] common = "1.0" bar = "1.0" [build-dependencies] common = { version = "1.0", features = ["opt_dep"] } [dev-dependencies] common = { version="1.0", features=["dev-feat"] } "#, ) .file("src/lib.rs", "") .build(); p.cargo("fix --edition --allow-no-vcs") .with_stderr_unordered("\ [UPDATING] [..] [DOWNLOADING] crates ... [DOWNLOADED] common v1.0.0 [..] [DOWNLOADED] bar v1.0.0 [..] [DOWNLOADED] opt_dep v1.0.0 [..] note: Switching to Edition 2021 will enable the use of the version 2 feature resolver in Cargo. This may cause some dependencies to be built with fewer features enabled than previously. 
More information about the resolver changes may be found at https://doc.rust-lang.org/nightly/edition-guide/rust-2021/default-cargo-resolver.html When building the following dependencies, the given features will no longer be used: common v1.0.0 removed features: dev-feat, f1, opt_dep common v1.0.0 (as host dependency) removed features: dev-feat, f1 The following differences only apply when building with dev-dependencies: common v1.0.0 removed features: f1, opt_dep [CHECKING] opt_dep v1.0.0 [CHECKING] common v1.0.0 [CHECKING] bar v1.0.0 [CHECKING] foo v0.1.0 [..] [MIGRATING] src/lib.rs from 2018 edition to 2021 [FINISHED] [..] ") .run(); } #[cargo_test] fn rustfix_handles_multi_spans() { // Checks that rustfix handles a single diagnostic with multiple // suggestion spans (non_fmt_panic in this case). let p = project() .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) .file( "src/lib.rs", r#" pub fn foo() { panic!(format!("hey")); } "#, ) .build(); p.cargo("fix --allow-no-vcs").run(); assert!(p.read_file("src/lib.rs").contains(r#"panic!("hey");"#)); } #[cargo_test] fn fix_edition_2021() { // Can migrate 2021, even when lints are allowed. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" "#, ) .file( "src/lib.rs", r#" #![allow(ellipsis_inclusive_range_patterns)] pub fn f() -> bool { let x = 123; match x { 0...100 => true, _ => false, } } "#, ) .build(); p.cargo("fix --edition --allow-no-vcs") .with_stderr( "\ [CHECKING] foo v0.1.0 [..] [MIGRATING] src/lib.rs from 2018 edition to 2021 [FIXED] src/lib.rs (1 fix) [FINISHED] [..] ", ) .run(); assert!(p.read_file("src/lib.rs").contains(r#"0..=100 => true,"#)); } #[cargo_test] fn fix_shared_cross_workspace() { // Fixing a file that is shared between multiple packages in the same workspace. // Make sure two processes don't try to fix the same file at the same time. 
let p = project() .file( "Cargo.toml", r#" [workspace] members = ["foo", "bar"] "#, ) .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("foo/src/lib.rs", "pub mod shared;") // This will fix both unused and bare trait. .file("foo/src/shared.rs", "pub fn fixme(x: Box<&Fn() -> ()>) {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file( "bar/src/lib.rs", r#" #[path="../../foo/src/shared.rs"] pub mod shared; "#, ) .build(); // The output here can be either of these two, depending on who runs first: // [FIXED] bar/src/../../foo/src/shared.rs (2 fixes) // [FIXED] foo/src/shared.rs (2 fixes) p.cargo("fix --allow-no-vcs") .with_stderr_unordered( "\ [CHECKING] foo v0.1.0 [..] [CHECKING] bar v0.1.0 [..] [FIXED] [..]foo/src/shared.rs (2 fixes) [FINISHED] [..] ", ) .run(); assert_match_exact( "pub fn fixme(_x: Box<&dyn Fn() -> ()>) {}", &p.read_file("foo/src/shared.rs"), ); } #[cargo_test] fn abnormal_exit() { // rustc fails unexpectedly after applying fixes, should show some error information. // // This works with a proc-macro that runs three times: // - First run (collect diagnostics pass): writes a file, exits normally. // - Second run (verify diagnostics work): it detects the presence of the // file, removes the file, and aborts the process. // - Third run (collecting messages to display): file not found, exits normally. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] pm = {path="pm"} "#, ) .file( "src/lib.rs", r#" pub fn f() { let mut x = 1; pm::crashme!(); } "#, ) .file( "pm/Cargo.toml", r#" [package] name = "pm" version = "0.1.0" edition = "2018" [lib] proc-macro = true "#, ) .file( "pm/src/lib.rs", r#" use proc_macro::TokenStream; #[proc_macro] pub fn crashme(_input: TokenStream) -> TokenStream { // Use a file to succeed on the first pass, and fail on the second. 
let p = std::env::var_os("ONCE_PATH").unwrap(); let check_path = std::path::Path::new(&p); if check_path.exists() { eprintln!("I'm not a diagnostic."); std::fs::remove_file(check_path).unwrap(); std::process::abort(); } else { std::fs::write(check_path, "").unwrap(); "".parse().unwrap() } } "#, ) .build(); p.cargo("fix --lib --allow-no-vcs") .env( "ONCE_PATH", paths::root().join("proc-macro-run-once").to_str().unwrap(), ) .with_stderr_contains( "[WARNING] failed to automatically apply fixes suggested by rustc to crate `foo`", ) .with_stderr_contains("I'm not a diagnostic.") // "signal: 6, SIGABRT: process abort signal" on some platforms .with_stderr_contains("rustc exited abnormally: [..]") .with_stderr_contains("Original diagnostics will follow.") .run(); } #[cargo_test] fn fix_with_run_cargo_in_proc_macros() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [lib] proc-macro = true "#, ) .file( "src/lib.rs", r#" use proc_macro::*; #[proc_macro] pub fn foo(_input: TokenStream) -> TokenStream { let output = std::process::Command::new(env!("CARGO")) .args(&["metadata", "--format-version=1"]) .output() .unwrap(); eprintln!("{}", std::str::from_utf8(&output.stderr).unwrap()); println!("{}", std::str::from_utf8(&output.stdout).unwrap()); "".parse().unwrap() } "#, ) .file( "src/bin/main.rs", r#" use foo::foo; fn main() { foo!("bar") } "#, ) .build(); p.cargo("fix --allow-no-vcs") .with_stderr_does_not_contain("error: could not find .rs file in rustc args") .run(); } #[cargo_test] fn non_edition_lint_migration() { // Migrating to a new edition where a non-edition lint causes problems. let p = project() .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) .file( "src/lib.rs", r#" // This is only used in a test. // To be correct, this should be gated on #[cfg(test)], but // sometimes people don't do that. If the unused_imports // lint removes this, then the unittest will fail to compile. 
use std::str::from_utf8; pub mod foo { pub const FOO: &[u8] = &[102, 111, 111]; } #[test] fn example() { assert_eq!( from_utf8(::foo::FOO), Ok("foo") ); } "#, ) .build(); // Check that it complains about an unused import. p.cargo("check --lib") .with_stderr_contains("[..]unused_imports[..]") .with_stderr_contains("[..]std::str::from_utf8[..]") .run(); p.cargo("fix --edition --allow-no-vcs").run(); let contents = p.read_file("src/lib.rs"); // Check it does not remove the "unused" import. assert!(contents.contains("use std::str::from_utf8;")); // Check that it made the edition migration. assert!(contents.contains("from_utf8(crate::foo::FOO)")); } // For rust-lang/cargo#9857 #[cargo_test] fn fix_in_dependency() { Package::new("bar", "1.0.0") .file( "src/lib.rs", r#" #[macro_export] macro_rules! m { ($i:tt) => { let $i = 1; }; } "#, ) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "1.0" "#, ) .file( "src/lib.rs", r#" pub fn foo() { bar::m!(abc); } "#, ) .build(); p.cargo("fix --allow-no-vcs") .with_stderr_does_not_contain("[FIXED] [..]") .run(); } cargo-0.66.0/tests/testsuite/freshness.rs000066400000000000000000002134331432416201200204400ustar00rootroot00000000000000//! Tests for fingerprinting (rebuild detection). 
use filetime::FileTime; use std::fs::{self, OpenOptions}; use std::io; use std::io::prelude::*; use std::net::TcpListener; use std::path::{Path, PathBuf}; use std::process::Stdio; use std::thread; use std::time::SystemTime; use super::death; use cargo_test_support::paths::{self, CargoPathExt}; use cargo_test_support::registry::Package; use cargo_test_support::{ basic_manifest, is_coarse_mtime, project, rustc_host, rustc_host_env, sleep_ms, }; #[cargo_test] fn modifying_and_moving() { let p = project() .file("src/main.rs", "mod a; fn main() {}") .file("src/a.rs", "") .build(); p.cargo("build") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("build").with_stdout("").run(); p.root().move_into_the_past(); p.root().join("target").move_into_the_past(); p.change_file("src/a.rs", "#[allow(unused)]fn main() {}"); p.cargo("build") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); fs::rename(&p.root().join("src/a.rs"), &p.root().join("src/b.rs")).unwrap(); p.cargo("build") .with_status(101) .with_stderr_contains("[..]file not found[..]") .run(); } #[cargo_test] fn modify_only_some_files() { let p = project() .file("src/lib.rs", "mod a;") .file("src/a.rs", "") .file("src/main.rs", "mod b; fn main() {}") .file("src/b.rs", "") .file("tests/test.rs", "") .build(); p.cargo("build") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("test").run(); sleep_ms(1000); assert!(p.bin("foo").is_file()); let lib = p.root().join("src/lib.rs"); p.change_file("src/lib.rs", "invalid rust code"); p.change_file("src/b.rs", "#[allow(unused)]fn foo() {}"); lib.move_into_the_past(); // Make sure the binary is rebuilt, not the lib p.cargo("build") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); assert!(p.bin("foo").is_file()); } #[cargo_test] fn rebuild_sub_package_then_while_package() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.1" [dependencies.a] path = "a" [dependencies.b] path = "b" "#, ) .file("src/lib.rs", "extern crate a; extern crate b;") .file( "a/Cargo.toml", r#" [package] name = "a" authors = [] version = "0.0.1" [dependencies.b] path = "../b" "#, ) .file("a/src/lib.rs", "extern crate b;") .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) .file("b/src/lib.rs", "") .build(); p.cargo("build") .with_stderr( "\ [COMPILING] b [..] [COMPILING] a [..] [COMPILING] foo [..] [FINISHED] dev [..] ", ) .run(); if is_coarse_mtime() { sleep_ms(1000); } p.change_file("b/src/lib.rs", "pub fn b() {}"); p.cargo("build -pb -v") .with_stderr( "\ [COMPILING] b [..] [RUNNING] `rustc --crate-name b [..] [FINISHED] dev [..] ", ) .run(); p.change_file( "src/lib.rs", "extern crate a; extern crate b; pub fn toplevel() {}", ); p.cargo("build -v") .with_stderr( "\ [FRESH] b [..] [COMPILING] a [..] [RUNNING] `rustc --crate-name a [..] [COMPILING] foo [..] [RUNNING] `rustc --crate-name foo [..] [FINISHED] dev [..] ", ) .run(); } #[cargo_test] fn changing_lib_features_caches_targets() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.1" [features] foo = [] "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_stderr( "\ [..]Compiling foo v0.0.1 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("build --features foo") .with_stderr( "\ [..]Compiling foo v0.0.1 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); /* Targets should be cached from the first build */ p.cargo("build") .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") .run(); p.cargo("build").with_stdout("").run(); p.cargo("build --features foo") .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") .run(); } #[cargo_test] fn changing_profiles_caches_targets() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.1" [profile.dev] panic = "abort" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_stderr( "\ [..]Compiling foo v0.0.1 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("test") .with_stderr( "\ [..]Compiling foo v0.0.1 ([..]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target[..]debug[..]deps[..]foo-[..][EXE]) [DOCTEST] foo ", ) .run(); /* Targets should be cached from the first build */ p.cargo("build") .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") .run(); p.cargo("test foo") .with_stderr( "\ [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] 
(target[..]debug[..]deps[..]foo-[..][EXE]) ", ) .run(); } #[cargo_test] fn changing_bin_paths_common_target_features_caches_targets() { // Make sure dep_cache crate is built once per feature let p = project() .no_manifest() .file( ".cargo/config", r#" [build] target-dir = "./target" "#, ) .file( "dep_crate/Cargo.toml", r#" [package] name = "dep_crate" version = "0.0.1" authors = [] [features] ftest = [] "#, ) .file( "dep_crate/src/lib.rs", r#" #[cfg(feature = "ftest")] pub fn yo() { println!("ftest on") } #[cfg(not(feature = "ftest"))] pub fn yo() { println!("ftest off") } "#, ) .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] [dependencies] dep_crate = {path = "../dep_crate", features = []} "#, ) .file("a/src/lib.rs", "") .file( "a/src/main.rs", r#" extern crate dep_crate; use dep_crate::yo; fn main() { yo(); } "#, ) .file( "b/Cargo.toml", r#" [package] name = "b" version = "0.0.1" authors = [] [dependencies] dep_crate = {path = "../dep_crate", features = ["ftest"]} "#, ) .file("b/src/lib.rs", "") .file( "b/src/main.rs", r#" extern crate dep_crate; use dep_crate::yo; fn main() { yo(); } "#, ) .build(); /* Build and rebuild a/. Ensure dep_crate only builds once */ p.cargo("run") .cwd("a") .with_stdout("ftest off") .with_stderr( "\ [..]Compiling dep_crate v0.0.1 ([..]) [..]Compiling a v0.0.1 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [RUNNING] `[..]target/debug/a[EXE]` ", ) .run(); p.cargo("clean -p a").cwd("a").run(); p.cargo("run") .cwd("a") .with_stdout("ftest off") .with_stderr( "\ [..]Compiling a v0.0.1 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [RUNNING] `[..]target/debug/a[EXE]` ", ) .run(); /* Build and rebuild b/. Ensure dep_crate only builds once */ p.cargo("run") .cwd("b") .with_stdout("ftest on") .with_stderr( "\ [..]Compiling dep_crate v0.0.1 ([..]) [..]Compiling b v0.0.1 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
[RUNNING] `[..]target/debug/b[EXE]` ", ) .run(); p.cargo("clean -p b").cwd("b").run(); p.cargo("run") .cwd("b") .with_stdout("ftest on") .with_stderr( "\ [..]Compiling b v0.0.1 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [RUNNING] `[..]target/debug/b[EXE]` ", ) .run(); /* Build a/ package again. If we cache different feature dep builds correctly, * this should not cause a rebuild of dep_crate */ p.cargo("clean -p a").cwd("a").run(); p.cargo("run") .cwd("a") .with_stdout("ftest off") .with_stderr( "\ [..]Compiling a v0.0.1 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [RUNNING] `[..]target/debug/a[EXE]` ", ) .run(); /* Build b/ package again. If we cache different feature dep builds correctly, * this should not cause a rebuild */ p.cargo("clean -p b").cwd("b").run(); p.cargo("run") .cwd("b") .with_stdout("ftest on") .with_stderr( "\ [..]Compiling b v0.0.1 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [RUNNING] `[..]target/debug/b[EXE]` ", ) .run(); } #[cargo_test] fn changing_bin_features_caches_targets() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.1" [features] foo = [] "#, ) .file( "src/main.rs", r#" fn main() { let msg = if cfg!(feature = "foo") { "feature on" } else { "feature off" }; println!("{}", msg); } "#, ) .build(); p.cargo("build") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.rename_run("foo", "off1").with_stdout("feature off").run(); p.cargo("build --features foo") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.rename_run("foo", "on1").with_stdout("feature on").run(); /* Targets should be cached from the first build */ let mut e = p.cargo("build"); // MSVC does not include hash in binary filename, so it gets recompiled. 
if cfg!(target_env = "msvc") { e.with_stderr("[COMPILING] foo[..]\n[FINISHED] dev[..]"); } else { e.with_stderr("[FINISHED] dev[..]"); } e.run(); p.rename_run("foo", "off2").with_stdout("feature off").run(); let mut e = p.cargo("build --features foo"); if cfg!(target_env = "msvc") { e.with_stderr("[COMPILING] foo[..]\n[FINISHED] dev[..]"); } else { e.with_stderr("[FINISHED] dev[..]"); } e.run(); p.rename_run("foo", "on2").with_stdout("feature on").run(); } #[cargo_test] fn rebuild_tests_if_lib_changes() { let p = project() .file("src/lib.rs", "pub fn foo() {}") .file( "tests/foo.rs", r#" extern crate foo; #[test] fn test() { foo::foo(); } "#, ) .build(); p.cargo("build").run(); p.cargo("test").run(); sleep_ms(1000); p.change_file("src/lib.rs", ""); p.cargo("build -v").run(); p.cargo("test -v") .with_status(101) .with_stderr_contains("[..]cannot find function `foo`[..]") .run(); } #[cargo_test] fn no_rebuild_transitive_target_deps() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] a = { path = "a" } [dev-dependencies] b = { path = "b" } "#, ) .file("src/lib.rs", "") .file("tests/foo.rs", "") .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] [target.foo.dependencies] c = { path = "../c" } "#, ) .file("a/src/lib.rs", "") .file( "b/Cargo.toml", r#" [package] name = "b" version = "0.0.1" authors = [] [dependencies] c = { path = "../c" } "#, ) .file("b/src/lib.rs", "") .file("c/Cargo.toml", &basic_manifest("c", "0.0.1")) .file("c/src/lib.rs", "") .build(); p.cargo("build").run(); p.cargo("test --no-run") .with_stderr( "\ [COMPILING] c v0.0.1 ([..]) [COMPILING] b v0.0.1 ([..]) [COMPILING] foo v0.0.1 ([..]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] 
[EXECUTABLE] unittests src/lib.rs (target/debug/deps/foo-[..][EXE]) [EXECUTABLE] tests/foo.rs (target/debug/deps/foo-[..][EXE]) ", ) .run(); } #[cargo_test] fn rerun_if_changed_in_dep() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] a = { path = "a" } "#, ) .file("src/lib.rs", "") .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] build = "build.rs" "#, ) .file( "a/build.rs", r#" fn main() { println!("cargo:rerun-if-changed=build.rs"); } "#, ) .file("a/src/lib.rs", "") .build(); p.cargo("build").run(); p.cargo("build").with_stdout("").run(); } #[cargo_test] fn same_build_dir_cached_packages() { let p = project() .no_manifest() .file( "a1/Cargo.toml", r#" [package] name = "a1" version = "0.0.1" authors = [] [dependencies] b = { path = "../b" } "#, ) .file("a1/src/lib.rs", "") .file( "a2/Cargo.toml", r#" [package] name = "a2" version = "0.0.1" authors = [] [dependencies] b = { path = "../b" } "#, ) .file("a2/src/lib.rs", "") .file( "b/Cargo.toml", r#" [package] name = "b" version = "0.0.1" authors = [] [dependencies] c = { path = "../c" } "#, ) .file("b/src/lib.rs", "") .file( "c/Cargo.toml", r#" [package] name = "c" version = "0.0.1" authors = [] [dependencies] d = { path = "../d" } "#, ) .file("c/src/lib.rs", "") .file("d/Cargo.toml", &basic_manifest("d", "0.0.1")) .file("d/src/lib.rs", "") .file( ".cargo/config", r#" [build] target-dir = "./target" "#, ) .build(); p.cargo("build") .cwd("a1") .with_stderr(&format!( "\ [COMPILING] d v0.0.1 ({dir}/d) [COMPILING] c v0.0.1 ({dir}/c) [COMPILING] b v0.0.1 ({dir}/b) [COMPILING] a1 v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", dir = p.url().to_file_path().unwrap().to_str().unwrap() )) .run(); p.cargo("build") .cwd("a2") .with_stderr( "\ [COMPILING] a2 v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn no_rebuild_if_build_artifacts_move_backwards_in_time() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] a = { path = "a" } "#, ) .file("src/lib.rs", "") .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) .file("a/src/lib.rs", "") .build(); p.cargo("build").run(); p.root().move_into_the_past(); p.cargo("build") .with_stdout("") .with_stderr("[FINISHED] [..]") .run(); } #[cargo_test] fn rebuild_if_build_artifacts_move_forward_in_time() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] a = { path = "a" } "#, ) .file("src/lib.rs", "") .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) .file("a/src/lib.rs", "") .build(); p.cargo("build").run(); p.root().move_into_the_future(); p.cargo("build") .env("CARGO_LOG", "") .with_stdout("") .with_stderr( "\ [COMPILING] a v0.0.1 ([..]) [COMPILING] foo v0.0.1 ([..]) [FINISHED] [..] ", ) .run(); } #[cargo_test] fn rebuild_if_environment_changes() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" description = "old desc" version = "0.0.1" authors = [] "#, ) .file( "src/main.rs", r#" fn main() { println!("{}", env!("CARGO_PKG_DESCRIPTION")); } "#, ) .build(); p.cargo("run") .with_stdout("old desc") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [RUNNING] `target/debug/foo[EXE]` ", ) .run(); p.change_file( "Cargo.toml", r#" [package] name = "foo" description = "new desc" version = "0.0.1" authors = [] "#, ); p.cargo("run") .with_stdout("new desc") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
[RUNNING] `target/debug/foo[EXE]` ", ) .run(); } #[cargo_test] fn no_rebuild_when_rename_dir() { let p = project() .file( "Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [workspace] [dependencies] foo = { path = "foo" } "#, ) .file("src/_unused.rs", "") .file("build.rs", "fn main() {}") .file("foo/Cargo.toml", &basic_manifest("foo", "0.0.1")) .file("foo/src/lib.rs", "") .file("foo/build.rs", "fn main() {}") .build(); // make sure the most recently modified file is `src/lib.rs`, not // `Cargo.toml`, to expose a historical bug where we forgot to strip the // `Cargo.toml` path from looking for the package root. cargo_test_support::sleep_ms(100); fs::write(p.root().join("src/lib.rs"), "").unwrap(); p.cargo("build").run(); let mut new = p.root(); new.pop(); new.push("bar"); fs::rename(p.root(), &new).unwrap(); p.cargo("build") .cwd(&new) .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") .run(); } #[cargo_test] fn unused_optional_dep() { Package::new("registry1", "0.1.0").publish(); Package::new("registry2", "0.1.0").publish(); Package::new("registry3", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "p" authors = [] version = "0.1.0" [dependencies] bar = { path = "bar" } baz = { path = "baz" } registry1 = "*" "#, ) .file("src/lib.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.1" authors = [] [dev-dependencies] registry2 = "*" "#, ) .file("bar/src/lib.rs", "") .file( "baz/Cargo.toml", r#" [package] name = "baz" version = "0.1.1" authors = [] [dependencies] registry3 = { version = "*", optional = true } "#, ) .file("baz/src/lib.rs", "") .build(); p.cargo("build").run(); p.cargo("build").with_stderr("[FINISHED] [..]").run(); } #[cargo_test] fn path_dev_dep_registry_updates() { Package::new("registry1", "0.1.0").publish(); Package::new("registry2", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "p" authors = [] version = "0.1.0" 
[dependencies] bar = { path = "bar" } "#, ) .file("src/lib.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.1" authors = [] [dependencies] registry1 = "*" [dev-dependencies] baz = { path = "../baz"} "#, ) .file("bar/src/lib.rs", "") .file( "baz/Cargo.toml", r#" [package] name = "baz" version = "0.1.1" authors = [] [dependencies] registry2 = "*" "#, ) .file("baz/src/lib.rs", "") .build(); p.cargo("build").run(); p.cargo("build").with_stderr("[FINISHED] [..]").run(); } #[cargo_test] fn change_panic_mode() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ['bar', 'baz'] [profile.dev] panic = 'abort' "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1")) .file("bar/src/lib.rs", "") .file( "baz/Cargo.toml", r#" [package] name = "baz" version = "0.1.1" authors = [] [lib] proc-macro = true [dependencies] bar = { path = '../bar' } "#, ) .file("baz/src/lib.rs", "extern crate bar;") .build(); p.cargo("build -p bar").run(); p.cargo("build -p baz").run(); } #[cargo_test] fn dont_rebuild_based_on_plugins() { let p = project() .file( "Cargo.toml", r#" [package] name = "bar" version = "0.1.1" [workspace] members = ['baz'] [dependencies] proc-macro-thing = { path = 'proc-macro-thing' } "#, ) .file("src/lib.rs", "") .file( "proc-macro-thing/Cargo.toml", r#" [package] name = "proc-macro-thing" version = "0.1.1" [lib] proc-macro = true [dependencies] qux = { path = '../qux' } "#, ) .file("proc-macro-thing/src/lib.rs", "") .file( "baz/Cargo.toml", r#" [package] name = "baz" version = "0.1.1" [dependencies] qux = { path = '../qux' } "#, ) .file("baz/src/main.rs", "fn main() {}") .file("qux/Cargo.toml", &basic_manifest("qux", "0.1.1")) .file("qux/src/lib.rs", "") .build(); p.cargo("build").run(); p.cargo("build -p baz").run(); p.cargo("build").with_stderr("[FINISHED] [..]\n").run(); p.cargo("build -p bar") .with_stderr("[FINISHED] [..]\n") .run(); } #[cargo_test] fn reuse_workspace_lib() { let p = project() 
.file( "Cargo.toml", r#" [package] name = "bar" version = "0.1.1" [workspace] [dependencies] baz = { path = 'baz' } "#, ) .file("src/lib.rs", "") .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.1")) .file("baz/src/lib.rs", "") .build(); p.cargo("build").run(); p.cargo("test -p baz -v --no-run") .with_stderr( "\ [COMPILING] baz v0.1.1 ([..]) [RUNNING] `rustc[..] --test [..]` [FINISHED] [..] [EXECUTABLE] `[..]/target/debug/deps/baz-[..][EXE]` ", ) .run(); } #[cargo_test] fn reuse_shared_build_dep() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [dependencies] shared = {path = "shared"} [workspace] members = ["shared", "bar"] "#, ) .file("src/main.rs", "fn main() {}") .file("shared/Cargo.toml", &basic_manifest("shared", "0.0.1")) .file("shared/src/lib.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" [build-dependencies] shared = { path = "../shared" } "#, ) .file("bar/src/lib.rs", "") .file("bar/build.rs", "fn main() {}") .build(); p.cargo("build --workspace").run(); // This should not recompile! p.cargo("build -p foo -v") .with_stderr( "\ [FRESH] shared [..] [FRESH] foo [..] [FINISHED] [..] 
", ) .run(); } #[cargo_test] fn changing_rustflags_is_cached() { let p = project().file("src/lib.rs", "").build(); // This isn't ever cached, we always have to recompile for _ in 0..2 { p.cargo("build") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", ) .run(); p.cargo("build") .env("RUSTFLAGS", "-C linker=cc") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", ) .run(); } } #[cargo_test] fn update_dependency_mtime_does_not_rebuild() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [dependencies] bar = { path = "bar" } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "") .build(); p.cargo("build -Z mtime-on-use") .masquerade_as_nightly_cargo(&["mtime-on-use"]) .env("RUSTFLAGS", "-C linker=cc") .with_stderr( "\ [COMPILING] bar v0.0.1 ([..]) [COMPILING] foo v0.0.1 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", ) .run(); // This does not make new files, but it does update the mtime of the dependency. p.cargo("build -p bar -Z mtime-on-use") .masquerade_as_nightly_cargo(&["mtime-on-use"]) .env("RUSTFLAGS", "-C linker=cc") .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") .run(); // This should not recompile! p.cargo("build -Z mtime-on-use") .masquerade_as_nightly_cargo(&["mtime-on-use"]) .env("RUSTFLAGS", "-C linker=cc") .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") .run(); } fn fingerprint_cleaner(mut dir: PathBuf, timestamp: filetime::FileTime) { // Cargo is experimenting with letting outside projects develop some // limited forms of GC for target_dir. This is one of the forms. // Specifically, Cargo is updating the mtime of a file in // target/profile/.fingerprint each time it uses the fingerprint. 
// So a cleaner can remove files associated with a fingerprint // if all the files in the fingerprint's folder are older then a time stamp without // effecting any builds that happened since that time stamp. let mut cleaned = false; dir.push(".fingerprint"); for fing in fs::read_dir(&dir).unwrap() { let fing = fing.unwrap(); let outdated = |f: io::Result| { filetime::FileTime::from_last_modification_time(&f.unwrap().metadata().unwrap()) <= timestamp }; if fs::read_dir(fing.path()).unwrap().all(outdated) { fs::remove_dir_all(fing.path()).unwrap(); println!("remove: {:?}", fing.path()); // a real cleaner would remove the big files in deps and build as well // but fingerprint is sufficient for our tests cleaned = true; } else { } } assert!( cleaned, "called fingerprint_cleaner, but there was nothing to remove" ); } #[cargo_test] fn fingerprint_cleaner_does_not_rebuild() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [dependencies] bar = { path = "bar" } [features] a = [] "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "") .build(); p.cargo("build -Z mtime-on-use") .masquerade_as_nightly_cargo(&["mtime-on-use"]) .run(); p.cargo("build -Z mtime-on-use --features a") .masquerade_as_nightly_cargo(&["mtime-on-use"]) .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", ) .run(); if is_coarse_mtime() { sleep_ms(1000); } let timestamp = filetime::FileTime::from_system_time(SystemTime::now()); if is_coarse_mtime() { sleep_ms(1000); } // This does not make new files, but it does update the mtime. p.cargo("build -Z mtime-on-use --features a") .masquerade_as_nightly_cargo(&["mtime-on-use"]) .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") .run(); fingerprint_cleaner(p.target_debug_dir(), timestamp); // This should not recompile! 
p.cargo("build -Z mtime-on-use --features a") .masquerade_as_nightly_cargo(&["mtime-on-use"]) .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") .run(); // But this should be cleaned and so need a rebuild p.cargo("build -Z mtime-on-use") .masquerade_as_nightly_cargo(&["mtime-on-use"]) .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", ) .run(); } #[cargo_test] fn reuse_panic_build_dep_test() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [build-dependencies] bar = { path = "bar" } [dev-dependencies] bar = { path = "bar" } [profile.dev] panic = "abort" "#, ) .file("src/lib.rs", "") .file("build.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "") .build(); // Check that `bar` is not built twice. It is only needed once (without `panic`). p.cargo("test --lib --no-run -v") .with_stderr( "\ [COMPILING] bar [..] [RUNNING] `rustc --crate-name bar [..] [COMPILING] foo [..] [RUNNING] `rustc --crate-name build_script_build [..] [RUNNING] [..]build-script-build` [RUNNING] `rustc --crate-name foo src/lib.rs [..]--test[..] [FINISHED] [..] [EXECUTABLE] `[..]/target/debug/deps/foo-[..][EXE]` ", ) .run(); } #[cargo_test] fn reuse_panic_pm() { // foo(panic) -> bar(panic) // somepm(nopanic) -> bar(nopanic) let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [dependencies] bar = { path = "bar" } somepm = { path = "somepm" } [profile.dev] panic = "abort" "#, ) .file("src/lib.rs", "extern crate bar;") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "") .file( "somepm/Cargo.toml", r#" [package] name = "somepm" version = "0.0.1" [lib] proc-macro = true [dependencies] bar = { path = "../bar" } "#, ) .file("somepm/src/lib.rs", "extern crate bar;") .build(); // bar is built once without panic (for proc-macro) and once with (for the // normal dependency). 
p.cargo("build -v") .with_stderr_unordered( "\ [COMPILING] bar [..] [RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C debuginfo=2 [..] [RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort[..]-C debuginfo=2 [..] [COMPILING] somepm [..] [RUNNING] `rustc --crate-name somepm [..] [COMPILING] foo [..] [RUNNING] `rustc --crate-name foo src/lib.rs [..]-C panic=abort[..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn bust_patched_dep() { Package::new("registry1", "0.1.0").publish(); Package::new("registry2", "0.1.0") .dep("registry1", "0.1.0") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [dependencies] registry2 = "0.1.0" [patch.crates-io] registry1 = { path = "reg1new" } "#, ) .file("src/lib.rs", "") .file("reg1new/Cargo.toml", &basic_manifest("registry1", "0.1.0")) .file("reg1new/src/lib.rs", "") .build(); p.cargo("build").run(); if is_coarse_mtime() { sleep_ms(1000); } p.change_file("reg1new/src/lib.rs", ""); if is_coarse_mtime() { sleep_ms(1000); } p.cargo("build") .with_stderr( "\ [COMPILING] registry1 v0.1.0 ([..]) [COMPILING] registry2 v0.1.0 [COMPILING] foo v0.0.1 ([..]) [FINISHED] [..] ", ) .run(); p.cargo("build -v") .with_stderr( "\ [FRESH] registry1 v0.1.0 ([..]) [FRESH] registry2 v0.1.0 [FRESH] foo v0.0.1 ([..]) [FINISHED] [..] 
", ) .run(); } #[cargo_test] fn rebuild_on_mid_build_file_modification() { let server = TcpListener::bind("127.0.0.1:0").unwrap(); let addr = server.local_addr().unwrap(); let p = project() .file( "Cargo.toml", r#" [workspace] members = ["root", "proc_macro_dep"] "#, ) .file( "root/Cargo.toml", r#" [package] name = "root" version = "0.1.0" authors = [] [dependencies] proc_macro_dep = { path = "../proc_macro_dep" } "#, ) .file( "root/src/lib.rs", r#" #[macro_use] extern crate proc_macro_dep; #[derive(Noop)] pub struct X; "#, ) .file( "proc_macro_dep/Cargo.toml", r#" [package] name = "proc_macro_dep" version = "0.1.0" authors = [] [lib] proc-macro = true "#, ) .file( "proc_macro_dep/src/lib.rs", &format!( r#" extern crate proc_macro; use std::io::Read; use std::net::TcpStream; use proc_macro::TokenStream; #[proc_macro_derive(Noop)] pub fn noop(_input: TokenStream) -> TokenStream {{ let mut stream = TcpStream::connect("{}").unwrap(); let mut v = Vec::new(); stream.read_to_end(&mut v).unwrap(); "".parse().unwrap() }} "#, addr ), ) .build(); let root = p.root(); let t = thread::spawn(move || { let socket = server.accept().unwrap().0; sleep_ms(1000); let mut file = OpenOptions::new() .write(true) .append(true) .open(root.join("root/src/lib.rs")) .unwrap(); writeln!(file, "// modified").expect("Failed to append to root sources"); drop(file); drop(socket); drop(server.accept().unwrap()); }); p.cargo("build") .with_stderr( "\ [COMPILING] proc_macro_dep v0.1.0 ([..]/proc_macro_dep) [COMPILING] root v0.1.0 ([..]/root) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("build") .with_stderr( "\ [COMPILING] root v0.1.0 ([..]/root) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); t.join().ok().unwrap(); } #[cargo_test] fn dirty_both_lib_and_test() { // This tests that all artifacts that depend on the results of a build // script will get rebuilt when the build script reruns, even for separate // commands. 
It does the following: // // 1. Project "foo" has a build script which will compile a small // staticlib to link against. Normally this would use the `cc` crate, // but here we just use rustc to avoid the `cc` dependency. // 2. Build the library. // 3. Build the unit test. The staticlib intentionally has a bad value. // 4. Rewrite the staticlib with the correct value. // 5. Build the library again. // 6. Build the unit test. This should recompile. let slib = |n| { format!( r#" #[no_mangle] pub extern "C" fn doit() -> i32 {{ return {}; }} "#, n ) }; let p = project() .file( "src/lib.rs", r#" extern "C" { fn doit() -> i32; } #[test] fn t1() { assert_eq!(unsafe { doit() }, 1, "doit assert failure"); } "#, ) .file( "build.rs", r#" use std::env; use std::path::PathBuf; use std::process::Command; fn main() { let rustc = env::var_os("RUSTC").unwrap(); let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap()); assert!( Command::new(rustc) .args(&[ "--crate-type=staticlib", "--out-dir", out_dir.to_str().unwrap(), "slib.rs" ]) .status() .unwrap() .success(), "slib build failed" ); println!("cargo:rustc-link-lib=slib"); println!("cargo:rustc-link-search={}", out_dir.display()); } "#, ) .file("slib.rs", &slib(2)) .build(); p.cargo("build").run(); // 2 != 1 p.cargo("test --lib") .with_status(101) .with_stdout_contains("[..]doit assert failure[..]") .run(); if is_coarse_mtime() { // #5918 sleep_ms(1000); } // Fix the mistake. p.change_file("slib.rs", &slib(1)); p.cargo("build").run(); // This should recompile with the new static lib, and the test should pass. p.cargo("test --lib").run(); } #[cargo_test] fn script_fails_stay_dirty() { // Check if a script is aborted (such as hitting Ctrl-C) that it will re-run. // Steps: // 1. Build to establish fingerprints. // 2. Make a change that triggers the build script to re-run. Abort the // script while it is running. // 3. Run the build again and make sure it re-runs the script. 
let p = project() .file( "build.rs", r#" mod helper; fn main() { println!("cargo:rerun-if-changed=build.rs"); helper::doit(); } "#, ) .file("helper.rs", "pub fn doit() {}") .file("src/lib.rs", "") .build(); p.cargo("build").run(); if is_coarse_mtime() { sleep_ms(1000); } p.change_file("helper.rs", r#"pub fn doit() {panic!("Crash!");}"#); p.cargo("build") .with_stderr_contains("[..]Crash![..]") .with_status(101) .run(); // There was a bug where this second call would be "fresh". p.cargo("build") .with_stderr_contains("[..]Crash![..]") .with_status(101) .run(); } #[cargo_test] fn simulated_docker_deps_stay_cached() { // Test what happens in docker where the nanoseconds are zeroed out. Package::new("regdep", "1.0.0").publish(); Package::new("regdep_old_style", "1.0.0") .file("build.rs", "fn main() {}") .file("src/lib.rs", "") .publish(); Package::new("regdep_env", "1.0.0") .file( "build.rs", r#" fn main() { println!("cargo:rerun-if-env-changed=SOMEVAR"); } "#, ) .file("src/lib.rs", "") .publish(); Package::new("regdep_rerun", "1.0.0") .file( "build.rs", r#" fn main() { println!("cargo:rerun-if-changed=build.rs"); } "#, ) .file("src/lib.rs", "") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] pathdep = { path = "pathdep" } regdep = "1.0" regdep_old_style = "1.0" regdep_env = "1.0" regdep_rerun = "1.0" "#, ) .file( "src/lib.rs", " extern crate pathdep; extern crate regdep; extern crate regdep_old_style; extern crate regdep_env; extern crate regdep_rerun; ", ) .file("build.rs", "fn main() {}") .file("pathdep/Cargo.toml", &basic_manifest("pathdep", "1.0.0")) .file("pathdep/src/lib.rs", "") .build(); p.cargo("build").run(); let already_zero = { // This happens on HFS with 1-second timestamp resolution, // or other filesystems where it just so happens to write exactly on a // 1-second boundary. 
let metadata = fs::metadata(p.root().join("src/lib.rs")).unwrap(); let mtime = FileTime::from_last_modification_time(&metadata); mtime.nanoseconds() == 0 }; // Recursively remove `nanoseconds` from every path. fn zeropath(path: &Path) { for entry in walkdir::WalkDir::new(path) .into_iter() .filter_map(|e| e.ok()) { let metadata = fs::metadata(entry.path()).unwrap(); let mtime = metadata.modified().unwrap(); let mtime_duration = mtime.duration_since(SystemTime::UNIX_EPOCH).unwrap(); let trunc_mtime = FileTime::from_unix_time(mtime_duration.as_secs() as i64, 0); let atime = metadata.accessed().unwrap(); let atime_duration = atime.duration_since(SystemTime::UNIX_EPOCH).unwrap(); let trunc_atime = FileTime::from_unix_time(atime_duration.as_secs() as i64, 0); if let Err(e) = filetime::set_file_times(entry.path(), trunc_atime, trunc_mtime) { // Windows doesn't allow changing filetimes on some things // (directories, other random things I'm not sure why). Just // ignore them. if e.kind() == std::io::ErrorKind::PermissionDenied { println!("PermissionDenied filetime on {:?}", entry.path()); } else { panic!("FileTime error on {:?}: {:?}", entry.path(), e); } } } } zeropath(&p.root()); zeropath(&paths::home()); if already_zero { println!("already zero"); // If it was already truncated, then everything stays fresh. p.cargo("build -v") .with_stderr_unordered( "\ [FRESH] pathdep [..] [FRESH] regdep [..] [FRESH] regdep_env [..] [FRESH] regdep_old_style [..] [FRESH] regdep_rerun [..] [FRESH] foo [..] [FINISHED] [..] ", ) .run(); } else { println!("not already zero"); // It is not ideal that `foo` gets recompiled, but that is the current // behavior. Currently mtimes are ignored for registry deps. // // Note that this behavior is due to the fact that `foo` has a build // script in "old" mode where it doesn't print `rerun-if-*`. 
In this // mode we use `Precalculated` to fingerprint a path dependency, where // `Precalculated` is an opaque string which has the most recent mtime // in it. It differs between builds because one has nsec=0 and the other // likely has a nonzero nsec. Hence, the rebuild. p.cargo("build -v") .with_stderr_unordered( "\ [FRESH] pathdep [..] [FRESH] regdep [..] [FRESH] regdep_env [..] [FRESH] regdep_old_style [..] [FRESH] regdep_rerun [..] [COMPILING] foo [..] [RUNNING] [..]/foo-[..]/build-script-build[..] [RUNNING] `rustc --crate-name foo[..] [FINISHED] [..] ", ) .run(); } } #[cargo_test] fn metadata_change_invalidates() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build").run(); for attr in &[ "authors = [\"foo\"]", "description = \"desc\"", "homepage = \"https://example.com\"", "repository =\"https://example.com\"", ] { let mut file = OpenOptions::new() .write(true) .append(true) .open(p.root().join("Cargo.toml")) .unwrap(); writeln!(file, "{}", attr).unwrap(); p.cargo("build") .with_stderr_contains("[COMPILING] foo [..]") .run(); } p.cargo("build -v") .with_stderr_contains("[FRESH] foo[..]") .run(); assert_eq!(p.glob("target/debug/deps/libfoo-*.rlib").count(), 1); } #[cargo_test] fn edition_change_invalidates() { const MANIFEST: &str = r#" [package] name = "foo" version = "0.1.0" "#; let p = project() .file("Cargo.toml", MANIFEST) .file("src/lib.rs", "") .build(); p.cargo("build").run(); p.change_file("Cargo.toml", &format!("{}edition = \"2018\"", MANIFEST)); p.cargo("build") .with_stderr_contains("[COMPILING] foo [..]") .run(); p.change_file( "Cargo.toml", &format!( r#"{}edition = "2018" [lib] edition = "2015" "#, MANIFEST ), ); p.cargo("build") .with_stderr_contains("[COMPILING] foo [..]") .run(); p.cargo("build -v") .with_stderr_contains("[FRESH] foo[..]") .run(); assert_eq!(p.glob("target/debug/deps/libfoo-*.rlib").count(), 1); } #[cargo_test] fn rename_with_path_deps() { 
let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] a = { path = 'a' } "#, ) .file("src/lib.rs", "extern crate a; pub fn foo() { a::foo(); }") .file( "a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] [dependencies] b = { path = 'b' } "#, ) .file("a/src/lib.rs", "extern crate b; pub fn foo() { b::foo() }") .file( "a/b/Cargo.toml", r#" [project] name = "b" version = "0.5.0" authors = [] "#, ) .file("a/b/src/lib.rs", "pub fn foo() { }"); let p = p.build(); p.cargo("build").run(); // Now rename the root directory and rerun `cargo run`. Not only should we // not build anything but we also shouldn't crash. let mut new = p.root(); new.pop(); new.push("foo2"); fs::rename(p.root(), &new).unwrap(); p.cargo("build") .cwd(&new) .with_stderr("[FINISHED] [..]") .run(); } #[cargo_test] fn move_target_directory_with_path_deps() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] a = { path = "a" } "#, ) .file( "a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] "#, ) .file("src/lib.rs", "extern crate a; pub use a::print_msg;") .file( "a/build.rs", r###" use std::env; use std::fs; use std::path::Path; fn main() { println!("cargo:rerun-if-changed=build.rs"); let out_dir = env::var("OUT_DIR").unwrap(); let dest_path = Path::new(&out_dir).join("hello.rs"); fs::write(&dest_path, r#" pub fn message() -> &'static str { "Hello, World!" 
} "#).unwrap(); } "###, ) .file( "a/src/lib.rs", r#" include!(concat!(env!("OUT_DIR"), "/hello.rs")); pub fn print_msg() { message(); } "#, ); let p = p.build(); let mut parent = p.root(); parent.pop(); p.cargo("build").run(); let new_target = p.root().join("target2"); fs::rename(p.root().join("target"), &new_target).unwrap(); p.cargo("build") .env("CARGO_TARGET_DIR", &new_target) .with_stderr("[FINISHED] [..]") .run(); } #[cargo_test] fn rerun_if_changes() { let p = project() .file( "build.rs", r#" fn main() { println!("cargo:rerun-if-env-changed=FOO"); if std::env::var("FOO").is_ok() { println!("cargo:rerun-if-env-changed=BAR"); } } "#, ) .file("src/lib.rs", "") .build(); p.cargo("build").run(); p.cargo("build").with_stderr("[FINISHED] [..]").run(); p.cargo("build -v") .env("FOO", "1") .with_stderr( "\ [COMPILING] foo [..] [RUNNING] `[..]build-script-build` [RUNNING] `rustc [..] [FINISHED] [..] ", ) .run(); p.cargo("build") .env("FOO", "1") .with_stderr("[FINISHED] [..]") .run(); p.cargo("build -v") .env("FOO", "1") .env("BAR", "1") .with_stderr( "\ [COMPILING] foo [..] [RUNNING] `[..]build-script-build` [RUNNING] `rustc [..] [FINISHED] [..] ", ) .run(); p.cargo("build") .env("FOO", "1") .env("BAR", "1") .with_stderr("[FINISHED] [..]") .run(); p.cargo("build -v") .env("BAR", "2") .with_stderr( "\ [COMPILING] foo [..] [RUNNING] `[..]build-script-build` [RUNNING] `rustc [..] [FINISHED] [..] ", ) .run(); p.cargo("build") .env("BAR", "2") .with_stderr("[FINISHED] [..]") .run(); } #[cargo_test] fn channel_shares_filenames() { // Test that different "nightly" releases use the same output filename. // Create separate rustc binaries to emulate running different toolchains. 
let nightly1 = format!( "\ rustc 1.44.0-nightly (38114ff16 2020-03-21) binary: rustc commit-hash: 38114ff16e7856f98b2b4be7ab4cd29b38bed59a commit-date: 2020-03-21 host: {} release: 1.44.0-nightly LLVM version: 9.0 ", rustc_host() ); let nightly2 = format!( "\ rustc 1.44.0-nightly (a5b09d354 2020-03-31) binary: rustc commit-hash: a5b09d35473615e7142f5570f5c5fad0caf68bd2 commit-date: 2020-03-31 host: {} release: 1.44.0-nightly LLVM version: 9.0 ", rustc_host() ); let beta1 = format!( "\ rustc 1.43.0-beta.3 (4c587bbda 2020-03-25) binary: rustc commit-hash: 4c587bbda04ab55aaf56feab11dfdfe387a85d7a commit-date: 2020-03-25 host: {} release: 1.43.0-beta.3 LLVM version: 9.0 ", rustc_host() ); let beta2 = format!( "\ rustc 1.42.0-beta.5 (4e1c5f0e9 2020-02-28) binary: rustc commit-hash: 4e1c5f0e9769a588b91c977e3d81e140209ef3a2 commit-date: 2020-02-28 host: {} release: 1.42.0-beta.5 LLVM version: 9.0 ", rustc_host() ); let stable1 = format!( "\ rustc 1.42.0 (b8cedc004 2020-03-09) binary: rustc commit-hash: b8cedc00407a4c56a3bda1ed605c6fc166655447 commit-date: 2020-03-09 host: {} release: 1.42.0 LLVM version: 9.0 ", rustc_host() ); let stable2 = format!( "\ rustc 1.41.1 (f3e1a954d 2020-02-24) binary: rustc commit-hash: f3e1a954d2ead4e2fc197c7da7d71e6c61bad196 commit-date: 2020-02-24 host: {} release: 1.41.1 LLVM version: 9.0 ", rustc_host() ); let compiler = project() .at("compiler") .file("Cargo.toml", &basic_manifest("compiler", "0.1.0")) .file( "src/main.rs", r#" fn main() { if std::env::args_os().any(|a| a == "-vV") { print!("{}", env!("FUNKY_VERSION_TEST")); return; } let mut cmd = std::process::Command::new("rustc"); cmd.args(std::env::args_os().skip(1)); assert!(cmd.status().unwrap().success()); } "#, ) .build(); let makeit = |version, vv| { // Force a rebuild. 
compiler.target_debug_dir().join("deps").rm_rf(); compiler.cargo("build").env("FUNKY_VERSION_TEST", vv).run(); fs::rename(compiler.bin("compiler"), compiler.bin(version)).unwrap(); }; makeit("nightly1", nightly1); makeit("nightly2", nightly2); makeit("beta1", beta1); makeit("beta2", beta2); makeit("stable1", stable1); makeit("stable2", stable2); // Run `cargo check` with different rustc versions to observe its behavior. let p = project().file("src/lib.rs", "").build(); // Runs `cargo check` and returns the rmeta filename created. // Checks that the freshness matches the given value. let check = |version, fresh| -> String { let output = p .cargo("check --message-format=json") .env("RUSTC", compiler.bin(version)) .exec_with_output() .unwrap(); // Collect the filenames generated. let mut artifacts: Vec<_> = std::str::from_utf8(&output.stdout) .unwrap() .lines() .filter_map(|line| { let value: serde_json::Value = serde_json::from_str(line).unwrap(); if value["reason"].as_str().unwrap() == "compiler-artifact" { assert_eq!(value["fresh"].as_bool().unwrap(), fresh); let filenames = value["filenames"].as_array().unwrap(); assert_eq!(filenames.len(), 1); Some(filenames[0].to_string()) } else { None } }) .collect(); // Should only generate one rmeta file. assert_eq!(artifacts.len(), 1); artifacts.pop().unwrap() }; let nightly1_name = check("nightly1", false); assert_eq!(check("nightly1", true), nightly1_name); assert_eq!(check("nightly2", false), nightly1_name); // same as before assert_eq!(check("nightly2", true), nightly1_name); // Should rebuild going back to nightly1. assert_eq!(check("nightly1", false), nightly1_name); let beta1_name = check("beta1", false); assert_ne!(beta1_name, nightly1_name); assert_eq!(check("beta1", true), beta1_name); assert_eq!(check("beta2", false), beta1_name); // same as before assert_eq!(check("beta2", true), beta1_name); // Should rebuild going back to beta1. 
assert_eq!(check("beta1", false), beta1_name); let stable1_name = check("stable1", false); assert_ne!(stable1_name, nightly1_name); assert_ne!(stable1_name, beta1_name); let stable2_name = check("stable2", false); assert_ne!(stable1_name, stable2_name); // Check everything is fresh. assert_eq!(check("stable1", true), stable1_name); assert_eq!(check("stable2", true), stable2_name); assert_eq!(check("beta1", true), beta1_name); assert_eq!(check("nightly1", true), nightly1_name); } #[cargo_test] fn linking_interrupted() { // Interrupt during the linking phase shouldn't leave test executable as "fresh". // This is used to detect when linking starts, then to pause the linker so // that the test can kill cargo. let link_listener = TcpListener::bind("127.0.0.1:0").unwrap(); let link_addr = link_listener.local_addr().unwrap(); // This is used to detect when rustc exits. let rustc_listener = TcpListener::bind("127.0.0.1:0").unwrap(); let rustc_addr = rustc_listener.local_addr().unwrap(); // Create a linker that we can interrupt. let linker = project() .at("linker") .file("Cargo.toml", &basic_manifest("linker", "1.0.0")) .file( "src/main.rs", &r#" fn main() { // Figure out the output filename. let output = match std::env::args().find(|a| a.starts_with("/OUT:")) { Some(s) => s[5..].to_string(), None => { let mut args = std::env::args(); loop { if args.next().unwrap() == "-o" { break; } } args.next().unwrap() } }; std::fs::remove_file(&output).unwrap(); std::fs::write(&output, "").unwrap(); // Tell the test that we are ready to be interrupted. let mut socket = std::net::TcpStream::connect("__ADDR__").unwrap(); // Wait for the test to kill us. std::thread::sleep(std::time::Duration::new(60, 0)); } "# .replace("__ADDR__", &link_addr.to_string()), ) .build(); linker.cargo("build").run(); // Create a wrapper around rustc that will tell us when rustc is finished. 
let rustc = project() .at("rustc-waiter") .file("Cargo.toml", &basic_manifest("rustc-waiter", "1.0.0")) .file( "src/main.rs", &r#" fn main() { let mut conn = None; // Check for a normal build (not -vV or --print). if std::env::args().any(|arg| arg == "t1") { // Tell the test that rustc has started. conn = Some(std::net::TcpStream::connect("__ADDR__").unwrap()); } let status = std::process::Command::new("rustc") .args(std::env::args().skip(1)) .status() .expect("rustc to run"); std::process::exit(status.code().unwrap_or(1)); } "# .replace("__ADDR__", &rustc_addr.to_string()), ) .build(); rustc.cargo("build").run(); // Build it once so that the fingerprint gets saved to disk. let p = project() .file("src/lib.rs", "") .file("tests/t1.rs", "") .build(); p.cargo("test --test t1 --no-run").run(); // Make a change, start a build, then interrupt it. p.change_file("src/lib.rs", "// modified"); let linker_env = format!("CARGO_TARGET_{}_LINKER", rustc_host_env()); // NOTE: This assumes that the paths to the linker or rustc are not in the // fingerprint. But maybe they should be? let mut cmd = p .cargo("test --test t1 --no-run") .env(&linker_env, linker.bin("linker")) .env("RUSTC", rustc.bin("rustc-waiter")) .build_command(); let mut child = cmd .stdout(Stdio::null()) .stderr(Stdio::null()) .env("__CARGO_TEST_SETSID_PLEASE_DONT_USE_ELSEWHERE", "1") .spawn() .unwrap(); // Wait for rustc to start. let mut rustc_conn = rustc_listener.accept().unwrap().0; // Wait for linking to start. drop(link_listener.accept().unwrap()); // Interrupt the child. death::ctrl_c(&mut child); assert!(!child.wait().unwrap().success()); // Wait for rustc to exit. If we don't wait, then the command below could // start while rustc is still being torn down. let mut buf = [0]; drop(rustc_conn.read_exact(&mut buf)); // Build again, shouldn't be fresh. p.cargo("test --test t1") .with_stderr( "\ [COMPILING] foo [..] [FINISHED] [..] 
[RUNNING] tests/t1.rs (target/debug/deps/t1[..]) ", ) .run(); } #[cargo_test] #[cfg_attr( not(all(target_arch = "x86_64", target_os = "windows", target_env = "msvc")), ignore )] fn lld_is_fresh() { // Check for bug when using lld linker that it remains fresh with dylib. let p = project() .file( ".cargo/config", r#" [target.x86_64-pc-windows-msvc] linker = "rust-lld" rustflags = ["-C", "link-arg=-fuse-ld=lld"] "#, ) .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [lib] crate-type = ["dylib"] "#, ) .file("src/lib.rs", "") .build(); p.cargo("build").run(); p.cargo("build -v") .with_stderr("[FRESH] foo [..]\n[FINISHED] [..]") .run(); } #[cargo_test] fn env_in_code_causes_rebuild() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" "#, ) .file( "src/main.rs", r#" fn main() { println!("{:?}", option_env!("FOO")); println!("{:?}", option_env!("FOO\nBAR")); } "#, ) .build(); p.cargo("build").env_remove("FOO").run(); p.cargo("build") .env_remove("FOO") .with_stderr("[FINISHED] [..]") .run(); p.cargo("build") .env("FOO", "bar") .with_stderr("[COMPILING][..]\n[FINISHED][..]") .run(); p.cargo("build") .env("FOO", "bar") .with_stderr("[FINISHED][..]") .run(); p.cargo("build") .env("FOO", "baz") .with_stderr("[COMPILING][..]\n[FINISHED][..]") .run(); p.cargo("build") .env("FOO", "baz") .with_stderr("[FINISHED][..]") .run(); p.cargo("build") .env_remove("FOO") .with_stderr("[COMPILING][..]\n[FINISHED][..]") .run(); p.cargo("build") .env_remove("FOO") .with_stderr("[FINISHED][..]") .run(); let interesting = " #!$\nabc\r\\\t\u{8}\r\n"; p.cargo("build").env("FOO", interesting).run(); p.cargo("build") .env("FOO", interesting) .with_stderr("[FINISHED][..]") .run(); p.cargo("build").env("FOO\nBAR", interesting).run(); p.cargo("build") .env("FOO\nBAR", interesting) .with_stderr("[FINISHED][..]") .run(); } #[cargo_test] fn env_build_script_no_rebuild() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = 
"0.1.0" "#, ) .file( "build.rs", r#" fn main() { println!("cargo:rustc-env=FOO=bar"); } "#, ) .file( "src/main.rs", r#" fn main() { println!("{:?}", env!("FOO")); } "#, ) .build(); p.cargo("build").run(); p.cargo("build").with_stderr("[FINISHED] [..]").run(); } #[cargo_test] fn cargo_env_changes() { // Checks that changes to the env var CARGO in the dep-info file triggers // a rebuild. let p = project() .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) .file( "src/main.rs", r#" fn main() { println!("{:?}", env!("CARGO")); } "#, ) .build(); let cargo_exe = cargo_test_support::cargo_exe(); let other_cargo_path = p.root().join(cargo_exe.file_name().unwrap()); std::fs::hard_link(&cargo_exe, &other_cargo_path).unwrap(); let other_cargo = || { let mut pb = cargo_test_support::process(&other_cargo_path); pb.cwd(p.root()); cargo_test_support::execs().with_process_builder(pb) }; p.cargo("check").run(); other_cargo() .arg("check") .arg("-v") .with_stderr( "\ [CHECKING] foo [..] [RUNNING] `rustc [..] [FINISHED] [..] ", ) .run(); // And just to confirm that without using env! it doesn't rebuild. p.change_file("src/main.rs", "fn main() {}"); p.cargo("check") .with_stderr( "\ [CHECKING] foo [..] [FINISHED] [..] ", ) .run(); other_cargo() .arg("check") .arg("-v") .with_stderr( "\ [FRESH] foo [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn changing_linker() { // Changing linker should rebuild. let p = project().file("src/main.rs", "fn main() {}").build(); p.cargo("build").run(); let linker_env = format!("CARGO_TARGET_{}_LINKER", rustc_host_env()); p.cargo("build --verbose") .env(&linker_env, "nonexistent-linker") .with_status(101) .with_stderr_contains( "\ [COMPILING] foo v0.0.1 ([..]) [RUNNING] `rustc [..] -C linker=nonexistent-linker [..]` [ERROR] [..]linker[..] ", ) .run(); } cargo-0.66.0/tests/testsuite/future_incompat_report.rs000066400000000000000000000321641432416201200232370ustar00rootroot00000000000000//! Tests for future-incompat-report messages //! //! 
Note that these tests use the -Zfuture-incompat-test for rustc. //! This causes rustc to treat *every* lint as future-incompatible. //! This is done because future-incompatible lints are inherently //! ephemeral, but we don't want to continually update these tests. //! So we pick some random lint that will likely always be the same //! over time. use super::config::write_config_toml; use cargo_test_support::registry::Package; use cargo_test_support::{basic_manifest, project, Project}; // An arbitrary lint (unused_variables) that triggers a lint. // We use a special flag to force it to generate a report. const FUTURE_EXAMPLE: &'static str = "fn main() { let x = 1; }"; // Some text that will be displayed when the lint fires. const FUTURE_OUTPUT: &'static str = "[..]unused_variables[..]"; fn simple_project() -> Project { project() .file("Cargo.toml", &basic_manifest("foo", "0.0.0")) .file("src/main.rs", FUTURE_EXAMPLE) .build() } #[cargo_test( nightly, reason = "-Zfuture-incompat-test requires nightly (permanently)" )] fn output_on_stable() { let p = simple_project(); p.cargo("check") .env("RUSTFLAGS", "-Zfuture-incompat-test") .with_stderr_contains(FUTURE_OUTPUT) .with_stderr_contains("[..]cargo report[..]") .run(); } // This feature is stable, and should not be gated #[cargo_test] fn no_gate_future_incompat_report() { let p = simple_project(); p.cargo("build --future-incompat-report") .with_status(0) .run(); p.cargo("report future-incompatibilities --id foo") .with_stderr_contains("error: no reports are currently available") .with_status(101) .run(); } #[cargo_test( nightly, reason = "-Zfuture-incompat-test requires nightly (permanently)" )] fn test_zero_future_incompat() { let p = project() .file("Cargo.toml", &basic_manifest("foo", "0.0.0")) .file("src/main.rs", "fn main() {}") .build(); // No note if --future-incompat-report is not specified. p.cargo("build") .env("RUSTFLAGS", "-Zfuture-incompat-test") .with_stderr( "\ [COMPILING] foo v0.0.0 [..] [FINISHED] [..] 
", ) .run(); p.cargo("build --future-incompat-report") .env("RUSTFLAGS", "-Zfuture-incompat-test") .with_stderr( "\ [FINISHED] [..] note: 0 dependencies had future-incompatible warnings ", ) .run(); } #[cargo_test( nightly, reason = "-Zfuture-incompat-test requires nightly (permanently)" )] fn test_single_crate() { let p = simple_project(); for command in &["build", "check", "rustc", "test"] { let check_has_future_compat = || { p.cargo(command) .env("RUSTFLAGS", "-Zfuture-incompat-test") .with_stderr_contains(FUTURE_OUTPUT) .with_stderr_contains("warning: the following packages contain code that will be rejected by a future version of Rust: foo v0.0.0 [..]") .with_stderr_does_not_contain("[..]incompatibility[..]") .run(); }; // Check that we show a message with no [future-incompat-report] config section write_config_toml(""); check_has_future_compat(); // Check that we show a message with `frequency = "always"` write_config_toml( "\ [future-incompat-report] frequency = 'always' ", ); check_has_future_compat(); // Check that we do not show a message with `frequency = "never"` write_config_toml( "\ [future-incompat-report] frequency = 'never' ", ); p.cargo(command) .env("RUSTFLAGS", "-Zfuture-incompat-test") .with_stderr_contains(FUTURE_OUTPUT) .with_stderr_does_not_contain("[..]rejected[..]") .with_stderr_does_not_contain("[..]incompatibility[..]") .run(); // Check that passing `--future-incompat-report` overrides `frequency = 'never'` p.cargo(command).arg("--future-incompat-report") .env("RUSTFLAGS", "-Zfuture-incompat-test") .with_stderr_contains(FUTURE_OUTPUT) .with_stderr_contains("warning: the following packages contain code that will be rejected by a future version of Rust: foo v0.0.0 [..]") .with_stderr_contains(" - foo@0.0.0[..]") .run(); } } #[cargo_test( nightly, reason = "-Zfuture-incompat-test requires nightly (permanently)" )] fn test_multi_crate() { Package::new("first-dep", "0.0.1") .file("src/lib.rs", FUTURE_EXAMPLE) .publish(); 
Package::new("second-dep", "0.0.2") .file("src/lib.rs", FUTURE_EXAMPLE) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" [dependencies] first-dep = "*" second-dep = "*" "#, ) .file("src/main.rs", "fn main() {}") .build(); for command in &["build", "check", "rustc", "test"] { p.cargo(command) .env("RUSTFLAGS", "-Zfuture-incompat-test") .with_stderr_does_not_contain(FUTURE_OUTPUT) .with_stderr_contains("warning: the following packages contain code that will be rejected by a future version of Rust: first-dep v0.0.1, second-dep v0.0.2") // Check that we don't have the 'triggers' message shown at the bottom of this loop, // and that we don't explain how to show a per-package report .with_stderr_does_not_contain("[..]triggers[..]") .with_stderr_does_not_contain("[..]--package[..]") .with_stderr_does_not_contain("[..]-p[..]") .run(); p.cargo(command).arg("--future-incompat-report") .env("RUSTFLAGS", "-Zfuture-incompat-test") .with_stderr_contains("warning: the following packages contain code that will be rejected by a future version of Rust: first-dep v0.0.1, second-dep v0.0.2") .with_stderr_contains(" - first-dep@0.0.1") .with_stderr_contains(" - second-dep@0.0.2") .run(); p.cargo("report future-incompatibilities").arg("--package").arg("first-dep@0.0.1") .with_stdout_contains("The package `first-dep v0.0.1` currently triggers the following future incompatibility lints:") .with_stdout_contains(FUTURE_OUTPUT) .with_stdout_does_not_contain("[..]second-dep-0.0.2/src[..]") .run(); p.cargo("report future-incompatibilities").arg("--package").arg("second-dep@0.0.2") .with_stdout_contains("The package `second-dep v0.0.2` currently triggers the following future incompatibility lints:") .with_stdout_contains(FUTURE_OUTPUT) .with_stdout_does_not_contain("[..]first-dep-0.0.1/src[..]") .run(); } // Test that passing the correct id via '--id' doesn't generate a warning message let output = p .cargo("build") .env("RUSTFLAGS", 
"-Zfuture-incompat-test") .exec_with_output() .unwrap(); // Extract the 'id' from the stdout. We are looking // for the id in a line of the form "run `cargo report future-incompatibilities --id yZ7S`" // which is generated by Cargo to tell the user what command to run // This is just to test that passing the id suppresses the warning mesasge. Any users needing // access to the report from a shell script should use the `--future-incompat-report` flag let stderr = std::str::from_utf8(&output.stderr).unwrap(); // Find '--id ' in the output let mut iter = stderr.split(' '); iter.find(|w| *w == "--id").unwrap(); let id = iter .next() .unwrap_or_else(|| panic!("Unexpected output:\n{}", stderr)); // Strip off the trailing '`' included in the output let id: String = id.chars().take_while(|c| *c != '`').collect(); p.cargo(&format!("report future-incompatibilities --id {}", id)) .with_stdout_contains("The package `first-dep v0.0.1` currently triggers the following future incompatibility lints:") .with_stdout_contains("The package `second-dep v0.0.2` currently triggers the following future incompatibility lints:") .run(); // Test without --id, and also the full output of the report. let output = p .cargo("report future-incompat") .exec_with_output() .unwrap(); let output = std::str::from_utf8(&output.stdout).unwrap(); assert!(output.starts_with("The following warnings were discovered")); let mut lines = output .lines() // Skip the beginning of the per-package information. 
.skip_while(|line| !line.starts_with("The package")); for expected in &["first-dep v0.0.1", "second-dep v0.0.2"] { assert_eq!( &format!( "The package `{}` currently triggers the following future incompatibility lints:", expected ), lines.next().unwrap(), "Bad output:\n{}", output ); let mut count = 0; while let Some(line) = lines.next() { if line.is_empty() { break; } count += 1; } assert!(count > 0); } assert_eq!(lines.next(), None); } #[cargo_test( nightly, reason = "-Zfuture-incompat-test requires nightly (permanently)" )] fn color() { let p = simple_project(); p.cargo("check") .env("RUSTFLAGS", "-Zfuture-incompat-test") .masquerade_as_nightly_cargo(&["future-incompat-test"]) .run(); p.cargo("report future-incompatibilities") .with_stdout_does_not_contain("[..]\x1b[[..]") .run(); p.cargo("report future-incompatibilities") .env("CARGO_TERM_COLOR", "always") .with_stdout_contains("[..]\x1b[[..]") .run(); } #[cargo_test( nightly, reason = "-Zfuture-incompat-test requires nightly (permanently)" )] fn bad_ids() { let p = simple_project(); p.cargo("report future-incompatibilities --id 1") .with_status(101) .with_stderr("error: no reports are currently available") .run(); p.cargo("check") .env("RUSTFLAGS", "-Zfuture-incompat-test") .masquerade_as_nightly_cargo(&["future-incompat-test"]) .run(); p.cargo("report future-incompatibilities --id foo") .with_status(1) .with_stderr("error: Invalid value: could not parse `foo` as a number") .run(); p.cargo("report future-incompatibilities --id 7") .with_status(101) .with_stderr( "\ error: could not find report with ID 7 Available IDs are: 1 ", ) .run(); } #[cargo_test( nightly, reason = "-Zfuture-incompat-test requires nightly (permanently)" )] fn suggestions_for_updates() { Package::new("with_updates", "1.0.0") .file("src/lib.rs", FUTURE_EXAMPLE) .publish(); Package::new("big_update", "1.0.0") .file("src/lib.rs", FUTURE_EXAMPLE) .publish(); Package::new("without_updates", "1.0.0") .file("src/lib.rs", FUTURE_EXAMPLE) 
.publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] with_updates = "1" big_update = "1" without_updates = "1" "#, ) .file("src/lib.rs", "") .build(); p.cargo("generate-lockfile").run(); Package::new("with_updates", "1.0.1") .file("src/lib.rs", "") .publish(); Package::new("with_updates", "1.0.2") .file("src/lib.rs", "") .publish(); Package::new("with_updates", "3.0.1") .file("src/lib.rs", "") .publish(); Package::new("big_update", "2.0.0") .file("src/lib.rs", "") .publish(); // This is a hack to force cargo to update the index. Cargo can't do this // automatically because doing a network update on every build would be a // bad idea. Under normal circumstances, we'll hope the user has done // something else along the way to trigger an update (building some other // project or something). This could use some more consideration of how to // handle this better (maybe only trigger an update if it hasn't updated // in a long while?). p.cargo("update -p without_updates").run(); let update_message = "\ - Some affected dependencies have newer versions available. You may want to consider updating them to a newer version to see if the issue has been fixed. big_update v1.0.0 has the following newer versions available: 2.0.0 with_updates v1.0.0 has the following newer versions available: 1.0.1, 1.0.2, 3.0.1 "; p.cargo("check --future-incompat-report") .masquerade_as_nightly_cargo(&["future-incompat-test"]) .env("RUSTFLAGS", "-Zfuture-incompat-test") .with_stderr_contains(update_message) .run(); p.cargo("report future-incompatibilities") .with_stdout_contains(update_message) .run() } cargo-0.66.0/tests/testsuite/generate_lockfile.rs000066400000000000000000000132721432416201200221010ustar00rootroot00000000000000//! Tests for the `cargo generate-lockfile` command. 
use cargo_test_support::registry::Package; use cargo_test_support::{basic_manifest, paths, project, ProjectBuilder}; use std::fs; #[cargo_test] fn adding_and_removing_packages() { let p = project() .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "") .build(); p.cargo("generate-lockfile").run(); let lock1 = p.read_lockfile(); // add a dep p.change_file( "Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.1" [dependencies.bar] path = "bar" "#, ); p.cargo("generate-lockfile").run(); let lock2 = p.read_lockfile(); assert_ne!(lock1, lock2); // change the dep p.change_file("bar/Cargo.toml", &basic_manifest("bar", "0.0.2")); p.cargo("generate-lockfile").run(); let lock3 = p.read_lockfile(); assert_ne!(lock1, lock3); assert_ne!(lock2, lock3); // remove the dep println!("lock4"); p.change_file( "Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.1" "#, ); p.cargo("generate-lockfile").run(); let lock4 = p.read_lockfile(); assert_eq!(lock1, lock4); } #[cargo_test] fn no_index_update() { Package::new("serde", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.1" [dependencies] serde = "1.0" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("generate-lockfile") .with_stderr("[UPDATING] `[..]` index") .run(); p.cargo("generate-lockfile -Zno-index-update") .masquerade_as_nightly_cargo(&["no-index-update"]) .with_stdout("") .with_stderr("") .run(); } #[cargo_test] fn preserve_metadata() { let p = project() .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "") .build(); p.cargo("generate-lockfile").run(); let metadata = r#" [metadata] bar = "baz" foo = "bar" "#; let lock = p.read_lockfile(); let data = lock + metadata; p.change_file("Cargo.lock", &data); // Build and make sure the metadata is still there p.cargo("build").run(); let lock = 
p.read_lockfile(); assert!(lock.contains(metadata.trim()), "{}", lock); // Update and make sure the metadata is still there p.cargo("update").run(); let lock = p.read_lockfile(); assert!(lock.contains(metadata.trim()), "{}", lock); } #[cargo_test] fn preserve_line_endings_issue_2076() { let p = project() .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "") .build(); let lockfile = p.root().join("Cargo.lock"); p.cargo("generate-lockfile").run(); assert!(lockfile.is_file()); p.cargo("generate-lockfile").run(); let lock0 = p.read_lockfile(); assert!(lock0.starts_with("# This file is automatically @generated by Cargo.\n# It is not intended for manual editing.\n")); let lock1 = lock0.replace("\n", "\r\n"); p.change_file("Cargo.lock", &lock1); p.cargo("generate-lockfile").run(); let lock2 = p.read_lockfile(); assert!(lock2.starts_with("# This file is automatically @generated by Cargo.\r\n# It is not intended for manual editing.\r\n")); assert_eq!(lock1, lock2); } #[cargo_test] fn cargo_update_generate_lockfile() { let p = project().file("src/main.rs", "fn main() {}").build(); let lockfile = p.root().join("Cargo.lock"); assert!(!lockfile.is_file()); p.cargo("update").with_stdout("").run(); assert!(lockfile.is_file()); fs::remove_file(p.root().join("Cargo.lock")).unwrap(); assert!(!lockfile.is_file()); p.cargo("update").with_stdout("").run(); assert!(lockfile.is_file()); } #[cargo_test] fn duplicate_entries_in_lockfile() { let _a = ProjectBuilder::new(paths::root().join("a")) .file( "Cargo.toml", r#" [package] name = "a" authors = [] version = "0.0.1" [dependencies] common = {path="common"} "#, ) .file("src/lib.rs", "") .build(); let common_toml = &basic_manifest("common", "0.0.1"); let _common_in_a = ProjectBuilder::new(paths::root().join("a/common")) .file("Cargo.toml", common_toml) .file("src/lib.rs", "") .build(); let b = ProjectBuilder::new(paths::root().join("b")) .file( "Cargo.toml", r#" [package] 
name = "b" authors = [] version = "0.0.1" [dependencies] common = {path="common"} a = {path="../a"} "#, ) .file("src/lib.rs", "") .build(); let _common_in_b = ProjectBuilder::new(paths::root().join("b/common")) .file("Cargo.toml", common_toml) .file("src/lib.rs", "") .build(); // should fail due to a duplicate package `common` in the lock file b.cargo("build") .with_status(101) .with_stderr_contains( "[..]package collision in the lockfile: packages common [..] and \ common [..] are different, but only one can be written to \ lockfile unambiguously", ) .run(); } cargo-0.66.0/tests/testsuite/git.rs000066400000000000000000002717611432416201200172330ustar00rootroot00000000000000//! Tests for git support. use std::fs; use std::io::prelude::*; use std::net::{TcpListener, TcpStream}; use std::path::Path; use std::str; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Arc; use std::thread; use cargo_test_support::paths::{self, CargoPathExt}; use cargo_test_support::{basic_lib_manifest, basic_manifest, git, main_file, path2url, project}; use cargo_test_support::{sleep_ms, t, Project}; #[cargo_test] fn cargo_compile_simple_git_dep() { let project = project(); let git_project = git::new("dep1", |project| { project .file("Cargo.toml", &basic_lib_manifest("dep1")) .file( "src/dep1.rs", r#" pub fn hello() -> &'static str { "hello world" } "#, ) }); let project = project .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.dep1] git = '{}' "#, git_project.url() ), ) .file( "src/main.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"]), ) .build(); let git_root = git_project.root(); project .cargo("build") .with_stderr(&format!( "[UPDATING] git repository `{}`\n\ [COMPILING] dep1 v0.5.0 ({}#[..])\n\ [COMPILING] foo v0.5.0 ([CWD])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", path2url(&git_root), path2url(&git_root), )) .run(); assert!(project.bin("foo").is_file()); project 
.process(&project.bin("foo")) .with_stdout("hello world\n") .run(); } #[cargo_test] fn cargo_compile_git_dep_branch() { let project = project(); let git_project = git::new("dep1", |project| { project .file("Cargo.toml", &basic_lib_manifest("dep1")) .file( "src/dep1.rs", r#" pub fn hello() -> &'static str { "hello world" } "#, ) }); // Make a new branch based on the current HEAD commit let repo = git2::Repository::open(&git_project.root()).unwrap(); let head = repo.head().unwrap().target().unwrap(); let head = repo.find_commit(head).unwrap(); repo.branch("branchy", &head, true).unwrap(); let project = project .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.dep1] git = '{}' branch = "branchy" "#, git_project.url() ), ) .file( "src/main.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"]), ) .build(); let git_root = git_project.root(); project .cargo("build") .with_stderr(&format!( "[UPDATING] git repository `{}`\n\ [COMPILING] dep1 v0.5.0 ({}?branch=branchy#[..])\n\ [COMPILING] foo v0.5.0 ([CWD])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", path2url(&git_root), path2url(&git_root), )) .run(); assert!(project.bin("foo").is_file()); project .process(&project.bin("foo")) .with_stdout("hello world\n") .run(); } #[cargo_test] fn cargo_compile_git_dep_tag() { let project = project(); let git_project = git::new("dep1", |project| { project .file("Cargo.toml", &basic_lib_manifest("dep1")) .file( "src/dep1.rs", r#" pub fn hello() -> &'static str { "hello world" } "#, ) }); // Make a tag corresponding to the current HEAD let repo = git2::Repository::open(&git_project.root()).unwrap(); let head = repo.head().unwrap().target().unwrap(); repo.tag( "v0.1.0", &repo.find_object(head, None).unwrap(), &repo.signature().unwrap(), "make a new tag", false, ) .unwrap(); let project = project .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = 
["wycats@example.com"] [dependencies.dep1] git = '{}' tag = "v0.1.0" "#, git_project.url() ), ) .file( "src/main.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"]), ) .build(); let git_root = git_project.root(); project .cargo("build") .with_stderr(&format!( "[UPDATING] git repository `{}`\n\ [COMPILING] dep1 v0.5.0 ({}?tag=v0.1.0#[..])\n\ [COMPILING] foo v0.5.0 ([CWD])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", path2url(&git_root), path2url(&git_root), )) .run(); assert!(project.bin("foo").is_file()); project .process(&project.bin("foo")) .with_stdout("hello world\n") .run(); project.cargo("build").run(); } #[cargo_test] fn cargo_compile_git_dep_pull_request() { let project = project(); let git_project = git::new("dep1", |project| { project .file("Cargo.toml", &basic_lib_manifest("dep1")) .file( "src/dep1.rs", r#" pub fn hello() -> &'static str { "hello world" } "#, ) }); // Make a reference in GitHub's pull request ref naming convention. let repo = git2::Repository::open(&git_project.root()).unwrap(); let oid = repo.refname_to_id("HEAD").unwrap(); let force = false; let log_message = "open pull request"; repo.reference("refs/pull/330/head", oid, force, log_message) .unwrap(); let project = project .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.0.0" [dependencies] dep1 = {{ git = "{}", rev = "refs/pull/330/head" }} "#, git_project.url() ), ) .file( "src/main.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"]), ) .build(); let git_root = git_project.root(); project .cargo("build") .with_stderr(&format!( "[UPDATING] git repository `{}`\n\ [COMPILING] dep1 v0.5.0 ({}?rev=refs/pull/330/head#[..])\n\ [COMPILING] foo v0.0.0 ([CWD])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", path2url(&git_root), path2url(&git_root), )) .run(); assert!(project.bin("foo").is_file()); } #[cargo_test] fn cargo_compile_with_nested_paths() { let git_project = git::new("dep1", |project| { project .file( "Cargo.toml", r#" 
[project] name = "dep1" version = "0.5.0" authors = ["carlhuda@example.com"] [dependencies.dep2] version = "0.5.0" path = "vendor/dep2" [lib] name = "dep1" "#, ) .file( "src/dep1.rs", r#" extern crate dep2; pub fn hello() -> &'static str { dep2::hello() } "#, ) .file("vendor/dep2/Cargo.toml", &basic_lib_manifest("dep2")) .file( "vendor/dep2/src/dep2.rs", r#" pub fn hello() -> &'static str { "hello world" } "#, ) }); let p = project() .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.dep1] version = "0.5.0" git = '{}' [[bin]] name = "foo" "#, git_project.url() ), ) .file( "src/foo.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"]), ) .build(); p.cargo("build").run(); assert!(p.bin("foo").is_file()); p.process(&p.bin("foo")).with_stdout("hello world\n").run(); } #[cargo_test] fn cargo_compile_with_malformed_nested_paths() { let git_project = git::new("dep1", |project| { project .file("Cargo.toml", &basic_lib_manifest("dep1")) .file( "src/dep1.rs", r#" pub fn hello() -> &'static str { "hello world" } "#, ) .file("vendor/dep2/Cargo.toml", "!INVALID!") .file( "vendor/dep3/Cargo.toml", r#" [project] name = "dep3" version = "0.5.0" [dependencies] subdep1 = { path = "../require-extra-build-step" } "#, ) .file("vendor/dep3/src/lib.rs", "") }); let p = project() .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.dep1] version = "0.5.0" git = '{}' [[bin]] name = "foo" "#, git_project.url() ), ) .file( "src/foo.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"]), ) .build(); p.cargo("build").run(); assert!(p.bin("foo").is_file()); p.process(&p.bin("foo")).with_stdout("hello world\n").run(); } #[cargo_test] fn cargo_compile_with_meta_package() { let git_project = git::new("meta-dep", |project| { project .file("dep1/Cargo.toml", &basic_lib_manifest("dep1")) .file( "dep1/src/dep1.rs", r#" pub fn hello() -> &'static str { "this is 
dep1" } "#, ) .file("dep2/Cargo.toml", &basic_lib_manifest("dep2")) .file( "dep2/src/dep2.rs", r#" pub fn hello() -> &'static str { "this is dep2" } "#, ) }); let p = project() .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.dep1] version = "0.5.0" git = '{}' [dependencies.dep2] version = "0.5.0" git = '{}' [[bin]] name = "foo" "#, git_project.url(), git_project.url() ), ) .file( "src/foo.rs", &main_file( r#""{} {}", dep1::hello(), dep2::hello()"#, &["dep1", "dep2"], ), ) .build(); p.cargo("build").run(); assert!(p.bin("foo").is_file()); p.process(&p.bin("foo")) .with_stdout("this is dep1 this is dep2\n") .run(); } #[cargo_test] fn cargo_compile_with_short_ssh_git() { let url = "git@github.com:a/dep"; let p = project() .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.dep] git = "{}" [[bin]] name = "foo" "#, url ), ) .file( "src/foo.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"]), ) .build(); p.cargo("build") .with_status(101) .with_stdout("") .with_stderr(&format!( "\ [ERROR] failed to parse manifest at `[..]` Caused by: invalid url `{}`: relative URL without a base ", url )) .run(); } #[cargo_test] fn two_revs_same_deps() { let bar = git::new("meta-dep", |project| { project .file("Cargo.toml", &basic_manifest("bar", "0.0.0")) .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") }); let repo = git2::Repository::open(&bar.root()).unwrap(); let rev1 = repo.revparse_single("HEAD").unwrap().id(); // Commit the changes and make sure we trigger a recompile bar.change_file("src/lib.rs", "pub fn bar() -> i32 { 2 }"); git::add(&repo); let rev2 = git::commit(&repo); let foo = project() .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.0.0" authors = [] [dependencies.bar] git = '{}' rev = "{}" [dependencies.baz] path = "../baz" "#, bar.url(), rev1 ), ) .file( "src/main.rs", r#" extern crate bar; extern crate 
baz; fn main() { assert_eq!(bar::bar(), 1); assert_eq!(baz::baz(), 2); } "#, ) .build(); let _baz = project() .at("baz") .file( "Cargo.toml", &format!( r#" [package] name = "baz" version = "0.0.0" authors = [] [dependencies.bar] git = '{}' rev = "{}" "#, bar.url(), rev2 ), ) .file( "src/lib.rs", r#" extern crate bar; pub fn baz() -> i32 { bar::bar() } "#, ) .build(); foo.cargo("build -v").run(); assert!(foo.bin("foo").is_file()); foo.process(&foo.bin("foo")).run(); } #[cargo_test] fn recompilation() { let git_project = git::new("bar", |project| { project .file("Cargo.toml", &basic_lib_manifest("bar")) .file("src/bar.rs", "pub fn bar() {}") }); let p = project() .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] version = "0.5.0" git = '{}' "#, git_project.url() ), ) .file("src/main.rs", &main_file(r#""{:?}", bar::bar()"#, &["bar"])) .build(); // First time around we should compile both foo and bar p.cargo("build") .with_stderr(&format!( "[UPDATING] git repository `{}`\n\ [COMPILING] bar v0.5.0 ({}#[..])\n\ [COMPILING] foo v0.5.0 ([CWD])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) \ in [..]\n", git_project.url(), git_project.url(), )) .run(); // Don't recompile the second time p.cargo("build").with_stdout("").run(); // Modify a file manually, shouldn't trigger a recompile git_project.change_file("src/bar.rs", r#"pub fn bar() { println!("hello!"); }"#); p.cargo("build").with_stdout("").run(); p.cargo("update") .with_stderr(&format!( "[UPDATING] git repository `{}`", git_project.url() )) .run(); p.cargo("build").with_stdout("").run(); // Commit the changes and make sure we don't trigger a recompile because the // lock file says not to change let repo = git2::Repository::open(&git_project.root()).unwrap(); git::add(&repo); git::commit(&repo); println!("compile after commit"); p.cargo("build").with_stdout("").run(); p.root().move_into_the_past(); // Update the dependency and carry on! 
p.cargo("update") .with_stderr(&format!( "[UPDATING] git repository `{}`\n\ [UPDATING] bar v0.5.0 ([..]) -> #[..]\n\ ", git_project.url() )) .run(); println!("going for the last compile"); p.cargo("build") .with_stderr(&format!( "[COMPILING] bar v0.5.0 ({}#[..])\n\ [COMPILING] foo v0.5.0 ([CWD])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) \ in [..]\n", git_project.url(), )) .run(); // Make sure clean only cleans one dep p.cargo("clean -p foo").with_stdout("").run(); p.cargo("build") .with_stderr( "[COMPILING] foo v0.5.0 ([CWD])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) \ in [..]", ) .run(); } #[cargo_test] fn update_with_shared_deps() { let git_project = git::new("bar", |project| { project .file("Cargo.toml", &basic_lib_manifest("bar")) .file("src/bar.rs", "pub fn bar() {}") }); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.dep1] path = "dep1" [dependencies.dep2] path = "dep2" "#, ) .file( "src/main.rs", r#" #[allow(unused_extern_crates)] extern crate dep1; #[allow(unused_extern_crates)] extern crate dep2; fn main() {} "#, ) .file( "dep1/Cargo.toml", &format!( r#" [package] name = "dep1" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] version = "0.5.0" git = '{}' "#, git_project.url() ), ) .file("dep1/src/lib.rs", "") .file( "dep2/Cargo.toml", &format!( r#" [package] name = "dep2" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] version = "0.5.0" git = '{}' "#, git_project.url() ), ) .file("dep2/src/lib.rs", "") .build(); // First time around we should compile both foo and bar p.cargo("build") .with_stderr(&format!( "\ [UPDATING] git repository `{git}` [COMPILING] bar v0.5.0 ({git}#[..]) [COMPILING] [..] v0.5.0 ([..]) [COMPILING] [..] 
v0.5.0 ([..]) [COMPILING] foo v0.5.0 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", git = git_project.url(), )) .run(); // Modify a file manually, and commit it git_project.change_file("src/bar.rs", r#"pub fn bar() { println!("hello!"); }"#); let repo = git2::Repository::open(&git_project.root()).unwrap(); let old_head = repo.head().unwrap().target().unwrap(); git::add(&repo); git::commit(&repo); sleep_ms(1000); // By default, not transitive updates println!("dep1 update"); p.cargo("update -p dep1").with_stdout("").run(); // Don't do anything bad on a weird --precise argument println!("bar bad precise update"); p.cargo("update -p bar --precise 0.1.2") .with_status(101) .with_stderr( "\ [ERROR] Unable to update [..] Caused by: precise value for git is not a git revision: 0.1.2 Caused by: unable to parse OID - contains invalid characters; class=Invalid (3) ", ) .run(); // Specifying a precise rev to the old rev shouldn't actually update // anything because we already have the rev in the db. println!("bar precise update"); p.cargo("update -p bar --precise") .arg(&old_head.to_string()) .with_stdout("") .run(); // Updating aggressively should, however, update the repo. println!("dep1 aggressive update"); p.cargo("update -p dep1 --aggressive") .with_stderr(&format!( "[UPDATING] git repository `{}`\n\ [UPDATING] bar v0.5.0 ([..]) -> #[..]\n\ ", git_project.url() )) .run(); // Make sure we still only compile one version of the git repo println!("build"); p.cargo("build") .with_stderr(&format!( "\ [COMPILING] bar v0.5.0 ({git}#[..]) [COMPILING] [..] v0.5.0 ([CWD][..]dep[..]) [COMPILING] [..] 
v0.5.0 ([CWD][..]dep[..]) [COMPILING] foo v0.5.0 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", git = git_project.url(), )) .run(); // We should be able to update transitive deps p.cargo("update -p bar") .with_stderr(&format!( "[UPDATING] git repository `{}`", git_project.url() )) .run(); } #[cargo_test] fn dep_with_submodule() { let project = project(); let git_project = git::new("dep1", |project| { project.file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) }); let git_project2 = git::new("dep2", |project| project.file("lib.rs", "pub fn dep() {}")); let repo = git2::Repository::open(&git_project.root()).unwrap(); let url = path2url(git_project2.root()).to_string(); git::add_submodule(&repo, &url, Path::new("src")); git::commit(&repo); let project = project .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.dep1] git = '{}' "#, git_project.url() ), ) .file( "src/lib.rs", "extern crate dep1; pub fn foo() { dep1::dep() }", ) .build(); project .cargo("build") .with_stderr( "\ [UPDATING] git repository [..] [UPDATING] git submodule `file://[..]/dep2` [COMPILING] dep1 [..] [COMPILING] foo [..] 
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", ) .run(); } #[cargo_test] fn dep_with_bad_submodule() { let project = project(); let git_project = git::new("dep1", |project| { project.file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) }); let git_project2 = git::new("dep2", |project| project.file("lib.rs", "pub fn dep() {}")); let repo = git2::Repository::open(&git_project.root()).unwrap(); let url = path2url(git_project2.root()).to_string(); git::add_submodule(&repo, &url, Path::new("src")); git::commit(&repo); // now amend the first commit on git_project2 to make submodule ref point to not-found // commit let repo = git2::Repository::open(&git_project2.root()).unwrap(); let original_submodule_ref = repo.refname_to_id("refs/heads/master").unwrap(); let commit = repo.find_commit(original_submodule_ref).unwrap(); commit .amend( Some("refs/heads/master"), None, None, None, Some("something something"), None, ) .unwrap(); let p = project .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.dep1] git = '{}' "#, git_project.url() ), ) .file( "src/lib.rs", "extern crate dep1; pub fn foo() { dep1::dep() }", ) .build(); let expected = format!( "\ [UPDATING] git repository [..] [UPDATING] git submodule `file://[..]/dep2` [ERROR] failed to get `dep1` as a dependency of package `foo v0.5.0 [..]` Caused by: failed to load source for dependency `dep1` Caused by: Unable to update {} Caused by: failed to update submodule `src` Caused by: object not found - no match for id [..] ", path2url(git_project.root()) ); p.cargo("build") .with_stderr(expected) .with_status(101) .run(); } #[cargo_test] fn dep_with_skipped_submodule() { // Ensure we skip dependency submodules if their update strategy is `none`. 
let qux = git::new("qux", |project| { project.no_manifest().file("README", "skip me") }); let bar = git::new("bar", |project| { project .file("Cargo.toml", &basic_manifest("bar", "0.0.0")) .file("src/lib.rs", "") }); // `qux` is a submodule of `bar`, but we don't want to update it. let repo = git2::Repository::open(&bar.root()).unwrap(); git::add_submodule(&repo, qux.url().as_str(), Path::new("qux")); let mut conf = git2::Config::open(&bar.root().join(".gitmodules")).unwrap(); conf.set_str("submodule.qux.update", "none").unwrap(); git::add(&repo); git::commit(&repo); let foo = project() .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.0.0" authors = [] [dependencies.bar] git = "{}" "#, bar.url() ), ) .file("src/main.rs", "fn main() {}") .build(); foo.cargo("build") .with_stderr( "\ [UPDATING] git repository `file://[..]/bar` [SKIPPING] git submodule `file://[..]/qux` [..] [COMPILING] bar [..] [COMPILING] foo [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", ) .run(); } #[cargo_test] fn ambiguous_published_deps() { let project = project(); let git_project = git::new("dep", |project| { project .file( "aaa/Cargo.toml", &format!( r#" [project] name = "bar" version = "0.5.0" publish = true "# ), ) .file("aaa/src/lib.rs", "") .file( "bbb/Cargo.toml", &format!( r#" [project] name = "bar" version = "0.5.0" publish = true "# ), ) .file("bbb/src/lib.rs", "") }); let p = project .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] git = '{}' "#, git_project.url() ), ) .file("src/main.rs", "fn main() { }") .build(); p.cargo("build").run(); p.cargo("run") .with_stderr( "\ [WARNING] skipping duplicate package `bar` found at `[..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
[RUNNING] `target/debug/foo[EXE]` ", ) .run(); } #[cargo_test] fn two_deps_only_update_one() { let project = project(); let git1 = git::new("dep1", |project| { project .file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) .file("src/lib.rs", "") }); let git2 = git::new("dep2", |project| { project .file("Cargo.toml", &basic_manifest("dep2", "0.5.0")) .file("src/lib.rs", "") }); let p = project .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.dep1] git = '{}' [dependencies.dep2] git = '{}' "#, git1.url(), git2.url() ), ) .file("src/main.rs", "fn main() {}") .build(); fn oid_to_short_sha(oid: git2::Oid) -> String { oid.to_string()[..8].to_string() } fn git_repo_head_sha(p: &Project) -> String { let repo = git2::Repository::open(p.root()).unwrap(); let head = repo.head().unwrap().target().unwrap(); oid_to_short_sha(head) } println!("dep1 head sha: {}", git_repo_head_sha(&git1)); println!("dep2 head sha: {}", git_repo_head_sha(&git2)); p.cargo("build") .with_stderr( "[UPDATING] git repository `[..]`\n\ [UPDATING] git repository `[..]`\n\ [COMPILING] [..] v0.5.0 ([..])\n\ [COMPILING] [..] 
v0.5.0 ([..])\n\ [COMPILING] foo v0.5.0 ([CWD])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", ) .run(); git1.change_file("src/lib.rs", "pub fn foo() {}"); let repo = git2::Repository::open(&git1.root()).unwrap(); git::add(&repo); let oid = git::commit(&repo); println!("dep1 head sha: {}", oid_to_short_sha(oid)); p.cargo("update -p dep1") .with_stderr(&format!( "[UPDATING] git repository `{}`\n\ [UPDATING] dep1 v0.5.0 ([..]) -> #[..]\n\ ", git1.url() )) .run(); } #[cargo_test] fn stale_cached_version() { let bar = git::new("meta-dep", |project| { project .file("Cargo.toml", &basic_manifest("bar", "0.0.0")) .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") }); // Update the git database in the cache with the current state of the git // repo let foo = project() .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.0.0" authors = [] [dependencies.bar] git = '{}' "#, bar.url() ), ) .file( "src/main.rs", r#" extern crate bar; fn main() { assert_eq!(bar::bar(), 1) } "#, ) .build(); foo.cargo("build").run(); foo.process(&foo.bin("foo")).run(); // Update the repo, and simulate someone else updating the lock file and then // us pulling it down. bar.change_file("src/lib.rs", "pub fn bar() -> i32 { 1 + 0 }"); let repo = git2::Repository::open(&bar.root()).unwrap(); git::add(&repo); git::commit(&repo); sleep_ms(1000); let rev = repo.revparse_single("HEAD").unwrap().id(); foo.change_file( "Cargo.lock", &format!( r#" [[package]] name = "foo" version = "0.0.0" dependencies = [ 'bar 0.0.0 (git+{url}#{hash})' ] [[package]] name = "bar" version = "0.0.0" source = 'git+{url}#{hash}' "#, url = bar.url(), hash = rev ), ); // Now build! foo.cargo("build") .with_stderr(&format!( "\ [UPDATING] git repository `{bar}` [COMPILING] bar v0.0.0 ({bar}#[..]) [COMPILING] foo v0.0.0 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", bar = bar.url(), )) .run(); foo.process(&foo.bin("foo")).run(); } #[cargo_test] fn dep_with_changed_submodule() { let project = project(); let git_project = git::new("dep1", |project| { project.file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) }); let git_project2 = git::new("dep2", |project| { project.file("lib.rs", "pub fn dep() -> &'static str { \"project2\" }") }); let git_project3 = git::new("dep3", |project| { project.file("lib.rs", "pub fn dep() -> &'static str { \"project3\" }") }); let repo = git2::Repository::open(&git_project.root()).unwrap(); let mut sub = git::add_submodule(&repo, &git_project2.url().to_string(), Path::new("src")); git::commit(&repo); let p = project .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.dep1] git = '{}' "#, git_project.url() ), ) .file( "src/main.rs", " extern crate dep1; pub fn main() { println!(\"{}\", dep1::dep()) } ", ) .build(); println!("first run"); p.cargo("run") .with_stderr( "[UPDATING] git repository `[..]`\n\ [UPDATING] git submodule `file://[..]/dep2`\n\ [COMPILING] dep1 v0.5.0 ([..])\n\ [COMPILING] foo v0.5.0 ([..])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) in \ [..]\n\ [RUNNING] `target/debug/foo[EXE]`\n", ) .with_stdout("project2\n") .run(); git_project.change_file( ".gitmodules", &format!( "[submodule \"src\"]\n\tpath = src\n\turl={}", git_project3.url() ), ); // Sync the submodule and reset it to the new remote. 
sub.sync().unwrap(); { let subrepo = sub.open().unwrap(); subrepo .remote_add_fetch("origin", "refs/heads/*:refs/heads/*") .unwrap(); subrepo .remote_set_url("origin", &git_project3.url().to_string()) .unwrap(); let mut origin = subrepo.find_remote("origin").unwrap(); origin.fetch(&Vec::::new(), None, None).unwrap(); let id = subrepo.refname_to_id("refs/remotes/origin/master").unwrap(); let obj = subrepo.find_object(id, None).unwrap(); subrepo.reset(&obj, git2::ResetType::Hard, None).unwrap(); } sub.add_to_index(true).unwrap(); git::add(&repo); git::commit(&repo); sleep_ms(1000); // Update the dependency and carry on! println!("update"); p.cargo("update -v") .with_stderr("") .with_stderr(&format!( "[UPDATING] git repository `{}`\n\ [UPDATING] git submodule `file://[..]/dep3`\n\ [UPDATING] dep1 v0.5.0 ([..]) -> #[..]\n\ ", git_project.url() )) .run(); println!("last run"); p.cargo("run") .with_stderr( "[COMPILING] dep1 v0.5.0 ([..])\n\ [COMPILING] foo v0.5.0 ([..])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) in \ [..]\n\ [RUNNING] `target/debug/foo[EXE]`\n", ) .with_stdout("project3\n") .run(); } #[cargo_test] fn dev_deps_with_testing() { let p2 = git::new("bar", |project| { project .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) .file( "src/lib.rs", r#" pub fn gimme() -> &'static str { "zoidberg" } "#, ) }); let p = project() .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dev-dependencies.bar] version = "0.5.0" git = '{}' "#, p2.url() ), ) .file( "src/main.rs", r#" fn main() {} #[cfg(test)] mod tests { extern crate bar; #[test] fn foo() { bar::gimme(); } } "#, ) .build(); // Generate a lock file which did not use `bar` to compile, but had to update // `bar` to generate the lock file p.cargo("build") .with_stderr(&format!( "\ [UPDATING] git repository `{bar}` [COMPILING] foo v0.5.0 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", bar = p2.url() )) .run(); // Make sure we use the previous resolution of `bar` instead of updating it // a second time. p.cargo("test") .with_stderr( "\ [COMPILING] [..] v0.5.0 ([..]) [COMPILING] [..] v0.5.0 ([..] [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/foo-[..][EXE])", ) .with_stdout_contains("test tests::foo ... ok") .run(); } #[cargo_test] fn git_build_cmd_freshness() { let foo = git::new("foo", |project| { project .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] build = "build.rs" "#, ) .file("build.rs", "fn main() {}") .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") .file(".gitignore", "src/bar.rs") }); foo.root().move_into_the_past(); sleep_ms(1000); foo.cargo("build") .with_stderr( "\ [COMPILING] foo v0.0.0 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); // Smoke test to make sure it doesn't compile again println!("first pass"); foo.cargo("build").with_stdout("").run(); // Modify an ignored file and make sure we don't rebuild println!("second pass"); foo.change_file("src/bar.rs", ""); foo.cargo("build").with_stdout("").run(); } #[cargo_test] fn git_name_not_always_needed() { let p2 = git::new("bar", |project| { project .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) .file( "src/lib.rs", r#" pub fn gimme() -> &'static str { "zoidberg" } "#, ) }); let repo = git2::Repository::open(&p2.root()).unwrap(); let mut cfg = repo.config().unwrap(); let _ = cfg.remove("user.name"); let _ = cfg.remove("user.email"); let p = project() .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = [] [dev-dependencies.bar] git = '{}' "#, p2.url() ), ) .file("src/main.rs", "fn main() {}") .build(); // Generate a lock file which did not use `bar` to compile, but had to update // `bar` to generate the lock file p.cargo("build") .with_stderr(&format!( "\ [UPDATING] git repository `{bar}` [COMPILING] foo v0.5.0 ([CWD]) [FINISHED] 
dev [unoptimized + debuginfo] target(s) in [..] ", bar = p2.url() )) .run(); } #[cargo_test] fn git_repo_changing_no_rebuild() { let bar = git::new("bar", |project| { project .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") }); // Lock p1 to the first rev in the git repo let p1 = project() .at("p1") .file( "Cargo.toml", &format!( r#" [project] name = "p1" version = "0.5.0" authors = [] build = 'build.rs' [dependencies.bar] git = '{}' "#, bar.url() ), ) .file("src/main.rs", "fn main() {}") .file("build.rs", "fn main() {}") .build(); p1.root().move_into_the_past(); p1.cargo("build") .with_stderr(&format!( "\ [UPDATING] git repository `{bar}` [COMPILING] [..] [COMPILING] [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", bar = bar.url() )) .run(); // Make a commit to lock p2 to a different rev bar.change_file("src/lib.rs", "pub fn bar() -> i32 { 2 }"); let repo = git2::Repository::open(&bar.root()).unwrap(); git::add(&repo); git::commit(&repo); // Lock p2 to the second rev let p2 = project() .at("p2") .file( "Cargo.toml", &format!( r#" [project] name = "p2" version = "0.5.0" authors = [] [dependencies.bar] git = '{}' "#, bar.url() ), ) .file("src/main.rs", "fn main() {}") .build(); p2.cargo("build") .with_stderr(&format!( "\ [UPDATING] git repository `{bar}` [COMPILING] [..] [COMPILING] [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", bar = bar.url() )) .run(); // And now for the real test! Make sure that p1 doesn't get rebuilt // even though the git repo has changed. 
    // p1's lock file still pins the original rev, so there is nothing to
    // rebuild even though the git repo itself moved on.
    p1.cargo("build").with_stdout("").run();
}

// A build script inside a git project must re-run when its input file
// (`bar.rs.in`) changes, regenerating the source that gets compiled.
#[cargo_test]
fn git_dep_build_cmd() {
    let p = git::new("foo", |project| {
        project
            .file(
                "Cargo.toml",
                r#"
                    [project]
                    name = "foo"
                    version = "0.5.0"
                    authors = ["wycats@example.com"]

                    [dependencies.bar]
                    version = "0.5.0"
                    path = "bar"

                    [[bin]]
                    name = "foo"
                "#,
            )
            .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
            .file(
                "bar/Cargo.toml",
                r#"
                    [project]
                    name = "bar"
                    version = "0.5.0"
                    authors = ["wycats@example.com"]
                    build = "build.rs"

                    [lib]
                    name = "bar"
                    path = "src/bar.rs"
                "#,
            )
            .file(
                "bar/src/bar.rs.in",
                r#"
                    pub fn gimme() -> i32 { 0 }
                "#,
            )
            // The build script copies bar.rs.in over bar.rs on every run,
            // so the compiled output tracks the .in file.
            .file(
                "bar/build.rs",
                r#"
                    use std::fs;
                    fn main() {
                        fs::copy("src/bar.rs.in", "src/bar.rs").unwrap();
                    }
                "#,
            )
    });

    // Age the mtimes so the subsequent change is detected as newer.
    p.root().join("bar").move_into_the_past();

    p.cargo("build").run();

    p.process(&p.bin("foo")).with_stdout("0\n").run();

    // Touching bar.rs.in should cause the `build` command to run again.
    p.change_file("bar/src/bar.rs.in", "pub fn gimme() -> i32 { 1 }");

    p.cargo("build").run();

    p.process(&p.bin("foo")).with_stdout("1\n").run();
}

// `cargo fetch` downloads the git dependency once; a second fetch with the
// source already cached produces no output.
#[cargo_test]
fn fetch_downloads() {
    let bar = git::new("bar", |project| {
        project
            .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
            .file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
    });

    let p = project()
        .file(
            "Cargo.toml",
            &format!(
                r#"
                    [project]
                    name = "foo"
                    version = "0.5.0"
                    authors = []

                    [dependencies.bar]
                    git = '{}'
                "#,
                bar.url()
            ),
        )
        .file("src/main.rs", "fn main() {}")
        .build();
    p.cargo("fetch")
        .with_stderr(&format!(
            "[UPDATING] git repository `{url}`",
            url = bar.url()
        ))
        .run();

    p.cargo("fetch").with_stdout("").run();
}

// Lint warnings inside a git dependency (here: a dead `fn unused`) must not
// fail — or even warn in — the downstream build.
#[cargo_test]
fn warnings_in_git_dep() {
    let bar = git::new("bar", |project| {
        project
            .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
            .file("src/lib.rs", "fn unused() {}")
    });

    let p = project()
        .file(
            "Cargo.toml",
            &format!(
                r#"
                    [project]
                    name = "foo"
                    version = "0.5.0"
                    authors = []

                    [dependencies.bar]
                    git = '{}'
                "#,
                bar.url()
            ),
        )
        .file("src/main.rs", "fn main() {}")
        .build();

    p.cargo("build")
.with_stderr(&format!( "[UPDATING] git repository `{}`\n\ [COMPILING] bar v0.5.0 ({}#[..])\n\ [COMPILING] foo v0.5.0 ([CWD])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", bar.url(), bar.url(), )) .run(); } #[cargo_test] fn update_ambiguous() { let bar1 = git::new("bar1", |project| { project .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) .file("src/lib.rs", "") }); let bar2 = git::new("bar2", |project| { project .file("Cargo.toml", &basic_manifest("bar", "0.6.0")) .file("src/lib.rs", "") }); let baz = git::new("baz", |project| { project .file( "Cargo.toml", &format!( r#" [package] name = "baz" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] git = '{}' "#, bar2.url() ), ) .file("src/lib.rs", "") }); let p = project() .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies.bar] git = '{}' [dependencies.baz] git = '{}' "#, bar1.url(), baz.url() ), ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("generate-lockfile").run(); p.cargo("update -p bar") .with_status(101) .with_stderr( "\ [ERROR] There are multiple `bar` packages in your project, and the specification `bar` \ is ambiguous. 
Please re-run this command with `-p ` where `` is one of the \ following: bar@0.[..].0 bar@0.[..].0 ", ) .run(); } #[cargo_test] fn update_one_dep_in_repo_with_many_deps() { let bar = git::new("bar", |project| { project .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) .file("src/lib.rs", "") .file("a/Cargo.toml", &basic_manifest("a", "0.5.0")) .file("a/src/lib.rs", "") }); let p = project() .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies.bar] git = '{}' [dependencies.a] git = '{}' "#, bar.url(), bar.url() ), ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("generate-lockfile").run(); p.cargo("update -p bar") .with_stderr(&format!("[UPDATING] git repository `{}`", bar.url())) .run(); } #[cargo_test] fn switch_deps_does_not_update_transitive() { let transitive = git::new("transitive", |project| { project .file("Cargo.toml", &basic_manifest("transitive", "0.5.0")) .file("src/lib.rs", "") }); let dep1 = git::new("dep1", |project| { project .file( "Cargo.toml", &format!( r#" [package] name = "dep" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.transitive] git = '{}' "#, transitive.url() ), ) .file("src/lib.rs", "") }); let dep2 = git::new("dep2", |project| { project .file( "Cargo.toml", &format!( r#" [package] name = "dep" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.transitive] git = '{}' "#, transitive.url() ), ) .file("src/lib.rs", "") }); let p = project() .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies.dep] git = '{}' "#, dep1.url() ), ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build") .with_stderr(&format!( "\ [UPDATING] git repository `{}` [UPDATING] git repository `{}` [COMPILING] transitive [..] [COMPILING] dep [..] [COMPILING] foo [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", dep1.url(), transitive.url() )) .run(); // Update the dependency to point to the second repository, but this // shouldn't update the transitive dependency which is the same. p.change_file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies.dep] git = '{}' "#, dep2.url() ), ); p.cargo("build") .with_stderr(&format!( "\ [UPDATING] git repository `{}` [COMPILING] dep [..] [COMPILING] foo [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", dep2.url() )) .run(); } #[cargo_test] fn update_one_source_updates_all_packages_in_that_git_source() { let dep = git::new("dep", |project| { project .file( "Cargo.toml", r#" [package] name = "dep" version = "0.5.0" authors = [] [dependencies.a] path = "a" "#, ) .file("src/lib.rs", "") .file("a/Cargo.toml", &basic_manifest("a", "0.5.0")) .file("a/src/lib.rs", "") }); let p = project() .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies.dep] git = '{}' "#, dep.url() ), ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build").run(); let repo = git2::Repository::open(&dep.root()).unwrap(); let rev1 = repo.revparse_single("HEAD").unwrap().id(); // Just be sure to change a file dep.change_file("src/lib.rs", "pub fn bar() -> i32 { 2 }"); git::add(&repo); git::commit(&repo); p.cargo("update -p dep").run(); let lockfile = p.read_lockfile(); assert!( !lockfile.contains(&rev1.to_string()), "{} in {}", rev1, lockfile ); } #[cargo_test] fn switch_sources() { let a1 = git::new("a1", |project| { project .file("Cargo.toml", &basic_manifest("a", "0.5.0")) .file("src/lib.rs", "") }); let a2 = git::new("a2", |project| { project .file("Cargo.toml", &basic_manifest("a", "0.5.1")) .file("src/lib.rs", "") }); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies.b] path = "b" "#, ) .file("src/main.rs", "fn main() {}") .file( "b/Cargo.toml", &format!( r#" [project] name = "b" 
version = "0.5.0" authors = [] [dependencies.a] git = '{}' "#, a1.url() ), ) .file("b/src/lib.rs", "pub fn main() {}") .build(); p.cargo("build") .with_stderr( "\ [UPDATING] git repository `file://[..]a1` [COMPILING] a v0.5.0 ([..]a1#[..] [COMPILING] b v0.5.0 ([..]) [COMPILING] foo v0.5.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.change_file( "b/Cargo.toml", &format!( r#" [project] name = "b" version = "0.5.0" authors = [] [dependencies.a] git = '{}' "#, a2.url() ), ); p.cargo("build") .with_stderr( "\ [UPDATING] git repository `file://[..]a2` [COMPILING] a v0.5.1 ([..]a2#[..] [COMPILING] b v0.5.0 ([..]) [COMPILING] foo v0.5.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn dont_require_submodules_are_checked_out() { let p = project().build(); let git1 = git::new("dep1", |p| { p.file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" "#, ) .file("build.rs", "fn main() {}") .file("src/lib.rs", "") .file("a/foo", "") }); let git2 = git::new("dep2", |p| p); let repo = git2::Repository::open(&git1.root()).unwrap(); let url = path2url(git2.root()).to_string(); git::add_submodule(&repo, &url, Path::new("a/submodule")); git::commit(&repo); git2::Repository::init(&p.root()).unwrap(); let url = path2url(git1.root()).to_string(); let dst = paths::home().join("foo"); git2::Repository::clone(&url, &dst).unwrap(); git1.cargo("build -v").cwd(&dst).run(); } #[cargo_test] fn doctest_same_name() { let a2 = git::new("a2", |p| { p.file("Cargo.toml", &basic_manifest("a", "0.5.0")) .file("src/lib.rs", "pub fn a2() {}") }); let a1 = git::new("a1", |p| { p.file( "Cargo.toml", &format!( r#" [project] name = "a" version = "0.5.0" authors = [] [dependencies] a = {{ git = '{}' }} "#, a2.url() ), ) .file("src/lib.rs", "extern crate a; pub fn a1() {}") }); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] 
[dependencies] a = {{ git = '{}' }} "#, a1.url() ), ) .file( "src/lib.rs", r#" #[macro_use] extern crate a; "#, ) .build(); p.cargo("test -v").run(); } #[cargo_test] fn lints_are_suppressed() { let a = git::new("a", |p| { p.file("Cargo.toml", &basic_manifest("a", "0.5.0")).file( "src/lib.rs", " use std::option; ", ) }); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] a = {{ git = '{}' }} "#, a.url() ), ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_stderr( "\ [UPDATING] git repository `[..]` [COMPILING] a v0.5.0 ([..]) [COMPILING] foo v0.0.1 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn denied_lints_are_allowed() { let a = git::new("a", |p| { p.file("Cargo.toml", &basic_manifest("a", "0.5.0")).file( "src/lib.rs", " #![deny(warnings)] use std::option; ", ) }); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] a = {{ git = '{}' }} "#, a.url() ), ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_stderr( "\ [UPDATING] git repository `[..]` [COMPILING] a v0.5.0 ([..]) [COMPILING] foo v0.0.1 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn add_a_git_dep() { let git = git::new("git", |p| { p.file("Cargo.toml", &basic_manifest("git", "0.5.0")) .file("src/lib.rs", "") }); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] a = {{ path = 'a' }} git = {{ git = '{}' }} "#, git.url() ), ) .file("src/lib.rs", "") .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) .file("a/src/lib.rs", "") .build(); p.cargo("build").run(); assert!(paths::home().join(".cargo/git/CACHEDIR.TAG").is_file()); p.change_file( "a/Cargo.toml", &format!( r#" [package] name = "a" version = "0.0.1" authors = [] [dependencies] git = {{ git = '{}' }} "#, git.url() ), ); p.cargo("build").run(); } #[cargo_test] fn two_at_rev_instead_of_tag() { let git = git::new("git", |p| { p.file("Cargo.toml", &basic_manifest("git1", "0.5.0")) .file("src/lib.rs", "") .file("a/Cargo.toml", &basic_manifest("git2", "0.5.0")) .file("a/src/lib.rs", "") }); // Make a tag corresponding to the current HEAD let repo = git2::Repository::open(&git.root()).unwrap(); let head = repo.head().unwrap().target().unwrap(); repo.tag( "v0.1.0", &repo.find_object(head, None).unwrap(), &repo.signature().unwrap(), "make a new tag", false, ) .unwrap(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] git1 = {{ git = '{0}', rev = 'v0.1.0' }} git2 = {{ git = '{0}', rev = 'v0.1.0' }} "#, git.url() ), ) .file("src/lib.rs", "") .build(); p.cargo("generate-lockfile").run(); p.cargo("build -v").run(); } #[cargo_test] fn include_overrides_gitignore() { // Make sure that `package.include` takes precedence over .gitignore. 
let p = git::new("foo", |repo| { repo.file( "Cargo.toml", r#" [package] name = "foo" version = "0.5.0" include = ["src/lib.rs", "ignored.txt", "Cargo.toml"] "#, ) .file( ".gitignore", r#" /target Cargo.lock ignored.txt "#, ) .file("src/lib.rs", "") .file("ignored.txt", "") .file("build.rs", "fn main() {}") }); p.cargo("build").run(); p.change_file("ignored.txt", "Trigger rebuild."); p.cargo("build -v") .with_stderr( "\ [COMPILING] foo v0.5.0 ([..]) [RUNNING] `[..]build-script-build[..]` [RUNNING] `rustc --crate-name foo src/lib.rs [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("package --list --allow-dirty") .with_stdout( "\ Cargo.toml Cargo.toml.orig ignored.txt src/lib.rs ", ) .run(); } #[cargo_test] fn invalid_git_dependency_manifest() { let project = project(); let git_project = git::new("dep1", |project| { project .file( "Cargo.toml", r#" [project] name = "dep1" version = "0.5.0" authors = ["carlhuda@example.com"] categories = ["algorithms"] categories = ["algorithms"] [lib] name = "dep1" "#, ) .file( "src/dep1.rs", r#" pub fn hello() -> &'static str { "hello world" } "#, ) }); let project = project .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.dep1] git = '{}' "#, git_project.url() ), ) .file( "src/main.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"]), ) .build(); let git_root = git_project.root(); project .cargo("build") .with_status(101) .with_stderr(&format!( "\ [UPDATING] git repository `{}` [ERROR] failed to get `dep1` as a dependency of package `foo v0.5.0 ([..])` Caused by: failed to load source for dependency `dep1` Caused by: Unable to update {} Caused by: failed to parse manifest at `[..]` Caused by: could not parse input as TOML Caused by: TOML parse error at line 8, column 21 | 8 | categories = [\"algorithms\"] | ^ Duplicate key `categories` in table `project` ", path2url(&git_root), path2url(&git_root), )) .run(); } 
// A submodule whose URL points at a server speaking garbage must make the
// build fail with a "failed to update submodule" error — and keep failing
// the same way on a retry (no half-initialized state left behind).
#[cargo_test]
fn failed_submodule_checkout() {
    let project = project();
    let git_project = git::new("dep1", |project| {
        project.file("Cargo.toml", &basic_manifest("dep1", "0.5.0"))
    });

    let git_project2 = git::new("dep2", |project| project.file("lib.rs", ""));

    // A throwaway TCP server that answers every connection with junk, so any
    // git fetch against it fails. `done` lets the test shut the thread down.
    let listener = TcpListener::bind("127.0.0.1:0").unwrap();
    let addr = listener.local_addr().unwrap();
    let done = Arc::new(AtomicBool::new(false));
    let done2 = done.clone();

    let t = thread::spawn(move || {
        while !done2.load(Ordering::SeqCst) {
            if let Ok((mut socket, _)) = listener.accept() {
                // Write errors are irrelevant; the peer may already be gone.
                drop(socket.write_all(b"foo\r\n"));
            }
        }
    });

    // Give dep2 a nested submodule `bar` whose URL is the junk server above.
    let repo = git2::Repository::open(&git_project2.root()).unwrap();
    let url = format!("https://{}:{}/", addr.ip(), addr.port());
    {
        let mut s = repo.submodule(&url, Path::new("bar"), false).unwrap();
        let subrepo = s.open().unwrap();
        let mut cfg = subrepo.config().unwrap();
        cfg.set_str("user.email", "foo@bar.com").unwrap();
        cfg.set_str("user.name", "Foo Bar").unwrap();
        git::commit(&subrepo);
        s.add_finalize().unwrap();
    }
    git::commit(&repo);
    drop((repo, url));

    // dep1 embeds dep2 as its `src` submodule, so fetching dep1 recurses into
    // the broken nested submodule.
    let repo = git2::Repository::open(&git_project.root()).unwrap();
    let url = path2url(git_project2.root()).to_string();
    git::add_submodule(&repo, &url, Path::new("src"));
    git::commit(&repo);
    drop(repo);

    let project = project
        .file(
            "Cargo.toml",
            &format!(
                r#"
                    [project]
                    name = "foo"
                    version = "0.5.0"
                    authors = []

                    [dependencies]
                    dep1 = {{ git = '{}' }}
                "#,
                git_project.url()
            ),
        )
        .file("src/lib.rs", "")
        .build();

    // Both the outer (`src`) and nested (`bar`) submodule failures must be
    // reported, on the first build and again on a retry.
    project
        .cargo("build")
        .with_status(101)
        .with_stderr_contains(" failed to update submodule `src`")
        .with_stderr_contains(" failed to update submodule `bar`")
        .run();
    project
        .cargo("build")
        .with_status(101)
        .with_stderr_contains(" failed to update submodule `src`")
        .with_stderr_contains(" failed to update submodule `bar`")
        .run();

    // Stop the helper thread; the extra connect unblocks a pending accept().
    done.store(true, Ordering::SeqCst);
    drop(TcpStream::connect(&addr));
    t.join().unwrap();
}

#[cargo_test(requires_git)]
fn use_the_cli() {
    let project = project();
    let git_project = git::new("dep1",
|project| { project .file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) .file("src/lib.rs", "") }); let project = project .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] dep1 = {{ git = '{}' }} "#, git_project.url() ), ) .file("src/lib.rs", "") .file( ".cargo/config", " [net] git-fetch-with-cli = true ", ) .build(); let stderr = "\ [UPDATING] git repository `[..]` [RUNNING] `git fetch [..]` [COMPILING] dep1 [..] [RUNNING] `rustc [..]` [COMPILING] foo [..] [RUNNING] `rustc [..]` [FINISHED] [..] "; project.cargo("build -v").with_stderr(stderr).run(); assert!(paths::home().join(".cargo/git/CACHEDIR.TAG").is_file()); } #[cargo_test] fn templatedir_doesnt_cause_problems() { let git_project2 = git::new("dep2", |project| { project .file("Cargo.toml", &basic_manifest("dep2", "0.5.0")) .file("src/lib.rs", "") }); let git_project = git::new("dep1", |project| { project .file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) .file("src/lib.rs", "") }); let p = project() .file( "Cargo.toml", &format!( r#" [project] name = "fo" version = "0.5.0" authors = [] [dependencies] dep1 = {{ git = '{}' }} "#, git_project.url() ), ) .file("src/main.rs", "fn main() {}") .build(); fs::write( paths::home().join(".gitconfig"), format!( r#" [init] templatedir = {} "#, git_project2 .url() .to_file_path() .unwrap() .to_str() .unwrap() .replace("\\", "/") ), ) .unwrap(); p.cargo("build").run(); } #[cargo_test(requires_git)] fn git_with_cli_force() { // Supports a force-pushed repo. 
let git_project = git::new("dep1", |project| { project .file("Cargo.toml", &basic_lib_manifest("dep1")) .file("src/lib.rs", r#"pub fn f() { println!("one"); }"#) }); let p = project() .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.0.1" edition = "2018" [dependencies] dep1 = {{ git = "{}" }} "#, git_project.url() ), ) .file("src/main.rs", "fn main() { dep1::f(); }") .file( ".cargo/config", " [net] git-fetch-with-cli = true ", ) .build(); p.cargo("build").run(); p.rename_run("foo", "foo1").with_stdout("one").run(); // commit --amend a change that will require a force fetch. let repo = git2::Repository::open(&git_project.root()).unwrap(); git_project.change_file("src/lib.rs", r#"pub fn f() { println!("two"); }"#); git::add(&repo); let id = repo.refname_to_id("HEAD").unwrap(); let commit = repo.find_commit(id).unwrap(); let tree_id = t!(t!(repo.index()).write_tree()); t!(commit.amend( Some("HEAD"), None, None, None, None, Some(&t!(repo.find_tree(tree_id))) )); // Perform the fetch. p.cargo("update").run(); p.cargo("build").run(); p.rename_run("foo", "foo2").with_stdout("two").run(); } #[cargo_test(requires_git)] fn git_fetch_cli_env_clean() { // This tests that git-fetch-with-cli works when GIT_DIR environment // variable is set (for whatever reason). let git_dep = git::new("dep1", |project| { project .file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) .file("src/lib.rs", "") }); let git_proj = git::new("foo", |project| { project .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.1.0" [dependencies] dep1 = {{ git = '{}' }} "#, git_dep.url() ), ) .file("src/lib.rs", "pub extern crate dep1;") .file( ".cargo/config", " [net] git-fetch-with-cli = true ", ) }); // The directory set here isn't too important. Pointing to our own git // directory causes git to be confused and fail. Can also point to an // empty directory, or a nonexistent one. 
git_proj .cargo("fetch") .env("GIT_DIR", git_proj.root().join(".git")) .run(); } #[cargo_test] fn dirty_submodule() { // `cargo package` warns for dirty file in submodule. let (git_project, repo) = git::new_repo("foo", |project| { project .file("Cargo.toml", &basic_manifest("foo", "0.5.0")) // This is necessary because `git::add` is too eager. .file(".gitignore", "/target") }); let git_project2 = git::new("src", |project| { project.no_manifest().file("lib.rs", "pub fn f() {}") }); let url = path2url(git_project2.root()).to_string(); git::add_submodule(&repo, &url, Path::new("src")); // Submodule added, but not committed. git_project .cargo("package --no-verify") .with_status(101) .with_stderr( "\ [WARNING] manifest has no [..] See [..] [ERROR] 1 files in the working directory contain changes that were not yet committed into git: .gitmodules to proceed despite [..] ", ) .run(); git::commit(&repo); git_project.cargo("package --no-verify").run(); // Modify file, check for warning. git_project.change_file("src/lib.rs", ""); git_project .cargo("package --no-verify") .with_status(101) .with_stderr( "\ [WARNING] manifest has no [..] See [..] [ERROR] 1 files in the working directory contain changes that were not yet committed into git: src/lib.rs to proceed despite [..] ", ) .run(); // Commit the change. let sub_repo = git2::Repository::open(git_project.root().join("src")).unwrap(); git::add(&sub_repo); git::commit(&sub_repo); git::add(&repo); git::commit(&repo); git_project.cargo("package --no-verify").run(); // Try with a nested submodule. let git_project3 = git::new("bar", |project| project.no_manifest().file("mod.rs", "")); let url = path2url(git_project3.root()).to_string(); git::add_submodule(&sub_repo, &url, Path::new("bar")); git_project .cargo("package --no-verify") .with_status(101) .with_stderr( "\ [WARNING] manifest has no [..] See [..] 
[ERROR] 1 files in the working directory contain changes that were not yet committed into git: src/.gitmodules to proceed despite [..] ", ) .run(); // Commit the submodule addition. git::commit(&sub_repo); git::add(&repo); git::commit(&repo); git_project.cargo("package --no-verify").run(); // Modify within nested submodule. git_project.change_file("src/bar/new_file.rs", "//test"); git_project .cargo("package --no-verify") .with_status(101) .with_stderr( "\ [WARNING] manifest has no [..] See [..] [ERROR] 1 files in the working directory contain changes that were not yet committed into git: src/bar/new_file.rs to proceed despite [..] ", ) .run(); // And commit the change. let sub_sub_repo = git2::Repository::open(git_project.root().join("src/bar")).unwrap(); git::add(&sub_sub_repo); git::commit(&sub_sub_repo); git::add(&sub_repo); git::commit(&sub_repo); git::add(&repo); git::commit(&repo); git_project.cargo("package --no-verify").run(); } #[cargo_test] fn default_not_master() { let project = project(); // Create a repository with a `master` branch, but switch the head to a // branch called `main` at the same time. let (git_project, repo) = git::new_repo("dep1", |project| { project .file("Cargo.toml", &basic_lib_manifest("dep1")) .file("src/lib.rs", "pub fn foo() {}") }); let head_id = repo.head().unwrap().target().unwrap(); let head = repo.find_commit(head_id).unwrap(); repo.branch("main", &head, false).unwrap(); repo.set_head("refs/heads/main").unwrap(); // Then create a commit on the new `main` branch so `master` and `main` // differ. 
git_project.change_file("src/lib.rs", "pub fn bar() {}"); git::add(&repo); git::commit(&repo); let project = project .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" [dependencies] dep1 = {{ git = '{}' }} "#, git_project.url() ), ) .file("src/lib.rs", "pub fn foo() { dep1::bar() }") .build(); project .cargo("build") .with_stderr( "\ [UPDATING] git repository `[..]` [COMPILING] dep1 v0.5.0 ([..]) [COMPILING] foo v0.5.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", ) .run(); } #[cargo_test] fn historical_lockfile_works() { let project = project(); let (git_project, repo) = git::new_repo("dep1", |project| { project .file("Cargo.toml", &basic_lib_manifest("dep1")) .file("src/lib.rs", "") }); let head_id = repo.head().unwrap().target().unwrap(); let project = project .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" [dependencies] dep1 = {{ git = '{}', branch = 'master' }} "#, git_project.url() ), ) .file("src/lib.rs", "") .build(); project.cargo("build").run(); project.change_file( "Cargo.lock", &format!( r#"# This file is automatically @generated by Cargo. # It is not intended for manual editing. 
[[package]] name = "dep1" version = "0.5.0" source = "git+{}#{}" [[package]] name = "foo" version = "0.5.0" dependencies = [ "dep1", ] "#, git_project.url(), head_id ), ); project .cargo("build") .with_stderr("[FINISHED] [..]\n") .run(); } #[cargo_test] fn historical_lockfile_works_with_vendor() { let project = project(); let (git_project, repo) = git::new_repo("dep1", |project| { project .file("Cargo.toml", &basic_lib_manifest("dep1")) .file("src/lib.rs", "") }); let head_id = repo.head().unwrap().target().unwrap(); let project = project .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" [dependencies] dep1 = {{ git = '{}', branch = 'master' }} "#, git_project.url() ), ) .file("src/lib.rs", "") .build(); let output = project.cargo("vendor").exec_with_output().unwrap(); project.change_file(".cargo/config", str::from_utf8(&output.stdout).unwrap()); project.change_file( "Cargo.lock", &format!( r#"# This file is automatically @generated by Cargo. # It is not intended for manual editing. [[package]] name = "dep1" version = "0.5.0" source = "git+{}#{}" [[package]] name = "foo" version = "0.5.0" dependencies = [ "dep1", ] "#, git_project.url(), head_id ), ); project.cargo("build").run(); } #[cargo_test] fn two_dep_forms() { let project = project(); let (git_project, _repo) = git::new_repo("dep1", |project| { project .file("Cargo.toml", &basic_lib_manifest("dep1")) .file("src/lib.rs", "") }); let project = project .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" [dependencies] dep1 = {{ git = '{}', branch = 'master' }} a = {{ path = 'a' }} "#, git_project.url() ), ) .file("src/lib.rs", "") .file( "a/Cargo.toml", &format!( r#" [project] name = "a" version = "0.5.0" [dependencies] dep1 = {{ git = '{}' }} "#, git_project.url() ), ) .file("a/src/lib.rs", "") .build(); // This'll download the git repository twice, one with HEAD and once with // the master branch. 
Then it'll compile 4 crates, the 2 git deps, then // the two local deps. project .cargo("build") .with_stderr( "\ [UPDATING] [..] [UPDATING] [..] [COMPILING] [..] [COMPILING] [..] [COMPILING] [..] [COMPILING] [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn metadata_master_consistency() { // SourceId consistency in the `cargo metadata` output when `master` is // explicit or implicit, using new or old Cargo.lock. let (git_project, git_repo) = git::new_repo("bar", |project| { project .file("Cargo.toml", &basic_manifest("bar", "1.0.0")) .file("src/lib.rs", "") }); let bar_hash = git_repo.head().unwrap().target().unwrap().to_string(); // Explicit branch="master" with a lock file created before 1.47 (does not contain ?branch=master). let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = {{ git = "{}", branch = "master" }} "#, git_project.url() ), ) .file( "Cargo.lock", &format!( r#" [[package]] name = "bar" version = "1.0.0" source = "git+{}#{}" [[package]] name = "foo" version = "0.1.0" dependencies = [ "bar", ] "#, git_project.url(), bar_hash, ), ) .file("src/lib.rs", "") .build(); let metadata = |bar_source| -> String { r#" { "packages": [ { "name": "bar", "version": "1.0.0", "id": "bar 1.0.0 (__BAR_SOURCE__#__BAR_HASH__)", "license": null, "license_file": null, "description": null, "source": "__BAR_SOURCE__#__BAR_HASH__", "dependencies": [], "targets": "{...}", "features": {}, "manifest_path": "[..]", "metadata": null, "publish": null, "authors": [], "categories": [], "default_run": null, "keywords": [], "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "edition": "2015", "links": null }, { "name": "foo", "version": "0.1.0", "id": "foo 0.1.0 [..]", "license": null, "license_file": null, "description": null, "source": null, "dependencies": [ { "name": "bar", "source": "__BAR_SOURCE__", "req": "*", "kind": null, "rename": null, "optional": false, 
"uses_default_features": true, "features": [], "target": null, "registry": null } ], "targets": "{...}", "features": {}, "manifest_path": "[..]", "metadata": null, "publish": null, "authors": [], "categories": [], "default_run": null, "keywords": [], "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "edition": "2015", "links": null } ], "workspace_members": [ "foo 0.1.0 [..]" ], "resolve": { "nodes": [ { "id": "bar 1.0.0 (__BAR_SOURCE__#__BAR_HASH__)", "dependencies": [], "deps": [], "features": [] }, { "id": "foo 0.1.0 [..]", "dependencies": [ "bar 1.0.0 (__BAR_SOURCE__#__BAR_HASH__)" ], "deps": [ { "name": "bar", "pkg": "bar 1.0.0 (__BAR_SOURCE__#__BAR_HASH__)", "dep_kinds": [ { "kind": null, "target": null } ] } ], "features": [] } ], "root": "foo 0.1.0 [..]" }, "target_directory": "[..]", "version": 1, "workspace_root": "[..]", "metadata": null } "# .replace("__BAR_SOURCE__", bar_source) .replace("__BAR_HASH__", &bar_hash) }; let bar_source = format!("git+{}?branch=master", git_project.url()); p.cargo("metadata").with_json(&metadata(&bar_source)).run(); // Conversely, remove branch="master" from Cargo.toml, but use a new Cargo.lock that has ?branch=master. let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = {{ git = "{}" }} "#, git_project.url() ), ) .file( "Cargo.lock", &format!( r#" [[package]] name = "bar" version = "1.0.0" source = "git+{}?branch=master#{}" [[package]] name = "foo" version = "0.1.0" dependencies = [ "bar", ] "#, git_project.url(), bar_hash ), ) .file("src/lib.rs", "") .build(); // No ?branch=master! let bar_source = format!("git+{}", git_project.url()); p.cargo("metadata").with_json(&metadata(&bar_source)).run(); } #[cargo_test] fn git_with_force_push() { // Checks that cargo can handle force-pushes to git repos. 
// This works by having a git dependency that is updated with an amend // commit, and tries with various forms (default branch, branch, rev, // tag). let main = |text| format!(r#"pub fn f() {{ println!("{}"); }}"#, text); let (git_project, repo) = git::new_repo("dep1", |project| { project .file("Cargo.toml", &basic_lib_manifest("dep1")) .file("src/lib.rs", &main("one")) }); let manifest = |extra| { format!( r#" [project] name = "foo" version = "0.0.1" edition = "2018" [dependencies] dep1 = {{ git = "{}"{} }} "#, git_project.url(), extra ) }; let p = project() .file("Cargo.toml", &manifest("")) .file("src/main.rs", "fn main() { dep1::f(); }") .build(); // Download the original and make sure it is OK. p.cargo("build").run(); p.rename_run("foo", "foo1").with_stdout("one").run(); let find_head = || t!(t!(repo.head()).peel_to_commit()); let amend_commit = |text| { // commit --amend a change that will require a force fetch. git_project.change_file("src/lib.rs", &main(text)); git::add(&repo); let commit = find_head(); let tree_id = t!(t!(repo.index()).write_tree()); t!(commit.amend( Some("HEAD"), None, None, None, None, Some(&t!(repo.find_tree(tree_id))) )); }; let mut rename_annoyance = 1; let mut verify = |text: &str| { // Perform the fetch. p.cargo("update").run(); p.cargo("build").run(); rename_annoyance += 1; p.rename_run("foo", &format!("foo{}", rename_annoyance)) .with_stdout(text) .run(); }; amend_commit("two"); verify("two"); // Try with a rev. let head1 = find_head().id().to_string(); let extra = format!(", rev = \"{}\"", head1); p.change_file("Cargo.toml", &manifest(&extra)); verify("two"); amend_commit("three"); let head2 = find_head().id().to_string(); assert_ne!(&head1, &head2); let extra = format!(", rev = \"{}\"", head2); p.change_file("Cargo.toml", &manifest(&extra)); verify("three"); // Try with a tag. 
git::tag(&repo, "my-tag"); p.change_file("Cargo.toml", &manifest(", tag = \"my-tag\"")); verify("three"); amend_commit("tag-three"); let head = t!(t!(repo.head()).peel(git2::ObjectType::Commit)); t!(repo.tag("my-tag", &head, &t!(repo.signature()), "move tag", true)); verify("tag-three"); // Try with a branch. let br = t!(repo.branch("awesome-stuff", &find_head(), false)); t!(repo.checkout_tree(&t!(br.get().peel(git2::ObjectType::Tree)), None)); t!(repo.set_head("refs/heads/awesome-stuff")); git_project.change_file("src/lib.rs", &main("awesome-three")); git::add(&repo); git::commit(&repo); p.change_file("Cargo.toml", &manifest(", branch = \"awesome-stuff\"")); verify("awesome-three"); amend_commit("awesome-four"); verify("awesome-four"); } #[cargo_test] fn corrupted_checkout() { // Test what happens if the checkout is corrupted somehow. _corrupted_checkout(false); } #[cargo_test] fn corrupted_checkout_with_cli() { // Test what happens if the checkout is corrupted somehow with git cli. _corrupted_checkout(true); } fn _corrupted_checkout(with_cli: bool) { let git_project = git::new("dep1", |project| { project .file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) .file("src/lib.rs", "") }); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.1.0" [dependencies] dep1 = {{ git = "{}" }} "#, git_project.url() ), ) .file("src/lib.rs", "") .build(); p.cargo("fetch").run(); let mut paths = t!(glob::glob( paths::home() .join(".cargo/git/checkouts/dep1-*/*") .to_str() .unwrap() )); let path = paths.next().unwrap().unwrap(); let ok = path.join(".cargo-ok"); // Deleting this file simulates an interrupted checkout. t!(fs::remove_file(&ok)); // This should refresh the checkout. let mut e = p.cargo("fetch"); if with_cli { e.env("CARGO_NET_GIT_FETCH_WITH_CLI", "true"); } e.run(); assert!(ok.exists()); } cargo-0.66.0/tests/testsuite/git_auth.rs000066400000000000000000000245351432416201200202470ustar00rootroot00000000000000//! 
Tests for git authentication. use std::collections::HashSet; use std::io::prelude::*; use std::io::BufReader; use std::net::{SocketAddr, TcpListener}; use std::sync::atomic::{AtomicUsize, Ordering::SeqCst}; use std::sync::Arc; use std::thread::{self, JoinHandle}; use cargo_test_support::paths; use cargo_test_support::{basic_manifest, project}; fn setup_failed_auth_test() -> (SocketAddr, JoinHandle<()>, Arc) { let server = TcpListener::bind("127.0.0.1:0").unwrap(); let addr = server.local_addr().unwrap(); fn headers(rdr: &mut dyn BufRead) -> HashSet { let valid = ["GET", "Authorization", "Accept"]; rdr.lines() .map(|s| s.unwrap()) .take_while(|s| s.len() > 2) .map(|s| s.trim().to_string()) .filter(|s| valid.iter().any(|prefix| s.starts_with(*prefix))) .collect() } let connections = Arc::new(AtomicUsize::new(0)); let connections2 = connections.clone(); let t = thread::spawn(move || { let mut conn = BufReader::new(server.accept().unwrap().0); let req = headers(&mut conn); connections2.fetch_add(1, SeqCst); conn.get_mut() .write_all( b"HTTP/1.1 401 Unauthorized\r\n\ WWW-Authenticate: Basic realm=\"wheee\"\r\n\ Content-Length: 0\r\n\ \r\n", ) .unwrap(); assert_eq!( req, vec![ "GET /foo/bar/info/refs?service=git-upload-pack HTTP/1.1", "Accept: */*", ] .into_iter() .map(|s| s.to_string()) .collect() ); let req = headers(&mut conn); connections2.fetch_add(1, SeqCst); conn.get_mut() .write_all( b"HTTP/1.1 401 Unauthorized\r\n\ WWW-Authenticate: Basic realm=\"wheee\"\r\n\ \r\n", ) .unwrap(); assert_eq!( req, vec![ "GET /foo/bar/info/refs?service=git-upload-pack HTTP/1.1", "Authorization: Basic Zm9vOmJhcg==", "Accept: */*", ] .into_iter() .map(|s| s.to_string()) .collect() ); }); let script = project() .at("script") .file("Cargo.toml", &basic_manifest("script", "0.1.0")) .file( "src/main.rs", r#" fn main() { println!("username=foo"); println!("password=bar"); } "#, ) .build(); script.cargo("build -v").run(); let script = script.bin("script"); let config = 
paths::home().join(".gitconfig"); let mut config = git2::Config::open(&config).unwrap(); config .set_str( "credential.helper", // This is a bash script so replace `\` with `/` for Windows &script.display().to_string().replace("\\", "/"), ) .unwrap(); (addr, t, connections) } // Tests that HTTP auth is offered from `credential.helper`. #[cargo_test] fn http_auth_offered() { let (addr, t, connections) = setup_failed_auth_test(); let p = project() .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] git = "http://127.0.0.1:{}/foo/bar" "#, addr.port() ), ) .file("src/main.rs", "") .file( ".cargo/config", "[net] retry = 0 ", ) .build(); // This is a "contains" check because the last error differs by platform, // may span multiple lines, and isn't relevant to this test. p.cargo("build") .with_status(101) .with_stderr_contains(&format!( "\ [UPDATING] git repository `http://{addr}/foo/bar` [ERROR] failed to get `bar` as a dependency of package `foo v0.0.1 [..]` Caused by: failed to load source for dependency `bar` Caused by: Unable to update http://{addr}/foo/bar Caused by: failed to clone into: [..] Caused by: failed to authenticate when downloading repository * attempted to find username/password via `credential.helper`, but [..] if the git CLI succeeds then `net.git-fetch-with-cli` may help here https://[..] Caused by: ", addr = addr )) .run(); assert_eq!(connections.load(SeqCst), 2); t.join().ok().unwrap(); } // Boy, sure would be nice to have a TLS implementation in rust! 
#[cargo_test] fn https_something_happens() { let server = TcpListener::bind("127.0.0.1:0").unwrap(); let addr = server.local_addr().unwrap(); let t = thread::spawn(move || { let mut conn = server.accept().unwrap().0; drop(conn.write(b"1234")); drop(conn.shutdown(std::net::Shutdown::Write)); drop(conn.read(&mut [0; 16])); }); let p = project() .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] git = "https://127.0.0.1:{}/foo/bar" "#, addr.port() ), ) .file("src/main.rs", "") .file( ".cargo/config", "[net] retry = 0 ", ) .build(); p.cargo("build -v") .with_status(101) .with_stderr_contains(&format!( "[UPDATING] git repository `https://{addr}/foo/bar`", addr = addr )) .with_stderr_contains(&format!( "\ Caused by: {errmsg} ", errmsg = if cfg!(windows) { "[..]failed to send request: [..]" } else if cfg!(target_os = "macos") { // macOS is difficult to tests as some builds may use Security.framework, // while others may use OpenSSL. In that case, let's just not verify the error // message here. "[..]" } else { "[..]SSL error: [..]" } )) .run(); t.join().ok().unwrap(); } // It would sure be nice to have an SSH implementation in Rust! #[cargo_test] fn ssh_something_happens() { let server = TcpListener::bind("127.0.0.1:0").unwrap(); let addr = server.local_addr().unwrap(); let t = thread::spawn(move || { drop(server.accept().unwrap()); }); let p = project() .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] git = "ssh://127.0.0.1:{}/foo/bar" "#, addr.port() ), ) .file("src/main.rs", "") .build(); p.cargo("build -v") .with_status(101) .with_stderr_contains(&format!( "[UPDATING] git repository `ssh://{addr}/foo/bar`", addr = addr )) .with_stderr_contains( "\ Caused by: [..]failed to start SSH session: Failed getting banner[..] 
", ) .run(); t.join().ok().unwrap(); } #[cargo_test] fn net_err_suggests_fetch_with_cli() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [dependencies] foo = { git = "ssh://needs-proxy.invalid/git" } "#, ) .file("src/lib.rs", "") .build(); p.cargo("build -v") .with_status(101) .with_stderr( "\ [UPDATING] git repository `ssh://needs-proxy.invalid/git` warning: spurious network error[..] warning: spurious network error[..] [ERROR] failed to get `foo` as a dependency of package `foo v0.0.0 [..]` Caused by: failed to load source for dependency `foo` Caused by: Unable to update ssh://needs-proxy.invalid/git Caused by: failed to clone into: [..] Caused by: network failure seems to have happened if a proxy or similar is necessary `net.git-fetch-with-cli` may help here https://[..] Caused by: failed to resolve address for needs-proxy.invalid[..] ", ) .run(); p.change_file( ".cargo/config", " [net] git-fetch-with-cli = true ", ); p.cargo("build -v") .with_status(101) .with_stderr_contains("[..]Unable to update[..]") .with_stderr_does_not_contain("[..]try enabling `git-fetch-with-cli`[..]") .run(); } #[cargo_test] fn instead_of_url_printed() { let (addr, t, _connections) = setup_failed_auth_test(); let config = paths::home().join(".gitconfig"); let mut config = git2::Config::open(&config).unwrap(); config .set_str( &format!("url.http://{}/.insteadOf", addr), "https://foo.bar/", ) .unwrap(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] git = "https://foo.bar/foo/bar" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr(&format!( "\ [UPDATING] git repository `https://foo.bar/foo/bar` [ERROR] failed to get `bar` as a dependency of package `foo [..]` Caused by: failed to load source for dependency `bar` Caused by: Unable to update https://foo.bar/foo/bar Caused by: failed to clone into: [..] 
Caused by: failed to authenticate when downloading repository: http://{addr}/foo/bar * attempted to find username/password via `credential.helper`, but maybe the found credentials were incorrect if the git CLI succeeds then `net.git-fetch-with-cli` may help here https://[..] Caused by: [..] ", addr = addr )) .run(); t.join().ok().unwrap(); } cargo-0.66.0/tests/testsuite/git_gc.rs000066400000000000000000000054241432416201200176730ustar00rootroot00000000000000//! Tests for git garbage collection. use std::env; use std::ffi::OsStr; use std::path::PathBuf; use cargo_test_support::git; use cargo_test_support::paths; use cargo_test_support::project; use cargo_test_support::registry::Package; use url::Url; fn find_index() -> PathBuf { let dir = paths::home().join(".cargo/registry/index"); dir.read_dir().unwrap().next().unwrap().unwrap().path() } fn run_test(path_env: Option<&OsStr>) { const N: usize = 50; let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" "#, ) .file("src/lib.rs", "") .build(); Package::new("bar", "0.1.0").publish(); foo.cargo("build").run(); let index = find_index(); let path = paths::home().join("tmp"); let url = Url::from_file_path(&path).unwrap().to_string(); let repo = git2::Repository::init(&path).unwrap(); let index = git2::Repository::open(&index).unwrap(); let mut cfg = repo.config().unwrap(); cfg.set_str("user.email", "foo@bar.com").unwrap(); cfg.set_str("user.name", "Foo Bar").unwrap(); let mut cfg = index.config().unwrap(); cfg.set_str("user.email", "foo@bar.com").unwrap(); cfg.set_str("user.name", "Foo Bar").unwrap(); for _ in 0..N { git::commit(&repo); index .remote_anonymous(&url) .unwrap() .fetch(&["refs/heads/master:refs/remotes/foo/master"], None, None) .unwrap(); } drop((repo, index)); Package::new("bar", "0.1.1").publish(); let before = find_index() .join(".git/objects/pack") .read_dir() .unwrap() .count(); assert!(before > N); let mut cmd = foo.cargo("update"); 
cmd.env("__CARGO_PACKFILE_LIMIT", "10"); if let Some(path) = path_env { cmd.env("PATH", path); } cmd.env("CARGO_LOG", "trace"); cmd.run(); let after = find_index() .join(".git/objects/pack") .read_dir() .unwrap() .count(); assert!( after < before, "packfiles before: {}\n\ packfiles after: {}", before, after ); } #[cargo_test(requires_git)] fn use_git_gc() { run_test(None); } #[cargo_test] fn avoid_using_git() { let path = env::var_os("PATH").unwrap_or_default(); let mut paths = env::split_paths(&path).collect::>(); let idx = paths .iter() .position(|p| p.join("git").exists() || p.join("git.exe").exists()); match idx { Some(i) => { paths.remove(i); } None => return, } run_test(Some(&env::join_paths(&paths).unwrap())); } cargo-0.66.0/tests/testsuite/glob_targets.rs000066400000000000000000000333471432416201200211200ustar00rootroot00000000000000//! Tests for target filter flags with glob patterns. use cargo_test_support::{project, Project}; #[cargo_test] fn build_example() { full_project() .cargo("build -v --example 'ex*1'") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name example1 [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn build_bin() { full_project() .cargo("build -v --bin 'bi*1'") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name bin1 [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn build_bench() { full_project() .cargo("build -v --bench 'be*1'") .with_stderr_contains("[RUNNING] `rustc --crate-name bench1 [..]`") .with_stderr_contains("[RUNNING] `rustc --crate-name bin2 [..]`") .with_stderr_contains("[RUNNING] `rustc --crate-name bin1 [..]`") .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]`") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name [..]` [RUNNING] `rustc --crate-name [..]` [RUNNING] `rustc --crate-name [..]` [RUNNING] `rustc --crate-name [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn build_test() { full_project() .cargo("build -v --test 'te*1'") .with_stderr_contains("[RUNNING] `rustc --crate-name test1 [..]`") .with_stderr_contains("[RUNNING] `rustc --crate-name bin2 [..]`") .with_stderr_contains("[RUNNING] `rustc --crate-name bin1 [..]`") .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]`") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name [..]` [RUNNING] `rustc --crate-name [..]` [RUNNING] `rustc --crate-name [..]` [RUNNING] `rustc --crate-name [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn check_example() { full_project() .cargo("check -v --example 'ex*1'") .with_stderr( "\ [CHECKING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name example1 [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn check_bin() { full_project() .cargo("check -v --bin 'bi*1'") .with_stderr( "\ [CHECKING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name bin1 [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn check_bench() { full_project() .cargo("check -v --bench 'be*1'") .with_stderr( "\ [CHECKING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name bench1 [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn check_test() { full_project() .cargo("check -v --test 'te*1'") .with_stderr( "\ [CHECKING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name test1 [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn doc_bin() { full_project() .cargo("doc -v --bin 'bi*1'") .with_stderr( "\ [DOCUMENTING] foo v0.0.1 ([CWD]) [RUNNING] `rustdoc --crate-type bin --crate-name bin1 [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn fix_example() { full_project() .cargo("fix -v --example 'ex*1' --allow-no-vcs") .with_stderr( "\ [CHECKING] foo v0.0.1 ([CWD]) [RUNNING] `[..] rustc --crate-name example1 [..]` [FIXING] examples/example1.rs [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn fix_bin() { full_project() .cargo("fix -v --bin 'bi*1' --allow-no-vcs") .with_stderr( "\ [CHECKING] foo v0.0.1 ([CWD]) [RUNNING] `[..] rustc --crate-name bin1 [..]` [FIXING] src/bin/bin1.rs [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn fix_bench() { full_project() .cargo("fix -v --bench 'be*1' --allow-no-vcs") .with_stderr( "\ [CHECKING] foo v0.0.1 ([CWD]) [RUNNING] `[..] rustc --crate-name bench1 [..]` [FIXING] benches/bench1.rs [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn fix_test() { full_project() .cargo("fix -v --test 'te*1' --allow-no-vcs") .with_stderr( "\ [CHECKING] foo v0.0.1 ([CWD]) [RUNNING] `[..] rustc --crate-name test1 [..]` [FIXING] tests/test1.rs [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn run_example_and_bin() { let p = full_project(); p.cargo("run -v --bin 'bi*1'") .with_status(101) .with_stderr("[ERROR] `cargo run` does not support glob patterns on target selection") .run(); p.cargo("run -v --example 'ex*1'") .with_status(101) .with_stderr("[ERROR] `cargo run` does not support glob patterns on target selection") .run(); } #[cargo_test] fn test_example() { full_project() .cargo("test -v --example 'ex*1'") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name example1 [..]` [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..]example1[..] ", ) .run(); } #[cargo_test] fn test_bin() { full_project() .cargo("test -v --bin 'bi*1'") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name bin1 [..]` [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..]bin1[..] ", ) .run(); } #[cargo_test] fn test_bench() { full_project() .cargo("test -v --bench 'be*1'") .with_stderr_contains("[RUNNING] `rustc --crate-name bench1 [..]`") .with_stderr_contains("[RUNNING] `rustc --crate-name bin2 [..]`") .with_stderr_contains("[RUNNING] `rustc --crate-name bin1 [..]`") .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]`") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name [..]` [RUNNING] `rustc --crate-name [..]` [RUNNING] `rustc --crate-name [..]` [RUNNING] `rustc --crate-name [..]` [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..]bench1[..] 
", ) .run(); } #[cargo_test] fn test_test() { full_project() .cargo("test -v --test 'te*1'") .with_stderr_contains("[RUNNING] `rustc --crate-name test1 [..]`") .with_stderr_contains("[RUNNING] `rustc --crate-name bin2 [..]`") .with_stderr_contains("[RUNNING] `rustc --crate-name bin1 [..]`") .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]`") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name [..]` [RUNNING] `rustc --crate-name [..]` [RUNNING] `rustc --crate-name [..]` [RUNNING] `rustc --crate-name [..]` [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..]test1[..] ", ) .run(); } #[cargo_test] fn bench_example() { full_project() .cargo("bench -v --example 'ex*1'") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name example1 [..]` [FINISHED] bench [optimized] target(s) in [..] [RUNNING] `[..]example1[..] --bench` ", ) .run(); } #[cargo_test] fn bench_bin() { full_project() .cargo("bench -v --bin 'bi*1'") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name bin1 [..]` [FINISHED] bench [optimized] target(s) in [..] [RUNNING] `[..]bin1[..] --bench` ", ) .run(); } #[cargo_test] fn bench_bench() { full_project() .cargo("bench -v --bench 'be*1'") .with_stderr_contains("[RUNNING] `rustc --crate-name bench1 [..]`") .with_stderr_contains("[RUNNING] `rustc --crate-name bin2 [..]`") .with_stderr_contains("[RUNNING] `rustc --crate-name bin1 [..]`") .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]`") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name [..]` [RUNNING] `rustc --crate-name [..]` [RUNNING] `rustc --crate-name [..]` [RUNNING] `rustc --crate-name [..]` [FINISHED] bench [optimized] target(s) in [..] [RUNNING] `[..]bench1[..] 
--bench` ", ) .run(); } #[cargo_test] fn bench_test() { full_project() .cargo("bench -v --test 'te*1'") .with_stderr_contains("[RUNNING] `rustc --crate-name test1 [..]`") .with_stderr_contains("[RUNNING] `rustc --crate-name bin2 [..]`") .with_stderr_contains("[RUNNING] `rustc --crate-name bin1 [..]`") .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]`") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name [..]` [RUNNING] `rustc --crate-name [..]` [RUNNING] `rustc --crate-name [..]` [RUNNING] `rustc --crate-name [..]` [FINISHED] bench [optimized] target(s) in [..] [RUNNING] `[..]test1[..] --bench` ", ) .run(); } #[cargo_test] fn install_example() { full_project() .cargo("install --path . --example 'ex*1'") .with_stderr( "\ [INSTALLING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] release [optimized] target(s) in [..] [INSTALLING] [..]/home/.cargo/bin/example1[EXE] [INSTALLED] package `foo v0.0.1 ([CWD])` (executable `example1[EXE]`) [WARNING] be sure to add [..] ", ) .run(); } #[cargo_test] fn install_bin() { full_project() .cargo("install --path . --bin 'bi*1'") .with_stderr( "\ [INSTALLING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] release [optimized] target(s) in [..] [INSTALLING] [..]/home/.cargo/bin/bin1[EXE] [INSTALLED] package `foo v0.0.1 ([CWD])` (executable `bin1[EXE]`) [WARNING] be sure to add [..] ", ) .run(); } #[cargo_test] fn rustdoc_example() { full_project() .cargo("rustdoc -v --example 'ex*1'") .with_stderr( "\ [DOCUMENTING] foo v0.0.1 ([CWD]) [RUNNING] `rustdoc --crate-type bin --crate-name example1 [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn rustdoc_bin() { full_project() .cargo("rustdoc -v --bin 'bi*1'") .with_stderr( "\ [DOCUMENTING] foo v0.0.1 ([CWD]) [RUNNING] `rustdoc --crate-type bin --crate-name bin1 [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn rustdoc_bench() { full_project() .cargo("rustdoc -v --bench 'be*1'") .with_stderr( "\ [DOCUMENTING] foo v0.0.1 ([CWD]) [RUNNING] `rustdoc --crate-type bin --crate-name bench1 [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn rustdoc_test() { full_project() .cargo("rustdoc -v --test 'te*1'") .with_stderr( "\ [DOCUMENTING] foo v0.0.1 ([CWD]) [RUNNING] `rustdoc --crate-type bin --crate-name test1 [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn rustc_example() { full_project() .cargo("rustc -v --example 'ex*1'") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name example1 [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn rustc_bin() { full_project() .cargo("rustc -v --bin 'bi*1'") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name bin1 [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn rustc_bench() { full_project() .cargo("rustc -v --bench 'be*1'") .with_stderr_contains("[RUNNING] `rustc --crate-name bench1 [..]`") .with_stderr_contains("[RUNNING] `rustc --crate-name bin2 [..]`") .with_stderr_contains("[RUNNING] `rustc --crate-name bin1 [..]`") .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]`") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name [..]` [RUNNING] `rustc --crate-name [..]` [RUNNING] `rustc --crate-name [..]` [RUNNING] `rustc --crate-name [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn rustc_test() { full_project() .cargo("rustc -v --test 'te*1'") .with_stderr_contains("[RUNNING] `rustc --crate-name test1 [..]`") .with_stderr_contains("[RUNNING] `rustc --crate-name bin2 [..]`") .with_stderr_contains("[RUNNING] `rustc --crate-name bin1 [..]`") .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]`") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name [..]` [RUNNING] `rustc --crate-name [..]` [RUNNING] `rustc --crate-name [..]` [RUNNING] `rustc --crate-name [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } fn full_project() -> Project { project() .file("examples/example1.rs", "fn main() { }") .file("examples/example2.rs", "fn main() { }") .file("benches/bench1.rs", "") .file("benches/bench2.rs", "") .file("tests/test1.rs", "") .file("tests/test2.rs", "") .file("src/main.rs", "fn main() { }") .file("src/bin/bin1.rs", "fn main() { }") .file("src/bin/bin2.rs", "fn main() { }") .build() } cargo-0.66.0/tests/testsuite/help.rs000066400000000000000000000147661432416201200174000ustar00rootroot00000000000000//! Tests for cargo's help output. use cargo_test_support::registry::Package; use cargo_test_support::{basic_manifest, cargo_exe, cargo_process, paths, process, project}; use std::fs; use std::path::Path; use std::str::from_utf8; #[cargo_test] fn help() { cargo_process("").run(); cargo_process("help").run(); cargo_process("-h").run(); cargo_process("help build").run(); cargo_process("build -h").run(); cargo_process("help help").run(); // Ensure that help output goes to stdout, not stderr. cargo_process("search --help").with_stderr("").run(); cargo_process("search --help") .with_stdout_contains("[..] --frozen [..]") .run(); } #[cargo_test] fn help_external_subcommand() { // Check that `help external-subcommand` forwards the --help flag to the // given subcommand. 
Package::new("cargo-fake-help", "1.0.0") .file( "src/main.rs", r#" fn main() { if ::std::env::args().nth(2) == Some(String::from("--help")) { println!("fancy help output"); } } "#, ) .publish(); cargo_process("install cargo-fake-help").run(); cargo_process("help fake-help") .with_stdout("fancy help output\n") .run(); } #[cargo_test] fn z_flags_help() { // Test that the output of `cargo -Z help` shows a different help screen with // all the `-Z` flags. cargo_process("-Z help") .with_stdout_contains( " -Z allow-features[..]-- Allow *only* the listed unstable features", ) .run(); } fn help_with_man(display_command: &str) { // Build a "man" process that just echoes the contents. let p = project() .at(display_command) .file("Cargo.toml", &basic_manifest(display_command, "1.0.0")) .file( "src/main.rs", &r#" fn main() { eprintln!("custom __COMMAND__"); let path = std::env::args().skip(1).next().unwrap(); let mut f = std::fs::File::open(path).unwrap(); std::io::copy(&mut f, &mut std::io::stdout()).unwrap(); } "# .replace("__COMMAND__", display_command), ) .build(); p.cargo("build").run(); help_with_man_and_path(display_command, "build", "build", &p.target_debug_dir()); } fn help_with_man_and_path( display_command: &str, subcommand: &str, actual_subcommand: &str, path: &Path, ) { let contents = if display_command == "man" { fs::read_to_string(format!("src/etc/man/cargo-{}.1", actual_subcommand)).unwrap() } else { fs::read_to_string(format!( "src/doc/man/generated_txt/cargo-{}.txt", actual_subcommand )) .unwrap() }; let output = process(&cargo_exe()) .arg("help") .arg(subcommand) .env("PATH", path) .exec_with_output() .unwrap(); assert!(output.status.success()); let stderr = from_utf8(&output.stderr).unwrap(); if display_command.is_empty() { assert_eq!(stderr, ""); } else { assert_eq!(stderr, format!("custom {}\n", display_command)); } let stdout = from_utf8(&output.stdout).unwrap(); assert_eq!(stdout, contents); } fn help_with_stdout_and_path(subcommand: &str, path: &Path) 
-> String { let output = process(&cargo_exe()) .arg("help") .arg(subcommand) .env("PATH", path) .exec_with_output() .unwrap(); assert!(output.status.success()); let stderr = from_utf8(&output.stderr).unwrap(); assert_eq!(stderr, ""); let stdout = from_utf8(&output.stdout).unwrap(); stdout.to_string() } #[cargo_test] fn help_man() { // Checks that `help command` displays the man page using the given command. help_with_man("man"); help_with_man("less"); help_with_man("more"); // Check with no commands in PATH. help_with_man_and_path("", "build", "build", Path::new("")); } #[cargo_test] fn help_alias() { // Check that `help some_alias` will resolve. help_with_man_and_path("", "b", "build", Path::new("")); let config = paths::root().join(".cargo/config"); fs::create_dir_all(config.parent().unwrap()).unwrap(); fs::write( config, r#" [alias] empty-alias = "" simple-alias = "build" complex-alias = ["build", "--release"] "#, ) .unwrap(); // The `empty-alias` returns an error. cargo_process("help empty-alias") .env("PATH", Path::new("")) .with_stderr_contains("[..]The subcommand 'empty-alias' wasn't recognized[..]") .run_expect_error(); // Because `simple-alias` aliases a subcommand with no arguments, help shows the manpage. help_with_man_and_path("", "simple-alias", "build", Path::new("")); // Help for `complex-alias` displays the full alias command. 
let out = help_with_stdout_and_path("complex-alias", Path::new("")); assert_eq!(out, "`complex-alias` is aliased to `build --release`\n"); } #[cargo_test] fn alias_z_flag_help() { cargo_process("build -Z help") .with_stdout_contains( " -Z allow-features[..]-- Allow *only* the listed unstable features", ) .run(); cargo_process("run -Z help") .with_stdout_contains( " -Z allow-features[..]-- Allow *only* the listed unstable features", ) .run(); cargo_process("check -Z help") .with_stdout_contains( " -Z allow-features[..]-- Allow *only* the listed unstable features", ) .run(); cargo_process("test -Z help") .with_stdout_contains( " -Z allow-features[..]-- Allow *only* the listed unstable features", ) .run(); cargo_process("b -Z help") .with_stdout_contains( " -Z allow-features[..]-- Allow *only* the listed unstable features", ) .run(); cargo_process("r -Z help") .with_stdout_contains( " -Z allow-features[..]-- Allow *only* the listed unstable features", ) .run(); cargo_process("c -Z help") .with_stdout_contains( " -Z allow-features[..]-- Allow *only* the listed unstable features", ) .run(); cargo_process("t -Z help") .with_stdout_contains( " -Z allow-features[..]-- Allow *only* the listed unstable features", ) .run(); } cargo-0.66.0/tests/testsuite/inheritable_workspace_fields.rs000066400000000000000000001007621432416201200243320ustar00rootroot00000000000000//! 
Tests for inheriting Cargo.toml fields with field.workspace = true use cargo_test_support::registry::{Dependency, Package}; use cargo_test_support::{ basic_lib_manifest, basic_manifest, git, path2url, paths, project, publish, registry, }; #[cargo_test] fn permit_additional_workspace_fields() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar"] [workspace.package] version = "1.2.3" authors = ["Rustaceans"] description = "This is a crate" documentation = "https://www.rust-lang.org/learn" readme = "README.md" homepage = "https://www.rust-lang.org" repository = "https://github.com/example/example" license = "MIT" license-file = "LICENSE" keywords = ["cli"] categories = ["development-tools"] publish = false edition = "2018" rust-version = "1.60" exclude = ["foo.txt"] include = ["bar.txt", "**/*.rs", "Cargo.toml", "LICENSE", "README.md"] [workspace.package.badges] gitlab = { repository = "https://gitlab.com/rust-lang/rust", branch = "master" } [workspace.dependencies] dep = "0.1" "#, ) .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] workspace = ".." "#, ) .file("bar/src/main.rs", "fn main() {}") .build(); p.cargo("build") // Should not warn about unused fields. .with_stderr( "\ [COMPILING] bar v0.1.0 ([CWD]/bar) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("check").run(); let lockfile = p.read_lockfile(); assert!(!lockfile.contains("dep")); } #[cargo_test] fn deny_optional_dependencies() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar"] [workspace.dependencies] dep1 = { version = "0.1", optional = true } "#, ) .file("src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] workspace = ".." 
"#, ) .file("bar/src/main.rs", "fn main() {}") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]foo/Cargo.toml` Caused by: dep1 is optional, but workspace dependencies cannot be optional ", ) .run(); } #[cargo_test] fn inherit_own_workspace_fields() { registry::init(); let p = project().build(); let _ = git::repo(&paths::root().join("foo")) .file( "Cargo.toml", r#" badges.workspace = true [package] name = "foo" version.workspace = true authors.workspace = true description.workspace = true documentation.workspace = true homepage.workspace = true repository.workspace = true license.workspace = true keywords.workspace = true categories.workspace = true publish.workspace = true edition.workspace = true rust-version.workspace = true exclude.workspace = true include.workspace = true [workspace] members = [] [workspace.package] version = "1.2.3" authors = ["Rustaceans"] description = "This is a crate" documentation = "https://www.rust-lang.org/learn" homepage = "https://www.rust-lang.org" repository = "https://github.com/example/example" license = "MIT" keywords = ["cli"] categories = ["development-tools"] publish = true edition = "2018" rust-version = "1.60" exclude = ["foo.txt"] include = ["bar.txt", "**/*.rs", "Cargo.toml"] [workspace.package.badges] gitlab = { repository = "https://gitlab.com/rust-lang/rust", branch = "master" } "#, ) .file("src/main.rs", "fn main() {}") .file("foo.txt", "") // should be ignored when packaging .file("bar.txt", "") // should be included when packaging .build(); p.cargo("publish --token sekrit").run(); publish::validate_upload_with_contents( r#" { "authors": ["Rustaceans"], "badges": { "gitlab": { "branch": "master", "repository": "https://gitlab.com/rust-lang/rust" } }, "categories": ["development-tools"], "deps": [], "description": "This is a crate", "documentation": "https://www.rust-lang.org/learn", "features": {}, "homepage": "https://www.rust-lang.org", "keywords": ["cli"], 
"license": "MIT", "license_file": null, "links": null, "name": "foo", "readme": null, "readme_file": null, "repository": "https://github.com/example/example", "vers": "1.2.3" } "#, "foo-1.2.3.crate", &[ "Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs", ".cargo_vcs_info.json", "bar.txt", ], &[( "Cargo.toml", &format!( r#"{} [package] edition = "2018" rust-version = "1.60" name = "foo" version = "1.2.3" authors = ["Rustaceans"] exclude = ["foo.txt"] include = [ "bar.txt", "**/*.rs", "Cargo.toml", ] publish = true description = "This is a crate" homepage = "https://www.rust-lang.org" documentation = "https://www.rust-lang.org/learn" keywords = ["cli"] categories = ["development-tools"] license = "MIT" repository = "https://github.com/example/example" [badges.gitlab] branch = "master" repository = "https://gitlab.com/rust-lang/rust" "#, cargo::core::package::MANIFEST_PREAMBLE ), )], ); } #[cargo_test] fn inherit_own_dependencies() { let p = project() .file( "Cargo.toml", r#" [project] name = "bar" version = "0.2.0" authors = [] [dependencies] dep.workspace = true [build-dependencies] dep-build.workspace = true [dev-dependencies] dep-dev.workspace = true [workspace] members = [] [workspace.dependencies] dep = "0.1" dep-build = "0.8" dep-dev = "0.5.2" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("dep", "0.1.2").publish(); Package::new("dep-build", "0.8.2").publish(); Package::new("dep-dev", "0.5.2").publish(); p.cargo("build") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... [DOWNLOADED] dep-build v0.8.2 ([..]) [DOWNLOADED] dep v0.1.2 ([..]) [COMPILING] dep v0.1.2 [COMPILING] bar v0.2.0 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); p.cargo("check").run(); let lockfile = p.read_lockfile(); assert!(lockfile.contains("dep")); assert!(lockfile.contains("dep-dev")); assert!(lockfile.contains("dep-build")); p.cargo("publish --token sekrit").run(); publish::validate_upload_with_contents( r#" { "authors": [], "badges": {}, "categories": [], "deps": [ { "default_features": true, "features": [], "kind": "normal", "name": "dep", "optional": false, "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^0.1" }, { "default_features": true, "features": [], "kind": "dev", "name": "dep-dev", "optional": false, "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^0.5.2" }, { "default_features": true, "features": [], "kind": "build", "name": "dep-build", "optional": false, "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^0.8" } ], "description": null, "documentation": null, "features": {}, "homepage": null, "keywords": [], "license": null, "license_file": null, "links": null, "name": "bar", "readme": null, "readme_file": null, "repository": null, "vers": "0.2.0" } "#, "bar-0.2.0.crate", &["Cargo.toml", "Cargo.toml.orig", "Cargo.lock", "src/main.rs"], &[( "Cargo.toml", &format!( r#"{} [package] name = "bar" version = "0.2.0" authors = [] [dependencies.dep] version = "0.1" [dev-dependencies.dep-dev] version = "0.5.2" [build-dependencies.dep-build] version = "0.8" "#, cargo::core::package::MANIFEST_PREAMBLE ), )], ); } #[cargo_test] fn inherit_own_detailed_dependencies() { let p = project() .file( "Cargo.toml", r#" [project] name = "bar" version = "0.2.0" authors = [] [dependencies] dep.workspace = true [workspace] members = [] [workspace.dependencies] dep = { version = "0.1.2", features = ["testing"] } "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("dep", "0.1.2") .feature("testing", &vec![]) .publish(); p.cargo("build") .with_stderr( "\ [UPDATING] `[..]` index 
[DOWNLOADING] crates ... [DOWNLOADED] dep v0.1.2 ([..]) [COMPILING] dep v0.1.2 [COMPILING] bar v0.2.0 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("check").run(); let lockfile = p.read_lockfile(); assert!(lockfile.contains("dep")); p.cargo("publish --token sekrit").run(); publish::validate_upload_with_contents( r#" { "authors": [], "badges": {}, "categories": [], "deps": [ { "default_features": true, "features": ["testing"], "kind": "normal", "name": "dep", "optional": false, "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^0.1.2" } ], "description": null, "documentation": null, "features": {}, "homepage": null, "keywords": [], "license": null, "license_file": null, "links": null, "name": "bar", "readme": null, "readme_file": null, "repository": null, "vers": "0.2.0" } "#, "bar-0.2.0.crate", &["Cargo.toml", "Cargo.toml.orig", "Cargo.lock", "src/main.rs"], &[( "Cargo.toml", &format!( r#"{} [package] name = "bar" version = "0.2.0" authors = [] [dependencies.dep] version = "0.1.2" features = ["testing"] "#, cargo::core::package::MANIFEST_PREAMBLE ), )], ); } #[cargo_test] fn inherit_from_own_undefined_field() { registry::init(); let p = project().build(); let _ = git::repo(&paths::root().join("foo")) .file( "Cargo.toml", r#" [package] name = "foo" version = "1.2.5" authors = ["rustaceans"] description.workspace = true [workspace] members = [] "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[CWD]/Cargo.toml` Caused by: error inheriting `description` from workspace root manifest's `workspace.package.description` Caused by: `workspace.package.description` was not defined ", ) .run(); } #[cargo_test] fn inherited_dependencies_union_features() { Package::new("dep", "0.1.0") .feature("fancy", &["fancy_dep"]) .feature("dancy", &["dancy_dep"]) .add_dep(Dependency::new("fancy_dep", 
"0.2").optional(true)) .add_dep(Dependency::new("dancy_dep", "0.6").optional(true)) .file("src/lib.rs", "") .publish(); Package::new("fancy_dep", "0.2.4").publish(); Package::new("dancy_dep", "0.6.8").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "bar" version = "0.2.0" authors = [] [dependencies] dep = { workspace = true, features = ["dancy"] } [workspace] members = [] [workspace.dependencies] dep = { version = "0.1", features = ["fancy"] } "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... [DOWNLOADED] fancy_dep v0.2.4 ([..]) [DOWNLOADED] dep v0.1.0 ([..]) [DOWNLOADED] dancy_dep v0.6.8 ([..]) [COMPILING] [..] [COMPILING] [..] [COMPILING] dep v0.1.0 [COMPILING] bar v0.2.0 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); let lockfile = p.read_lockfile(); assert!(lockfile.contains("dep")); assert!(lockfile.contains("fancy_dep")); assert!(lockfile.contains("dancy_dep")); } #[cargo_test] fn inherit_workspace_fields() { registry::init(); let p = project().build(); let _ = git::repo(&paths::root().join("foo")) .file( "Cargo.toml", r#" [workspace] members = ["bar"] [workspace.package] version = "1.2.3" authors = ["Rustaceans"] description = "This is a crate" documentation = "https://www.rust-lang.org/learn" readme = "README.md" homepage = "https://www.rust-lang.org" repository = "https://github.com/example/example" license = "MIT" license-file = "LICENSE" keywords = ["cli"] categories = ["development-tools"] publish = true edition = "2018" rust-version = "1.60" exclude = ["foo.txt"] include = ["bar.txt", "**/*.rs", "Cargo.toml", "LICENSE", "README.md"] [workspace.package.badges] gitlab = { repository = "https://gitlab.com/rust-lang/rust", branch = "master" } "#, ) .file("src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" badges.workspace = true [package] name = "bar" workspace = ".." 
version.workspace = true authors.workspace = true description.workspace = true documentation.workspace = true readme.workspace = true homepage.workspace = true repository.workspace = true license.workspace = true license-file.workspace = true keywords.workspace = true categories.workspace = true publish.workspace = true edition.workspace = true rust-version.workspace = true exclude.workspace = true include.workspace = true "#, ) .file("LICENSE", "license") .file("README.md", "README.md") .file("bar/src/main.rs", "fn main() {}") .file("bar/foo.txt", "") // should be ignored when packaging .file("bar/bar.txt", "") // should be included when packaging .build(); p.cargo("publish --token sekrit").cwd("bar").run(); publish::validate_upload_with_contents( r#" { "authors": ["Rustaceans"], "badges": { "gitlab": { "branch": "master", "repository": "https://gitlab.com/rust-lang/rust" } }, "categories": ["development-tools"], "deps": [], "description": "This is a crate", "documentation": "https://www.rust-lang.org/learn", "features": {}, "homepage": "https://www.rust-lang.org", "keywords": ["cli"], "license": "MIT", "license_file": "../LICENSE", "links": null, "name": "bar", "readme": "README.md", "readme_file": "../README.md", "repository": "https://github.com/example/example", "vers": "1.2.3" } "#, "bar-1.2.3.crate", &[ "Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs", "README.md", "LICENSE", ".cargo_vcs_info.json", "bar.txt", ], &[( "Cargo.toml", &format!( r#"{} [package] edition = "2018" rust-version = "1.60" name = "bar" version = "1.2.3" authors = ["Rustaceans"] exclude = ["foo.txt"] include = [ "bar.txt", "**/*.rs", "Cargo.toml", "LICENSE", "README.md", ] publish = true description = "This is a crate" homepage = "https://www.rust-lang.org" documentation = "https://www.rust-lang.org/learn" readme = "README.md" keywords = ["cli"] categories = ["development-tools"] license = "MIT" license-file = "LICENSE" repository = "https://github.com/example/example" 
[badges.gitlab] branch = "master" repository = "https://gitlab.com/rust-lang/rust" "#, cargo::core::package::MANIFEST_PREAMBLE ), )], ); } #[cargo_test] fn inherit_dependencies() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar"] [workspace.dependencies] dep = "0.1" dep-build = "0.8" dep-dev = "0.5.2" "#, ) .file( "bar/Cargo.toml", r#" [project] workspace = ".." name = "bar" version = "0.2.0" authors = [] [dependencies] dep.workspace = true [build-dependencies] dep-build.workspace = true [dev-dependencies] dep-dev.workspace = true "#, ) .file("bar/src/main.rs", "fn main() {}") .build(); Package::new("dep", "0.1.2").publish(); Package::new("dep-build", "0.8.2").publish(); Package::new("dep-dev", "0.5.2").publish(); p.cargo("build") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... [DOWNLOADED] dep-build v0.8.2 ([..]) [DOWNLOADED] dep v0.1.2 ([..]) [COMPILING] dep v0.1.2 [COMPILING] bar v0.2.0 ([CWD]/bar) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); p.cargo("check").run(); let lockfile = p.read_lockfile(); assert!(lockfile.contains("dep")); assert!(lockfile.contains("dep-dev")); assert!(lockfile.contains("dep-build")); p.cargo("publish --token sekrit").cwd("bar").run(); publish::validate_upload_with_contents( r#" { "authors": [], "badges": {}, "categories": [], "deps": [ { "default_features": true, "features": [], "kind": "normal", "name": "dep", "optional": false, "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^0.1" }, { "default_features": true, "features": [], "kind": "dev", "name": "dep-dev", "optional": false, "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^0.5.2" }, { "default_features": true, "features": [], "kind": "build", "name": "dep-build", "optional": false, "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^0.8" } ], "description": null, "documentation": null, "features": {}, "homepage": null, "keywords": [], "license": null, "license_file": null, "links": null, "name": "bar", "readme": null, "readme_file": null, "repository": null, "vers": "0.2.0" } "#, "bar-0.2.0.crate", &["Cargo.toml", "Cargo.toml.orig", "Cargo.lock", "src/main.rs"], &[( "Cargo.toml", &format!( r#"{} [package] name = "bar" version = "0.2.0" authors = [] [dependencies.dep] version = "0.1" [dev-dependencies.dep-dev] version = "0.5.2" [build-dependencies.dep-build] version = "0.8" "#, cargo::core::package::MANIFEST_PREAMBLE ), )], ); } #[cargo_test] fn inherit_target_dependencies() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar"] [workspace.dependencies] dep = "0.1" "#, ) .file( "bar/Cargo.toml", r#" [project] workspace = ".." 
name = "bar" version = "0.2.0" authors = [] [target.'cfg(unix)'.dependencies] dep.workspace = true [target.'cfg(windows)'.dependencies] dep.workspace = true "#, ) .file("bar/src/main.rs", "fn main() {}") .build(); Package::new("dep", "0.1.2").publish(); p.cargo("build") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... [DOWNLOADED] dep v0.1.2 ([..]) [COMPILING] dep v0.1.2 [COMPILING] bar v0.2.0 ([CWD]/bar) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); let lockfile = p.read_lockfile(); assert!(lockfile.contains("dep")); } #[cargo_test] fn inherit_dependency_override_optional() { Package::new("dep", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar"] [workspace.dependencies] dep = "0.1.0" "#, ) .file( "bar/Cargo.toml", r#" [project] workspace = ".." name = "bar" version = "0.2.0" authors = [] [dependencies] dep = { workspace = true, optional = true } "#, ) .file("bar/src/main.rs", "fn main() {}") .build(); p.cargo("build") .with_stderr( "\ [UPDATING] `[..]` index [COMPILING] bar v0.2.0 ([CWD]/bar) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn inherit_dependency_features() { Package::new("dep", "0.1.0") .feature("fancy", &["fancy_dep"]) .add_dep(Dependency::new("fancy_dep", "0.2").optional(true)) .file("src/lib.rs", "") .publish(); Package::new("fancy_dep", "0.2.4").publish(); Package::new("dancy_dep", "0.6.8").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "bar" version = "0.2.0" authors = [] [dependencies] dep = { workspace = true, features = ["fancy"] } [workspace] members = [] [workspace.dependencies] dep = "0.1" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... 
[DOWNLOADED] fancy_dep v0.2.4 ([..]) [DOWNLOADED] dep v0.1.0 ([..]) [COMPILING] fancy_dep v0.2.4 [COMPILING] dep v0.1.0 [COMPILING] bar v0.2.0 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); let lockfile = p.read_lockfile(); assert!(lockfile.contains("dep")); assert!(lockfile.contains("fancy_dep")); } #[cargo_test] fn inherit_detailed_dependencies() { let git_project = git::new("detailed", |project| { project .file("Cargo.toml", &basic_lib_manifest("detailed")) .file( "src/detailed.rs", r#" pub fn hello() -> &'static str { "hello world" } "#, ) }); // Make a new branch based on the current HEAD commit let repo = git2::Repository::open(&git_project.root()).unwrap(); let head = repo.head().unwrap().target().unwrap(); let head = repo.find_commit(head).unwrap(); repo.branch("branchy", &head, true).unwrap(); let p = project() .file( "Cargo.toml", &format!( r#" [workspace] members = ["bar"] [workspace.dependencies] detailed = {{ git = '{}', branch = "branchy" }} "#, git_project.url() ), ) .file( "bar/Cargo.toml", r#" [project] workspace = ".." name = "bar" version = "0.2.0" authors = [] [dependencies] detailed.workspace = true "#, ) .file("bar/src/main.rs", "fn main() {}") .build(); let git_root = git_project.root(); p.cargo("build") .with_stderr(&format!( "\ [UPDATING] git repository `{}`\n\ [COMPILING] detailed v0.5.0 ({}?branch=branchy#[..])\n\ [COMPILING] bar v0.2.0 ([CWD]/bar)\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", path2url(&git_root), path2url(&git_root), )) .run(); } #[cargo_test] fn inherit_path_dependencies() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar"] [workspace.dependencies] dep = { path = "dep" } "#, ) .file( "bar/Cargo.toml", r#" [project] workspace = ".." 
name = "bar" version = "0.2.0" authors = [] [dependencies] dep.workspace = true "#, ) .file("bar/src/main.rs", "fn main() {}") .file("dep/Cargo.toml", &basic_manifest("dep", "0.9.0")) .file("dep/src/lib.rs", "") .build(); p.cargo("build") .with_stderr( "\ [COMPILING] dep v0.9.0 ([CWD]/dep) [COMPILING] bar v0.2.0 ([CWD]/bar) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); let lockfile = p.read_lockfile(); assert!(lockfile.contains("dep")); } #[cargo_test] fn error_workspace_false() { registry::init(); let p = project().build(); let _ = git::repo(&paths::root().join("foo")) .file( "Cargo.toml", r#" [workspace] members = ["bar"] "#, ) .file("src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [package] name = "bar" workspace = ".." version = "1.2.3" authors = ["rustaceans"] description = { workspace = false } "#, ) .file("bar/src/main.rs", "fn main() {}") .build(); p.cargo("build") .cwd("bar") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[CWD]/Cargo.toml` Caused by: `workspace=false` is unsupported for `package.description` ", ) .run(); } #[cargo_test] fn error_workspace_dependency_looked_for_workspace_itself() { registry::init(); let p = project().build(); let _ = git::repo(&paths::root().join("foo")) .file( "Cargo.toml", r#" [package] name = "bar" version = "1.2.3" workspace = ".." 
[dependencies] dep.workspace = true [workspace] members = ["bar"] [workspace.dependencies] dep.workspace = true "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[CWD]/Cargo.toml` Caused by: dep was specified as `workspace.dependencies.dep.workspace = true`, but \ workspace dependencies cannot specify `workspace = true` ", ) .run(); } #[cargo_test] fn error_malformed_workspace_root() { registry::init(); let p = project().build(); let _ = git::repo(&paths::root().join("foo")) .file( "Cargo.toml", r#" [workspace] members = [invalid toml "#, ) .file("src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [package] name = "bar" workspace = ".." version = "1.2.3" authors = ["rustaceans"] "#, ) .file("bar/src/main.rs", "fn main() {}") .build(); p.cargo("build") .cwd("bar") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]/foo/Cargo.toml` Caused by: [..] Caused by: [..] | 3 | members = [invalid toml | ^ Unexpected `i` Expected newline or `#` ", ) .run(); } #[cargo_test] fn error_no_root_workspace() { registry::init(); let p = project().build(); let _ = git::repo(&paths::root().join("foo")) .file( "bar/Cargo.toml", r#" [package] name = "bar" workspace = ".." 
version = "1.2.3" authors = ["rustaceans"] description.workspace = true "#, ) .file("src/main.rs", "fn main() {}") .file("bar/src/main.rs", "fn main() {}") .build(); p.cargo("build") .cwd("bar") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]/Cargo.toml` Caused by: error inheriting `description` from workspace root manifest's `workspace.package.description` Caused by: root of a workspace inferred but wasn't a root: [..]/Cargo.toml ", ) .run(); } #[cargo_test] fn error_inherit_unspecified_dependency() { let p = project().build(); let _ = git::repo(&paths::root().join("foo")) .file( "Cargo.toml", r#" [workspace] members = ["bar"] "#, ) .file("src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [package] name = "bar" workspace = ".." version = "1.2.3" authors = ["rustaceans"] [dependencies] foo.workspace = true "#, ) .file("bar/src/main.rs", "fn main() {}") .build(); p.cargo("build") .cwd("bar") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[CWD]/Cargo.toml` Caused by: error reading `dependencies.foo` from workspace root manifest's `workspace.dependencies.foo` Caused by: `workspace.dependencies` was not defined ", ) .run(); } cargo-0.66.0/tests/testsuite/init/000077500000000000000000000000001432416201200170275ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/auto_git/000077500000000000000000000000001432416201200206425ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/auto_git/in000077700000000000000000000000001432416201200233202../empty_dirustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/auto_git/mod.rs000066400000000000000000000012431432416201200217670ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn auto_git() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); 
snapbox::cmd::Command::cargo_ui() .arg_line("init --lib") .current_dir(project_root) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), &project_root); assert!(project_root.join(".git").is_dir()); } cargo-0.66.0/tests/testsuite/init/auto_git/out/000077500000000000000000000000001432416201200214515ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/auto_git/out/.gitignore000066400000000000000000000000241432416201200234350ustar00rootroot00000000000000/target /Cargo.lock cargo-0.66.0/tests/testsuite/init/auto_git/out/Cargo.toml000066400000000000000000000002551432416201200234030ustar00rootroot00000000000000[package] name = "case" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] cargo-0.66.0/tests/testsuite/init/auto_git/out/src/000077500000000000000000000000001432416201200222405ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/auto_git/out/src/lib.rs000066400000000000000000000003301432416201200233500ustar00rootroot00000000000000pub fn add(left: usize, right: usize) -> usize { left + right } #[cfg(test)] mod tests { use super::*; #[test] fn it_works() { let result = add(2, 2); assert_eq!(result, 4); } } cargo-0.66.0/tests/testsuite/init/auto_git/stderr.log000066400000000000000000000000351432416201200226460ustar00rootroot00000000000000 Created library package 
cargo-0.66.0/tests/testsuite/init/auto_git/stdout.log000066400000000000000000000000001432416201200226550ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_explicit/000077500000000000000000000000001432416201200246005ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_explicit/in/000077500000000000000000000000001432416201200252065ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_explicit/in/src/000077500000000000000000000000001432416201200257755ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_explicit/in/src/main.rs000066400000000000000000000001061432416201200272640ustar00rootroot00000000000000fn main() { println!("Check that our file is not overwritten") } cargo-0.66.0/tests/testsuite/init/bin_already_exists_explicit/mod.rs000066400000000000000000000012171432416201200257260ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn bin_already_exists_explicit() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --bin --vcs none") .current_dir(project_root) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); } cargo-0.66.0/tests/testsuite/init/bin_already_exists_explicit/out/000077500000000000000000000000001432416201200254075ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_explicit/out/Cargo.toml000066400000000000000000000002551432416201200273410ustar00rootroot00000000000000[package] name = "case" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] 
cargo-0.66.0/tests/testsuite/init/bin_already_exists_explicit/out/src/000077500000000000000000000000001432416201200261765ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_explicit/out/src/main.rs000066400000000000000000000001061432416201200274650ustar00rootroot00000000000000fn main() { println!("Check that our file is not overwritten") } cargo-0.66.0/tests/testsuite/init/bin_already_exists_explicit/stderr.log000066400000000000000000000000521432416201200266030ustar00rootroot00000000000000 Created binary (application) package cargo-0.66.0/tests/testsuite/init/bin_already_exists_explicit/stdout.log000066400000000000000000000000001432416201200266130ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_explicit_nosrc/000077500000000000000000000000001432416201200260045ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_explicit_nosrc/in/000077500000000000000000000000001432416201200264125ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_explicit_nosrc/in/main.rs000066400000000000000000000001061432416201200277010ustar00rootroot00000000000000fn main() { println!("Check that our file is not overwritten") } cargo-0.66.0/tests/testsuite/init/bin_already_exists_explicit_nosrc/mod.rs000066400000000000000000000013061432416201200271310ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn bin_already_exists_explicit_nosrc() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --bin --vcs none") .current_dir(project_root) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); 
assert!(!project_root.join("src").is_dir()); } cargo-0.66.0/tests/testsuite/init/bin_already_exists_explicit_nosrc/out/000077500000000000000000000000001432416201200266135ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_explicit_nosrc/out/Cargo.toml000066400000000000000000000003251432416201200305430ustar00rootroot00000000000000[package] name = "case" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] [[bin]] name = "case" path = "main.rs" cargo-0.66.0/tests/testsuite/init/bin_already_exists_explicit_nosrc/out/main.rs000066400000000000000000000001061432416201200301020ustar00rootroot00000000000000fn main() { println!("Check that our file is not overwritten") } cargo-0.66.0/tests/testsuite/init/bin_already_exists_explicit_nosrc/stderr.log000066400000000000000000000000521432416201200300070ustar00rootroot00000000000000 Created binary (application) package cargo-0.66.0/tests/testsuite/init/bin_already_exists_explicit_nosrc/stdout.log000066400000000000000000000000001432416201200300170ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit/000077500000000000000000000000001432416201200245715ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit/in/000077500000000000000000000000001432416201200251775ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit/in/src/000077500000000000000000000000001432416201200257665ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit/in/src/main.rs000066400000000000000000000001061432416201200272550ustar00rootroot00000000000000fn main() { println!("Check that our file is not overwritten") } cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit/mod.rs000066400000000000000000000012111432416201200257110ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; 
use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn bin_already_exists_implicit() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --vcs none") .current_dir(project_root) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); } cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit/out/000077500000000000000000000000001432416201200254005ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit/out/Cargo.toml000066400000000000000000000002551432416201200273320ustar00rootroot00000000000000[package] name = "case" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit/out/src/000077500000000000000000000000001432416201200261675ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit/out/src/main.rs000066400000000000000000000001061432416201200274560ustar00rootroot00000000000000fn main() { println!("Check that our file is not overwritten") } cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit/stderr.log000066400000000000000000000000521432416201200265740ustar00rootroot00000000000000 Created binary (application) package 
cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit/stdout.log000066400000000000000000000000001432416201200266040ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_namenosrc/000077500000000000000000000000001432416201200266365ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_namenosrc/in/000077500000000000000000000000001432416201200272445ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_namenosrc/in/case.rs000066400000000000000000000001061432416201200305220ustar00rootroot00000000000000fn main() { println!("Check that our file is not overwritten") } cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_namenosrc/mod.rs000066400000000000000000000013041432416201200277610ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn bin_already_exists_implicit_namenosrc() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --vcs none") .current_dir(project_root) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); assert!(!project_root.join("src").is_dir()); } cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_namenosrc/out/000077500000000000000000000000001432416201200274455ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_namenosrc/out/Cargo.toml000066400000000000000000000003251432416201200313750ustar00rootroot00000000000000[package] name = "case" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] [[bin]] name = "case" path 
= "case.rs" cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_namenosrc/out/case.rs000066400000000000000000000001061432416201200307230ustar00rootroot00000000000000fn main() { println!("Check that our file is not overwritten") } cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_namenosrc/stderr.log000066400000000000000000000000521432416201200306410ustar00rootroot00000000000000 Created binary (application) package cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_namenosrc/stdout.log000066400000000000000000000000001432416201200306510ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_namesrc/000077500000000000000000000000001432416201200263015ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_namesrc/in/000077500000000000000000000000001432416201200267075ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_namesrc/in/src/000077500000000000000000000000001432416201200274765ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_namesrc/in/src/case.rs000066400000000000000000000001061432416201200307540ustar00rootroot00000000000000fn main() { println!("Check that our file is not overwritten") } cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_namesrc/mod.rs000066400000000000000000000013131432416201200274240ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn bin_already_exists_implicit_namesrc() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --vcs none") .current_dir(project_root) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); 
assert_ui().subset_matches(curr_dir!().join("out"), project_root); assert!(!project_root.join("src/main.rs").is_file()); } cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_namesrc/out/000077500000000000000000000000001432416201200271105ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_namesrc/out/Cargo.toml000066400000000000000000000003311432416201200310350ustar00rootroot00000000000000[package] name = "case" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] [[bin]] name = "case" path = "src/case.rs" cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_namesrc/out/src/000077500000000000000000000000001432416201200276775ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_namesrc/out/src/case.rs000066400000000000000000000001061432416201200311550ustar00rootroot00000000000000fn main() { println!("Check that our file is not overwritten") } cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_namesrc/stderr.log000066400000000000000000000000521432416201200303040ustar00rootroot00000000000000 Created binary (application) package cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_namesrc/stdout.log000066400000000000000000000000001432416201200303140ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_nosrc/000077500000000000000000000000001432416201200257755ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_nosrc/in/000077500000000000000000000000001432416201200264035ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_nosrc/in/main.rs000066400000000000000000000001061432416201200276720ustar00rootroot00000000000000fn main() { println!("Check that our file is not overwritten") } 
cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_nosrc/mod.rs000066400000000000000000000013001432416201200271140ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn bin_already_exists_implicit_nosrc() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --vcs none") .current_dir(project_root) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); assert!(!project_root.join("src").is_dir()); } cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_nosrc/out/000077500000000000000000000000001432416201200266045ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_nosrc/out/Cargo.toml000066400000000000000000000003251432416201200305340ustar00rootroot00000000000000[package] name = "case" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] [[bin]] name = "case" path = "main.rs" cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_nosrc/out/main.rs000066400000000000000000000001061432416201200300730ustar00rootroot00000000000000fn main() { println!("Check that our file is not overwritten") } cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_nosrc/stderr.log000066400000000000000000000000521432416201200300000ustar00rootroot00000000000000 Created binary (application) package 
cargo-0.66.0/tests/testsuite/init/bin_already_exists_implicit_nosrc/stdout.log000066400000000000000000000000001432416201200300100ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/both_lib_and_bin/000077500000000000000000000000001432416201200222635ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/both_lib_and_bin/mod.rs000066400000000000000000000007451432416201200234160ustar00rootroot00000000000000use cargo_test_support::paths; use cargo_test_support::prelude::*; use cargo_test_support::curr_dir; #[cargo_test] fn both_lib_and_bin() { let cwd = paths::root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --lib --bin") .current_dir(&cwd) .assert() .code(101) .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert!(!cwd.join("Cargo.toml").is_file()); } cargo-0.66.0/tests/testsuite/init/both_lib_and_bin/stderr.log000066400000000000000000000000611432416201200242660ustar00rootroot00000000000000error: can't specify both lib and binary outputs cargo-0.66.0/tests/testsuite/init/both_lib_and_bin/stdout.log000066400000000000000000000000001432416201200242760ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/cant_create_library_when_both_binlib_present/000077500000000000000000000000001432416201200301375ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/cant_create_library_when_both_binlib_present/in/000077500000000000000000000000001432416201200305455ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/cant_create_library_when_both_binlib_present/in/case.rs000066400000000000000000000000151432416201200320220ustar00rootroot00000000000000fn main() {} cargo-0.66.0/tests/testsuite/init/cant_create_library_when_both_binlib_present/in/lib.rs000066400000000000000000000000121432416201200316520ustar00rootroot00000000000000fn f() {} 
cargo-0.66.0/tests/testsuite/init/cant_create_library_when_both_binlib_present/mod.rs000066400000000000000000000010411432416201200312600ustar00rootroot00000000000000use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn cant_create_library_when_both_binlib_present() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --lib") .current_dir(project_root) .assert() .code(101) .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); } cargo-0.66.0/tests/testsuite/init/cant_create_library_when_both_binlib_present/stderr.log000066400000000000000000000001301432416201200321370ustar00rootroot00000000000000error: cannot have a package with multiple libraries, found both `case.rs` and `lib.rs` cargo-0.66.0/tests/testsuite/init/cant_create_library_when_both_binlib_present/stdout.log000066400000000000000000000000001432416201200321520ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/confused_by_multiple_lib_files/000077500000000000000000000000001432416201200252525ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/confused_by_multiple_lib_files/in/000077500000000000000000000000001432416201200256605ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/confused_by_multiple_lib_files/in/lib.rs000066400000000000000000000000371432416201200267740ustar00rootroot00000000000000fn f() { println!("lib.rs"); } cargo-0.66.0/tests/testsuite/init/confused_by_multiple_lib_files/in/src/000077500000000000000000000000001432416201200264475ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/confused_by_multiple_lib_files/in/src/lib.rs000066400000000000000000000000431432416201200275600ustar00rootroot00000000000000fn f() { println!("src/lib.rs"); } 
cargo-0.66.0/tests/testsuite/init/confused_by_multiple_lib_files/mod.rs000066400000000000000000000013051432416201200263760ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn confused_by_multiple_lib_files() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --vcs none") .current_dir(project_root) .assert() .code(101) .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); assert!(!project_root.join("Cargo.toml").is_file()); } cargo-0.66.0/tests/testsuite/init/confused_by_multiple_lib_files/out/000077500000000000000000000000001432416201200260615ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/confused_by_multiple_lib_files/out/lib.rs000066400000000000000000000000371432416201200271750ustar00rootroot00000000000000fn f() { println!("lib.rs"); } cargo-0.66.0/tests/testsuite/init/confused_by_multiple_lib_files/out/src/000077500000000000000000000000001432416201200266505ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/confused_by_multiple_lib_files/out/src/lib.rs000066400000000000000000000000431432416201200277610ustar00rootroot00000000000000fn f() { println!("src/lib.rs"); } cargo-0.66.0/tests/testsuite/init/confused_by_multiple_lib_files/stderr.log000066400000000000000000000001331432416201200272550ustar00rootroot00000000000000error: cannot have a package with multiple libraries, found both `src/lib.rs` and `lib.rs` 
cargo-0.66.0/tests/testsuite/init/confused_by_multiple_lib_files/stdout.log000066400000000000000000000000001432416201200272650ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/creates_binary_when_both_binlib_present/000077500000000000000000000000001432416201200271355ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/creates_binary_when_both_binlib_present/in/000077500000000000000000000000001432416201200275435ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/creates_binary_when_both_binlib_present/in/case.rs000066400000000000000000000000151432416201200310200ustar00rootroot00000000000000fn main() {} cargo-0.66.0/tests/testsuite/init/creates_binary_when_both_binlib_present/in/lib.rs000066400000000000000000000000121432416201200306500ustar00rootroot00000000000000fn f() {} cargo-0.66.0/tests/testsuite/init/creates_binary_when_both_binlib_present/mod.rs000066400000000000000000000012331432416201200302610ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn creates_binary_when_both_binlib_present() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --bin --vcs none") .current_dir(project_root) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); } cargo-0.66.0/tests/testsuite/init/creates_binary_when_both_binlib_present/out/000077500000000000000000000000001432416201200277445ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/creates_binary_when_both_binlib_present/out/Cargo.toml000066400000000000000000000003721432416201200316760ustar00rootroot00000000000000[package] name = "case" version = "0.1.0" edition = "2021" # See more keys and their definitions 
at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] [[bin]] name = "case" path = "case.rs" [lib] name = "case" path = "lib.rs" cargo-0.66.0/tests/testsuite/init/creates_binary_when_both_binlib_present/out/case.rs000066400000000000000000000000151432416201200312210ustar00rootroot00000000000000fn main() {} cargo-0.66.0/tests/testsuite/init/creates_binary_when_both_binlib_present/out/lib.rs000066400000000000000000000000121432416201200310510ustar00rootroot00000000000000fn f() {} cargo-0.66.0/tests/testsuite/init/creates_binary_when_both_binlib_present/stderr.log000066400000000000000000000000521432416201200311400ustar00rootroot00000000000000 Created binary (application) package cargo-0.66.0/tests/testsuite/init/creates_binary_when_both_binlib_present/stdout.log000066400000000000000000000000001432416201200311500ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/000077500000000000000000000000001432416201200306105ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/in/000077500000000000000000000000001432416201200312165ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/in/case.rs000066400000000000000000000000121432416201200324700ustar00rootroot00000000000000fn f() {} cargo-0.66.0/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/mod.rs000066400000000000000000000012431432416201200317350ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn creates_binary_when_instructed_and_has_lib_file() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --bin --vcs none") .current_dir(project_root) .assert() .success() 
.stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); } cargo-0.66.0/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/out/000077500000000000000000000000001432416201200314175ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/out/Cargo.toml000066400000000000000000000003251432416201200333470ustar00rootroot00000000000000[package] name = "case" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] [[bin]] name = "case" path = "case.rs" cargo-0.66.0/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/out/case.rs000066400000000000000000000000121432416201200326710ustar00rootroot00000000000000fn f() {} cargo-0.66.0/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/stderr.log000066400000000000000000000001351432416201200326150ustar00rootroot00000000000000warning: file `case.rs` seems to be a library file Created binary (application) package cargo-0.66.0/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/stdout.log000066400000000000000000000000001432416201200326230ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/000077500000000000000000000000001432416201200307725ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/in/000077500000000000000000000000001432416201200314005ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/in/case.rs000066400000000000000000000000151432416201200326550ustar00rootroot00000000000000fn main() {} 
cargo-0.66.0/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/mod.rs000066400000000000000000000012441432416201200321200ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn creates_library_when_instructed_and_has_bin_file() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --lib --vcs none") .current_dir(project_root) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); } cargo-0.66.0/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/out/000077500000000000000000000000001432416201200316015ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/out/Cargo.toml000066400000000000000000000003231432416201200335270ustar00rootroot00000000000000[package] name = "case" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] [lib] name = "case" path = "case.rs" cargo-0.66.0/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/out/case.rs000066400000000000000000000000151432416201200330560ustar00rootroot00000000000000fn main() {} cargo-0.66.0/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/stderr.log000066400000000000000000000001351432416201200327770ustar00rootroot00000000000000warning: file `case.rs` seems to be a binary (application) file Created library package 
cargo-0.66.0/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/stdout.log000066400000000000000000000000001432416201200330050ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/empty_dir/000077500000000000000000000000001432416201200210235ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/empty_dir/.keep000066400000000000000000000000001432416201200217360ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/empty_dir/mod.rs000066400000000000000000000003171432416201200221510ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::{command_is_available, paths, Project}; use std::fs; use std::process::Command; use crate::test_root; cargo-0.66.0/tests/testsuite/init/explicit_bin_with_git/000077500000000000000000000000001432416201200233765ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/explicit_bin_with_git/in000077700000000000000000000000001432416201200260542../empty_dirustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/explicit_bin_with_git/mod.rs000066400000000000000000000012101432416201200245150ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn explicit_bin_with_git() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --vcs git --bin") .current_dir(project_root) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); } 
cargo-0.66.0/tests/testsuite/init/explicit_bin_with_git/out/000077500000000000000000000000001432416201200242055ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/explicit_bin_with_git/out/.gitignore000066400000000000000000000000101432416201200261640ustar00rootroot00000000000000/target cargo-0.66.0/tests/testsuite/init/explicit_bin_with_git/out/Cargo.toml000066400000000000000000000002551432416201200261370ustar00rootroot00000000000000[package] name = "case" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] cargo-0.66.0/tests/testsuite/init/explicit_bin_with_git/out/src/000077500000000000000000000000001432416201200247745ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/explicit_bin_with_git/out/src/main.rs000066400000000000000000000000551432416201200262660ustar00rootroot00000000000000fn main() { println!("Hello, world!"); } cargo-0.66.0/tests/testsuite/init/explicit_bin_with_git/stderr.log000066400000000000000000000000521432416201200254010ustar00rootroot00000000000000 Created binary (application) package cargo-0.66.0/tests/testsuite/init/explicit_bin_with_git/stdout.log000066400000000000000000000000001432416201200254110ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/formats_source/000077500000000000000000000000001432416201200220625ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/formats_source/in/000077500000000000000000000000001432416201200224705ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/formats_source/in/rustfmt.toml000066400000000000000000000000171432416201200250670ustar00rootroot00000000000000tab_spaces = 2 cargo-0.66.0/tests/testsuite/init/formats_source/mod.rs000066400000000000000000000021131432416201200232040ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::{process, Project}; use cargo_test_support::curr_dir; 
#[cargo_test] fn formats_source() { // This cannot use `requires_rustfmt` because rustfmt is not available in // the rust-lang/rust environment. Additionally, if running cargo without // rustup (but with rustup installed), this test also fails due to HOME // preventing the proxy from choosing a toolchain. if let Err(e) = process("rustfmt").arg("-V").exec_with_output() { eprintln!("skipping test, rustfmt not available:\n{e:?}"); return; } let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --lib --vcs none") .current_dir(project_root) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); } cargo-0.66.0/tests/testsuite/init/formats_source/out/000077500000000000000000000000001432416201200226715ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/formats_source/out/Cargo.toml000066400000000000000000000002551432416201200246230ustar00rootroot00000000000000[package] name = "case" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] cargo-0.66.0/tests/testsuite/init/formats_source/out/rustfmt.toml000066400000000000000000000000171432416201200252700ustar00rootroot00000000000000tab_spaces = 2 cargo-0.66.0/tests/testsuite/init/formats_source/out/src/000077500000000000000000000000001432416201200234605ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/formats_source/out/src/lib.rs000066400000000000000000000003061432416201200245730ustar00rootroot00000000000000pub fn add(left: usize, right: usize) -> usize { left + right } #[cfg(test)] mod tests { use super::*; #[test] fn it_works() { let result = add(2, 2); assert_eq!(result, 4); } } 
cargo-0.66.0/tests/testsuite/init/formats_source/stderr.log000066400000000000000000000000351432416201200240660ustar00rootroot00000000000000 Created library package cargo-0.66.0/tests/testsuite/init/formats_source/stdout.log000066400000000000000000000000001432416201200240750ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/fossil_autodetect/000077500000000000000000000000001432416201200225475ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/fossil_autodetect/in/000077500000000000000000000000001432416201200231555ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/fossil_autodetect/in/.fossil/000077500000000000000000000000001432416201200245325ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/fossil_autodetect/in/.fossil/.keep000066400000000000000000000000001432416201200254450ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/fossil_autodetect/mod.rs000066400000000000000000000012541432416201200236760ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn fossil_autodetect() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --lib") .current_dir(project_root) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); assert!(!project_root.join(".git").is_dir()); } 
cargo-0.66.0/tests/testsuite/init/fossil_autodetect/out/000077500000000000000000000000001432416201200233565ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/fossil_autodetect/out/.fossil-settings/000077500000000000000000000000001432416201200265715ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/fossil_autodetect/out/.fossil-settings/clean-glob000066400000000000000000000000221432416201200305110ustar00rootroot00000000000000target Cargo.lock cargo-0.66.0/tests/testsuite/init/fossil_autodetect/out/.fossil-settings/ignore-glob000066400000000000000000000000221432416201200307120ustar00rootroot00000000000000target Cargo.lock cargo-0.66.0/tests/testsuite/init/fossil_autodetect/out/Cargo.toml000066400000000000000000000002551432416201200253100ustar00rootroot00000000000000[package] name = "case" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] cargo-0.66.0/tests/testsuite/init/fossil_autodetect/out/src/000077500000000000000000000000001432416201200241455ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/fossil_autodetect/out/src/lib.rs000066400000000000000000000003301432416201200252550ustar00rootroot00000000000000pub fn add(left: usize, right: usize) -> usize { left + right } #[cfg(test)] mod tests { use super::*; #[test] fn it_works() { let result = add(2, 2); assert_eq!(result, 4); } } cargo-0.66.0/tests/testsuite/init/fossil_autodetect/stderr.log000066400000000000000000000000351432416201200245530ustar00rootroot00000000000000 Created library package 
cargo-0.66.0/tests/testsuite/init/fossil_autodetect/stdout.log000066400000000000000000000000001432416201200245620ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/git_autodetect/000077500000000000000000000000001432416201200220335ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/git_autodetect/mod.rs000066400000000000000000000014201432416201200231550ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::paths; use cargo_test_support::prelude::*; use std::fs; use cargo_test_support::curr_dir; #[cargo_test] fn git_autodetect() { let project_root = &paths::root().join("foo"); // Need to create `.git` dir manually because it cannot be tracked under a git repo fs::create_dir_all(project_root.join(".git")).unwrap(); snapbox::cmd::Command::cargo_ui() .arg_line("init --lib") .current_dir(project_root) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); assert!(project_root.join(".git").is_dir()); } cargo-0.66.0/tests/testsuite/init/git_autodetect/out/000077500000000000000000000000001432416201200226425ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/git_autodetect/out/.gitignore000066400000000000000000000000241432416201200246260ustar00rootroot00000000000000/target /Cargo.lock cargo-0.66.0/tests/testsuite/init/git_autodetect/out/Cargo.toml000066400000000000000000000002541432416201200245730ustar00rootroot00000000000000[package] name = "foo" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] 
cargo-0.66.0/tests/testsuite/init/git_autodetect/out/src/000077500000000000000000000000001432416201200234315ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/git_autodetect/out/src/lib.rs000066400000000000000000000003301432416201200245410ustar00rootroot00000000000000pub fn add(left: usize, right: usize) -> usize { left + right } #[cfg(test)] mod tests { use super::*; #[test] fn it_works() { let result = add(2, 2); assert_eq!(result, 4); } } cargo-0.66.0/tests/testsuite/init/git_autodetect/stderr.log000066400000000000000000000000351432416201200240370ustar00rootroot00000000000000 Created library package cargo-0.66.0/tests/testsuite/init/git_autodetect/stdout.log000066400000000000000000000000001432416201200240460ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/000077500000000000000000000000001432416201200273605ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/in/000077500000000000000000000000001432416201200277665ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/in/.gitignore000066400000000000000000000000141432416201200317510ustar00rootroot00000000000000**/some.filecargo-0.66.0/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/mod.rs000066400000000000000000000013211432416201200305020ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn git_ignore_exists_no_conflicting_entries() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --lib --edition 2015") .current_dir(project_root) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); 
assert_ui().subset_matches(curr_dir!().join("out"), project_root); assert!(project_root.join(".git").is_dir()); } cargo-0.66.0/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/out/000077500000000000000000000000001432416201200301675ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/out/.gitignore000066400000000000000000000000641432416201200321570ustar00rootroot00000000000000**/some.file # Added by cargo /target /Cargo.lock cargo-0.66.0/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/out/Cargo.toml000066400000000000000000000002551432416201200321210ustar00rootroot00000000000000[package] name = "case" version = "0.1.0" edition = "2015" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] cargo-0.66.0/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/out/src/000077500000000000000000000000001432416201200307565ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/out/src/lib.rs000066400000000000000000000003301432416201200320660ustar00rootroot00000000000000pub fn add(left: usize, right: usize) -> usize { left + right } #[cfg(test)] mod tests { use super::*; #[test] fn it_works() { let result = add(2, 2); assert_eq!(result, 4); } } cargo-0.66.0/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/stderr.log000066400000000000000000000000351432416201200313640ustar00rootroot00000000000000 Created library package 
cargo-0.66.0/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/stdout.log000066400000000000000000000000001432416201200313730ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/ignores_failure_to_format_source/000077500000000000000000000000001432416201200256365ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/ignores_failure_to_format_source/in/000077500000000000000000000000001432416201200262445ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/ignores_failure_to_format_source/in/rustfmt.toml000066400000000000000000000000171432416201200306430ustar00rootroot00000000000000tab_spaces = 2 cargo-0.66.0/tests/testsuite/init/ignores_failure_to_format_source/mod.rs000066400000000000000000000013221432416201200267610ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn ignores_failure_to_format_source() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --lib --vcs none") .env("PATH", "") // pretend that `rustfmt` is missing .current_dir(project_root) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); } cargo-0.66.0/tests/testsuite/init/ignores_failure_to_format_source/out/000077500000000000000000000000001432416201200264455ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/ignores_failure_to_format_source/out/Cargo.toml000066400000000000000000000002551432416201200303770ustar00rootroot00000000000000[package] name = "case" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] 
cargo-0.66.0/tests/testsuite/init/ignores_failure_to_format_source/out/rustfmt.toml000066400000000000000000000000171432416201200310440ustar00rootroot00000000000000tab_spaces = 2 cargo-0.66.0/tests/testsuite/init/ignores_failure_to_format_source/out/src/000077500000000000000000000000001432416201200272345ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/ignores_failure_to_format_source/out/src/lib.rs000066400000000000000000000003301432416201200303440ustar00rootroot00000000000000pub fn add(left: usize, right: usize) -> usize { left + right } #[cfg(test)] mod tests { use super::*; #[test] fn it_works() { let result = add(2, 2); assert_eq!(result, 4); } } cargo-0.66.0/tests/testsuite/init/ignores_failure_to_format_source/stderr.log000066400000000000000000000000351432416201200276420ustar00rootroot00000000000000 Created library package cargo-0.66.0/tests/testsuite/init/ignores_failure_to_format_source/stdout.log000066400000000000000000000000001432416201200276510ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/inferred_bin_with_git/000077500000000000000000000000001432416201200233535ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/inferred_bin_with_git/in/000077500000000000000000000000001432416201200237615ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/inferred_bin_with_git/in/main.rs000066400000000000000000000000151432416201200252470ustar00rootroot00000000000000fn main() {} cargo-0.66.0/tests/testsuite/init/inferred_bin_with_git/mod.rs000066400000000000000000000012021432416201200244730ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn inferred_bin_with_git() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --vcs git") .current_dir(project_root) .assert() .success() 
.stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); } cargo-0.66.0/tests/testsuite/init/inferred_bin_with_git/out/000077500000000000000000000000001432416201200241625ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/inferred_bin_with_git/out/.gitignore000066400000000000000000000000101432416201200261410ustar00rootroot00000000000000/target cargo-0.66.0/tests/testsuite/init/inferred_bin_with_git/out/Cargo.toml000066400000000000000000000003251432416201200261120ustar00rootroot00000000000000[package] name = "case" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] [[bin]] name = "case" path = "main.rs" cargo-0.66.0/tests/testsuite/init/inferred_bin_with_git/out/main.rs000066400000000000000000000000151432416201200254500ustar00rootroot00000000000000fn main() {} cargo-0.66.0/tests/testsuite/init/inferred_bin_with_git/stderr.log000066400000000000000000000000521432416201200253560ustar00rootroot00000000000000 Created binary (application) package cargo-0.66.0/tests/testsuite/init/inferred_bin_with_git/stdout.log000066400000000000000000000000001432416201200253660ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/inferred_lib_with_git/000077500000000000000000000000001432416201200233515ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/inferred_lib_with_git/in/000077500000000000000000000000001432416201200237575ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/inferred_lib_with_git/in/lib.rs000066400000000000000000000000121432416201200250640ustar00rootroot00000000000000fn f() {} cargo-0.66.0/tests/testsuite/init/inferred_lib_with_git/mod.rs000066400000000000000000000012021432416201200244710ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use 
cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn inferred_lib_with_git() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --vcs git") .current_dir(project_root) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); } cargo-0.66.0/tests/testsuite/init/inferred_lib_with_git/out/000077500000000000000000000000001432416201200241605ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/inferred_lib_with_git/out/.gitignore000066400000000000000000000000241432416201200261440ustar00rootroot00000000000000/target /Cargo.lock cargo-0.66.0/tests/testsuite/init/inferred_lib_with_git/out/Cargo.toml000066400000000000000000000003221432416201200261050ustar00rootroot00000000000000[package] name = "case" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] [lib] name = "case" path = "lib.rs" cargo-0.66.0/tests/testsuite/init/inferred_lib_with_git/out/lib.rs000066400000000000000000000000121432416201200252650ustar00rootroot00000000000000fn f() {} cargo-0.66.0/tests/testsuite/init/inferred_lib_with_git/stderr.log000066400000000000000000000000351432416201200253550ustar00rootroot00000000000000 Created library package cargo-0.66.0/tests/testsuite/init/inferred_lib_with_git/stdout.log000066400000000000000000000000001432416201200253640ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/invalid_dir_name/000077500000000000000000000000001432416201200223135ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/invalid_dir_name/mod.rs000066400000000000000000000010341432416201200234360ustar00rootroot00000000000000use cargo_test_support::paths; use cargo_test_support::prelude::*; use std::fs; use 
cargo_test_support::curr_dir; #[cargo_test] fn invalid_dir_name() { let foo = &paths::root().join("foo.bar"); fs::create_dir_all(foo).unwrap(); snapbox::cmd::Command::cargo_ui() .arg_line("init") .current_dir(foo) .assert() .code(101) .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert!(!foo.join("Cargo.toml").is_file()); } cargo-0.66.0/tests/testsuite/init/invalid_dir_name/stderr.log000066400000000000000000000010151432416201200243160ustar00rootroot00000000000000error: invalid character `.` in package name: `foo.bar`, characters must be Unicode XID characters (numbers, `-`, `_`, or most letters) If you need a package name to not match the directory name, consider using --name flag. If you need a binary with the name "foo.bar", use a valid package name, and set the binary name to be different from the package. This can be done by setting the binary filename to `src/bin/foo.bar.rs` or change the name in Cargo.toml with: [[bin]] name = "foo.bar" path = "src/main.rs" cargo-0.66.0/tests/testsuite/init/invalid_dir_name/stdout.log000066400000000000000000000000001432416201200243260ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/lib_already_exists_nosrc/000077500000000000000000000000001432416201200241015ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/lib_already_exists_nosrc/in/000077500000000000000000000000001432416201200245075ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/lib_already_exists_nosrc/in/lib.rs000066400000000000000000000000001432416201200256110ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/lib_already_exists_nosrc/mod.rs000066400000000000000000000013001432416201200252200ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn lib_already_exists_nosrc() { let project = 
Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --vcs none") .current_dir(project_root) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); assert!(!project_root.join("src/main.rs").is_file()); } cargo-0.66.0/tests/testsuite/init/lib_already_exists_nosrc/out/000077500000000000000000000000001432416201200247105ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/lib_already_exists_nosrc/out/Cargo.toml000066400000000000000000000003221432416201200266350ustar00rootroot00000000000000[package] name = "case" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] [lib] name = "case" path = "lib.rs" cargo-0.66.0/tests/testsuite/init/lib_already_exists_nosrc/out/lib.rs000066400000000000000000000000001432416201200260120ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/lib_already_exists_nosrc/stderr.log000066400000000000000000000000351432416201200261050ustar00rootroot00000000000000 Created library package cargo-0.66.0/tests/testsuite/init/lib_already_exists_nosrc/stdout.log000066400000000000000000000000001432416201200261140ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/lib_already_exists_src/000077500000000000000000000000001432416201200235445ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/lib_already_exists_src/in/000077500000000000000000000000001432416201200241525ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/lib_already_exists_src/in/src/000077500000000000000000000000001432416201200247415ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/lib_already_exists_src/in/src/lib.rs000066400000000000000000000000121432416201200260460ustar00rootroot00000000000000fn f() {} 
cargo-0.66.0/tests/testsuite/init/lib_already_exists_src/mod.rs000066400000000000000000000012761432416201200246770ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn lib_already_exists_src() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --vcs none") .current_dir(project_root) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); assert!(!project_root.join("src/main.rs").is_file()); } cargo-0.66.0/tests/testsuite/init/lib_already_exists_src/out/000077500000000000000000000000001432416201200243535ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/lib_already_exists_src/out/Cargo.toml000066400000000000000000000002551432416201200263050ustar00rootroot00000000000000[package] name = "case" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] cargo-0.66.0/tests/testsuite/init/lib_already_exists_src/out/src/000077500000000000000000000000001432416201200251425ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/lib_already_exists_src/out/src/lib.rs000066400000000000000000000000121432416201200262470ustar00rootroot00000000000000fn f() {} cargo-0.66.0/tests/testsuite/init/lib_already_exists_src/stderr.log000066400000000000000000000000351432416201200255500ustar00rootroot00000000000000 Created library package 
cargo-0.66.0/tests/testsuite/init/lib_already_exists_src/stdout.log000066400000000000000000000000001432416201200255570ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/mercurial_autodetect/000077500000000000000000000000001432416201200232335ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/mercurial_autodetect/in/000077500000000000000000000000001432416201200236415ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/mercurial_autodetect/in/.hg/000077500000000000000000000000001432416201200243155ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/mercurial_autodetect/in/.hg/.keep000066400000000000000000000000001432416201200252300ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/mercurial_autodetect/mod.rs000066400000000000000000000012571432416201200243650ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn mercurial_autodetect() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --lib") .current_dir(project_root) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); assert!(!project_root.join(".git").is_dir()); } cargo-0.66.0/tests/testsuite/init/mercurial_autodetect/out/000077500000000000000000000000001432416201200240425ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/mercurial_autodetect/out/.hgignore000066400000000000000000000000261432416201200256430ustar00rootroot00000000000000^target/ ^Cargo.lock$ cargo-0.66.0/tests/testsuite/init/mercurial_autodetect/out/Cargo.toml000066400000000000000000000002551432416201200257740ustar00rootroot00000000000000[package] name = "case" version = "0.1.0" edition = "2021" # See more keys 
and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] cargo-0.66.0/tests/testsuite/init/mercurial_autodetect/out/src/000077500000000000000000000000001432416201200246315ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/mercurial_autodetect/out/src/lib.rs000066400000000000000000000003301432416201200257410ustar00rootroot00000000000000pub fn add(left: usize, right: usize) -> usize { left + right } #[cfg(test)] mod tests { use super::*; #[test] fn it_works() { let result = add(2, 2); assert_eq!(result, 4); } } cargo-0.66.0/tests/testsuite/init/mercurial_autodetect/stderr.log000066400000000000000000000000351432416201200252370ustar00rootroot00000000000000 Created library package cargo-0.66.0/tests/testsuite/init/mercurial_autodetect/stdout.log000066400000000000000000000000001432416201200252460ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/mod.rs000066400000000000000000000021751432416201200201610ustar00rootroot00000000000000//! Tests for the `cargo init` command. 
mod auto_git; mod bin_already_exists_explicit; mod bin_already_exists_explicit_nosrc; mod bin_already_exists_implicit; mod bin_already_exists_implicit_namenosrc; mod bin_already_exists_implicit_namesrc; mod bin_already_exists_implicit_nosrc; mod both_lib_and_bin; mod cant_create_library_when_both_binlib_present; mod confused_by_multiple_lib_files; mod creates_binary_when_both_binlib_present; mod creates_binary_when_instructed_and_has_lib_file; mod creates_library_when_instructed_and_has_bin_file; mod explicit_bin_with_git; mod formats_source; mod fossil_autodetect; mod git_autodetect; mod git_ignore_exists_no_conflicting_entries; mod ignores_failure_to_format_source; mod inferred_bin_with_git; mod inferred_lib_with_git; mod invalid_dir_name; mod lib_already_exists_nosrc; mod lib_already_exists_src; mod mercurial_autodetect; mod multibin_project_name_clash; #[cfg(not(windows))] mod no_filename; mod pijul_autodetect; mod reserved_name; mod simple_bin; mod simple_git; mod simple_git_ignore_exists; mod simple_hg; mod simple_hg_ignore_exists; mod simple_lib; mod unknown_flags; mod with_argument; cargo-0.66.0/tests/testsuite/init/multibin_project_name_clash/000077500000000000000000000000001432416201200245525ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/multibin_project_name_clash/in/000077500000000000000000000000001432416201200251605ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/multibin_project_name_clash/in/case.rs000066400000000000000000000000421432416201200264350ustar00rootroot00000000000000fn main() { println!("foo.rs"); } cargo-0.66.0/tests/testsuite/init/multibin_project_name_clash/in/main.rs000066400000000000000000000000431432416201200264470ustar00rootroot00000000000000fn main() { println!("main.rs"); } cargo-0.66.0/tests/testsuite/init/multibin_project_name_clash/mod.rs000066400000000000000000000013101432416201200256720ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use 
cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn multibin_project_name_clash() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --lib --vcs none") .current_dir(project_root) .assert() .code(101) .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); assert!(!project_root.join("Cargo.toml").is_file()); } cargo-0.66.0/tests/testsuite/init/multibin_project_name_clash/out/000077500000000000000000000000001432416201200253615ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/multibin_project_name_clash/out/case.rs000066400000000000000000000000421432416201200266360ustar00rootroot00000000000000fn main() { println!("foo.rs"); } cargo-0.66.0/tests/testsuite/init/multibin_project_name_clash/out/main.rs000066400000000000000000000000431432416201200266500ustar00rootroot00000000000000fn main() { println!("main.rs"); } cargo-0.66.0/tests/testsuite/init/multibin_project_name_clash/stderr.log000066400000000000000000000002221432416201200265540ustar00rootroot00000000000000error: multiple possible binary sources found: main.rs case.rs cannot automatically generate Cargo.toml as the main target would be ambiguous cargo-0.66.0/tests/testsuite/init/multibin_project_name_clash/stdout.log000066400000000000000000000000001432416201200265650ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/no_filename/000077500000000000000000000000001432416201200213035ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/no_filename/mod.rs000066400000000000000000000006451432416201200224350ustar00rootroot00000000000000use cargo_test_support::paths; use cargo_test_support::prelude::*; use cargo_test_support::curr_dir; #[cfg(not(windows))] #[cargo_test] fn no_filename() { snapbox::cmd::Command::cargo_ui() .arg_line("init /") 
.current_dir(paths::root()) .assert() .code(101) .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); } cargo-0.66.0/tests/testsuite/init/no_filename/stderr.log000066400000000000000000000001161432416201200233070ustar00rootroot00000000000000error: cannot auto-detect package name from path "/" ; use --name to override cargo-0.66.0/tests/testsuite/init/no_filename/stdout.log000066400000000000000000000000001432416201200233160ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/pijul_autodetect/000077500000000000000000000000001432416201200223735ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/pijul_autodetect/in/000077500000000000000000000000001432416201200230015ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/pijul_autodetect/in/.pijul/000077500000000000000000000000001432416201200242025ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/pijul_autodetect/in/.pijul/.keep000066400000000000000000000000001432416201200251150ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/pijul_autodetect/mod.rs000066400000000000000000000012531432416201200235210ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn pijul_autodetect() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --lib") .current_dir(project_root) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); assert!(!project_root.join(".git").is_dir()); } 
cargo-0.66.0/tests/testsuite/init/pijul_autodetect/out/000077500000000000000000000000001432416201200232025ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/pijul_autodetect/out/.ignore000066400000000000000000000000241432416201200244620ustar00rootroot00000000000000/target /Cargo.lock cargo-0.66.0/tests/testsuite/init/pijul_autodetect/out/Cargo.toml000066400000000000000000000002551432416201200251340ustar00rootroot00000000000000[package] name = "case" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] cargo-0.66.0/tests/testsuite/init/pijul_autodetect/out/src/000077500000000000000000000000001432416201200237715ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/pijul_autodetect/out/src/lib.rs000066400000000000000000000003301432416201200251010ustar00rootroot00000000000000pub fn add(left: usize, right: usize) -> usize { left + right } #[cfg(test)] mod tests { use super::*; #[test] fn it_works() { let result = add(2, 2); assert_eq!(result, 4); } } cargo-0.66.0/tests/testsuite/init/pijul_autodetect/stderr.log000066400000000000000000000000351432416201200243770ustar00rootroot00000000000000 Created library package cargo-0.66.0/tests/testsuite/init/pijul_autodetect/stdout.log000066400000000000000000000000001432416201200244060ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/reserved_name/000077500000000000000000000000001432416201200216465ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/reserved_name/mod.rs000066400000000000000000000010721432416201200227730ustar00rootroot00000000000000use cargo_test_support::paths; use cargo_test_support::prelude::*; use std::fs; use cargo_test_support::curr_dir; #[cargo_test] fn reserved_name() { let project_root = &paths::root().join("test"); fs::create_dir_all(project_root).unwrap(); snapbox::cmd::Command::cargo_ui() .arg_line("init") .current_dir(project_root) .assert() .code(101) 
.stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert!(!project_root.join("Cargo.toml").is_file()); } cargo-0.66.0/tests/testsuite/init/reserved_name/stderr.log000066400000000000000000000007441432416201200236610ustar00rootroot00000000000000error: the name `test` cannot be used as a package name, it conflicts with Rust's built-in test library If you need a package name to not match the directory name, consider using --name flag. If you need a binary with the name "test", use a valid package name, and set the binary name to be different from the package. This can be done by setting the binary filename to `src/bin/test.rs` or change the name in Cargo.toml with: [[bin]] name = "test" path = "src/main.rs" cargo-0.66.0/tests/testsuite/init/reserved_name/stdout.log000066400000000000000000000000001432416201200236610ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_bin/000077500000000000000000000000001432416201200211505ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_bin/in000077700000000000000000000000001432416201200236262../empty_dirustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_bin/mod.rs000066400000000000000000000015701432416201200223000ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn simple_bin() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --bin --vcs none --edition 2015") .current_dir(project_root) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); assert!(!project_root.join(".gitignore").is_file()); snapbox::cmd::Command::cargo_ui() .current_dir(project_root) 
.arg("build") .assert() .success(); assert!(project.bin("case").is_file()); } cargo-0.66.0/tests/testsuite/init/simple_bin/out/000077500000000000000000000000001432416201200217575ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_bin/out/Cargo.toml000066400000000000000000000002551432416201200237110ustar00rootroot00000000000000[package] name = "case" version = "0.1.0" edition = "2015" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] cargo-0.66.0/tests/testsuite/init/simple_bin/out/src/000077500000000000000000000000001432416201200225465ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_bin/out/src/main.rs000066400000000000000000000000551432416201200240400ustar00rootroot00000000000000fn main() { println!("Hello, world!"); } cargo-0.66.0/tests/testsuite/init/simple_bin/stderr.log000066400000000000000000000000521432416201200231530ustar00rootroot00000000000000 Created binary (application) package cargo-0.66.0/tests/testsuite/init/simple_bin/stdout.log000066400000000000000000000000001432416201200231630ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_git/000077500000000000000000000000001432416201200211635ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_git/in000077700000000000000000000000001432416201200236412../empty_dirustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_git/mod.rs000066400000000000000000000012561432416201200223140ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn simple_git() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --lib --vcs git") .current_dir(project_root) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) 
.stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); assert!(project_root.join(".git").is_dir()); } cargo-0.66.0/tests/testsuite/init/simple_git/out/000077500000000000000000000000001432416201200217725ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_git/out/.gitignore000066400000000000000000000000241432416201200237560ustar00rootroot00000000000000/target /Cargo.lock cargo-0.66.0/tests/testsuite/init/simple_git/out/Cargo.toml000066400000000000000000000002551432416201200237240ustar00rootroot00000000000000[package] name = "case" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] cargo-0.66.0/tests/testsuite/init/simple_git/out/src/000077500000000000000000000000001432416201200225615ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_git/out/src/lib.rs000066400000000000000000000003301432416201200236710ustar00rootroot00000000000000pub fn add(left: usize, right: usize) -> usize { left + right } #[cfg(test)] mod tests { use super::*; #[test] fn it_works() { let result = add(2, 2); assert_eq!(result, 4); } } cargo-0.66.0/tests/testsuite/init/simple_git/stderr.log000066400000000000000000000000351432416201200231670ustar00rootroot00000000000000 Created library package cargo-0.66.0/tests/testsuite/init/simple_git/stdout.log000066400000000000000000000000001432416201200231760ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_git_ignore_exists/000077500000000000000000000000001432416201200241255ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_git_ignore_exists/in/000077500000000000000000000000001432416201200245335ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_git_ignore_exists/in/.gitignore000066400000000000000000000000241432416201200265170ustar00rootroot00000000000000/target 
**/some.filecargo-0.66.0/tests/testsuite/init/simple_git_ignore_exists/mod.rs000066400000000000000000000015071432416201200252550ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn simple_git_ignore_exists() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --lib --edition 2015") .current_dir(project_root) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); assert!(project_root.join(".git").is_dir()); snapbox::cmd::Command::cargo_ui() .current_dir(project_root) .arg("build") .assert() .success(); } cargo-0.66.0/tests/testsuite/init/simple_git_ignore_exists/out/000077500000000000000000000000001432416201200247345ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_git_ignore_exists/out/.gitignore000066400000000000000000000001561432416201200267260ustar00rootroot00000000000000/target **/some.file # Added by cargo # # already existing elements were commented out #/target /Cargo.lock cargo-0.66.0/tests/testsuite/init/simple_git_ignore_exists/out/Cargo.toml000066400000000000000000000002551432416201200266660ustar00rootroot00000000000000[package] name = "case" version = "0.1.0" edition = "2015" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] cargo-0.66.0/tests/testsuite/init/simple_git_ignore_exists/out/src/000077500000000000000000000000001432416201200255235ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_git_ignore_exists/out/src/lib.rs000066400000000000000000000003301432416201200266330ustar00rootroot00000000000000pub fn add(left: usize, right: usize) -> usize { left + right } #[cfg(test)] mod 
tests { use super::*; #[test] fn it_works() { let result = add(2, 2); assert_eq!(result, 4); } } cargo-0.66.0/tests/testsuite/init/simple_git_ignore_exists/stderr.log000066400000000000000000000000351432416201200261310ustar00rootroot00000000000000 Created library package cargo-0.66.0/tests/testsuite/init/simple_git_ignore_exists/stdout.log000066400000000000000000000000001432416201200261400ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_hg/000077500000000000000000000000001432416201200207765ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_hg/in000077700000000000000000000000001432416201200234542../empty_dirustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_hg/mod.rs000066400000000000000000000012721432416201200221250ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test(requires_hg)] fn simple_hg() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --lib --vcs hg") .current_dir(project_root) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); assert!(!project_root.join(".git").is_dir()); } cargo-0.66.0/tests/testsuite/init/simple_hg/out/000077500000000000000000000000001432416201200216055ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_hg/out/.hgignore000066400000000000000000000000261432416201200234060ustar00rootroot00000000000000^target/ ^Cargo.lock$ cargo-0.66.0/tests/testsuite/init/simple_hg/out/Cargo.toml000066400000000000000000000002551432416201200235370ustar00rootroot00000000000000[package] name = "case" version = "0.1.0" edition = "2021" # See more keys and their definitions at 
https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] cargo-0.66.0/tests/testsuite/init/simple_hg/out/src/000077500000000000000000000000001432416201200223745ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_hg/out/src/lib.rs000066400000000000000000000003301432416201200235040ustar00rootroot00000000000000pub fn add(left: usize, right: usize) -> usize { left + right } #[cfg(test)] mod tests { use super::*; #[test] fn it_works() { let result = add(2, 2); assert_eq!(result, 4); } } cargo-0.66.0/tests/testsuite/init/simple_hg/stderr.log000066400000000000000000000000351432416201200230020ustar00rootroot00000000000000 Created library package cargo-0.66.0/tests/testsuite/init/simple_hg/stdout.log000066400000000000000000000000001432416201200230110ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_hg_ignore_exists/000077500000000000000000000000001432416201200237405ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_hg_ignore_exists/in/000077500000000000000000000000001432416201200243465ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_hg_ignore_exists/in/.hg/000077500000000000000000000000001432416201200250225ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_hg_ignore_exists/in/.hg/.keep000066400000000000000000000000001432416201200257350ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_hg_ignore_exists/in/.hgignore000066400000000000000000000000121432416201200261420ustar00rootroot00000000000000^/somefilecargo-0.66.0/tests/testsuite/init/simple_hg_ignore_exists/mod.rs000066400000000000000000000012621432416201200250660ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn simple_hg_ignore_exists() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); 
snapbox::cmd::Command::cargo_ui() .arg_line("init --lib") .current_dir(project_root) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); assert!(!project_root.join(".git").is_dir()); } cargo-0.66.0/tests/testsuite/init/simple_hg_ignore_exists/out/000077500000000000000000000000001432416201200245475ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_hg_ignore_exists/out/.hgignore000066400000000000000000000000641432416201200263520ustar00rootroot00000000000000^/somefile # Added by cargo ^target/ ^Cargo.lock$ cargo-0.66.0/tests/testsuite/init/simple_hg_ignore_exists/out/Cargo.toml000066400000000000000000000002551432416201200265010ustar00rootroot00000000000000[package] name = "case" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] cargo-0.66.0/tests/testsuite/init/simple_hg_ignore_exists/out/src/000077500000000000000000000000001432416201200253365ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_hg_ignore_exists/out/src/lib.rs000066400000000000000000000003301432416201200264460ustar00rootroot00000000000000pub fn add(left: usize, right: usize) -> usize { left + right } #[cfg(test)] mod tests { use super::*; #[test] fn it_works() { let result = add(2, 2); assert_eq!(result, 4); } } cargo-0.66.0/tests/testsuite/init/simple_hg_ignore_exists/stderr.log000066400000000000000000000000351432416201200257440ustar00rootroot00000000000000 Created library package 
cargo-0.66.0/tests/testsuite/init/simple_hg_ignore_exists/stdout.log000066400000000000000000000000001432416201200257530ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_lib/000077500000000000000000000000001432416201200211465ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_lib/in000077700000000000000000000000001432416201200236242../empty_dirustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_lib/mod.rs000066400000000000000000000015701432416201200222760ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn simple_lib() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init --lib --vcs none --edition 2015") .current_dir(project_root) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); assert!(!project_root.join(".gitignore").is_file()); snapbox::cmd::Command::cargo_ui() .current_dir(project_root) .arg("build") .assert() .success(); assert!(!project.bin("foo").is_file()); } cargo-0.66.0/tests/testsuite/init/simple_lib/out/000077500000000000000000000000001432416201200217555ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_lib/out/Cargo.toml000066400000000000000000000002551432416201200237070ustar00rootroot00000000000000[package] name = "case" version = "0.1.0" edition = "2015" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] 
cargo-0.66.0/tests/testsuite/init/simple_lib/out/src/000077500000000000000000000000001432416201200225445ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/simple_lib/out/src/lib.rs000066400000000000000000000003301432416201200236540ustar00rootroot00000000000000pub fn add(left: usize, right: usize) -> usize { left + right } #[cfg(test)] mod tests { use super::*; #[test] fn it_works() { let result = add(2, 2); assert_eq!(result, 4); } } cargo-0.66.0/tests/testsuite/init/simple_lib/stderr.log000066400000000000000000000000351432416201200231520ustar00rootroot00000000000000 Created library package cargo-0.66.0/tests/testsuite/init/simple_lib/stdout.log000066400000000000000000000000001432416201200231610ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/unknown_flags/000077500000000000000000000000001432416201200217025ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/unknown_flags/mod.rs000066400000000000000000000006311432416201200230270ustar00rootroot00000000000000use cargo_test_support::paths; use cargo_test_support::prelude::*; use cargo_test_support::curr_dir; #[cargo_test] fn unknown_flags() { snapbox::cmd::Command::cargo_ui() .arg_line("init foo --flag") .current_dir(paths::root()) .assert() .code(1) .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); } cargo-0.66.0/tests/testsuite/init/unknown_flags/stderr.log000066400000000000000000000003521432416201200237100ustar00rootroot00000000000000error: Found argument '--flag' which wasn't expected, or isn't valid in this context If you tried to supply `--flag` as a value rather than a flag, use `-- --flag` USAGE: cargo[EXE] init For more information try --help 
cargo-0.66.0/tests/testsuite/init/unknown_flags/stdout.log000066400000000000000000000000001432416201200237150ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/with_argument/000077500000000000000000000000001432416201200217045ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/with_argument/in/000077500000000000000000000000001432416201200223125ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/with_argument/in/foo/000077500000000000000000000000001432416201200230755ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/with_argument/in/foo/.keep000066400000000000000000000000001432416201200240100ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/with_argument/mod.rs000066400000000000000000000011771432416201200230370ustar00rootroot00000000000000use cargo_test_support::compare::assert_ui; use cargo_test_support::prelude::*; use cargo_test_support::Project; use cargo_test_support::curr_dir; #[cargo_test] fn with_argument() { let project = Project::from_template(curr_dir!().join("in")); let project_root = &project.root(); snapbox::cmd::Command::cargo_ui() .arg_line("init foo --vcs none") .current_dir(project_root) .assert() .success() .stdout_matches_path(curr_dir!().join("stdout.log")) .stderr_matches_path(curr_dir!().join("stderr.log")); assert_ui().subset_matches(curr_dir!().join("out"), project_root); } cargo-0.66.0/tests/testsuite/init/with_argument/out/000077500000000000000000000000001432416201200225135ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/with_argument/out/foo/000077500000000000000000000000001432416201200232765ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/with_argument/out/foo/Cargo.toml000066400000000000000000000002541432416201200252270ustar00rootroot00000000000000[package] name = "foo" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] 
cargo-0.66.0/tests/testsuite/init/with_argument/out/foo/src/000077500000000000000000000000001432416201200240655ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/init/with_argument/out/foo/src/main.rs000066400000000000000000000000551432416201200253570ustar00rootroot00000000000000fn main() { println!("Hello, world!"); } cargo-0.66.0/tests/testsuite/init/with_argument/stderr.log000066400000000000000000000000521432416201200237070ustar00rootroot00000000000000 Created binary (application) package cargo-0.66.0/tests/testsuite/init/with_argument/stdout.log000066400000000000000000000000001432416201200237170ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/install.rs000066400000000000000000001541101432416201200201020ustar00rootroot00000000000000//! Tests for the `cargo install` command. use std::fs::{self, OpenOptions}; use std::io::prelude::*; use cargo_test_support::cross_compile; use cargo_test_support::git; use cargo_test_support::registry::{self, registry_path, Package}; use cargo_test_support::{ basic_manifest, cargo_process, no_such_file_err_msg, project, project_in, symlink_supported, t, }; use cargo_test_support::install::{ assert_has_installed_exe, assert_has_not_installed_exe, cargo_home, }; use cargo_test_support::paths::{self, CargoPathExt}; use std::env; use std::path::PathBuf; fn pkg(name: &str, vers: &str) { Package::new(name, vers) .file("src/lib.rs", "") .file( "src/main.rs", &format!("extern crate {}; fn main() {{}}", name), ) .publish(); } #[cargo_test] fn simple() { pkg("foo", "0.0.1"); cargo_process("install foo") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... [DOWNLOADED] foo v0.0.1 (registry [..]) [INSTALLING] foo v0.0.1 [COMPILING] foo v0.0.1 [FINISHED] release [optimized] target(s) in [..] 
[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE] [INSTALLED] package `foo v0.0.1` (executable `foo[EXE]`) [WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries ", ) .run(); assert_has_installed_exe(cargo_home(), "foo"); cargo_process("uninstall foo") .with_stderr("[REMOVING] [CWD]/home/.cargo/bin/foo[EXE]") .run(); assert_has_not_installed_exe(cargo_home(), "foo"); } #[cargo_test] fn simple_with_message_format() { pkg("foo", "0.0.1"); cargo_process("install foo --message-format=json") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... [DOWNLOADED] foo v0.0.1 (registry [..]) [INSTALLING] foo v0.0.1 [COMPILING] foo v0.0.1 [FINISHED] release [optimized] target(s) in [..] [INSTALLING] [CWD]/home/.cargo/bin/foo[EXE] [INSTALLED] package `foo v0.0.1` (executable `foo[EXE]`) [WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries ", ) .with_json( r#" { "reason": "compiler-artifact", "package_id": "foo 0.0.1 ([..])", "manifest_path": "[..]", "target": { "kind": [ "lib" ], "crate_types": [ "lib" ], "name": "foo", "src_path": "[..]/foo-0.0.1/src/lib.rs", "edition": "2015", "doc": true, "doctest": true, "test": true }, "profile": "{...}", "features": [], "filenames": "{...}", "executable": null, "fresh": false } { "reason": "compiler-artifact", "package_id": "foo 0.0.1 ([..])", "manifest_path": "[..]", "target": { "kind": [ "bin" ], "crate_types": [ "bin" ], "name": "foo", "src_path": "[..]/foo-0.0.1/src/main.rs", "edition": "2015", "doc": true, "doctest": false, "test": true }, "profile": "{...}", "features": [], "filenames": "{...}", "executable": "[..]", "fresh": false } {"reason":"build-finished","success":true} "#, ) .run(); assert_has_installed_exe(cargo_home(), "foo"); } #[cargo_test] fn with_index() { let registry = registry::init(); pkg("foo", "0.0.1"); cargo_process("install foo --index") .arg(registry.index_url().as_str()) .with_stderr(&format!( "\ [UPDATING] `{reg}` index [DOWNLOADING] crates 
... [DOWNLOADED] foo v0.0.1 (registry `{reg}`)
[INSTALLING] foo v0.0.1 (registry `{reg}`)
[COMPILING] foo v0.0.1 (registry `{reg}`)
[FINISHED] release [optimized] target(s) in [..]
[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
[INSTALLED] package `foo v0.0.1 (registry `{reg}`)` (executable `foo[EXE]`)
[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
",
            reg = registry_path().to_str().unwrap()
        ))
        .run();
    assert_has_installed_exe(cargo_home(), "foo");

    cargo_process("uninstall foo")
        .with_stderr("[REMOVING] [CWD]/home/.cargo/bin/foo[EXE]")
        .run();
    assert_has_not_installed_exe(cargo_home(), "foo");
}

// Installing several crates at once: the crate that does not exist (`baz`)
// must not abort the others. The run exits 101 and prints a [SUMMARY] line
// listing both the successes and the failure; uninstall likewise handles
// multiple packages in one invocation.
#[cargo_test]
fn multiple_pkgs() {
    pkg("foo", "0.0.1");
    pkg("bar", "0.0.2");

    cargo_process("install foo bar baz")
        .with_status(101)
        .with_stderr(
            "\
[UPDATING] `[..]` index
[DOWNLOADING] crates ...
[DOWNLOADED] foo v0.0.1 (registry `dummy-registry`)
[DOWNLOADING] crates ...
[DOWNLOADED] bar v0.0.2 (registry `dummy-registry`)
[ERROR] could not find `baz` in registry `[..]` with version `*`
[INSTALLING] foo v0.0.1
[COMPILING] foo v0.0.1
[FINISHED] release [optimized] target(s) in [..]
[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
[INSTALLED] package `foo v0.0.1` (executable `foo[EXE]`)
[INSTALLING] bar v0.0.2
[COMPILING] bar v0.0.2
[FINISHED] release [optimized] target(s) in [..]
[INSTALLING] [CWD]/home/.cargo/bin/bar[EXE]
[INSTALLED] package `bar v0.0.2` (executable `bar[EXE]`)
[SUMMARY] Successfully installed foo, bar! Failed to install baz (see error(s) above).
[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
[ERROR] some crates failed to install
",
        )
        .run();
    assert_has_installed_exe(cargo_home(), "foo");
    assert_has_installed_exe(cargo_home(), "bar");

    cargo_process("uninstall foo bar")
        .with_stderr(
            "\
[REMOVING] [CWD]/home/.cargo/bin/foo[EXE]
[REMOVING] [CWD]/home/.cargo/bin/bar[EXE]
[SUMMARY] Successfully uninstalled foo, bar!
",
        )
        .run();

    assert_has_not_installed_exe(cargo_home(), "foo");
    assert_has_not_installed_exe(cargo_home(), "bar");
}

// Helper: the entries of the PATH environment variable as a Vec of paths
// (empty Vec when PATH is unset).
fn path() -> Vec<PathBuf> {
    env::split_paths(&env::var_os("PATH").unwrap_or_default()).collect()
}

#[cargo_test]
fn multiple_pkgs_path_set() {
    // confirm partial failure results in 101 status code and does not have the
    // '[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries'
    // even if CARGO_HOME/bin is in the PATH
    pkg("foo", "0.0.1");
    pkg("bar", "0.0.2");

    // add CARGO_HOME/bin to path
    let mut path = path();
    path.push(cargo_home().join("bin"));
    let new_path = env::join_paths(path).unwrap();
    cargo_process("install foo bar baz")
        .env("PATH", new_path)
        .with_status(101)
        .with_stderr(
            "\
[UPDATING] `[..]` index
[DOWNLOADING] crates ...
[DOWNLOADED] foo v0.0.1 (registry `dummy-registry`)
[DOWNLOADING] crates ...
[DOWNLOADED] bar v0.0.2 (registry `dummy-registry`)
[ERROR] could not find `baz` in registry `[..]` with version `*`
[INSTALLING] foo v0.0.1
[COMPILING] foo v0.0.1
[FINISHED] release [optimized] target(s) in [..]
[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
[INSTALLED] package `foo v0.0.1` (executable `foo[EXE]`)
[INSTALLING] bar v0.0.2
[COMPILING] bar v0.0.2
[FINISHED] release [optimized] target(s) in [..]
[INSTALLING] [CWD]/home/.cargo/bin/bar[EXE]
[INSTALLED] package `bar v0.0.2` (executable `bar[EXE]`)
[SUMMARY] Successfully installed foo, bar! Failed to install baz (see error(s) above).
[ERROR] some crates failed to install
",
        )
        .run();
    assert_has_installed_exe(cargo_home(), "foo");
    assert_has_installed_exe(cargo_home(), "bar");

    cargo_process("uninstall foo bar")
        .with_stderr(
            "\
[REMOVING] [CWD]/home/.cargo/bin/foo[EXE]
[REMOVING] [CWD]/home/.cargo/bin/bar[EXE]
[SUMMARY] Successfully uninstalled foo, bar!
",
        )
        .run();

    assert_has_not_installed_exe(cargo_home(), "foo");
    assert_has_not_installed_exe(cargo_home(), "bar");
}

// With no version requirement, `cargo install foo` selects the highest
// stable release (0.2.1) and skips pre-release versions (0.2.1-pre.1,
// 0.3.0-pre.2).
#[cargo_test]
fn pick_max_version() {
    pkg("foo", "0.1.0");
    pkg("foo", "0.2.0");
    pkg("foo", "0.2.1");
    pkg("foo", "0.2.1-pre.1");
    pkg("foo", "0.3.0-pre.2");

    cargo_process("install foo")
        .with_stderr(
            "\
[UPDATING] `[..]` index
[DOWNLOADING] crates ...
[DOWNLOADED] foo v0.2.1 (registry [..])
[INSTALLING] foo v0.2.1
[COMPILING] foo v0.2.1
[FINISHED] release [optimized] target(s) in [..]
[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
[INSTALLED] package `foo v0.2.1` (executable `foo[EXE]`)
[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
",
        )
        .run();
    assert_has_installed_exe(cargo_home(), "foo");
}

// A pre-release version CAN be installed from a git source when the package
// is named explicitly.
#[cargo_test]
fn installs_beta_version_by_explicit_name_from_git() {
    let p = git::repo(&paths::root().join("foo"))
        .file("Cargo.toml", &basic_manifest("foo", "0.3.0-beta.1"))
        .file("src/main.rs", "fn main() {}")
        .build();

    cargo_process("install --git")
        .arg(p.url().to_string())
        .arg("foo")
        .run();
    assert_has_installed_exe(cargo_home(), "foo");
}

// Installing a crate that is not in the registry fails with a clear error.
#[cargo_test]
fn missing() {
    pkg("foo", "0.0.1");
    cargo_process("install bar")
        .with_status(101)
        .with_stderr(
            "\
[UPDATING] [..] index
[ERROR] could not find `bar` in registry `[..]` with version `*`
",
        )
        .run();
}

// `cargo install .` is rejected with a hint to use `--path .` instead.
#[cargo_test]
fn missing_current_working_directory() {
    cargo_process("install .")
        .with_status(101)
        .with_stderr(
            "\
error: To install the binaries for the package in current working \
directory use `cargo install --path .`. Use `cargo build` if you \
want to simply build the package.
",
        )
        .run();
}

// A version requirement that matches no published version is an error.
#[cargo_test]
fn bad_version() {
    pkg("foo", "0.0.1");
    cargo_process("install foo --version=0.2.0")
        .with_status(101)
        .with_stderr(
            "\
[UPDATING] [..] index
[ERROR] could not find `foo` in registry `[..]` with version `=0.2.0`
",
        )
        .run();
}

// Various invalid path arguments: no crate given, no Cargo.toml, --path to a
// file instead of a directory, and an unparsable (empty) Cargo.toml.
#[cargo_test]
fn bad_paths() {
    cargo_process("install")
        .with_status(101)
        .with_stderr("[ERROR] `[CWD]` is not a crate root; specify a crate to install [..]")
        .run();

    cargo_process("install --path .")
        .with_status(101)
        .with_stderr("[ERROR] `[CWD]` does not contain a Cargo.toml file[..]")
        .run();

    let toml = paths::root().join("Cargo.toml");
    fs::write(toml, "").unwrap();
    cargo_process("install --path Cargo.toml")
        .with_status(101)
        .with_stderr("[ERROR] `[CWD]/Cargo.toml` is not a directory[..]")
        .run();

    cargo_process("install --path .")
        .with_status(101)
        .with_stderr_contains("[ERROR] failed to parse manifest at `[CWD]/Cargo.toml`")
        .run();
}

// Install root precedence, highest to lowest:
// `--root` flag > CARGO_INSTALL_ROOT env var > `install.root` config >
// default cargo home. Each step below removes the higher-priority source.
#[cargo_test]
fn install_location_precedence() {
    pkg("foo", "0.0.1");

    let root = paths::root();
    let t1 = root.join("t1");
    let t2 = root.join("t2");
    let t3 = root.join("t3");
    let t4 = cargo_home();

    fs::create_dir(root.join(".cargo")).unwrap();
    fs::write(
        root.join(".cargo/config"),
        &format!(
            "[install]
root = '{}'
",
            t3.display()
        ),
    )
    .unwrap();

    println!("install --root");

    cargo_process("install foo --root")
        .arg(&t1)
        .env("CARGO_INSTALL_ROOT", &t2)
        .run();
    assert_has_installed_exe(&t1, "foo");
    assert_has_not_installed_exe(&t2, "foo");

    println!("install CARGO_INSTALL_ROOT");

    cargo_process("install foo")
        .env("CARGO_INSTALL_ROOT", &t2)
        .run();
    assert_has_installed_exe(&t2, "foo");
    assert_has_not_installed_exe(&t3, "foo");

    println!("install install.root");

    cargo_process("install foo").run();
    assert_has_installed_exe(&t3, "foo");
    assert_has_not_installed_exe(&t4, "foo");

    fs::remove_file(root.join(".cargo/config")).unwrap();

    println!("install cargo home");

    cargo_process("install foo").run();
    assert_has_installed_exe(&t4, "foo");
}

// `cargo install --path` installs from a local directory; re-running it
// forces a reinstall (checked in the continuation of this test below).
#[cargo_test]
fn install_path() {
    let p = project().file("src/main.rs", "fn main() {}").build();

    cargo_process("install --path").arg(p.root()).run();
    assert_has_installed_exe(cargo_home(), "foo");
    // path-style installs force a reinstall
// installs force a reinstall
    p.cargo("install --path .")
        .with_stderr(
            "\
[INSTALLING] foo v0.0.1 [..]
[FINISHED] release [..]
[REPLACING] [..]/.cargo/bin/foo[EXE]
[REPLACED] package `foo v0.0.1 [..]` with `foo v0.0.1 [..]` (executable `foo[EXE]`)
[WARNING] be sure to add [..]
",
        )
        .run();
}

// `--target-dir` redirects build artifacts to the given directory while the
// binary is still installed into cargo home.
#[cargo_test]
fn install_target_dir() {
    let p = project().file("src/main.rs", "fn main() {}").build();

    p.cargo("install --target-dir td_test")
        .with_stderr(
            "\
[WARNING] Using `cargo install` [..]
[INSTALLING] foo v0.0.1 [..]
[COMPILING] foo v0.0.1 [..]
[FINISHED] release [..]
[INSTALLING] [..]foo[EXE]
[INSTALLED] package `foo v0.0.1 [..]foo[..]` (executable `foo[EXE]`)
[WARNING] be sure to add [..]
",
        )
        .run();

    let mut path = p.root();
    path.push("td_test");
    assert!(path.exists());

    #[cfg(not(windows))]
    path.push("release/foo");
    #[cfg(windows)]
    path.push("release/foo.exe");
    assert!(path.exists());
}

// On case-sensitive filesystems a lowercase `cargo.toml` is detected and the
// error suggests renaming it to `Cargo.toml`.
#[cargo_test]
#[cfg(target_os = "linux")]
fn install_path_with_lowercase_cargo_toml() {
    let toml = paths::root().join("cargo.toml");
    fs::write(toml, "").unwrap();
    cargo_process("install --path .")
        .with_status(101)
        .with_stderr(
            "\
[ERROR] `[CWD]` does not contain a Cargo.toml file, \
but found cargo.toml please try to rename it to Cargo.toml. \
--path must point to a directory containing a Cargo.toml file.
",
        )
        .run();
}

// `--path` pointing outside the current workspace works, and the `--bin`
// error message lists the target package's binaries.
#[cargo_test]
fn install_relative_path_outside_current_ws() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "bar"
                version = "0.1.0"
                authors = []

                [workspace]
                members = ["baz"]
            "#,
        )
        .file("src/main.rs", "fn main() {}")
        .file(
            "baz/Cargo.toml",
            r#"
                [package]
                name = "baz"
                version = "0.1.0"
                authors = []
                edition = "2021"

                [dependencies]
                foo = "1"
            "#,
        )
        .file("baz/src/lib.rs", "")
        .build();

    let _bin_project = project_in("bar")
        .file("src/main.rs", "fn main() {}")
        .build();

    p.cargo("install --path ../bar/foo")
        .with_stderr(&format!(
            "\
[INSTALLING] foo v0.0.1 ([..]/bar/foo)
[COMPILING] foo v0.0.1 ([..]/bar/foo)
[FINISHED] release [..]
[INSTALLING] {home}/bin/foo[EXE]
[INSTALLED] package `foo v0.0.1 ([..]/bar/foo)` (executable `foo[EXE]`)
[WARNING] be sure to add [..]
",
            home = cargo_home().display(),
        ))
        .run();

    // Validate the workspace error message to display available targets.
    // NOTE(review): exact layout of the "Available binaries" listing could not
    // be recovered from the mangled extraction — TODO confirm against upstream.
    p.cargo("install --path ../bar/foo --bin")
        .with_status(101)
        .with_stderr(
            "\
[ERROR] \"--bin\" takes one argument.
Available binaries:
    foo

",
        )
        .run();
}

// A git repo containing two binary packages without a selection is an error.
#[cargo_test]
fn multiple_crates_error() {
    let p = git::repo(&paths::root().join("foo"))
        .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
        .file("src/main.rs", "fn main() {}")
        .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0"))
        .file("a/src/main.rs", "fn main() {}")
        .build();

    cargo_process("install --git")
        .arg(p.url().to_string())
        .with_status(101)
        .with_stderr(
            "\
[UPDATING] git repository [..]
[ERROR] multiple packages with binaries found: bar, foo. \
When installing a git repository, cargo will always search the entire repo for any Cargo.toml. \
Please specify which to install.
",
        )
        .run();
}

// Explicitly naming one package from a multi-package git repo installs only
// that package.
#[cargo_test]
fn multiple_crates_select() {
    let p = git::repo(&paths::root().join("foo"))
        .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
        .file("src/main.rs", "fn main() {}")
        .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0"))
        .file("a/src/main.rs", "fn main() {}")
        .build();

    cargo_process("install --git")
        .arg(p.url().to_string())
        .arg("foo")
        .run();
    assert_has_installed_exe(cargo_home(), "foo");
    assert_has_not_installed_exe(cargo_home(), "bar");

    cargo_process("install --git")
        .arg(p.url().to_string())
        .arg("bar")
        .run();
    assert_has_installed_exe(cargo_home(), "bar");
}

// Multiple workspace members from one git URL can be named in a single
// `cargo install` invocation.
#[cargo_test]
fn multiple_crates_git_all() {
    let p = git::repo(&paths::root().join("foo"))
        .file(
            "Cargo.toml",
            r#"
                [workspace]
                members = ["bin1", "bin2"]
            "#,
        )
        .file("bin1/Cargo.toml", &basic_manifest("bin1", "0.1.0"))
        .file("bin2/Cargo.toml", &basic_manifest("bin2", "0.1.0"))
        .file(
            "bin1/src/main.rs",
            r#"fn main() { println!("Hello, world!"); }"#,
        )
        .file(
            "bin2/src/main.rs",
            r#"fn main() { println!("Hello, world!"); }"#,
        )
        .build();

    cargo_process(&format!("install --git {} bin1 bin2", p.url().to_string())).run();
}

// With a binary root package plus a library path-dependency, the binary is
// selected automatically.
#[cargo_test]
fn multiple_crates_auto_binaries() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.1.0"
                authors = []

                [dependencies]
                bar = { path = "a" }
            "#,
        )
        .file("src/main.rs", "extern crate bar; fn main() {}")
        .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0"))
        .file("a/src/lib.rs", "")
        .build();

    cargo_process("install --path").arg(p.root()).run();
    assert_has_installed_exe(cargo_home(), "foo");
}

// `--example=foo` installs an example target from a library-only package.
#[cargo_test]
fn multiple_crates_auto_examples() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.1.0"
                authors = []

                [dependencies]
                bar = { path = "a" }
            "#,
        )
        .file("src/lib.rs", "extern crate bar;")
        .file(
            "examples/foo.rs",
            "
            extern crate bar;
            extern crate foo;
            fn main() {}
        ",
        )
        .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0"))
        .file("a/src/lib.rs", "")
        .build();

    cargo_process("install --path")
        .arg(p.root())
        .arg("--example=foo")
        .run();
    assert_has_installed_exe(cargo_home(), "foo");
}

// A library-only package with no binaries and no examples cannot be installed.
#[cargo_test]
fn no_binaries_or_examples() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.1.0"
                authors = []

                [dependencies]
                bar = { path = "a" }
            "#,
        )
        .file("src/lib.rs", "")
        .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0"))
        .file("a/src/lib.rs", "")
        .build();

    cargo_process("install --path")
        .arg(p.root())
        .with_status(101)
        .with_stderr("[ERROR] no packages found with binaries or examples")
        .run();
}

// A package that has only examples (no binaries) errors when installed
// without `--example`.
#[cargo_test]
fn no_binaries() {
    let p = project()
        .file("src/lib.rs", "")
        .file("examples/foo.rs", "fn main() {}")
        .build();

    cargo_process("install --path")
        .arg(p.root())
        .arg("foo")
        .with_status(101)
        .with_stderr(
            "\
[ERROR] there is nothing to install in `foo v0.0.1 ([..])`, because it has no binaries[..]
[..]
[..]",
        )
        .run();
}

// `--example=foo` on an example-only package succeeds.
#[cargo_test]
fn examples() {
    let p = project()
        .file("src/lib.rs", "")
        .file("examples/foo.rs", "extern crate foo; fn main() {}")
        .build();

    cargo_process("install --path")
        .arg(p.root())
        .arg("--example=foo")
        .run();
    assert_has_installed_exe(cargo_home(), "foo");
}

// `--force` replaces an already-installed package and `--list` reflects the
// new version only.
#[cargo_test]
fn install_force() {
    let p = project().file("src/main.rs", "fn main() {}").build();

    cargo_process("install --path").arg(p.root()).run();

    let p = project()
        .at("foo2")
        .file("Cargo.toml", &basic_manifest("foo", "0.2.0"))
        .file("src/main.rs", "fn main() {}")
        .build();

    cargo_process("install --force --path")
        .arg(p.root())
        .with_stderr(
            "\
[INSTALLING] foo v0.2.0 ([..])
[COMPILING] foo v0.2.0 ([..])
[FINISHED] release [optimized] target(s) in [..]
[REPLACING] [CWD]/home/.cargo/bin/foo[EXE]
[REPLACED] package `foo v0.0.1 ([..]/foo)` with `foo v0.2.0 ([..]/foo2)` (executable `foo[EXE]`)
[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
",
        )
        .run();

    // NOTE(review): leading whitespace of the binary-name line could not be
    // recovered exactly from the mangled extraction — TODO confirm.
    cargo_process("install --list")
        .with_stdout(
            "\
foo v0.2.0 ([..]):
    foo[..]
",
        )
        .run();
}

// Forced install where the binary sets only partially overlap: new binary is
// installed, shared binary is replaced, and the stale one is removed.
#[cargo_test]
fn install_force_partial_overlap() {
    let p = project()
        .file("src/bin/foo-bin1.rs", "fn main() {}")
        .file("src/bin/foo-bin2.rs", "fn main() {}")
        .build();

    cargo_process("install --path").arg(p.root()).run();

    let p = project()
        .at("foo2")
        .file("Cargo.toml", &basic_manifest("foo", "0.2.0"))
        .file("src/bin/foo-bin2.rs", "fn main() {}")
        .file("src/bin/foo-bin3.rs", "fn main() {}")
        .build();

    cargo_process("install --force --path")
        .arg(p.root())
        .with_stderr(
            "\
[INSTALLING] foo v0.2.0 ([..])
[COMPILING] foo v0.2.0 ([..])
[FINISHED] release [optimized] target(s) in [..]
[INSTALLING] [CWD]/home/.cargo/bin/foo-bin3[EXE]
[REPLACING] [CWD]/home/.cargo/bin/foo-bin2[EXE]
[REMOVING] executable `[..]/bin/foo-bin1[EXE]` from previous version foo v0.0.1 [..]
[INSTALLED] package `foo v0.2.0 ([..]/foo2)` (executable `foo-bin3[EXE]`)
[REPLACED] package `foo v0.0.1 ([..]/foo)` with `foo v0.2.0 ([..]/foo2)` (executable `foo-bin2[EXE]`)
[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
",
        )
        .run();

    // NOTE(review): leading whitespace of the binary-name lines could not be
    // recovered exactly from the mangled extraction — TODO confirm.
    cargo_process("install --list")
        .with_stdout(
            "\
foo v0.2.0 ([..]):
    foo-bin2[..]
    foo-bin3[..]
",
        )
        .run();
}

// `--force --bin NAME` replaces only the named binary; the other binary
// remains owned by the old package version.
#[cargo_test]
fn install_force_bin() {
    let p = project()
        .file("src/bin/foo-bin1.rs", "fn main() {}")
        .file("src/bin/foo-bin2.rs", "fn main() {}")
        .build();

    cargo_process("install --path").arg(p.root()).run();

    let p = project()
        .at("foo2")
        .file("Cargo.toml", &basic_manifest("foo", "0.2.0"))
        .file("src/bin/foo-bin1.rs", "fn main() {}")
        .file("src/bin/foo-bin2.rs", "fn main() {}")
        .build();

    cargo_process("install --force --bin foo-bin2 --path")
        .arg(p.root())
        .with_stderr(
            "\
[INSTALLING] foo v0.2.0 ([..])
[COMPILING] foo v0.2.0 ([..])
[FINISHED] release [optimized] target(s) in [..]
[REPLACING] [CWD]/home/.cargo/bin/foo-bin2[EXE]
[REPLACED] package `foo v0.0.1 ([..]/foo)` with `foo v0.2.0 ([..]/foo2)` (executable `foo-bin2[EXE]`)
[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
",
        )
        .run();

    // NOTE(review): leading whitespace of the binary-name lines could not be
    // recovered exactly from the mangled extraction — TODO confirm.
    cargo_process("install --list")
        .with_stdout(
            "\
foo v0.0.1 ([..]):
    foo-bin1[..]
foo v0.2.0 ([..]):
    foo-bin2[..]
",
        )
        .run();
}

// A compile failure surfaces both the compile error and a pointer to the
// intermediate artifacts left in the target directory.
#[cargo_test]
fn compile_failure() {
    let p = project().file("src/main.rs", "").build();

    cargo_process("install --path")
        .arg(p.root())
        .with_status(101)
        .with_stderr_contains(
            "\
[ERROR] could not compile `foo` due to previous error
[ERROR] failed to compile `foo v0.0.1 ([..])`, intermediate artifacts can be \
found at `[..]target`
",
        )
        .run();
}

// Installing straight from a git repository (continues below).
#[cargo_test]
fn git_repo() {
    let p = git::repo(&paths::root().join("foo"))
        .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
        .file("src/main.rs", "fn main() {}")
        .build();

    // Use `--locked` to test that we don't even try to write a lock file.
    cargo_process("install --locked --git")
        .arg(p.url().to_string())
        .with_stderr(
            "\
[UPDATING] git repository `[..]`
[WARNING] no Cargo.lock file published in foo v0.1.0 ([..])
[INSTALLING] foo v0.1.0 ([..])
[COMPILING] foo v0.1.0 ([..])
[FINISHED] release [optimized] target(s) in [..]
[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
[INSTALLED] package `foo v0.1.0 ([..]/foo#[..])` (executable `foo[EXE]`)
[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
",
        )
        .run();
    assert_has_installed_exe(cargo_home(), "foo");
    // NOTE(review): the assertion below duplicates the one above verbatim;
    // the second call is redundant and could be removed.
    assert_has_installed_exe(cargo_home(), "foo");
}

// A git source with only a lowercase `cargo.toml` gives the rename hint.
#[cargo_test]
#[cfg(target_os = "linux")]
fn git_repo_with_lowercase_cargo_toml() {
    let p = git::repo(&paths::root().join("foo"))
        .file("cargo.toml", &basic_manifest("foo", "0.1.0"))
        .file("src/main.rs", "fn main() {}")
        .build();

    cargo_process("install --git")
        .arg(p.url().to_string())
        .with_status(101)
        .with_stderr(
            "\
[UPDATING] git repository [..]
[ERROR] Could not find Cargo.toml in `[..]`, but found cargo.toml please try to rename it to Cargo.toml
",
        )
        .run();
}

// `cargo install --list` is empty before any install and lists installed
// packages (with the exact installed versions) afterwards.
#[cargo_test]
fn list() {
    pkg("foo", "0.0.1");
    pkg("bar", "0.2.1");
    pkg("bar", "0.2.2");

    cargo_process("install --list").with_stdout("").run();
    cargo_process("install bar --version =0.2.1").run();
    cargo_process("install foo").run();
    // NOTE(review): leading whitespace of the binary-name lines could not be
    // recovered exactly from the mangled extraction — TODO confirm.
    cargo_process("install --list")
        .with_stdout(
            "\
bar v0.2.1:
    bar[..]
foo v0.0.1:
    foo[..]
",
        )
        .run();
}

// Corrupting `.crates.toml` makes `--list` fail with a TOML parse error
// (blank-line layout of the Caused-by chain reconstructed — TODO confirm).
#[cargo_test]
fn list_error() {
    pkg("foo", "0.0.1");
    cargo_process("install foo").run();
    cargo_process("install --list")
        .with_stdout(
            "\
foo v0.0.1:
    foo[..]
",
        )
        .run();
    let mut worldfile_path = cargo_home();
    worldfile_path.push(".crates.toml");
    let mut worldfile = OpenOptions::new()
        .write(true)
        .open(worldfile_path)
        .expect(".crates.toml should be there");
    worldfile.write_all(b"\x00").unwrap();
    drop(worldfile);
    cargo_process("install --list --verbose")
        .with_status(101)
        .with_stderr(
            "\
[ERROR] failed to parse crate metadata at `[..]`

Caused by:
  invalid TOML found for metadata

Caused by:
  TOML parse error at line 1, column 1
    |
  1 | [..]
    | ^
  Unexpected `[..]`
  Expected key or end of input
",
        )
        .run();
}

// Uninstalling a package that was never installed is an error.
#[cargo_test]
fn uninstall_pkg_does_not_exist() {
    cargo_process("uninstall foo")
        .with_status(101)
        .with_stderr("[ERROR] package ID specification `foo` did not match any packages")
        .run();
}

// `uninstall --bin` with a binary name the package does not own is an error.
#[cargo_test]
fn uninstall_bin_does_not_exist() {
    pkg("foo", "0.0.1");

    cargo_process("install foo").run();
    cargo_process("uninstall foo --bin=bar")
        .with_status(101)
        .with_stderr("[ERROR] binary `bar[..]` not installed as part of `foo v0.0.1`")
        .run();
}

// Binaries of one package can be uninstalled one at a time with `--bin`;
// once the last one is gone, the package itself no longer matches.
#[cargo_test]
fn uninstall_piecemeal() {
    let p = project()
        .file("src/bin/foo.rs", "fn main() {}")
        .file("src/bin/bar.rs", "fn main() {}")
        .build();

    cargo_process("install --path").arg(p.root()).run();
    assert_has_installed_exe(cargo_home(), "foo");
    assert_has_installed_exe(cargo_home(), "bar");

    cargo_process("uninstall foo --bin=bar")
        .with_stderr("[REMOVING] [..]bar[..]")
        .run();

    assert_has_installed_exe(cargo_home(), "foo");
    assert_has_not_installed_exe(cargo_home(), "bar");

    cargo_process("uninstall foo --bin=foo")
        .with_stderr("[REMOVING] [..]foo[..]")
        .run();
    assert_has_not_installed_exe(cargo_home(), "foo");

    cargo_process("uninstall foo")
        .with_status(101)
        .with_stderr("[ERROR] package ID specification `foo` did not match any packages")
        .run();
}

// An installed `cargo-foo` binary becomes available as the `cargo foo`
// subcommand and shows up in `cargo --list` (continues below).
#[cargo_test]
fn subcommand_works_out_of_the_box() {
    Package::new("cargo-foo", "1.0.0")
        .file("src/main.rs", r#"fn main() { println!("bar"); }"#)
        .publish();
    cargo_process("install cargo-foo").run();
    cargo_process("foo").with_stdout("bar\n").run();
    cargo_process("--list")
        .with_stdout_contains(" foo\n")
        .run();
}

// Bare `cargo install` in a pre-2018 package still works but warns that the
// form is deprecated in favor of `--path .`.
#[cargo_test]
fn installs_from_cwd_by_default() {
    let p = project().file("src/main.rs", "fn main() {}").build();

    p.cargo("install")
        .with_stderr_contains(
            "warning: Using `cargo install` to install the binaries for the \
             package in current working directory is deprecated, \
             use `cargo install --path .` instead. \
             Use `cargo build` if you want to simply build the package.",
        )
        .run();
    assert_has_installed_exe(cargo_home(), "foo");
}

// With edition 2018 the deprecated bare `cargo install` form is a hard error.
#[cargo_test]
fn installs_from_cwd_with_2018_warnings() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.1.0"
                authors = []
                edition = "2018"
            "#,
        )
        .file("src/main.rs", "fn main() {}")
        .build();

    p.cargo("install")
        .with_status(101)
        .with_stderr_contains(
            "error: Using `cargo install` to install the binaries for the \
             package in current working directory is no longer supported, \
             use `cargo install --path .` instead. \
             Use `cargo build` if you want to simply build the package.",
        )
        .run();
    assert_has_not_installed_exe(cargo_home(), "foo");
}

// `cargo uninstall` with no arguments removes the package of the current
// working directory.
#[cargo_test]
fn uninstall_cwd() {
    let p = project().file("src/main.rs", "fn main() {}").build();
    p.cargo("install --path .")
        .with_stderr(&format!(
            "\
[INSTALLING] foo v0.0.1 ([CWD])
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] release [optimized] target(s) in [..]
[INSTALLING] {home}/bin/foo[EXE]
[INSTALLED] package `foo v0.0.1 ([..]/foo)` (executable `foo[EXE]`)
[WARNING] be sure to add `{home}/bin` to your PATH to be able to run the installed binaries",
            home = cargo_home().display(),
        ))
        .run();
    assert_has_installed_exe(cargo_home(), "foo");

    p.cargo("uninstall")
        .with_stdout("")
        .with_stderr(&format!(
            "[REMOVING] {home}/bin/foo[EXE]",
            home = cargo_home().display()
        ))
        .run();
    assert_has_not_installed_exe(cargo_home(), "foo");
}

// Argument-less uninstall of a package that was never installed is an error.
#[cargo_test]
fn uninstall_cwd_not_installed() {
    let p = project().file("src/main.rs", "fn main() {}").build();
    p.cargo("uninstall")
        .with_status(101)
        .with_stdout("")
        .with_stderr("error: package `foo v0.0.1 ([CWD])` is not installed")
        .run();
}

// Argument-less uninstall outside any package fails reading Cargo.toml with
// the platform's "no such file" message.
#[cargo_test]
fn uninstall_cwd_no_project() {
    cargo_process("uninstall")
        .with_status(101)
        .with_stdout("")
        .with_stderr(format!(
            "\
[ERROR] failed to read `[CWD]/Cargo.toml`

Caused by:
  {err_msg}",
            err_msg = no_such_file_err_msg(),
        ))
        .run();
}

// A prior `cargo build --release` is reused: install emits no [COMPILING]
// line and leaves the local target directory intact.
#[cargo_test]
fn do_not_rebuilds_on_local_install() {
    let p = project().file("src/main.rs", "fn main() {}").build();

    p.cargo("build --release").run();
    cargo_process("install --path")
        .arg(p.root())
        .with_stderr(
            "\
[INSTALLING] [..]
[FINISHED] release [optimized] target(s) in [..]
[INSTALLING] [..]
[INSTALLED] package `foo v0.0.1 ([..]/foo)` (executable `foo[EXE]`)
[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
",
        )
        .run();

    assert!(p.build_dir().exists());
    assert!(p.release_bin("foo").exists());
    assert_has_installed_exe(cargo_home(), "foo");
}

// A panicking subcommand propagates its failure status back through cargo.
#[cargo_test]
fn reports_unsuccessful_subcommand_result() {
    Package::new("cargo-fail", "1.0.0")
        .file("src/main.rs", "fn main() { panic!(); }")
        .publish();
    cargo_process("install cargo-fail").run();
    cargo_process("--list")
        .with_stdout_contains(" fail\n")
        .run();
    cargo_process("fail")
        .with_status(101)
        .with_stderr_contains("thread '[..]' panicked at 'explicit panic', [..]")
        .run();
}

// A git source that ships its own Cargo.lock installs cleanly.
#[cargo_test]
fn git_with_lockfile() {
    let p = git::repo(&paths::root().join("foo"))
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.1.0"
                authors = []

                [dependencies]
                bar = { path = "bar" }
            "#,
        )
        .file("src/main.rs", "fn main() {}")
        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
        .file("bar/src/lib.rs", "fn main() {}")
        .file(
            "Cargo.lock",
            r#"
                [[package]]
                name = "foo"
                version = "0.1.0"
                dependencies = [ "bar 0.1.0" ]

                [[package]]
                name = "bar"
                version = "0.1.0"
            "#,
        )
        .build();

    cargo_process("install --git")
        .arg(p.url().to_string())
        .run();
}

// `-q` suppresses all warnings: stderr is completely empty.
#[cargo_test]
fn q_silences_warnings() {
    let p = project().file("src/main.rs", "fn main() {}").build();

    cargo_process("install -q --path")
        .arg(p.root())
        .with_stderr("")
        .run();
}

// Running install from a read-only working directory still works (nothing is
// written to the cwd).
#[cargo_test]
fn readonly_dir() {
    pkg("foo", "0.0.1");

    let root = paths::root();
    let dir = &root.join("readonly");
    fs::create_dir(root.join("readonly")).unwrap();
    let mut perms = fs::metadata(dir).unwrap().permissions();
    perms.set_readonly(true);
    fs::set_permissions(dir, perms).unwrap();

    cargo_process("install foo").cwd(dir).run();
    assert_has_installed_exe(cargo_home(), "foo");
}

// Installing from inside a workspace must not modify the workspace lockfile.
#[cargo_test]
fn use_path_workspace() {
    Package::new("foo", "1.0.0").publish();
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "bar"
                version = "0.1.0"
                authors = []

                [workspace]
                members = ["baz"]
            "#,
        )
        .file("src/main.rs", "fn main() {}")
        .file(
            "baz/Cargo.toml",
            r#"
                [package]
                name = "baz"
                version = "0.1.0"
                authors = []

                [dependencies]
                foo = "1"
            "#,
        )
        .file("baz/src/lib.rs", "")
        .build();

    p.cargo("build").run();
    let lock = p.read_lockfile();
    p.cargo("install").run();
    let lock2 = p.read_lockfile();
    assert_eq!(lock, lock2, "different lockfiles");
}

// `cargo install` ignores dev-dependencies (a build of the same package
// fails on the unresolvable dev-dep, install succeeds).
#[cargo_test]
fn dev_dependencies_no_check() {
    Package::new("foo", "1.0.0").publish();
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "bar"
                version = "0.1.0"
                authors = []

                [dev-dependencies]
                baz = "1.0.0"
            "#,
        )
        .file("src/main.rs", "fn main() {}")
        .build();

    p.cargo("build")
        .with_status(101)
        .with_stderr_contains("[..] no matching package named `baz` found")
        .run();
    p.cargo("install").run();
}

// A path dev-dependency does not cause install to rewrite the lockfile.
#[cargo_test]
fn dev_dependencies_lock_file_untouched() {
    Package::new("foo", "1.0.0").publish();
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.1.0"
                authors = []

                [dev-dependencies]
                bar = { path = "a" }
            "#,
        )
        .file("src/main.rs", "fn main() {}")
        .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0"))
        .file("a/src/lib.rs", "")
        .build();

    p.cargo("build").run();
    let lock = p.read_lockfile();
    p.cargo("install").run();
    let lock2 = p.read_lockfile();
    assert!(lock == lock2, "different lockfiles");
}

// `--target` with the host triple behaves like a plain install.
#[cargo_test]
fn install_target_native() {
    pkg("foo", "0.1.0");

    cargo_process("install foo --target")
        .arg(cargo_test_support::rustc_host())
        .run();
    assert_has_installed_exe(cargo_home(), "foo");
}

// `--target` with a cross-compilation triple (skipped when cross builds are
// disabled in the test environment).
#[cargo_test]
fn install_target_foreign() {
    if cross_compile::disabled() {
        return;
    }

    pkg("foo", "0.1.0");

    cargo_process("install foo --target")
        .arg(cross_compile::alternate())
        .run();
    assert_has_installed_exe(cargo_home(), "foo");
}

// The deprecated `--vers` alias selects an exact version.
#[cargo_test]
fn vers_precise() {
    pkg("foo", "0.1.1");
    pkg("foo", "0.1.2");

    cargo_process("install foo --vers 0.1.1")
        .with_stderr_contains("[DOWNLOADED] foo v0.1.1 (registry [..])")
        .run();
}

// `--version` selects an exact version (continues below).
#[cargo_test]
fn version_precise() {
    pkg("foo", "0.1.1");
    pkg("foo", "0.1.2");

    cargo_process("install foo --version 0.1.1")
        .with_stderr_contains("[DOWNLOADED] foo v0.1.1 (registry [..])")
        .run();
}

// The inline `name@version` syntax selects an exact version.
#[cargo_test]
fn inline_version_precise() {
    pkg("foo", "0.1.1");
    pkg("foo", "0.1.2");

    cargo_process("install foo@0.1.1")
        .with_stderr_contains("[DOWNLOADED] foo v0.1.1 (registry [..])")
        .run();
}

// Inline versions work independently for each of several crates.
#[cargo_test]
fn inline_version_multiple() {
    pkg("foo", "0.1.0");
    pkg("foo", "0.1.1");
    pkg("foo", "0.1.2");
    pkg("bar", "0.2.0");
    pkg("bar", "0.2.1");
    pkg("bar", "0.2.2");

    cargo_process("install foo@0.1.1 bar@0.2.1")
        .with_stderr_contains("[DOWNLOADED] foo v0.1.1 (registry [..])")
        .with_stderr_contains("[DOWNLOADED] bar v0.2.1 (registry [..])")
        .run();
}

// `@version` with no crate name before the `@` is rejected.
#[cargo_test]
fn inline_version_without_name() {
    pkg("foo", "0.1.1");
    pkg("foo", "0.1.2");

    cargo_process("install @0.1.1")
        .with_status(101)
        .with_stderr("error: missing crate name for `@0.1.1`")
        .run();
}

// Combining the inline `@version` syntax with `--version` is rejected even
// when the two agree.
#[cargo_test]
fn inline_and_explicit_version() {
    pkg("foo", "0.1.1");
    pkg("foo", "0.1.2");

    cargo_process("install foo@0.1.1 --version 0.1.1")
        .with_status(101)
        .with_stderr("error: cannot specify both `@0.1.1` and `--version`")
        .run();
}

// `--version` and its alias `--vers` cannot both be given (clap rejects the
// duplicate before cargo runs, hence exit status 1).
#[cargo_test]
fn not_both_vers_and_version() {
    pkg("foo", "0.1.1");
    pkg("foo", "0.1.2");

    cargo_process("install foo --version 0.1.1 --vers 0.1.2")
        .with_status(1)
        .with_stderr_contains(
            "\
error: The argument '--version ' was provided more than once, \
but cannot be used multiple times
",
        )
        .run();
}

// An scp-like URL (`host:path`) is a cannot-be-a-base URL and is rejected.
#[cargo_test]
fn test_install_git_cannot_be_a_base_url() {
    cargo_process("install --git github.com:rust-lang/rustfmt.git")
        .with_status(101)
        .with_stderr(
            "\
[ERROR] invalid url `github.com:rust-lang/rustfmt.git`: cannot-be-a-base-URLs are not supported",
        )
        .run();
}

// `uninstall --bin` only makes sense with a single package spec.
#[cargo_test]
fn uninstall_multiple_and_specifying_bin() {
    cargo_process("uninstall foo bar --bin baz")
        .with_status(101)
        .with_stderr("\
[ERROR] A binary can only be associated with a single installed package, specifying multiple specs with --bin is redundant.")
        .run();
}

// `uninstall -p` with no value is rejected with a pointer to the SPEC docs.
#[cargo_test]
fn uninstall_with_empty_package_option() {
    cargo_process("uninstall -p")
        .with_status(101)
        .with_stderr(
            "\
[ERROR] \"--package \" requires a SPEC format value.
Run `cargo help pkgid` for more information about SPEC format.
",
        )
        .run();
}

// Uninstalling several packages where one is unknown: the known one is
// removed, and a [SUMMARY] reports the partial failure with exit 101.
#[cargo_test]
fn uninstall_multiple_and_some_pkg_does_not_exist() {
    pkg("foo", "0.0.1");

    cargo_process("install foo").run();

    cargo_process("uninstall foo bar")
        .with_status(101)
        .with_stderr(
            "\
[REMOVING] [CWD]/home/.cargo/bin/foo[EXE]
error: package ID specification `bar` did not match any packages
[SUMMARY] Successfully uninstalled foo! Failed to uninstall bar (see error(s) above).
error: some packages failed to uninstall
",
        )
        .run();

    assert_has_not_installed_exe(cargo_home(), "foo");
    assert_has_not_installed_exe(cargo_home(), "bar");
}

// CARGO_TARGET_DIR is honored for git installs: without it no local target
// directory is created, with it artifacts land in ./target.
#[cargo_test]
fn custom_target_dir_for_git_source() {
    let p = git::repo(&paths::root().join("foo"))
        .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
        .file("src/main.rs", "fn main() {}")
        .build();

    cargo_process("install --git")
        .arg(p.url().to_string())
        .run();
    assert!(!paths::root().join("target/release").is_dir());

    cargo_process("install --force --git")
        .arg(p.url().to_string())
        .env("CARGO_TARGET_DIR", "target")
        .run();
    assert!(paths::root().join("target/release").is_dir());
}

// Without --locked the published Cargo.lock is ignored, so the broken bar
// 0.1.1 is selected and the build fails; with --locked the pinned bar 0.1.0
// is used (continues below).
#[cargo_test]
fn install_respects_lock_file() {
    // `cargo install` now requires --locked to use a Cargo.lock.
    Package::new("bar", "0.1.0").publish();
    Package::new("bar", "0.1.1")
        .file("src/lib.rs", "not rust")
        .publish();
    Package::new("foo", "0.1.0")
        .dep("bar", "0.1")
        .file("src/lib.rs", "")
        .file(
            "src/main.rs",
            "extern crate foo; extern crate bar; fn main() {}",
        )
        .file(
            "Cargo.lock",
            r#"
[[package]]
name = "bar"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"

[[package]]
name = "foo"
version = "0.1.0"
dependencies = [
 "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
"#,
        )
        .publish();

    cargo_process("install foo")
        .with_stderr_contains("[..]not rust[..]")
        .with_status(101)
        .run();
    cargo_process("install --locked foo").run();
}

#[cargo_test]
fn install_path_respects_lock_file() {
    // --path version of install_path_respects_lock_file, --locked is required
    // to use Cargo.lock.
    Package::new("bar", "0.1.0").publish();
    Package::new("bar", "0.1.1")
        .file("src/lib.rs", "not rust")
        .publish();
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.1.0"

                [dependencies]
                bar = "0.1"
            "#,
        )
        .file("src/main.rs", "extern crate bar; fn main() {}")
        .file(
            "Cargo.lock",
            r#"
[[package]]
name = "bar"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"

[[package]]
name = "foo"
version = "0.1.0"
dependencies = [
 "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
"#,
        )
        .build();

    p.cargo("install --path .")
        .with_stderr_contains("[..]not rust[..]")
        .with_status(101)
        .run();
    p.cargo("install --path . --locked").run();
}

// A published lockfile referring to path dependencies (no `source` fields)
// still resolves.
#[cargo_test]
fn lock_file_path_deps_ok() {
    Package::new("bar", "0.1.0").publish();

    Package::new("foo", "0.1.0")
        .dep("bar", "0.1")
        .file("src/lib.rs", "")
        .file(
            "src/main.rs",
            "extern crate foo; extern crate bar; fn main() {}",
        )
        .file(
            "Cargo.lock",
            r#"
[[package]]
name = "bar"
version = "0.1.0"

[[package]]
name = "foo"
version = "0.1.0"
dependencies = [
 "bar 0.1.0",
]
"#,
        )
        .publish();

    cargo_process("install foo").run();
}

// An empty-string crate argument is rejected by argument parsing (bug 5229).
#[cargo_test]
fn install_empty_argument() {
    // Bug 5229
    cargo_process("install")
        .arg("")
        .with_status(1)
        .with_stderr_contains(
            "[ERROR] The argument '...' requires a value but none was supplied",
        )
        .run();
}

// `.crates.toml` tracks the installed git revision: a forced reinstall after
// a new commit records the new revision.
#[cargo_test]
fn git_repo_replace() {
    let p = git::repo(&paths::root().join("foo"))
        .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
        .file("src/main.rs", "fn main() {}")
        .build();
    let repo = git2::Repository::open(&p.root()).unwrap();
    let old_rev = repo.revparse_single("HEAD").unwrap().id();
    cargo_process("install --git")
        .arg(p.url().to_string())
        .run();
    git::commit(&repo);
    let new_rev = repo.revparse_single("HEAD").unwrap().id();
    let mut path = paths::home();
    path.push(".cargo/.crates.toml");

    assert_ne!(old_rev, new_rev);
    assert!(fs::read_to_string(path.clone())
        .unwrap()
        .contains(&format!("{}", old_rev)));
    cargo_process("install --force --git")
        .arg(p.url().to_string())
        .run();
    assert!(fs::read_to_string(path)
        .unwrap()
        .contains(&format!("{}", new_rev)));
}

// Installing a member of a workspace that was already built reuses the
// workspace target directory (no [COMPILING] line).
#[cargo_test]
fn workspace_uses_workspace_target_dir() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.1.0"
                authors = []

                [workspace]

                [dependencies]
                bar = { path = 'bar' }
            "#,
        )
        .file("src/main.rs", "fn main() {}")
        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
        .file("bar/src/main.rs", "fn main() {}")
        .build();

    p.cargo("build --release").cwd("bar").run();
    cargo_process("install --path")
        .arg(p.root().join("bar"))
        .with_stderr(
            "[INSTALLING] [..]
[FINISHED] release [optimized] target(s) in [..]
[INSTALLING] [..]
[INSTALLED] package `bar v0.1.0 ([..]/bar)` (executable `bar[EXE]`)
[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
",
        )
        .run();
}

// A registry install ignores the `.cargo/config` of the cwd project (its
// bogus [build] target would otherwise break the build).
#[cargo_test]
fn install_ignores_local_cargo_config() {
    pkg("bar", "0.0.1");

    let p = project()
        .file(
            ".cargo/config",
            r#"
                [build]
                target = "non-existing-target"
            "#,
        )
        .file("src/main.rs", "fn main() {}")
        .build();

    p.cargo("install bar").run();
    assert_has_installed_exe(cargo_home(), "bar");
}

// Likewise the `[unstable]` table of a local config is ignored during a
// registry install.
#[cargo_test]
fn install_ignores_unstable_table_in_local_cargo_config() {
    pkg("bar", "0.0.1");

    let p = project()
        .file(
            ".cargo/config",
            r#"
                [unstable]
                build-std = ["core"]
            "#,
        )
        .file("src/main.rs", "fn main() {}")
        .build();

    p.cargo("install bar")
        .masquerade_as_nightly_cargo(&["build-std"])
        .run();
    assert_has_installed_exe(cargo_home(), "bar");
}

// In contrast, the global (cargo home) config IS honored: its bogus target
// makes the install fail with `--target nonexistent`.
#[cargo_test]
fn install_global_cargo_config() {
    pkg("bar", "0.0.1");

    let config = cargo_home().join("config");
    let mut toml = fs::read_to_string(&config).unwrap_or_default();

    toml.push_str(
        r#"
            [build]
            target = 'nonexistent'
        "#,
    );
    fs::write(&config, toml).unwrap();

    cargo_process("install bar")
        .with_status(101)
        .with_stderr_contains("[..]--target nonexistent[..]")
        .run();
}

// A `--path` install DOES honor the config of the path being installed.
#[cargo_test]
fn install_path_config() {
    project()
        .file(
            ".cargo/config",
            r#"
                [build]
                target = 'nonexistent'
            "#,
        )
        .file("src/main.rs", "fn main() {}")
        .build();
    cargo_process("install --path foo")
        .with_status(101)
        .with_stderr_contains("[..]--target nonexistent[..]")
        .run();
}

// Version requirement styles (`*`, `^1.0`, `0.0.*`) are accepted without the
// "not a valid semver" warning and each resolves to the expected release.
#[cargo_test]
fn install_version_req() {
    // Try using a few versionreq styles.
    pkg("foo", "0.0.3");
    pkg("foo", "1.0.4");
    pkg("foo", "1.0.5");
    cargo_process("install foo --version=*")
        .with_stderr_does_not_contain("[WARNING][..]is not a valid semver[..]")
        .with_stderr_contains("[INSTALLING] foo v1.0.5")
        .run();
    cargo_process("uninstall foo").run();
    cargo_process("install foo --version=^1.0")
        .with_stderr_does_not_contain("[WARNING][..]is not a valid semver[..]")
        .with_stderr_contains("[INSTALLING] foo v1.0.5")
        .run();
    cargo_process("uninstall foo").run();
    cargo_process("install foo --version=0.0.*")
        .with_stderr_does_not_contain("[WARNING][..]is not a valid semver[..]")
        .with_stderr_contains("[INSTALLING] foo v0.0.3")
        .run();
}

// The workspace-root manifest is parsed for git installs: an invalid profile
// value there (`incremental = 3`) fails the install of a member.
#[cargo_test]
fn git_install_reads_workspace_manifest() {
    let p = git::repo(&paths::root().join("foo"))
        .file(
            "Cargo.toml",
            r#"
                [workspace]
                members = ["bin1"]

                [profile.release]
                incremental = 3
            "#,
        )
        .file("bin1/Cargo.toml", &basic_manifest("bin1", "0.1.0"))
        .file(
            "bin1/src/main.rs",
            r#"fn main() { println!("Hello, world!"); }"#,
        )
        .build();

    cargo_process(&format!("install --git {}", p.url().to_string()))
        .with_status(101)
        .with_stderr_contains(" invalid type: integer `3`[..]")
        .run();
}

#[cargo_test]
fn install_git_with_symlink_home() {
    // Ensure that `cargo install` with a git repo is OK when CARGO_HOME is a
    // symlink, and uses an build script.
    if !symlink_supported() {
        return;
    }
    let p = git::new("foo", |p| {
        p.file("Cargo.toml", &basic_manifest("foo", "1.0.0"))
            .file("src/main.rs", "fn main() {}")
            // This triggers discover_git_and_list_files for detecting changed files.
            .file("build.rs", "fn main() {}")
    });
    #[cfg(unix)]
    use std::os::unix::fs::symlink;
    #[cfg(windows)]
    use std::os::windows::fs::symlink_dir as symlink;

    let actual = paths::root().join("actual-home");
    t!(std::fs::create_dir(&actual));
    t!(symlink(&actual, paths::home().join(".cargo")));
    cargo_process("install --git")
        .arg(p.url().to_string())
        .with_stderr(
            "\
[UPDATING] git repository [..]
[INSTALLING] foo v1.0.0 [..]
[COMPILING] foo v1.0.0 [..]
[FINISHED] [..]
[INSTALLING] [..]home/.cargo/bin/foo[..]
[INSTALLED] package `foo [..]
[WARNING] be sure to add [..]
",
        )
        .run();
}

// A yanked version cannot be installed, even when requested exactly.
#[cargo_test]
fn install_yanked_cargo_package() {
    Package::new("baz", "0.0.1").yanked(true).publish();
    cargo_process("install baz --version 0.0.1")
        .with_status(101)
        .with_stderr_contains(
            "\
[ERROR] cannot install package `baz`, it has been yanked from registry `crates-io`
",
        )
        .run();
}

// Installing a registry crate while the cwd is a workspace with a non-root
// [patch] must not emit that workspace's patch warning (issue #8619).
#[cargo_test]
fn install_cargo_package_in_a_patched_workspace() {
    pkg("foo", "0.1.0");
    pkg("fizz", "1.0.0");

    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "bar"
                version = "0.1.0"
                authors = []

                [workspace]
                members = ["baz"]
            "#,
        )
        .file("src/main.rs", "fn main() {}")
        .file(
            "baz/Cargo.toml",
            r#"
                [package]
                name = "baz"
                version = "0.1.0"
                authors = []

                [dependencies]
                fizz = "1"

                [patch.crates-io]
                fizz = { version = "=1.0.0" }
            "#,
        )
        .file("baz/src/lib.rs", "")
        .build();

    let stderr = "\
[WARNING] patch for the non root package will be ignored, specify patch at the workspace root:
package:   [..]/foo/baz/Cargo.toml
workspace: [..]/foo/Cargo.toml
";
    p.cargo("check").with_stderr_contains(&stderr).run();

    // A crate installation must not emit any message from a workspace under
    // current working directory.
    // See https://github.com/rust-lang/cargo/issues/8619
    p.cargo("install foo")
        .with_stderr(
            "\
[UPDATING] `[..]` index
[DOWNLOADING] crates ...
[DOWNLOADED] foo v0.1.0 (registry [..])
[INSTALLING] foo v0.1.0
[COMPILING] foo v0.1.0
[FINISHED] release [optimized] target(s) in [..]
[INSTALLING] [..]foo[EXE]
[INSTALLED] package `foo v0.1.0` (executable `foo[EXE]`)
[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
",
        )
        .run();
    assert_has_installed_exe(cargo_home(), "foo");
}

// (Continues past this chunk.) Publishing without a lockfile and installing
// with --locked.
#[cargo_test]
fn locked_install_without_published_lockfile() {
    Package::new("foo", "0.1.0")
        .file("src/main.rs", "//!
Some docs\nfn main() {}") .publish(); cargo_process("install foo --locked") .with_stderr_contains("[WARNING] no Cargo.lock file published in foo v0.1.0") .run(); } #[cargo_test] fn install_semver_metadata() { // Check trying to install a package that uses semver metadata. // This uses alt registry because the bug this is exercising doesn't // trigger with a replaced source. registry::alt_init(); Package::new("foo", "1.0.0+abc") .alternative(true) .file("src/main.rs", "fn main() {}") .publish(); cargo_process("install foo --registry alternative --version 1.0.0+abc").run(); cargo_process("install foo --registry alternative") .with_stderr("\ [UPDATING] `alternative` index [IGNORED] package `foo v1.0.0+abc (registry `alternative`)` is already installed, use --force to override [WARNING] be sure to add [..] ") .run(); // "Updating" is not displayed here due to the --version fast-path. cargo_process("install foo --registry alternative --version 1.0.0+abc") .with_stderr("\ [IGNORED] package `foo v1.0.0+abc (registry `alternative`)` is already installed, use --force to override [WARNING] be sure to add [..] ") .run(); cargo_process("install foo --registry alternative --version 1.0.0 --force") .with_stderr( "\ [UPDATING] `alternative` index [INSTALLING] foo v1.0.0+abc (registry `alternative`) [COMPILING] foo v1.0.0+abc (registry `alternative`) [FINISHED] [..] [REPLACING] [ROOT]/home/.cargo/bin/foo[EXE] [REPLACED] package [..] [WARNING] be sure to add [..] ", ) .run(); // Check that from a fresh cache will work without metadata, too. paths::home().join(".cargo/registry").rm_rf(); paths::home().join(".cargo/bin").rm_rf(); cargo_process("install foo --registry alternative --version 1.0.0") .with_stderr( "\ [UPDATING] `alternative` index [DOWNLOADING] crates ... [DOWNLOADED] foo v1.0.0+abc (registry `alternative`) [INSTALLING] foo v1.0.0+abc (registry `alternative`) [COMPILING] foo v1.0.0+abc (registry `alternative`) [FINISHED] [..] 
[INSTALLING] [ROOT]/home/.cargo/bin/foo[EXE] [INSTALLED] package `foo v1.0.0+abc (registry `alternative`)` (executable `foo[EXE]`) [WARNING] be sure to add [..] ", ) .run(); } cargo-0.66.0/tests/testsuite/install_upgrade.rs000066400000000000000000000670241432416201200216200ustar00rootroot00000000000000//! Tests for `cargo install` where it upgrades a package if it is out-of-date. use cargo::core::PackageId; use std::collections::BTreeSet; use std::env; use std::fs; use std::path::PathBuf; use std::sync::atomic::{AtomicUsize, Ordering}; use toml_edit::easy as toml; use cargo_test_support::install::{cargo_home, exe}; use cargo_test_support::paths::CargoPathExt; use cargo_test_support::registry::{self, Package}; use cargo_test_support::{ basic_manifest, cargo_process, cross_compile, execs, git, process, project, Execs, }; fn pkg_maybe_yanked(name: &str, vers: &str, yanked: bool) { Package::new(name, vers) .yanked(yanked) .file( "src/main.rs", r#"fn main() { println!("{}", env!("CARGO_PKG_VERSION")) }"#, ) .publish(); } // Helper for publishing a package. fn pkg(name: &str, vers: &str) { pkg_maybe_yanked(name, vers, false) } fn v1_path() -> PathBuf { cargo_home().join(".crates.toml") } fn v2_path() -> PathBuf { cargo_home().join(".crates2.json") } fn load_crates1() -> toml::Value { toml::from_str(&fs::read_to_string(v1_path()).unwrap()).unwrap() } fn load_crates2() -> serde_json::Value { serde_json::from_str(&fs::read_to_string(v2_path()).unwrap()).unwrap() } fn installed_exe(name: &str) -> PathBuf { cargo_home().join("bin").join(exe(name)) } /// Helper for executing binaries installed by cargo. fn installed_process(name: &str) -> Execs { static NEXT_ID: AtomicUsize = AtomicUsize::new(0); thread_local!(static UNIQUE_ID: usize = NEXT_ID.fetch_add(1, Ordering::SeqCst)); // This copies the executable to a unique name so that it may be safely // replaced on Windows. See Project::rename_run for details. 
let src = installed_exe(name); let dst = installed_exe(&UNIQUE_ID.with(|my_id| format!("{}-{}", name, my_id))); // Note: Cannot use copy. On Linux, file descriptors may be left open to // the executable as other tests in other threads are constantly spawning // new processes (see https://github.com/rust-lang/cargo/pull/5557 for // more). fs::rename(&src, &dst) .unwrap_or_else(|e| panic!("Failed to rename `{:?}` to `{:?}`: {}", src, dst, e)); // Leave behind a fake file so that reinstall duplicate check works. fs::write(src, "").unwrap(); let p = process(dst); execs().with_process_builder(p) } /// Check that the given package name/version has the following bins listed in /// the trackers. Also verifies that both trackers are in sync and valid. /// Pass in an empty `bins` list to assert that the package is *not* installed. fn validate_trackers(name: &str, version: &str, bins: &[&str]) { let v1 = load_crates1(); let v1_table = v1.get("v1").unwrap().as_table().unwrap(); let v2 = load_crates2(); let v2_table = v2["installs"].as_object().unwrap(); assert_eq!(v1_table.len(), v2_table.len()); // Convert `bins` to a BTreeSet. let bins: BTreeSet = bins .iter() .map(|b| format!("{}{}", b, env::consts::EXE_SUFFIX)) .collect(); // Check every entry matches between v1 and v2. 
for (pkg_id_str, v1_bins) in v1_table { let pkg_id: PackageId = toml::Value::from(pkg_id_str.to_string()) .try_into() .unwrap(); let v1_bins: BTreeSet = v1_bins .as_array() .unwrap() .iter() .map(|b| b.as_str().unwrap().to_string()) .collect(); if pkg_id.name().as_str() == name && pkg_id.version().to_string() == version { if bins.is_empty() { panic!( "Expected {} to not be installed, but found: {:?}", name, v1_bins ); } else { assert_eq!(bins, v1_bins); } } let pkg_id_value = serde_json::to_value(&pkg_id).unwrap(); let pkg_id_str = pkg_id_value.as_str().unwrap(); let v2_info = v2_table .get(pkg_id_str) .expect("v2 missing v1 pkg") .as_object() .unwrap(); let v2_bins = v2_info["bins"].as_array().unwrap(); let v2_bins: BTreeSet = v2_bins .iter() .map(|b| b.as_str().unwrap().to_string()) .collect(); assert_eq!(v1_bins, v2_bins); } } #[cargo_test] fn registry_upgrade() { // Installing and upgrading from a registry. pkg("foo", "1.0.0"); cargo_process("install foo") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... [DOWNLOADED] foo v1.0.0 (registry [..]) [INSTALLING] foo v1.0.0 [COMPILING] foo v1.0.0 [FINISHED] release [optimized] target(s) in [..] [INSTALLING] [CWD]/home/.cargo/bin/foo[EXE] [INSTALLED] package `foo v1.0.0` (executable `foo[EXE]`) [WARNING] be sure to add [..] ", ) .run(); installed_process("foo").with_stdout("1.0.0").run(); validate_trackers("foo", "1.0.0", &["foo"]); cargo_process("install foo") .with_stderr( "\ [UPDATING] `[..]` index [IGNORED] package `foo v1.0.0` is already installed[..] [WARNING] be sure to add [..] ", ) .run(); pkg("foo", "1.0.1"); cargo_process("install foo") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... [DOWNLOADED] foo v1.0.1 (registry [..]) [INSTALLING] foo v1.0.1 [COMPILING] foo v1.0.1 [FINISHED] release [optimized] target(s) in [..] [REPLACING] [CWD]/home/.cargo/bin/foo[EXE] [REPLACED] package `foo v1.0.0` with `foo v1.0.1` (executable `foo[EXE]`) [WARNING] be sure to add [..] 
", ) .run(); installed_process("foo").with_stdout("1.0.1").run(); validate_trackers("foo", "1.0.1", &["foo"]); cargo_process("install foo --version=1.0.0") .with_stderr_contains("[COMPILING] foo v1.0.0") .run(); installed_process("foo").with_stdout("1.0.0").run(); validate_trackers("foo", "1.0.0", &["foo"]); cargo_process("install foo --version=^1.0") .with_stderr_contains("[COMPILING] foo v1.0.1") .run(); installed_process("foo").with_stdout("1.0.1").run(); validate_trackers("foo", "1.0.1", &["foo"]); cargo_process("install foo --version=^1.0") .with_stderr_contains("[IGNORED] package `foo v1.0.1` is already installed[..]") .run(); } #[cargo_test] fn uninstall() { // Basic uninstall test. pkg("foo", "1.0.0"); cargo_process("install foo").run(); cargo_process("uninstall foo").run(); let data = load_crates2(); assert_eq!(data["installs"].as_object().unwrap().len(), 0); let v1_table = load_crates1(); assert_eq!(v1_table.get("v1").unwrap().as_table().unwrap().len(), 0); } #[cargo_test] fn upgrade_force() { pkg("foo", "1.0.0"); cargo_process("install foo").run(); cargo_process("install foo --force") .with_stderr( "\ [UPDATING] `[..]` index [INSTALLING] foo v1.0.0 [COMPILING] foo v1.0.0 [FINISHED] release [optimized] target(s) in [..] [REPLACING] [..]/.cargo/bin/foo[EXE] [REPLACED] package `foo v1.0.0` with `foo v1.0.0` (executable `foo[EXE]`) [WARNING] be sure to add `[..]/.cargo/bin` to your PATH [..] ", ) .run(); validate_trackers("foo", "1.0.0", &["foo"]); } #[cargo_test] fn ambiguous_version_no_longer_allowed() { // Non-semver-requirement is not allowed for `--version`. pkg("foo", "1.0.0"); cargo_process("install foo --version=1.0") .with_stderr( "\ [ERROR] the `--version` provided, `1.0`, is not a valid semver version: cannot parse '1.0' as a semver if you want to specify semver range, add an explicit qualifier, like ^1.0 ", ) .with_status(101) .run(); } #[cargo_test] fn path_is_always_dirty() { // --path should always reinstall. 
let p = project().file("src/main.rs", "fn main() {}").build(); p.cargo("install --path .").run(); p.cargo("install --path .") .with_stderr_contains("[REPLACING] [..]/foo[EXE]") .run(); } #[cargo_test] fn fails_for_conflicts_unknown() { // If an untracked file is in the way, it should fail. pkg("foo", "1.0.0"); let exe = installed_exe("foo"); exe.parent().unwrap().mkdir_p(); fs::write(exe, "").unwrap(); cargo_process("install foo") .with_stderr_contains("[ERROR] binary `foo[EXE]` already exists in destination") .with_status(101) .run(); } #[cargo_test] fn fails_for_conflicts_known() { // If the same binary exists in another package, it should fail. pkg("foo", "1.0.0"); Package::new("bar", "1.0.0") .file("src/bin/foo.rs", "fn main() {}") .publish(); cargo_process("install foo").run(); cargo_process("install bar") .with_stderr_contains( "[ERROR] binary `foo[EXE]` already exists in destination as part of `foo v1.0.0`", ) .with_status(101) .run(); } #[cargo_test] fn supports_multiple_binary_names() { // Can individually install with --bin or --example Package::new("foo", "1.0.0") .file("src/main.rs", r#"fn main() { println!("foo"); }"#) .file("src/bin/a.rs", r#"fn main() { println!("a"); }"#) .file("examples/ex1.rs", r#"fn main() { println!("ex1"); }"#) .publish(); cargo_process("install foo --bin foo").run(); installed_process("foo").with_stdout("foo").run(); assert!(!installed_exe("a").exists()); assert!(!installed_exe("ex1").exists()); validate_trackers("foo", "1.0.0", &["foo"]); cargo_process("install foo --bin a").run(); installed_process("a").with_stdout("a").run(); assert!(!installed_exe("ex1").exists()); validate_trackers("foo", "1.0.0", &["a", "foo"]); cargo_process("install foo --example ex1").run(); installed_process("ex1").with_stdout("ex1").run(); validate_trackers("foo", "1.0.0", &["a", "ex1", "foo"]); cargo_process("uninstall foo --bin foo").run(); assert!(!installed_exe("foo").exists()); assert!(installed_exe("ex1").exists()); validate_trackers("foo", 
"1.0.0", &["a", "ex1"]); cargo_process("uninstall foo").run(); assert!(!installed_exe("ex1").exists()); assert!(!installed_exe("a").exists()); } #[cargo_test] fn v1_already_installed_fresh() { // Install with v1, then try to install again with v2. pkg("foo", "1.0.0"); cargo_process("install foo").run(); cargo_process("install foo") .with_stderr_contains("[IGNORED] package `foo v1.0.0` is already installed[..]") .run(); } #[cargo_test] fn v1_already_installed_dirty() { // Install with v1, then install a new version with v2. pkg("foo", "1.0.0"); cargo_process("install foo").run(); pkg("foo", "1.0.1"); cargo_process("install foo") .with_stderr_contains("[COMPILING] foo v1.0.1") .with_stderr_contains("[REPLACING] [..]/foo[EXE]") .run(); validate_trackers("foo", "1.0.1", &["foo"]); } #[cargo_test] fn change_features_rebuilds() { Package::new("foo", "1.0.0") .file( "src/main.rs", r#" fn main() { if cfg!(feature = "f1") { println!("f1"); } if cfg!(feature = "f2") { println!("f2"); } } "#, ) .file( "Cargo.toml", r#" [package] name = "foo" version = "1.0.0" [features] f1 = [] f2 = [] default = ["f1"] "#, ) .publish(); cargo_process("install foo").run(); installed_process("foo").with_stdout("f1").run(); cargo_process("install foo --no-default-features").run(); installed_process("foo").with_stdout("").run(); cargo_process("install foo --all-features").run(); installed_process("foo").with_stdout("f1\nf2").run(); cargo_process("install foo --no-default-features --features=f1").run(); installed_process("foo").with_stdout("f1").run(); } #[cargo_test] fn change_profile_rebuilds() { pkg("foo", "1.0.0"); cargo_process("install foo").run(); cargo_process("install foo --debug") .with_stderr_contains("[COMPILING] foo v1.0.0") .with_stderr_contains("[REPLACING] [..]foo[EXE]") .run(); cargo_process("install foo --debug") .with_stderr_contains("[IGNORED] package `foo v1.0.0` is already installed[..]") .run(); } #[cargo_test] fn change_target_rebuilds() { if cross_compile::disabled() { 
return; } pkg("foo", "1.0.0"); cargo_process("install foo").run(); let target = cross_compile::alternate(); cargo_process("install foo -v --target") .arg(&target) .with_stderr_contains("[COMPILING] foo v1.0.0") .with_stderr_contains("[REPLACING] [..]foo[EXE]") .with_stderr_contains(&format!("[..]--target {}[..]", target)) .run(); } #[cargo_test] fn change_bin_sets_rebuilds() { // Changing which bins in a multi-bin project should reinstall. Package::new("foo", "1.0.0") .file("src/main.rs", "fn main() { }") .file("src/bin/x.rs", "fn main() { }") .file("src/bin/y.rs", "fn main() { }") .publish(); cargo_process("install foo --bin x").run(); assert!(installed_exe("x").exists()); assert!(!installed_exe("y").exists()); assert!(!installed_exe("foo").exists()); validate_trackers("foo", "1.0.0", &["x"]); cargo_process("install foo --bin y") .with_stderr_contains("[INSTALLED] package `foo v1.0.0` (executable `y[EXE]`)") .run(); assert!(installed_exe("x").exists()); assert!(installed_exe("y").exists()); assert!(!installed_exe("foo").exists()); validate_trackers("foo", "1.0.0", &["x", "y"]); cargo_process("install foo") .with_stderr_contains("[INSTALLED] package `foo v1.0.0` (executable `foo[EXE]`)") .with_stderr_contains( "[REPLACED] package `foo v1.0.0` with `foo v1.0.0` (executables `x[EXE]`, `y[EXE]`)", ) .run(); assert!(installed_exe("x").exists()); assert!(installed_exe("y").exists()); assert!(installed_exe("foo").exists()); validate_trackers("foo", "1.0.0", &["foo", "x", "y"]); } #[cargo_test] fn forwards_compatible() { // Unknown fields should be preserved. 
pkg("foo", "1.0.0"); pkg("bar", "1.0.0"); cargo_process("install foo").run(); let key = "foo 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)"; let v2 = cargo_home().join(".crates2.json"); let mut data = load_crates2(); data["newfield"] = serde_json::Value::Bool(true); data["installs"][key]["moreinfo"] = serde_json::Value::String("shazam".to_string()); fs::write(&v2, serde_json::to_string(&data).unwrap()).unwrap(); cargo_process("install bar").run(); let data: serde_json::Value = serde_json::from_str(&fs::read_to_string(&v2).unwrap()).unwrap(); assert_eq!(data["newfield"].as_bool().unwrap(), true); assert_eq!( data["installs"][key]["moreinfo"].as_str().unwrap(), "shazam" ); } #[cargo_test] fn v2_syncs() { // V2 inherits the installs from V1. pkg("one", "1.0.0"); pkg("two", "1.0.0"); pkg("three", "1.0.0"); let p = project() .file("src/bin/x.rs", "fn main() {}") .file("src/bin/y.rs", "fn main() {}") .build(); cargo_process("install one").run(); validate_trackers("one", "1.0.0", &["one"]); p.cargo("install --path .").run(); validate_trackers("foo", "1.0.0", &["x", "y"]); // v1 add/remove cargo_process("install two").run(); cargo_process("uninstall one").run(); // This should pick up that `two` was added, `one` was removed. cargo_process("install three").run(); validate_trackers("three", "1.0.0", &["three"]); cargo_process("install --list") .with_stdout( "\ foo v0.0.1 ([..]/foo): x[EXE] y[EXE] three v1.0.0: three[EXE] two v1.0.0: two[EXE] ", ) .run(); cargo_process("install one").run(); installed_process("one").with_stdout("1.0.0").run(); validate_trackers("one", "1.0.0", &["one"]); cargo_process("install two") .with_stderr_contains("[IGNORED] package `two v1.0.0` is already installed[..]") .run(); // v1 remove p.cargo("uninstall --bin x").run(); pkg("x", "1.0.0"); pkg("y", "1.0.0"); // This should succeed because `x` was removed in V1. 
cargo_process("install x").run(); validate_trackers("x", "1.0.0", &["x"]); // This should fail because `y` still exists in a different package. cargo_process("install y") .with_stderr_contains( "[ERROR] binary `y[EXE]` already exists in destination \ as part of `foo v0.0.1 ([..])`", ) .with_status(101) .run(); } #[cargo_test] fn upgrade_git() { let git_project = git::new("foo", |project| project.file("src/main.rs", "fn main() {}")); // install cargo_process("install --git") .arg(git_project.url().to_string()) .run(); // Check install stays fresh. cargo_process("install --git") .arg(git_project.url().to_string()) .with_stderr_contains( "[IGNORED] package `foo v0.0.1 (file://[..]/foo#[..])` is \ already installed,[..]", ) .run(); // Modify a file. let repo = git2::Repository::open(git_project.root()).unwrap(); git_project.change_file("src/main.rs", r#"fn main() {println!("onomatopoeia");}"#); git::add(&repo); git::commit(&repo); // Install should reinstall. cargo_process("install --git") .arg(git_project.url().to_string()) .with_stderr_contains("[COMPILING] foo v0.0.1 ([..])") .with_stderr_contains("[REPLACING] [..]/foo[EXE]") .run(); installed_process("foo").with_stdout("onomatopoeia").run(); // Check install stays fresh. cargo_process("install --git") .arg(git_project.url().to_string()) .with_stderr_contains( "[IGNORED] package `foo v0.0.1 (file://[..]/foo#[..])` is \ already installed,[..]", ) .run(); } #[cargo_test] fn switch_sources() { // Installing what appears to be the same thing, but from different // sources should reinstall. 
registry::alt_init(); pkg("foo", "1.0.0"); Package::new("foo", "1.0.0") .file("src/main.rs", r#"fn main() { println!("alt"); }"#) .alternative(true) .publish(); let p = project() .at("foo-local") // so it doesn't use the same directory as the git project .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) .file("src/main.rs", r#"fn main() { println!("local"); }"#) .build(); let git_project = git::new("foo", |project| { project.file("src/main.rs", r#"fn main() { println!("git"); }"#) }); cargo_process("install foo").run(); installed_process("foo").with_stdout("1.0.0").run(); cargo_process("install foo --registry alternative").run(); installed_process("foo").with_stdout("alt").run(); p.cargo("install --path .").run(); installed_process("foo").with_stdout("local").run(); cargo_process("install --git") .arg(git_project.url().to_string()) .run(); installed_process("foo").with_stdout("git").run(); } #[cargo_test] fn multiple_report() { // Testing the full output that indicates installed/ignored/replaced/summary. pkg("one", "1.0.0"); pkg("two", "1.0.0"); fn three(vers: &str) { Package::new("three", vers) .file("src/main.rs", "fn main() { }") .file("src/bin/x.rs", "fn main() { }") .file("src/bin/y.rs", "fn main() { }") .publish(); } three("1.0.0"); cargo_process("install one two three") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... [DOWNLOADED] one v1.0.0 (registry `[..]`) [DOWNLOADING] crates ... [DOWNLOADED] two v1.0.0 (registry `[..]`) [DOWNLOADING] crates ... [DOWNLOADED] three v1.0.0 (registry `[..]`) [INSTALLING] one v1.0.0 [COMPILING] one v1.0.0 [FINISHED] release [optimized] target(s) in [..] [INSTALLING] [..]/.cargo/bin/one[EXE] [INSTALLED] package `one v1.0.0` (executable `one[EXE]`) [INSTALLING] two v1.0.0 [COMPILING] two v1.0.0 [FINISHED] release [optimized] target(s) in [..] 
[INSTALLING] [..]/.cargo/bin/two[EXE] [INSTALLED] package `two v1.0.0` (executable `two[EXE]`) [INSTALLING] three v1.0.0 [COMPILING] three v1.0.0 [FINISHED] release [optimized] target(s) in [..] [INSTALLING] [..]/.cargo/bin/three[EXE] [INSTALLING] [..]/.cargo/bin/x[EXE] [INSTALLING] [..]/.cargo/bin/y[EXE] [INSTALLED] package `three v1.0.0` (executables `three[EXE]`, `x[EXE]`, `y[EXE]`) [SUMMARY] Successfully installed one, two, three! [WARNING] be sure to add `[..]/.cargo/bin` to your PATH [..] ", ) .run(); pkg("foo", "1.0.1"); pkg("bar", "1.0.1"); three("1.0.1"); cargo_process("install one two three") .with_stderr( "\ [UPDATING] `[..]` index [IGNORED] package `one v1.0.0` is already installed, use --force to override [IGNORED] package `two v1.0.0` is already installed, use --force to override [DOWNLOADING] crates ... [DOWNLOADED] three v1.0.1 (registry `[..]`) [INSTALLING] three v1.0.1 [COMPILING] three v1.0.1 [FINISHED] release [optimized] target(s) in [..] [REPLACING] [..]/.cargo/bin/three[EXE] [REPLACING] [..]/.cargo/bin/x[EXE] [REPLACING] [..]/.cargo/bin/y[EXE] [REPLACED] package `three v1.0.0` with `three v1.0.1` (executables `three[EXE]`, `x[EXE]`, `y[EXE]`) [SUMMARY] Successfully installed one, two, three! [WARNING] be sure to add `[..]/.cargo/bin` to your PATH [..] ", ) .run(); cargo_process("uninstall three") .with_stderr( "\ [REMOVING] [..]/.cargo/bin/three[EXE] [REMOVING] [..]/.cargo/bin/x[EXE] [REMOVING] [..]/.cargo/bin/y[EXE] ", ) .run(); cargo_process("install three --bin x") .with_stderr( "\ [UPDATING] `[..]` index [INSTALLING] three v1.0.1 [COMPILING] three v1.0.1 [FINISHED] release [optimized] target(s) in [..] [INSTALLING] [..]/.cargo/bin/x[EXE] [INSTALLED] package `three v1.0.1` (executable `x[EXE]`) [WARNING] be sure to add `[..]/.cargo/bin` to your PATH [..] ", ) .run(); cargo_process("install three") .with_stderr( "\ [UPDATING] `[..]` index [INSTALLING] three v1.0.1 [COMPILING] three v1.0.1 [FINISHED] release [optimized] target(s) in [..] 
[INSTALLING] [..]/.cargo/bin/three[EXE] [INSTALLING] [..]/.cargo/bin/y[EXE] [REPLACING] [..]/.cargo/bin/x[EXE] [INSTALLED] package `three v1.0.1` (executables `three[EXE]`, `y[EXE]`) [REPLACED] package `three v1.0.1` with `three v1.0.1` (executable `x[EXE]`) [WARNING] be sure to add `[..]/.cargo/bin` to your PATH [..] ", ) .run(); } #[cargo_test] fn no_track() { pkg("foo", "1.0.0"); cargo_process("install --no-track foo").run(); assert!(!v1_path().exists()); assert!(!v2_path().exists()); cargo_process("install --no-track foo") .with_stderr( "\ [UPDATING] `[..]` index [ERROR] binary `foo[EXE]` already exists in destination `[..]/.cargo/bin/foo[EXE]` Add --force to overwrite ", ) .with_status(101) .run(); } #[cargo_test] fn deletes_orphaned() { // When an executable is removed from a project, upgrading should remove it. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" "#, ) .file("src/main.rs", "fn main() {}") .file("src/bin/other.rs", "fn main() {}") .file("examples/ex1.rs", "fn main() {}") .build(); p.cargo("install --path . --bins --examples").run(); assert!(installed_exe("other").exists()); // Remove a binary, add a new one, and bump the version. fs::remove_file(p.root().join("src/bin/other.rs")).unwrap(); p.change_file("examples/ex2.rs", "fn main() {}"); p.change_file( "Cargo.toml", r#" [package] name = "foo" version = "0.2.0" "#, ); p.cargo("install --path . --bins --examples") .with_stderr( "\ [INSTALLING] foo v0.2.0 [..] [COMPILING] foo v0.2.0 [..] [FINISHED] release [..] [INSTALLING] [..]/.cargo/bin/ex2[EXE] [REPLACING] [..]/.cargo/bin/ex1[EXE] [REPLACING] [..]/.cargo/bin/foo[EXE] [REMOVING] executable `[..]/.cargo/bin/other[EXE]` from previous version foo v0.1.0 [..] [INSTALLED] package `foo v0.2.0 [..]` (executable `ex2[EXE]`) [REPLACED] package `foo v0.1.0 [..]` with `foo v0.2.0 [..]` (executables `ex1[EXE]`, `foo[EXE]`) [WARNING] be sure to add [..] 
", ) .run(); assert!(!installed_exe("other").exists()); validate_trackers("foo", "0.2.0", &["foo", "ex1", "ex2"]); // 0.1.0 should not have any entries. validate_trackers("foo", "0.1.0", &[]); } #[cargo_test] fn already_installed_exact_does_not_update() { pkg("foo", "1.0.0"); cargo_process("install foo --version=1.0.0").run(); cargo_process("install foo --version=1.0.0") .with_stderr( "\ [IGNORED] package `foo v1.0.0` is already installed[..] [WARNING] be sure to add [..] ", ) .run(); cargo_process("install foo --version=>=1.0.0") .with_stderr( "\ [UPDATING] `[..]` index [IGNORED] package `foo v1.0.0` is already installed[..] [WARNING] be sure to add [..] ", ) .run(); pkg("foo", "1.0.1"); cargo_process("install foo --version=>=1.0.0") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... [DOWNLOADED] foo v1.0.1 (registry [..]) [INSTALLING] foo v1.0.1 [COMPILING] foo v1.0.1 [FINISHED] release [optimized] target(s) in [..] [REPLACING] [CWD]/home/.cargo/bin/foo[EXE] [REPLACED] package `foo v1.0.0` with `foo v1.0.1` (executable `foo[EXE]`) [WARNING] be sure to add [..] ", ) .run(); } #[cargo_test] fn already_installed_updates_yank_status_on_upgrade() { pkg("foo", "1.0.0"); pkg_maybe_yanked("foo", "1.0.1", true); cargo_process("install foo --version=1.0.0").run(); cargo_process("install foo --version=1.0.1") .with_status(101) .with_stderr_contains( "\ [ERROR] cannot install package `foo`, it has been yanked from registry `crates-io` ", ) .run(); pkg_maybe_yanked("foo", "1.0.1", false); pkg("foo", "1.0.1"); cargo_process("install foo --version=1.0.1") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... [DOWNLOADED] foo v1.0.1 (registry [..]) [INSTALLING] foo v1.0.1 [COMPILING] foo v1.0.1 [FINISHED] release [optimized] target(s) in [..] [REPLACING] [CWD]/home/.cargo/bin/foo[EXE] [REPLACED] package `foo v1.0.0` with `foo v1.0.1` (executable `foo[EXE]`) [WARNING] be sure to add [..] 
", ) .run(); } #[cargo_test] fn partially_already_installed_does_one_update() { pkg("foo", "1.0.0"); cargo_process("install foo --version=1.0.0").run(); pkg("bar", "1.0.0"); pkg("baz", "1.0.0"); cargo_process("install foo bar baz --version=1.0.0") .with_stderr( "\ [IGNORED] package `foo v1.0.0` is already installed[..] [UPDATING] `[..]` index [DOWNLOADING] crates ... [DOWNLOADED] bar v1.0.0 (registry [..]) [DOWNLOADING] crates ... [DOWNLOADED] baz v1.0.0 (registry [..]) [INSTALLING] bar v1.0.0 [COMPILING] bar v1.0.0 [FINISHED] release [optimized] target(s) in [..] [INSTALLING] [CWD]/home/.cargo/bin/bar[EXE] [INSTALLED] package `bar v1.0.0` (executable `bar[EXE]`) [INSTALLING] baz v1.0.0 [COMPILING] baz v1.0.0 [FINISHED] release [optimized] target(s) in [..] [INSTALLING] [CWD]/home/.cargo/bin/baz[EXE] [INSTALLED] package `baz v1.0.0` (executable `baz[EXE]`) [SUMMARY] Successfully installed foo, bar, baz! [WARNING] be sure to add [..] ", ) .run(); } cargo-0.66.0/tests/testsuite/jobserver.rs000066400000000000000000000132241432416201200204350ustar00rootroot00000000000000//! Tests for the jobserver protocol. 
use cargo_util::is_ci; use std::net::TcpListener; use std::process::Command; use std::thread; use cargo_test_support::install::{assert_has_installed_exe, cargo_home}; use cargo_test_support::{cargo_exe, project}; const EXE_CONTENT: &str = r#" use std::env; fn main() { let var = env::var("CARGO_MAKEFLAGS").unwrap(); let arg = var.split(' ') .find(|p| p.starts_with("--jobserver")) .unwrap(); let val = &arg[arg.find('=').unwrap() + 1..]; validate(val); } #[cfg(unix)] fn validate(s: &str) { use std::fs::File; use std::io::*; use std::os::unix::prelude::*; let fds = s.split(',').collect::>(); println!("{}", s); assert_eq!(fds.len(), 2); unsafe { let mut read = File::from_raw_fd(fds[0].parse().unwrap()); let mut write = File::from_raw_fd(fds[1].parse().unwrap()); let mut buf = [0]; assert_eq!(read.read(&mut buf).unwrap(), 1); assert_eq!(write.write(&buf).unwrap(), 1); } } #[cfg(windows)] fn validate(_: &str) { // a little too complicated for a test... } "#; #[cargo_test] fn jobserver_exists() { let p = project() .file("build.rs", EXE_CONTENT) .file("src/lib.rs", "") .build(); // Explicitly use `-j2` to ensure that there's eventually going to be a // token to read from `validate` above, since running the build script // itself consumes a token. 
p.cargo("build -j2").run(); } #[cargo_test] fn external_subcommand_inherits_jobserver() { let make = if cfg!(windows) { "mingw32-make" } else { "make" }; if Command::new(make).arg("--version").output().is_err() { return; } let name = "cargo-jobserver-check"; let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "{name}" version = "0.0.1" "# ), ) .file("src/main.rs", EXE_CONTENT) .file( "Makefile", "\ all: \t+$(CARGO) jobserver-check ", ) .build(); p.cargo("install --path .").run(); assert_has_installed_exe(cargo_home(), name); p.process(make).env("CARGO", cargo_exe()).arg("-j2").run(); } #[cargo_test] fn makes_jobserver_used() { let make = if cfg!(windows) { "mingw32-make" } else { "make" }; if !is_ci() && Command::new(make).arg("--version").output().is_err() { return; } let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] d1 = { path = "d1" } d2 = { path = "d2" } d3 = { path = "d3" } "#, ) .file("src/lib.rs", "") .file( "d1/Cargo.toml", r#" [package] name = "d1" version = "0.0.1" authors = [] build = "../dbuild.rs" "#, ) .file("d1/src/lib.rs", "") .file( "d2/Cargo.toml", r#" [package] name = "d2" version = "0.0.1" authors = [] build = "../dbuild.rs" "#, ) .file("d2/src/lib.rs", "") .file( "d3/Cargo.toml", r#" [package] name = "d3" version = "0.0.1" authors = [] build = "../dbuild.rs" "#, ) .file("d3/src/lib.rs", "") .file( "dbuild.rs", r#" use std::net::TcpStream; use std::env; use std::io::Read; fn main() { let addr = env::var("ADDR").unwrap(); let mut stream = TcpStream::connect(addr).unwrap(); let mut v = Vec::new(); stream.read_to_end(&mut v).unwrap(); } "#, ) .file( "Makefile", "\ all: \t+$(CARGO) build ", ) .build(); let l = TcpListener::bind("127.0.0.1:0").unwrap(); let addr = l.local_addr().unwrap(); let child = thread::spawn(move || { let a1 = l.accept().unwrap(); let a2 = l.accept().unwrap(); l.set_nonblocking(true).unwrap(); for _ in 0..1000 { assert!(l.accept().is_err()); 
thread::yield_now(); } drop(a1); l.set_nonblocking(false).unwrap(); let a3 = l.accept().unwrap(); drop((a2, a3)); }); p.process(make) .env("CARGO", cargo_exe()) .env("ADDR", addr.to_string()) .arg("-j2") .run(); child.join().unwrap(); } #[cargo_test] fn jobserver_and_j() { let make = if cfg!(windows) { "mingw32-make" } else { "make" }; if !is_ci() && Command::new(make).arg("--version").output().is_err() { return; } let p = project() .file("src/lib.rs", "") .file( "Makefile", "\ all: \t+$(CARGO) build -j2 ", ) .build(); p.process(make) .env("CARGO", cargo_exe()) .arg("-j2") .with_stderr( "\ warning: a `-j` argument was passed to Cargo but Cargo is also configured \ with an external jobserver in its environment, ignoring the `-j` parameter [COMPILING] [..] [FINISHED] [..] ", ) .run(); } cargo-0.66.0/tests/testsuite/list_availables.rs000066400000000000000000000117111432416201200215710ustar00rootroot00000000000000//! Tests for packages/target filter flags giving suggestions on which //! packages/targets are available. use cargo_test_support::project; const EXAMPLE: u8 = 1 << 0; const BIN: u8 = 1 << 1; const TEST: u8 = 1 << 2; const BENCH: u8 = 1 << 3; const PACKAGE: u8 = 1 << 4; fn list_availables_test(command: &str, targets: u8) { let full_project = project() .file("examples/a.rs", "fn main() { }") .file("examples/b.rs", "fn main() { }") .file("benches/bench1.rs", "") .file("benches/bench2.rs", "") .file("tests/test1.rs", "") .file("tests/test2.rs", "") .file("src/main.rs", "fn main() { }") .file("Cargo.lock", "") // for `cargo pkgid` .build(); if targets & EXAMPLE != 0 { full_project .cargo(&format!("{} --example", command)) .with_stderr( "\ error: \"--example\" takes one argument. Available examples: a b ", ) .with_status(101) .run(); } if targets & BIN != 0 { full_project .cargo(&format!("{} --bin", command)) .with_stderr( "\ error: \"--bin\" takes one argument. 
Available binaries: foo ", ) .with_status(101) .run(); } if targets & BENCH != 0 { full_project .cargo(&format!("{} --bench", command)) .with_stderr( "\ error: \"--bench\" takes one argument. Available benches: bench1 bench2 ", ) .with_status(101) .run(); } if targets & TEST != 0 { full_project .cargo(&format!("{} --test", command)) .with_stderr( "\ error: \"--test\" takes one argument. Available tests: test1 test2 ", ) .with_status(101) .run(); } if targets & PACKAGE != 0 { full_project .cargo(&format!("{} -p", command)) .with_stderr( "\ [ERROR] \"--package \" requires a SPEC format value, \ which can be any package ID specifier in the dependency graph. Run `cargo help pkgid` for more information about SPEC format. Possible packages/workspace members: foo ", ) .with_status(101) .run(); } let empty_project = project().file("src/lib.rs", "").build(); if targets & EXAMPLE != 0 { empty_project .cargo(&format!("{} --example", command)) .with_stderr( "\ error: \"--example\" takes one argument. No examples available. ", ) .with_status(101) .run(); } if targets & BIN != 0 { empty_project .cargo(&format!("{} --bin", command)) .with_stderr( "\ error: \"--bin\" takes one argument. No binaries available. ", ) .with_status(101) .run(); } if targets & BENCH != 0 { empty_project .cargo(&format!("{} --bench", command)) .with_stderr( "\ error: \"--bench\" takes one argument. No benches available. ", ) .with_status(101) .run(); } if targets & TEST != 0 { empty_project .cargo(&format!("{} --test", command)) .with_stderr( "\ error: \"--test\" takes one argument. No tests available. 
", ) .with_status(101) .run(); } } #[cargo_test] fn build_list_availables() { list_availables_test("build", EXAMPLE | BIN | TEST | BENCH | PACKAGE); } #[cargo_test] fn check_list_availables() { list_availables_test("check", EXAMPLE | BIN | TEST | BENCH | PACKAGE); } #[cargo_test] fn doc_list_availables() { list_availables_test("doc", BIN | PACKAGE); } #[cargo_test] fn fix_list_availables() { list_availables_test("fix", EXAMPLE | BIN | TEST | BENCH | PACKAGE); } #[cargo_test] fn run_list_availables() { list_availables_test("run", EXAMPLE | BIN | PACKAGE); } #[cargo_test] fn test_list_availables() { list_availables_test("test", EXAMPLE | BIN | TEST | BENCH | PACKAGE); } #[cargo_test] fn bench_list_availables() { list_availables_test("bench", EXAMPLE | BIN | TEST | BENCH | PACKAGE); } #[cargo_test] fn install_list_availables() { list_availables_test("install", EXAMPLE | BIN); } #[cargo_test] fn rustdoc_list_availables() { list_availables_test("rustdoc", EXAMPLE | BIN | TEST | BENCH | PACKAGE); } #[cargo_test] fn rustc_list_availables() { list_availables_test("rustc", EXAMPLE | BIN | TEST | BENCH | PACKAGE); } #[cargo_test] fn pkgid_list_availables() { list_availables_test("pkgid", PACKAGE); } #[cargo_test] fn tree_list_availables() { list_availables_test("tree", PACKAGE); } #[cargo_test] fn clean_list_availables() { list_availables_test("clean", PACKAGE); } #[cargo_test] fn update_list_availables() { list_availables_test("update", PACKAGE); } cargo-0.66.0/tests/testsuite/local_registry.rs000066400000000000000000000273021432416201200214600ustar00rootroot00000000000000//! Tests for local-registry sources. 
use cargo_test_support::paths::{self, CargoPathExt}; use cargo_test_support::registry::{registry_path, Package}; use cargo_test_support::{basic_manifest, project, t}; use std::fs; fn setup() { let root = paths::root(); t!(fs::create_dir(&root.join(".cargo"))); t!(fs::write( root.join(".cargo/config"), r#" [source.crates-io] registry = 'https://wut' replace-with = 'my-awesome-local-registry' [source.my-awesome-local-registry] local-registry = 'registry' "# )); } #[cargo_test] fn simple() { setup(); Package::new("bar", "0.0.1") .local(true) .file("src/lib.rs", "pub fn bar() {}") .publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.0.1" "#, ) .file( "src/lib.rs", "extern crate bar; pub fn foo() { bar::bar(); }", ) .build(); p.cargo("build") .with_stderr( "\ [UNPACKING] bar v0.0.1 ([..]) [COMPILING] bar v0.0.1 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] [..] ", ) .run(); p.cargo("build").with_stderr("[FINISHED] [..]").run(); p.cargo("test").run(); } #[cargo_test] fn not_found() { setup(); // Publish a package so that the directory hierarchy is created. // Note, however, that we declare a dependency on baZ. Package::new("bar", "0.0.1").local(true).publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] baz = "0.0.1" "#, ) .file( "src/lib.rs", "extern crate baz; pub fn foo() { baz::bar(); }", ) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] no matching package named `baz` found location searched: registry `crates-io` required by package `foo v0.0.1 ([..]/foo)` ", ) .run(); } #[cargo_test] fn depend_on_yanked() { setup(); Package::new("bar", "0.0.1").local(true).publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.0.1" "#, ) .file("src/lib.rs", "") .build(); // Run cargo to create lock file. 
p.cargo("check").run(); registry_path().join("index").join("3").rm_rf(); Package::new("bar", "0.0.1") .local(true) .yanked(true) .publish(); p.cargo("check") .with_stderr( "\ [FINISHED] [..] ", ) .run(); } #[cargo_test] fn multiple_versions() { setup(); Package::new("bar", "0.0.1").local(true).publish(); Package::new("bar", "0.1.0") .local(true) .file("src/lib.rs", "pub fn bar() {}") .publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" "#, ) .file( "src/lib.rs", "extern crate bar; pub fn foo() { bar::bar(); }", ) .build(); p.cargo("build") .with_stderr( "\ [UNPACKING] bar v0.1.0 ([..]) [COMPILING] bar v0.1.0 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] [..] ", ) .run(); Package::new("bar", "0.2.0") .local(true) .file("src/lib.rs", "pub fn bar() {}") .publish(); p.cargo("update -v") .with_stderr("[UPDATING] bar v0.1.0 -> v0.2.0") .run(); } #[cargo_test] fn multiple_names() { setup(); Package::new("bar", "0.0.1") .local(true) .file("src/lib.rs", "pub fn bar() {}") .publish(); Package::new("baz", "0.1.0") .local(true) .file("src/lib.rs", "pub fn baz() {}") .publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" baz = "*" "#, ) .file( "src/lib.rs", r#" extern crate bar; extern crate baz; pub fn foo() { bar::bar(); baz::baz(); } "#, ) .build(); p.cargo("build") .with_stderr( "\ [UNPACKING] [..] [UNPACKING] [..] [COMPILING] [..] [COMPILING] [..] [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] [..] 
", ) .run(); } #[cargo_test] fn interdependent() { setup(); Package::new("bar", "0.0.1") .local(true) .file("src/lib.rs", "pub fn bar() {}") .publish(); Package::new("baz", "0.1.0") .local(true) .dep("bar", "*") .file("src/lib.rs", "extern crate bar; pub fn baz() {}") .publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" baz = "*" "#, ) .file( "src/lib.rs", r#" extern crate bar; extern crate baz; pub fn foo() { bar::bar(); baz::baz(); } "#, ) .build(); p.cargo("build") .with_stderr( "\ [UNPACKING] [..] [UNPACKING] [..] [COMPILING] bar v0.0.1 [COMPILING] baz v0.1.0 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] [..] ", ) .run(); } #[cargo_test] fn path_dep_rewritten() { setup(); Package::new("bar", "0.0.1") .local(true) .file("src/lib.rs", "pub fn bar() {}") .publish(); Package::new("baz", "0.1.0") .local(true) .dep("bar", "*") .file( "Cargo.toml", r#" [project] name = "baz" version = "0.1.0" authors = [] [dependencies] bar = { path = "bar", version = "*" } "#, ) .file("src/lib.rs", "extern crate bar; pub fn baz() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "pub fn bar() {}") .publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" baz = "*" "#, ) .file( "src/lib.rs", r#" extern crate bar; extern crate baz; pub fn foo() { bar::bar(); baz::baz(); } "#, ) .build(); p.cargo("build") .with_stderr( "\ [UNPACKING] [..] [UNPACKING] [..] [COMPILING] bar v0.0.1 [COMPILING] baz v0.1.0 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] [..] 
", ) .run(); } #[cargo_test] fn invalid_dir_bad() { setup(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" "#, ) .file("src/lib.rs", "") .file( ".cargo/config", r#" [source.crates-io] registry = 'https://wut' replace-with = 'my-awesome-local-directory' [source.my-awesome-local-directory] local-registry = '/path/to/nowhere' "#, ) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to get `bar` as a dependency of package `foo v0.0.1 [..]` Caused by: failed to load source for dependency `bar` Caused by: Unable to update registry `crates-io` Caused by: failed to update replaced source registry `crates-io` Caused by: local registry path is not a directory: [..]path[..]to[..]nowhere ", ) .run(); } #[cargo_test] fn different_directory_replacing_the_registry_is_bad() { setup(); // Move our test's .cargo/config to a temporary location and publish a // registry package we're going to use first. let config = paths::root().join(".cargo"); let config_tmp = paths::root().join(".cargo-old"); t!(fs::rename(&config, &config_tmp)); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" "#, ) .file("src/lib.rs", "") .build(); // Generate a lock file against the crates.io registry Package::new("bar", "0.0.1").publish(); p.cargo("build").run(); // Switch back to our directory source, and now that we're replacing // crates.io make sure that this fails because we're replacing with a // different checksum config.rm_rf(); t!(fs::rename(&config_tmp, &config)); Package::new("bar", "0.0.1") .file("src/lib.rs", "invalid") .local(true) .publish(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] checksum for `bar v0.0.1` changed between lock files this could be indicative of a few possible errors: * the lock file is corrupt * a replacement source in use (e.g., a mirror) returned a different checksum * the source itself 
may be corrupt in one way or another unable to verify that `bar v0.0.1` is the same as when the lockfile was generated ", ) .run(); } #[cargo_test] fn crates_io_registry_url_is_optional() { let root = paths::root(); t!(fs::create_dir(&root.join(".cargo"))); t!(fs::write( root.join(".cargo/config"), r#" [source.crates-io] replace-with = 'my-awesome-local-registry' [source.my-awesome-local-registry] local-registry = 'registry' "# )); Package::new("bar", "0.0.1") .local(true) .file("src/lib.rs", "pub fn bar() {}") .publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.0.1" "#, ) .file( "src/lib.rs", "extern crate bar; pub fn foo() { bar::bar(); }", ) .build(); p.cargo("build") .with_stderr( "\ [UNPACKING] bar v0.0.1 ([..]) [COMPILING] bar v0.0.1 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] [..] ", ) .run(); p.cargo("build").with_stderr("[FINISHED] [..]").run(); p.cargo("test").run(); } cargo-0.66.0/tests/testsuite/locate_project.rs000066400000000000000000000034461432416201200214360ustar00rootroot00000000000000//! Tests for the `cargo locate-project` command. 
use cargo_test_support::project; #[cargo_test] fn simple() { let p = project().build(); p.cargo("locate-project") .with_json(r#"{"root": "[ROOT]/foo/Cargo.toml"}"#) .run(); } #[cargo_test] fn message_format() { let p = project().build(); p.cargo("locate-project --message-format plain") .with_stdout("[ROOT]/foo/Cargo.toml") .run(); p.cargo("locate-project --message-format json") .with_json(r#"{"root": "[ROOT]/foo/Cargo.toml"}"#) .run(); p.cargo("locate-project --message-format cryptic") .with_stderr("error: invalid message format specifier: `cryptic`") .with_status(101) .run(); } #[cargo_test] fn workspace() { let p = project() .file( "Cargo.toml", r#" [package] name = "outer" version = "0.0.0" [workspace] members = ["inner"] "#, ) .file("src/main.rs", "fn main() {}") .file( "inner/Cargo.toml", r#" [package] name = "inner" version = "0.0.0" "#, ) .file("inner/src/lib.rs", "") .build(); let outer_manifest = r#"{"root": "[ROOT]/foo/Cargo.toml"}"#; let inner_manifest = r#"{"root": "[ROOT]/foo/inner/Cargo.toml"}"#; p.cargo("locate-project").with_json(outer_manifest).run(); p.cargo("locate-project") .cwd("inner") .with_json(inner_manifest) .run(); p.cargo("locate-project --workspace") .with_json(outer_manifest) .run(); p.cargo("locate-project --workspace") .cwd("inner") .with_json(outer_manifest) .run(); } cargo-0.66.0/tests/testsuite/lockfile_compat.rs000066400000000000000000000470441432416201200215760ustar00rootroot00000000000000//! Tests for supporting older versions of the Cargo.lock file format. 
use cargo_test_support::compare::assert_match_exact; use cargo_test_support::git; use cargo_test_support::registry::Package; use cargo_test_support::{basic_lib_manifest, basic_manifest, project}; #[cargo_test] fn oldest_lockfile_still_works() { let cargo_commands = vec!["build", "update"]; for cargo_command in cargo_commands { oldest_lockfile_still_works_with_command(cargo_command); } } fn oldest_lockfile_still_works_with_command(cargo_command: &str) { Package::new("bar", "0.1.0").publish(); let expected_lockfile = r#"# This file is automatically @generated by Cargo. # It is not intended for manual editing. version = 3 [[package]] name = "bar" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "[..]" [[package]] name = "foo" version = "0.0.1" dependencies = [ "bar", ] "#; let old_lockfile = r#" [root] name = "foo" version = "0.0.1" dependencies = [ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "bar" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" "#; let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" "#, ) .file("src/lib.rs", "") .file("Cargo.lock", old_lockfile) .build(); p.cargo(cargo_command).run(); let lock = p.read_lockfile(); assert_match_exact(expected_lockfile, &lock); } #[cargo_test] fn frozen_flag_preserves_old_lockfile() { let cksum = Package::new("bar", "0.1.0").publish(); let old_lockfile = format!( r#"[root] name = "foo" version = "0.0.1" dependencies = [ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "bar" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" [metadata] "checksum bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "{}" "#, cksum, ); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = 
"0.1.0" "#, ) .file("src/lib.rs", "") .file("Cargo.lock", &old_lockfile) .build(); p.cargo("build --locked").run(); let lock = p.read_lockfile(); assert_match_exact(&old_lockfile, &lock); } #[cargo_test] fn totally_wild_checksums_works() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" "#, ) .file("src/lib.rs", "") .file( "Cargo.lock", r#" [[package]] name = "foo" version = "0.0.1" dependencies = [ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "bar" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" [metadata] "checksum baz 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum" "checksum bar 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum" "#, ); let p = p.build(); p.cargo("build").run(); let lock = p.read_lockfile(); assert_match_exact( r#"# This file is automatically @generated by Cargo. # It is not intended for manual editing. 
version = 3 [[package]] name = "bar" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "[..]" [[package]] name = "foo" version = "0.0.1" dependencies = [ "bar", ] "#, &lock, ); } #[cargo_test] fn wrong_checksum_is_an_error() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" "#, ) .file("src/lib.rs", "") .file( "Cargo.lock", r#" [[package]] name = "foo" version = "0.0.1" dependencies = [ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "bar" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" [metadata] "checksum bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum" "#, ); let p = p.build(); p.cargo("build") .with_status(101) .with_stderr( "\ [UPDATING] `[..]` index error: checksum for `bar v0.1.0` changed between lock files this could be indicative of a few possible errors: * the lock file is corrupt * a replacement source in use (e.g., a mirror) returned a different checksum * the source itself may be corrupt in one way or another unable to verify that `bar v0.1.0` is the same as when the lockfile was generated ", ) .run(); } // If the checksum is unlisted in the lock file (e.g., ) yet we can // calculate it (e.g., it's a registry dep), then we should in theory just fill // it in. 
#[cargo_test] fn unlisted_checksum_is_bad_if_we_calculate() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" "#, ) .file("src/lib.rs", "") .file( "Cargo.lock", r#" [[package]] name = "foo" version = "0.0.1" dependencies = [ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "bar" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" [metadata] "checksum bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "" "#, ); let p = p.build(); p.cargo("fetch") .with_status(101) .with_stderr( "\ [UPDATING] `[..]` index error: checksum for `bar v0.1.0` was not previously calculated, but a checksum \ could now be calculated this could be indicative of a few possible situations: * the source `[..]` did not previously support checksums, but was replaced with one that does * newer Cargo implementations know how to checksum this source, but this older implementation does not * the lock file is corrupt ", ) .run(); } // If the checksum is listed in the lock file yet we cannot calculate it (e.g., // Git dependencies as of today), then make sure we choke. 
#[cargo_test] fn listed_checksum_bad_if_we_cannot_compute() { let git = git::new("bar", |p| { p.file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "") }); let p = project() .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = {{ git = '{}' }} "#, git.url() ), ) .file("src/lib.rs", "") .file( "Cargo.lock", &format!( r#" [[package]] name = "foo" version = "0.0.1" dependencies = [ "bar 0.1.0 (git+{0})" ] [[package]] name = "bar" version = "0.1.0" source = "git+{0}" [metadata] "checksum bar 0.1.0 (git+{0})" = "checksum" "#, git.url() ), ); let p = p.build(); p.cargo("fetch") .with_status(101) .with_stderr( "\ [UPDATING] git repository `[..]` error: checksum for `bar v0.1.0 ([..])` could not be calculated, but a \ checksum is listed in the existing lock file[..] this could be indicative of a few possible situations: * the source `[..]` supports checksums, but was replaced with one that doesn't * the lock file is corrupt unable to verify that `bar v0.1.0 ([..])` is the same as when the lockfile was generated ", ) .run(); } #[cargo_test] fn current_lockfile_format() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" "#, ) .file("src/lib.rs", ""); let p = p.build(); p.cargo("build").run(); let actual = p.read_lockfile(); let expected = "\ # This file is automatically @generated by Cargo.\n# It is not intended for manual editing. version = 3 [[package]] name = \"bar\" version = \"0.1.0\" source = \"registry+https://github.com/rust-lang/crates.io-index\" checksum = \"[..]\" [[package]] name = \"foo\" version = \"0.0.1\" dependencies = [ \"bar\", ] "; assert_match_exact(expected, &actual); } #[cargo_test] fn lockfile_without_root() { Package::new("bar", "0.1.0").publish(); let lockfile = r#" # This file is automatically @generated by Cargo. # It is not intended for manual editing. 
[[package]] name = "bar" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "foo" version = "0.0.1" dependencies = [ "bar", ] "#; let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" "#, ) .file("src/lib.rs", "") .file("Cargo.lock", lockfile); let p = p.build(); p.cargo("build").run(); let lock = p.read_lockfile(); assert_match_exact( r#"# [..] # [..] version = 3 [[package]] name = "bar" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "[..]" [[package]] name = "foo" version = "0.0.1" dependencies = [ "bar", ] "#, &lock, ); } #[cargo_test] fn locked_correct_error() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" "#, ) .file("src/lib.rs", ""); let p = p.build(); p.cargo("build --locked") .with_status(101) .with_stderr( "\ [UPDATING] `[..]` index error: the lock file [CWD]/Cargo.lock needs to be updated but --locked was passed to prevent this If you want to try to generate the lock file without accessing the network, \ remove the --locked flag and use --offline instead. ", ) .run(); } #[cargo_test] fn v2_format_preserved() { let cksum = Package::new("bar", "0.1.0").publish(); let lockfile = format!( r#"# This file is automatically @generated by Cargo. # It is not intended for manual editing. 
[[package]] name = "bar" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "{}" [[package]] name = "foo" version = "0.0.1" dependencies = [ "bar", ] "#, cksum ); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" "#, ) .file("src/lib.rs", "") .file("Cargo.lock", &lockfile) .build(); p.cargo("fetch").run(); let lock = p.read_lockfile(); assert_match_exact(&lockfile, &lock); } #[cargo_test] fn v2_path_and_crates_io() { let cksum010 = Package::new("a", "0.1.0").publish(); let cksum020 = Package::new("a", "0.2.0").publish(); let lockfile = format!( r#"# This file is automatically @generated by Cargo. # It is not intended for manual editing. [[package]] name = "a" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "{}" [[package]] name = "a" version = "0.2.0" [[package]] name = "a" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "{}" [[package]] name = "foo" version = "0.0.1" dependencies = [ "a 0.1.0", "a 0.2.0", "a 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] "#, cksum010, cksum020, ); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] a = { path = 'a' } b = { version = "0.1", package = 'a' } c = { version = "0.2", package = 'a' } "#, ) .file("src/lib.rs", "") .file( "a/Cargo.toml", r#" [project] name = "a" version = "0.2.0" "#, ) .file("a/src/lib.rs", "") .file("Cargo.lock", &lockfile) .build(); p.cargo("fetch").run(); p.cargo("fetch").run(); let lock = p.read_lockfile(); assert_match_exact(&lockfile, &lock); } #[cargo_test] fn v3_and_git() { let (git_project, repo) = git::new_repo("dep1", |project| { project .file("Cargo.toml", &basic_lib_manifest("dep1")) .file("src/lib.rs", "") }); let head_id = repo.head().unwrap().target().unwrap(); let lockfile = format!( r#"# This file is 
automatically @generated by Cargo. # It is not intended for manual editing. version = 3 [[package]] name = "dep1" version = "0.5.0" source = "git+{}?branch=master#{}" [[package]] name = "foo" version = "0.0.1" dependencies = [ "dep1", ] "#, git_project.url(), head_id, ); let p = project() .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] dep1 = {{ git = '{}', branch = 'master' }} "#, git_project.url(), ), ) .file("src/lib.rs", "") .file("Cargo.lock", "version = 3") .build(); p.cargo("fetch").run(); let lock = p.read_lockfile(); assert_match_exact(&lockfile, &lock); } #[cargo_test] fn lock_from_the_future() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#, ) .file("src/lib.rs", "") .file("Cargo.lock", "version = 10000000") .build(); p.cargo("fetch") .with_stderr( "\ error: failed to parse lock file at: [..] Caused by: lock file version `10000000` was found, but this version of Cargo does not \ understand this lock file, perhaps Cargo needs to be updated? ", ) .with_status(101) .run(); } #[cargo_test] fn preserve_old_format_if_no_update_needed() { let cksum = Package::new("bar", "0.1.0").publish(); let lockfile = format!( r#"# This file is automatically @generated by Cargo. # It is not intended for manual editing. 
[[package]] name = "bar" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "foo" version = "0.0.1" dependencies = [ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [metadata] "checksum bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "{}" "#, cksum ); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" "#, ) .file("src/lib.rs", "") .file("Cargo.lock", &lockfile) .build(); p.cargo("build --locked").run(); } #[cargo_test] fn same_name_version_different_sources() { let cksum = Package::new("foo", "0.1.0").publish(); let (git_project, repo) = git::new_repo("dep1", |project| { project .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" "#, ) .file("src/lib.rs", "") }); let head_id = repo.head().unwrap().target().unwrap(); // Lockfile was generated with Rust 1.51 let lockfile = format!( r#"# This file is automatically @generated by Cargo. # It is not intended for manual editing. 
version = 3 [[package]] name = "foo" version = "0.1.0" dependencies = [ "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "foo 0.1.0 (git+{url})", ] [[package]] name = "foo" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "{cksum}" [[package]] name = "foo" version = "0.1.0" source = "git+{url}#{sha}" "#, sha = head_id, url = git_project.url(), cksum = cksum ); let p = project() .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.1.0" [dependencies] foo = "0.1.0" foo2 = {{ git = '{}', package = 'foo' }} "#, git_project.url(), ), ) .file("src/lib.rs", "") .file("Cargo.lock", &lockfile) .build(); p.cargo("build").run(); assert_eq!(p.read_file("Cargo.lock"), lockfile); } #[cargo_test] fn bad_data_in_lockfile_error_meg() { Package::new("bar", "0.0.1").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "test" version = "0.0.0" [dependencies] bar = "*" "#, ) .file("src/main.rs", "fn main() {}") .file( "Cargo.lock", r#"# This file is automatically @generated by Cargo. # It is not intended for manual editing. version = 3 [[package]] name = "bar" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e1b9346248cf3391ead604c4407258d327c28e37209f6d56127598165165dda" [[package]] name = "test" version = "0.0.0" dependencies = [ "bar", ]"#, ) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [..] [ERROR] failed to select a version for the requirement `bar = \"*\"` (locked to 0.1.0) candidate versions found which didn't match: 0.0.1 location searched: `dummy-registry` index (which is replacing registry `crates-io`) required by package `test v0.0.0 ([..])` perhaps a crate was updated and forgotten to be re-vendored? ", ) .run(); } cargo-0.66.0/tests/testsuite/login.rs000066400000000000000000000056541432416201200175540ustar00rootroot00000000000000//! Tests for the `cargo login` command. 
use cargo_test_support::install::cargo_home; use cargo_test_support::registry::RegistryBuilder; use cargo_test_support::{cargo_process, t}; use std::fs::{self}; use std::path::PathBuf; use toml_edit::easy as toml; const TOKEN: &str = "test-token"; const TOKEN2: &str = "test-token2"; const ORIGINAL_TOKEN: &str = "api-token"; fn setup_new_credentials() { let config = cargo_home().join("credentials"); setup_new_credentials_at(config); } fn setup_new_credentials_at(config: PathBuf) { t!(fs::create_dir_all(config.parent().unwrap())); t!(fs::write( &config, format!(r#"token = "{token}""#, token = ORIGINAL_TOKEN) )); } fn check_token(expected_token: &str, registry: Option<&str>) -> bool { let credentials = cargo_home().join("credentials"); assert!(credentials.is_file()); let contents = fs::read_to_string(&credentials).unwrap(); let toml: toml::Value = contents.parse().unwrap(); let token = match (registry, toml) { // A registry has been provided, so check that the token exists in a // table for the registry. (Some(registry), toml::Value::Table(table)) => table .get("registries") .and_then(|registries_table| registries_table.get(registry)) .and_then(|registry_table| match registry_table.get("token") { Some(&toml::Value::String(ref token)) => Some(token.as_str().to_string()), _ => None, }), // There is no registry provided, so check the global token instead. 
(None, toml::Value::Table(table)) => table .get("registry") .and_then(|registry_table| registry_table.get("token")) .and_then(|v| match v { toml::Value::String(ref token) => Some(token.as_str().to_string()), _ => None, }), _ => None, }; if let Some(token_val) = token { token_val == expected_token } else { false } } #[cargo_test] fn registry_credentials() { let _alternative = RegistryBuilder::new().alternative().build(); let _alternative2 = RegistryBuilder::new() .alternative_named("alternative2") .build(); setup_new_credentials(); let reg = "alternative"; cargo_process("login --registry").arg(reg).arg(TOKEN).run(); // Ensure that we have not updated the default token assert!(check_token(ORIGINAL_TOKEN, None)); // Also ensure that we get the new token for the registry assert!(check_token(TOKEN, Some(reg))); let reg2 = "alternative2"; cargo_process("login --registry") .arg(reg2) .arg(TOKEN2) .run(); // Ensure not overwriting 1st alternate registry token with // 2nd alternate registry token (see rust-lang/cargo#7701). assert!(check_token(ORIGINAL_TOKEN, None)); assert!(check_token(TOKEN, Some(reg))); assert!(check_token(TOKEN2, Some(reg2))); } cargo-0.66.0/tests/testsuite/logout.rs000066400000000000000000000045171432416201200177520ustar00rootroot00000000000000//! Tests for the `cargo logout` command. use cargo_test_support::install::cargo_home; use cargo_test_support::{cargo_process, registry}; use std::fs; use toml_edit::easy as toml; #[cargo_test] fn gated() { registry::init(); cargo_process("logout") .masquerade_as_nightly_cargo(&["cargo-logout"]) .with_status(101) .with_stderr( "\ [ERROR] the `cargo logout` command is unstable, pass `-Z unstable-options` to enable it See https://github.com/rust-lang/cargo/issues/8933 for more information about \ the `cargo logout` command. ", ) .run(); } /// Checks whether or not the token is set for the given token. 
fn check_config_token(registry: Option<&str>, should_be_set: bool) { let credentials = cargo_home().join("credentials"); let contents = fs::read_to_string(&credentials).unwrap(); let toml: toml::Value = contents.parse().unwrap(); if let Some(registry) = registry { assert_eq!( toml.get("registries") .and_then(|registries| registries.get(registry)) .and_then(|registry| registry.get("token")) .is_some(), should_be_set ); } else { assert_eq!( toml.get("registry") .and_then(|registry| registry.get("token")) .is_some(), should_be_set ); } } fn simple_logout_test(reg: Option<&str>, flag: &str) { let msg = reg.unwrap_or("crates.io"); check_config_token(reg, true); cargo_process(&format!("logout -Z unstable-options {}", flag)) .masquerade_as_nightly_cargo(&["cargo-logout"]) .with_stderr(&format!( "\ [UPDATING] [..] [LOGOUT] token for `{}` has been removed from local storage ", msg )) .run(); check_config_token(reg, false); cargo_process(&format!("logout -Z unstable-options {}", flag)) .masquerade_as_nightly_cargo(&["cargo-logout"]) .with_stderr(&format!( "\ [LOGOUT] not currently logged in to `{}` ", msg )) .run(); check_config_token(reg, false); } #[cargo_test] fn default_registry() { registry::init(); simple_logout_test(None, ""); } #[cargo_test] fn other_registry() { registry::alt_init(); simple_logout_test(Some("alternative"), "--registry alternative"); } cargo-0.66.0/tests/testsuite/lto.rs000066400000000000000000000565671432416201200172530ustar00rootroot00000000000000use cargo::core::compiler::Lto; use cargo_test_support::registry::Package; use cargo_test_support::{basic_manifest, project, Project}; use std::process::Output; #[cargo_test] fn with_deps() { Package::new("bar", "0.0.1").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "test" version = "0.0.0" [dependencies] bar = "*" [profile.release] lto = true "#, ) .file("src/main.rs", "extern crate bar; fn main() {}") .build(); p.cargo("build -v --release") 
.with_stderr_contains("[..]`rustc[..]--crate-name bar[..]-C linker-plugin-lto[..]`") .with_stderr_contains("[..]`rustc[..]--crate-name test[..]-C lto[..]`") .run(); } #[cargo_test] fn shared_deps() { Package::new("bar", "0.0.1").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "test" version = "0.0.0" [dependencies] bar = "*" [build-dependencies] bar = "*" [profile.release] lto = true "#, ) .file("build.rs", "extern crate bar; fn main() {}") .file("src/main.rs", "extern crate bar; fn main() {}") .build(); p.cargo("build -v --release") .with_stderr_contains("[..]`rustc[..]--crate-name test[..]-C lto[..]`") .run(); } #[cargo_test] fn build_dep_not_ltod() { Package::new("bar", "0.0.1").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "test" version = "0.0.0" [build-dependencies] bar = "*" [profile.release] lto = true "#, ) .file("build.rs", "extern crate bar; fn main() {}") .file("src/main.rs", "fn main() {}") .build(); p.cargo("build -v --release") .with_stderr_contains("[..]`rustc[..]--crate-name bar[..]-C embed-bitcode=no[..]`") .with_stderr_contains("[..]`rustc[..]--crate-name test[..]-C lto[..]`") .run(); } #[cargo_test] fn complicated() { Package::new("dep-shared", "0.0.1") .file("src/lib.rs", "pub fn foo() {}") .publish(); Package::new("dep-normal2", "0.0.1") .file("src/lib.rs", "pub fn foo() {}") .publish(); Package::new("dep-normal", "0.0.1") .dep("dep-shared", "*") .dep("dep-normal2", "*") .file( "src/lib.rs", " pub fn foo() { dep_shared::foo(); dep_normal2::foo(); } ", ) .publish(); Package::new("dep-build2", "0.0.1") .file("src/lib.rs", "pub fn foo() {}") .publish(); Package::new("dep-build", "0.0.1") .dep("dep-shared", "*") .dep("dep-build2", "*") .file( "src/lib.rs", " pub fn foo() { dep_shared::foo(); dep_build2::foo(); } ", ) .publish(); Package::new("dep-proc-macro2", "0.0.1") .file("src/lib.rs", "pub fn foo() {}") .publish(); Package::new("dep-proc-macro", "0.0.1") .proc_macro(true) .dep("dep-shared", "*") 
.dep("dep-proc-macro2", "*") .file( "src/lib.rs", " extern crate proc_macro; use proc_macro::TokenStream; #[proc_macro_attribute] pub fn foo(_: TokenStream, a: TokenStream) -> TokenStream { dep_shared::foo(); dep_proc_macro2::foo(); a } ", ) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "test" version = "0.0.0" [lib] crate-type = ['cdylib', 'staticlib'] [dependencies] dep-normal = "*" dep-proc-macro = "*" [build-dependencies] dep-build = "*" [profile.release] lto = true # force build deps to share an opt-level with the rest of the # graph so they only get built once. [profile.release.build-override] opt-level = 3 "#, ) .file("build.rs", "fn main() { dep_build::foo() }") .file( "src/bin/foo-bin.rs", "#[dep_proc_macro::foo] fn main() { dep_normal::foo() }", ) .file( "src/lib.rs", "#[dep_proc_macro::foo] pub fn foo() { dep_normal::foo() }", ) .build(); p.cargo("build -v --release") // normal deps and their transitive dependencies do not need object // code, so they should have linker-plugin-lto specified .with_stderr_contains( "[..]`rustc[..]--crate-name dep_normal2 [..]-C linker-plugin-lto[..]`", ) .with_stderr_contains("[..]`rustc[..]--crate-name dep_normal [..]-C linker-plugin-lto[..]`") // build dependencies and their transitive deps don't need any bitcode, // so embedding should be turned off .with_stderr_contains("[..]`rustc[..]--crate-name dep_build2 [..]-C embed-bitcode=no[..]`") .with_stderr_contains("[..]`rustc[..]--crate-name dep_build [..]-C embed-bitcode=no[..]`") .with_stderr_contains( "[..]`rustc[..]--crate-name build_script_build [..]-C embed-bitcode=no[..]`", ) // proc macro deps are the same as build deps here .with_stderr_contains( "[..]`rustc[..]--crate-name dep_proc_macro2 [..]-C embed-bitcode=no[..]`", ) .with_stderr_contains( "[..]`rustc[..]--crate-name dep_proc_macro [..]-C embed-bitcode=no[..]`", ) .with_stderr_contains( "[..]`rustc[..]--crate-name foo_bin [..]--crate-type bin[..]-C lto[..]`", ) .with_stderr_contains( 
"[..]`rustc[..]--crate-name test [..]--crate-type cdylib[..]-C lto[..]`", ) .with_stderr_contains("[..]`rustc[..]--crate-name dep_shared [..]`") .with_stderr_does_not_contain("[..]--crate-name dep_shared[..]-C lto[..]") .with_stderr_does_not_contain("[..]--crate-name dep_shared[..]-C linker-plugin-lto[..]") .with_stderr_does_not_contain("[..]--crate-name dep_shared[..]-C embed-bitcode[..]") .run(); } #[cargo_test] fn off_in_manifest_works() { Package::new("bar", "0.0.1") .file("src/lib.rs", "pub fn foo() {}") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "test" version = "0.0.0" [dependencies] bar = "*" [profile.release] lto = "off" "#, ) .file("src/lib.rs", "pub fn foo() {}") .file( "src/main.rs", "fn main() { test::foo(); bar::foo(); }", ) .build(); p.cargo("build -v --release") .with_stderr( "\ [UPDATING] [..] [DOWNLOADING] [..] [DOWNLOADED] [..] [COMPILING] bar v0.0.1 [RUNNING] `rustc --crate-name bar [..]--crate-type lib [..]-C lto=off -C embed-bitcode=no[..] [COMPILING] test [..] [RUNNING] `rustc --crate-name test [..]--crate-type lib [..]-C lto=off -C embed-bitcode=no[..] [RUNNING] `rustc --crate-name test src/main.rs [..]--crate-type bin [..]-C lto=off[..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn between_builds() { let p = project() .file( "Cargo.toml", r#" [package] name = "test" version = "0.0.0" [profile.release] lto = true "#, ) .file("src/lib.rs", "pub fn foo() {}") .file("src/main.rs", "fn main() { test::foo() }") .build(); p.cargo("build -v --release --lib") .with_stderr( "\ [COMPILING] test [..] [RUNNING] `rustc [..]--crate-type lib[..]-C linker-plugin-lto[..] [FINISHED] [..] ", ) .run(); p.cargo("build -v --release") .with_stderr_contains( "\ [COMPILING] test [..] [RUNNING] `rustc [..]--crate-type bin[..]-C lto[..] [FINISHED] [..] 
", ) .run(); } #[cargo_test] fn test_all() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" [profile.release] lto = true "#, ) .file("src/main.rs", "fn main() {}") .file("tests/a.rs", "") .file("tests/b.rs", "") .build(); p.cargo("test --release -v") .with_stderr_contains("[RUNNING] `rustc[..]--crate-name foo[..]-C lto[..]") .run(); } #[cargo_test] fn test_all_and_bench() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" [profile.release] lto = true [profile.bench] lto = true "#, ) .file("src/main.rs", "fn main() {}") .file("tests/a.rs", "") .file("tests/b.rs", "") .build(); p.cargo("test --release -v") .with_stderr_contains("[RUNNING] `rustc[..]--crate-name a[..]-C lto[..]") .with_stderr_contains("[RUNNING] `rustc[..]--crate-name b[..]-C lto[..]") .with_stderr_contains("[RUNNING] `rustc[..]--crate-name foo[..]-C lto[..]") .run(); } /// Basic setup: /// /// foo v0.0.0 /// β”œβ”€β”€ bar v0.0.0 /// β”‚ β”œβ”€β”€ registry v0.0.1 /// β”‚ └── registry-shared v0.0.1 /// └── registry-shared v0.0.1 /// /// Where `bar` will have the given crate types. 
fn project_with_dep(crate_types: &str) -> Project { Package::new("registry", "0.0.1") .file("src/lib.rs", r#"pub fn foo() { println!("registry"); }"#) .publish(); Package::new("registry-shared", "0.0.1") .file("src/lib.rs", r#"pub fn foo() { println!("shared"); }"#) .publish(); project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.0" [workspace] [dependencies] bar = { path = 'bar' } registry-shared = "*" [profile.release] lto = true "#, ) .file( "src/main.rs", " fn main() { bar::foo(); registry_shared::foo(); } ", ) .file( "bar/Cargo.toml", &format!( r#" [package] name = "bar" version = "0.0.0" [dependencies] registry = "*" registry-shared = "*" [lib] crate-type = [{}] "#, crate_types ), ) .file( "bar/src/lib.rs", r#" pub fn foo() { println!("bar"); registry::foo(); registry_shared::foo(); } "#, ) .file("tests/a.rs", "") .file("bar/tests/b.rs", "") .build() } /// Helper for checking which LTO behavior is used for a specific crate. /// /// `krate_info` is extra compiler flags used to distinguish this if the same /// crate name is being built multiple times. 
fn verify_lto(output: &Output, krate: &str, krate_info: &str, expected_lto: Lto) { let stderr = std::str::from_utf8(&output.stderr).unwrap(); let mut matches = stderr.lines().filter(|line| { line.contains("Running") && line.contains(&format!("--crate-name {} ", krate)) && line.contains(krate_info) }); let line = matches.next().unwrap_or_else(|| { panic!( "expected to find crate `{}` info: `{}`, not found in output:\n{}", krate, krate_info, stderr ); }); if let Some(line2) = matches.next() { panic!( "found multiple lines matching crate `{}` info: `{}`:\nline1:{}\nline2:{}\noutput:\n{}", krate, krate_info, line, line2, stderr ); } let actual_lto = if let Some(index) = line.find("-C lto=") { let s = &line[index..]; let end = s.find(' ').unwrap(); let mode = &line[index..index + end]; if mode == "off" { Lto::Off } else { Lto::Run(Some(mode.into())) } } else if line.contains("-C lto") { Lto::Run(None) } else if line.contains("-C linker-plugin-lto") { Lto::OnlyBitcode } else if line.contains("-C embed-bitcode=no") { Lto::OnlyObject } else { Lto::ObjectAndBitcode }; assert_eq!( actual_lto, expected_lto, "did not find expected LTO in line: {}", line ); } #[cargo_test] fn cdylib_and_rlib() { let p = project_with_dep("'cdylib', 'rlib'"); let output = p.cargo("build --release -v").exec_with_output().unwrap(); // `registry` is ObjectAndBitcode because because it needs Object for the // rlib, and Bitcode for the cdylib (which doesn't support LTO). verify_lto( &output, "registry", "--crate-type lib", Lto::ObjectAndBitcode, ); // Same as `registry` verify_lto( &output, "registry_shared", "--crate-type lib", Lto::ObjectAndBitcode, ); // Same as `registry` verify_lto( &output, "bar", "--crate-type cdylib --crate-type rlib", Lto::ObjectAndBitcode, ); verify_lto(&output, "foo", "--crate-type bin", Lto::Run(None)); p.cargo("test --release -v") .with_stderr_unordered( "\ [FRESH] registry v0.0.1 [FRESH] registry-shared v0.0.1 [FRESH] bar v0.0.0 [..] [COMPILING] foo [..] 
[RUNNING] `rustc --crate-name foo [..]-C lto [..]--test[..] [RUNNING] `rustc --crate-name a [..]-C lto [..]--test[..] [FINISHED] [..] [RUNNING] [..] [RUNNING] [..] ", ) .run(); p.cargo("build --release -v --manifest-path bar/Cargo.toml") .with_stderr_unordered( "\ [FRESH] registry-shared v0.0.1 [FRESH] registry v0.0.1 [FRESH] bar v0.0.0 [..] [FINISHED] [..] ", ) .run(); p.cargo("test --release -v --manifest-path bar/Cargo.toml") .with_stderr_unordered( "\ [FRESH] registry-shared v0.0.1 [FRESH] registry v0.0.1 [COMPILING] bar [..] [RUNNING] `rustc --crate-name bar [..]-C lto[..]--test[..] [RUNNING] `rustc --crate-name b [..]-C lto[..]--test[..] [FINISHED] [..] [RUNNING] [..]target/release/deps/bar-[..] [RUNNING] [..]target/release/deps/b-[..] [DOCTEST] bar [RUNNING] `rustdoc --crate-type cdylib --crate-type rlib --crate-name bar --test [..]-C lto[..] ", ) .run(); } #[cargo_test] fn dylib() { let p = project_with_dep("'dylib'"); let output = p.cargo("build --release -v").exec_with_output().unwrap(); // `registry` is OnlyObject because rustc doesn't support LTO with dylibs. verify_lto(&output, "registry", "--crate-type lib", Lto::OnlyObject); // `registry_shared` is both because it is needed by both bar (Object) and // foo (Bitcode for LTO). verify_lto( &output, "registry_shared", "--crate-type lib", Lto::ObjectAndBitcode, ); // `bar` is OnlyObject because rustc doesn't support LTO with dylibs. verify_lto(&output, "bar", "--crate-type dylib", Lto::OnlyObject); // `foo` is LTO because it is a binary, and the profile specifies `lto=true`. verify_lto(&output, "foo", "--crate-type bin", Lto::Run(None)); // `cargo test` should not rebuild dependencies. It builds the test // executables with `lto=true` because the tests are built with the // `--release` flag. p.cargo("test --release -v") .with_stderr_unordered( "\ [FRESH] registry v0.0.1 [FRESH] registry-shared v0.0.1 [FRESH] bar v0.0.0 [..] [COMPILING] foo [..] [RUNNING] `rustc --crate-name foo [..]-C lto [..]--test[..] 
[RUNNING] `rustc --crate-name a [..]-C lto [..]--test[..] [FINISHED] [..] [RUNNING] [..] [RUNNING] [..] ", ) .run(); // Building just `bar` causes `registry-shared` to get rebuilt because it // switches to OnlyObject because it is now only being used with a dylib // which does not support LTO. // // `bar` gets rebuilt because `registry_shared` got rebuilt. p.cargo("build --release -v --manifest-path bar/Cargo.toml") .with_stderr_unordered( "\ [COMPILING] registry-shared v0.0.1 [FRESH] registry v0.0.1 [RUNNING] `rustc --crate-name registry_shared [..]-C embed-bitcode=no[..] [COMPILING] bar [..] [RUNNING] `rustc --crate-name bar [..]--crate-type dylib [..]-C embed-bitcode=no[..] [FINISHED] [..] ", ) .run(); // Testing just `bar` causes `registry` to get rebuilt because it switches // to needing both Object (for the `bar` dylib) and Bitcode (for the test // built with LTO). // // `bar` the dylib gets rebuilt because `registry` got rebuilt. p.cargo("test --release -v --manifest-path bar/Cargo.toml") .with_stderr_unordered( "\ [FRESH] registry-shared v0.0.1 [COMPILING] registry v0.0.1 [RUNNING] `rustc --crate-name registry [..] [COMPILING] bar [..] [RUNNING] `rustc --crate-name bar [..]--crate-type dylib [..]-C embed-bitcode=no[..] [RUNNING] `rustc --crate-name bar [..]-C lto [..]--test[..] [RUNNING] `rustc --crate-name b [..]-C lto [..]--test[..] [FINISHED] [..] [RUNNING] [..] [RUNNING] [..] ", ) .run(); } #[cargo_test] fn test_profile() { Package::new("bar", "0.0.1") .file("src/lib.rs", "pub fn foo() -> i32 { 123 } ") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [profile.test] lto = 'thin' [dependencies] bar = "*" "#, ) .file( "src/lib.rs", r#" #[test] fn t1() { assert_eq!(123, bar::foo()); } "#, ) .build(); p.cargo("test -v") // unordered because the two `foo` builds start in parallel .with_stderr_unordered("\ [UPDATING] [..] [DOWNLOADING] [..] [DOWNLOADED] [..] 
[COMPILING] bar v0.0.1 [RUNNING] `rustc --crate-name bar [..]crate-type lib[..] [COMPILING] foo [..] [RUNNING] `rustc --crate-name foo [..]--crate-type lib --emit=dep-info,metadata,link -C linker-plugin-lto[..] [RUNNING] `rustc --crate-name foo [..]--emit=dep-info,link -C lto=thin [..]--test[..] [FINISHED] [..] [RUNNING] [..] [DOCTEST] foo [RUNNING] `rustdoc [..] ") .run(); } #[cargo_test] fn doctest() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [profile.release] lto = true [dependencies] bar = { path = "bar" } "#, ) .file( "src/lib.rs", r#" /// Foo! /// /// ``` /// foo::foo(); /// ``` pub fn foo() { bar::bar(); } "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file( "bar/src/lib.rs", r#" pub fn bar() { println!("hi!"); } "#, ) .build(); p.cargo("test --doc --release -v") .with_stderr_contains("[..]`rustc --crate-name bar[..]-C linker-plugin-lto[..]") .with_stderr_contains("[..]`rustc --crate-name foo[..]-C linker-plugin-lto[..]") // embed-bitcode should be harmless here .with_stderr_contains("[..]`rustdoc [..]-C lto[..]") .run(); // Try with bench profile. p.cargo("test --doc --release -v") .env("CARGO_PROFILE_BENCH_LTO", "true") .with_stderr_unordered( "\ [FRESH] bar v0.1.0 [..] [FRESH] foo v0.1.0 [..] [FINISHED] release [..] [DOCTEST] foo [RUNNING] `rustdoc [..]-C lto[..] 
", ) .run(); } #[cargo_test] fn dylib_rlib_bin() { // dylib+rlib linked with a binary let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [lib] crate-type = ["dylib", "rlib"] [profile.release] lto = true "#, ) .file("src/lib.rs", "pub fn foo() { println!(\"hi!\"); }") .file("src/bin/ferret.rs", "fn main() { foo::foo(); }") .build(); let output = p.cargo("build --release -v").exec_with_output().unwrap(); verify_lto( &output, "foo", "--crate-type dylib --crate-type rlib", Lto::ObjectAndBitcode, ); verify_lto(&output, "ferret", "--crate-type bin", Lto::Run(None)); } #[cargo_test] fn fresh_swapping_commands() { // In some rare cases, different commands end up building dependencies // with different LTO settings. This checks that it doesn't cause the // cache to thrash in that scenario. Package::new("bar", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "1.0" [profile.release] lto = true "#, ) .file("src/lib.rs", "pub fn foo() { println!(\"hi!\"); }") .build(); p.cargo("build --release -v") .with_stderr( "\ [UPDATING] [..] [DOWNLOADING] crates ... [DOWNLOADED] bar v1.0.0 [..] [COMPILING] bar v1.0.0 [RUNNING] `rustc --crate-name bar [..]-C linker-plugin-lto[..] [COMPILING] foo v0.1.0 [..] [RUNNING] `rustc --crate-name foo src/lib.rs [..]-C linker-plugin-lto[..] [FINISHED] [..] ", ) .run(); p.cargo("test --release -v") .with_stderr_unordered( "\ [FRESH] bar v1.0.0 [COMPILING] foo v0.1.0 [..] [RUNNING] `rustc --crate-name foo src/lib.rs [..]-C lto[..]--test[..] [FINISHED] [..] [RUNNING] `[..]/foo[..]` [DOCTEST] foo [RUNNING] `rustdoc [..]-C lto[..] ", ) .run(); p.cargo("build --release -v") .with_stderr( "\ [FRESH] bar v1.0.0 [FRESH] foo [..] [FINISHED] [..] ", ) .run(); p.cargo("test --release -v --no-run -v") .with_stderr( "\ [FRESH] bar v1.0.0 [FRESH] foo [..] [FINISHED] [..] 
[EXECUTABLE] `[..]/target/release/deps/foo-[..][EXE]` ", ) .run(); } cargo-0.66.0/tests/testsuite/main.rs000066400000000000000000000046341432416201200173650ustar00rootroot00000000000000// See src/cargo/lib.rs for notes on these lint settings. #![warn(rust_2018_idioms)] #![allow(clippy::all)] #![cfg_attr(feature = "deny-warnings", deny(warnings))] #[macro_use] extern crate cargo_test_macro; mod advanced_env; mod alt_registry; mod artifact_dep; mod bad_config; mod bad_manifest_path; mod bench; mod binary_name; mod build; mod build_plan; mod build_script; mod build_script_env; mod build_script_extra_link_arg; mod cache_messages; mod cargo_add; mod cargo_alias_config; mod cargo_command; mod cargo_config; mod cargo_env_config; mod cargo_features; mod cargo_targets; mod cfg; mod check; mod check_cfg; mod clean; mod collisions; mod concurrent; mod config; mod config_cli; mod config_include; mod corrupt_git; mod credential_process; mod cross_compile; mod cross_publish; mod custom_target; mod death; mod dep_info; mod directory; mod doc; mod edition; mod error; mod features; mod features2; mod features_namespaced; mod fetch; mod fix; mod freshness; mod future_incompat_report; mod generate_lockfile; mod git; mod git_auth; mod git_gc; mod glob_targets; mod help; mod inheritable_workspace_fields; mod init; mod install; mod install_upgrade; mod jobserver; mod list_availables; mod local_registry; mod locate_project; mod lockfile_compat; mod login; mod logout; mod lto; mod member_discovery; mod member_errors; mod message_format; mod messages; mod metabuild; mod metadata; mod minimal_versions; mod multitarget; mod net_config; mod new; mod offline; mod old_cargos; mod out_dir; mod owner; mod package; mod package_features; mod patch; mod path; mod paths; mod pkgid; mod plugins; mod proc_macro; mod profile_config; mod profile_custom; mod profile_overrides; mod profile_targets; mod profiles; mod progress; mod pub_priv; mod publish; mod publish_lockfile; mod read_manifest; mod registry; 
mod rename_deps; mod replace; mod required_features; mod run; mod rust_version; mod rustc; mod rustc_info_cache; mod rustdoc; mod rustdoc_extern_html; mod rustdocflags; mod rustflags; mod search; mod shell_quoting; mod standard_lib; mod test; mod timings; mod tool_paths; mod tree; mod tree_graph_features; mod unit_graph; mod update; mod vendor; mod verify_project; mod version; mod warn_on_failure; mod weak_dep_features; mod workspaces; mod yank; #[cargo_test] fn aaa_trigger_cross_compile_disabled_check() { // This triggers the cross compile disabled check to run ASAP, see #5141 cargo_test_support::cross_compile::disabled(); } cargo-0.66.0/tests/testsuite/member_discovery.rs000066400000000000000000000024101432416201200217650ustar00rootroot00000000000000//! Tests for workspace member discovery. use cargo::core::{Shell, Workspace}; use cargo::util::config::Config; use cargo_test_support::install::cargo_home; use cargo_test_support::project; use cargo_test_support::registry; /// Tests exclusion of non-directory files from workspace member discovery using glob `*`. #[cargo_test] fn bad_file_member_exclusion() { let p = project() .file( "Cargo.toml", r#" [workspace] members = [ "crates/*" ] "#, ) .file("crates/.DS_Store", "PLACEHOLDER") .file( "crates/bar/Cargo.toml", r#" [project] name = "bar" version = "0.1.0" authors = [] "#, ) .file("crates/bar/src/main.rs", "fn main() {}") .build(); // Prevent this test from accessing the network by setting up .cargo/config. registry::init(); let config = Config::new( Shell::from_write(Box::new(Vec::new())), cargo_home(), cargo_home(), ); let ws = Workspace::new(&p.root().join("Cargo.toml"), &config).unwrap(); assert_eq!(ws.members().count(), 1); assert_eq!(ws.members().next().unwrap().name(), "bar"); } cargo-0.66.0/tests/testsuite/member_errors.rs000066400000000000000000000120761432416201200213030ustar00rootroot00000000000000//! Tests for workspace member errors. 
use cargo::core::resolver::ResolveError; use cargo::core::{compiler::CompileMode, Shell, Workspace}; use cargo::ops::{self, CompileOptions}; use cargo::util::{config::Config, errors::ManifestError}; use cargo_test_support::install::cargo_home; use cargo_test_support::project; use cargo_test_support::registry; /// Tests inclusion of a `ManifestError` pointing to a member manifest /// when that manifest fails to deserialize. #[cargo_test] fn toml_deserialize_manifest_error() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [dependencies] bar = { path = "bar" } [workspace] "#, ) .file("src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.1.0" authors = [] [dependencies] foobar == "0.55" "#, ) .file("bar/src/main.rs", "fn main() {}") .build(); let root_manifest_path = p.root().join("Cargo.toml"); let member_manifest_path = p.root().join("bar").join("Cargo.toml"); let error = Workspace::new(&root_manifest_path, &Config::default().unwrap()).unwrap_err(); eprintln!("{:?}", error); let manifest_err: &ManifestError = error.downcast_ref().expect("Not a ManifestError"); assert_eq!(manifest_err.manifest_path(), &root_manifest_path); let causes: Vec<_> = manifest_err.manifest_causes().collect(); assert_eq!(causes.len(), 1, "{:?}", causes); assert_eq!(causes[0].manifest_path(), &member_manifest_path); } /// Tests inclusion of a `ManifestError` pointing to a member manifest /// when that manifest has an invalid dependency path. 
#[cargo_test] fn member_manifest_path_io_error() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [dependencies] bar = { path = "bar" } [workspace] "#, ) .file("src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.1.0" authors = [] [dependencies] foobar = { path = "nosuch" } "#, ) .file("bar/src/main.rs", "fn main() {}") .build(); let root_manifest_path = p.root().join("Cargo.toml"); let member_manifest_path = p.root().join("bar").join("Cargo.toml"); let missing_manifest_path = p.root().join("bar").join("nosuch").join("Cargo.toml"); let error = Workspace::new(&root_manifest_path, &Config::default().unwrap()).unwrap_err(); eprintln!("{:?}", error); let manifest_err: &ManifestError = error.downcast_ref().expect("Not a ManifestError"); assert_eq!(manifest_err.manifest_path(), &root_manifest_path); let causes: Vec<_> = manifest_err.manifest_causes().collect(); assert_eq!(causes.len(), 2, "{:?}", causes); assert_eq!(causes[0].manifest_path(), &member_manifest_path); assert_eq!(causes[1].manifest_path(), &missing_manifest_path); } /// Tests dependency version errors provide which package failed via a `ResolveError`. #[cargo_test] fn member_manifest_version_error() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [dependencies] bar = { path = "bar" } [workspace] "#, ) .file("src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.1.0" authors = [] [dependencies] i-dont-exist = "0.55" "#, ) .file("bar/src/main.rs", "fn main() {}") .build(); // Prevent this test from accessing the network by setting up .cargo/config. 
registry::init(); let config = Config::new( Shell::from_write(Box::new(Vec::new())), cargo_home(), cargo_home(), ); let ws = Workspace::new(&p.root().join("Cargo.toml"), &config).unwrap(); let compile_options = CompileOptions::new(&config, CompileMode::Build).unwrap(); let member_bar = ws.members().find(|m| &*m.name() == "bar").unwrap(); let error = ops::compile(&ws, &compile_options).map(|_| ()).unwrap_err(); eprintln!("{:?}", error); let resolve_err: &ResolveError = error.downcast_ref().expect("Not a ResolveError"); let package_path = resolve_err.package_path(); assert_eq!(package_path.len(), 1, "package_path: {:?}", package_path); assert_eq!(package_path[0], member_bar.package_id()); } cargo-0.66.0/tests/testsuite/message_format.rs000066400000000000000000000070141432416201200214300ustar00rootroot00000000000000//! Tests for --message-format flag. use cargo_test_support::{basic_lib_manifest, basic_manifest, project}; #[cargo_test] fn cannot_specify_two() { let p = project() .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("src/main.rs", "fn main() {}") .build(); let formats = ["human", "json", "short"]; let two_kinds = "error: cannot specify two kinds of `message-format` arguments\n"; for a in formats.iter() { for b in formats.iter() { p.cargo(&format!("build --message-format {},{}", a, b)) .with_status(101) .with_stderr(two_kinds) .run(); } } } #[cargo_test] fn double_json_works() { let p = project() .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build --message-format json,json-render-diagnostics") .run(); p.cargo("build --message-format json,json-diagnostic-short") .run(); p.cargo("build --message-format json,json-diagnostic-rendered-ansi") .run(); p.cargo("build --message-format json --message-format json-diagnostic-rendered-ansi") .run(); p.cargo("build --message-format json-diagnostic-rendered-ansi") .run(); p.cargo("build --message-format 
json-diagnostic-short,json-diagnostic-rendered-ansi") .run(); } #[cargo_test] fn cargo_renders() { let p = project() .file( "Cargo.toml", r#" [package] name = 'foo' version = '0.1.0' [dependencies] bar = { path = 'bar' } "#, ) .file("src/main.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "") .build(); p.cargo("build --message-format json-render-diagnostics") .with_status(101) .with_stdout( "{\"reason\":\"compiler-artifact\",[..]\n\ {\"reason\":\"build-finished\",\"success\":false}", ) .with_stderr_contains( "\ [COMPILING] bar [..] [COMPILING] foo [..] error[..]`main`[..] ", ) .run(); } #[cargo_test] fn cargo_renders_short() { let p = project() .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("src/main.rs", "") .build(); p.cargo("build --message-format json-render-diagnostics,json-diagnostic-short") .with_status(101) .with_stderr_contains( "\ [COMPILING] foo [..] error[..]`main`[..] ", ) .with_stderr_does_not_contain("note:") .run(); } #[cargo_test] fn cargo_renders_ansi() { let p = project() .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("src/main.rs", "") .build(); p.cargo("build --message-format json-diagnostic-rendered-ansi") .with_status(101) .with_stdout_contains("[..]\\u001b[38;5;9merror[..]") .run(); } #[cargo_test] fn cargo_renders_doctests() { let p = project() .file("Cargo.toml", &basic_lib_manifest("foo")) .file( "src/lib.rs", "\ /// ```rust /// bar() /// ``` pub fn bar() {} ", ) .build(); p.cargo("test --doc --message-format short") .with_status(101) .with_stdout_contains("src/lib.rs:2:1: error[E0425]:[..]") .with_stdout_contains("[..]src/lib.rs - bar (line 1)[..]") .run(); } cargo-0.66.0/tests/testsuite/messages.rs000066400000000000000000000104361432416201200202450ustar00rootroot00000000000000//! General tests specifically about diagnostics and other messages. //! //! Tests for message caching can be found in `cache_messages`. 
use cargo_test_support::{process, project, Project}; use cargo_util::ProcessError; /// Captures the actual diagnostics displayed by rustc. This is done to avoid /// relying on the exact message formatting in rustc. pub fn raw_rustc_output(project: &Project, path: &str, extra: &[&str]) -> String { let mut proc = process("rustc"); if cfg!(windows) { // Sanitize in case the caller wants to do direct string comparison with Cargo's output. proc.arg(path.replace('/', "\\")); } else { proc.arg(path); } let rustc_output = match proc .arg("--crate-type=lib") .args(extra) .cwd(project.root()) .exec_with_output() { Ok(output) => output.stderr, Err(e) => e.downcast::().unwrap().stderr.unwrap(), }; // Do a little dance to remove rustc's "warnings emitted" message and the subsequent newline. let stderr = std::str::from_utf8(&rustc_output).expect("utf8"); let mut lines = stderr.lines(); let mut result = String::new(); while let Some(line) = lines.next() { if line.contains("warning emitted") || line.contains("warnings emitted") || line.contains("aborting due to") { // Eat blank line. match lines.next() { None | Some("") => continue, Some(s) => panic!("unexpected str {}", s), } } result.push_str(line); result.push('\n'); } result } #[cargo_test] fn deduplicate_messages_basic() { let p = project() .file( "src/lib.rs", r#" pub fn foo() { let x = 1; } "#, ) .build(); let rustc_message = raw_rustc_output(&p, "src/lib.rs", &[]); let expected_output = format!( "{}\ warning: `foo` (lib) generated 1 warning warning: `foo` (lib test) generated 1 warning (1 duplicate) [FINISHED] [..] [EXECUTABLE] unittests src/lib.rs (target/debug/deps/foo-[..][EXE]) ", rustc_message ); p.cargo("test --no-run -j1") .with_stderr(&format!("[COMPILING] foo [..]\n{}", expected_output)) .run(); // Run again, to check for caching behavior. 
p.cargo("test --no-run -j1") .with_stderr(expected_output) .run(); } #[cargo_test] fn deduplicate_messages_mismatched_warnings() { // One execution prints 1 warning, the other prints 2 where there is an overlap. let p = project() .file( "src/lib.rs", r#" pub fn foo() { let x = 1; } #[test] fn t1() { let MY_VALUE = 1; assert_eq!(MY_VALUE, 1); } "#, ) .build(); let lib_output = raw_rustc_output(&p, "src/lib.rs", &[]); let mut lib_test_output = raw_rustc_output(&p, "src/lib.rs", &["--test"]); // Remove the duplicate warning. let start = lib_test_output.find(&lib_output).expect("same warning"); lib_test_output.replace_range(start..start + lib_output.len(), ""); let expected_output = format!( "\ {}\ warning: `foo` (lib) generated 1 warning {}\ warning: `foo` (lib test) generated 2 warnings (1 duplicate) [FINISHED] [..] [EXECUTABLE] unittests src/lib.rs (target/debug/deps/foo-[..][EXE]) ", lib_output, lib_test_output ); p.cargo("test --no-run -j1") .with_stderr(&format!("[COMPILING] foo v0.0.1 [..]\n{}", expected_output)) .run(); // Run again, to check for caching behavior. p.cargo("test --no-run -j1") .with_stderr(expected_output) .run(); } #[cargo_test] fn deduplicate_errors() { let p = project() .file( "src/lib.rs", r#" this should not compile "#, ) .build(); let rustc_message = raw_rustc_output(&p, "src/lib.rs", &[]); p.cargo("test -j1") .with_status(101) .with_stderr(&format!( "\ [COMPILING] foo v0.0.1 [..] {}error: could not compile `foo` due to previous error ", rustc_message )) .run(); } cargo-0.66.0/tests/testsuite/metabuild.rs000066400000000000000000000526061432416201200204110ustar00rootroot00000000000000//! Tests for the metabuild feature (declarative build scripts). 
use cargo_test_support::{ basic_lib_manifest, basic_manifest, is_coarse_mtime, project, registry::Package, rustc_host, Project, }; use std::str; #[cargo_test] fn metabuild_gated() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" metabuild = ["mb"] "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .masquerade_as_nightly_cargo(&["metabuild"]) .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[..]` Caused by: feature `metabuild` is required The package requires the Cargo feature called `metabuild`, \ but that feature is not stabilized in this version of Cargo (1.[..]). Consider adding `cargo-features = [\"metabuild\"]` to the top of Cargo.toml \ (above the [package] table) to tell Cargo you are opting in to use this unstable feature. See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#metabuild \ for more information about the status of this feature. ", ) .run(); } fn basic_project() -> Project { project() .file( "Cargo.toml", r#" cargo-features = ["metabuild"] [package] name = "foo" version = "0.0.1" metabuild = ["mb", "mb-other"] [build-dependencies] mb = {path="mb"} mb-other = {path="mb-other"} "#, ) .file("src/lib.rs", "") .file("mb/Cargo.toml", &basic_lib_manifest("mb")) .file( "mb/src/lib.rs", r#"pub fn metabuild() { println!("Hello mb"); }"#, ) .file( "mb-other/Cargo.toml", r#" [package] name = "mb-other" version = "0.0.1" "#, ) .file( "mb-other/src/lib.rs", r#"pub fn metabuild() { println!("Hello mb-other"); }"#, ) .build() } #[cargo_test] fn metabuild_basic() { let p = basic_project(); p.cargo("build -vv") .masquerade_as_nightly_cargo(&["metabuild"]) .with_stdout_contains("[foo 0.0.1] Hello mb") .with_stdout_contains("[foo 0.0.1] Hello mb-other") .run(); } #[cargo_test] fn metabuild_error_both() { let p = project() .file( "Cargo.toml", r#" cargo-features = ["metabuild"] [package] name = "foo" version = "0.0.1" metabuild = "mb" [build-dependencies] mb = {path="mb"} "#, ) 
.file("src/lib.rs", "") .file("build.rs", r#"fn main() {}"#) .file("mb/Cargo.toml", &basic_lib_manifest("mb")) .file( "mb/src/lib.rs", r#"pub fn metabuild() { println!("Hello mb"); }"#, ) .build(); p.cargo("build -vv") .masquerade_as_nightly_cargo(&["metabuild"]) .with_status(101) .with_stderr_contains( "\ error: failed to parse manifest at [..] Caused by: cannot specify both `metabuild` and `build` ", ) .run(); } #[cargo_test] fn metabuild_missing_dep() { let p = project() .file( "Cargo.toml", r#" cargo-features = ["metabuild"] [package] name = "foo" version = "0.0.1" metabuild = "mb" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build -vv") .masquerade_as_nightly_cargo(&["metabuild"]) .with_status(101) .with_stderr_contains( "\ error: failed to parse manifest at [..] Caused by: metabuild package `mb` must be specified in `build-dependencies`", ) .run(); } #[cargo_test] fn metabuild_optional_dep() { let p = project() .file( "Cargo.toml", r#" cargo-features = ["metabuild"] [package] name = "foo" version = "0.0.1" metabuild = "mb" [build-dependencies] mb = {path="mb", optional=true} "#, ) .file("src/lib.rs", "") .file("mb/Cargo.toml", &basic_lib_manifest("mb")) .file( "mb/src/lib.rs", r#"pub fn metabuild() { println!("Hello mb"); }"#, ) .build(); p.cargo("build -vv") .masquerade_as_nightly_cargo(&["metabuild"]) .with_stdout_does_not_contain("[foo 0.0.1] Hello mb") .run(); p.cargo("build -vv --features mb") .masquerade_as_nightly_cargo(&["metabuild"]) .with_stdout_contains("[foo 0.0.1] Hello mb") .run(); } #[cargo_test] fn metabuild_lib_name() { // Test when setting `name` on [lib]. 
let p = project() .file( "Cargo.toml", r#" cargo-features = ["metabuild"] [package] name = "foo" version = "0.0.1" metabuild = "mb" [build-dependencies] mb = {path="mb"} "#, ) .file("src/lib.rs", "") .file( "mb/Cargo.toml", r#" [package] name = "mb" version = "0.0.1" [lib] name = "other" "#, ) .file( "mb/src/lib.rs", r#"pub fn metabuild() { println!("Hello mb"); }"#, ) .build(); p.cargo("build -vv") .masquerade_as_nightly_cargo(&["metabuild"]) .with_stdout_contains("[foo 0.0.1] Hello mb") .run(); } #[cargo_test] fn metabuild_fresh() { if is_coarse_mtime() { // This test doesn't work on coarse mtimes very well. Because the // metabuild script is created at build time, its mtime is almost // always equal to the mtime of the output. The second call to `build` // will then think it needs to be rebuilt when it should be fresh. return; } // Check that rebuild is fresh. let p = project() .file( "Cargo.toml", r#" cargo-features = ["metabuild"] [package] name = "foo" version = "0.0.1" metabuild = "mb" [build-dependencies] mb = {path="mb"} "#, ) .file("src/lib.rs", "") .file("mb/Cargo.toml", &basic_lib_manifest("mb")) .file( "mb/src/lib.rs", r#"pub fn metabuild() { println!("Hello mb"); }"#, ) .build(); p.cargo("build -vv") .masquerade_as_nightly_cargo(&["metabuild"]) .with_stdout_contains("[foo 0.0.1] Hello mb") .run(); p.cargo("build -vv") .masquerade_as_nightly_cargo(&["metabuild"]) .with_stdout_does_not_contain("[foo 0.0.1] Hello mb") .with_stderr( "\ [FRESH] mb [..] [FRESH] foo [..] [FINISHED] dev [..] 
", ) .run(); } #[cargo_test] fn metabuild_links() { let p = project() .file( "Cargo.toml", r#" cargo-features = ["metabuild"] [package] name = "foo" version = "0.0.1" links = "cat" metabuild = "mb" [build-dependencies] mb = {path="mb"} "#, ) .file("src/lib.rs", "") .file("mb/Cargo.toml", &basic_lib_manifest("mb")) .file( "mb/src/lib.rs", r#" pub fn metabuild() { assert_eq!(std::env::var("CARGO_MANIFEST_LINKS"), Ok("cat".to_string())); println!("Hello mb"); } "#, ) .build(); p.cargo("build -vv") .masquerade_as_nightly_cargo(&["metabuild"]) .with_stdout_contains("[foo 0.0.1] Hello mb") .run(); } #[cargo_test] fn metabuild_override() { let p = project() .file( "Cargo.toml", r#" cargo-features = ["metabuild"] [package] name = "foo" version = "0.0.1" links = "cat" metabuild = "mb" [build-dependencies] mb = {path="mb"} "#, ) .file("src/lib.rs", "") .file("mb/Cargo.toml", &basic_lib_manifest("mb")) .file( "mb/src/lib.rs", r#"pub fn metabuild() { panic!("should not run"); }"#, ) .file( ".cargo/config", &format!( r#" [target.{}.cat] rustc-link-lib = ["a"] "#, rustc_host() ), ) .build(); p.cargo("build -vv") .masquerade_as_nightly_cargo(&["metabuild"]) .run(); } #[cargo_test] fn metabuild_workspace() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["member1", "member2"] "#, ) .file( "member1/Cargo.toml", r#" cargo-features = ["metabuild"] [package] name = "member1" version = "0.0.1" metabuild = ["mb1", "mb2"] [build-dependencies] mb1 = {path="../../mb1"} mb2 = {path="../../mb2"} "#, ) .file("member1/src/lib.rs", "") .file( "member2/Cargo.toml", r#" cargo-features = ["metabuild"] [package] name = "member2" version = "0.0.1" metabuild = ["mb1"] [build-dependencies] mb1 = {path="../../mb1"} "#, ) .file("member2/src/lib.rs", "") .build(); project() .at("mb1") .file("Cargo.toml", &basic_lib_manifest("mb1")) .file( "src/lib.rs", r#"pub fn metabuild() { println!("Hello mb1 {}", std::env::var("CARGO_MANIFEST_DIR").unwrap()); }"#, ) .build(); project() .at("mb2") 
.file("Cargo.toml", &basic_lib_manifest("mb2")) .file( "src/lib.rs", r#"pub fn metabuild() { println!("Hello mb2 {}", std::env::var("CARGO_MANIFEST_DIR").unwrap()); }"#, ) .build(); p.cargo("build -vv --workspace") .masquerade_as_nightly_cargo(&["metabuild"]) .with_stdout_contains("[member1 0.0.1] Hello mb1 [..]member1") .with_stdout_contains("[member1 0.0.1] Hello mb2 [..]member1") .with_stdout_contains("[member2 0.0.1] Hello mb1 [..]member2") .with_stdout_does_not_contain("[member2 0.0.1] Hello mb2 [..]member2") .run(); } #[cargo_test] fn metabuild_metadata() { // The metabuild Target is filtered out of the `metadata` results. let p = basic_project(); let meta = p .cargo("metadata --format-version=1") .masquerade_as_nightly_cargo(&["metabuild"]) .run_json(); let mb_info: Vec<&str> = meta["packages"] .as_array() .unwrap() .iter() .find(|p| p["name"].as_str().unwrap() == "foo") .unwrap()["metabuild"] .as_array() .unwrap() .iter() .map(|s| s.as_str().unwrap()) .collect(); assert_eq!(mb_info, ["mb", "mb-other"]); } #[cargo_test] fn metabuild_build_plan() { let p = basic_project(); p.cargo("build --build-plan -Zunstable-options") .masquerade_as_nightly_cargo(&["metabuild", "build-plan"]) .with_json( r#" { "invocations": [ { "package_name": "mb", "package_version": "0.5.0", "target_kind": ["lib"], "compile_mode": "build", "kind": null, "deps": [], "outputs": [ "[..]/target/debug/deps/libmb-[..].rlib", "[..]/target/debug/deps/libmb-[..].rmeta" ], "links": {}, "program": "rustc", "args": "{...}", "env": "{...}", "cwd": "[..]" }, { "package_name": "mb-other", "package_version": "0.0.1", "target_kind": ["lib"], "compile_mode": "build", "kind": null, "deps": [], "outputs": [ "[..]/target/debug/deps/libmb_other-[..].rlib", "[..]/target/debug/deps/libmb_other-[..].rmeta" ], "links": {}, "program": "rustc", "args": "{...}", "env": "{...}", "cwd": "[..]" }, { "package_name": "foo", "package_version": "0.0.1", "target_kind": ["custom-build"], "compile_mode": "build", "kind": 
null, "deps": [0, 1], "outputs": "{...}", "links": "{...}", "program": "rustc", "args": "{...}", "env": "{...}", "cwd": "[..]" }, { "package_name": "foo", "package_version": "0.0.1", "target_kind": ["custom-build"], "compile_mode": "run-custom-build", "kind": null, "deps": [2], "outputs": [], "links": {}, "program": "[..]/foo/target/debug/build/foo-[..]/metabuild-foo", "args": [], "env": "{...}", "cwd": "[..]" }, { "package_name": "foo", "package_version": "0.0.1", "target_kind": ["lib"], "compile_mode": "build", "kind": null, "deps": [3], "outputs": [ "[..]/foo/target/debug/deps/libfoo-[..].rlib", "[..]/foo/target/debug/deps/libfoo-[..].rmeta" ], "links": "{...}", "program": "rustc", "args": "{...}", "env": "{...}", "cwd": "[..]" } ], "inputs": [ "[..]/foo/Cargo.toml", "[..]/foo/mb/Cargo.toml", "[..]/foo/mb-other/Cargo.toml" ] } "#, ) .run(); assert_eq!(p.glob("target/.metabuild/metabuild-foo-*.rs").count(), 1); } #[cargo_test] fn metabuild_two_versions() { // Two versions of a metabuild dep with the same name. 
let p = project() .at("ws") .file( "Cargo.toml", r#" [workspace] members = ["member1", "member2"] "#, ) .file( "member1/Cargo.toml", r#" cargo-features = ["metabuild"] [package] name = "member1" version = "0.0.1" metabuild = ["mb"] [build-dependencies] mb = {path="../../mb1"} "#, ) .file("member1/src/lib.rs", "") .file( "member2/Cargo.toml", r#" cargo-features = ["metabuild"] [package] name = "member2" version = "0.0.1" metabuild = ["mb"] [build-dependencies] mb = {path="../../mb2"} "#, ) .file("member2/src/lib.rs", "") .build(); project().at("mb1") .file("Cargo.toml", r#" [package] name = "mb" version = "0.0.1" "#) .file( "src/lib.rs", r#"pub fn metabuild() { println!("Hello mb1 {}", std::env::var("CARGO_MANIFEST_DIR").unwrap()); }"#, ) .build(); project().at("mb2") .file("Cargo.toml", r#" [package] name = "mb" version = "0.0.2" "#) .file( "src/lib.rs", r#"pub fn metabuild() { println!("Hello mb2 {}", std::env::var("CARGO_MANIFEST_DIR").unwrap()); }"#, ) .build(); p.cargo("build -vv --workspace") .masquerade_as_nightly_cargo(&["metabuild"]) .with_stdout_contains("[member1 0.0.1] Hello mb1 [..]member1") .with_stdout_contains("[member2 0.0.1] Hello mb2 [..]member2") .run(); assert_eq!( p.glob("target/.metabuild/metabuild-member?-*.rs").count(), 2 ); } #[cargo_test] fn metabuild_external_dependency() { Package::new("mb", "1.0.0") .file("Cargo.toml", &basic_manifest("mb", "1.0.0")) .file( "src/lib.rs", r#"pub fn metabuild() { println!("Hello mb"); }"#, ) .publish(); Package::new("dep", "1.0.0") .file( "Cargo.toml", r#" cargo-features = ["metabuild"] [package] name = "dep" version = "1.0.0" metabuild = ["mb"] [build-dependencies] mb = "1.0" "#, ) .file("src/lib.rs", "") .build_dep("mb", "1.0.0") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [dependencies] dep = "1.0" "#, ) .file("src/lib.rs", "extern crate dep;") .build(); p.cargo("build -vv") .masquerade_as_nightly_cargo(&["metabuild"]) .with_stdout_contains("[dep 
1.0.0] Hello mb") .run(); assert_eq!(p.glob("target/.metabuild/metabuild-dep-*.rs").count(), 1); } #[cargo_test] fn metabuild_json_artifact() { let p = basic_project(); p.cargo("build --message-format=json") .masquerade_as_nightly_cargo(&["metabuild"]) .with_json_contains_unordered( r#" { "executable": null, "features": [], "filenames": "{...}", "fresh": false, "package_id": "foo [..]", "manifest_path": "[..]", "profile": "{...}", "reason": "compiler-artifact", "target": { "crate_types": [ "bin" ], "doc": false, "doctest": false, "edition": "2018", "kind": [ "custom-build" ], "name": "metabuild-foo", "src_path": "[..]/foo/target/.metabuild/metabuild-foo-[..].rs", "test": false } } { "cfgs": [], "env": [], "linked_libs": [], "linked_paths": [], "package_id": "foo [..]", "out_dir": "[..]", "reason": "build-script-executed" } "#, ) .run(); } #[cargo_test] fn metabuild_failed_build_json() { let p = basic_project(); // Modify the metabuild dep so that it fails to compile. p.change_file("mb/src/lib.rs", ""); p.cargo("build --message-format=json") .masquerade_as_nightly_cargo(&["metabuild"]) .with_status(101) .with_json_contains_unordered( r#" { "message": { "children": "{...}", "code": "{...}", "level": "error", "message": "cannot find function `metabuild` in [..] `mb`", "rendered": "{...}", "spans": "{...}" }, "package_id": "foo [..]", "manifest_path": "[..]", "reason": "compiler-message", "target": { "crate_types": [ "bin" ], "doc": false, "doctest": false, "edition": "2018", "kind": [ "custom-build" ], "name": "metabuild-foo", "src_path": null, "test": false } } "#, ) .run(); } cargo-0.66.0/tests/testsuite/metadata.rs000066400000000000000000003633711432416201200202270ustar00rootroot00000000000000//! Tests for the `cargo metadata` command. 
use cargo_test_support::install::cargo_home; use cargo_test_support::paths::CargoPathExt; use cargo_test_support::registry::Package; use cargo_test_support::{basic_bin_manifest, basic_lib_manifest, main_file, project, rustc_host}; use serde_json::json; #[cargo_test] fn cargo_metadata_simple() { let p = project() .file("src/foo.rs", "") .file("Cargo.toml", &basic_bin_manifest("foo")) .build(); p.cargo("metadata") .with_json( r#" { "packages": [ { "authors": [ "wycats@example.com" ], "categories": [], "default_run": null, "name": "foo", "version": "0.5.0", "id": "foo[..]", "keywords": [], "source": null, "dependencies": [], "edition": "2015", "license": null, "license_file": null, "links": null, "description": null, "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "homepage": null, "documentation": null, "targets": [ { "kind": [ "bin" ], "crate_types": [ "bin" ], "doc": true, "doctest": false, "test": true, "edition": "2015", "name": "foo", "src_path": "[..]/foo/src/foo.rs" } ], "features": {}, "manifest_path": "[..]Cargo.toml", "metadata": null, "publish": null } ], "workspace_members": ["foo 0.5.0 (path+file:[..]foo)"], "resolve": { "nodes": [ { "dependencies": [], "deps": [], "features": [], "id": "foo 0.5.0 (path+file:[..]foo)" } ], "root": "foo 0.5.0 (path+file:[..]foo)" }, "target_directory": "[..]foo/target", "version": 1, "workspace_root": "[..]/foo", "metadata": null }"#, ) .run(); } #[cargo_test] fn cargo_metadata_warns_on_implicit_version() { let p = project() .file("src/foo.rs", "") .file("Cargo.toml", &basic_bin_manifest("foo")) .build(); p.cargo("metadata").with_stderr("[WARNING] please specify `--format-version` flag explicitly to avoid compatibility problems").run(); p.cargo("metadata --format-version 1").with_stderr("").run(); } #[cargo_test] fn library_with_several_crate_types() { let p = project() .file("src/lib.rs", "") .file( "Cargo.toml", r#" [package] name = "foo" version = "0.5.0" [lib] 
crate-type = ["lib", "staticlib"] "#, ) .build(); p.cargo("metadata") .with_json( r#" { "packages": [ { "authors": [], "categories": [], "default_run": null, "name": "foo", "readme": null, "repository": null, "homepage": null, "documentation": null, "version": "0.5.0", "rust_version": null, "id": "foo[..]", "keywords": [], "source": null, "dependencies": [], "edition": "2015", "license": null, "license_file": null, "links": null, "description": null, "targets": [ { "kind": [ "lib", "staticlib" ], "crate_types": [ "lib", "staticlib" ], "doc": true, "doctest": true, "test": true, "edition": "2015", "name": "foo", "src_path": "[..]/foo/src/lib.rs" } ], "features": {}, "manifest_path": "[..]Cargo.toml", "metadata": null, "publish": null } ], "workspace_members": ["foo 0.5.0 (path+file:[..]foo)"], "resolve": { "nodes": [ { "dependencies": [], "deps": [], "features": [], "id": "foo 0.5.0 (path+file:[..]foo)" } ], "root": "foo 0.5.0 (path+file:[..]foo)" }, "target_directory": "[..]foo/target", "version": 1, "workspace_root": "[..]/foo", "metadata": null }"#, ) .run(); } #[cargo_test] fn library_with_features() { let p = project() .file("src/lib.rs", "") .file( "Cargo.toml", r#" [package] name = "foo" version = "0.5.0" [features] default = ["default_feat"] default_feat = [] optional_feat = [] "#, ) .build(); p.cargo("metadata") .with_json( r#" { "packages": [ { "authors": [], "categories": [], "default_run": null, "name": "foo", "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "version": "0.5.0", "id": "foo[..]", "keywords": [], "source": null, "dependencies": [], "edition": "2015", "license": null, "license_file": null, "links": null, "description": null, "targets": [ { "kind": [ "lib" ], "crate_types": [ "lib" ], "doc": true, "doctest": true, "test": true, "edition": "2015", "name": "foo", "src_path": "[..]/foo/src/lib.rs" } ], "features": { "default": [ "default_feat" ], "default_feat": [], "optional_feat": [] }, 
"manifest_path": "[..]Cargo.toml", "metadata": null, "publish": null } ], "workspace_members": ["foo 0.5.0 (path+file:[..]foo)"], "resolve": { "nodes": [ { "dependencies": [], "deps": [], "features": [ "default", "default_feat" ], "id": "foo 0.5.0 (path+file:[..]foo)" } ], "root": "foo 0.5.0 (path+file:[..]foo)" }, "target_directory": "[..]foo/target", "version": 1, "workspace_root": "[..]/foo", "metadata": null }"#, ) .run(); } #[cargo_test] fn cargo_metadata_with_deps_and_version() { let p = project() .file("src/foo.rs", "") .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] license = "MIT" description = "foo" [[bin]] name = "foo" [dependencies] bar = "*" [dev-dependencies] foobar = "*" "#, ) .build(); Package::new("baz", "0.0.1").publish(); Package::new("foobar", "0.0.1").publish(); Package::new("bar", "0.0.1").dep("baz", "0.0.1").publish(); p.cargo("metadata -q --format-version 1") .with_json( r#" { "packages": [ { "authors": [], "categories": [], "default_run": null, "dependencies": [ { "features": [], "kind": null, "name": "baz", "optional": false, "registry": null, "rename": null, "req": "^0.0.1", "source": "registry+https://github.com/rust-lang/crates.io-index", "target": null, "uses_default_features": true } ], "description": null, "edition": "2015", "features": {}, "id": "bar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "keywords": [], "license": null, "license_file": null, "links": null, "manifest_path": "[..]Cargo.toml", "metadata": null, "publish": null, "name": "bar", "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "source": "registry+https://github.com/rust-lang/crates.io-index", "targets": [ { "crate_types": [ "lib" ], "doc": true, "doctest": true, "test": true, "edition": "2015", "kind": [ "lib" ], "name": "bar", "src_path": "[..]src/lib.rs" } ], "version": "0.0.1" }, { "authors": [], "categories": [], "default_run": null, "dependencies": [], 
"description": null, "edition": "2015", "features": {}, "id": "baz 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "keywords": [], "license": null, "license_file": null, "links": null, "manifest_path": "[..]Cargo.toml", "metadata": null, "publish": null, "name": "baz", "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "source": "registry+https://github.com/rust-lang/crates.io-index", "targets": [ { "crate_types": [ "lib" ], "doc": true, "doctest": true, "test": true, "edition": "2015", "kind": [ "lib" ], "name": "baz", "src_path": "[..]src/lib.rs" } ], "version": "0.0.1" }, { "authors": [], "categories": [], "default_run": null, "dependencies": [ { "features": [], "kind": null, "name": "bar", "optional": false, "registry": null, "rename": null, "req": "*", "source": "registry+https://github.com/rust-lang/crates.io-index", "target": null, "uses_default_features": true }, { "features": [], "kind": "dev", "name": "foobar", "optional": false, "registry": null, "rename": null, "req": "*", "source": "registry+https://github.com/rust-lang/crates.io-index", "target": null, "uses_default_features": true } ], "description": "foo", "edition": "2015", "features": {}, "id": "foo 0.5.0 (path+file:[..]foo)", "keywords": [], "license": "MIT", "license_file": null, "links": null, "manifest_path": "[..]Cargo.toml", "metadata": null, "publish": null, "name": "foo", "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "source": null, "targets": [ { "crate_types": [ "bin" ], "doc": true, "doctest": false, "test": true, "edition": "2015", "kind": [ "bin" ], "name": "foo", "src_path": "[..]src/foo.rs" } ], "version": "0.5.0" }, { "authors": [], "categories": [], "default_run": null, "dependencies": [], "description": null, "edition": "2015", "features": {}, "id": "foobar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "keywords": [], "license": null, 
"license_file": null, "links": null, "manifest_path": "[..]Cargo.toml", "metadata": null, "publish": null, "name": "foobar", "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "source": "registry+https://github.com/rust-lang/crates.io-index", "targets": [ { "crate_types": [ "lib" ], "doc": true, "doctest": true, "test": true, "edition": "2015", "kind": [ "lib" ], "name": "foobar", "src_path": "[..]src/lib.rs" } ], "version": "0.0.1" } ], "resolve": { "nodes": [ { "dependencies": [ "baz 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" ], "deps": [ { "dep_kinds": [ { "kind": null, "target": null } ], "name": "baz", "pkg": "baz 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" } ], "features": [], "id": "bar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" }, { "dependencies": [], "deps": [], "features": [], "id": "baz 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" }, { "dependencies": [ "bar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "foobar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" ], "deps": [ { "dep_kinds": [ { "kind": null, "target": null } ], "name": "bar", "pkg": "bar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" }, { "dep_kinds": [ { "kind": "dev", "target": null } ], "name": "foobar", "pkg": "foobar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" } ], "features": [], "id": "foo 0.5.0 (path+file:[..]foo)" }, { "dependencies": [], "deps": [], "features": [], "id": "foobar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" } ], "root": "foo 0.5.0 (path+file:[..]foo)" }, "target_directory": "[..]foo/target", "version": 1, "workspace_members": [ "foo 0.5.0 (path+file:[..]foo)" ], "workspace_root": "[..]/foo", "metadata": null }"#, ) .run(); } #[cargo_test] fn example() { let p = project() .file("src/lib.rs", "") .file("examples/ex.rs", "") .file( "Cargo.toml", r#" [package] 
name = "foo" version = "0.1.0" [[example]] name = "ex" "#, ) .build(); p.cargo("metadata") .with_json( r#" { "packages": [ { "authors": [], "categories": [], "default_run": null, "name": "foo", "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "version": "0.1.0", "id": "foo[..]", "keywords": [], "license": null, "license_file": null, "links": null, "description": null, "edition": "2015", "source": null, "dependencies": [], "targets": [ { "kind": [ "lib" ], "crate_types": [ "lib" ], "doc": true, "doctest": true, "test": true, "edition": "2015", "name": "foo", "src_path": "[..]/foo/src/lib.rs" }, { "kind": [ "example" ], "crate_types": [ "bin" ], "doc": false, "doctest": false, "test": false, "edition": "2015", "name": "ex", "src_path": "[..]/foo/examples/ex.rs" } ], "features": {}, "manifest_path": "[..]Cargo.toml", "metadata": null, "publish": null } ], "workspace_members": [ "foo 0.1.0 (path+file:[..]foo)" ], "resolve": { "root": "foo 0.1.0 (path+file://[..]foo)", "nodes": [ { "id": "foo 0.1.0 (path+file:[..]foo)", "features": [], "dependencies": [], "deps": [] } ] }, "target_directory": "[..]foo/target", "version": 1, "workspace_root": "[..]/foo", "metadata": null }"#, ) .run(); } #[cargo_test] fn example_lib() { let p = project() .file("src/lib.rs", "") .file("examples/ex.rs", "") .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [[example]] name = "ex" crate-type = ["rlib", "dylib"] "#, ) .build(); p.cargo("metadata") .with_json( r#" { "packages": [ { "authors": [], "categories": [], "default_run": null, "name": "foo", "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "version": "0.1.0", "id": "foo[..]", "keywords": [], "license": null, "license_file": null, "links": null, "description": null, "edition": "2015", "source": null, "dependencies": [], "targets": [ { "kind": [ "lib" ], "crate_types": [ "lib" ], "doc": true, "doctest": true, "test": true, 
"edition": "2015", "name": "foo", "src_path": "[..]/foo/src/lib.rs" }, { "kind": [ "example" ], "crate_types": [ "rlib", "dylib" ], "doc": false, "doctest": false, "test": false, "edition": "2015", "name": "ex", "src_path": "[..]/foo/examples/ex.rs" } ], "features": {}, "manifest_path": "[..]Cargo.toml", "metadata": null, "publish": null } ], "workspace_members": [ "foo 0.1.0 (path+file:[..]foo)" ], "resolve": { "root": "foo 0.1.0 (path+file://[..]foo)", "nodes": [ { "id": "foo 0.1.0 (path+file:[..]foo)", "features": [], "dependencies": [], "deps": [] } ] }, "target_directory": "[..]foo/target", "version": 1, "workspace_root": "[..]/foo", "metadata": null }"#, ) .run(); } #[cargo_test] fn workspace_metadata() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar", "baz"] [workspace.metadata] tool1 = "hello" tool2 = [1, 2, 3] [workspace.metadata.foo] bar = 3 "#, ) .file("bar/Cargo.toml", &basic_lib_manifest("bar")) .file("bar/src/lib.rs", "") .file("baz/Cargo.toml", &basic_lib_manifest("baz")) .file("baz/src/lib.rs", "") .build(); p.cargo("metadata") .with_json( r#" { "packages": [ { "authors": [ "wycats@example.com" ], "categories": [], "default_run": null, "name": "bar", "version": "0.5.0", "id": "bar[..]", "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "keywords": [], "source": null, "dependencies": [], "license": null, "license_file": null, "links": null, "description": null, "edition": "2015", "targets": [ { "kind": [ "lib" ], "crate_types": [ "lib" ], "doc": true, "doctest": true, "test": true, "edition": "2015", "name": "bar", "src_path": "[..]bar/src/lib.rs" } ], "features": {}, "manifest_path": "[..]bar/Cargo.toml", "metadata": null, "publish": null }, { "authors": [ "wycats@example.com" ], "categories": [], "default_run": null, "name": "baz", "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "version": "0.5.0", "id": "baz[..]", 
"keywords": [], "source": null, "dependencies": [], "license": null, "license_file": null, "links": null, "description": null, "edition": "2015", "targets": [ { "kind": [ "lib" ], "crate_types": [ "lib" ], "doc": true, "doctest": true, "test": true, "edition": "2015", "name": "baz", "src_path": "[..]baz/src/lib.rs" } ], "features": {}, "manifest_path": "[..]baz/Cargo.toml", "metadata": null, "publish": null } ], "workspace_members": ["bar 0.5.0 (path+file:[..]bar)", "baz 0.5.0 (path+file:[..]baz)"], "resolve": { "nodes": [ { "dependencies": [], "deps": [], "features": [], "id": "bar 0.5.0 (path+file:[..]bar)" }, { "dependencies": [], "deps": [], "features": [], "id": "baz 0.5.0 (path+file:[..]baz)" } ], "root": null }, "target_directory": "[..]foo/target", "version": 1, "workspace_root": "[..]/foo", "metadata": { "tool1": "hello", "tool2": [1, 2, 3], "foo": { "bar": 3 } } }"#, ) .run(); } #[cargo_test] fn workspace_metadata_with_dependencies_no_deps() { let p = project() // NOTE that 'artifact' isn't mentioned in the workspace here, yet it shows up as member. 
.file( "Cargo.toml", r#" [workspace] members = ["bar", "baz"] "#, ) .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] [dependencies] baz = { path = "../baz/" } artifact = { path = "../artifact/", artifact = "bin" } "#, ) .file("bar/src/lib.rs", "") .file("baz/Cargo.toml", &basic_lib_manifest("baz")) .file("baz/src/lib.rs", "") .file("artifact/Cargo.toml", &basic_bin_manifest("artifact")) .file("artifact/src/main.rs", "fn main() {}") .build(); p.cargo("metadata --no-deps -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_json( r#" { "packages": [ { "authors": [ "wycats@example.com" ], "categories": [], "default_run": null, "name": "bar", "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "version": "0.5.0", "id": "bar[..]", "keywords": [], "source": null, "license": null, "dependencies": [ { "features": [], "kind": null, "name": "artifact", "optional": false, "path": "[..]/foo/artifact", "registry": null, "rename": null, "req": "*", "source": null, "target": null, "uses_default_features": true, "artifact": { "kinds": [ "bin" ], "lib": false, "target": null } }, { "features": [], "kind": null, "name": "baz", "optional": false, "path": "[..]/foo/baz", "registry": null, "rename": null, "req": "*", "source": null, "target": null, "uses_default_features": true } ], "license_file": null, "links": null, "description": null, "edition": "2015", "targets": [ { "kind": [ "lib" ], "crate_types": [ "lib" ], "doc": true, "doctest": true, "test": true, "edition": "2015", "name": "bar", "src_path": "[..]bar/src/lib.rs" } ], "features": {}, "manifest_path": "[..]bar/Cargo.toml", "metadata": null, "publish": null }, { "authors": [ "wycats@example.com" ], "categories": [], "default_run": null, "dependencies": [], "description": null, "documentation": null, "edition": "2015", "features": {}, "homepage": null, "id": "artifact 0.5.0 (path+file:[..]/foo/artifact)", "keywords": 
[], "license": null, "license_file": null, "links": null, "manifest_path": "[..]/foo/artifact/Cargo.toml", "metadata": null, "name": "artifact", "publish": null, "readme": null, "repository": null, "rust_version": null, "source": null, "targets": [ { "crate_types": [ "bin" ], "doc": true, "doctest": false, "edition": "2015", "kind": [ "bin" ], "name": "artifact", "src_path": "[..]/foo/artifact/src/main.rs", "test": true } ], "version": "0.5.0" }, { "authors": [ "wycats@example.com" ], "categories": [], "default_run": null, "name": "baz", "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "version": "0.5.0", "id": "baz[..]", "keywords": [], "source": null, "dependencies": [], "license": null, "license_file": null, "links": null, "description": null, "edition": "2015", "targets": [ { "kind": [ "lib" ], "crate_types": ["lib"], "doc": true, "doctest": true, "test": true, "edition": "2015", "name": "baz", "src_path": "[..]baz/src/lib.rs" } ], "features": {}, "manifest_path": "[..]baz/Cargo.toml", "metadata": null, "publish": null } ], "workspace_members": [ "bar 0.5.0 (path+file:[..]bar)", "artifact 0.5.0 (path+file:[..]/foo/artifact)", "baz 0.5.0 (path+file:[..]baz)" ], "resolve": null, "target_directory": "[..]foo/target", "version": 1, "workspace_root": "[..]/foo", "metadata": null }"#, ) .run(); } #[cargo_test] fn workspace_metadata_with_dependencies_and_resolve() { let alt_target = "wasm32-unknown-unknown"; let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar", "artifact", "non-artifact", "bin-only-artifact"] "#, ) .file( "bar/Cargo.toml", &r#" [package] name = "bar" version = "0.5.0" authors = [] [build-dependencies] artifact = { path = "../artifact/", artifact = "bin", target = "target" } bin-only-artifact = { path = "../bin-only-artifact/", artifact = "bin", target = "$ALT_TARGET" } non-artifact = { path = "../non-artifact" } [dependencies] artifact = { path = "../artifact/", artifact = ["cdylib", 
"staticlib", "bin:baz-name"], lib = true, target = "$ALT_TARGET" } bin-only-artifact = { path = "../bin-only-artifact/", artifact = "bin:a-name" } non-artifact = { path = "../non-artifact" } [dev-dependencies] artifact = { path = "../artifact/" } non-artifact = { path = "../non-artifact" } bin-only-artifact = { path = "../bin-only-artifact/", artifact = "bin:b-name" } "#.replace("$ALT_TARGET", alt_target), ) .file("bar/src/lib.rs", "") .file("bar/build.rs", "fn main() {}") .file( "artifact/Cargo.toml", r#" [package] name = "artifact" version = "0.5.0" authors = [] [lib] crate-type = ["staticlib", "cdylib", "rlib"] [[bin]] name = "bar-name" [[bin]] name = "baz-name" "#, ) .file("artifact/src/main.rs", "fn main() {}") .file("artifact/src/lib.rs", "") .file( "bin-only-artifact/Cargo.toml", r#" [package] name = "bin-only-artifact" version = "0.5.0" authors = [] [[bin]] name = "a-name" [[bin]] name = "b-name" "#, ) .file("bin-only-artifact/src/main.rs", "fn main() {}") .file("non-artifact/Cargo.toml", r#" [package] name = "non-artifact" version = "0.5.0" authors = [] "#, ) .file("non-artifact/src/lib.rs", "") .build(); p.cargo("metadata -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_json( r#" { "metadata": null, "packages": [ { "authors": [], "categories": [], "default_run": null, "dependencies": [], "description": null, "documentation": null, "edition": "2015", "features": {}, "homepage": null, "id": "artifact 0.5.0 (path+file://[..]/foo/artifact)", "keywords": [], "license": null, "license_file": null, "links": null, "manifest_path": "[..]/foo/artifact/Cargo.toml", "metadata": null, "name": "artifact", "publish": null, "readme": null, "repository": null, "rust_version": null, "source": null, "targets": [ { "crate_types": [ "staticlib", "cdylib", "rlib" ], "doc": true, "doctest": true, "edition": "2015", "kind": [ "staticlib", "cdylib", "rlib" ], "name": "artifact", "src_path": "[..]/foo/artifact/src/lib.rs", "test": true }, { "crate_types": [ "bin" ], 
"doc": true, "doctest": false, "edition": "2015", "kind": [ "bin" ], "name": "bar-name", "src_path": "[..]/foo/artifact/src/main.rs", "test": true }, { "crate_types": [ "bin" ], "doc": true, "doctest": false, "edition": "2015", "kind": [ "bin" ], "name": "baz-name", "src_path": "[..]/foo/artifact/src/main.rs", "test": true } ], "version": "0.5.0" }, { "authors": [], "categories": [], "default_run": null, "dependencies": [ { "artifact": { "kinds": [ "cdylib", "staticlib", "bin:baz-name" ], "lib": true, "target": "wasm32-unknown-unknown" }, "features": [], "kind": null, "name": "artifact", "optional": false, "path": "[..]/foo/artifact", "registry": null, "rename": null, "req": "*", "source": null, "target": null, "uses_default_features": true }, { "artifact": { "kinds": [ "bin:a-name" ], "lib": false, "target": null }, "features": [], "kind": null, "name": "bin-only-artifact", "optional": false, "path": "[..]/foo/bin-only-artifact", "registry": null, "rename": null, "req": "*", "source": null, "target": null, "uses_default_features": true }, { "features": [], "kind": null, "name": "non-artifact", "optional": false, "path": "[..]/foo/non-artifact", "registry": null, "rename": null, "req": "*", "source": null, "target": null, "uses_default_features": true }, { "features": [], "kind": "dev", "name": "artifact", "optional": false, "path": "[..]/foo/artifact", "registry": null, "rename": null, "req": "*", "source": null, "target": null, "uses_default_features": true }, { "artifact": { "kinds": [ "bin:b-name" ], "lib": false, "target": null }, "features": [], "kind": "dev", "name": "bin-only-artifact", "optional": false, "path": "[..]/foo/bin-only-artifact", "registry": null, "rename": null, "req": "*", "source": null, "target": null, "uses_default_features": true }, { "features": [], "kind": "dev", "name": "non-artifact", "optional": false, "path": "[..]/foo/non-artifact", "registry": null, "rename": null, "req": "*", "source": null, "target": null, 
"uses_default_features": true }, { "artifact": { "kinds": [ "bin" ], "lib": false, "target": "target" }, "features": [], "kind": "build", "name": "artifact", "optional": false, "path": "[..]/foo/artifact", "registry": null, "rename": null, "req": "*", "source": null, "target": null, "uses_default_features": true }, { "artifact": { "kinds": [ "bin" ], "lib": false, "target": "wasm32-unknown-unknown" }, "features": [], "kind": "build", "name": "bin-only-artifact", "optional": false, "path": "[..]/foo/bin-only-artifact", "registry": null, "rename": null, "req": "*", "source": null, "target": null, "uses_default_features": true }, { "features": [], "kind": "build", "name": "non-artifact", "optional": false, "path": "[..]/foo/non-artifact", "registry": null, "rename": null, "req": "*", "source": null, "target": null, "uses_default_features": true } ], "description": null, "documentation": null, "edition": "2015", "features": {}, "homepage": null, "id": "bar 0.5.0 (path+file://[..]/foo/bar)", "keywords": [], "license": null, "license_file": null, "links": null, "manifest_path": "[..]/foo/bar/Cargo.toml", "metadata": null, "name": "bar", "publish": null, "readme": null, "repository": null, "rust_version": null, "source": null, "targets": [ { "crate_types": [ "lib" ], "doc": true, "doctest": true, "edition": "2015", "kind": [ "lib" ], "name": "bar", "src_path": "[..]/foo/bar/src/lib.rs", "test": true }, { "crate_types": [ "bin" ], "doc": false, "doctest": false, "edition": "2015", "kind": [ "custom-build" ], "name": "build-script-build", "src_path": "[..]/foo/bar/build.rs", "test": false } ], "version": "0.5.0" }, { "authors": [], "categories": [], "default_run": null, "dependencies": [], "description": null, "documentation": null, "edition": "2015", "features": {}, "homepage": null, "id": "bin-only-artifact 0.5.0 (path+file://[..]/foo/bin-only-artifact)", "keywords": [], "license": null, "license_file": null, "links": null, "manifest_path": 
"[..]/foo/bin-only-artifact/Cargo.toml", "metadata": null, "name": "bin-only-artifact", "publish": null, "readme": null, "repository": null, "rust_version": null, "source": null, "targets": [ { "crate_types": [ "bin" ], "doc": true, "doctest": false, "edition": "2015", "kind": [ "bin" ], "name": "a-name", "src_path": "[..]/foo/bin-only-artifact/src/main.rs", "test": true }, { "crate_types": [ "bin" ], "doc": true, "doctest": false, "edition": "2015", "kind": [ "bin" ], "name": "b-name", "src_path": "[..]/foo/bin-only-artifact/src/main.rs", "test": true } ], "version": "0.5.0" }, { "authors": [], "categories": [], "default_run": null, "dependencies": [], "description": null, "documentation": null, "edition": "2015", "features": {}, "homepage": null, "id": "non-artifact 0.5.0 (path+file://[..]/foo/non-artifact)", "keywords": [], "license": null, "license_file": null, "links": null, "manifest_path": "[..]/foo/non-artifact/Cargo.toml", "metadata": null, "name": "non-artifact", "publish": null, "readme": null, "repository": null, "rust_version": null, "source": null, "targets": [ { "crate_types": [ "lib" ], "doc": true, "doctest": true, "edition": "2015", "kind": [ "lib" ], "name": "non-artifact", "src_path": "[..]/foo/non-artifact/src/lib.rs", "test": true } ], "version": "0.5.0" } ], "resolve": { "nodes": [ { "dependencies": [], "deps": [], "features": [], "id": "artifact 0.5.0 (path+file://[..]/foo/artifact)" }, { "dependencies": [ "artifact 0.5.0 (path+file://[..]/foo/artifact)", "non-artifact 0.5.0 (path+file://[..]/foo/non-artifact)" ], "deps": [ { "dep_kinds": [ { "kind": null, "target": null }, { "kind": "dev", "target": null }, { "kind": "build", "target": null } ], "name": "artifact", "pkg": "artifact 0.5.0 (path+file://[..]/foo/artifact)" }, { "dep_kinds": [ { "kind": null, "target": null }, { "kind": "dev", "target": null }, { "kind": "build", "target": null } ], "name": "non_artifact", "pkg": "non-artifact 0.5.0 (path+file://[..]/foo/non-artifact)" } ], 
"features": [], "id": "bar 0.5.0 (path+file://[..]/foo/bar)" }, { "dependencies": [], "deps": [], "features": [], "id": "bin-only-artifact 0.5.0 (path+file://[..]/foo/bin-only-artifact)" }, { "dependencies": [], "deps": [], "features": [], "id": "non-artifact 0.5.0 (path+file://[..]/foo/non-artifact)" } ], "root": null }, "target_directory": "[..]/foo/target", "version": 1, "workspace_members": [ "bar 0.5.0 (path+file://[..]/foo/bar)", "artifact 0.5.0 (path+file://[..]/foo/artifact)", "bin-only-artifact 0.5.0 (path+file://[..]/foo/bin-only-artifact)", "non-artifact 0.5.0 (path+file://[..]/foo/non-artifact)" ], "workspace_root": "[..]/foo" } "#, ) .run(); } #[cargo_test] fn cargo_metadata_with_invalid_manifest() { let p = project().file("Cargo.toml", "").build(); p.cargo("metadata --format-version 1") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: virtual manifests must be configured with [workspace]", ) .run(); } #[cargo_test] fn cargo_metadata_with_invalid_authors_field() { let p = project() .file("src/foo.rs", "") .file( "Cargo.toml", r#" [package] authors = "" "#, ) .build(); p.cargo("metadata") .with_status(101) .with_stderr( r#"[ERROR] failed to parse manifest at `[..]` Caused by: invalid type: string "", expected vector of strings for key `package.authors`"#, ) .run(); } const MANIFEST_OUTPUT: &str = r#" { "packages": [{ "authors": [ "wycats@example.com" ], "categories": [], "default_run": null, "name":"foo", "version":"0.5.0", "id":"foo[..]0.5.0[..](path+file://[..]/foo)", "source":null, "dependencies":[], "keywords": [], "license": null, "license_file": null, "links": null, "description": null, "edition": "2015", "targets":[{ "kind":["bin"], "crate_types":["bin"], "doc": true, "doctest": false, "test": true, "edition": "2015", "name":"foo", "src_path":"[..]/foo/src/foo.rs" }], "features":{}, "manifest_path":"[..]Cargo.toml", "metadata": null, "publish": null, "readme": null, "repository": null, "rust_version": null, 
"homepage": null, "documentation": null }], "workspace_members": [ "foo 0.5.0 (path+file:[..]foo)" ], "resolve": null, "target_directory": "[..]foo/target", "version": 1, "workspace_root": "[..]/foo", "metadata": null }"#; #[cargo_test] fn cargo_metadata_no_deps_path_to_cargo_toml_relative() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("metadata --no-deps --manifest-path foo/Cargo.toml") .cwd(p.root().parent().unwrap()) .with_json(MANIFEST_OUTPUT) .run(); } #[cargo_test] fn cargo_metadata_no_deps_path_to_cargo_toml_absolute() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("metadata --no-deps --manifest-path") .arg(p.root().join("Cargo.toml")) .cwd(p.root().parent().unwrap()) .with_json(MANIFEST_OUTPUT) .run(); } #[cargo_test] fn cargo_metadata_no_deps_path_to_cargo_toml_parent_relative() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("metadata --no-deps --manifest-path foo") .cwd(p.root().parent().unwrap()) .with_status(101) .with_stderr( "[ERROR] the manifest-path must be \ a path to a Cargo.toml file", ) .run(); } #[cargo_test] fn cargo_metadata_no_deps_path_to_cargo_toml_parent_absolute() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("metadata --no-deps --manifest-path") .arg(p.root()) .cwd(p.root().parent().unwrap()) .with_status(101) .with_stderr( "[ERROR] the manifest-path must be \ a path to a Cargo.toml file", ) .run(); } #[cargo_test] fn cargo_metadata_no_deps_cwd() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("metadata --no-deps") .with_json(MANIFEST_OUTPUT) .run(); } #[cargo_test] fn 
cargo_metadata_bad_version() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("metadata --no-deps --format-version 2") .with_status(1) .with_stderr_contains( "\ error: \"2\" isn't a valid value for '--format-version ' [possible values: 1] ", ) .run(); } #[cargo_test] fn multiple_features() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [features] a = [] b = [] "#, ) .file("src/lib.rs", "") .build(); p.cargo("metadata --features").arg("a b").run(); } #[cargo_test] fn package_metadata() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = ["wycats@example.com"] categories = ["database"] keywords = ["database"] readme = "README.md" repository = "https://github.com/rust-lang/cargo" homepage = "https://rust-lang.org" documentation = "https://doc.rust-lang.org/stable/std/" [package.metadata.bar] baz = "quux" "#, ) .file("README.md", "") .file("src/lib.rs", "") .build(); p.cargo("metadata --no-deps") .with_json( r#" { "packages": [ { "authors": ["wycats@example.com"], "categories": ["database"], "default_run": null, "name": "foo", "readme": "README.md", "repository": "https://github.com/rust-lang/cargo", "rust_version": null, "homepage": "https://rust-lang.org", "documentation": "https://doc.rust-lang.org/stable/std/", "version": "0.1.0", "id": "foo[..]", "keywords": ["database"], "source": null, "dependencies": [], "edition": "2015", "license": null, "license_file": null, "links": null, "description": null, "targets": [ { "kind": [ "lib" ], "crate_types": [ "lib" ], "doc": true, "doctest": true, "test": true, "edition": "2015", "name": "foo", "src_path": "[..]foo/src/lib.rs" } ], "features": {}, "manifest_path": "[..]foo/Cargo.toml", "metadata": { "bar": { "baz": "quux" } }, "publish": null } ], "workspace_members": ["foo[..]"], "resolve": null, "target_directory": "[..]foo/target", 
"version": 1, "workspace_root": "[..]/foo", "metadata": null }"#, ) .run(); } #[cargo_test] fn package_publish() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = ["wycats@example.com"] categories = ["database"] keywords = ["database"] readme = "README.md" repository = "https://github.com/rust-lang/cargo" publish = ["my-registry"] "#, ) .file("README.md", "") .file("src/lib.rs", "") .build(); p.cargo("metadata --no-deps") .with_json( r#" { "packages": [ { "authors": ["wycats@example.com"], "categories": ["database"], "default_run": null, "name": "foo", "readme": "README.md", "repository": "https://github.com/rust-lang/cargo", "rust_version": null, "homepage": null, "documentation": null, "version": "0.1.0", "id": "foo[..]", "keywords": ["database"], "source": null, "dependencies": [], "edition": "2015", "license": null, "license_file": null, "links": null, "description": null, "targets": [ { "kind": [ "lib" ], "crate_types": [ "lib" ], "doc": true, "doctest": true, "test": true, "edition": "2015", "name": "foo", "src_path": "[..]foo/src/lib.rs" } ], "features": {}, "manifest_path": "[..]foo/Cargo.toml", "metadata": null, "publish": ["my-registry"] } ], "workspace_members": ["foo[..]"], "resolve": null, "target_directory": "[..]foo/target", "version": 1, "workspace_root": "[..]/foo", "metadata": null }"#, ) .run(); } #[cargo_test] fn cargo_metadata_path_to_cargo_toml_project() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar"] "#, ) .file("bar/Cargo.toml", &basic_lib_manifest("bar")) .file("bar/src/lib.rs", "") .build(); p.cargo("package --manifest-path") .arg(p.root().join("bar/Cargo.toml")) .cwd(p.root().parent().unwrap()) .run(); p.cargo("metadata --manifest-path") .arg(p.root().join("target/package/bar-0.5.0/Cargo.toml")) .with_json( r#" { "packages": [ { "authors": [ "wycats@example.com" ], "categories": [], "default_run": null, "dependencies": [], "description": null, "edition": "2015", 
"features": {}, "id": "bar 0.5.0 ([..])", "keywords": [], "license": null, "license_file": null, "links": null, "manifest_path": "[..]Cargo.toml", "metadata": null, "publish": null, "name": "bar", "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "source": null, "targets": [ { "crate_types": [ "lib" ], "doc": true, "doctest": true, "test": true, "edition": "2015", "kind": [ "lib" ], "name": "bar", "src_path": "[..]src/lib.rs" } ], "version": "0.5.0" } ], "resolve": { "nodes": [ { "dependencies": [], "deps": [], "features": [], "id": "bar 0.5.0 ([..])" } ], "root": "bar 0.5.0 (path+file:[..])" }, "target_directory": "[..]", "version": 1, "workspace_members": [ "bar 0.5.0 (path+file:[..])" ], "workspace_root": "[..]", "metadata": null } "#, ) .run(); } #[cargo_test] fn package_edition_2018() { let p = project() .file("src/lib.rs", "") .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = ["wycats@example.com"] edition = "2018" "#, ) .build(); p.cargo("metadata") .with_json( r#" { "packages": [ { "authors": [ "wycats@example.com" ], "categories": [], "default_run": null, "dependencies": [], "description": null, "edition": "2018", "features": {}, "id": "foo 0.1.0 (path+file:[..])", "keywords": [], "license": null, "license_file": null, "links": null, "manifest_path": "[..]Cargo.toml", "metadata": null, "publish": null, "name": "foo", "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "source": null, "targets": [ { "crate_types": [ "lib" ], "doc": true, "doctest": true, "test": true, "edition": "2018", "kind": [ "lib" ], "name": "foo", "src_path": "[..]src/lib.rs" } ], "version": "0.1.0" } ], "resolve": { "nodes": [ { "dependencies": [], "deps": [], "features": [], "id": "foo 0.1.0 (path+file:[..])" } ], "root": "foo 0.1.0 (path+file:[..])" }, "target_directory": "[..]", "version": 1, "workspace_members": [ "foo 0.1.0 (path+file:[..])" ], 
"workspace_root": "[..]", "metadata": null } "#, ) .run(); } #[cargo_test] fn package_default_run() { let p = project() .file("src/lib.rs", "") .file("src/bin/a.rs", r#"fn main() { println!("hello A"); }"#) .file("src/bin/b.rs", r#"fn main() { println!("hello B"); }"#) .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = ["wycats@example.com"] edition = "2018" default-run = "a" "#, ) .build(); let json = p.cargo("metadata").run_json(); assert_eq!(json["packages"][0]["default_run"], json!("a")); } #[cargo_test] fn package_rust_version() { let p = project() .file("src/lib.rs", "") .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = ["wycats@example.com"] edition = "2018" rust-version = "1.56" "#, ) .build(); let json = p.cargo("metadata").run_json(); assert_eq!(json["packages"][0]["rust_version"], json!("1.56")); } #[cargo_test] fn target_edition_2018() { let p = project() .file("src/lib.rs", "") .file("src/main.rs", "") .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = ["wycats@example.com"] edition = "2015" [lib] edition = "2018" "#, ) .build(); p.cargo("metadata") .with_json( r#" { "packages": [ { "authors": [ "wycats@example.com" ], "categories": [], "default_run": null, "dependencies": [], "description": null, "edition": "2015", "features": {}, "id": "foo 0.1.0 (path+file:[..])", "keywords": [], "license": null, "license_file": null, "links": null, "manifest_path": "[..]Cargo.toml", "metadata": null, "publish": null, "name": "foo", "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "source": null, "targets": [ { "crate_types": [ "lib" ], "doc": true, "doctest": true, "test": true, "edition": "2018", "kind": [ "lib" ], "name": "foo", "src_path": "[..]src/lib.rs" }, { "crate_types": [ "bin" ], "doc": true, "doctest": false, "test": true, "edition": "2015", "kind": [ "bin" ], "name": "foo", "src_path": "[..]src/main.rs" } ], "version": "0.1.0" } 
], "resolve": { "nodes": [ { "dependencies": [], "deps": [], "features": [], "id": "foo 0.1.0 (path+file:[..])" } ], "root": "foo 0.1.0 (path+file:[..])" }, "target_directory": "[..]", "version": 1, "workspace_members": [ "foo 0.1.0 (path+file:[..])" ], "workspace_root": "[..]", "metadata": null } "#, ) .run(); } #[cargo_test] fn rename_dependency() { Package::new("bar", "0.1.0").publish(); Package::new("bar", "0.2.0").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = { version = "0.1.0" } baz = { version = "0.2.0", package = "bar" } "#, ) .file("src/lib.rs", "extern crate bar; extern crate baz;") .build(); p.cargo("metadata") .with_json( r#" { "packages": [ { "authors": [], "categories": [], "default_run": null, "dependencies": [], "description": null, "edition": "2015", "features": {}, "id": "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "keywords": [], "license": null, "license_file": null, "links": null, "manifest_path": "[..]", "metadata": null, "publish": null, "name": "bar", "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "source": "registry+https://github.com/rust-lang/crates.io-index", "targets": [ { "crate_types": [ "lib" ], "doc": true, "doctest": true, "test": true, "edition": "2015", "kind": [ "lib" ], "name": "bar", "src_path": "[..]" } ], "version": "0.1.0" }, { "authors": [], "categories": [], "default_run": null, "dependencies": [], "description": null, "edition": "2015", "features": {}, "id": "bar 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "keywords": [], "license": null, "license_file": null, "links": null, "manifest_path": "[..]", "metadata": null, "publish": null, "name": "bar", "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "source": "registry+https://github.com/rust-lang/crates.io-index", "targets": [ { "crate_types": [ 
"lib" ], "doc": true, "doctest": true, "test": true, "edition": "2015", "kind": [ "lib" ], "name": "bar", "src_path": "[..]" } ], "version": "0.2.0" }, { "authors": [], "categories": [], "default_run": null, "dependencies": [ { "features": [], "kind": null, "name": "bar", "optional": false, "rename": null, "registry": null, "req": "^0.1.0", "source": "registry+https://github.com/rust-lang/crates.io-index", "target": null, "uses_default_features": true }, { "features": [], "kind": null, "name": "bar", "optional": false, "rename": "baz", "registry": null, "req": "^0.2.0", "source": "registry+https://github.com/rust-lang/crates.io-index", "target": null, "uses_default_features": true } ], "description": null, "edition": "2015", "features": {}, "id": "foo 0.0.1[..]", "keywords": [], "license": null, "license_file": null, "links": null, "manifest_path": "[..]", "metadata": null, "publish": null, "name": "foo", "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "source": null, "targets": [ { "crate_types": [ "lib" ], "doc": true, "doctest": true, "test": true, "edition": "2015", "kind": [ "lib" ], "name": "foo", "src_path": "[..]" } ], "version": "0.0.1" } ], "resolve": { "nodes": [ { "dependencies": [], "deps": [], "features": [], "id": "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" }, { "dependencies": [], "deps": [], "features": [], "id": "bar 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" }, { "dependencies": [ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "bar 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" ], "deps": [ { "dep_kinds": [ { "kind": null, "target": null } ], "name": "bar", "pkg": "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" }, { "dep_kinds": [ { "kind": null, "target": null } ], "name": "baz", "pkg": "bar 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" } ], "features": [], "id": "foo 0.0.1[..]" 
} ], "root": "foo 0.0.1[..]" }, "target_directory": "[..]", "version": 1, "workspace_members": [ "foo 0.0.1[..]" ], "workspace_root": "[..]", "metadata": null }"#, ) .run(); } #[cargo_test] fn metadata_links() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" links = "a" "#, ) .file("src/lib.rs", "") .file("build.rs", "fn main() {}") .build(); p.cargo("metadata") .with_json( r#" { "packages": [ { "authors": [], "categories": [], "default_run": null, "dependencies": [], "description": null, "edition": "2015", "features": {}, "id": "foo 0.5.0 [..]", "keywords": [], "license": null, "license_file": null, "links": "a", "manifest_path": "[..]/foo/Cargo.toml", "metadata": null, "publish": null, "name": "foo", "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "source": null, "targets": [ { "crate_types": [ "lib" ], "doc": true, "doctest": true, "test": true, "edition": "2015", "kind": [ "lib" ], "name": "foo", "src_path": "[..]/foo/src/lib.rs" }, { "crate_types": [ "bin" ], "doc": false, "doctest": false, "test": false, "edition": "2015", "kind": [ "custom-build" ], "name": "build-script-build", "src_path": "[..]/foo/build.rs" } ], "version": "0.5.0" } ], "resolve": { "nodes": [ { "dependencies": [], "deps": [], "features": [], "id": "foo 0.5.0 [..]" } ], "root": "foo 0.5.0 [..]" }, "target_directory": "[..]/foo/target", "version": 1, "workspace_members": [ "foo 0.5.0 [..]" ], "workspace_root": "[..]/foo", "metadata": null } "#, ) .run() } #[cargo_test] fn deps_with_bin_only() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bdep = { path = "bdep" } "#, ) .file("src/lib.rs", "") .file("bdep/Cargo.toml", &basic_bin_manifest("bdep")) .file("bdep/src/main.rs", "fn main() {}") .build(); p.cargo("metadata") .with_json( r#" { "packages": [ { "name": "foo", "version": "0.1.0", "id": "foo 0.1.0 ([..])", "license": null, "license_file": null, 
"description": null, "source": null, "dependencies": [ { "name": "bdep", "source": null, "req": "*", "kind": null, "rename": null, "optional": false, "uses_default_features": true, "path": "[..]/foo/bdep", "features": [], "target": null, "registry": null } ], "targets": [ { "kind": [ "lib" ], "crate_types": [ "lib" ], "name": "foo", "src_path": "[..]/foo/src/lib.rs", "edition": "2015", "doc": true, "doctest": true, "test": true } ], "features": {}, "manifest_path": "[..]/foo/Cargo.toml", "metadata": null, "publish": null, "authors": [], "categories": [], "default_run": null, "keywords": [], "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "edition": "2015", "links": null } ], "workspace_members": [ "foo 0.1.0 ([..])" ], "resolve": { "nodes": [ { "id": "foo 0.1.0 ([..])", "dependencies": [], "deps": [], "features": [] } ], "root": "foo 0.1.0 ([..])" }, "target_directory": "[..]/foo/target", "version": 1, "workspace_root": "[..]foo", "metadata": null } "#, ) .run(); } #[cargo_test] fn filter_platform() { // Testing the --filter-platform flag. Package::new("normal-dep", "0.0.1").publish(); Package::new("host-dep", "0.0.1").publish(); Package::new("alt-dep", "0.0.1").publish(); Package::new("cfg-dep", "0.0.1").publish(); // Just needs to be a valid target that is different from host. // Presumably nobody runs these tests on wasm. 
πŸ™ƒ let alt_target = "wasm32-unknown-unknown"; let host_target = rustc_host(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.1.0" [dependencies] normal-dep = "0.0.1" [target.{}.dependencies] host-dep = "0.0.1" [target.{}.dependencies] alt-dep = "0.0.1" [target.'cfg(foobar)'.dependencies] cfg-dep = "0.0.1" "#, host_target, alt_target ), ) .file("src/lib.rs", "") .build(); let alt_dep = r#" { "name": "alt-dep", "version": "0.0.1", "id": "alt-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "license": null, "license_file": null, "description": null, "source": "registry+https://github.com/rust-lang/crates.io-index", "dependencies": [], "targets": [ { "kind": [ "lib" ], "crate_types": [ "lib" ], "name": "alt-dep", "src_path": "[..]/alt-dep-0.0.1/src/lib.rs", "edition": "2015", "test": true, "doc": true, "doctest": true } ], "features": {}, "manifest_path": "[..]/alt-dep-0.0.1/Cargo.toml", "metadata": null, "publish": null, "authors": [], "categories": [], "default_run": null, "keywords": [], "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "edition": "2015", "links": null } "#; let cfg_dep = r#" { "name": "cfg-dep", "version": "0.0.1", "id": "cfg-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "license": null, "license_file": null, "description": null, "source": "registry+https://github.com/rust-lang/crates.io-index", "dependencies": [], "targets": [ { "kind": [ "lib" ], "crate_types": [ "lib" ], "name": "cfg-dep", "src_path": "[..]/cfg-dep-0.0.1/src/lib.rs", "edition": "2015", "test": true, "doc": true, "doctest": true } ], "features": {}, "manifest_path": "[..]/cfg-dep-0.0.1/Cargo.toml", "metadata": null, "publish": null, "authors": [], "categories": [], "default_run": null, "keywords": [], "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "edition": "2015", "links": null } "#; let 
host_dep = r#" { "name": "host-dep", "version": "0.0.1", "id": "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "license": null, "license_file": null, "description": null, "source": "registry+https://github.com/rust-lang/crates.io-index", "dependencies": [], "targets": [ { "kind": [ "lib" ], "crate_types": [ "lib" ], "name": "host-dep", "src_path": "[..]/host-dep-0.0.1/src/lib.rs", "edition": "2015", "test": true, "doc": true, "doctest": true } ], "features": {}, "manifest_path": "[..]/host-dep-0.0.1/Cargo.toml", "metadata": null, "publish": null, "authors": [], "categories": [], "default_run": null, "keywords": [], "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "edition": "2015", "links": null } "#; let normal_dep = r#" { "name": "normal-dep", "version": "0.0.1", "id": "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "license": null, "license_file": null, "description": null, "source": "registry+https://github.com/rust-lang/crates.io-index", "dependencies": [], "targets": [ { "kind": [ "lib" ], "crate_types": [ "lib" ], "name": "normal-dep", "src_path": "[..]/normal-dep-0.0.1/src/lib.rs", "edition": "2015", "test": true, "doc": true, "doctest": true } ], "features": {}, "manifest_path": "[..]/normal-dep-0.0.1/Cargo.toml", "metadata": null, "publish": null, "authors": [], "categories": [], "default_run": null, "keywords": [], "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "edition": "2015", "links": null } "#; // The dependencies are stored in sorted order by target and then by name. // Since the testsuite may run on different targets, this needs to be // sorted before it can be compared. 
let mut foo_deps = serde_json::json!([ { "name": "normal-dep", "source": "registry+https://github.com/rust-lang/crates.io-index", "req": "^0.0.1", "kind": null, "rename": null, "optional": false, "uses_default_features": true, "features": [], "target": null, "registry": null }, { "name": "cfg-dep", "source": "registry+https://github.com/rust-lang/crates.io-index", "req": "^0.0.1", "kind": null, "rename": null, "optional": false, "uses_default_features": true, "features": [], "target": "cfg(foobar)", "registry": null }, { "name": "alt-dep", "source": "registry+https://github.com/rust-lang/crates.io-index", "req": "^0.0.1", "kind": null, "rename": null, "optional": false, "uses_default_features": true, "features": [], "target": alt_target, "registry": null }, { "name": "host-dep", "source": "registry+https://github.com/rust-lang/crates.io-index", "req": "^0.0.1", "kind": null, "rename": null, "optional": false, "uses_default_features": true, "features": [], "target": host_target, "registry": null } ]); foo_deps.as_array_mut().unwrap().sort_by(|a, b| { // This really should be `rename`, but not needed here. // Also, sorting on `name` isn't really necessary since this test // only has one package per target, but leaving it here to be safe. 
let a = (a["target"].as_str(), a["name"].as_str()); let b = (b["target"].as_str(), b["name"].as_str()); a.cmp(&b) }); let foo = r#" { "name": "foo", "version": "0.1.0", "id": "foo 0.1.0 (path+file:[..]foo)", "license": null, "license_file": null, "description": null, "source": null, "dependencies": $FOO_DEPS, "targets": [ { "kind": [ "lib" ], "crate_types": [ "lib" ], "name": "foo", "src_path": "[..]/foo/src/lib.rs", "edition": "2015", "test": true, "doc": true, "doctest": true } ], "features": {}, "manifest_path": "[..]/foo/Cargo.toml", "metadata": null, "publish": null, "authors": [], "categories": [], "default_run": null, "keywords": [], "readme": null, "repository": null, "rust_version": null, "homepage": null, "documentation": null, "edition": "2015", "links": null } "# .replace("$ALT_TRIPLE", alt_target) .replace("$HOST_TRIPLE", host_target) .replace("$FOO_DEPS", &foo_deps.to_string()); // We're going to be checking that we don't download excessively, // so we need to ensure that downloads will happen. let clear = || { cargo_home().join("registry/cache").rm_rf(); cargo_home().join("registry/src").rm_rf(); p.build_dir().rm_rf(); }; // Normal metadata, no filtering, returns *everything*. p.cargo("metadata") .with_stderr_unordered( "\ [UPDATING] [..] [WARNING] please specify `--format-version` flag explicitly to avoid compatibility problems [DOWNLOADING] crates ... [DOWNLOADED] normal-dep v0.0.1 [..] [DOWNLOADED] host-dep v0.0.1 [..] [DOWNLOADED] alt-dep v0.0.1 [..] [DOWNLOADED] cfg-dep v0.0.1 [..] 
", ) .with_json( &r#" { "packages": [ $ALT_DEP, $CFG_DEP, $FOO, $HOST_DEP, $NORMAL_DEP ], "workspace_members": [ "foo 0.1.0 (path+file:[..]foo)" ], "resolve": { "nodes": [ { "id": "alt-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "dependencies": [], "deps": [], "features": [] }, { "id": "cfg-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "dependencies": [], "deps": [], "features": [] }, { "id": "foo 0.1.0 (path+file:[..]foo)", "dependencies": [ "alt-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "cfg-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" ], "deps": [ { "name": "alt_dep", "pkg": "alt-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "dep_kinds": [ { "kind": null, "target": "$ALT_TRIPLE" } ] }, { "name": "cfg_dep", "pkg": "cfg-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "dep_kinds": [ { "kind": null, "target": "cfg(foobar)" } ] }, { "name": "host_dep", "pkg": "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "dep_kinds": [ { "kind": null, "target": "$HOST_TRIPLE" } ] }, { "name": "normal_dep", "pkg": "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "dep_kinds": [ { "kind": null, "target": null } ] } ], "features": [] }, { "id": "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "dependencies": [], "deps": [], "features": [] }, { "id": "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "dependencies": [], "deps": [], "features": [] } ], "root": "foo 0.1.0 (path+file:[..]foo)" }, "target_directory": "[..]/foo/target", "version": 1, "workspace_root": "[..]/foo", "metadata": null } "# .replace("$ALT_TRIPLE", alt_target) .replace("$HOST_TRIPLE", host_target) .replace("$ALT_DEP", alt_dep) 
.replace("$CFG_DEP", cfg_dep) .replace("$HOST_DEP", host_dep) .replace("$NORMAL_DEP", normal_dep) .replace("$FOO", &foo), ) .run(); clear(); // Filter on alternate, removes cfg and host. p.cargo("metadata --filter-platform") .arg(alt_target) .with_stderr_unordered( "\ [WARNING] please specify `--format-version` flag explicitly to avoid compatibility problems [DOWNLOADING] crates ... [DOWNLOADED] normal-dep v0.0.1 [..] [DOWNLOADED] host-dep v0.0.1 [..] [DOWNLOADED] alt-dep v0.0.1 [..] ", ) .with_json( &r#" { "packages": [ $ALT_DEP, $FOO, $NORMAL_DEP ], "workspace_members": "{...}", "resolve": { "nodes": [ { "id": "alt-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "dependencies": [], "deps": [], "features": [] }, { "id": "foo 0.1.0 (path+file:[..]foo)", "dependencies": [ "alt-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" ], "deps": [ { "name": "alt_dep", "pkg": "alt-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "dep_kinds": [ { "kind": null, "target": "$ALT_TRIPLE" } ] }, { "name": "normal_dep", "pkg": "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "dep_kinds": [ { "kind": null, "target": null } ] } ], "features": [] }, { "id": "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "dependencies": [], "deps": [], "features": [] } ], "root": "foo 0.1.0 (path+file:[..]foo)" }, "target_directory": "[..]foo/target", "version": 1, "workspace_root": "[..]foo", "metadata": null } "# .replace("$ALT_TRIPLE", alt_target) .replace("$ALT_DEP", alt_dep) .replace("$NORMAL_DEP", normal_dep) .replace("$FOO", &foo), ) .run(); clear(); // Filter on host, removes alt and cfg. p.cargo("metadata --filter-platform") .arg(&host_target) .with_stderr_unordered( "\ [WARNING] please specify `--format-version` flag explicitly to avoid compatibility problems [DOWNLOADING] crates ... 
[DOWNLOADED] normal-dep v0.0.1 [..] [DOWNLOADED] host-dep v0.0.1 [..] ", ) .with_json( &r#" { "packages": [ $FOO, $HOST_DEP, $NORMAL_DEP ], "workspace_members": "{...}", "resolve": { "nodes": [ { "id": "foo 0.1.0 (path+file:[..]foo)", "dependencies": [ "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" ], "deps": [ { "name": "host_dep", "pkg": "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "dep_kinds": [ { "kind": null, "target": "$HOST_TRIPLE" } ] }, { "name": "normal_dep", "pkg": "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "dep_kinds": [ { "kind": null, "target": null } ] } ], "features": [] }, { "id": "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "dependencies": [], "deps": [], "features": [] }, { "id": "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "dependencies": [], "deps": [], "features": [] } ], "root": "foo 0.1.0 (path+file:[..]foo)" }, "target_directory": "[..]foo/target", "version": 1, "workspace_root": "[..]foo", "metadata": null } "# .replace("$HOST_TRIPLE", host_target) .replace("$HOST_DEP", host_dep) .replace("$NORMAL_DEP", normal_dep) .replace("$FOO", &foo), ) .run(); clear(); // Filter host with cfg, removes alt only p.cargo("metadata --filter-platform") .arg(&host_target) .env("RUSTFLAGS", "--cfg=foobar") .with_stderr_unordered( "\ [WARNING] please specify `--format-version` flag explicitly to avoid compatibility problems [DOWNLOADING] crates ... [DOWNLOADED] normal-dep v0.0.1 [..] [DOWNLOADED] host-dep v0.0.1 [..] [DOWNLOADED] cfg-dep v0.0.1 [..] 
", ) .with_json( &r#" { "packages": [ $CFG_DEP, $FOO, $HOST_DEP, $NORMAL_DEP ], "workspace_members": "{...}", "resolve": { "nodes": [ { "id": "cfg-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "dependencies": [], "deps": [], "features": [] }, { "id": "foo 0.1.0 (path+file:[..]/foo)", "dependencies": [ "cfg-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" ], "deps": [ { "name": "cfg_dep", "pkg": "cfg-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "dep_kinds": [ { "kind": null, "target": "cfg(foobar)" } ] }, { "name": "host_dep", "pkg": "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "dep_kinds": [ { "kind": null, "target": "$HOST_TRIPLE" } ] }, { "name": "normal_dep", "pkg": "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "dep_kinds": [ { "kind": null, "target": null } ] } ], "features": [] }, { "id": "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "dependencies": [], "deps": [], "features": [] }, { "id": "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "dependencies": [], "deps": [], "features": [] } ], "root": "foo 0.1.0 (path+file:[..]/foo)" }, "target_directory": "[..]/foo/target", "version": 1, "workspace_root": "[..]/foo", "metadata": null } "# .replace("$HOST_TRIPLE", host_target) .replace("$CFG_DEP", cfg_dep) .replace("$HOST_DEP", host_dep) .replace("$NORMAL_DEP", normal_dep) .replace("$FOO", &foo), ) .run(); } #[cargo_test] fn dep_kinds() { Package::new("bar", "0.1.0").publish(); Package::new("winapi", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "0.1" [dev-dependencies] bar = "0.1" [build-dependencies] bar = "0.1" [target.'cfg(windows)'.dependencies] winapi = "0.1" 
"#, ) .file("src/lib.rs", "") .build(); p.cargo("metadata") .with_json( r#" { "packages": "{...}", "workspace_members": "{...}", "target_directory": "{...}", "version": 1, "workspace_root": "{...}", "metadata": null, "resolve": { "nodes": [ { "id": "bar 0.1.0 [..]", "dependencies": [], "deps": [], "features": [] }, { "id": "foo 0.1.0 [..]", "dependencies": [ "bar 0.1.0 [..]", "winapi 0.1.0 [..]" ], "deps": [ { "name": "bar", "pkg": "bar 0.1.0 [..]", "dep_kinds": [ { "kind": null, "target": null }, { "kind": "dev", "target": null }, { "kind": "build", "target": null } ] }, { "name": "winapi", "pkg": "winapi 0.1.0 [..]", "dep_kinds": [ { "kind": null, "target": "cfg(windows)" } ] } ], "features": [] }, { "id": "winapi 0.1.0 [..]", "dependencies": [], "deps": [], "features": [] } ], "root": "foo 0.1.0 [..]" } } "#, ) .run(); } #[cargo_test] fn dep_kinds_workspace() { // Check for bug with duplicate dep kinds in a workspace. // If different members select different features for the same package, // they show up multiple times in the resolver `deps`. 
// // Here: // foo -> dep // bar -> foo[feat1] -> dep let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [features] feat1 = [] [dependencies] dep = { path="dep" } [workspace] members = ["bar"] "#, ) .file("src/lib.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" [dependencies] foo = { path="..", features=["feat1"] } "#, ) .file("bar/src/lib.rs", "") .file("dep/Cargo.toml", &basic_lib_manifest("dep")) .file("dep/src/lib.rs", "") .build(); p.cargo("metadata") .with_json( r#" { "packages": "{...}", "workspace_members": "{...}", "target_directory": "[..]/foo/target", "version": 1, "workspace_root": "[..]/foo", "metadata": null, "resolve": { "nodes": [ { "id": "bar 0.1.0 (path+file://[..]/foo/bar)", "dependencies": [ "foo 0.1.0 (path+file://[..]/foo)" ], "deps": [ { "name": "foo", "pkg": "foo 0.1.0 (path+file://[..]/foo)", "dep_kinds": [ { "kind": null, "target": null } ] } ], "features": [] }, { "id": "dep 0.5.0 (path+file://[..]/foo/dep)", "dependencies": [], "deps": [], "features": [] }, { "id": "foo 0.1.0 (path+file://[..]/foo)", "dependencies": [ "dep 0.5.0 (path+file://[..]/foo/dep)" ], "deps": [ { "name": "dep", "pkg": "dep 0.5.0 (path+file://[..]/foo/dep)", "dep_kinds": [ { "kind": null, "target": null } ] } ], "features": [ "feat1" ] } ], "root": "foo 0.1.0 (path+file://[..]/foo)" } } "#, ) .run(); } // Creating non-utf8 path is an OS-specific pain, so let's run this only on // linux, where arbitrary bytes work. 
#[cfg(target_os = "linux")] #[cargo_test] fn cargo_metadata_non_utf8() { use std::ffi::OsString; use std::os::unix::ffi::OsStringExt; use std::path::PathBuf; let base = PathBuf::from(OsString::from_vec(vec![255])); let p = project() .no_manifest() .file(base.join("./src/lib.rs"), "") .file(base.join("./Cargo.toml"), &basic_lib_manifest("foo")) .build(); p.cargo("metadata") .cwd(p.root().join(base)) .arg("--format-version") .arg("1") .with_stderr("error: path contains invalid UTF-8 characters") .with_status(101) .run(); } // TODO: Consider using this test instead of the version without the 'artifact' suffix or merge them because they should be pretty much the same. #[cargo_test] fn workspace_metadata_with_dependencies_no_deps_artifact() { let p = project() // NOTE that 'artifact' isn't mentioned in the workspace here, yet it shows up as member. .file( "Cargo.toml", r#" [workspace] members = ["bar", "baz"] "#, ) .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] [dependencies] baz = { path = "../baz/" } baz-renamed = { path = "../baz/" } artifact = { path = "../artifact/", artifact = "bin" } "#, ) .file("bar/src/lib.rs", "") .file("baz/Cargo.toml", &basic_lib_manifest("baz")) .file("baz/src/lib.rs", "") .file("artifact/Cargo.toml", &basic_bin_manifest("artifact")) .file("artifact/src/main.rs", "fn main() {}") .build(); p.cargo("metadata --no-deps -Z bindeps") .masquerade_as_nightly_cargo(&["bindeps"]) .with_json( r#" { "metadata": null, "packages": [ { "authors": [ "wycats@example.com" ], "categories": [], "default_run": null, "dependencies": [ { "artifact": { "kinds": [ "bin" ], "lib": false, "target": null }, "features": [], "kind": null, "name": "artifact", "optional": false, "path": "[..]/foo/artifact", "registry": null, "rename": null, "req": "*", "source": null, "target": null, "uses_default_features": true }, { "features": [], "kind": null, "name": "baz", "optional": false, "path": "[..]/foo/baz", "registry": 
null, "rename": null, "req": "*", "source": null, "target": null, "uses_default_features": true }, { "features": [], "kind": null, "name": "baz-renamed", "optional": false, "path": "[..]/foo/baz", "registry": null, "rename": null, "req": "*", "source": null, "target": null, "uses_default_features": true } ], "description": null, "documentation": null, "edition": "2015", "features": {}, "homepage": null, "id": "bar 0.5.0 (path+file://[..]/foo/bar)", "keywords": [], "license": null, "license_file": null, "links": null, "manifest_path": "[..]/foo/bar/Cargo.toml", "metadata": null, "name": "bar", "publish": null, "readme": null, "repository": null, "rust_version": null, "source": null, "targets": [ { "crate_types": [ "lib" ], "doc": true, "doctest": true, "edition": "2015", "kind": [ "lib" ], "name": "bar", "src_path": "[..]/foo/bar/src/lib.rs", "test": true } ], "version": "0.5.0" }, { "authors": [ "wycats@example.com" ], "categories": [], "default_run": null, "dependencies": [], "description": null, "documentation": null, "edition": "2015", "features": {}, "homepage": null, "id": "artifact 0.5.0 (path+file://[..]/foo/artifact)", "keywords": [], "license": null, "license_file": null, "links": null, "manifest_path": "[..]/foo/artifact/Cargo.toml", "metadata": null, "name": "artifact", "publish": null, "readme": null, "repository": null, "rust_version": null, "source": null, "targets": [ { "crate_types": [ "bin" ], "doc": true, "doctest": false, "edition": "2015", "kind": [ "bin" ], "name": "artifact", "src_path": "[..]/foo/artifact/src/main.rs", "test": true } ], "version": "0.5.0" }, { "authors": [ "wycats@example.com" ], "categories": [], "default_run": null, "dependencies": [], "description": null, "documentation": null, "edition": "2015", "features": {}, "homepage": null, "id": "baz 0.5.0 (path+file://[..]/foo/baz)", "keywords": [], "license": null, "license_file": null, "links": null, "manifest_path": "[..]/foo/baz/Cargo.toml", "metadata": null, "name": "baz", 
"publish": null, "readme": null, "repository": null, "rust_version": null, "source": null, "targets": [ { "crate_types": [ "lib" ], "doc": true, "doctest": true, "edition": "2015", "kind": [ "lib" ], "name": "baz", "src_path": "[..]/foo/baz/src/lib.rs", "test": true } ], "version": "0.5.0" } ], "resolve": null, "target_directory": "[..]/foo/target", "version": 1, "workspace_members": [ "bar 0.5.0 (path+file://[..]/foo/bar)", "artifact 0.5.0 (path+file://[..]/foo/artifact)", "baz 0.5.0 (path+file://[..]/foo/baz)" ], "workspace_root": "[..]/foo" } "#, ) .run(); } cargo-0.66.0/tests/testsuite/minimal_versions.rs000066400000000000000000000017441432416201200220160ustar00rootroot00000000000000//! Tests for minimal-version resolution. //! //! Note: Some tests are located in the resolver-tests package. use cargo_test_support::project; use cargo_test_support::registry::Package; // Ensure that the "-Z minimal-versions" CLI option works and the minimal // version of a dependency ends up in the lock file. 
#[cargo_test] fn minimal_version_cli() { Package::new("dep", "1.0.0").publish(); Package::new("dep", "1.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.1" [dependencies] dep = "1.0" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("generate-lockfile -Zminimal-versions") .masquerade_as_nightly_cargo(&["minimal-versions"]) .run(); let lock = p.read_lockfile(); assert!(!lock.contains("1.1.0")); } cargo-0.66.0/tests/testsuite/mock-std/000077500000000000000000000000001432416201200176055ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/mock-std/Cargo.toml000066400000000000000000000001721432416201200215350ustar00rootroot00000000000000[workspace] members = [ "library/alloc", "library/core", "library/proc_macro", "library/std", "library/test", ] cargo-0.66.0/tests/testsuite/mock-std/library/000077500000000000000000000000001432416201200212515ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/mock-std/library/alloc/000077500000000000000000000000001432416201200223435ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/mock-std/library/alloc/Cargo.toml000066400000000000000000000003061432416201200242720ustar00rootroot00000000000000[package] name = "alloc" version = "0.1.0" authors = ["Alex Crichton "] edition = "2018" [dependencies] registry-dep-using-core = { version = "*", features = ['mockbuild'] } cargo-0.66.0/tests/testsuite/mock-std/library/alloc/src/000077500000000000000000000000001432416201200231325ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/mock-std/library/alloc/src/lib.rs000066400000000000000000000003441432416201200242470ustar00rootroot00000000000000#![feature(staged_api)] #![stable(since = "1.0.0", feature = "dummy")] extern crate alloc; #[stable(since = "1.0.0", feature = "dummy")] pub use alloc::*; #[stable(since = "1.0.0", feature = "dummy")] pub fn custom_api() { } 
cargo-0.66.0/tests/testsuite/mock-std/library/compiler_builtins/000077500000000000000000000000001432416201200247745ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/mock-std/library/compiler_builtins/Cargo.toml000066400000000000000000000001741432416201200267260ustar00rootroot00000000000000[package] name = "compiler_builtins" version = "0.1.0" authors = ["Alex Crichton "] edition = "2018" cargo-0.66.0/tests/testsuite/mock-std/library/compiler_builtins/src/000077500000000000000000000000001432416201200255635ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/mock-std/library/compiler_builtins/src/lib.rs000066400000000000000000000000271432416201200266760ustar00rootroot00000000000000// intentionally blank cargo-0.66.0/tests/testsuite/mock-std/library/core/000077500000000000000000000000001432416201200222015ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/mock-std/library/core/Cargo.toml000066400000000000000000000001571432416201200241340ustar00rootroot00000000000000[package] name = "core" version = "0.1.0" authors = ["Alex Crichton "] edition = "2018" cargo-0.66.0/tests/testsuite/mock-std/library/core/src/000077500000000000000000000000001432416201200227705ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/mock-std/library/core/src/lib.rs000066400000000000000000000003161432416201200241040ustar00rootroot00000000000000#![feature(staged_api)] #![stable(since = "1.0.0", feature = "dummy")] #[stable(since = "1.0.0", feature = "dummy")] pub use core::*; #[stable(since = "1.0.0", feature = "dummy")] pub fn custom_api() { } cargo-0.66.0/tests/testsuite/mock-std/library/panic_unwind/000077500000000000000000000000001432416201200237275ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/mock-std/library/panic_unwind/Cargo.toml000066400000000000000000000001671432416201200256630ustar00rootroot00000000000000[package] name = "panic_unwind" version = "0.1.0" authors = ["Alex Crichton "] edition = "2018" 
cargo-0.66.0/tests/testsuite/mock-std/library/panic_unwind/src/000077500000000000000000000000001432416201200245165ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/mock-std/library/panic_unwind/src/lib.rs000066400000000000000000000001421432416201200256270ustar00rootroot00000000000000#![feature(panic_unwind, panic_runtime)] #![panic_runtime] #![no_std] extern crate panic_unwind; cargo-0.66.0/tests/testsuite/mock-std/library/proc_macro/000077500000000000000000000000001432416201200233755ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/mock-std/library/proc_macro/Cargo.toml000066400000000000000000000001651432416201200253270ustar00rootroot00000000000000[package] name = "proc_macro" version = "0.1.0" authors = ["Alex Crichton "] edition = "2018" cargo-0.66.0/tests/testsuite/mock-std/library/proc_macro/src/000077500000000000000000000000001432416201200241645ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/mock-std/library/proc_macro/src/lib.rs000066400000000000000000000003561432416201200253040ustar00rootroot00000000000000#![feature(staged_api)] #![stable(since = "1.0.0", feature = "dummy")] extern crate proc_macro; #[stable(since = "1.0.0", feature = "dummy")] pub use proc_macro::*; #[stable(since = "1.0.0", feature = "dummy")] pub fn custom_api() { } cargo-0.66.0/tests/testsuite/mock-std/library/rustc-std-workspace-alloc/000077500000000000000000000000001432416201200262655ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/mock-std/library/rustc-std-workspace-alloc/Cargo.toml000066400000000000000000000003111432416201200302100ustar00rootroot00000000000000[package] name = "rustc-std-workspace-alloc" version = "1.9.0" authors = ["Alex Crichton "] edition = "2018" [lib] path = "lib.rs" [dependencies] alloc = { path = "../alloc" } cargo-0.66.0/tests/testsuite/mock-std/library/rustc-std-workspace-alloc/lib.rs000066400000000000000000000000361432416201200274000ustar00rootroot00000000000000#![no_std] pub use alloc::*; 
cargo-0.66.0/tests/testsuite/mock-std/library/rustc-std-workspace-core/000077500000000000000000000000001432416201200261235ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/mock-std/library/rustc-std-workspace-core/Cargo.toml000066400000000000000000000003061432416201200300520ustar00rootroot00000000000000[package] name = "rustc-std-workspace-core" version = "1.9.0" authors = ["Alex Crichton "] edition = "2018" [lib] path = "lib.rs" [dependencies] core = { path = "../core" } cargo-0.66.0/tests/testsuite/mock-std/library/rustc-std-workspace-core/lib.rs000066400000000000000000000000351432416201200272350ustar00rootroot00000000000000#![no_std] pub use core::*; cargo-0.66.0/tests/testsuite/mock-std/library/rustc-std-workspace-std/000077500000000000000000000000001432416201200257655ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/mock-std/library/rustc-std-workspace-std/Cargo.toml000066400000000000000000000003031432416201200277110ustar00rootroot00000000000000[package] name = "rustc-std-workspace-std" version = "1.9.0" authors = ["Alex Crichton "] edition = "2018" [lib] path = "lib.rs" [dependencies] std = { path = "../std" } cargo-0.66.0/tests/testsuite/mock-std/library/rustc-std-workspace-std/lib.rs000066400000000000000000000000201432416201200270710ustar00rootroot00000000000000pub use std::*; cargo-0.66.0/tests/testsuite/mock-std/library/std/000077500000000000000000000000001432416201200220435ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/mock-std/library/std/Cargo.toml000066400000000000000000000003371432416201200237760ustar00rootroot00000000000000[package] name = "std" version = "0.1.0" authors = ["Alex Crichton "] edition = "2018" [dependencies] registry-dep-using-alloc = { version = "*", features = ['mockbuild'] } [features] feature1 = [] 
cargo-0.66.0/tests/testsuite/mock-std/library/std/src/000077500000000000000000000000001432416201200226325ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/mock-std/library/std/src/lib.rs000066400000000000000000000004711432416201200237500ustar00rootroot00000000000000#![feature(staged_api)] #![stable(since = "1.0.0", feature = "dummy")] #[stable(since = "1.0.0", feature = "dummy")] pub use std::*; #[stable(since = "1.0.0", feature = "dummy")] pub fn custom_api() {} #[cfg(feature = "feature1")] #[stable(since = "1.0.0", feature = "dummy")] pub fn conditional_function() {} cargo-0.66.0/tests/testsuite/mock-std/library/test/000077500000000000000000000000001432416201200222305ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/mock-std/library/test/Cargo.toml000066400000000000000000000006761432416201200241710ustar00rootroot00000000000000[package] name = "test" version = "0.1.0" authors = ["Alex Crichton "] edition = "2018" [dependencies] proc_macro = { path = "../proc_macro" } std = { path = "../std" } panic_unwind = { path = "../panic_unwind" } compiler_builtins = { path = "../compiler_builtins" } registry-dep-using-std = { version = "*", features = ['mockbuild'] } [features] panic-unwind = [] backtrace = [] feature1 = ["std/feature1"] default = [] cargo-0.66.0/tests/testsuite/mock-std/library/test/src/000077500000000000000000000000001432416201200230175ustar00rootroot00000000000000cargo-0.66.0/tests/testsuite/mock-std/library/test/src/lib.rs000066400000000000000000000002301432416201200241260ustar00rootroot00000000000000#![feature(staged_api)] #![feature(test)] #![unstable(feature = "test", issue = "none")] extern crate test; pub use test::*; pub fn custom_api() { } cargo-0.66.0/tests/testsuite/multitarget.rs000066400000000000000000000124451432416201200210010ustar00rootroot00000000000000//! 
Tests for multiple `--target` flags to subcommands use cargo_test_support::{basic_manifest, cross_compile, project, rustc_host}; #[cargo_test] fn simple_build() { if cross_compile::disabled() { return; } let t1 = cross_compile::alternate(); let t2 = rustc_host(); let p = project() .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build") .arg("--target") .arg(&t1) .arg("--target") .arg(&t2) .run(); assert!(p.target_bin(t1, "foo").is_file()); assert!(p.target_bin(t2, "foo").is_file()); } #[cargo_test] fn simple_build_with_config() { if cross_compile::disabled() { return; } let t1 = cross_compile::alternate(); let t2 = rustc_host(); let p = project() .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) .file("src/main.rs", "fn main() {}") .file( ".cargo/config.toml", &format!( r#" [build] target = ["{t1}", "{t2}"] "# ), ) .build(); p.cargo("build").run(); assert!(p.target_bin(t1, "foo").is_file()); assert!(p.target_bin(t2, "foo").is_file()); } #[cargo_test] fn simple_test() { if !cross_compile::can_run_on_host() { return; } let t1 = cross_compile::alternate(); let t2 = rustc_host(); let p = project() .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) .file("src/lib.rs", "fn main() {}") .build(); p.cargo("test") .arg("--target") .arg(&t1) .arg("--target") .arg(&t2) .with_stderr_contains(&format!("[RUNNING] [..]{}[..]", t1)) .with_stderr_contains(&format!("[RUNNING] [..]{}[..]", t2)) .run(); } #[cargo_test] fn simple_run() { let p = project() .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) .file("src/main.rs", "fn main() {}") .build(); p.cargo("run --target a --target b") .with_stderr("[ERROR] only one `--target` argument is supported") .with_status(101) .run(); } #[cargo_test] fn simple_doc() { if cross_compile::disabled() { return; } let t1 = cross_compile::alternate(); let t2 = rustc_host(); let p = project() .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) .file("src/lib.rs", "//! 
empty lib") .build(); p.cargo("doc") .arg("--target") .arg(&t1) .arg("--target") .arg(&t2) .run(); assert!(p.build_dir().join(&t1).join("doc/foo/index.html").is_file()); assert!(p.build_dir().join(&t2).join("doc/foo/index.html").is_file()); } #[cargo_test] fn simple_check() { if cross_compile::disabled() { return; } let t1 = cross_compile::alternate(); let t2 = rustc_host(); let p = project() .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) .file("src/main.rs", "fn main() {}") .build(); p.cargo("check") .arg("--target") .arg(&t1) .arg("--target") .arg(&t2) .run(); } #[cargo_test] fn same_value_twice() { if cross_compile::disabled() { return; } let t = rustc_host(); let p = project() .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build") .arg("--target") .arg(&t) .arg("--target") .arg(&t) .run(); assert!(p.target_bin(t, "foo").is_file()); } #[cargo_test] fn same_value_twice_with_config() { if cross_compile::disabled() { return; } let t = rustc_host(); let p = project() .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) .file("src/main.rs", "fn main() {}") .file( ".cargo/config.toml", &format!( r#" [build] target = ["{t}", "{t}"] "# ), ) .build(); p.cargo("build").run(); assert!(p.target_bin(t, "foo").is_file()); } #[cargo_test] fn works_with_config_in_both_string_or_list() { if cross_compile::disabled() { return; } let t = rustc_host(); let p = project() .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) .file("src/main.rs", "fn main() {}") .file( ".cargo/config.toml", &format!( r#" [build] target = "{t}" "# ), ) .build(); p.cargo("build").run(); assert!(p.target_bin(t, "foo").is_file()); p.cargo("clean").run(); p.change_file( ".cargo/config.toml", &format!( r#" [build] target = ["{t}"] "# ), ); p.cargo("build").run(); assert!(p.target_bin(t, "foo").is_file()); } #[cargo_test] fn works_with_env() { let t = rustc_host(); let p = project() .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) 
.file("src/main.rs", "fn main() {}") .build(); p.cargo("build").env("CARGO_BUILD_TARGET", t).run(); assert!(p.target_bin(t, "foo").is_file()); } cargo-0.66.0/tests/testsuite/net_config.rs000066400000000000000000000032161432416201200205470ustar00rootroot00000000000000//! Tests for network configuration. use cargo_test_support::project; #[cargo_test] fn net_retry_loads_from_config() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] git = "http://127.0.0.1:11/foo/bar" "#, ) .file("src/main.rs", "") .file( ".cargo/config", r#" [net] retry=1 [http] timeout=1 "#, ) .build(); p.cargo("build -v") .with_status(101) .with_stderr_contains( "[WARNING] spurious network error \ (1 tries remaining): [..]", ) .run(); } #[cargo_test] fn net_retry_git_outputs_warning() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] git = "http://127.0.0.1:11/foo/bar" "#, ) .file( ".cargo/config", r#" [http] timeout=1 "#, ) .file("src/main.rs", "") .build(); p.cargo("build -v -j 1") .with_status(101) .with_stderr_contains( "[WARNING] spurious network error \ (2 tries remaining): [..]", ) .with_stderr_contains("[WARNING] spurious network error (1 tries remaining): [..]") .run(); } cargo-0.66.0/tests/testsuite/new.rs000066400000000000000000000347651432416201200172420ustar00rootroot00000000000000//! Tests for the `cargo new` command. use cargo_test_support::cargo_process; use cargo_test_support::paths; use std::env; use std::fs::{self, File}; fn create_default_gitconfig() { // This helps on Windows where libgit2 is very aggressive in attempting to // find a git config file. let gitconfig = paths::home().join(".gitconfig"); File::create(gitconfig).unwrap(); // If we're running this under a user account that has a different default branch set up // then tests that assume the default branch is master will fail. 
We set the default branch // to master explicitly so that tests that rely on this behavior still pass. fs::write( paths::home().join(".gitconfig"), r#" [init] defaultBranch = master "#, ) .unwrap(); } #[cargo_test] fn simple_lib() { cargo_process("new --lib foo --vcs none --edition 2015") .with_stderr("[CREATED] library `foo` package") .run(); assert!(paths::root().join("foo").is_dir()); assert!(paths::root().join("foo/Cargo.toml").is_file()); assert!(paths::root().join("foo/src/lib.rs").is_file()); assert!(!paths::root().join("foo/.gitignore").is_file()); let lib = paths::root().join("foo/src/lib.rs"); let contents = fs::read_to_string(&lib).unwrap(); assert_eq!( contents, r#"pub fn add(left: usize, right: usize) -> usize { left + right } #[cfg(test)] mod tests { use super::*; #[test] fn it_works() { let result = add(2, 2); assert_eq!(result, 4); } } "# ); cargo_process("build").cwd(&paths::root().join("foo")).run(); } #[cargo_test] fn simple_bin() { cargo_process("new --bin foo --edition 2015") .with_stderr("[CREATED] binary (application) `foo` package") .run(); assert!(paths::root().join("foo").is_dir()); assert!(paths::root().join("foo/Cargo.toml").is_file()); assert!(paths::root().join("foo/src/main.rs").is_file()); cargo_process("build").cwd(&paths::root().join("foo")).run(); assert!(paths::root() .join(&format!("foo/target/debug/foo{}", env::consts::EXE_SUFFIX)) .is_file()); } #[cargo_test] fn both_lib_and_bin() { cargo_process("new --lib --bin foo") .with_status(101) .with_stderr("[ERROR] can't specify both lib and binary outputs") .run(); } #[cargo_test] fn simple_git() { cargo_process("new --lib foo --edition 2015").run(); assert!(paths::root().is_dir()); assert!(paths::root().join("foo/Cargo.toml").is_file()); assert!(paths::root().join("foo/src/lib.rs").is_file()); assert!(paths::root().join("foo/.git").is_dir()); assert!(paths::root().join("foo/.gitignore").is_file()); let fp = paths::root().join("foo/.gitignore"); let contents = 
fs::read_to_string(&fp).unwrap(); assert_eq!(contents, "/target\n/Cargo.lock\n",); cargo_process("build").cwd(&paths::root().join("foo")).run(); } #[cargo_test] fn no_argument() { cargo_process("new") .with_status(1) .with_stderr_contains( "\ error: The following required arguments were not provided: ", ) .run(); } #[cargo_test] fn existing() { let dst = paths::root().join("foo"); fs::create_dir(&dst).unwrap(); cargo_process("new foo") .with_status(101) .with_stderr( "[ERROR] destination `[CWD]/foo` already exists\n\n\ Use `cargo init` to initialize the directory", ) .run(); } #[cargo_test] fn invalid_characters() { cargo_process("new foo.rs") .with_status(101) .with_stderr( "\ [ERROR] invalid character `.` in package name: `foo.rs`, [..] If you need a package name to not match the directory name, consider using --name flag. If you need a binary with the name \"foo.rs\", use a valid package name, \ and set the binary name to be different from the package. \ This can be done by setting the binary filename to `src/bin/foo.rs.rs` \ or change the name in Cargo.toml with: [[bin]] name = \"foo.rs\" path = \"src/main.rs\" ", ) .run(); } #[cargo_test] fn reserved_name() { cargo_process("new test") .with_status(101) .with_stderr( "\ [ERROR] the name `test` cannot be used as a package name, it conflicts [..] If you need a package name to not match the directory name, consider using --name flag. If you need a binary with the name \"test\", use a valid package name, \ and set the binary name to be different from the package. \ This can be done by setting the binary filename to `src/bin/test.rs` \ or change the name in Cargo.toml with: [[bin]] name = \"test\" path = \"src/main.rs\" ", ) .run(); } #[cargo_test] fn reserved_binary_name() { cargo_process("new --bin incremental") .with_status(101) .with_stderr( "\ [ERROR] the name `incremental` cannot be used as a package name, it conflicts [..] If you need a package name to not match the directory name, consider using --name flag. 
", ) .run(); cargo_process("new --lib incremental") .with_stderr( "\ [WARNING] the name `incremental` will not support binary executables with that name, \ it conflicts with cargo's build directory names [CREATED] library `incremental` package ", ) .run(); } #[cargo_test] fn keyword_name() { cargo_process("new pub") .with_status(101) .with_stderr( "\ [ERROR] the name `pub` cannot be used as a package name, it is a Rust keyword If you need a package name to not match the directory name, consider using --name flag. If you need a binary with the name \"pub\", use a valid package name, \ and set the binary name to be different from the package. \ This can be done by setting the binary filename to `src/bin/pub.rs` \ or change the name in Cargo.toml with: [[bin]] name = \"pub\" path = \"src/main.rs\" ", ) .run(); } #[cargo_test] fn std_name() { cargo_process("new core") .with_stderr( "\ [WARNING] the name `core` is part of Rust's standard library It is recommended to use a different name to avoid problems. If you need a package name to not match the directory name, consider using --name flag. If you need a binary with the name \"core\", use a valid package name, \ and set the binary name to be different from the package. 
\ This can be done by setting the binary filename to `src/bin/core.rs` \ or change the name in Cargo.toml with: [[bin]] name = \"core\" path = \"src/main.rs\" [CREATED] binary (application) `core` package ", ) .run(); } #[cargo_test] fn git_prefers_command_line() { let root = paths::root(); fs::create_dir(&root.join(".cargo")).unwrap(); fs::write( &root.join(".cargo/config"), r#" [cargo-new] vcs = "none" name = "foo" email = "bar" "#, ) .unwrap(); cargo_process("new foo --vcs git").run(); assert!(paths::root().join("foo/.gitignore").exists()); assert!(!fs::read_to_string(paths::root().join("foo/Cargo.toml")) .unwrap() .contains("authors =")); } #[cargo_test] fn subpackage_no_git() { cargo_process("new foo").run(); assert!(paths::root().join("foo/.git").is_dir()); assert!(paths::root().join("foo/.gitignore").is_file()); let subpackage = paths::root().join("foo").join("components"); fs::create_dir(&subpackage).unwrap(); cargo_process("new foo/components/subcomponent").run(); assert!(!paths::root() .join("foo/components/subcomponent/.git") .is_file()); assert!(!paths::root() .join("foo/components/subcomponent/.gitignore") .is_file()); } #[cargo_test] fn subpackage_git_with_gitignore() { cargo_process("new foo").run(); assert!(paths::root().join("foo/.git").is_dir()); assert!(paths::root().join("foo/.gitignore").is_file()); let gitignore = paths::root().join("foo/.gitignore"); fs::write(gitignore, b"components").unwrap(); let subpackage = paths::root().join("foo/components"); fs::create_dir(&subpackage).unwrap(); cargo_process("new foo/components/subcomponent").run(); assert!(paths::root() .join("foo/components/subcomponent/.git") .is_dir()); assert!(paths::root() .join("foo/components/subcomponent/.gitignore") .is_file()); } #[cargo_test] fn subpackage_git_with_vcs_arg() { cargo_process("new foo").run(); let subpackage = paths::root().join("foo").join("components"); fs::create_dir(&subpackage).unwrap(); cargo_process("new foo/components/subcomponent --vcs git").run(); 
assert!(paths::root() .join("foo/components/subcomponent/.git") .is_dir()); assert!(paths::root() .join("foo/components/subcomponent/.gitignore") .is_file()); } #[cargo_test] fn unknown_flags() { cargo_process("new foo --flag") .with_status(1) .with_stderr_contains( "error: Found argument '--flag' which wasn't expected, or isn't valid in this context", ) .run(); } #[cargo_test] fn explicit_invalid_name_not_suggested() { cargo_process("new --name 10-invalid a") .with_status(101) .with_stderr( "\ [ERROR] the name `10-invalid` cannot be used as a package name, \ the name cannot start with a digit\n\ If you need a binary with the name \"10-invalid\", use a valid package name, \ and set the binary name to be different from the package. \ This can be done by setting the binary filename to `src/bin/10-invalid.rs` \ or change the name in Cargo.toml with: [[bin]] name = \"10-invalid\" path = \"src/main.rs\" ", ) .run(); } #[cargo_test] fn explicit_project_name() { cargo_process("new --lib foo --name bar") .with_stderr("[CREATED] library `bar` package") .run(); } #[cargo_test] fn new_with_edition_2015() { cargo_process("new --edition 2015 foo").run(); let manifest = fs::read_to_string(paths::root().join("foo/Cargo.toml")).unwrap(); assert!(manifest.contains("edition = \"2015\"")); } #[cargo_test] fn new_with_edition_2018() { cargo_process("new --edition 2018 foo").run(); let manifest = fs::read_to_string(paths::root().join("foo/Cargo.toml")).unwrap(); assert!(manifest.contains("edition = \"2018\"")); } #[cargo_test] fn new_default_edition() { cargo_process("new foo").run(); let manifest = fs::read_to_string(paths::root().join("foo/Cargo.toml")).unwrap(); assert!(manifest.contains("edition = \"2021\"")); } #[cargo_test] fn new_with_bad_edition() { cargo_process("new --edition something_else foo") .with_stderr_contains("error: \"something_else\" isn't a valid value[..]") .with_status(1) .run(); } #[cargo_test] fn new_with_reference_link() { cargo_process("new foo").run(); let 
contents = fs::read_to_string(paths::root().join("foo/Cargo.toml")).unwrap(); assert!(contents.contains("# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html")) } #[cargo_test] fn lockfile_constant_during_new() { cargo_process("new foo").run(); cargo_process("build").cwd(&paths::root().join("foo")).run(); let before = fs::read_to_string(paths::root().join("foo/Cargo.lock")).unwrap(); cargo_process("build").cwd(&paths::root().join("foo")).run(); let after = fs::read_to_string(paths::root().join("foo/Cargo.lock")).unwrap(); assert_eq!(before, after); } #[cargo_test] fn restricted_windows_name() { if cfg!(windows) { cargo_process("new nul") .with_status(101) .with_stderr( "\ [ERROR] cannot use name `nul`, it is a reserved Windows filename If you need a package name to not match the directory name, consider using --name flag. ", ) .run(); } else { cargo_process("new nul") .with_stderr( "\ [WARNING] the name `nul` is a reserved Windows filename This package will not work on Windows platforms. [CREATED] binary (application) `nul` package ", ) .run(); } } #[cargo_test] fn non_ascii_name() { cargo_process("new ΠŸΡ€ΠΈΠ²Π΅Ρ‚") .with_stderr( "\ [WARNING] the name `ΠŸΡ€ΠΈΠ²Π΅Ρ‚` contains non-ASCII characters Non-ASCII crate names are not supported by Rust. [CREATED] binary (application) `ΠŸΡ€ΠΈΠ²Π΅Ρ‚` package ", ) .run(); } #[cargo_test] fn non_ascii_name_invalid() { // These are alphanumeric characters, but not Unicode XID. cargo_process("new β’Άβ’·β’Έ") .with_status(101) .with_stderr( "\ [ERROR] invalid character `β’Ά` in package name: `β’Άβ’·β’Έ`, \ the first character must be a Unicode XID start character (most letters or `_`) If you need a package name to not match the directory name, consider using --name flag. If you need a binary with the name \"β’Άβ’·β’Έ\", use a valid package name, \ and set the binary name to be different from the package. 
\ This can be done by setting the binary filename to `src/bin/β’Άβ’·β’Έ.rs` \ or change the name in Cargo.toml with: [[bin]] name = \"β’Άβ’·β’Έ\" path = \"src/main.rs\" ", ) .run(); cargo_process("new aΒΌ") .with_status(101) .with_stderr( "\ [ERROR] invalid character `ΒΌ` in package name: `aΒΌ`, \ characters must be Unicode XID characters (numbers, `-`, `_`, or most letters) If you need a package name to not match the directory name, consider using --name flag. If you need a binary with the name \"aΒΌ\", use a valid package name, \ and set the binary name to be different from the package. \ This can be done by setting the binary filename to `src/bin/aΒΌ.rs` \ or change the name in Cargo.toml with: [[bin]] name = \"aΒΌ\" path = \"src/main.rs\" ", ) .run(); } #[cargo_test] fn git_default_branch() { // Check for init.defaultBranch support. create_default_gitconfig(); cargo_process("new foo").run(); let repo = git2::Repository::open(paths::root().join("foo")).unwrap(); let head = repo.find_reference("HEAD").unwrap(); assert_eq!(head.symbolic_target().unwrap(), "refs/heads/master"); fs::write( paths::home().join(".gitconfig"), r#" [init] defaultBranch = hello "#, ) .unwrap(); cargo_process("new bar").run(); let repo = git2::Repository::open(paths::root().join("bar")).unwrap(); let head = repo.find_reference("HEAD").unwrap(); assert_eq!(head.symbolic_target().unwrap(), "refs/heads/hello"); } cargo-0.66.0/tests/testsuite/offline.rs000066400000000000000000000442341432416201200200630ustar00rootroot00000000000000//! Tests for --offline flag. use cargo_test_support::{basic_manifest, git, main_file, path2url, project, registry::Package}; use std::fs; #[cargo_test] fn offline_unused_target_dep() { // --offline with a target dependency that is not used and not downloaded. 
Package::new("unused_dep", "1.0.0").publish(); Package::new("used_dep", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [dependencies] used_dep = "1.0" [target.'cfg(unused)'.dependencies] unused_dep = "1.0" "#, ) .file("src/lib.rs", "") .build(); // Do a build that downloads only what is necessary. p.cargo("build") .with_stderr_contains("[DOWNLOADED] used_dep [..]") .with_stderr_does_not_contain("[DOWNLOADED] unused_dep [..]") .run(); p.cargo("clean").run(); // Build offline, make sure it works. p.cargo("build --offline").run(); } #[cargo_test] fn offline_missing_optional() { Package::new("opt_dep", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [dependencies] opt_dep = { version = "1.0", optional = true } "#, ) .file("src/lib.rs", "") .build(); // Do a build that downloads only what is necessary. p.cargo("build") .with_stderr_does_not_contain("[DOWNLOADED] opt_dep [..]") .run(); p.cargo("clean").run(); // Build offline, make sure it works. 
p.cargo("build --offline").run(); p.cargo("build --offline --features=opt_dep") .with_stderr( "\ [ERROR] failed to download `opt_dep v1.0.0` Caused by: attempting to make an HTTP request, but --offline was specified ", ) .with_status(101) .run(); } #[cargo_test] fn cargo_compile_path_with_offline() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "") .build(); p.cargo("build --offline").run(); } #[cargo_test] fn cargo_compile_with_downloaded_dependency_with_offline() { Package::new("present_dep", "1.2.3") .file("Cargo.toml", &basic_manifest("present_dep", "1.2.3")) .file("src/lib.rs", "") .publish(); // make package downloaded let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [dependencies] present_dep = "1.2.3" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build").run(); let p2 = project() .at("bar") .file( "Cargo.toml", r#" [project] name = "bar" version = "0.1.0" [dependencies] present_dep = "1.2.3" "#, ) .file("src/lib.rs", "") .build(); p2.cargo("build --offline") .with_stderr( "\ [COMPILING] present_dep v1.2.3 [COMPILING] bar v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", ) .run(); } #[cargo_test] fn cargo_compile_offline_not_try_update() { // When --offline needs to download the registry, provide a reasonable // error hint to run without --offline. 
let p = project() .at("bar") .file( "Cargo.toml", r#" [project] name = "bar" version = "0.1.0" [dependencies] not_cached_dep = "1.2.5" "#, ) .file("src/lib.rs", "") .build(); let msg = "\ [ERROR] no matching package named `not_cached_dep` found location searched: registry `crates-io` required by package `bar v0.1.0 ([..]/bar)` As a reminder, you're using offline mode (--offline) which can sometimes cause \ surprising resolution failures, if this error is too confusing you may wish to \ retry without the offline flag. "; p.cargo("build --offline") .with_status(101) .with_stderr(msg) .run(); // While we're here, also check the config works. p.change_file(".cargo/config", "net.offline = true"); p.cargo("build").with_status(101).with_stderr(msg).run(); } #[cargo_test] fn compile_offline_without_maxvers_cached() { Package::new("present_dep", "1.2.1").publish(); Package::new("present_dep", "1.2.2").publish(); Package::new("present_dep", "1.2.3") .file("Cargo.toml", &basic_manifest("present_dep", "1.2.3")) .file( "src/lib.rs", r#"pub fn get_version()->&'static str {"1.2.3"}"#, ) .publish(); Package::new("present_dep", "1.2.5") .file("Cargo.toml", &basic_manifest("present_dep", "1.2.5")) .file("src/lib.rs", r#"pub fn get_version(){"1.2.5"}"#) .publish(); // make package cached let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [dependencies] present_dep = "=1.2.3" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build").run(); let p2 = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [dependencies] present_dep = "1.2" "#, ) .file( "src/main.rs", "\ extern crate present_dep; fn main(){ println!(\"{}\", present_dep::get_version()); }", ) .build(); p2.cargo("run --offline") .with_stderr( "\ [COMPILING] present_dep v1.2.3 [COMPILING] foo v0.1.0 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
Running `[..]`", ) .with_stdout("1.2.3") .run(); } #[cargo_test] fn cargo_compile_forbird_git_httpsrepo_offline() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["chabapok@example.com"] [dependencies.dep1] git = 'https://github.com/some_user/dep1.git' "#, ) .file("src/main.rs", "") .build(); p.cargo("build --offline").with_status(101).with_stderr("\ [ERROR] failed to get `dep1` as a dependency of package `foo v0.5.0 [..]` Caused by: failed to load source for dependency `dep1` Caused by: Unable to update https://github.com/some_user/dep1.git Caused by: can't checkout from 'https://github.com/some_user/dep1.git': you are in the offline mode (--offline)").run(); } #[cargo_test] fn compile_offline_while_transitive_dep_not_cached() { let baz = Package::new("baz", "1.0.0"); let baz_path = baz.archive_dst(); baz.publish(); let baz_content = fs::read(&baz_path).unwrap(); // Truncate the file to simulate a download failure. fs::write(&baz_path, &[]).unwrap(); Package::new("bar", "0.1.0").dep("baz", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" [dependencies] bar = "0.1.0" "#, ) .file("src/main.rs", "fn main(){}") .build(); // simulate download bar, but fail to download baz p.cargo("build") .with_status(101) .with_stderr_contains("[..]failed to verify the checksum of `baz[..]") .run(); // Restore the file contents. 
fs::write(&baz_path, &baz_content).unwrap(); p.cargo("build --offline") .with_status(101) .with_stderr( "\ [ERROR] failed to download `bar v0.1.0` Caused by: attempting to make an HTTP request, but --offline was specified ", ) .run(); } #[cargo_test] fn update_offline_not_cached() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("update --offline") .with_status(101) .with_stderr( "\ [ERROR] no matching package named `bar` found location searched: registry `[..]` required by package `foo v0.0.1 ([..]/foo)` As a reminder, you're using offline mode (--offline) which can sometimes cause \ surprising resolution failures, if this error is too confusing you may wish to \ retry without the offline flag.", ) .run(); } #[cargo_test] fn cargo_compile_offline_with_cached_git_dep() { let git_project = git::new("dep1", |project| { project .file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) .file( "src/lib.rs", r#" pub static COOL_STR:&str = "cached git repo rev1"; "#, ) }); let repo = git2::Repository::open(&git_project.root()).unwrap(); let rev1 = repo.revparse_single("HEAD").unwrap().id(); // Commit the changes and make sure we trigger a recompile git_project.change_file( "src/lib.rs", r#"pub static COOL_STR:&str = "cached git repo rev2";"#, ); git::add(&repo); let rev2 = git::commit(&repo); // cache to registry rev1 and rev2 let prj = project() .at("cache_git_dep") .file( "Cargo.toml", &format!( r#" [project] name = "cache_git_dep" version = "0.5.0" [dependencies.dep1] git = '{}' rev = "{}" "#, git_project.url(), rev1 ), ) .file("src/main.rs", "fn main(){}") .build(); prj.cargo("build").run(); prj.change_file( "Cargo.toml", &format!( r#" [project] name = "cache_git_dep" version = "0.5.0" [dependencies.dep1] git = '{}' rev = "{}" "#, git_project.url(), rev2 ), ); prj.cargo("build").run(); let p = project() .file( "Cargo.toml", &format!( r#" 
[project] name = "foo" version = "0.5.0" [dependencies.dep1] git = '{}' "#, git_project.url() ), ) .file( "src/main.rs", &main_file(r#""hello from {}", dep1::COOL_STR"#, &["dep1"]), ) .build(); let git_root = git_project.root(); p.cargo("build --offline") .with_stderr(format!( "\ [COMPILING] dep1 v0.5.0 ({}#[..]) [COMPILING] foo v0.5.0 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", path2url(git_root), )) .run(); assert!(p.bin("foo").is_file()); p.process(&p.bin("foo")) .with_stdout("hello from cached git repo rev2\n") .run(); p.change_file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.5.0" [dependencies.dep1] git = '{}' rev = "{}" "#, git_project.url(), rev1 ), ); p.cargo("build --offline").run(); p.process(&p.bin("foo")) .with_stdout("hello from cached git repo rev1\n") .run(); } #[cargo_test] fn offline_resolve_optional_fail() { // Example where resolve fails offline. // // This happens if at least 1 version of an optional dependency is // available, but none of them satisfy the requirements. The current logic // that handles this is `RegistryIndex::query_inner`, and it doesn't know // if the package being queried is an optional one. This is not ideal, it // would be best if it just ignored optional (unselected) dependencies. Package::new("dep", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] dep = { version = "1.0", optional = true } "#, ) .file("src/lib.rs", "") .build(); p.cargo("fetch").run(); // Change dep to 2.0. 
p.change_file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] dep = { version = "2.0", optional = true } "#, ); p.cargo("build --offline") .with_status(101) .with_stderr( "\ [ERROR] failed to select a version for the requirement `dep = \"^2.0\"` candidate versions found which didn't match: 1.0.0 location searched: `[..]` index (which is replacing registry `crates-io`) required by package `foo v0.1.0 ([..]/foo)` perhaps a crate was updated and forgotten to be re-vendored? As a reminder, you're using offline mode (--offline) which can sometimes cause \ surprising resolution failures, if this error is too confusing you may wish to \ retry without the offline flag. ", ) .run(); } #[cargo_test] fn offline_with_all_patched() { // Offline works if everything is patched. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] dep = "1.0" [patch.crates-io] dep = {path = "dep"} "#, ) .file("src/lib.rs", "pub fn f() { dep::foo(); }") .file("dep/Cargo.toml", &basic_manifest("dep", "1.0.0")) .file("dep/src/lib.rs", "pub fn foo() {}") .build(); p.cargo("check --offline").run(); } #[cargo_test] fn update_offline_cached() { // Cache a few versions to update against let p = project().file("src/lib.rs", "").build(); let versions = ["1.2.3", "1.2.5", "1.2.9"]; for vers in versions.iter() { Package::new("present_dep", vers) .file("Cargo.toml", &basic_manifest("present_dep", vers)) .file( "src/lib.rs", format!(r#"pub fn get_version()->&'static str {{ "{}" }}"#, vers).as_str(), ) .publish(); // make package cached p.change_file( "Cargo.toml", format!( r#" [project] name = "foo" version = "0.1.0" [dependencies] present_dep = "={}" "#, vers ) .as_str(), ); p.cargo("build").run(); } let p2 = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [dependencies] present_dep = "1.2" "#, ) .file( "src/main.rs", "\ extern crate present_dep; fn main(){ println!(\"{}\", present_dep::get_version()); }", ) 
.build(); p2.cargo("build --offline") .with_stderr( "\ [COMPILING] present_dep v1.2.9 [COMPILING] foo v0.1.0 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p2.rename_run("foo", "with_1_2_9") .with_stdout("1.2.9") .run(); // updates happen without updating the index p2.cargo("update -p present_dep --precise 1.2.3 --offline") .with_status(0) .with_stderr( "\ [UPDATING] present_dep v1.2.9 -> v1.2.3 ", ) .run(); p2.cargo("build --offline") .with_stderr( "\ [COMPILING] present_dep v1.2.3 [COMPILING] foo v0.1.0 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p2.rename_run("foo", "with_1_2_3") .with_stdout("1.2.3") .run(); // Offline update should only print package details and not index updating p2.cargo("update --offline") .with_status(0) .with_stderr( "\ [UPDATING] present_dep v1.2.3 -> v1.2.9 ", ) .run(); // No v1.2.8 loaded into the cache so expect failure. p2.cargo("update -p present_dep --precise 1.2.8 --offline") .with_status(101) .with_stderr( "\ [ERROR] no matching package named `present_dep` found location searched: registry `[..]` required by package `foo v0.1.0 ([..]/foo)` As a reminder, you're using offline mode (--offline) which can sometimes cause \ surprising resolution failures, if this error is too confusing you may wish to \ retry without the offline flag. ", ) .run(); } #[cargo_test] fn offline_and_frozen_and_no_lock() { let p = project().file("src/lib.rs", "").build(); p.cargo("build --frozen --offline") .with_status(101) .with_stderr("\ error: the lock file [ROOT]/foo/Cargo.lock needs to be updated but --frozen was passed to prevent this If you want to try to generate the lock file without accessing the network, \ remove the --frozen flag and use --offline instead. 
") .run(); } #[cargo_test] fn offline_and_locked_and_no_frozen() { let p = project().file("src/lib.rs", "").build(); p.cargo("build --locked --offline") .with_status(101) .with_stderr("\ error: the lock file [ROOT]/foo/Cargo.lock needs to be updated but --locked was passed to prevent this If you want to try to generate the lock file without accessing the network, \ remove the --locked flag and use --offline instead. ") .run(); } cargo-0.66.0/tests/testsuite/old_cargos.rs000066400000000000000000000560071432416201200205560ustar00rootroot00000000000000//! Tests for checking behavior of old cargos. //! //! These tests are ignored because it is intended to be run on a developer //! system with a bunch of toolchains installed. This requires `rustup` to be //! installed. It will iterate over installed toolchains, and run some tests //! over each one, producing a report at the end. As of this writing, I have //! tested 1.0 to 1.51. Run this with: //! //! ```console //! cargo test --test testsuite -- old_cargos --nocapture --ignored //! ``` use cargo::CargoResult; use cargo_test_support::paths::CargoPathExt; use cargo_test_support::registry::{self, Dependency, Package}; use cargo_test_support::{cargo_exe, execs, paths, process, project, rustc_host}; use cargo_util::{ProcessBuilder, ProcessError}; use semver::Version; use std::fs; fn tc_process(cmd: &str, toolchain: &str) -> ProcessBuilder { let mut p = if toolchain == "this" { if cmd == "cargo" { process(&cargo_exe()) } else { process(cmd) } } else { let mut cmd = process(cmd); cmd.arg(format!("+{}", toolchain)); cmd }; // Reset PATH since `process` modifies it to remove rustup. p.env("PATH", std::env::var_os("PATH").unwrap()); p } /// Returns a sorted list of all toolchains. /// /// The returned value includes the parsed version, and the rustup toolchain /// name as a string. 
fn collect_all_toolchains() -> Vec<(Version, String)> { let rustc_version = |tc| { let mut cmd = tc_process("rustc", tc); cmd.arg("-V"); let output = cmd.exec_with_output().expect("rustc installed"); let version = std::str::from_utf8(&output.stdout).unwrap(); let parts: Vec<_> = version.split_whitespace().collect(); assert_eq!(parts[0], "rustc"); assert!(parts[1].starts_with("1.")); Version::parse(parts[1]).expect("valid version") }; // Provide a way to override the list. if let Ok(tcs) = std::env::var("OLD_CARGO") { return tcs .split(',') .map(|tc| (rustc_version(tc), tc.to_string())) .collect(); } let host = rustc_host(); // I tend to have lots of toolchains installed, but I don't want to test // all of them (like dated nightlies, or toolchains for non-host targets). let valid_names = &[ format!("stable-{}", host), format!("beta-{}", host), format!("nightly-{}", host), ]; let output = ProcessBuilder::new("rustup") .args(&["toolchain", "list"]) .exec_with_output() .expect("rustup should be installed"); let stdout = std::str::from_utf8(&output.stdout).unwrap(); let mut toolchains: Vec<_> = stdout .lines() .map(|line| { // Some lines say things like (default), just get the version. line.split_whitespace().next().expect("non-empty line") }) .filter(|line| { line.ends_with(&host) && (line.starts_with("1.") || valid_names.iter().any(|name| name == line)) }) .map(|line| (rustc_version(line), line.to_string())) .collect(); toolchains.sort_by(|a, b| a.0.cmp(&b.0)); toolchains } /// Returns whether the default toolchain is the stable version. fn default_toolchain_is_stable() -> bool { let default = tc_process("rustc", "this").arg("-V").exec_with_output(); let stable = tc_process("rustc", "stable").arg("-V").exec_with_output(); match (default, stable) { (Ok(d), Ok(s)) => d.stdout == s.stdout, _ => false, } } // This is a test for exercising the behavior of older versions of cargo with // the new feature syntax. 
// // The test involves a few dependencies with different feature requirements: // // * `bar` 1.0.0 is the base version that does not use the new syntax. // * `bar` 1.0.1 has a feature with the new syntax, but the feature is unused. // The optional dependency `new-baz-dep` should not be activated. // * `bar` 1.0.2 has a dependency on `baz` that *requires* the new feature // syntax. #[ignore = "must be run manually, requires old cargo installations"] #[cargo_test] fn new_features() { let registry = registry::init(); if std::process::Command::new("rustup").output().is_err() { panic!("old_cargos requires rustup to be installed"); } Package::new("new-baz-dep", "1.0.0").publish(); Package::new("baz", "1.0.0").publish(); let baz101_cksum = Package::new("baz", "1.0.1") .add_dep(Dependency::new("new-baz-dep", "1.0").optional(true)) .feature("new-feat", &["dep:new-baz-dep"]) .publish(); let bar100_cksum = Package::new("bar", "1.0.0") .add_dep(Dependency::new("baz", "1.0").optional(true)) .feature("feat", &["baz"]) .publish(); let bar101_cksum = Package::new("bar", "1.0.1") .add_dep(Dependency::new("baz", "1.0").optional(true)) .feature("feat", &["dep:baz"]) .publish(); let bar102_cksum = Package::new("bar", "1.0.2") .add_dep(Dependency::new("baz", "1.0").enable_features(&["new-feat"])) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "1.0" "#, ) .file("src/lib.rs", "") .build(); let lock_bar_to = |toolchain_version: &Version, bar_version| { let lock = if toolchain_version < &Version::new(1, 12, 0) { let url = registry.index_url(); match bar_version { 100 => format!( r#" [root] name = "foo" version = "0.1.0" dependencies = [ "bar 1.0.0 (registry+{url})", ] [[package]] name = "bar" version = "1.0.0" source = "registry+{url}" "#, url = url ), 101 => format!( r#" [root] name = "foo" version = "0.1.0" dependencies = [ "bar 1.0.1 (registry+{url})", ] [[package]] name = "bar" version = "1.0.1" source = 
"registry+{url}" "#, url = url ), 102 => format!( r#" [root] name = "foo" version = "0.1.0" dependencies = [ "bar 1.0.2 (registry+{url})", ] [[package]] name = "bar" version = "1.0.2" source = "registry+{url}" dependencies = [ "baz 1.0.1 (registry+{url})", ] [[package]] name = "baz" version = "1.0.1" source = "registry+{url}" "#, url = url ), _ => panic!("unexpected version"), } } else { match bar_version { 100 => format!( r#" [root] name = "foo" version = "0.1.0" dependencies = [ "bar 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "bar" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" [metadata] "checksum bar 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "{}" "#, bar100_cksum ), 101 => format!( r#" [root] name = "foo" version = "0.1.0" dependencies = [ "bar 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "bar" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" [metadata] "checksum bar 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "{}" "#, bar101_cksum ), 102 => format!( r#" [root] name = "foo" version = "0.1.0" dependencies = [ "bar 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "bar" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "baz 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "baz" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" [metadata] "checksum bar 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "{bar102_cksum}" "checksum baz 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "{baz101_cksum}" "#, bar102_cksum = bar102_cksum, baz101_cksum = baz101_cksum ), _ => panic!("unexpected version"), } }; p.change_file("Cargo.lock", &lock); }; let toolchains = collect_all_toolchains(); let 
config_path = paths::home().join(".cargo/config"); let lock_path = p.root().join("Cargo.lock"); struct ToolchainBehavior { bar: Option, baz: Option, new_baz_dep: Option, } // Collect errors to print at the end. One entry per toolchain, a list of // strings to print. let mut unexpected_results: Vec> = Vec::new(); for (version, toolchain) in &toolchains { let mut tc_result = Vec::new(); // Write a config appropriate for this version. if version < &Version::new(1, 12, 0) { fs::write( &config_path, format!( r#" [registry] index = "{}" "#, registry.index_url() ), ) .unwrap(); } else { fs::write( &config_path, format!( " [source.crates-io] registry = 'https://wut' # only needed by 1.12 replace-with = 'dummy-registry' [source.dummy-registry] registry = '{}' ", registry.index_url() ), ) .unwrap(); } // Fetches the version of a package in the lock file. let pkg_version = |pkg| -> Option { let output = tc_process("cargo", toolchain) .args(&["pkgid", pkg]) .cwd(p.root()) .exec_with_output() .ok()?; let stdout = std::str::from_utf8(&output.stdout).unwrap(); let version = stdout .trim() .rsplitn(2, ':') .next() .expect("version after colon"); Some(Version::parse(version).expect("parseable version")) }; // Runs `cargo build` and returns the versions selected in the lock. let run_cargo = || -> CargoResult { match tc_process("cargo", toolchain) .args(&["build", "--verbose"]) .cwd(p.root()) .exec_with_output() { Ok(_output) => { eprintln!("{} ok", toolchain); let bar = pkg_version("bar"); let baz = pkg_version("baz"); let new_baz_dep = pkg_version("new-baz-dep"); Ok(ToolchainBehavior { bar, baz, new_baz_dep, }) } Err(e) => { eprintln!("{} err {}", toolchain, e); Err(e) } } }; macro_rules! 
check_lock { ($tc_result:ident, $pkg:expr, $which:expr, $actual:expr, None) => { check_lock!(= $tc_result, $pkg, $which, $actual, None); }; ($tc_result:ident, $pkg:expr, $which:expr, $actual:expr, $expected:expr) => { check_lock!(= $tc_result, $pkg, $which, $actual, Some(Version::parse($expected).unwrap())); }; (= $tc_result:ident, $pkg:expr, $which:expr, $actual:expr, $expected:expr) => { let exp: Option = $expected; if $actual != $expected { $tc_result.push(format!( "{} for {} saw {:?} but expected {:?}", $which, $pkg, $actual, exp )); } }; } let check_err_contains = |tc_result: &mut Vec<_>, err: anyhow::Error, contents| { if let Some(ProcessError { stderr: Some(stderr), .. }) = err.downcast_ref::() { let stderr = std::str::from_utf8(stderr).unwrap(); if !stderr.contains(contents) { tc_result.push(format!( "{} expected to see error contents:\n{}\nbut saw:\n{}", toolchain, contents, stderr )); } } else { panic!("{} unexpected error {}", toolchain, err); } }; // Unlocked behavior. let which = "unlocked"; lock_path.rm_rf(); p.build_dir().rm_rf(); match run_cargo() { Ok(behavior) => { if version < &Version::new(1, 51, 0) { check_lock!(tc_result, "bar", which, behavior.bar, "1.0.2"); check_lock!(tc_result, "baz", which, behavior.baz, "1.0.1"); check_lock!(tc_result, "new-baz-dep", which, behavior.new_baz_dep, None); } else if version >= &Version::new(1, 51, 0) && version <= &Version::new(1, 59, 0) { check_lock!(tc_result, "bar", which, behavior.bar, "1.0.0"); check_lock!(tc_result, "baz", which, behavior.baz, None); check_lock!(tc_result, "new-baz-dep", which, behavior.new_baz_dep, None); } // Starting with 1.60, namespaced-features has been stabilized. 
else { check_lock!(tc_result, "bar", which, behavior.bar, "1.0.2"); check_lock!(tc_result, "baz", which, behavior.baz, "1.0.1"); check_lock!( tc_result, "new-baz-dep", which, behavior.new_baz_dep, "1.0.0" ); } } Err(e) => { tc_result.push(format!("unlocked build failed: {}", e)); } } let which = "locked bar 1.0.0"; lock_bar_to(version, 100); match run_cargo() { Ok(behavior) => { check_lock!(tc_result, "bar", which, behavior.bar, "1.0.0"); check_lock!(tc_result, "baz", which, behavior.baz, None); check_lock!(tc_result, "new-baz-dep", which, behavior.new_baz_dep, None); } Err(e) => { tc_result.push(format!("bar 1.0.0 locked build failed: {}", e)); } } let which = "locked bar 1.0.1"; lock_bar_to(version, 101); match run_cargo() { Ok(behavior) => { check_lock!(tc_result, "bar", which, behavior.bar, "1.0.1"); check_lock!(tc_result, "baz", which, behavior.baz, None); check_lock!(tc_result, "new-baz-dep", which, behavior.new_baz_dep, None); } Err(e) => { // When version >= 1.51 and <= 1.59, // 1.0.1 can't be used without -Znamespaced-features // It gets filtered out of the index. check_err_contains( &mut tc_result, e, "candidate versions found which didn't match: 1.0.2, 1.0.0", ); } } let which = "locked bar 1.0.2"; lock_bar_to(version, 102); match run_cargo() { Ok(behavior) => { if version <= &Version::new(1, 59, 0) { check_lock!(tc_result, "bar", which, behavior.bar, "1.0.2"); check_lock!(tc_result, "baz", which, behavior.baz, "1.0.1"); check_lock!(tc_result, "new-baz-dep", which, behavior.new_baz_dep, None); } // Starting with 1.60, namespaced-features has been stabilized. 
else { check_lock!(tc_result, "bar", which, behavior.bar, "1.0.2"); check_lock!(tc_result, "baz", which, behavior.baz, "1.0.1"); check_lock!( tc_result, "new-baz-dep", which, behavior.new_baz_dep, "1.0.0" ); } } Err(e) => { // When version >= 1.51 and <= 1.59, // baz can't lock to 1.0.1, it requires -Znamespaced-features check_err_contains( &mut tc_result, e, "candidate versions found which didn't match: 1.0.0", ); } } unexpected_results.push(tc_result); } // Generate a report. let mut has_err = false; for ((tc_vers, tc_name), errs) in toolchains.iter().zip(unexpected_results) { if errs.is_empty() { continue; } eprintln!("error: toolchain {} (version {}):", tc_name, tc_vers); for err in errs { eprintln!(" {}", err); } has_err = true; } if has_err { panic!("at least one toolchain did not run as expected"); } } #[cargo_test] #[ignore = "must be run manually, requires old cargo installations"] fn index_cache_rebuild() { // Checks that the index cache gets rebuilt. // // 1.48 will not cache entries with features with the same name as a // dependency. If the cache does not get rebuilt, then running with // `-Znamespaced-features` would prevent the new cargo from seeing those // entries. The index cache version was changed to prevent this from // happening, and switching between versions should work correctly // (although it will thrash the cash, that's better than not working // correctly. Package::new("baz", "1.0.0").publish(); Package::new("bar", "1.0.0").publish(); Package::new("bar", "1.0.1") .add_dep(Dependency::new("baz", "1.0").optional(true)) .feature("baz", &["dep:baz"]) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "1.0" "#, ) .file("src/lib.rs", "") .build(); // This version of Cargo errors on index entries that have overlapping // feature names, so 1.0.1 will be missing. 
execs() .with_process_builder(tc_process("cargo", "1.48.0")) .arg("check") .cwd(p.root()) .with_stderr( "\ [UPDATING] [..] [DOWNLOADING] crates ... [DOWNLOADED] bar v1.0.0 [..] [CHECKING] bar v1.0.0 [CHECKING] foo v0.1.0 [..] [FINISHED] [..] ", ) .run(); fs::remove_file(p.root().join("Cargo.lock")).unwrap(); // This should rebuild the cache and use 1.0.1. p.cargo("check") .with_stderr( "\ [UPDATING] [..] [DOWNLOADING] crates ... [DOWNLOADED] bar v1.0.1 [..] [CHECKING] bar v1.0.1 [CHECKING] foo v0.1.0 [..] [FINISHED] [..] ", ) .run(); fs::remove_file(p.root().join("Cargo.lock")).unwrap(); // Verify 1.48 can still resolve, and is at 1.0.0. execs() .with_process_builder(tc_process("cargo", "1.48.0")) .arg("tree") .cwd(p.root()) .with_stdout( "\ foo v0.1.0 [..] └── bar v1.0.0 ", ) .run(); } #[cargo_test] #[ignore = "must be run manually, requires old cargo installations"] fn avoids_split_debuginfo_collision() { // Test needs two different toolchains. // If the default toolchain is stable, then it won't work. if default_toolchain_is_stable() { return; } // Checks for a bug where .o files were being incorrectly shared between // different toolchains using incremental and split-debuginfo on macOS. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [profile.dev] split-debuginfo = "unpacked" "#, ) .file("src/main.rs", "fn main() {}") .build(); execs() .with_process_builder(tc_process("cargo", "stable")) .arg("build") .env("CARGO_INCREMENTAL", "1") .cwd(p.root()) .with_stderr( "\ [COMPILING] foo v0.1.0 [..] [FINISHED] [..] ", ) .run(); p.cargo("build") .env("CARGO_INCREMENTAL", "1") .with_stderr( "\ [COMPILING] foo v0.1.0 [..] [FINISHED] [..] ", ) .run(); execs() .with_process_builder(tc_process("cargo", "stable")) .arg("build") .env("CARGO_INCREMENTAL", "1") .cwd(p.root()) .with_stderr( "\ [FINISHED] [..] ", ) .run(); } cargo-0.66.0/tests/testsuite/out_dir.rs000066400000000000000000000175131432416201200201060ustar00rootroot00000000000000//! 
Tests for --out-dir flag. use cargo_test_support::sleep_ms; use cargo_test_support::{basic_manifest, project}; use std::env; use std::fs; use std::path::Path; #[cargo_test] fn binary_with_debug() { let p = project() .file("src/main.rs", r#"fn main() { println!("Hello, World!") }"#) .build(); p.cargo("build -Z unstable-options --out-dir out") .masquerade_as_nightly_cargo(&["out-dir"]) .enable_mac_dsym() .run(); check_dir_contents( &p.root().join("out"), &["foo"], &["foo", "foo.dSYM"], &["foo.exe", "foo.pdb"], &["foo.exe"], ); } #[cargo_test] fn static_library_with_debug() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [lib] crate-type = ["staticlib"] "#, ) .file( "src/lib.rs", r#" #[no_mangle] pub extern "C" fn foo() { println!("Hello, World!") } "#, ) .build(); p.cargo("build -Z unstable-options --out-dir out") .masquerade_as_nightly_cargo(&["out-dir"]) .run(); check_dir_contents( &p.root().join("out"), &["libfoo.a"], &["libfoo.a"], &["foo.lib"], &["libfoo.a"], ); } #[cargo_test] fn dynamic_library_with_debug() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [lib] crate-type = ["cdylib"] "#, ) .file( "src/lib.rs", r#" #[no_mangle] pub extern "C" fn foo() { println!("Hello, World!") } "#, ) .build(); p.cargo("build -Z unstable-options --out-dir out") .masquerade_as_nightly_cargo(&["out-dir"]) .enable_mac_dsym() .run(); check_dir_contents( &p.root().join("out"), &["libfoo.so"], &["libfoo.dylib", "libfoo.dylib.dSYM"], &["foo.dll", "foo.dll.exp", "foo.dll.lib", "foo.pdb"], &["foo.dll", "libfoo.dll.a"], ); } #[cargo_test] fn rlib_with_debug() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [lib] crate-type = ["rlib"] "#, ) .file( "src/lib.rs", r#" pub fn foo() { println!("Hello, World!") } "#, ) .build(); p.cargo("build -Z unstable-options --out-dir out") .masquerade_as_nightly_cargo(&["out-dir"]) .run(); 
check_dir_contents( &p.root().join("out"), &["libfoo.rlib"], &["libfoo.rlib"], &["libfoo.rlib"], &["libfoo.rlib"], ); } #[cargo_test] fn include_only_the_binary_from_the_current_package() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [workspace] [dependencies] utils = { path = "./utils" } "#, ) .file("src/lib.rs", "extern crate utils;") .file( "src/main.rs", r#" extern crate foo; extern crate utils; fn main() { println!("Hello, World!") } "#, ) .file("utils/Cargo.toml", &basic_manifest("utils", "0.0.1")) .file("utils/src/lib.rs", "") .build(); p.cargo("build -Z unstable-options --bin foo --out-dir out") .masquerade_as_nightly_cargo(&["out-dir"]) .enable_mac_dsym() .run(); check_dir_contents( &p.root().join("out"), &["foo"], &["foo", "foo.dSYM"], &["foo.exe", "foo.pdb"], &["foo.exe"], ); } #[cargo_test] fn out_dir_is_a_file() { let p = project() .file("src/main.rs", r#"fn main() { println!("Hello, World!") }"#) .file("out", "") .build(); p.cargo("build -Z unstable-options --out-dir out") .masquerade_as_nightly_cargo(&["out-dir"]) .with_status(101) .with_stderr_contains("[ERROR] failed to create directory [..]") .run(); } #[cargo_test] fn replaces_artifacts() { let p = project() .file("src/main.rs", r#"fn main() { println!("foo") }"#) .build(); p.cargo("build -Z unstable-options --out-dir out") .masquerade_as_nightly_cargo(&["out-dir"]) .run(); p.process( &p.root() .join(&format!("out/foo{}", env::consts::EXE_SUFFIX)), ) .with_stdout("foo") .run(); sleep_ms(1000); p.change_file("src/main.rs", r#"fn main() { println!("bar") }"#); p.cargo("build -Z unstable-options --out-dir out") .masquerade_as_nightly_cargo(&["out-dir"]) .run(); p.process( &p.root() .join(&format!("out/foo{}", env::consts::EXE_SUFFIX)), ) .with_stdout("bar") .run(); } #[cargo_test] fn avoid_build_scripts() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a", "b"] "#, ) .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) 
.file("a/src/main.rs", "fn main() {}") .file("a/build.rs", r#"fn main() { println!("hello-build-a"); }"#) .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) .file("b/src/main.rs", "fn main() {}") .file("b/build.rs", r#"fn main() { println!("hello-build-b"); }"#) .build(); p.cargo("build -Z unstable-options --out-dir out -vv") .masquerade_as_nightly_cargo(&["out-dir"]) .enable_mac_dsym() .with_stdout_contains("[a 0.0.1] hello-build-a") .with_stdout_contains("[b 0.0.1] hello-build-b") .run(); check_dir_contents( &p.root().join("out"), &["a", "b"], &["a", "a.dSYM", "b", "b.dSYM"], &["a.exe", "a.pdb", "b.exe", "b.pdb"], &["a.exe", "b.exe"], ); } #[cargo_test] fn cargo_build_out_dir() { let p = project() .file("src/main.rs", r#"fn main() { println!("Hello, World!") }"#) .file( ".cargo/config", r#" [build] out-dir = "out" "#, ) .build(); p.cargo("build -Z unstable-options") .masquerade_as_nightly_cargo(&["out-dir"]) .enable_mac_dsym() .run(); check_dir_contents( &p.root().join("out"), &["foo"], &["foo", "foo.dSYM"], &["foo.exe", "foo.pdb"], &["foo.exe"], ); } fn check_dir_contents( out_dir: &Path, expected_linux: &[&str], expected_mac: &[&str], expected_win_msvc: &[&str], expected_win_gnu: &[&str], ) { let expected = if cfg!(target_os = "windows") { if cfg!(target_env = "msvc") { expected_win_msvc } else { expected_win_gnu } } else if cfg!(target_os = "macos") { expected_mac } else { expected_linux }; let actual = list_dir(out_dir); let mut expected = expected.iter().map(|s| s.to_string()).collect::>(); expected.sort_unstable(); assert_eq!(actual, expected); } fn list_dir(dir: &Path) -> Vec { let mut res = Vec::new(); for entry in fs::read_dir(dir).unwrap() { let entry = entry.unwrap(); res.push(entry.file_name().into_string().unwrap()); } res.sort_unstable(); res } cargo-0.66.0/tests/testsuite/owner.rs000066400000000000000000000053731432416201200175740ustar00rootroot00000000000000//! Tests for the `cargo owner` command. 
use std::fs; use cargo_test_support::paths::CargoPathExt; use cargo_test_support::project; use cargo_test_support::registry::{self, api_path}; fn setup(name: &str, content: Option<&str>) { let dir = api_path().join(format!("api/v1/crates/{}", name)); dir.mkdir_p(); if let Some(body) = content { fs::write(dir.join("owners"), body).unwrap(); } } #[cargo_test] fn simple_list() { registry::init(); let content = r#"{ "users": [ { "id": 70, "login": "github:rust-lang:core", "name": "Core" }, { "id": 123, "login": "octocat" } ] }"#; setup("foo", Some(content)); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("owner -l --token sekrit") .with_stdout( "\ github:rust-lang:core (Core) octocat ", ) .run(); } #[cargo_test] fn simple_add() { registry::init(); setup("foo", None); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("owner -a username --token sekrit") .with_status(101) .with_stderr( " Updating `[..]` index error: failed to invite owners to crate `foo` on registry at file://[..] Caused by: EOF while parsing a value at line 1 column 0", ) .run(); } #[cargo_test] fn simple_remove() { registry::init(); setup("foo", None); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("owner -r username --token sekrit") .with_status(101) .with_stderr( " Updating `[..]` index Owner removing [\"username\"] from crate foo error: failed to remove owners from crate `foo` on registry at file://[..] 
Caused by: EOF while parsing a value at line 1 column 0", ) .run(); } cargo-0.66.0/tests/testsuite/package.rs000066400000000000000000001745431432416201200200430ustar00rootroot00000000000000//! Tests for the `cargo package` command. use cargo_test_support::paths::CargoPathExt; use cargo_test_support::publish::validate_crate_contents; use cargo_test_support::registry::{self, Package}; use cargo_test_support::{ basic_manifest, cargo_process, git, path2url, paths, project, symlink_supported, t, }; use flate2::read::GzDecoder; use std::fs::{self, read_to_string, File}; use std::path::Path; use tar::Archive; #[cargo_test] fn simple() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] exclude = ["*.txt"] license = "MIT" description = "foo" "#, ) .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .file("src/bar.txt", "") // should be ignored when packaging .build(); p.cargo("package") .with_stderr( "\ [WARNING] manifest has no documentation[..] See [..] [PACKAGING] foo v0.0.1 ([CWD]) [VERIFYING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD][..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); assert!(p.root().join("target/package/foo-0.0.1.crate").is_file()); p.cargo("package -l") .with_stdout( "\ Cargo.lock Cargo.toml Cargo.toml.orig src/main.rs ", ) .run(); p.cargo("package").with_stdout("").run(); let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); validate_crate_contents( f, "foo-0.0.1.crate", &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], &[], ); } #[cargo_test] fn metadata_warning() { let p = project().file("src/main.rs", "fn main() {}").build(); p.cargo("package") .with_stderr( "\ warning: manifest has no description, license, license-file, documentation, \ homepage or repository. See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. 
[PACKAGING] foo v0.0.1 ([CWD]) [VERIFYING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD][..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("package") .with_stderr( "\ warning: manifest has no description, documentation, homepage or repository. See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. [PACKAGING] foo v0.0.1 ([CWD]) [VERIFYING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD][..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" repository = "bar" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("package") .with_stderr( "\ [PACKAGING] foo v0.0.1 ([CWD]) [VERIFYING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD][..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn package_verbose() { let root = paths::root().join("all"); let repo = git::repo(&root) .file("Cargo.toml", &basic_manifest("foo", "0.0.1")) .file("src/main.rs", "fn main() {}") .file("a/a/Cargo.toml", &basic_manifest("a", "0.0.1")) .file("a/a/src/lib.rs", "") .build(); cargo_process("build").cwd(repo.root()).run(); println!("package main repo"); cargo_process("package -v --no-verify") .cwd(repo.root()) .with_stderr( "\ [WARNING] manifest has no description[..] See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. 
[PACKAGING] foo v0.0.1 ([..]) [ARCHIVING] .cargo_vcs_info.json [ARCHIVING] Cargo.lock [ARCHIVING] Cargo.toml [ARCHIVING] Cargo.toml.orig [ARCHIVING] src/main.rs ", ) .run(); let f = File::open(&repo.root().join("target/package/foo-0.0.1.crate")).unwrap(); let vcs_contents = format!( r#"{{ "git": {{ "sha1": "{}" }}, "path_in_vcs": "" }} "#, repo.revparse_head() ); validate_crate_contents( f, "foo-0.0.1.crate", &[ "Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs", ".cargo_vcs_info.json", ], &[(".cargo_vcs_info.json", &vcs_contents)], ); println!("package sub-repo"); cargo_process("package -v --no-verify") .cwd(repo.root().join("a/a")) .with_stderr( "\ [WARNING] manifest has no description[..] See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. [PACKAGING] a v0.0.1 ([..]) [ARCHIVING] .cargo_vcs_info.json [ARCHIVING] Cargo.toml [ARCHIVING] Cargo.toml.orig [ARCHIVING] src/lib.rs ", ) .run(); let f = File::open(&repo.root().join("a/a/target/package/a-0.0.1.crate")).unwrap(); let vcs_contents = format!( r#"{{ "git": {{ "sha1": "{}" }}, "path_in_vcs": "a/a" }} "#, repo.revparse_head() ); validate_crate_contents( f, "a-0.0.1.crate", &[ "Cargo.toml", "Cargo.toml.orig", "src/lib.rs", ".cargo_vcs_info.json", ], &[(".cargo_vcs_info.json", &vcs_contents)], ); } #[cargo_test] fn package_verification() { let p = project().file("src/main.rs", "fn main() {}").build(); p.cargo("build").run(); p.cargo("package") .with_stderr( "\ [WARNING] manifest has no description[..] See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. [PACKAGING] foo v0.0.1 ([CWD]) [VERIFYING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD][..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn vcs_file_collision() { let p = project().build(); let _ = git::repo(&paths::root().join("foo")) .file( "Cargo.toml", r#" [project] name = "foo" description = "foo" version = "0.0.1" authors = [] license = "MIT" documentation = "foo" homepage = "foo" repository = "foo" exclude = ["*.no-existe"] "#, ) .file( "src/main.rs", r#" fn main() {} "#, ) .file(".cargo_vcs_info.json", "foo") .build(); p.cargo("package") .arg("--no-verify") .with_status(101) .with_stderr( "\ [ERROR] invalid inclusion of reserved file name .cargo_vcs_info.json \ in package source ", ) .run(); } #[cargo_test] fn orig_file_collision() { let p = project().build(); let _ = git::repo(&paths::root().join("foo")) .file( "Cargo.toml", r#" [project] name = "foo" description = "foo" version = "0.0.1" authors = [] license = "MIT" documentation = "foo" homepage = "foo" repository = "foo" exclude = ["*.no-existe"] "#, ) .file( "src/main.rs", r#" fn main() {} "#, ) .file("Cargo.toml.orig", "oops") .build(); p.cargo("package") .arg("--no-verify") .with_status(101) .with_stderr( "\ [ERROR] invalid inclusion of reserved file name Cargo.toml.orig \ in package source ", ) .run(); } #[cargo_test] fn path_dependency_no_version() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" [dependencies.bar] path = "bar" "#, ) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "") .build(); p.cargo("package") .with_status(101) .with_stderr( "\ [WARNING] manifest has no documentation, homepage or repository. See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. [ERROR] all dependencies must have a version specified when packaging. 
dependency `bar` does not specify a version\n\ Note: The packaged dependency will use the version from crates.io, the `path` specification will be removed from the dependency declaration. ", ) .run(); } #[cargo_test] fn git_dependency_no_version() { registry::init(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" [dependencies.foo] git = "git://path/to/nowhere" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("package") .with_status(101) .with_stderr( "\ [WARNING] manifest has no documentation, homepage or repository. See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. [ERROR] all dependencies must have a version specified when packaging. dependency `foo` does not specify a version Note: The packaged dependency will use the version from crates.io, the `git` specification will be removed from the dependency declaration. ", ) .run(); } #[cargo_test] fn exclude() { let root = paths::root().join("exclude"); let repo = git::repo(&root) .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] exclude = [ "*.txt", # file in root "file_root_1", # NO_CHANGE (ignored) "/file_root_2", # CHANGING (packaged -> ignored) "file_root_3/", # NO_CHANGE (packaged) "file_root_4/*", # NO_CHANGE (packaged) "file_root_5/**", # NO_CHANGE (packaged) # file in sub-dir "file_deep_1", # CHANGING (packaged -> ignored) "/file_deep_2", # NO_CHANGE (packaged) "file_deep_3/", # NO_CHANGE (packaged) "file_deep_4/*", # NO_CHANGE (packaged) "file_deep_5/**", # NO_CHANGE (packaged) # dir in root "dir_root_1", # CHANGING (packaged -> ignored) "/dir_root_2", # CHANGING (packaged -> ignored) "dir_root_3/", # CHANGING (packaged -> ignored) "dir_root_4/*", # NO_CHANGE (ignored) "dir_root_5/**", # NO_CHANGE (ignored) # dir in sub-dir "dir_deep_1", # CHANGING (packaged -> ignored) "/dir_deep_2", # NO_CHANGE "dir_deep_3/", # CHANGING (packaged -> 
ignored) "dir_deep_4/*", # CHANGING (packaged -> ignored) "dir_deep_5/**", # CHANGING (packaged -> ignored) ] "#, ) .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .file("bar.txt", "") .file("src/bar.txt", "") // File in root. .file("file_root_1", "") .file("file_root_2", "") .file("file_root_3", "") .file("file_root_4", "") .file("file_root_5", "") // File in sub-dir. .file("some_dir/file_deep_1", "") .file("some_dir/file_deep_2", "") .file("some_dir/file_deep_3", "") .file("some_dir/file_deep_4", "") .file("some_dir/file_deep_5", "") // Dir in root. .file("dir_root_1/some_dir/file", "") .file("dir_root_2/some_dir/file", "") .file("dir_root_3/some_dir/file", "") .file("dir_root_4/some_dir/file", "") .file("dir_root_5/some_dir/file", "") // Dir in sub-dir. .file("some_dir/dir_deep_1/some_dir/file", "") .file("some_dir/dir_deep_2/some_dir/file", "") .file("some_dir/dir_deep_3/some_dir/file", "") .file("some_dir/dir_deep_4/some_dir/file", "") .file("some_dir/dir_deep_5/some_dir/file", "") .build(); cargo_process("package --no-verify -v") .cwd(repo.root()) .with_stdout("") .with_stderr( "\ [WARNING] manifest has no description[..] See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. 
[PACKAGING] foo v0.0.1 ([..]) [ARCHIVING] .cargo_vcs_info.json [ARCHIVING] Cargo.lock [ARCHIVING] Cargo.toml [ARCHIVING] Cargo.toml.orig [ARCHIVING] file_root_3 [ARCHIVING] file_root_4 [ARCHIVING] file_root_5 [ARCHIVING] some_dir/dir_deep_2/some_dir/file [ARCHIVING] some_dir/dir_deep_4/some_dir/file [ARCHIVING] some_dir/dir_deep_5/some_dir/file [ARCHIVING] some_dir/file_deep_2 [ARCHIVING] some_dir/file_deep_3 [ARCHIVING] some_dir/file_deep_4 [ARCHIVING] some_dir/file_deep_5 [ARCHIVING] src/main.rs ", ) .run(); assert!(repo.root().join("target/package/foo-0.0.1.crate").is_file()); cargo_process("package -l") .cwd(repo.root()) .with_stdout( "\ .cargo_vcs_info.json Cargo.lock Cargo.toml Cargo.toml.orig file_root_3 file_root_4 file_root_5 some_dir/dir_deep_2/some_dir/file some_dir/dir_deep_4/some_dir/file some_dir/dir_deep_5/some_dir/file some_dir/file_deep_2 some_dir/file_deep_3 some_dir/file_deep_4 some_dir/file_deep_5 src/main.rs ", ) .run(); } #[cargo_test] fn include() { let root = paths::root().join("include"); let repo = git::repo(&root) .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] exclude = ["*.txt"] include = ["foo.txt", "**/*.rs", "Cargo.toml", ".dotfile"] "#, ) .file("foo.txt", "") .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .file(".dotfile", "") // Should be ignored when packaging. .file("src/bar.txt", "") .build(); cargo_process("package --no-verify -v") .cwd(repo.root()) .with_stderr( "\ [WARNING] manifest has no description[..] See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. 
[WARNING] both package.include and package.exclude are specified; the exclude list will be ignored [PACKAGING] foo v0.0.1 ([..]) [ARCHIVING] .cargo_vcs_info.json [ARCHIVING] .dotfile [ARCHIVING] Cargo.lock [ARCHIVING] Cargo.toml [ARCHIVING] Cargo.toml.orig [ARCHIVING] foo.txt [ARCHIVING] src/main.rs ", ) .run(); } #[cargo_test] fn package_lib_with_bin() { let p = project() .file("src/main.rs", "extern crate foo; fn main() {}") .file("src/lib.rs", "") .build(); p.cargo("package -v").run(); } #[cargo_test] fn package_git_submodule() { let project = git::new("foo", |project| { project .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = ["foo@example.com"] license = "MIT" description = "foo" repository = "foo" "#, ) .file("src/lib.rs", "pub fn foo() {}") }); let library = git::new("bar", |library| { library.no_manifest().file("Makefile", "all:") }); let repository = git2::Repository::open(&project.root()).unwrap(); let url = path2url(library.root()).to_string(); git::add_submodule(&repository, &url, Path::new("bar")); git::commit(&repository); let repository = git2::Repository::open(&project.root().join("bar")).unwrap(); repository .reset( &repository.revparse_single("HEAD").unwrap(), git2::ResetType::Hard, None, ) .unwrap(); project .cargo("package --no-verify -v") .with_stderr_contains("[ARCHIVING] bar/Makefile") .run(); } #[cargo_test] /// Tests if a symlink to a git submodule is properly handled. /// /// This test requires you to be able to make symlinks. /// For windows, this may require you to enable developer mode. 
fn package_symlink_to_submodule() { #[cfg(unix)] use std::os::unix::fs::symlink; #[cfg(windows)] use std::os::windows::fs::symlink_dir as symlink; if !symlink_supported() { return; } let project = git::new("foo", |project| { project.file("src/lib.rs", "pub fn foo() {}") }); let library = git::new("submodule", |library| { library.no_manifest().file("Makefile", "all:") }); let repository = git2::Repository::open(&project.root()).unwrap(); let url = path2url(library.root()).to_string(); git::add_submodule(&repository, &url, Path::new("submodule")); t!(symlink( &project.root().join("submodule"), &project.root().join("submodule-link") )); git::add(&repository); git::commit(&repository); let repository = git2::Repository::open(&project.root().join("submodule")).unwrap(); repository .reset( &repository.revparse_single("HEAD").unwrap(), git2::ResetType::Hard, None, ) .unwrap(); project .cargo("package --no-verify -v") .with_stderr_contains("[ARCHIVING] submodule/Makefile") .run(); } #[cargo_test] fn no_duplicates_from_modified_tracked_files() { let p = git::new("all", |p| p.file("src/main.rs", "fn main() {}")); p.change_file("src/main.rs", r#"fn main() { println!("A change!"); }"#); p.cargo("build").run(); p.cargo("package --list --allow-dirty") .with_stdout( "\ Cargo.lock Cargo.toml Cargo.toml.orig src/main.rs ", ) .run(); } #[cargo_test] fn ignore_nested() { let cargo_toml = r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" "#; let main_rs = r#" fn main() { println!("hello"); } "#; let p = project() .file("Cargo.toml", cargo_toml) .file("src/main.rs", main_rs) // If a project happens to contain a copy of itself, we should // ignore it. .file("a_dir/foo/Cargo.toml", cargo_toml) .file("a_dir/foo/src/main.rs", main_rs) .build(); p.cargo("package") .with_stderr( "\ [WARNING] manifest has no documentation[..] See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. 
[PACKAGING] foo v0.0.1 ([CWD]) [VERIFYING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD][..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); assert!(p.root().join("target/package/foo-0.0.1.crate").is_file()); p.cargo("package -l") .with_stdout( "\ Cargo.lock Cargo.toml Cargo.toml.orig src/main.rs ", ) .run(); p.cargo("package").with_stdout("").run(); let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); validate_crate_contents( f, "foo-0.0.1.crate", &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], &[], ); } // Windows doesn't allow these characters in filenames. #[cfg(unix)] #[cargo_test] fn package_weird_characters() { let p = project() .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .file("src/:foo", "") .build(); p.cargo("package") .with_status(101) .with_stderr( "\ warning: [..] See [..] [ERROR] cannot package a filename with a special character `:`: src/:foo ", ) .run(); } #[cargo_test] fn repackage_on_source_change() { let p = project() .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .build(); p.cargo("package").run(); // Add another source file p.change_file("src/foo.rs", r#"fn main() { println!("foo"); }"#); // Check that cargo rebuilds the tarball p.cargo("package") .with_stderr( "\ [WARNING] [..] See [..] [PACKAGING] foo v0.0.1 ([CWD]) [VERIFYING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD][..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); // Check that the tarball contains the added file let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); validate_crate_contents( f, "foo-0.0.1.crate", &[ "Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs", "src/foo.rs", ], &[], ); } #[cargo_test] /// Tests if a broken symlink is properly handled when packaging. /// /// This test requires you to be able to make symlinks. /// For windows, this may require you to enable developer mode. 
fn broken_symlink() { #[cfg(unix)] use std::os::unix::fs::symlink; #[cfg(windows)] use std::os::windows::fs::symlink_dir as symlink; if !symlink_supported() { return; } let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = 'foo' documentation = 'foo' homepage = 'foo' repository = 'foo' "#, ) .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .build(); t!(symlink("nowhere", &p.root().join("src/foo.rs"))); p.cargo("package -v") .with_status(101) .with_stderr_contains( "\ [ERROR] failed to prepare local package for uploading Caused by: failed to open for archiving: `[..]foo.rs` Caused by: [..] ", ) .run(); } #[cargo_test] /// Tests if a broken but excluded symlink is ignored. /// See issue rust-lang/cargo#10917 /// /// This test requires you to be able to make symlinks. /// For windows, this may require you to enable developer mode. fn broken_but_excluded_symlink() { #[cfg(unix)] use std::os::unix::fs::symlink; #[cfg(windows)] use std::os::windows::fs::symlink_dir as symlink; if !symlink_supported() { return; } let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = 'foo' documentation = 'foo' homepage = 'foo' repository = 'foo' exclude = ["src/foo.rs"] "#, ) .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .build(); t!(symlink("nowhere", &p.root().join("src/foo.rs"))); p.cargo("package -v --list") // `src/foo.rs` is excluded. .with_stdout( "\ Cargo.lock Cargo.toml Cargo.toml.orig src/main.rs ", ) .run(); } #[cargo_test] #[cfg(not(windows))] // https://github.com/libgit2/libgit2/issues/6250 /// Test that /dir and /dir/ matches symlinks to directories. 
fn gitignore_symlink_dir() { if !symlink_supported() { return; } let (p, _repo) = git::new_repo("foo", |p| { p.file("src/main.rs", r#"fn main() { println!("hello"); }"#) .symlink_dir("src", "src1") .symlink_dir("src", "src2") .symlink_dir("src", "src3") .symlink_dir("src", "src4") .file(".gitignore", "/src1\n/src2/\nsrc3\nsrc4/") }); p.cargo("package -l --no-metadata") .with_stderr("") .with_stdout( "\ .cargo_vcs_info.json .gitignore Cargo.lock Cargo.toml Cargo.toml.orig src/main.rs ", ) .run(); } #[cargo_test] #[cfg(not(windows))] // https://github.com/libgit2/libgit2/issues/6250 /// Test that /dir and /dir/ matches symlinks to directories in dirty working directory. fn gitignore_symlink_dir_dirty() { if !symlink_supported() { return; } let (p, _repo) = git::new_repo("foo", |p| { p.file("src/main.rs", r#"fn main() { println!("hello"); }"#) .file(".gitignore", "/src1\n/src2/\nsrc3\nsrc4/") }); p.symlink("src", "src1"); p.symlink("src", "src2"); p.symlink("src", "src3"); p.symlink("src", "src4"); p.cargo("package -l --no-metadata") .with_stderr("") .with_stdout( "\ .cargo_vcs_info.json .gitignore Cargo.lock Cargo.toml Cargo.toml.orig src/main.rs ", ) .run(); p.cargo("package -l --no-metadata --allow-dirty") .with_stderr("") .with_stdout( "\ .gitignore Cargo.lock Cargo.toml Cargo.toml.orig src/main.rs ", ) .run(); } #[cargo_test] /// Tests if a symlink to a directory is properly included. /// /// This test requires you to be able to make symlinks. /// For windows, this may require you to enable developer mode. fn package_symlink_to_dir() { if !symlink_supported() { return; } project() .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .file("bla/Makefile", "all:") .symlink_dir("bla", "foo") .build() .cargo("package -v") .with_stderr_contains("[ARCHIVING] foo/Makefile") .run(); } #[cargo_test] /// Tests if a symlink to ancestor causes filesystem loop error. /// /// This test requires you to be able to make symlinks. 
/// For windows, this may require you to enable developer mode. fn filesystem_loop() { if !symlink_supported() { return; } project() .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .symlink_dir("a/b", "a/b/c/d/foo") .build() .cargo("package -v") .with_stderr_contains( "[WARNING] File system loop found: [..]/a/b/c/d/foo points to an ancestor [..]/a/b", ) .run(); } #[cargo_test] fn do_not_package_if_repository_is_dirty() { let p = project().build(); // Create a Git repository containing a minimal Rust project. let _ = git::repo(&paths::root().join("foo")) .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" license = "MIT" description = "foo" documentation = "foo" homepage = "foo" repository = "foo" "#, ) .file("src/main.rs", "fn main() {}") .build(); // Modify Cargo.toml without committing the change. p.change_file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" license = "MIT" description = "foo" documentation = "foo" homepage = "foo" repository = "foo" # change "#, ); p.cargo("package") .with_status(101) .with_stderr( "\ error: 1 files in the working directory contain changes that were not yet \ committed into git: Cargo.toml to proceed despite this and include the uncommitted changes, pass the `--allow-dirty` flag ", ) .run(); } #[cargo_test] fn dirty_ignored() { // Cargo warns about an ignored file that will be published. let (p, repo) = git::new_repo("foo", |p| { p.file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" description = "foo" license = "foo" documentation = "foo" include = ["src", "build"] "#, ) .file("src/lib.rs", "") .file(".gitignore", "build") }); // Example of adding a file that is confusingly ignored by an overzealous // gitignore rule. 
p.change_file("src/build/mod.rs", ""); p.cargo("package --list") .with_status(101) .with_stderr( "\ error: 1 files in the working directory contain changes that were not yet committed into git: src/build/mod.rs to proceed despite this and include the uncommitted changes, pass the `--allow-dirty` flag ", ) .run(); // Add the ignored file and make sure it is included. let mut index = t!(repo.index()); t!(index.add_path(Path::new("src/build/mod.rs"))); t!(index.write()); git::commit(&repo); p.cargo("package --list") .with_stderr("") .with_stdout( "\ .cargo_vcs_info.json Cargo.toml Cargo.toml.orig src/build/mod.rs src/lib.rs ", ) .run(); } #[cargo_test] fn generated_manifest() { let registry = registry::alt_init(); Package::new("abc", "1.0.0").publish(); Package::new("def", "1.0.0").alternative(true).publish(); Package::new("ghi", "1.0.0").publish(); Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] exclude = ["*.txt"] license = "MIT" description = "foo" [project.metadata] foo = 'bar' [workspace] [dependencies] bar = { path = "bar", version = "0.1" } def = { version = "1.0", registry = "alternative" } ghi = "1.0" abc = "1.0" "#, ) .file("src/main.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "") .build(); p.cargo("package --no-verify").run(); let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); let rewritten_toml = format!( r#"{} [package] name = "foo" version = "0.0.1" authors = [] exclude = ["*.txt"] description = "foo" license = "MIT" [package.metadata] foo = "bar" [dependencies.abc] version = "1.0" [dependencies.bar] version = "0.1" [dependencies.def] version = "1.0" registry-index = "{}" [dependencies.ghi] version = "1.0" "#, cargo::core::package::MANIFEST_PREAMBLE, registry.index_url() ); validate_crate_contents( f, "foo-0.0.1.crate", &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], 
&[("Cargo.toml", &rewritten_toml)], ); } #[cargo_test] fn ignore_workspace_specifier() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [workspace] [dependencies] bar = { path = "bar", version = "0.1" } "#, ) .file("src/main.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] workspace = ".." "#, ) .file("bar/src/lib.rs", "") .build(); p.cargo("package --no-verify").cwd("bar").run(); let f = File::open(&p.root().join("target/package/bar-0.1.0.crate")).unwrap(); let rewritten_toml = format!( r#"{} [package] name = "bar" version = "0.1.0" authors = [] "#, cargo::core::package::MANIFEST_PREAMBLE ); validate_crate_contents( f, "bar-0.1.0.crate", &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"], &[("Cargo.toml", &rewritten_toml)], ); } #[cargo_test] fn package_two_kinds_of_deps() { Package::new("other", "1.0.0").publish(); Package::new("other1", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] other = "1.0" other1 = { version = "1.0" } "#, ) .file("src/main.rs", "") .build(); p.cargo("package --no-verify").run(); } #[cargo_test] fn test_edition() { let p = project() .file( "Cargo.toml", r#" cargo-features = ["edition"] [package] name = "foo" version = "0.0.1" authors = [] edition = "2018" "#, ) .file("src/lib.rs", r#" "#) .build(); p.cargo("build -v") .with_stderr_contains( "\ [COMPILING] foo v0.0.1 ([..]) [RUNNING] `rustc [..]--edition=2018 [..] 
", ) .run(); } #[cargo_test] fn edition_with_metadata() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] edition = "2018" [package.metadata.docs.rs] features = ["foobar"] "#, ) .file("src/lib.rs", "") .build(); p.cargo("package").run(); } #[cargo_test] fn test_edition_malformed() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] edition = "chicken" "#, ) .file("src/lib.rs", r#" "#) .build(); p.cargo("build -v") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[..]` Caused by: failed to parse the `edition` key Caused by: supported edition values are `2015`, `2018`, or `2021`, but `chicken` is unknown " .to_string(), ) .run(); } #[cargo_test] fn test_edition_from_the_future() { let p = project() .file( "Cargo.toml", r#"[package] edition = "2038" name = "foo" version = "99.99.99" authors = [] "#, ) .file("src/main.rs", r#""#) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[..]` Caused by: failed to parse the `edition` key Caused by: this version of Cargo is older than the `2038` edition, and only supports `2015`, `2018`, and `2021` editions. " .to_string(), ) .run(); } #[cargo_test] fn do_not_package_if_src_was_modified() { let p = project() .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .file("dir/foo.txt", "") .file("bar.txt", "") .file( "build.rs", r#" use std::fs; fn main() { fs::write("src/generated.txt", "Hello, world of generated files." 
).expect("failed to create file"); fs::remove_file("dir/foo.txt").expect("failed to remove file"); fs::remove_dir("dir").expect("failed to remove dir"); fs::write("bar.txt", "updated content").expect("failed to update"); fs::create_dir("new-dir").expect("failed to create dir"); } "#, ) .build(); p.cargo("package") .with_status(101) .with_stderr_contains( "\ error: failed to verify package tarball Caused by: Source directory was modified by build.rs during cargo publish. \ Build scripts should not modify anything outside of OUT_DIR. Changed: [CWD]/target/package/foo-0.0.1/bar.txt Added: [CWD]/target/package/foo-0.0.1/new-dir [CWD]/target/package/foo-0.0.1/src/generated.txt Removed: [CWD]/target/package/foo-0.0.1/dir [CWD]/target/package/foo-0.0.1/dir/foo.txt To proceed despite this, pass the `--no-verify` flag.", ) .run(); p.cargo("package --no-verify").run(); } #[cargo_test] fn package_with_select_features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" [features] required = [] optional = [] "#, ) .file( "src/main.rs", "#[cfg(not(feature = \"required\"))] compile_error!(\"This crate requires `required` feature!\"); fn main() {}", ) .build(); p.cargo("package --features required").run(); } #[cargo_test] fn package_with_all_features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" [features] required = [] optional = [] "#, ) .file( "src/main.rs", "#[cfg(not(feature = \"required\"))] compile_error!(\"This crate requires `required` feature!\"); fn main() {}", ) .build(); p.cargo("package --all-features").run(); } #[cargo_test] fn package_no_default_features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" [features] default = ["required"] required = [] "#, ) .file( "src/main.rs", "#[cfg(not(feature = 
\"required\"))] compile_error!(\"This crate requires `required` feature!\"); fn main() {}", ) .build(); p.cargo("package --no-default-features") .with_stderr_contains("error: This crate requires `required` feature!") .with_status(101) .run(); } #[cargo_test] fn include_cargo_toml_implicit() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" include = ["src/lib.rs"] "#, ) .file("src/lib.rs", "") .build(); p.cargo("package --list") .with_stdout("Cargo.toml\nCargo.toml.orig\nsrc/lib.rs\n") .run(); } fn include_exclude_test(include: &str, exclude: &str, files: &[&str], expected: &str) { let mut pb = project().file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.1.0" authors = [] license = "MIT" description = "foo" documentation = "foo" homepage = "foo" repository = "foo" include = {} exclude = {} "#, include, exclude ), ); for file in files { pb = pb.file(file, ""); } let p = pb.build(); p.cargo("package --list") .with_stderr("") .with_stdout(expected) .run(); p.root().rm_rf(); } #[cargo_test] fn package_include_ignore_only() { // Test with a gitignore pattern that fails to parse with glob. // This is a somewhat nonsense pattern, but is an example of something git // allows and glob does not. 
assert!(glob::Pattern::new("src/abc**").is_err()); include_exclude_test( r#"["Cargo.toml", "src/abc**", "src/lib.rs"]"#, "[]", &["src/lib.rs", "src/abc1.rs", "src/abc2.rs", "src/abc/mod.rs"], "Cargo.toml\n\ Cargo.toml.orig\n\ src/abc/mod.rs\n\ src/abc1.rs\n\ src/abc2.rs\n\ src/lib.rs\n\ ", ) } #[cargo_test] fn gitignore_patterns() { include_exclude_test( r#"["Cargo.toml", "foo"]"#, // include "[]", &["src/lib.rs", "foo", "a/foo", "a/b/foo", "x/foo/y", "bar"], "Cargo.toml\n\ Cargo.toml.orig\n\ a/b/foo\n\ a/foo\n\ foo\n\ x/foo/y\n\ ", ); include_exclude_test( r#"["Cargo.toml", "/foo"]"#, // include "[]", &["src/lib.rs", "foo", "a/foo", "a/b/foo", "x/foo/y", "bar"], "Cargo.toml\n\ Cargo.toml.orig\n\ foo\n\ ", ); include_exclude_test( "[]", r#"["foo/"]"#, // exclude &["src/lib.rs", "foo", "a/foo", "x/foo/y", "bar"], "Cargo.toml\n\ Cargo.toml.orig\n\ a/foo\n\ bar\n\ foo\n\ src/lib.rs\n\ ", ); include_exclude_test( "[]", r#"["*.txt", "[ab]", "[x-z]"]"#, // exclude &[ "src/lib.rs", "foo.txt", "bar/foo.txt", "other", "a", "b", "c", "x", "y", "z", ], "Cargo.toml\n\ Cargo.toml.orig\n\ c\n\ other\n\ src/lib.rs\n\ ", ); include_exclude_test( r#"["Cargo.toml", "**/foo/bar"]"#, // include "[]", &["src/lib.rs", "a/foo/bar", "foo", "bar"], "Cargo.toml\n\ Cargo.toml.orig\n\ a/foo/bar\n\ ", ); include_exclude_test( r#"["Cargo.toml", "foo/**"]"#, // include "[]", &["src/lib.rs", "a/foo/bar", "foo/x/y/z"], "Cargo.toml\n\ Cargo.toml.orig\n\ foo/x/y/z\n\ ", ); include_exclude_test( r#"["Cargo.toml", "a/**/b"]"#, // include "[]", &["src/lib.rs", "a/b", "a/x/b", "a/x/y/b"], "Cargo.toml\n\ Cargo.toml.orig\n\ a/b\n\ a/x/b\n\ a/x/y/b\n\ ", ); } #[cargo_test] fn gitignore_negate() { include_exclude_test( r#"["Cargo.toml", "*.rs", "!foo.rs", "\\!important"]"#, // include "[]", &["src/lib.rs", "foo.rs", "!important"], "!important\n\ Cargo.toml\n\ Cargo.toml.orig\n\ src/lib.rs\n\ ", ); // NOTE: This is unusual compared to git. 
Git treats `src/` as a // short-circuit which means rules like `!src/foo.rs` would never run. // However, because Cargo only works by iterating over *files*, it doesn't // short-circuit. include_exclude_test( r#"["Cargo.toml", "src/", "!src/foo.rs"]"#, // include "[]", &["src/lib.rs", "src/foo.rs"], "Cargo.toml\n\ Cargo.toml.orig\n\ src/lib.rs\n\ ", ); include_exclude_test( r#"["Cargo.toml", "src/*.rs", "!foo.rs"]"#, // include "[]", &["src/lib.rs", "foo.rs", "src/foo.rs", "src/bar/foo.rs"], "Cargo.toml\n\ Cargo.toml.orig\n\ src/lib.rs\n\ ", ); include_exclude_test( "[]", r#"["*.rs", "!foo.rs", "\\!important"]"#, // exclude &["src/lib.rs", "foo.rs", "!important"], "Cargo.toml\n\ Cargo.toml.orig\n\ foo.rs\n\ ", ); } #[cargo_test] fn exclude_dot_files_and_directories_by_default() { include_exclude_test( "[]", "[]", &["src/lib.rs", ".dotfile", ".dotdir/file"], "Cargo.toml\n\ Cargo.toml.orig\n\ src/lib.rs\n\ ", ); include_exclude_test( r#"["Cargo.toml", "src/lib.rs", ".dotfile", ".dotdir/file"]"#, "[]", &["src/lib.rs", ".dotfile", ".dotdir/file"], ".dotdir/file\n\ .dotfile\n\ Cargo.toml\n\ Cargo.toml.orig\n\ src/lib.rs\n\ ", ); } #[cargo_test] fn invalid_license_file_path() { // Test warning when license-file points to a non-existent file. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.0.0" license-file = "does-not-exist" description = "foo" homepage = "foo" "#, ) .file("src/lib.rs", "") .build(); p.cargo("package --no-verify") .with_stderr( "\ [WARNING] license-file `does-not-exist` does not appear to exist (relative to `[..]/foo`). Please update the license-file setting in the manifest at `[..]/foo/Cargo.toml` This may become a hard error in the future. [PACKAGING] foo v1.0.0 ([..]/foo) ", ) .run(); } #[cargo_test] fn license_file_implicit_include() { // license-file should be automatically included even if not listed. 
let p = git::new("foo", |p| { p.file( "Cargo.toml", r#" [package] name = "foo" version = "1.0.0" license-file = "subdir/LICENSE" description = "foo" homepage = "foo" include = ["src"] "#, ) .file("src/lib.rs", "") .file("subdir/LICENSE", "license text") }); p.cargo("package --list") .with_stdout( "\ .cargo_vcs_info.json Cargo.toml Cargo.toml.orig src/lib.rs subdir/LICENSE ", ) .with_stderr("") .run(); p.cargo("package --no-verify -v") .with_stderr( "\ [PACKAGING] foo v1.0.0 [..] [ARCHIVING] .cargo_vcs_info.json [ARCHIVING] Cargo.toml [ARCHIVING] Cargo.toml.orig [ARCHIVING] src/lib.rs [ARCHIVING] subdir/LICENSE ", ) .run(); let f = File::open(&p.root().join("target/package/foo-1.0.0.crate")).unwrap(); validate_crate_contents( f, "foo-1.0.0.crate", &[ ".cargo_vcs_info.json", "Cargo.toml", "Cargo.toml.orig", "subdir/LICENSE", "src/lib.rs", ], &[("subdir/LICENSE", "license text")], ); } #[cargo_test] fn relative_license_included() { // license-file path outside of package will copy into root. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.0.0" license-file = "../LICENSE" description = "foo" homepage = "foo" "#, ) .file("src/lib.rs", "") .file("../LICENSE", "license text") .build(); p.cargo("package --list") .with_stdout( "\ Cargo.toml Cargo.toml.orig LICENSE src/lib.rs ", ) .with_stderr("") .run(); p.cargo("package") .with_stderr( "\ [PACKAGING] foo v1.0.0 [..] [VERIFYING] foo v1.0.0 [..] [COMPILING] foo v1.0.0 [..] [FINISHED] [..] 
", ) .run(); let f = File::open(&p.root().join("target/package/foo-1.0.0.crate")).unwrap(); validate_crate_contents( f, "foo-1.0.0.crate", &["Cargo.toml", "Cargo.toml.orig", "LICENSE", "src/lib.rs"], &[("LICENSE", "license text")], ); let manifest = std::fs::read_to_string(p.root().join("target/package/foo-1.0.0/Cargo.toml")).unwrap(); assert!(manifest.contains("license-file = \"LICENSE\"")); let orig = std::fs::read_to_string(p.root().join("target/package/foo-1.0.0/Cargo.toml.orig")).unwrap(); assert!(orig.contains("license-file = \"../LICENSE\"")); } #[cargo_test] fn relative_license_include_collision() { // Can't copy a relative license-file if there is a file with that name already. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.0.0" license-file = "../LICENSE" description = "foo" homepage = "foo" "#, ) .file("src/lib.rs", "") .file("../LICENSE", "outer license") .file("LICENSE", "inner license") .build(); p.cargo("package --list") .with_stdout( "\ Cargo.toml Cargo.toml.orig LICENSE src/lib.rs ", ) .with_stderr("[WARNING] license-file `../LICENSE` appears to be [..]") .run(); p.cargo("package") .with_stderr( "\ [WARNING] license-file `../LICENSE` appears to be [..] [PACKAGING] foo v1.0.0 [..] [VERIFYING] foo v1.0.0 [..] [COMPILING] foo v1.0.0 [..] [FINISHED] [..] ", ) .run(); let f = File::open(&p.root().join("target/package/foo-1.0.0.crate")).unwrap(); validate_crate_contents( f, "foo-1.0.0.crate", &["Cargo.toml", "Cargo.toml.orig", "LICENSE", "src/lib.rs"], &[("LICENSE", "inner license")], ); let manifest = read_to_string(p.root().join("target/package/foo-1.0.0/Cargo.toml")).unwrap(); assert!(manifest.contains("license-file = \"LICENSE\"")); let orig = read_to_string(p.root().join("target/package/foo-1.0.0/Cargo.toml.orig")).unwrap(); assert!(orig.contains("license-file = \"../LICENSE\"")); } #[cargo_test] #[cfg(not(windows))] // Don't want to create invalid files on Windows. 
fn package_restricted_windows() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" license = "MIT" description = "foo" homepage = "foo" "#, ) .file("src/lib.rs", "pub mod con;\npub mod aux;") .file("src/con.rs", "pub fn f() {}") .file("src/aux/mod.rs", "pub fn f() {}") .build(); p.cargo("package") // use unordered here because the order of the warning is different on each platform. .with_stderr_unordered( "\ [WARNING] file src/aux/mod.rs is a reserved Windows filename, it will not work on Windows platforms [WARNING] file src/con.rs is a reserved Windows filename, it will not work on Windows platforms [PACKAGING] foo [..] [VERIFYING] foo [..] [COMPILING] foo [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn finds_git_in_parent() { // Test where `Cargo.toml` is not in the root of the git repo. let repo_path = paths::root().join("repo"); fs::create_dir(&repo_path).unwrap(); let p = project() .at("repo/foo") .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("src/lib.rs", "") .build(); let repo = git::init(&repo_path); git::add(&repo); git::commit(&repo); p.change_file("ignoreme", ""); p.change_file("ignoreme2", ""); p.cargo("package --list --allow-dirty") .with_stdout( "\ Cargo.toml Cargo.toml.orig ignoreme ignoreme2 src/lib.rs ", ) .run(); p.change_file(".gitignore", "ignoreme"); p.cargo("package --list --allow-dirty") .with_stdout( "\ .gitignore Cargo.toml Cargo.toml.orig ignoreme2 src/lib.rs ", ) .run(); fs::write(repo_path.join(".gitignore"), "ignoreme2").unwrap(); p.cargo("package --list --allow-dirty") .with_stdout( "\ .gitignore Cargo.toml Cargo.toml.orig src/lib.rs ", ) .run(); } #[cargo_test] #[cfg(windows)] fn reserved_windows_name() { // If we are running on a version of Windows that allows these reserved filenames, // skip this test. 
if paths::windows_reserved_names_are_allowed() { return; } Package::new("bar", "1.0.0") .file("src/lib.rs", "pub mod aux;") .file("src/aux.rs", "") .publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" [dependencies] bar = "1.0.0" "#, ) .file("src/main.rs", "extern crate bar;\nfn main() { }") .build(); p.cargo("package") .with_status(101) .with_stderr_contains( "\ error: failed to verify package tarball Caused by: failed to download replaced source registry `[..]` Caused by: failed to unpack package `[..] `[..]`)` Caused by: failed to unpack entry at `[..]aux.rs` Caused by: `[..]aux.rs` appears to contain a reserved Windows path, it cannot be extracted on Windows Caused by: failed to unpack `[..]aux.rs` Caused by: failed to unpack `[..]aux.rs` into `[..]aux.rs`", ) .run(); } #[cargo_test] fn list_with_path_and_lock() { // Allow --list even for something that isn't packageable. // Init an empty registry because a versionless path dep will search for // the package on crates.io. registry::init(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" license = "MIT" description = "foo" homepage = "foo" [dependencies] bar = {path="bar"} "#, ) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "") .build(); p.cargo("package --list") .with_stdout( "\ Cargo.lock Cargo.toml Cargo.toml.orig src/main.rs ", ) .run(); p.cargo("package") .with_status(101) .with_stderr( "\ [ERROR] all dependencies must have a version specified when packaging. dependency `bar` does not specify a version Note: The packaged dependency will use the version from crates.io, the `path` specification will be removed from the dependency declaration. ", ) .run(); } #[cargo_test] fn long_file_names() { // Filenames over 100 characters require a GNU extension tarfile. // See #8453. 
registry::init(); let long_name = concat!( "012345678901234567890123456789012345678901234567890123456789", "012345678901234567890123456789012345678901234567890123456789", "012345678901234567890123456789012345678901234567890123456789" ); if cfg!(windows) { // Long paths on Windows require a special registry entry that is // disabled by default (even on Windows 10). // https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file // If the directory where Cargo runs happens to be more than 80 characters // long, then it will bump into this limit. // // First create a directory to account for various paths Cargo will // be using in the target directory (such as "target/package/foo-0.1.0"). let test_path = paths::root().join("test-dir-probe-long-path-support"); test_path.mkdir_p(); let test_path = test_path.join(long_name); if let Err(e) = File::create(&test_path) { // write to stderr directly to avoid output from being captured // and always display text, even without --nocapture use std::io::Write; writeln!( std::io::stderr(), "\nSkipping long_file_names test, this OS or filesystem does not \ appear to support long file paths: {:?}\n{:?}", e, test_path ) .unwrap(); return; } } let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" license = "MIT" description = "foo" homepage = "foo" [dependencies] "#, ) .file(long_name, "something") .file("src/main.rs", "fn main() {}") .build(); p.cargo("package").run(); p.cargo("package --list") .with_stdout(&format!( "\ {} Cargo.lock Cargo.toml Cargo.toml.orig src/main.rs ", long_name )) .run(); } #[cargo_test] fn reproducible_output() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] exclude = ["*.txt"] license = "MIT" description = "foo" "#, ) .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .build(); p.cargo("package").run(); assert!(p.root().join("target/package/foo-0.0.1.crate").is_file()); let f = 
File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); let decoder = GzDecoder::new(f); let mut archive = Archive::new(decoder); for ent in archive.entries().unwrap() { let ent = ent.unwrap(); println!("checking {:?}", ent.path()); let header = ent.header(); assert_eq!(header.mode().unwrap(), 0o644); assert!(header.mtime().unwrap() != 0); assert_eq!(header.username().unwrap().unwrap(), ""); assert_eq!(header.groupname().unwrap().unwrap(), ""); } } #[cargo_test] fn package_with_resolver_and_metadata() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] resolver = '2' [package.metadata.docs.rs] all-features = true "#, ) .file("src/lib.rs", "") .build(); p.cargo("package").run(); } #[cargo_test] fn deleted_git_working_tree() { // When deleting a file, but not staged, cargo should ignore the file. let (p, repo) = git::new_repo("foo", |p| { p.file("src/lib.rs", "").file("src/main.rs", "fn main() {}") }); p.root().join("src/lib.rs").rm_rf(); p.cargo("package --allow-dirty --list") .with_stdout( "\ Cargo.lock Cargo.toml Cargo.toml.orig src/main.rs ", ) .run(); p.cargo("package --allow-dirty").run(); let mut index = t!(repo.index()); t!(index.remove(Path::new("src/lib.rs"), 0)); t!(index.write()); p.cargo("package --allow-dirty --list") .with_stdout( "\ Cargo.lock Cargo.toml Cargo.toml.orig src/main.rs ", ) .run(); p.cargo("package --allow-dirty").run(); } #[cargo_test] fn in_workspace() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" [workspace] members = ["bar"] "#, ) .file("src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.0.1" authors = [] license = "MIT" description = "bar" workspace = ".." "#, ) .file("bar/src/main.rs", "fn main() {}") .build(); p.cargo("package --workspace") .with_stderr( "\ [WARNING] manifest has no documentation, [..] See [..] 
[PACKAGING] bar v0.0.1 ([CWD]/bar) [VERIFYING] bar v0.0.1 ([CWD]/bar) [COMPILING] bar v0.0.1 ([CWD][..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [WARNING] manifest has no documentation, [..] See [..] [PACKAGING] foo v0.0.1 ([CWD]) [VERIFYING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD][..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); assert!(p.root().join("target/package/foo-0.0.1.crate").is_file()); assert!(p.root().join("target/package/bar-0.0.1.crate").is_file()); } #[cargo_test] fn workspace_overrides_resolver() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar", "baz"] "#, ) .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" edition = "2021" "#, ) .file("bar/src/lib.rs", "") .file( "baz/Cargo.toml", r#" [package] name = "baz" version = "0.1.0" edition = "2015" "#, ) .file("baz/src/lib.rs", "") .build(); p.cargo("package --no-verify -p bar -p baz").run(); let f = File::open(&p.root().join("target/package/bar-0.1.0.crate")).unwrap(); let rewritten_toml = format!( r#"{} [package] edition = "2021" name = "bar" version = "0.1.0" resolver = "1" "#, cargo::core::package::MANIFEST_PREAMBLE ); validate_crate_contents( f, "bar-0.1.0.crate", &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"], &[("Cargo.toml", &rewritten_toml)], ); // When the crate has the same implicit resolver as the workspace it is not overridden let f = File::open(&p.root().join("target/package/baz-0.1.0.crate")).unwrap(); let rewritten_toml = format!( r#"{} [package] edition = "2015" name = "baz" version = "0.1.0" "#, cargo::core::package::MANIFEST_PREAMBLE ); validate_crate_contents( f, "baz-0.1.0.crate", &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"], &[("Cargo.toml", &rewritten_toml)], ); } cargo-0.66.0/tests/testsuite/package_features.rs000066400000000000000000000416001432416201200217240ustar00rootroot00000000000000//! Tests for feature selection on the command-line. 
use super::features2::switch_to_resolver_2; use cargo_test_support::registry::{Dependency, Package}; use cargo_test_support::{basic_manifest, project}; use std::fmt::Write; #[cargo_test] fn virtual_no_default_features() { // --no-default-features in root of virtual workspace. Package::new("dep1", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a", "b"] "#, ) .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.1.0" [dependencies] dep1 = {version = "1.0", optional = true} [features] default = ["dep1"] "#, ) .file("a/src/lib.rs", "") .file( "b/Cargo.toml", r#" [package] name = "b" version = "0.1.0" [features] default = ["f1"] f1 = [] "#, ) .file( "b/src/lib.rs", r#" #[cfg(feature = "f1")] compile_error!{"expected f1 off"} "#, ) .build(); p.cargo("check --no-default-features") .with_stderr_unordered( "\ [UPDATING] [..] [CHECKING] a v0.1.0 [..] [CHECKING] b v0.1.0 [..] [FINISHED] [..] ", ) .run(); p.cargo("check --features foo") .with_status(101) .with_stderr( "[ERROR] none of the selected packages contains these features: foo, did you mean: f1?", ) .run(); p.cargo("check --features a/dep1,b/f1,b/f2,f2") .with_status(101) .with_stderr("[ERROR] none of the selected packages contains these features: b/f2, f2, did you mean: f1?") .run(); p.cargo("check --features a/dep,b/f1,b/f2,f2") .with_status(101) .with_stderr("[ERROR] none of the selected packages contains these features: a/dep, b/f2, f2, did you mean: a/dep1, f1?") .run(); p.cargo("check --features a/dep,a/dep1") .with_status(101) .with_stderr("[ERROR] none of the selected packages contains these features: a/dep, did you mean: b/f1?") .run(); } #[cargo_test] fn virtual_typo_member_feature() { project() .file( "Cargo.toml", r#" [package] name = "a" version = "0.1.0" resolver = "2" [features] deny-warnings = [] "#, ) .file("src/lib.rs", "") .build() .cargo("check --features a/deny-warning") .with_status(101) .with_stderr( "[ERROR] none of the selected packages contains 
these features: a/deny-warning, did you mean: a/deny-warnings?", ) .run(); } #[cargo_test] fn virtual_features() { // --features in root of virtual workspace. let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a", "b"] "#, ) .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.1.0" [features] f1 = [] "#, ) .file( "a/src/lib.rs", r#" #[cfg(not(feature = "f1"))] compile_error!{"f1 is missing"} "#, ) .file("b/Cargo.toml", &basic_manifest("b", "0.1.0")) .file("b/src/lib.rs", "") .build(); p.cargo("check --features f1") .with_stderr_unordered( "\ [CHECKING] a [..] [CHECKING] b [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn virtual_with_specific() { // -p flags with --features in root of virtual. let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a", "b"] "#, ) .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.1.0" [features] f1 = [] f2 = [] "#, ) .file( "a/src/lib.rs", r#" #[cfg(not_feature = "f1")] compile_error!{"f1 is missing"} #[cfg(not_feature = "f2")] compile_error!{"f2 is missing"} "#, ) .file( "b/Cargo.toml", r#" [package] name = "b" version = "0.1.0" [features] f2 = [] f3 = [] "#, ) .file( "b/src/lib.rs", r#" #[cfg(not_feature = "f2")] compile_error!{"f2 is missing"} #[cfg(not_feature = "f3")] compile_error!{"f3 is missing"} "#, ) .build(); p.cargo("check -p a -p b --features f1,f2,f3") .with_stderr_unordered( "\ [CHECKING] a [..] [CHECKING] b [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn other_member_from_current() { // -p for another member while in the current directory. 
let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar"] [package] name = "foo" version = "0.1.0" [dependencies] bar = { path="bar", features=["f3"] } [features] f1 = ["bar/f4"] "#, ) .file("src/lib.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" [features] f1 = [] f2 = [] f3 = [] f4 = [] "#, ) .file("bar/src/lib.rs", "") .file( "bar/src/main.rs", r#" fn main() { if cfg!(feature = "f1") { print!("f1"); } if cfg!(feature = "f2") { print!("f2"); } if cfg!(feature = "f3") { print!("f3"); } if cfg!(feature = "f4") { print!("f4"); } println!(); } "#, ) .build(); // Old behavior. p.cargo("run -p bar --features f1") .with_stdout("f3f4") .run(); p.cargo("run -p bar --features f1,f2") .with_status(101) .with_stderr("[ERROR] Package `foo[..]` does not have the feature `f2`") .run(); p.cargo("run -p bar --features bar/f1") .with_stdout("f1f3") .run(); // New behavior. switch_to_resolver_2(&p); p.cargo("run -p bar --features f1").with_stdout("f1").run(); p.cargo("run -p bar --features f1,f2") .with_stdout("f1f2") .run(); p.cargo("run -p bar --features bar/f1") .with_stdout("f1") .run(); } #[cargo_test] fn feature_default_resolver() { let p = project() .file( "Cargo.toml", r#" [package] name = "a" version = "0.1.0" [features] test = [] "#, ) .file( "src/main.rs", r#" fn main() { if cfg!(feature = "test") { println!("feature set"); } } "#, ) .build(); p.cargo("check --features testt") .with_status(101) .with_stderr("[ERROR] Package `a[..]` does not have the feature `testt`") .run(); p.cargo("run --features test") .with_status(0) .with_stdout("feature set") .run(); p.cargo("run --features a/test") .with_status(101) .with_stderr("[ERROR] package `a[..]` does not have a dependency named `a`") .run(); } #[cargo_test] fn virtual_member_slash() { // member slash feature syntax let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a"] "#, ) .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.1.0" [dependencies] b = 
{path="../b", optional=true} [features] default = ["f1"] f1 = [] f2 = [] "#, ) .file( "a/src/lib.rs", r#" #[cfg(feature = "f1")] compile_error!{"f1 is set"} #[cfg(feature = "f2")] compile_error!{"f2 is set"} #[cfg(feature = "b")] compile_error!{"b is set"} "#, ) .file( "b/Cargo.toml", r#" [package] name = "b" version = "0.1.0" [features] bfeat = [] "#, ) .file( "b/src/lib.rs", r#" #[cfg(feature = "bfeat")] compile_error!{"bfeat is set"} "#, ) .build(); p.cargo("check -p a") .with_status(101) .with_stderr_contains("[..]f1 is set[..]") .with_stderr_does_not_contain("[..]f2 is set[..]") .with_stderr_does_not_contain("[..]b is set[..]") .run(); p.cargo("check -p a --features a/f1") .with_status(101) .with_stderr_contains("[..]f1 is set[..]") .with_stderr_does_not_contain("[..]f2 is set[..]") .with_stderr_does_not_contain("[..]b is set[..]") .run(); p.cargo("check -p a --features a/f2") .with_status(101) .with_stderr_contains("[..]f1 is set[..]") .with_stderr_contains("[..]f2 is set[..]") .with_stderr_does_not_contain("[..]b is set[..]") .run(); p.cargo("check -p a --features b/bfeat") .with_status(101) .with_stderr_contains("[..]bfeat is set[..]") .run(); p.cargo("check -p a --no-default-features").run(); p.cargo("check -p a --no-default-features --features b") .with_status(101) .with_stderr_contains("[..]b is set[..]") .run(); } #[cargo_test] fn non_member() { // -p for a non-member Package::new("dep", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" resolver = "2" [dependencies] dep = "1.0" [features] f1 = [] "#, ) .file("src/lib.rs", "") .build(); p.cargo("build -p dep --features f1") .with_status(101) .with_stderr( "[UPDATING][..]\n[ERROR] cannot specify features for packages outside of workspace", ) .run(); p.cargo("build -p dep --all-features") .with_status(101) .with_stderr("[ERROR] cannot specify features for packages outside of workspace") .run(); p.cargo("build -p dep --no-default-features") 
.with_status(101) .with_stderr("[ERROR] cannot specify features for packages outside of workspace") .run(); p.cargo("build -p dep") .with_stderr( "\ [DOWNLOADING] [..] [DOWNLOADED] [..] [COMPILING] dep [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn resolver1_member_features() { // --features member-name/feature-name with resolver="1" let p = project() .file( "Cargo.toml", r#" [workspace] members = ["member1", "member2"] "#, ) .file( "member1/Cargo.toml", r#" [package] name = "member1" version = "0.1.0" [features] m1-feature = [] "#, ) .file( "member1/src/main.rs", r#" fn main() { if cfg!(feature = "m1-feature") { println!("m1-feature set"); } } "#, ) .file("member2/Cargo.toml", &basic_manifest("member2", "0.1.0")) .file("member2/src/lib.rs", "") .build(); p.cargo("run -p member1 --features member1/m1-feature") .cwd("member2") .with_stdout("m1-feature set") .run(); p.cargo("check -p member1 --features member1/m2-feature") .cwd("member2") .with_status(101) .with_stderr("[ERROR] Package `member1[..]` does not have the feature `m2-feature`") .run(); } #[cargo_test] fn non_member_feature() { // --features for a non-member Package::new("jazz", "1.0.0").publish(); Package::new("bar", "1.0.0") .add_dep(Dependency::new("jazz", "1.0").optional(true)) .publish(); let make_toml = |resolver, optional| { let mut s = String::new(); write!( s, r#" [package] name = "foo" version = "0.1.0" resolver = "{}" [dependencies] "#, resolver ) .unwrap(); if optional { s.push_str(r#"bar = { version = "1.0", optional = true } "#); } else { s.push_str(r#"bar = "1.0""#) } s.push('\n'); s }; let p = project() .file("Cargo.toml", &make_toml("1", false)) .file("src/lib.rs", "") .build(); p.cargo("fetch").run(); ///////////////////////// V1 non-optional eprintln!("V1 non-optional"); p.cargo("check -p bar") .with_stderr( "\ [CHECKING] bar v1.0.0 [FINISHED] [..] ", ) .run(); // TODO: This should not be allowed (future warning?) 
p.cargo("check --features bar/jazz") .with_stderr( "\ [DOWNLOADING] crates ... [DOWNLOADED] jazz v1.0.0 [..] [CHECKING] jazz v1.0.0 [CHECKING] bar v1.0.0 [CHECKING] foo v0.1.0 [..] [FINISHED] [..] ", ) .run(); // TODO: This should not be allowed (future warning?) p.cargo("check -p bar --features bar/jazz -v") .with_stderr( "\ [FRESH] jazz v1.0.0 [FRESH] bar v1.0.0 [FINISHED] [..] ", ) .run(); ///////////////////////// V1 optional eprintln!("V1 optional"); p.change_file("Cargo.toml", &make_toml("1", true)); // This error isn't great, but is probably unlikely to be common in // practice, so I'm not going to put much effort into improving it. p.cargo("check -p bar") .with_status(101) .with_stderr( "\ error: package ID specification `bar` did not match any packages Did you mean `foo`? ", ) .run(); p.cargo("check -p bar --features bar -v") .with_stderr( "\ [FRESH] bar v1.0.0 [FINISHED] [..] ", ) .run(); // TODO: This should not be allowed (future warning?) p.cargo("check -p bar --features bar/jazz -v") .with_stderr( "\ [FRESH] jazz v1.0.0 [FRESH] bar v1.0.0 [FINISHED] [..] ", ) .run(); ///////////////////////// V2 non-optional eprintln!("V2 non-optional"); p.change_file("Cargo.toml", &make_toml("2", false)); // TODO: This should not be allowed (future warning?) p.cargo("check --features bar/jazz -v") .with_stderr( "\ [FRESH] jazz v1.0.0 [FRESH] bar v1.0.0 [FRESH] foo v0.1.0 [..] [FINISHED] [..] ", ) .run(); p.cargo("check -p bar -v") .with_stderr( "\ [FRESH] bar v1.0.0 [FINISHED] [..] ", ) .run(); p.cargo("check -p bar --features bar/jazz") .with_status(101) .with_stderr("error: cannot specify features for packages outside of workspace") .run(); ///////////////////////// V2 optional eprintln!("V2 optional"); p.change_file("Cargo.toml", &make_toml("2", true)); p.cargo("check -p bar") .with_status(101) .with_stderr( "\ error: package ID specification `bar` did not match any packages Did you mean `foo`? ", ) .run(); // New --features behavior does not look at cwd. 
p.cargo("check -p bar --features bar") .with_status(101) .with_stderr("error: cannot specify features for packages outside of workspace") .run(); p.cargo("check -p bar --features bar/jazz") .with_status(101) .with_stderr("error: cannot specify features for packages outside of workspace") .run(); p.cargo("check -p bar --features foo/bar") .with_status(101) .with_stderr("error: cannot specify features for packages outside of workspace") .run(); } cargo-0.66.0/tests/testsuite/patch.rs000066400000000000000000001731541432416201200175440ustar00rootroot00000000000000//! Tests for `[patch]` table source replacement. use cargo_test_support::git; use cargo_test_support::paths; use cargo_test_support::registry::{self, Package}; use cargo_test_support::{basic_manifest, project}; use std::fs; use toml_edit::easy as toml; #[cargo_test] fn replace() { Package::new("bar", "0.1.0").publish(); Package::new("baz", "0.1.0") .file( "src/lib.rs", "extern crate bar; pub fn baz() { bar::bar(); }", ) .dep("bar", "0.1.0") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" baz = "0.1.0" [patch.crates-io] bar = { path = "bar" } "#, ) .file( "src/lib.rs", " extern crate bar; extern crate baz; pub fn bar() { bar::bar(); baz::baz(); } ", ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .build(); p.cargo("build") .with_stderr( "\ [UPDATING] `dummy-registry` index [DOWNLOADING] crates ... [DOWNLOADED] baz v0.1.0 ([..]) [COMPILING] bar v0.1.0 ([CWD]/bar) [COMPILING] baz v0.1.0 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); p.cargo("build").with_stderr("[FINISHED] [..]").run(); } #[cargo_test] fn from_config() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" "#, ) .file( ".cargo/config.toml", r#" [patch.crates-io] bar = { path = 'bar' } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1")) .file("bar/src/lib.rs", r#""#) .build(); p.cargo("build") .with_stderr( "\ [UPDATING] `dummy-registry` index [COMPILING] bar v0.1.1 ([..]) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn from_config_relative() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" "#, ) .file( "../.cargo/config.toml", r#" [patch.crates-io] bar = { path = 'foo/bar' } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1")) .file("bar/src/lib.rs", r#""#) .build(); p.cargo("build") .with_stderr( "\ [UPDATING] `dummy-registry` index [COMPILING] bar v0.1.1 ([..]) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn from_config_precedence() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" [patch.crates-io] bar = { path = 'no-such-path' } "#, ) .file( ".cargo/config.toml", r#" [patch.crates-io] bar = { path = 'bar' } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1")) .file("bar/src/lib.rs", r#""#) .build(); p.cargo("build") .with_stderr( "\ [UPDATING] `dummy-registry` index [COMPILING] bar v0.1.1 ([..]) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn nonexistent() { Package::new("baz", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" [patch.crates-io] bar = { path = "bar" } "#, ) .file( "src/lib.rs", "extern crate bar; pub fn foo() { bar::bar(); }", ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .build(); p.cargo("build") .with_stderr( "\ [UPDATING] `dummy-registry` index [COMPILING] bar v0.1.0 ([CWD]/bar) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("build").with_stderr("[FINISHED] [..]").run(); } #[cargo_test] fn patch_git() { let bar = git::repo(&paths::root().join("override")) .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "") .build(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = {{ git = '{}' }} [patch.'{0}'] bar = {{ path = "bar" }} "#, bar.url() ), ) .file( "src/lib.rs", "extern crate bar; pub fn foo() { bar::bar(); }", ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .build(); p.cargo("build") .with_stderr( "\ [UPDATING] git repository `file://[..]` [COMPILING] bar v0.1.0 ([CWD]/bar) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); p.cargo("build").with_stderr("[FINISHED] [..]").run(); } #[cargo_test] fn patch_to_git() { let bar = git::repo(&paths::root().join("override")) .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "pub fn bar() {}") .build(); Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1" [patch.crates-io] bar = {{ git = '{}' }} "#, bar.url() ), ) .file( "src/lib.rs", "extern crate bar; pub fn foo() { bar::bar(); }", ) .build(); p.cargo("build") .with_stderr( "\ [UPDATING] git repository `file://[..]` [UPDATING] `dummy-registry` index [COMPILING] bar v0.1.0 (file://[..]) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("build").with_stderr("[FINISHED] [..]").run(); } #[cargo_test] fn unused() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" [patch.crates-io] bar = { path = "bar" } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.2.0")) .file("bar/src/lib.rs", "not rust code") .build(); p.cargo("build") .with_stderr( "\ [UPDATING] `dummy-registry` index [WARNING] Patch `bar v0.2.0 ([CWD]/bar)` was not used in the crate graph. Check that [..] with the [..] what is [..] version. [..] [DOWNLOADING] crates ... [DOWNLOADED] bar v0.1.0 [..] [COMPILING] bar v0.1.0 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("build") .with_stderr( "\ [WARNING] Patch `bar v0.2.0 ([CWD]/bar)` was not used in the crate graph. Check that [..] with the [..] what is [..] version. [..] [FINISHED] [..] 
", ) .run(); // unused patch should be in the lock file let lock = p.read_lockfile(); let toml: toml::Value = toml::from_str(&lock).unwrap(); assert_eq!(toml["patch"]["unused"].as_array().unwrap().len(), 1); assert_eq!(toml["patch"]["unused"][0]["name"].as_str(), Some("bar")); assert_eq!( toml["patch"]["unused"][0]["version"].as_str(), Some("0.2.0") ); } #[cargo_test] fn unused_with_mismatch_source_being_patched() { registry::alt_init(); Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" [patch.alternative] bar = { path = "bar" } [patch.crates-io] bar = { path = "baz" } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.2.0")) .file("bar/src/lib.rs", "not rust code") .file("baz/Cargo.toml", &basic_manifest("bar", "0.3.0")) .file("baz/src/lib.rs", "not rust code") .build(); p.cargo("build") .with_stderr( "\ [UPDATING] `dummy-registry` index [WARNING] Patch `bar v0.2.0 ([CWD]/bar)` was not used in the crate graph. Perhaps you misspelled the source URL being patched. Possible URLs for `[patch.]`: crates-io [WARNING] Patch `bar v0.3.0 ([CWD]/baz)` was not used in the crate graph. Check that [..] with the [..] what is [..] version. [..] [DOWNLOADING] crates ... [DOWNLOADED] bar v0.1.0 [..] [COMPILING] bar v0.1.0 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn prefer_patch_version() { Package::new("bar", "0.1.2").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" [patch.crates-io] bar = { path = "bar" } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1")) .file("bar/src/lib.rs", "") .build(); p.cargo("build") .with_stderr( "\ [UPDATING] `dummy-registry` index [COMPILING] bar v0.1.1 ([CWD]/bar) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("build") .with_stderr( "\ [FINISHED] [..] ", ) .run(); // there should be no patch.unused in the toml file let lock = p.read_lockfile(); let toml: toml::Value = toml::from_str(&lock).unwrap(); assert!(toml.get("patch").is_none()); } #[cargo_test] fn unused_from_config() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" "#, ) .file( ".cargo/config.toml", r#" [patch.crates-io] bar = { path = "bar" } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.2.0")) .file("bar/src/lib.rs", "not rust code") .build(); p.cargo("build") .with_stderr( "\ [UPDATING] `dummy-registry` index [WARNING] Patch `bar v0.2.0 ([CWD]/bar)` was not used in the crate graph. Check that [..] with the [..] what is [..] version. [..] [DOWNLOADING] crates ... [DOWNLOADED] bar v0.1.0 [..] [COMPILING] bar v0.1.0 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("build") .with_stderr( "\ [WARNING] Patch `bar v0.2.0 ([CWD]/bar)` was not used in the crate graph. Check that [..] with the [..] what is [..] version. [..] [FINISHED] [..] 
", ) .run(); // unused patch should be in the lock file let lock = p.read_lockfile(); let toml: toml::Value = toml::from_str(&lock).unwrap(); assert_eq!(toml["patch"]["unused"].as_array().unwrap().len(), 1); assert_eq!(toml["patch"]["unused"][0]["name"].as_str(), Some("bar")); assert_eq!( toml["patch"]["unused"][0]["version"].as_str(), Some("0.2.0") ); } #[cargo_test] fn unused_git() { Package::new("bar", "0.1.0").publish(); let foo = git::repo(&paths::root().join("override")) .file("Cargo.toml", &basic_manifest("bar", "0.2.0")) .file("src/lib.rs", "") .build(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1" [patch.crates-io] bar = {{ git = '{}' }} "#, foo.url() ), ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_stderr( "\ [UPDATING] git repository `file://[..]` [UPDATING] `dummy-registry` index [WARNING] Patch `bar v0.2.0 ([..])` was not used in the crate graph. Check that [..] with the [..] what is [..] version. [..] [DOWNLOADING] crates ... [DOWNLOADED] bar v0.1.0 [..] [COMPILING] bar v0.1.0 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("build") .with_stderr( "\ [WARNING] Patch `bar v0.2.0 ([..])` was not used in the crate graph. Check that [..] with the [..] what is [..] version. [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn add_patch() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", r#""#) .build(); p.cargo("build") .with_stderr( "\ [UPDATING] `dummy-registry` index [DOWNLOADING] crates ... [DOWNLOADED] bar v0.1.0 [..] [COMPILING] bar v0.1.0 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); p.cargo("build").with_stderr("[FINISHED] [..]").run(); p.change_file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" [patch.crates-io] bar = { path = 'bar' } "#, ); p.cargo("build") .with_stderr( "\ [COMPILING] bar v0.1.0 ([CWD]/bar) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("build").with_stderr("[FINISHED] [..]").run(); } #[cargo_test] fn add_patch_from_config() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", r#""#) .build(); p.cargo("build") .with_stderr( "\ [UPDATING] `dummy-registry` index [DOWNLOADING] crates ... [DOWNLOADED] bar v0.1.0 [..] [COMPILING] bar v0.1.0 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("build").with_stderr("[FINISHED] [..]").run(); p.change_file( ".cargo/config.toml", r#" [patch.crates-io] bar = { path = 'bar' } "#, ); p.cargo("build") .with_stderr( "\ [COMPILING] bar v0.1.0 ([CWD]/bar) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("build").with_stderr("[FINISHED] [..]").run(); } #[cargo_test] fn add_ignored_patch() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1")) .file("bar/src/lib.rs", r#""#) .build(); p.cargo("build") .with_stderr( "\ [UPDATING] `dummy-registry` index [DOWNLOADING] crates ... [DOWNLOADED] bar v0.1.0 [..] [COMPILING] bar v0.1.0 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); p.cargo("build").with_stderr("[FINISHED] [..]").run(); p.change_file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" [patch.crates-io] bar = { path = 'bar' } "#, ); p.cargo("build") .with_stderr( "\ [WARNING] Patch `bar v0.1.1 ([CWD]/bar)` was not used in the crate graph. Check that [..] with the [..] what is [..] version. [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", ) .run(); p.cargo("build") .with_stderr( "\ [WARNING] Patch `bar v0.1.1 ([CWD]/bar)` was not used in the crate graph. Check that [..] with the [..] what is [..] version. [..] [FINISHED] [..]", ) .run(); p.cargo("update").run(); p.cargo("build") .with_stderr( "\ [COMPILING] bar v0.1.1 ([CWD]/bar) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [..] ", ) .run(); } #[cargo_test] fn add_patch_with_features() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" [patch.crates-io] bar = { path = 'bar', features = ["some_feature"] } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", r#""#) .build(); p.cargo("build") .with_stderr( "\ [WARNING] patch for `bar` uses the features mechanism. \ default-features and features will not take effect because the patch dependency does not support this mechanism [UPDATING] `dummy-registry` index [COMPILING] bar v0.1.0 ([CWD]/bar) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("build") .with_stderr( "\ [WARNING] patch for `bar` uses the features mechanism. \ default-features and features will not take effect because the patch dependency does not support this mechanism [FINISHED] [..] 
", ) .run(); } #[cargo_test] fn add_patch_with_setting_default_features() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" [patch.crates-io] bar = { path = 'bar', default-features = false, features = ["none_default_feature"] } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", r#""#) .build(); p.cargo("build") .with_stderr( "\ [WARNING] patch for `bar` uses the features mechanism. \ default-features and features will not take effect because the patch dependency does not support this mechanism [UPDATING] `dummy-registry` index [COMPILING] bar v0.1.0 ([CWD]/bar) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("build") .with_stderr( "\ [WARNING] patch for `bar` uses the features mechanism. \ default-features and features will not take effect because the patch dependency does not support this mechanism [FINISHED] [..] ", ) .run(); } #[cargo_test] fn no_warn_ws_patch() { Package::new("c", "0.1.0").publish(); // Don't issue an unused patch warning when the patch isn't used when // partially building a workspace. let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a", "b", "c"] [patch.crates-io] c = { path = "c" } "#, ) .file("a/Cargo.toml", &basic_manifest("a", "0.1.0")) .file("a/src/lib.rs", "") .file( "b/Cargo.toml", r#" [package] name = "b" version = "0.1.0" [dependencies] c = "0.1.0" "#, ) .file("b/src/lib.rs", "") .file("c/Cargo.toml", &basic_manifest("c", "0.1.0")) .file("c/src/lib.rs", "") .build(); p.cargo("build -p a") .with_stderr( "\ [UPDATING] [..] [COMPILING] a [..] 
[FINISHED] [..]", ) .run(); } #[cargo_test] fn new_minor() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" [patch.crates-io] bar = { path = 'bar' } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1")) .file("bar/src/lib.rs", r#""#) .build(); p.cargo("build") .with_stderr( "\ [UPDATING] `dummy-registry` index [COMPILING] bar v0.1.1 [..] [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn transitive_new_minor() { Package::new("baz", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = { path = 'bar' } [patch.crates-io] baz = { path = 'baz' } "#, ) .file("src/lib.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] [dependencies] baz = '0.1.0' "#, ) .file("bar/src/lib.rs", r#""#) .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.1")) .file("baz/src/lib.rs", r#""#) .build(); p.cargo("build") .with_stderr( "\ [UPDATING] `dummy-registry` index [COMPILING] baz v0.1.1 [..] [COMPILING] bar v0.1.0 [..] [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn new_major() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.2.0" [patch.crates-io] bar = { path = 'bar' } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.2.0")) .file("bar/src/lib.rs", r#""#) .build(); p.cargo("build") .with_stderr( "\ [UPDATING] `dummy-registry` index [COMPILING] bar v0.2.0 [..] [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); Package::new("bar", "0.2.0").publish(); p.cargo("update").run(); p.cargo("build") .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") .run(); p.change_file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.2.0" "#, ); p.cargo("build") .with_stderr( "\ [UPDATING] `dummy-registry` index [DOWNLOADING] crates ... [DOWNLOADED] bar v0.2.0 [..] [COMPILING] bar v0.2.0 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn transitive_new_major() { Package::new("baz", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = { path = 'bar' } [patch.crates-io] baz = { path = 'baz' } "#, ) .file("src/lib.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] [dependencies] baz = '0.2.0' "#, ) .file("bar/src/lib.rs", r#""#) .file("baz/Cargo.toml", &basic_manifest("baz", "0.2.0")) .file("baz/src/lib.rs", r#""#) .build(); p.cargo("build") .with_stderr( "\ [UPDATING] `dummy-registry` index [COMPILING] baz v0.2.0 [..] [COMPILING] bar v0.1.0 [..] [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn shared_by_transitive() { Package::new("baz", "0.1.1").publish(); let baz = git::repo(&paths::root().join("override")) .file("Cargo.toml", &basic_manifest("baz", "0.1.2")) .file("src/lib.rs", "") .build(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = " 0.1.0" [dependencies] bar = {{ path = "bar" }} baz = "0.1" [patch.crates-io] baz = {{ git = "{}", version = "0.1" }} "#, baz.url(), ), ) .file("src/lib.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" [dependencies] baz = "0.1.1" "#, ) .file("bar/src/lib.rs", "") .build(); p.cargo("build") .with_stderr( "\ [UPDATING] git repository `file://[..]` [UPDATING] `dummy-registry` index [COMPILING] baz v0.1.2 [..] [COMPILING] bar v0.1.0 [..] [COMPILING] foo v0.1.0 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn remove_patch() { Package::new("foo", "0.1.0").publish(); Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1" [patch.crates-io] foo = { path = 'foo' } bar = { path = 'bar' } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", r#""#) .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("foo/src/lib.rs", r#""#) .build(); // Generate a lock file where `foo` is unused p.cargo("build").run(); let lock_file1 = p.read_lockfile(); // Remove `foo` and generate a new lock file form the old one p.change_file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1" [patch.crates-io] bar = { path = 'bar' } "#, ); p.cargo("build").run(); let lock_file2 = p.read_lockfile(); // Remove the lock file and build from scratch fs::remove_file(p.root().join("Cargo.lock")).unwrap(); p.cargo("build").run(); let lock_file3 = p.read_lockfile(); 
assert!(lock_file1.contains("foo")); assert_eq!(lock_file2, lock_file3); assert_ne!(lock_file1, lock_file2); } #[cargo_test] fn non_crates_io() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [patch.some-other-source] bar = { path = 'bar' } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", r#""#) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[..]` Caused by: [patch] entry `some-other-source` should be a URL or registry name Caused by: invalid url `some-other-source`: relative URL without a base ", ) .run(); } #[cargo_test] fn replace_with_crates_io() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [patch.crates-io] bar = "0.1" "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", r#""#) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [UPDATING] [..] 
error: failed to resolve patches for `[..]` Caused by: patch for `bar` in `[..]` points to the same source, but patches must point \ to different sources ", ) .run(); } #[cargo_test] fn patch_in_virtual() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [workspace] members = ["foo"] [patch.crates-io] bar = { path = "bar" } "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", r#""#) .file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [dependencies] bar = "0.1" "#, ) .file("foo/src/lib.rs", r#""#) .build(); p.cargo("build").run(); p.cargo("build").with_stderr("[FINISHED] [..]").run(); } #[cargo_test] fn patch_depends_on_another_patch() { Package::new("bar", "0.1.0") .file("src/lib.rs", "broken code") .publish(); Package::new("baz", "0.1.0") .dep("bar", "0.1") .file("src/lib.rs", "broken code") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.1.0" [dependencies] bar = "0.1" baz = "0.1" [patch.crates-io] bar = { path = "bar" } baz = { path = "baz" } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1")) .file("bar/src/lib.rs", r#""#) .file( "baz/Cargo.toml", r#" [package] name = "baz" version = "0.1.1" authors = [] [dependencies] bar = "0.1" "#, ) .file("baz/src/lib.rs", r#""#) .build(); p.cargo("build").run(); // Nothing should be rebuilt, no registry should be updated. 
p.cargo("build").with_stderr("[FINISHED] [..]").run(); } #[cargo_test] fn replace_prerelease() { Package::new("baz", "1.1.0-pre.1").publish(); let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar"] [patch.crates-io] baz = { path = "./baz" } "#, ) .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = [] [dependencies] baz = "1.1.0-pre.1" "#, ) .file( "bar/src/main.rs", "extern crate baz; fn main() { baz::baz() }", ) .file( "baz/Cargo.toml", r#" [project] name = "baz" version = "1.1.0-pre.1" authors = [] [workspace] "#, ) .file("baz/src/lib.rs", "pub fn baz() {}") .build(); p.cargo("build").run(); } #[cargo_test] fn patch_older() { Package::new("baz", "1.0.2").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = { path = 'bar' } baz = "=1.0.1" [patch.crates-io] baz = { path = "./baz" } "#, ) .file("src/lib.rs", "") .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = [] [dependencies] baz = "1.0.0" "#, ) .file("bar/src/lib.rs", "") .file( "baz/Cargo.toml", r#" [project] name = "baz" version = "1.0.1" authors = [] "#, ) .file("baz/src/lib.rs", "") .build(); p.cargo("build") .with_stderr( "\ [UPDATING] [..] [COMPILING] baz v1.0.1 [..] [COMPILING] bar v0.5.0 [..] [COMPILING] foo v0.1.0 [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn cycle() { Package::new("a", "1.0.0").publish(); Package::new("b", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a", "b"] [patch.crates-io] a = {path="a"} b = {path="b"} "#, ) .file( "a/Cargo.toml", r#" [package] name = "a" version = "1.0.0" [dependencies] b = "1.0" "#, ) .file("a/src/lib.rs", "") .file( "b/Cargo.toml", r#" [package] name = "b" version = "1.0.0" [dependencies] a = "1.0" "#, ) .file("b/src/lib.rs", "") .build(); p.cargo("check") .with_status(101) .with_stderr( "\ [UPDATING] [..] [ERROR] cyclic package dependency: [..] package `[..]` ... 
which satisfies dependency `[..]` of package `[..]` ... which satisfies dependency `[..]` of package `[..]` ", ) .run(); } #[cargo_test] fn multipatch() { Package::new("a", "1.0.0").publish(); Package::new("a", "2.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [dependencies] a1 = { version = "1", package = "a" } a2 = { version = "2", package = "a" } [patch.crates-io] b1 = { path = "a1", package = "a" } b2 = { path = "a2", package = "a" } "#, ) .file("src/lib.rs", "pub fn foo() { a1::f1(); a2::f2(); }") .file( "a1/Cargo.toml", r#" [package] name = "a" version = "1.0.0" "#, ) .file("a1/src/lib.rs", "pub fn f1() {}") .file( "a2/Cargo.toml", r#" [package] name = "a" version = "2.0.0" "#, ) .file("a2/src/lib.rs", "pub fn f2() {}") .build(); p.cargo("build").run(); } #[cargo_test] fn patch_same_version() { let bar = git::repo(&paths::root().join("override")) .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "") .build(); cargo_test_support::registry::init(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" [dependencies] bar = "0.1" [patch.crates-io] bar = {{ path = "bar" }} bar2 = {{ git = '{}', package = 'bar' }} "#, bar.url(), ), ) .file("src/lib.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" "#, ) .file("bar/src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [UPDATING] [..] 
error: cannot have two `[patch]` entries which both resolve to `bar v0.1.0` ", ) .run(); } #[cargo_test] fn two_semver_compatible() { let bar = git::repo(&paths::root().join("override")) .file("Cargo.toml", &basic_manifest("bar", "0.1.1")) .file("src/lib.rs", "") .build(); cargo_test_support::registry::init(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" [dependencies] bar = "0.1" [patch.crates-io] bar = {{ path = "bar" }} bar2 = {{ git = '{}', package = 'bar' }} "#, bar.url(), ), ) .file("src/lib.rs", "pub fn foo() { bar::foo() }") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.2" "#, ) .file("bar/src/lib.rs", "pub fn foo() {}") .build(); // assert the build succeeds and doesn't panic anywhere, and then afterwards // assert that the build succeeds again without updating anything or // building anything else. p.cargo("build").run(); p.cargo("build") .with_stderr( "\ warning: Patch `bar v0.1.1 [..]` was not used in the crate graph. Perhaps you misspelled the source URL being patched. Possible URLs for `[patch.]`: [CWD]/bar [FINISHED] [..]", ) .run(); } #[cargo_test] fn multipatch_select_big() { let bar = git::repo(&paths::root().join("override")) .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "") .build(); cargo_test_support::registry::init(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" [dependencies] bar = "*" [patch.crates-io] bar = {{ path = "bar" }} bar2 = {{ git = '{}', package = 'bar' }} "#, bar.url(), ), ) .file("src/lib.rs", "pub fn foo() { bar::foo() }") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.2.0" "#, ) .file("bar/src/lib.rs", "pub fn foo() {}") .build(); // assert the build succeeds, which is only possible if 0.2.0 is selected // since 0.1.0 is missing the function we need. Afterwards assert that the // build succeeds again without updating anything or building anything else. 
p.cargo("build").run(); p.cargo("build") .with_stderr( "\ warning: Patch `bar v0.1.0 [..]` was not used in the crate graph. Perhaps you misspelled the source URL being patched. Possible URLs for `[patch.]`: [CWD]/bar [FINISHED] [..]", ) .run(); } #[cargo_test] fn canonicalize_a_bunch() { let base = git::repo(&paths::root().join("base")) .file("Cargo.toml", &basic_manifest("base", "0.1.0")) .file("src/lib.rs", "") .build(); let intermediate = git::repo(&paths::root().join("intermediate")) .file( "Cargo.toml", &format!( r#" [package] name = "intermediate" version = "0.1.0" [dependencies] # Note the lack of trailing slash base = {{ git = '{}' }} "#, base.url(), ), ) .file("src/lib.rs", "pub fn f() { base::f() }") .build(); let newbase = git::repo(&paths::root().join("newbase")) .file("Cargo.toml", &basic_manifest("base", "0.1.0")) .file("src/lib.rs", "pub fn f() {}") .build(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" [dependencies] # Note the trailing slashes base = {{ git = '{base}/' }} intermediate = {{ git = '{intermediate}/' }} [patch.'{base}'] # Note the lack of trailing slash base = {{ git = '{newbase}' }} "#, base = base.url(), intermediate = intermediate.url(), newbase = newbase.url(), ), ) .file("src/lib.rs", "pub fn a() { base::f(); intermediate::f() }") .build(); // Once to make sure it actually works p.cargo("build").run(); // Then a few more times for good measure to ensure no weird warnings about // `[patch]` are printed. p.cargo("build").with_stderr("[FINISHED] [..]").run(); p.cargo("build").with_stderr("[FINISHED] [..]").run(); } #[cargo_test] fn update_unused_new_version() { // If there is an unused patch entry, and then you update the patch, // make sure `cargo update` will be able to fix the lock file. Package::new("bar", "0.1.5").publish(); // Start with a lock file to 0.1.5, and an "unused" patch because the // version is too old. 
let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [dependencies] bar = "0.1.5" [patch.crates-io] bar = { path = "../bar" } "#, ) .file("src/lib.rs", "") .build(); // Patch is too old. let bar = project() .at("bar") .file("Cargo.toml", &basic_manifest("bar", "0.1.4")) .file("src/lib.rs", "") .build(); p.cargo("build") .with_stderr_contains("[WARNING] Patch `bar v0.1.4 [..] was not used in the crate graph.") .run(); // unused patch should be in the lock file let lock = p.read_lockfile(); let toml: toml::Value = toml::from_str(&lock).unwrap(); assert_eq!(toml["patch"]["unused"].as_array().unwrap().len(), 1); assert_eq!(toml["patch"]["unused"][0]["name"].as_str(), Some("bar")); assert_eq!( toml["patch"]["unused"][0]["version"].as_str(), Some("0.1.4") ); // Oh, OK, let's update to the latest version. bar.change_file("Cargo.toml", &basic_manifest("bar", "0.1.6")); // Create a backup so we can test it with different options. fs::copy(p.root().join("Cargo.lock"), p.root().join("Cargo.lock.bak")).unwrap(); // Try to build again, this should automatically update Cargo.lock. p.cargo("build") .with_stderr( "\ [UPDATING] `dummy-registry` index [COMPILING] bar v0.1.6 ([..]/bar) [COMPILING] foo v0.0.1 ([..]/foo) [FINISHED] [..] ", ) .run(); // This should not update any registry. p.cargo("build").with_stderr("[FINISHED] [..]").run(); assert!(!p.read_lockfile().contains("unused")); // Restore the lock file, and see if `update` will work, too. fs::copy(p.root().join("Cargo.lock.bak"), p.root().join("Cargo.lock")).unwrap(); // Try `update -p`. p.cargo("update -p bar") .with_stderr( "\ [UPDATING] `dummy-registry` index [ADDING] bar v0.1.6 ([..]/bar) [REMOVING] bar v0.1.5 ", ) .run(); // Try with bare `cargo update`. 
fs::copy(p.root().join("Cargo.lock.bak"), p.root().join("Cargo.lock")).unwrap(); p.cargo("update") .with_stderr( "\ [UPDATING] `dummy-registry` index [ADDING] bar v0.1.6 ([..]/bar) [REMOVING] bar v0.1.5 ", ) .run(); } #[cargo_test] fn too_many_matches() { // The patch locations has multiple versions that match. registry::alt_init(); Package::new("bar", "0.1.0").publish(); Package::new("bar", "0.1.0").alternative(true).publish(); Package::new("bar", "0.1.1").alternative(true).publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "0.1" [patch.crates-io] bar = { version = "0.1", registry = "alternative" } "#, ) .file("src/lib.rs", "") .build(); // Picks 0.1.1, the most recent version. p.cargo("check") .with_status(101) .with_stderr( "\ [UPDATING] `alternative` index [ERROR] failed to resolve patches for `https://github.com/rust-lang/crates.io-index` Caused by: patch for `bar` in `https://github.com/rust-lang/crates.io-index` failed to resolve Caused by: patch for `bar` in `registry `alternative`` resolved to more than one candidate Found versions: 0.1.0, 0.1.1 Update the patch definition to select only one package. For example, add an `=` version requirement to the patch definition, such as `version = \"=0.1.1\"`. ", ) .run(); } #[cargo_test] fn no_matches() { // A patch to a location that does not contain the named package. 
let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "0.1" [patch.crates-io] bar = { path = "bar" } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("abc", "0.1.0")) .file("bar/src/lib.rs", "") .build(); p.cargo("check") .with_status(101) .with_stderr( "\ error: failed to resolve patches for `https://github.com/rust-lang/crates.io-index` Caused by: patch for `bar` in `https://github.com/rust-lang/crates.io-index` failed to resolve Caused by: The patch location `[..]/foo/bar` does not appear to contain any packages matching the name `bar`. ", ) .run(); } #[cargo_test] fn mismatched_version() { // A patch to a location that has an old version. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "0.1.1" [patch.crates-io] bar = { path = "bar", version = "0.1.1" } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "") .build(); p.cargo("check") .with_status(101) .with_stderr( "\ [ERROR] failed to resolve patches for `https://github.com/rust-lang/crates.io-index` Caused by: patch for `bar` in `https://github.com/rust-lang/crates.io-index` failed to resolve Caused by: The patch location `[..]/foo/bar` contains a `bar` package with version `0.1.0`, \ but the patch definition requires `^0.1.1`. Check that the version in the patch location is what you expect, \ and update the patch definition to match. ", ) .run(); } #[cargo_test] fn patch_walks_backwards() { // Starting with a locked patch, change the patch so it points to an older version. 
Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "0.1" [patch.crates-io] bar = {path="bar"} "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1")) .file("bar/src/lib.rs", "") .build(); p.cargo("check") .with_stderr( "\ [UPDATING] `dummy-registry` index [CHECKING] bar v0.1.1 ([..]/foo/bar) [CHECKING] foo v0.1.0 ([..]/foo) [FINISHED] [..] ", ) .run(); // Somehow the user changes the version backwards. p.change_file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")); p.cargo("check") .with_stderr( "\ [UPDATING] `dummy-registry` index [CHECKING] bar v0.1.0 ([..]/foo/bar) [CHECKING] foo v0.1.0 ([..]/foo) [FINISHED] [..] ", ) .run(); } #[cargo_test] fn patch_walks_backwards_restricted() { // This is the same as `patch_walks_backwards`, but the patch contains a // `version` qualifier. This is unusual, just checking a strange edge case. Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "0.1" [patch.crates-io] bar = {path="bar", version="0.1.1"} "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1")) .file("bar/src/lib.rs", "") .build(); p.cargo("check") .with_stderr( "\ [UPDATING] `dummy-registry` index [CHECKING] bar v0.1.1 ([..]/foo/bar) [CHECKING] foo v0.1.0 ([..]/foo) [FINISHED] [..] ", ) .run(); // Somehow the user changes the version backwards. p.change_file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")); p.cargo("check") .with_status(101) .with_stderr( "\ error: failed to resolve patches for `https://github.com/rust-lang/crates.io-index` Caused by: patch for `bar` in `https://github.com/rust-lang/crates.io-index` failed to resolve Caused by: The patch location `[..]/foo/bar` contains a `bar` package with version `0.1.0`, but the patch definition requires `^0.1.1`. 
Check that the version in the patch location is what you expect, and update the patch definition to match. ", ) .run(); } #[cargo_test] fn patched_dep_new_version() { // What happens when a patch is locked, and then one of the patched // dependencies needs to be updated. In this case, the baz requirement // gets updated from 0.1.0 to 0.1.1. Package::new("bar", "0.1.0").dep("baz", "0.1.0").publish(); Package::new("baz", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "0.1" [patch.crates-io] bar = {path="bar"} "#, ) .file("src/lib.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" [dependencies] baz = "0.1" "#, ) .file("bar/src/lib.rs", "") .build(); // Lock everything. p.cargo("check") .with_stderr( "\ [UPDATING] `dummy-registry` index [DOWNLOADING] crates ... [DOWNLOADED] baz v0.1.0 [..] [CHECKING] baz v0.1.0 [CHECKING] bar v0.1.0 ([..]/foo/bar) [CHECKING] foo v0.1.0 ([..]/foo) [FINISHED] [..] ", ) .run(); Package::new("baz", "0.1.1").publish(); // Just the presence of the new version should not have changed anything. p.cargo("check").with_stderr("[FINISHED] [..]").run(); // Modify the patch so it requires the new version. p.change_file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" [dependencies] baz = "0.1.1" "#, ); // Should unlock and update cleanly. p.cargo("check") .with_stderr( "\ [UPDATING] `dummy-registry` index [DOWNLOADING] crates ... [DOWNLOADED] baz v0.1.1 (registry `dummy-registry`) [CHECKING] baz v0.1.1 [CHECKING] bar v0.1.0 ([..]/foo/bar) [CHECKING] foo v0.1.0 ([..]/foo) [FINISHED] [..] ", ) .run(); } #[cargo_test] fn patch_update_doesnt_update_other_sources() { // Very extreme edge case, make sure a patch update doesn't update other // sources. 
registry::alt_init(); Package::new("bar", "0.1.0").publish(); Package::new("bar", "0.1.0").alternative(true).publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "0.1" bar_alt = { version = "0.1", registry = "alternative", package = "bar" } [patch.crates-io] bar = { path = "bar" } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "") .build(); p.cargo("check") .with_stderr_unordered( "\ [UPDATING] `dummy-registry` index [UPDATING] `alternative` index [DOWNLOADING] crates ... [DOWNLOADED] bar v0.1.0 (registry `alternative`) [CHECKING] bar v0.1.0 (registry `alternative`) [CHECKING] bar v0.1.0 ([..]/foo/bar) [CHECKING] foo v0.1.0 ([..]/foo) [FINISHED] [..] ", ) .run(); // Publish new versions in both sources. Package::new("bar", "0.1.1").publish(); Package::new("bar", "0.1.1").alternative(true).publish(); // Since it is locked, nothing should change. p.cargo("check").with_stderr("[FINISHED] [..]").run(); // Require new version on crates.io. p.change_file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1")); // This should not update bar_alt. p.cargo("check") .with_stderr( "\ [UPDATING] `dummy-registry` index [CHECKING] bar v0.1.1 ([..]/foo/bar) [CHECKING] foo v0.1.0 ([..]/foo) [FINISHED] [..] ", ) .run(); } #[cargo_test] fn can_update_with_alt_reg() { // A patch to an alt reg can update. registry::alt_init(); Package::new("bar", "0.1.0").publish(); Package::new("bar", "0.1.0").alternative(true).publish(); Package::new("bar", "0.1.1").alternative(true).publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "0.1" [patch.crates-io] bar = { version = "=0.1.1", registry = "alternative" } "#, ) .file("src/lib.rs", "") .build(); p.cargo("check") .with_stderr( "\ [UPDATING] `alternative` index [UPDATING] `dummy-registry` index [DOWNLOADING] crates ... 
[DOWNLOADED] bar v0.1.1 (registry `alternative`) [CHECKING] bar v0.1.1 (registry `alternative`) [CHECKING] foo v0.1.0 ([..]/foo) [FINISHED] [..] ", ) .run(); Package::new("bar", "0.1.2").alternative(true).publish(); // Should remain locked. p.cargo("check").with_stderr("[FINISHED] [..]").run(); // This does nothing, due to `=` requirement. p.cargo("update -p bar") .with_stderr( "\ [UPDATING] `alternative` index [UPDATING] `dummy-registry` index ", ) .run(); // Bump to 0.1.2. p.change_file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "0.1" [patch.crates-io] bar = { version = "=0.1.2", registry = "alternative" } "#, ); p.cargo("check") .with_stderr( "\ [UPDATING] `alternative` index [UPDATING] `dummy-registry` index [DOWNLOADING] crates ... [DOWNLOADED] bar v0.1.2 (registry `alternative`) [CHECKING] bar v0.1.2 (registry `alternative`) [CHECKING] foo v0.1.0 ([..]/foo) [FINISHED] [..] ", ) .run(); } #[cargo_test] fn old_git_patch() { // Example where an old lockfile with an explicit branch="master" in Cargo.toml. Package::new("bar", "1.0.0").publish(); let (bar, bar_repo) = git::new_repo("bar", |p| { p.file("Cargo.toml", &basic_manifest("bar", "1.0.0")) .file("src/lib.rs", "") }); let bar_oid = bar_repo.head().unwrap().target().unwrap(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "1.0" [patch.crates-io] bar = {{ git = "{}", branch = "master" }} "#, bar.url() ), ) .file( "Cargo.lock", &format!( r#" # This file is automatically @generated by Cargo. # It is not intended for manual editing. [[package]] name = "bar" version = "1.0.0" source = "git+{}#{}" [[package]] name = "foo" version = "0.1.0" dependencies = [ "bar", ] "#, bar.url(), bar_oid ), ) .file("src/lib.rs", "") .build(); bar.change_file("Cargo.toml", &basic_manifest("bar", "2.0.0")); git::add(&bar_repo); git::commit(&bar_repo); // This *should* keep the old lock. 
p.cargo("tree") // .env("CARGO_LOG", "trace") .with_stderr( "\ [UPDATING] [..] ", ) // .with_status(1) .with_stdout(format!( "\ foo v0.1.0 [..] └── bar v1.0.0 (file:///[..]branch=master#{}) ", &bar_oid.to_string()[..8] )) .run(); } cargo-0.66.0/tests/testsuite/path.rs000066400000000000000000000675341432416201200174050ustar00rootroot00000000000000//! Tests for `path` dependencies. use cargo_test_support::paths::{self, CargoPathExt}; use cargo_test_support::registry::Package; use cargo_test_support::{basic_lib_manifest, basic_manifest, main_file, project}; use cargo_test_support::{sleep_ms, t}; use std::fs; #[cargo_test] // I have no idea why this is failing spuriously on Windows; // for more info, see #3466. #[cfg(not(windows))] fn cargo_compile_with_nested_deps_shorthand() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] version = "0.5.0" path = "bar" "#, ) .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.baz] version = "0.5.0" path = "baz" [lib] name = "bar" "#, ) .file( "bar/src/bar.rs", r#" extern crate baz; pub fn gimme() -> String { baz::gimme() } "#, ) .file("bar/baz/Cargo.toml", &basic_lib_manifest("baz")) .file( "bar/baz/src/baz.rs", r#" pub fn gimme() -> String { "test passed".to_string() } "#, ) .build(); p.cargo("build") .with_stderr( "[COMPILING] baz v0.5.0 ([CWD]/bar/baz)\n\ [COMPILING] bar v0.5.0 ([CWD]/bar)\n\ [COMPILING] foo v0.5.0 ([CWD])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) \ in [..]\n", ) .run(); assert!(p.bin("foo").is_file()); p.process(&p.bin("foo")).with_stdout("test passed\n").run(); println!("cleaning"); p.cargo("clean -v").with_stdout("").run(); println!("building baz"); p.cargo("build -p baz") .with_stderr( "[COMPILING] baz v0.5.0 ([CWD]/bar/baz)\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) \ 
in [..]\n", ) .run(); println!("building foo"); p.cargo("build -p foo") .with_stderr( "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\ [COMPILING] foo v0.5.0 ([CWD])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) \ in [..]\n", ) .run(); } #[cargo_test] fn cargo_compile_with_root_dev_deps() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dev-dependencies.bar] version = "0.5.0" path = "../bar" [[bin]] name = "foo" "#, ) .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) .build(); let _p2 = project() .at("bar") .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) .file( "src/lib.rs", r#" pub fn gimme() -> &'static str { "zoidberg" } "#, ) .build(); p.cargo("build") .with_status(101) .with_stderr_contains("[..]can't find crate for `bar`") .run(); } #[cargo_test] fn cargo_compile_with_root_dev_deps_with_testing() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dev-dependencies.bar] version = "0.5.0" path = "../bar" [[bin]] name = "foo" "#, ) .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) .build(); let _p2 = project() .at("bar") .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) .file( "src/lib.rs", r#" pub fn gimme() -> &'static str { "zoidberg" } "#, ) .build(); p.cargo("test") .with_stderr( "\ [COMPILING] [..] v0.5.0 ([..]) [COMPILING] [..] v0.5.0 ([..]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] 
(target/debug/deps/foo-[..][EXE])", ) .with_stdout_contains("running 0 tests") .run(); } #[cargo_test] fn cargo_compile_with_transitive_dev_deps() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] version = "0.5.0" path = "bar" "#, ) .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] [dev-dependencies.baz] git = "git://example.com/path/to/nowhere" [lib] name = "bar" "#, ) .file( "bar/src/bar.rs", r#" pub fn gimme() -> &'static str { "zoidberg" } "#, ) .build(); p.cargo("build") .with_stderr( "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\ [COMPILING] foo v0.5.0 ([CWD])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) in \ [..]\n", ) .run(); assert!(p.bin("foo").is_file()); p.process(&p.bin("foo")).with_stdout("zoidberg\n").run(); } #[cargo_test] fn no_rebuild_dependency() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] path = "bar" "#, ) .file("src/main.rs", "extern crate bar; fn main() { bar::bar() }") .file("bar/Cargo.toml", &basic_lib_manifest("bar")) .file("bar/src/bar.rs", "pub fn bar() {}") .build(); // First time around we should compile both foo and bar p.cargo("build") .with_stderr( "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\ [COMPILING] foo v0.5.0 ([CWD])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) \ in [..]\n", ) .run(); sleep_ms(1000); p.change_file( "src/main.rs", r#" extern crate bar; fn main() { bar::bar(); } "#, ); // Don't compile bar, but do recompile foo. 
p.cargo("build") .with_stderr( "[COMPILING] foo v0.5.0 ([..])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) \ in [..]\n", ) .run(); } #[cargo_test] fn deep_dependencies_trigger_rebuild() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] path = "bar" "#, ) .file("src/main.rs", "extern crate bar; fn main() { bar::bar() }") .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] [lib] name = "bar" [dependencies.baz] path = "../baz" "#, ) .file( "bar/src/bar.rs", "extern crate baz; pub fn bar() { baz::baz() }", ) .file("baz/Cargo.toml", &basic_lib_manifest("baz")) .file("baz/src/baz.rs", "pub fn baz() {}") .build(); p.cargo("build") .with_stderr( "[COMPILING] baz v0.5.0 ([CWD]/baz)\n\ [COMPILING] bar v0.5.0 ([CWD]/bar)\n\ [COMPILING] foo v0.5.0 ([CWD])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) \ in [..]\n", ) .run(); p.cargo("build").with_stdout("").run(); // Make sure an update to baz triggers a rebuild of bar // // We base recompilation off mtime, so sleep for at least a second to ensure // that this write will change the mtime. 
sleep_ms(1000); p.change_file("baz/src/baz.rs", r#"pub fn baz() { println!("hello!"); }"#); sleep_ms(1000); p.cargo("build") .with_stderr( "[COMPILING] baz v0.5.0 ([CWD]/baz)\n\ [COMPILING] bar v0.5.0 ([CWD]/bar)\n\ [COMPILING] foo v0.5.0 ([CWD])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) \ in [..]\n", ) .run(); // Make sure an update to bar doesn't trigger baz sleep_ms(1000); p.change_file( "bar/src/bar.rs", r#" extern crate baz; pub fn bar() { println!("hello!"); baz::baz(); } "#, ); sleep_ms(1000); p.cargo("build") .with_stderr( "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\ [COMPILING] foo v0.5.0 ([CWD])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) \ in [..]\n", ) .run(); } #[cargo_test] fn no_rebuild_two_deps() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] path = "bar" [dependencies.baz] path = "baz" "#, ) .file("src/main.rs", "extern crate bar; fn main() { bar::bar() }") .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] [lib] name = "bar" [dependencies.baz] path = "../baz" "#, ) .file("bar/src/bar.rs", "pub fn bar() {}") .file("baz/Cargo.toml", &basic_lib_manifest("baz")) .file("baz/src/baz.rs", "pub fn baz() {}") .build(); p.cargo("build") .with_stderr( "[COMPILING] baz v0.5.0 ([CWD]/baz)\n\ [COMPILING] bar v0.5.0 ([CWD]/bar)\n\ [COMPILING] foo v0.5.0 ([CWD])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) \ in [..]\n", ) .run(); assert!(p.bin("foo").is_file()); p.cargo("build").with_stdout("").run(); assert!(p.bin("foo").is_file()); } #[cargo_test] fn nested_deps_recompile() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] version = "0.5.0" path = "src/bar" "#, ) .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) .file("src/bar/Cargo.toml", &basic_lib_manifest("bar")) 
.file("src/bar/src/bar.rs", "pub fn gimme() -> i32 { 92 }") .build(); p.cargo("build") .with_stderr( "[COMPILING] bar v0.5.0 ([CWD]/src/bar)\n\ [COMPILING] foo v0.5.0 ([CWD])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) \ in [..]\n", ) .run(); sleep_ms(1000); p.change_file("src/main.rs", r#"fn main() {}"#); // This shouldn't recompile `bar` p.cargo("build") .with_stderr( "[COMPILING] foo v0.5.0 ([CWD])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) \ in [..]\n", ) .run(); } #[cargo_test] fn error_message_for_missing_manifest() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] path = "src/bar" "#, ) .file("src/lib.rs", "") .file("src/bar/not-a-manifest", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to get `bar` as a dependency of package `foo v0.5.0 [..]` Caused by: failed to load source for dependency `bar` Caused by: Unable to update [CWD]/src/bar Caused by: failed to read `[..]bar/Cargo.toml` Caused by: [..] 
(os error [..]) ", ) .run(); } #[cargo_test] fn override_relative() { let bar = project() .at("bar") .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) .file("src/lib.rs", "") .build(); fs::create_dir(&paths::root().join(".cargo")).unwrap(); fs::write(&paths::root().join(".cargo/config"), r#"paths = ["bar"]"#).unwrap(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] path = '{}' "#, bar.root().display() ), ) .file("src/lib.rs", "") .build(); p.cargo("build -v").run(); } #[cargo_test] fn override_self() { let bar = project() .at("bar") .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) .file("src/lib.rs", "") .build(); let p = project(); let root = p.root(); let p = p .file(".cargo/config", &format!("paths = ['{}']", root.display())) .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] path = '{}' "#, bar.root().display() ), ) .file("src/lib.rs", "") .file("src/main.rs", "fn main() {}") .build(); p.cargo("build").run(); } #[cargo_test] fn override_path_dep() { let bar = project() .at("bar") .file( "p1/Cargo.toml", r#" [package] name = "p1" version = "0.5.0" authors = [] [dependencies.p2] path = "../p2" "#, ) .file("p1/src/lib.rs", "") .file("p2/Cargo.toml", &basic_manifest("p2", "0.5.0")) .file("p2/src/lib.rs", "") .build(); let p = project() .file( ".cargo/config", &format!( "paths = ['{}', '{}']", bar.root().join("p1").display(), bar.root().join("p2").display() ), ) .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.p2] path = '{}' "#, bar.root().join("p2").display() ), ) .file("src/lib.rs", "") .build(); p.cargo("build -v").run(); } #[cargo_test] fn path_dep_build_cmd() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] version = 
"0.5.0" path = "bar" "#, ) .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] build = "build.rs" [lib] name = "bar" path = "src/bar.rs" "#, ) .file( "bar/build.rs", r#" use std::fs; fn main() { fs::copy("src/bar.rs.in", "src/bar.rs").unwrap(); } "#, ) .file("bar/src/bar.rs.in", "pub fn gimme() -> i32 { 0 }") .build(); p.root().join("bar").move_into_the_past(); p.cargo("build") .with_stderr( "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\ [COMPILING] foo v0.5.0 ([CWD])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) in \ [..]\n", ) .run(); assert!(p.bin("foo").is_file()); p.process(&p.bin("foo")).with_stdout("0\n").run(); // Touching bar.rs.in should cause the `build` command to run again. p.change_file("bar/src/bar.rs.in", "pub fn gimme() -> i32 { 1 }"); p.cargo("build") .with_stderr( "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\ [COMPILING] foo v0.5.0 ([CWD])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) in \ [..]\n", ) .run(); p.process(&p.bin("foo")).with_stdout("1\n").run(); } #[cargo_test] fn dev_deps_no_rebuild_lib() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dev-dependencies.bar] path = "bar" [lib] name = "foo" doctest = false "#, ) .file( "src/lib.rs", r#" #[cfg(test)] #[allow(unused_extern_crates)] extern crate bar; #[cfg(not(test))] pub fn foo() { env!("FOO"); } "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .build(); p.cargo("build") .env("FOO", "bar") .with_stderr( "[COMPILING] foo v0.5.0 ([CWD])\n\ [FINISHED] dev [unoptimized + debuginfo] target(s) \ in [..]\n", ) .run(); p.cargo("test") .with_stderr( "\ [COMPILING] [..] v0.5.0 ([CWD][..]) [COMPILING] [..] v0.5.0 ([CWD][..]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] 
(target/debug/deps/foo-[..][EXE])", ) .with_stdout_contains("running 0 tests") .run(); } #[cargo_test] fn custom_target_no_rebuild() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] a = { path = "a" } [workspace] members = ["a", "b"] "#, ) .file("src/lib.rs", "") .file("a/Cargo.toml", &basic_manifest("a", "0.5.0")) .file("a/src/lib.rs", "") .file( "b/Cargo.toml", r#" [project] name = "b" version = "0.5.0" authors = [] [dependencies] a = { path = "../a" } "#, ) .file("b/src/lib.rs", "") .build(); p.cargo("build") .with_stderr( "\ [COMPILING] a v0.5.0 ([..]) [COMPILING] foo v0.5.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); t!(fs::rename( p.root().join("target"), p.root().join("target_moved") )); p.cargo("build --manifest-path=b/Cargo.toml") .env("CARGO_TARGET_DIR", "target_moved") .with_stderr( "\ [COMPILING] b v0.5.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn override_and_depend() { let p = project() .no_manifest() .file( "a/a1/Cargo.toml", r#" [project] name = "a1" version = "0.5.0" authors = [] [dependencies] a2 = { path = "../a2" } "#, ) .file("a/a1/src/lib.rs", "") .file("a/a2/Cargo.toml", &basic_manifest("a2", "0.5.0")) .file("a/a2/src/lib.rs", "") .file( "b/Cargo.toml", r#" [project] name = "b" version = "0.5.0" authors = [] [dependencies] a1 = { path = "../a/a1" } a2 = { path = "../a/a2" } "#, ) .file("b/src/lib.rs", "") .file("b/.cargo/config", r#"paths = ["../a"]"#) .build(); p.cargo("build") .cwd("b") .with_stderr( "\ [WARNING] skipping duplicate package `a2` found at `[..]` [COMPILING] a2 v0.5.0 ([..]) [COMPILING] a1 v0.5.0 ([..]) [COMPILING] b v0.5.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn missing_path_dependency() { let p = project() .file("Cargo.toml", &basic_manifest("a", "0.5.0")) .file("src/lib.rs", "") .file( ".cargo/config", r#"paths = ["../whoa-this-does-not-exist"]"#, ) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to update path override `[..]../whoa-this-does-not-exist` \ (defined in `[..]`) Caused by: failed to read directory `[..]` Caused by: [..] (os error [..]) ", ) .run(); } #[cargo_test] fn invalid_path_dep_in_workspace_with_lockfile() { Package::new("bar", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "top" version = "0.5.0" authors = [] [workspace] [dependencies] foo = { path = "foo" } "#, ) .file("src/lib.rs", "") .file( "foo/Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] bar = "*" "#, ) .file("foo/src/lib.rs", "") .build(); // Generate a lock file p.cargo("build").run(); // Change the dependency on `bar` to an invalid path p.change_file( "foo/Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] bar = { path = "" } "#, ); // Make sure we get a nice error. In the past this actually stack // overflowed! p.cargo("build") .with_status(101) .with_stderr( "\ error: no matching package found searched package name: `bar` perhaps you meant: foo location searched: [..] 
required by package `foo v0.5.0 ([..])` ", ) .run(); } #[cargo_test] fn workspace_produces_rlib() { let p = project() .file( "Cargo.toml", r#" [project] name = "top" version = "0.5.0" authors = [] [workspace] [dependencies] foo = { path = "foo" } "#, ) .file("src/lib.rs", "") .file("foo/Cargo.toml", &basic_manifest("foo", "0.5.0")) .file("foo/src/lib.rs", "") .build(); p.cargo("build").run(); assert!(p.root().join("target/debug/libtop.rlib").is_file()); assert!(!p.root().join("target/debug/libfoo.rlib").is_file()); } #[cargo_test] fn deep_path_error() { // Test for an error loading a path deep in the dependency graph. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] a = {path="a"} "#, ) .file("src/lib.rs", "") .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.1.0" [dependencies] b = {path="../b"} "#, ) .file("a/src/lib.rs", "") .file( "b/Cargo.toml", r#" [package] name = "b" version = "0.1.0" [dependencies] c = {path="../c"} "#, ) .file("b/src/lib.rs", "") .build(); p.cargo("check") .with_status(101) .with_stderr( "\ [ERROR] failed to get `c` as a dependency of package `b v0.1.0 [..]` ... which satisfies path dependency `b` of package `a v0.1.0 [..]` ... which satisfies path dependency `a` of package `foo v0.1.0 [..]` Caused by: failed to load source for dependency `c` Caused by: Unable to update [..]/foo/c Caused by: failed to read `[..]/foo/c/Cargo.toml` Caused by: [..] ", ) .run(); } #[cargo_test] fn catch_tricky_cycle() { let p = project() .file( "Cargo.toml", r#" [package] name = "message" version = "0.1.0" [dev-dependencies] test = { path = "test" } "#, ) .file("src/lib.rs", "") .file( "tangle/Cargo.toml", r#" [package] name = "tangle" version = "0.1.0" [dependencies] message = { path = ".." 
} snapshot = { path = "../snapshot" } "#, ) .file("tangle/src/lib.rs", "") .file( "snapshot/Cargo.toml", r#" [package] name = "snapshot" version = "0.1.0" [dependencies] ledger = { path = "../ledger" } "#, ) .file("snapshot/src/lib.rs", "") .file( "ledger/Cargo.toml", r#" [package] name = "ledger" version = "0.1.0" [dependencies] tangle = { path = "../tangle" } "#, ) .file("ledger/src/lib.rs", "") .file( "test/Cargo.toml", r#" [package] name = "test" version = "0.1.0" [dependencies] snapshot = { path = "../snapshot" } "#, ) .file("test/src/lib.rs", "") .build(); p.cargo("test") .with_stderr_contains("[..]cyclic package dependency[..]") .with_status(101) .run(); } cargo-0.66.0/tests/testsuite/paths.rs000066400000000000000000000131041432416201200175500ustar00rootroot00000000000000//! Tests for `paths` overrides. use cargo_test_support::registry::Package; use cargo_test_support::{basic_manifest, project}; #[cargo_test] fn broken_path_override_warns() { Package::new("bar", "0.1.0").publish(); Package::new("bar", "0.2.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] a = { path = "a1" } "#, ) .file("src/lib.rs", "") .file( "a1/Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] [dependencies] bar = "0.1" "#, ) .file("a1/src/lib.rs", "") .file( "a2/Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] [dependencies] bar = "0.2" "#, ) .file("a2/src/lib.rs", "") .file(".cargo/config", r#"paths = ["a2"]"#) .build(); p.cargo("build") .with_stderr( "\ [UPDATING] [..] warning: path override for crate `a` has altered the original list of dependencies; the dependency on `bar` was either added or modified to not match the previously resolved version This is currently allowed but is known to produce buggy behavior with spurious recompiles and changes to the crate graph. 
Path overrides unfortunately were never intended to support this feature, so for now this message is just a warning. In the future, however, this message will become a hard error. To change the dependency graph via an override it's recommended to use the `[patch]` feature of Cargo instead of the path override feature. This is documented online at the url below for more information. https://doc.rust-lang.org/cargo/reference/overriding-dependencies.html [DOWNLOADING] crates ... [DOWNLOADED] [..] [COMPILING] [..] [COMPILING] [..] [COMPILING] [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn override_to_path_dep() { Package::new("bar", "0.1.0").dep("baz", "0.1").publish(); Package::new("baz", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" "#, ) .file("src/lib.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [dependencies] baz = { path = "baz" } "#, ) .file("bar/src/lib.rs", "") .file("bar/baz/Cargo.toml", &basic_manifest("baz", "0.0.1")) .file("bar/baz/src/lib.rs", "") .file(".cargo/config", r#"paths = ["bar"]"#) .build(); p.cargo("build").run(); } #[cargo_test] fn paths_ok_with_optional() { Package::new("baz", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = { path = "bar" } "#, ) .file("src/lib.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] [dependencies] baz = { version = "0.1", optional = true } "#, ) .file("bar/src/lib.rs", "") .file( "bar2/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] [dependencies] baz = { version = "0.1", optional = true } "#, ) .file("bar2/src/lib.rs", "") .file(".cargo/config", r#"paths = ["bar2"]"#) .build(); p.cargo("build") .with_stderr( "\ [COMPILING] bar v0.1.0 ([..]bar2) [COMPILING] foo v0.0.1 ([..]) [FINISHED] [..] 
", ) .run(); } #[cargo_test] fn paths_add_optional_bad() { Package::new("baz", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = { path = "bar" } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "") .file( "bar2/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] [dependencies] baz = { version = "0.1", optional = true } "#, ) .file("bar2/src/lib.rs", "") .file(".cargo/config", r#"paths = ["bar2"]"#) .build(); p.cargo("build") .with_stderr_contains( "\ warning: path override for crate `bar` has altered the original list of dependencies; the dependency on `baz` was either added or\ ", ) .run(); } cargo-0.66.0/tests/testsuite/pkgid.rs000066400000000000000000000057371432416201200175440ustar00rootroot00000000000000//! Tests for the `cargo pkgid` command. use cargo_test_support::project; use cargo_test_support::registry::Package; #[cargo_test] fn simple() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [dependencies] bar = "0.1.0" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("generate-lockfile").run(); p.cargo("pkgid foo") .with_stdout(format!("file://[..]{}#0.1.0", p.root().to_str().unwrap())) .run(); p.cargo("pkgid bar") .with_stdout("https://github.com/rust-lang/crates.io-index#bar@0.1.0") .run(); } #[cargo_test] fn suggestion_bad_pkgid() { Package::new("crates-io", "0.1.0").publish(); Package::new("two-ver", "0.1.0").publish(); Package::new("two-ver", "0.2.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [dependencies] crates-io = "0.1.0" two-ver = "0.1.0" two-ver2 = { package = "two-ver", version = "0.2.0" } "#, ) .file("src/lib.rs", "") .file("cratesio", "") .build(); p.cargo("generate-lockfile").run(); // Bad URL. 
p.cargo("pkgid https://example.com/crates-io") .with_status(101) .with_stderr( "\ error: package ID specification `https://example.com/crates-io` did not match any packages Did you mean one of these? crates-io@0.1.0 ", ) .run(); // Bad name. p.cargo("pkgid crates_io") .with_status(101) .with_stderr( "\ error: package ID specification `crates_io` did not match any packages Did you mean `crates-io`? ", ) .run(); // Bad version. p.cargo("pkgid two-ver:0.3.0") .with_status(101) .with_stderr( "\ error: package ID specification `two-ver@0.3.0` did not match any packages Did you mean one of these? two-ver@0.1.0 two-ver@0.2.0 ", ) .run(); // Bad file URL. p.cargo("pkgid ./Cargo.toml") .with_status(101) .with_stderr( "\ error: invalid package ID specification: `./Cargo.toml` Caused by: package ID specification `./Cargo.toml` looks like a file path, maybe try file://[..]/Cargo.toml ", ) .run(); // Bad file URL with similar name. p.cargo("pkgid './cratesio'") .with_status(101) .with_stderr( "\ error: invalid package ID specification: `./cratesio` Did you mean `crates-io`? Caused by: package ID specification `./cratesio` looks like a file path, maybe try file://[..]/cratesio ", ) .run(); } cargo-0.66.0/tests/testsuite/plugins.rs000066400000000000000000000237071432416201200201240ustar00rootroot00000000000000//! Tests for rustc plugins. 
use cargo_test_support::rustc_host; use cargo_test_support::{basic_manifest, project}; #[cargo_test(nightly, reason = "plugins are unstable")] fn plugin_to_the_max() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [lib] name = "foo_lib" [dependencies.bar] path = "../bar" "#, ) .file( "src/main.rs", r#" #![feature(plugin)] #![plugin(bar)] extern crate foo_lib; fn main() { foo_lib::foo(); } "#, ) .file( "src/foo_lib.rs", r#" #![feature(plugin)] #![plugin(bar)] pub fn foo() {} "#, ) .build(); let _bar = project() .at("bar") .file( "Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [lib] name = "bar" plugin = true [dependencies.baz] path = "../baz" "#, ) .file( "src/lib.rs", r#" #![feature(rustc_private)] extern crate baz; extern crate rustc_driver; use rustc_driver::plugin::Registry; #[no_mangle] pub fn __rustc_plugin_registrar(_reg: &mut Registry) { println!("{}", baz::baz()); } "#, ) .build(); let _baz = project() .at("baz") .file( "Cargo.toml", r#" [package] name = "baz" version = "0.0.1" authors = [] [lib] name = "baz" crate_type = ["dylib"] "#, ) .file("src/lib.rs", "pub fn baz() -> i32 { 1 }") .build(); foo.cargo("build").run(); foo.cargo("doc").run(); } #[cargo_test(nightly, reason = "plugins are unstable")] fn plugin_with_dynamic_native_dependency() { let build = project() .at("builder") .file( "Cargo.toml", r#" [package] name = "builder" version = "0.0.1" authors = [] [lib] name = "builder" crate-type = ["dylib"] "#, ) .file("src/lib.rs", "#[no_mangle] pub extern fn foo() {}") .build(); let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" "#, ) .file( "src/main.rs", r#" #![feature(plugin)] #![plugin(bar)] fn main() {} "#, ) .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] build = 'build.rs' [lib] name = "bar" plugin = true "#, ) .file( "bar/build.rs", r#" use std::env; use 
std::fs; use std::path::PathBuf; fn main() { let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap()); let root = PathBuf::from(env::var("BUILDER_ROOT").unwrap()); let file = format!("{}builder{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX); let src = root.join(&file); let dst = out_dir.join(&file); fs::copy(src, dst).unwrap(); if cfg!(target_env = "msvc") { fs::copy(root.join("builder.dll.lib"), out_dir.join("builder.dll.lib")).unwrap(); } println!("cargo:rustc-flags=-L {}", out_dir.display()); } "#, ) .file( "bar/src/lib.rs", r#" #![feature(rustc_private)] extern crate rustc_driver; use rustc_driver::plugin::Registry; #[cfg_attr(not(target_env = "msvc"), link(name = "builder"))] #[cfg_attr(target_env = "msvc", link(name = "builder.dll"))] extern { fn foo(); } #[no_mangle] pub fn __rustc_plugin_registrar(_reg: &mut Registry) { unsafe { foo() } } "#, ) .build(); build.cargo("build").run(); let root = build.root().join("target").join("debug"); foo.cargo("build -v").env("BUILDER_ROOT", root).run(); } #[cargo_test] fn plugin_integration() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" [lib] name = "foo" plugin = true doctest = false "#, ) .file("build.rs", "fn main() {}") .file("src/lib.rs", "") .file("tests/it_works.rs", "") .build(); p.cargo("test -v").run(); } #[cargo_test] fn doctest_a_plugin() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = { path = "bar" } "#, ) .file("src/lib.rs", "#[macro_use] extern crate bar;") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [lib] name = "bar" plugin = true "#, ) .file("bar/src/lib.rs", "pub fn bar() {}") .build(); p.cargo("test -v").run(); } // See #1515 #[cargo_test] fn native_plugin_dependency_with_custom_linker() { let target = rustc_host(); let _foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = 
[] [lib] plugin = true "#, ) .file("src/lib.rs", "") .build(); let bar = project() .at("bar") .file( "Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [dependencies.foo] path = "../foo" "#, ) .file("src/lib.rs", "") .file( ".cargo/config", &format!( r#" [target.{}] linker = "nonexistent-linker" "#, target ), ) .build(); bar.cargo("build --verbose") .with_status(101) .with_stderr_contains( "\ [COMPILING] foo v0.0.1 ([..]) [RUNNING] `rustc [..] -C linker=nonexistent-linker [..]` [ERROR] [..]linker[..] ", ) .run(); } #[cargo_test(nightly, reason = "requires rustc_private")] fn panic_abort_plugins() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [profile.dev] panic = 'abort' [dependencies] bar = { path = "bar" } "#, ) .file("src/lib.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [lib] plugin = true "#, ) .file( "bar/src/lib.rs", r#" #![feature(rustc_private)] extern crate rustc_ast; "#, ) .build(); p.cargo("build").run(); } #[cargo_test(nightly, reason = "requires rustc_private")] fn shared_panic_abort_plugins() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [profile.dev] panic = 'abort' [dependencies] bar = { path = "bar" } baz = { path = "baz" } "#, ) .file("src/lib.rs", "extern crate baz;") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [lib] plugin = true [dependencies] baz = { path = "../baz" } "#, ) .file( "bar/src/lib.rs", r#" #![feature(rustc_private)] extern crate rustc_ast; extern crate baz; "#, ) .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1")) .file("baz/src/lib.rs", "") .build(); p.cargo("build -v").run(); } cargo-0.66.0/tests/testsuite/proc_macro.rs000066400000000000000000000327261432416201200205700ustar00rootroot00000000000000//! Tests for proc-macros. 
use cargo_test_support::project; #[cargo_test] fn probe_cfg_before_crate_type_discovery() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [target.'cfg(not(stage300))'.dependencies.noop] path = "../noop" "#, ) .file( "src/main.rs", r#" #[macro_use] extern crate noop; #[derive(Noop)] struct X; fn main() {} "#, ) .build(); let _noop = project() .at("noop") .file( "Cargo.toml", r#" [package] name = "noop" version = "0.0.1" authors = [] [lib] proc-macro = true "#, ) .file( "src/lib.rs", r#" extern crate proc_macro; use proc_macro::TokenStream; #[proc_macro_derive(Noop)] pub fn noop(_input: TokenStream) -> TokenStream { "".parse().unwrap() } "#, ) .build(); p.cargo("build").run(); } #[cargo_test] fn noop() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.noop] path = "../noop" "#, ) .file( "src/main.rs", r#" #[macro_use] extern crate noop; #[derive(Noop)] struct X; fn main() {} "#, ) .build(); let _noop = project() .at("noop") .file( "Cargo.toml", r#" [package] name = "noop" version = "0.0.1" authors = [] [lib] proc-macro = true "#, ) .file( "src/lib.rs", r#" extern crate proc_macro; use proc_macro::TokenStream; #[proc_macro_derive(Noop)] pub fn noop(_input: TokenStream) -> TokenStream { "".parse().unwrap() } "#, ) .build(); p.cargo("build").run(); p.cargo("build").run(); } #[cargo_test] fn impl_and_derive() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.transmogrify] path = "../transmogrify" "#, ) .file( "src/main.rs", r#" #[macro_use] extern crate transmogrify; trait ImplByTransmogrify { fn impl_by_transmogrify(&self) -> bool; } #[derive(Transmogrify, Debug)] struct X { success: bool } fn main() { let x = X::new(); assert!(x.impl_by_transmogrify()); println!("{:?}", x); } "#, ) .build(); let _transmogrify = project() .at("transmogrify") .file( "Cargo.toml", r#" [package] name = 
"transmogrify" version = "0.0.1" authors = [] [lib] proc-macro = true "#, ) .file( "src/lib.rs", r#" extern crate proc_macro; use proc_macro::TokenStream; #[proc_macro_derive(Transmogrify)] #[doc(hidden)] pub fn transmogrify(input: TokenStream) -> TokenStream { " impl X { fn new() -> Self { X { success: true } } } impl ImplByTransmogrify for X { fn impl_by_transmogrify(&self) -> bool { true } } ".parse().unwrap() } "#, ) .build(); p.cargo("build").run(); p.cargo("run").with_stdout("X { success: true }").run(); } #[cargo_test(nightly, reason = "plugins are unstable")] fn plugin_and_proc_macro() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [lib] plugin = true proc-macro = true "#, ) .file( "src/lib.rs", r#" #![feature(rustc_private)] #![feature(proc_macro, proc_macro_lib)] extern crate rustc_driver; use rustc_driver::plugin::Registry; extern crate proc_macro; use proc_macro::TokenStream; #[no_mangle] pub fn __rustc_plugin_registrar(reg: &mut Registry) {} #[proc_macro_derive(Questionable)] pub fn questionable(input: TokenStream) -> TokenStream { input } "#, ) .build(); let msg = " `lib.plugin` and `lib.proc-macro` cannot both be `true`"; p.cargo("build") .with_status(101) .with_stderr_contains(msg) .run(); } #[cargo_test] fn proc_macro_doctest() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [lib] proc-macro = true "#, ) .file( "src/lib.rs", r#" #![crate_type = "proc-macro"] extern crate proc_macro; use proc_macro::TokenStream; /// ``` /// assert!(true); /// ``` #[proc_macro_derive(Bar)] pub fn derive(_input: TokenStream) -> TokenStream { "".parse().unwrap() } #[test] fn a() { assert!(true); } "#, ) .build(); foo.cargo("test") .with_stdout_contains("test a ... ok") .with_stdout_contains_n("test [..] ... 
ok", 2) .run(); } #[cargo_test] fn proc_macro_crate_type() { // Verify that `crate-type = ["proc-macro"]` is the same as `proc-macro = true` // and that everything, including rustdoc, works correctly. let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] pm = { path = "pm" } "#, ) .file( "src/lib.rs", r#" //! ``` //! use foo::THING; //! assert_eq!(THING, 123); //! ``` #[macro_use] extern crate pm; #[derive(MkItem)] pub struct S; #[cfg(test)] mod tests { use super::THING; #[test] fn it_works() { assert_eq!(THING, 123); } } "#, ) .file( "pm/Cargo.toml", r#" [package] name = "pm" version = "0.1.0" [lib] crate-type = ["proc-macro"] "#, ) .file( "pm/src/lib.rs", r#" extern crate proc_macro; use proc_macro::TokenStream; #[proc_macro_derive(MkItem)] pub fn mk_item(_input: TokenStream) -> TokenStream { "pub const THING: i32 = 123;".parse().unwrap() } "#, ) .build(); foo.cargo("test") .with_stdout_contains("test tests::it_works ... ok") .with_stdout_contains_n("test [..] ... 
ok", 2) .run(); } #[cargo_test] fn proc_macro_crate_type_warning() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [lib] crate-type = ["proc-macro"] "#, ) .file("src/lib.rs", "") .build(); foo.cargo("build") .with_stderr_contains( "[WARNING] library `foo` should only specify `proc-macro = true` instead of setting `crate-type`") .run(); } #[cargo_test] fn proc_macro_conflicting_warning() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [lib] proc-macro = false proc_macro = true "#, ) .file("src/lib.rs", "") .build(); foo.cargo("build") .with_stderr_contains( "[WARNING] conflicting between `proc-macro` and `proc_macro` in the `foo` library target.\n `proc_macro` is ignored and not recommended for use in the future", ) .run(); } #[cargo_test] fn proc_macro_crate_type_warning_plugin() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [lib] crate-type = ["proc-macro"] plugin = true "#, ) .file("src/lib.rs", "") .build(); foo.cargo("build") .with_stderr_contains( "[WARNING] proc-macro library `foo` should not specify `plugin = true`") .with_stderr_contains( "[WARNING] library `foo` should only specify `proc-macro = true` instead of setting `crate-type`") .run(); } #[cargo_test] fn proc_macro_crate_type_multiple() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [lib] crate-type = ["proc-macro", "rlib"] "#, ) .file("src/lib.rs", "") .build(); foo.cargo("build") .with_stderr( "\ [ERROR] failed to parse manifest at `[..]/foo/Cargo.toml` Caused by: cannot mix `proc-macro` crate type with others ", ) .with_status(101) .run(); } #[cargo_test] fn proc_macro_extern_prelude() { // Check that proc_macro is in the extern prelude. 
let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [lib] proc-macro = true "#, ) .file( "src/lib.rs", r#" use proc_macro::TokenStream; #[proc_macro] pub fn foo(input: TokenStream) -> TokenStream { "".parse().unwrap() } "#, ) .build(); p.cargo("test").run(); p.cargo("doc").run(); } #[cargo_test] fn proc_macro_built_once() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ['a', 'b'] resolver = "2" "#, ) .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.1.0" [build-dependencies] the-macro = { path = '../the-macro' } "#, ) .file("a/build.rs", "fn main() {}") .file("a/src/main.rs", "fn main() {}") .file( "b/Cargo.toml", r#" [package] name = "b" version = "0.1.0" [dependencies] the-macro = { path = '../the-macro', features = ['a'] } "#, ) .file("b/src/main.rs", "fn main() {}") .file( "the-macro/Cargo.toml", r#" [package] name = "the-macro" version = "0.1.0" [lib] proc_macro = true [features] a = [] "#, ) .file("the-macro/src/lib.rs", "") .build(); p.cargo("build --verbose") .with_stderr_unordered( "\ [COMPILING] the-macro [..] [RUNNING] `rustc --crate-name the_macro [..]` [COMPILING] b [..] [RUNNING] `rustc --crate-name b [..]` [COMPILING] a [..] [RUNNING] `rustc --crate-name build_script_build [..]` [RUNNING] `[..]build[..]script[..]build[..]` [RUNNING] `rustc --crate-name a [..]` [FINISHED] [..] ", ) .run(); } cargo-0.66.0/tests/testsuite/profile_config.rs000066400000000000000000000303571432416201200214270ustar00rootroot00000000000000//! Tests for profiles defined in config files. 
use cargo_test_support::paths::CargoPathExt; use cargo_test_support::registry::Package; use cargo_test_support::{basic_lib_manifest, paths, project}; #[cargo_test] fn profile_config_validate_warnings() { let p = project() .file("Cargo.toml", &basic_lib_manifest("foo")) .file("src/lib.rs", "") .file( ".cargo/config", r#" [profile.test] opt-level = 3 [profile.asdf] opt-level = 3 [profile.dev] bad-key = true [profile.dev.build-override] bad-key-bo = true [profile.dev.package.bar] bad-key-bar = true "#, ) .build(); p.cargo("build") .with_stderr_unordered( "\ [WARNING] unused config key `profile.dev.bad-key` in `[..].cargo/config` [WARNING] unused config key `profile.dev.package.bar.bad-key-bar` in `[..].cargo/config` [WARNING] unused config key `profile.dev.build-override.bad-key-bo` in `[..].cargo/config` [COMPILING] foo [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn profile_config_error_paths() { // Errors in config show where the error is located. let p = project() .file("Cargo.toml", &basic_lib_manifest("foo")) .file("src/lib.rs", "") .file( ".cargo/config", r#" [profile.dev] opt-level = 3 "#, ) .file( paths::home().join(".cargo/config"), r#" [profile.dev] rpath = "foo" "#, ) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] error in [..]/foo/.cargo/config: could not load config key `profile.dev` Caused by: error in [..]/home/.cargo/config: `profile.dev.rpath` expected true/false, but found a string ", ) .run(); } #[cargo_test] fn profile_config_validate_errors() { let p = project() .file("Cargo.toml", &basic_lib_manifest("foo")) .file("src/lib.rs", "") .file( ".cargo/config", r#" [profile.dev.package.foo] panic = "abort" "#, ) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] config profile `dev` is not valid (defined in `[..]/foo/.cargo/config`) Caused by: `panic` may not be specified in a `package` profile ", ) .run(); } #[cargo_test] fn profile_config_syntax_errors() { let p = project() .file("Cargo.toml", 
&basic_lib_manifest("foo")) .file("src/lib.rs", "") .file( ".cargo/config", r#" [profile.dev] codegen-units = "foo" "#, ) .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] error in [..]/.cargo/config: could not load config key `profile.dev` Caused by: error in [..]/foo/.cargo/config: `profile.dev.codegen-units` expected an integer, but found a string ", ) .run(); } #[cargo_test] fn profile_config_override_spec_multiple() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [dependencies] bar = { path = "bar" } "#, ) .file( ".cargo/config", r#" [profile.dev.package.bar] opt-level = 3 [profile.dev.package."bar:0.5.0"] opt-level = 3 "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_lib_manifest("bar")) .file("bar/src/lib.rs", "") .build(); // Unfortunately this doesn't tell you which file, hopefully it's not too // much of a problem. p.cargo("build -v") .with_status(101) .with_stderr( "\ [ERROR] multiple package overrides in profile `dev` match package `bar v0.5.0 ([..])` found package specs: bar, bar@0.5.0", ) .run(); } #[cargo_test] fn profile_config_all_options() { // Ensure all profile options are supported. let p = project() .file("src/main.rs", "fn main() {}") .file( ".cargo/config", r#" [profile.release] opt-level = 1 debug = true debug-assertions = true overflow-checks = false rpath = true lto = true codegen-units = 2 panic = "abort" incremental = true "#, ) .build(); p.cargo("build --release -v") .env_remove("CARGO_INCREMENTAL") .with_stderr( "\ [COMPILING] foo [..] [RUNNING] `rustc --crate-name foo [..] \ -C opt-level=1 \ -C panic=abort \ -C lto[..]\ -C codegen-units=2 \ -C debuginfo=2 \ -C debug-assertions=on \ -C overflow-checks=off [..]\ -C rpath [..]\ -C incremental=[..] [FINISHED] release [optimized + debuginfo] [..] ", ) .run(); } #[cargo_test] fn profile_config_override_precedence() { // Config values take precedence over manifest values. 
let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [dependencies] bar = {path = "bar"} [profile.dev] codegen-units = 2 [profile.dev.package.bar] opt-level = 3 "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_lib_manifest("bar")) .file("bar/src/lib.rs", "") .file( ".cargo/config", r#" [profile.dev.package.bar] opt-level = 2 "#, ) .build(); p.cargo("build -v") .with_stderr( "\ [COMPILING] bar [..] [RUNNING] `rustc --crate-name bar [..] -C opt-level=2[..]-C codegen-units=2 [..] [COMPILING] foo [..] [RUNNING] `rustc --crate-name foo [..]-C codegen-units=2 [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", ) .run(); } #[cargo_test] fn profile_config_no_warn_unknown_override() { let p = project() .file("Cargo.toml", &basic_lib_manifest("foo")) .file("src/lib.rs", "") .file( ".cargo/config", r#" [profile.dev.package.bar] codegen-units = 4 "#, ) .build(); p.cargo("build") .with_stderr_does_not_contain("[..]warning[..]") .run(); } #[cargo_test] fn profile_config_mixed_types() { let p = project() .file("Cargo.toml", &basic_lib_manifest("foo")) .file("src/lib.rs", "") .file( ".cargo/config", r#" [profile.dev] opt-level = 3 "#, ) .file( paths::home().join(".cargo/config"), r#" [profile.dev] opt-level = 's' "#, ) .build(); p.cargo("build -v") .with_stderr_contains("[..]-C opt-level=3 [..]") .run(); } #[cargo_test] fn named_config_profile() { // Exercises config named profies. 
// foo -> middle -> bar -> dev // middle exists in Cargo.toml, the others in .cargo/config use super::config::ConfigBuilder; use cargo::core::compiler::CompileKind; use cargo::core::profiles::{Profiles, UnitFor}; use cargo::core::{PackageId, Workspace}; use cargo::util::interning::InternedString; use std::fs; paths::root().join(".cargo").mkdir_p(); fs::write( paths::root().join(".cargo/config"), r#" [profile.foo] inherits = "middle" codegen-units = 2 [profile.foo.build-override] codegen-units = 6 [profile.foo.package.dep] codegen-units = 7 [profile.middle] inherits = "bar" codegen-units = 3 [profile.bar] inherits = "dev" codegen-units = 4 debug = 1 "#, ) .unwrap(); fs::write( paths::root().join("Cargo.toml"), r#" [workspace] [profile.middle] inherits = "bar" codegen-units = 1 opt-level = 1 [profile.middle.package.dep] overflow-checks = false [profile.foo.build-override] codegen-units = 5 debug-assertions = false [profile.foo.package.dep] codegen-units = 8 "#, ) .unwrap(); let config = ConfigBuilder::new().build(); let profile_name = InternedString::new("foo"); let ws = Workspace::new(&paths::root().join("Cargo.toml"), &config).unwrap(); let profiles = Profiles::new(&ws, profile_name).unwrap(); let crates_io = cargo::core::source::SourceId::crates_io(&config).unwrap(); let a_pkg = PackageId::new("a", "0.1.0", crates_io).unwrap(); let dep_pkg = PackageId::new("dep", "0.1.0", crates_io).unwrap(); // normal package let kind = CompileKind::Host; let p = profiles.get_profile(a_pkg, true, true, UnitFor::new_normal(kind), kind); assert_eq!(p.name, "foo"); assert_eq!(p.codegen_units, Some(2)); // "foo" from config assert_eq!(p.opt_level, "1"); // "middle" from manifest assert_eq!(p.debuginfo, Some(1)); // "bar" from config assert_eq!(p.debug_assertions, true); // "dev" built-in (ignore build-override) assert_eq!(p.overflow_checks, true); // "dev" built-in (ignore package override) // build-override let bo = profiles.get_profile(a_pkg, true, true, UnitFor::new_host(false, 
kind), kind); assert_eq!(bo.name, "foo"); assert_eq!(bo.codegen_units, Some(6)); // "foo" build override from config assert_eq!(bo.opt_level, "0"); // default to zero assert_eq!(bo.debuginfo, Some(1)); // SAME as normal assert_eq!(bo.debug_assertions, false); // "foo" build override from manifest assert_eq!(bo.overflow_checks, true); // SAME as normal // package overrides let po = profiles.get_profile(dep_pkg, false, true, UnitFor::new_normal(kind), kind); assert_eq!(po.name, "foo"); assert_eq!(po.codegen_units, Some(7)); // "foo" package override from config assert_eq!(po.opt_level, "1"); // SAME as normal assert_eq!(po.debuginfo, Some(1)); // SAME as normal assert_eq!(po.debug_assertions, true); // SAME as normal assert_eq!(po.overflow_checks, false); // "middle" package override from manifest } #[cargo_test] fn named_env_profile() { // Environment variables used to define a named profile. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build -v --profile=other") .env("CARGO_PROFILE_OTHER_CODEGEN_UNITS", "1") .env("CARGO_PROFILE_OTHER_INHERITS", "dev") .with_stderr_contains("[..]-C codegen-units=1 [..]") .run(); } #[cargo_test] fn test_with_dev_profile() { // The `test` profile inherits from `dev` for both local crates and // dependencies. Package::new("somedep", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] somedep = "1.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("test --lib --no-run -v") .env("CARGO_PROFILE_DEV_DEBUG", "0") .with_stderr( "\ [UPDATING] [..] [DOWNLOADING] [..] [DOWNLOADED] [..] [COMPILING] somedep v1.0.0 [RUNNING] `rustc --crate-name somedep [..]-C debuginfo=0[..] [COMPILING] foo v0.1.0 [..] [RUNNING] `rustc --crate-name foo [..]-C debuginfo=0[..] [FINISHED] [..] 
[EXECUTABLE] `[..]/target/debug/deps/foo-[..][EXE]` ", ) .run(); } cargo-0.66.0/tests/testsuite/profile_custom.rs000066400000000000000000000426361432416201200214770ustar00rootroot00000000000000//! Tests for named profiles. use cargo_test_support::paths::CargoPathExt; use cargo_test_support::{basic_lib_manifest, project}; #[cargo_test] fn inherits_on_release() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [profile.release] inherits = "dev" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] `inherits` must not be specified in root profile `release` ", ) .run(); } #[cargo_test] fn missing_inherits() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [profile.release-lto] codegen-units = 7 "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] profile `release-lto` is missing an `inherits` directive \ (`inherits` is required for all profiles except `dev` or `release`) ", ) .run(); } #[cargo_test] fn invalid_profile_name() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [profile.'.release-lto'] inherits = "release" codegen-units = 7 "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at [..] Caused by: invalid character `.` in profile name `.release-lto` Allowed characters are letters, numbers, underscore, and hyphen. ", ) .run(); } #[cargo_test] // We are currently uncertain if dir-name will ever be exposed to the user. // The code for it still roughly exists, but only for the internal profiles. // This test was kept in case we ever want to enable support for it again. 
#[ignore = "dir-name is disabled"] fn invalid_dir_name() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [profile.'release-lto'] inherits = "release" dir-name = ".subdir" codegen-units = 7 "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at [..] Caused by: Invalid character `.` in dir-name: `.subdir`", ) .run(); } #[cargo_test] fn dir_name_disabled() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [profile.release-lto] inherits = "release" dir-name = "lto" lto = true "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[ROOT]/foo/Cargo.toml` Caused by: dir-name=\"lto\" in profile `release-lto` is not currently allowed, \ directory names are tied to the profile name for custom profiles ", ) .run(); } #[cargo_test] fn invalid_inherits() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [profile.'release-lto'] inherits = ".release" codegen-units = 7 "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "error: profile `release-lto` inherits from `.release`, \ but that profile is not defined", ) .run(); } #[cargo_test] fn non_existent_inherits() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [profile.release-lto] codegen-units = 7 inherits = "non-existent" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] profile `release-lto` inherits from `non-existent`, but that profile is not defined ", ) .run(); } #[cargo_test] fn self_inherits() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [profile.release-lto] codegen-units = 7 inherits = "release-lto" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") 
.with_status(101) .with_stderr( "\ [ERROR] profile inheritance loop detected with profile `release-lto` inheriting `release-lto` ", ) .run(); } #[cargo_test] fn inherits_loop() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [profile.release-lto] codegen-units = 7 inherits = "release-lto2" [profile.release-lto2] codegen-units = 7 inherits = "release-lto" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] profile inheritance loop detected with profile `release-lto2` inheriting `release-lto` ", ) .run(); } #[cargo_test] fn overrides_with_custom() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] xxx = {path = "xxx"} yyy = {path = "yyy"} [profile.dev] codegen-units = 7 [profile.dev.package.xxx] codegen-units = 5 [profile.dev.package.yyy] codegen-units = 3 [profile.other] inherits = "dev" codegen-units = 2 [profile.other.package.yyy] codegen-units = 6 "#, ) .file("src/lib.rs", "") .file("xxx/Cargo.toml", &basic_lib_manifest("xxx")) .file("xxx/src/lib.rs", "") .file("yyy/Cargo.toml", &basic_lib_manifest("yyy")) .file("yyy/src/lib.rs", "") .build(); // profile overrides are inherited between profiles using inherits and have a // higher priority than profile options provided by custom profiles p.cargo("build -v") .with_stderr_unordered( "\ [COMPILING] xxx [..] [COMPILING] yyy [..] [COMPILING] foo [..] [RUNNING] `rustc --crate-name xxx [..] -C codegen-units=5 [..]` [RUNNING] `rustc --crate-name yyy [..] -C codegen-units=3 [..]` [RUNNING] `rustc --crate-name foo [..] -C codegen-units=7 [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); // This also verifies that the custom profile names appears in the finished line. p.cargo("build --profile=other -v") .with_stderr_unordered( "\ [COMPILING] xxx [..] [COMPILING] yyy [..] [COMPILING] foo [..] [RUNNING] `rustc --crate-name xxx [..] 
-C codegen-units=5 [..]` [RUNNING] `rustc --crate-name yyy [..] -C codegen-units=6 [..]` [RUNNING] `rustc --crate-name foo [..] -C codegen-units=2 [..]` [FINISHED] other [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn conflicting_usage() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build --profile=dev --release") .with_status(101) .with_stderr( "\ error: conflicting usage of --profile=dev and --release The `--release` flag is the same as `--profile=release`. Remove one flag or the other to continue. ", ) .run(); p.cargo("install --profile=release --debug") .with_status(101) .with_stderr( "\ error: conflicting usage of --profile=release and --debug The `--debug` flag is the same as `--profile=dev`. Remove one flag or the other to continue. ", ) .run(); p.cargo("rustc --profile=dev --release") .with_stderr( "\ warning: the `--release` flag should not be specified with the `--profile` flag The `--release` flag will be ignored. This was historically accepted, but will become an error in a future release. [COMPILING] foo [..] [FINISHED] dev [..] ", ) .run(); p.cargo("check --profile=dev --release") .with_status(101) .with_stderr( "\ error: conflicting usage of --profile=dev and --release The `--release` flag is the same as `--profile=release`. Remove one flag or the other to continue. ", ) .run(); p.cargo("check --profile=test --release") .with_stderr( "\ warning: the `--release` flag should not be specified with the `--profile` flag The `--release` flag will be ignored. This was historically accepted, but will become an error in a future release. [CHECKING] foo [..] [FINISHED] test [..] ", ) .run(); // This is OK since the two are the same. p.cargo("rustc --profile=release --release") .with_stderr( "\ [COMPILING] foo [..] [FINISHED] release [..] 
", ) .run(); p.cargo("build --profile=release --release") .with_stderr( "\ [FINISHED] release [..] ", ) .run(); p.cargo("install --path . --profile=dev --debug") .with_stderr( "\ [INSTALLING] foo [..] [FINISHED] dev [..] [INSTALLING] [..] [INSTALLED] [..] [WARNING] be sure to add [..] ", ) .run(); p.cargo("install --path . --profile=release --debug") .with_status(101) .with_stderr( "\ error: conflicting usage of --profile=release and --debug The `--debug` flag is the same as `--profile=dev`. Remove one flag or the other to continue. ", ) .run(); } #[cargo_test] fn clean_custom_dirname() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [profile.other] inherits = "release" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build --release") .with_stdout("") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [FINISHED] release [optimized] target(s) in [..] ", ) .run(); p.cargo("clean -p foo").run(); p.cargo("build --release") .with_stdout("") .with_stderr( "\ [FINISHED] release [optimized] target(s) in [..] ", ) .run(); p.cargo("clean -p foo --release").run(); p.cargo("build --release") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [FINISHED] release [optimized] target(s) in [..] ", ) .run(); p.cargo("build") .with_stdout("") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("build --profile=other") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [FINISHED] other [optimized] target(s) in [..] 
", ) .run(); p.cargo("clean").arg("--release").run(); // Make sure that 'other' was not cleaned assert!(p.build_dir().is_dir()); assert!(p.build_dir().join("debug").is_dir()); assert!(p.build_dir().join("other").is_dir()); assert!(!p.build_dir().join("release").is_dir()); // This should clean 'other' p.cargo("clean --profile=other").with_stderr("").run(); assert!(p.build_dir().join("debug").is_dir()); assert!(!p.build_dir().join("other").is_dir()); } #[cargo_test] fn unknown_profile() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build --profile alpha") .with_stderr("[ERROR] profile `alpha` is not defined") .with_status(101) .run(); // Clean has a separate code path, need to check it too. p.cargo("clean --profile alpha") .with_stderr("[ERROR] profile `alpha` is not defined") .with_status(101) .run(); } #[cargo_test] fn reserved_profile_names() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [profile.doc] opt-level = 1 "#, ) .file("src/lib.rs", "") .build(); p.cargo("build --profile=doc") .with_status(101) .with_stderr("error: profile `doc` is reserved and not allowed to be explicitly specified") .run(); // Not an exhaustive list, just a sample. for name in ["build", "cargo", "check", "rustc", "CaRgO_startswith"] { p.cargo(&format!("build --profile={}", name)) .with_status(101) .with_stderr(&format!( "\ error: profile name `{}` is reserved Please choose a different name. See https://doc.rust-lang.org/cargo/reference/profiles.html for more on configuring profiles. 
", name )) .run(); } for name in ["build", "check", "cargo", "rustc", "CaRgO_startswith"] { p.change_file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.1.0" [profile.{}] opt-level = 1 "#, name ), ); p.cargo("build") .with_status(101) .with_stderr(&format!( "\ error: failed to parse manifest at `[ROOT]/foo/Cargo.toml` Caused by: profile name `{}` is reserved Please choose a different name. See https://doc.rust-lang.org/cargo/reference/profiles.html for more on configuring profiles. ", name )) .run(); } p.change_file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [profile.debug] debug = 1 inherits = "dev" "#, ); p.cargo("build") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[ROOT]/foo/Cargo.toml` Caused by: profile name `debug` is reserved To configure the default development profile, use the name `dev` as in [profile.dev] See https://doc.rust-lang.org/cargo/reference/profiles.html for more on configuring profiles. ", ) .run(); } #[cargo_test] fn legacy_commands_support_custom() { // These commands have had `--profile` before custom named profiles. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [profile.super-dev] codegen-units = 3 inherits = "dev" "#, ) .file("src/lib.rs", "") .build(); for command in ["rustc", "fix", "check"] { let mut pb = p.cargo(command); if command == "fix" { pb.arg("--allow-no-vcs"); } pb.arg("--profile=super-dev") .arg("-v") .with_stderr_contains("[RUNNING] [..]codegen-units=3[..]") .run(); p.build_dir().rm_rf(); } } #[cargo_test] fn legacy_rustc() { // `cargo rustc` historically has supported dev/test/bench/check // other profiles are covered in check::rustc_check let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [profile.dev] codegen-units = 3 "#, ) .file("src/lib.rs", "") .build(); p.cargo("rustc --profile dev -v") .with_stderr( "\ [COMPILING] foo v0.1.0 [..] 
[RUNNING] `rustc --crate-name foo [..]-C codegen-units=3[..] [FINISHED] [..] ", ) .run(); } cargo-0.66.0/tests/testsuite/profile_overrides.rs000066400000000000000000000334771432416201200221720ustar00rootroot00000000000000//! Tests for profile overrides (build-override and per-package overrides). use cargo_test_support::registry::Package; use cargo_test_support::{basic_lib_manifest, basic_manifest, project}; #[cargo_test] fn profile_override_basic() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = {path = "bar"} [profile.dev] opt-level = 1 [profile.dev.package.bar] opt-level = 3 "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_lib_manifest("bar")) .file("bar/src/lib.rs", "") .build(); p.cargo("build -v") .with_stderr( "[COMPILING] bar [..] [RUNNING] `rustc --crate-name bar [..] -C opt-level=3 [..]` [COMPILING] foo [..] [RUNNING] `rustc --crate-name foo [..] -C opt-level=1 [..]` [FINISHED] dev [optimized + debuginfo] target(s) in [..]", ) .run(); } #[cargo_test] fn profile_override_warnings() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [dependencies] bar = {path = "bar"} [profile.dev.package.bart] opt-level = 3 [profile.dev.package.no-suggestion] opt-level = 3 [profile.dev.package."bar:1.2.3"] opt-level = 3 "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_lib_manifest("bar")) .file("bar/src/lib.rs", "") .build(); p.cargo("build") .with_stderr_contains( "\ [WARNING] profile package spec `bar@1.2.3` in profile `dev` \ has a version or URL that does not match any of the packages: \ bar v0.5.0 ([..]/foo/bar) [WARNING] profile package spec `bart` in profile `dev` did not match any packages Did you mean `bar`? [WARNING] profile package spec `no-suggestion` in profile `dev` did not match any packages [COMPILING] [..] 
", ) .run(); } #[cargo_test] fn profile_override_bad_settings() { let bad_values = [ ( "panic = \"abort\"", "`panic` may not be specified in a `package` profile", ), ( "lto = true", "`lto` may not be specified in a `package` profile", ), ( "rpath = true", "`rpath` may not be specified in a `package` profile", ), ("package = {}", "package-specific profiles cannot be nested"), ]; for &(snippet, expected) in bad_values.iter() { let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" [dependencies] bar = {{path = "bar"}} [profile.dev.package.bar] {} "#, snippet ), ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_lib_manifest("bar")) .file("bar/src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr_contains(format!("Caused by:\n {}", expected)) .run(); } } #[cargo_test] fn profile_override_hierarchy() { // Test that the precedence rules are correct for different types. let p = project() .file( "Cargo.toml", r#" [workspace] members = ["m1", "m2", "m3"] [profile.dev] codegen-units = 1 [profile.dev.package.m2] codegen-units = 2 [profile.dev.package."*"] codegen-units = 3 [profile.dev.build-override] codegen-units = 4 "#, ) // m1 .file( "m1/Cargo.toml", r#" [package] name = "m1" version = "0.0.1" [dependencies] m2 = { path = "../m2" } dep = { path = "../../dep" } "#, ) .file("m1/src/lib.rs", "extern crate m2; extern crate dep;") .file("m1/build.rs", "fn main() {}") // m2 .file( "m2/Cargo.toml", r#" [package] name = "m2" version = "0.0.1" [dependencies] m3 = { path = "../m3" } [build-dependencies] m3 = { path = "../m3" } dep = { path = "../../dep" } "#, ) .file("m2/src/lib.rs", "extern crate m3;") .file( "m2/build.rs", "extern crate m3; extern crate dep; fn main() {}", ) // m3 .file("m3/Cargo.toml", &basic_lib_manifest("m3")) .file("m3/src/lib.rs", "") .build(); // dep (outside of workspace) let _dep = project() .at("dep") .file("Cargo.toml", &basic_lib_manifest("dep")) .file("src/lib.rs", "") .build(); // 
Profiles should be: // m3: 4 (as build.rs dependency) // m3: 1 (as [profile.dev] as workspace member) // dep: 3 (as [profile.dev.package."*"] as non-workspace member) // m1 build.rs: 4 (as [profile.dev.build-override]) // m2 build.rs: 2 (as [profile.dev.package.m2]) // m2: 2 (as [profile.dev.package.m2]) // m1: 1 (as [profile.dev]) p.cargo("build -v").with_stderr_unordered("\ [COMPILING] m3 [..] [COMPILING] dep [..] [RUNNING] `rustc --crate-name m3 m3/src/lib.rs [..] --crate-type lib --emit=[..]link[..]-C codegen-units=4 [..] [RUNNING] `rustc --crate-name dep [..]dep/src/lib.rs [..] --crate-type lib --emit=[..]link[..]-C codegen-units=3 [..] [RUNNING] `rustc --crate-name m3 m3/src/lib.rs [..] --crate-type lib --emit=[..]link[..]-C codegen-units=1 [..] [RUNNING] `rustc --crate-name build_script_build m1/build.rs [..] --crate-type bin --emit=[..]link[..]-C codegen-units=4 [..] [COMPILING] m2 [..] [RUNNING] `rustc --crate-name build_script_build m2/build.rs [..] --crate-type bin --emit=[..]link[..]-C codegen-units=2 [..] [RUNNING] `[..]/m1-[..]/build-script-build` [RUNNING] `[..]/m2-[..]/build-script-build` [RUNNING] `rustc --crate-name m2 m2/src/lib.rs [..] --crate-type lib --emit=[..]link[..]-C codegen-units=2 [..] [COMPILING] m1 [..] [RUNNING] `rustc --crate-name m1 m1/src/lib.rs [..] --crate-type lib --emit=[..]link[..]-C codegen-units=1 [..] [FINISHED] dev [unoptimized + debuginfo] [..] 
", ) .run(); } #[cargo_test] fn profile_override_spec_multiple() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [dependencies] bar = { path = "bar" } [profile.dev.package.bar] opt-level = 3 [profile.dev.package."bar:0.5.0"] opt-level = 3 "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_lib_manifest("bar")) .file("bar/src/lib.rs", "") .build(); p.cargo("build -v") .with_status(101) .with_stderr_contains( "\ [ERROR] multiple package overrides in profile `dev` match package `bar v0.5.0 ([..])` found package specs: bar, bar@0.5.0", ) .run(); } #[cargo_test] fn profile_override_spec() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["m1", "m2"] [profile.dev.package."dep:1.0.0"] codegen-units = 1 [profile.dev.package."dep:2.0.0"] codegen-units = 2 "#, ) // m1 .file( "m1/Cargo.toml", r#" [package] name = "m1" version = "0.0.1" [dependencies] dep = { path = "../../dep1" } "#, ) .file("m1/src/lib.rs", "extern crate dep;") // m2 .file( "m2/Cargo.toml", r#" [package] name = "m2" version = "0.0.1" [dependencies] dep = {path = "../../dep2" } "#, ) .file("m2/src/lib.rs", "extern crate dep;") .build(); project() .at("dep1") .file("Cargo.toml", &basic_manifest("dep", "1.0.0")) .file("src/lib.rs", "") .build(); project() .at("dep2") .file("Cargo.toml", &basic_manifest("dep", "2.0.0")) .file("src/lib.rs", "") .build(); p.cargo("build -v") .with_stderr_contains("[RUNNING] `rustc [..]dep1/src/lib.rs [..] -C codegen-units=1 [..]") .with_stderr_contains("[RUNNING] `rustc [..]dep2/src/lib.rs [..] 
-C codegen-units=2 [..]") .run(); } #[cargo_test] fn override_proc_macro() { Package::new("shared", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [dependencies] shared = "1.0" pm = {path = "pm"} [profile.dev.build-override] codegen-units = 4 "#, ) .file("src/lib.rs", r#"pm::eat!{}"#) .file( "pm/Cargo.toml", r#" [package] name = "pm" version = "0.1.0" [lib] proc-macro = true [dependencies] shared = "1.0" "#, ) .file( "pm/src/lib.rs", r#" extern crate proc_macro; use proc_macro::TokenStream; #[proc_macro] pub fn eat(_item: TokenStream) -> TokenStream { "".parse().unwrap() } "#, ) .build(); p.cargo("build -v") // Shared built for the proc-macro. .with_stderr_contains("[RUNNING] `rustc [..]--crate-name shared [..]-C codegen-units=4[..]") // Shared built for the library. .with_stderr_line_without( &["[RUNNING] `rustc --crate-name shared"], &["-C codegen-units"], ) .with_stderr_contains("[RUNNING] `rustc [..]--crate-name pm [..]-C codegen-units=4[..]") .with_stderr_line_without( &["[RUNNING] `rustc [..]--crate-name foo"], &["-C codegen-units"], ) .run(); } #[cargo_test] fn no_warning_ws() { // https://github.com/rust-lang/cargo/issues/7378, avoid warnings in a workspace. let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a", "b"] [profile.dev.package.a] codegen-units = 3 "#, ) .file("a/Cargo.toml", &basic_manifest("a", "0.1.0")) .file("a/src/lib.rs", "") .file("b/Cargo.toml", &basic_manifest("b", "0.1.0")) .file("b/src/lib.rs", "") .build(); p.cargo("build -p b") .with_stderr( "\ [COMPILING] b [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn build_override_shared() { // A dependency with a build script that is shared with a build // dependency, using different profile settings. 
That is: // // foo DEBUG=2 // β”œβ”€β”€ common DEBUG=2 // β”‚ └── common Run build.rs DEBUG=2 // β”‚ └── common build.rs DEBUG=0 (build_override) // └── foo Run build.rs DEBUG=2 // └── foo build.rs DEBUG=0 (build_override) // └── common DEBUG=0 (build_override) // └── common Run build.rs DEBUG=0 (build_override) // └── common build.rs DEBUG=0 (build_override) // // The key part here is that `common` RunCustomBuild is run twice, once // with DEBUG=2 (as a dependency of foo) and once with DEBUG=0 (as a // build-dependency of foo's build script). Package::new("common", "1.0.0") .file( "build.rs", r#" fn main() { if std::env::var("DEBUG").unwrap() != "false" { println!("cargo:rustc-cfg=foo_debug"); } else { println!("cargo:rustc-cfg=foo_release"); } } "#, ) .file( "src/lib.rs", r#" pub fn foo() -> u32 { if cfg!(foo_debug) { assert!(cfg!(debug_assertions)); 1 } else if cfg!(foo_release) { assert!(!cfg!(debug_assertions)); 2 } else { panic!("not set"); } } "#, ) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [build-dependencies] common = "1.0" [dependencies] common = "1.0" [profile.dev.build-override] debug = 0 debug-assertions = false "#, ) .file( "build.rs", r#" fn main() { assert_eq!(common::foo(), 2); } "#, ) .file( "src/main.rs", r#" fn main() { assert_eq!(common::foo(), 1); } "#, ) .build(); p.cargo("run").run(); } cargo-0.66.0/tests/testsuite/profile_targets.rs000066400000000000000000000777561432416201200216510ustar00rootroot00000000000000//! Tests for checking exactly how profiles correspond with each unit. For //! example, the `test` profile applying to test targets, but not other //! targets, etc. use cargo_test_support::{basic_manifest, project, Project}; fn all_target_project() -> Project { // This abuses the `codegen-units` setting so that we can verify exactly // which profile is used for each compiler invocation. 
project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [dependencies] bar = { path = "bar" } [build-dependencies] bdep = { path = "bdep" } [profile.dev] codegen-units = 1 panic = "abort" [profile.release] codegen-units = 2 panic = "abort" [profile.test] codegen-units = 3 [profile.bench] codegen-units = 4 [profile.dev.build-override] codegen-units = 5 [profile.release.build-override] codegen-units = 6 "#, ) .file("src/lib.rs", "extern crate bar;") .file("src/main.rs", "extern crate foo; fn main() {}") .file("examples/ex1.rs", "extern crate foo; fn main() {}") .file("tests/test1.rs", "extern crate foo;") .file("benches/bench1.rs", "extern crate foo;") .file( "build.rs", r#" extern crate bdep; fn main() { eprintln!("foo custom build PROFILE={} DEBUG={} OPT_LEVEL={}", std::env::var("PROFILE").unwrap(), std::env::var("DEBUG").unwrap(), std::env::var("OPT_LEVEL").unwrap(), ); } "#, ) // `bar` package. .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "") // `bdep` package. .file( "bdep/Cargo.toml", r#" [package] name = "bdep" version = "0.0.1" [dependencies] bar = { path = "../bar" } "#, ) .file("bdep/src/lib.rs", "extern crate bar;") .build() } #[cargo_test] fn profile_selection_build() { let p = all_target_project(); // `build` // NOTES: // - bdep `panic` is not set because it thinks `build.rs` is a plugin. // - build_script_build is built without panic because it thinks `build.rs` is a plugin. p.cargo("build -vv").with_stderr_unordered("\ [COMPILING] bar [..] [RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..] [RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] [COMPILING] bdep [..] [RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] [COMPILING] foo [..] 
[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] [RUNNING] `[..]/target/debug/build/foo-[..]/build-script-build` [foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0 [RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..] [RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..] [FINISHED] dev [unoptimized + debuginfo] [..] ").run(); p.cargo("build -vv") .with_stderr_unordered( "\ [FRESH] bar [..] [FRESH] bdep [..] [FRESH] foo [..] [FINISHED] dev [unoptimized + debuginfo] [..] ", ) .run(); } #[cargo_test] fn profile_selection_build_release() { let p = all_target_project(); // `build --release` p.cargo("build --release -vv").with_stderr_unordered("\ [COMPILING] bar [..] [RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..] [RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=6 [..] [COMPILING] bdep [..] [RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=6 [..] [COMPILING] foo [..] [RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=6 [..] [RUNNING] `[..]/target/release/build/foo-[..]/build-script-build` [foo 0.0.1] foo custom build PROFILE=release DEBUG=false OPT_LEVEL=3 [RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..] [RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..] [FINISHED] release [optimized] [..] 
").run(); p.cargo("build --release -vv") .with_stderr_unordered( "\ [FRESH] bar [..] [FRESH] bdep [..] [FRESH] foo [..] [FINISHED] release [optimized] [..] ", ) .run(); } #[cargo_test] fn profile_selection_build_all_targets() { let p = all_target_project(); // `build` // NOTES: // - bdep `panic` is not set because it thinks `build.rs` is a plugin. // - build_script_build is built without panic because it thinks // `build.rs` is a plugin. // - Benchmark dependencies are compiled in `dev` mode, which may be // surprising. See issue rust-lang/cargo#4929. // // - Dependency profiles: // Pkg Target Profile Reason // --- ------ ------- ------ // bar lib dev For foo-bin // bar lib dev-panic For tests/benches and bdep // bdep lib dev-panic For foo build.rs // foo custom dev-panic // // - `foo` target list is: // Target Profile Mode // ------ ------- ---- // lib dev+panic build (a normal lib target) // lib dev-panic build (used by tests/benches) // lib dev dev // test dev dev // bench dev dev // bin dev dev // bin dev build // example dev build p.cargo("build --all-targets -vv").with_stderr_unordered("\ [COMPILING] bar [..] [RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=1 -C debuginfo=2 [..] [RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..] [RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] [COMPILING] bdep [..] [RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] [COMPILING] foo [..] [RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] 
[RUNNING] `[..]/target/debug/build/foo-[..]/build-script-build` [foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0 [RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..]` [RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]link[..]-C codegen-units=1 -C debuginfo=2 --test [..]` [RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=1 -C debuginfo=2 [..]` [RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]link[..]-C codegen-units=1 -C debuginfo=2 --test [..]` [RUNNING] `[..] rustc --crate-name test1 tests/test1.rs [..]--emit=[..]link[..]-C codegen-units=1 -C debuginfo=2 --test [..]` [RUNNING] `[..] rustc --crate-name bench1 benches/bench1.rs [..]--emit=[..]link[..]-C codegen-units=1 -C debuginfo=2 --test [..]` [RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..]` [RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=[..]link -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..]` [FINISHED] dev [unoptimized + debuginfo] [..] ").run(); p.cargo("build -vv") .with_stderr_unordered( "\ [FRESH] bar [..] [FRESH] bdep [..] [FRESH] foo [..] [FINISHED] dev [unoptimized + debuginfo] [..] ", ) .run(); } #[cargo_test] fn profile_selection_build_all_targets_release() { let p = all_target_project(); // `build --all-targets --release` // NOTES: // - bdep `panic` is not set because it thinks `build.rs` is a plugin. // - bar compiled twice. It tries with and without panic, but the "is a // plugin" logic is forcing it to be cleared. // - build_script_build is built without panic because it thinks // `build.rs` is a plugin. // - build_script_build is being run two times. Once for the `dev` and // `test` targets, once for the `bench` targets. 
// TODO: "PROFILE" says debug both times, though! // // - Dependency profiles: // Pkg Target Profile Reason // --- ------ ------- ------ // bar lib release For foo-bin // bar lib release-panic For tests/benches and bdep // bdep lib release-panic For foo build.rs // foo custom release-panic // // - `foo` target list is: // Target Profile Mode // ------ ------- ---- // lib release+panic build (a normal lib target) // lib release-panic build (used by tests/benches) // lib release test (bench/test de-duped) // test release test // bench release test // bin release test (bench/test de-duped) // bin release build // example release build p.cargo("build --all-targets --release -vv").with_stderr_unordered("\ [COMPILING] bar [..] [RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3[..]-C codegen-units=2 [..] [RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..] [RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=6 [..] [COMPILING] bdep [..] [RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=6 [..] [COMPILING] foo [..] [RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=6 [..] [RUNNING] `[..]/target/release/build/foo-[..]/build-script-build` [foo 0.0.1] foo custom build PROFILE=release DEBUG=false OPT_LEVEL=3 [RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..]` [RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=2 --test [..]` [RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3[..]-C codegen-units=2 [..]` [RUNNING] `[..] 
rustc --crate-name test1 tests/test1.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=2 --test [..]` [RUNNING] `[..] rustc --crate-name bench1 benches/bench1.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=2 --test [..]` [RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=2 --test [..]` [RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..]` [RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..]` [FINISHED] release [optimized] [..] ").run(); p.cargo("build --all-targets --release -vv") .with_stderr_unordered( "\ [FRESH] bar [..] [FRESH] bdep [..] [FRESH] foo [..] [FINISHED] release [optimized] [..] ", ) .run(); } #[cargo_test] fn profile_selection_test() { let p = all_target_project(); // `test` // NOTES: // - Dependency profiles: // Pkg Target Profile Reason // --- ------ ------- ------ // bar lib test For foo-bin // bar lib test-panic For tests/benches and bdep // bdep lib test-panic For foo build.rs // foo custom test-panic // // - `foo` target list is: // Target Profile Mode // ------ ------- ---- // lib test-panic build (for tests) // lib test build (for bins) // lib test test // test test test // example test-panic build // bin test test // bin test build // p.cargo("test -vv").with_stderr_unordered("\ [COMPILING] bar [..] [RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=3 -C debuginfo=2 [..] [RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] [RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort[..]-C codegen-units=3 -C debuginfo=2 [..] [COMPILING] bdep [..] [RUNNING] `[..] 
rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] [COMPILING] foo [..] [RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] [RUNNING] `[..]/target/debug/build/foo-[..]/build-script-build` [foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0 [RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort[..]-C codegen-units=3 -C debuginfo=2 [..] [RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=3 -C debuginfo=2 [..] [RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]link[..]-C codegen-units=3 -C debuginfo=2 --test [..] [RUNNING] `[..] rustc --crate-name test1 tests/test1.rs [..]--emit=[..]link[..]-C codegen-units=3 -C debuginfo=2 --test [..] [RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=3 -C debuginfo=2 [..] [RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]link[..]-C codegen-units=3 -C debuginfo=2 --test [..] [RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C panic=abort[..]-C codegen-units=3 -C debuginfo=2 [..] [FINISHED] test [unoptimized + debuginfo] [..] [RUNNING] `[..]/deps/foo-[..]` [RUNNING] `[..]/deps/foo-[..]` [RUNNING] `[..]/deps/test1-[..]` [DOCTEST] foo [RUNNING] `rustdoc [..]--test [..] ").run(); p.cargo("test -vv") .with_stderr_unordered( "\ [FRESH] bar [..] [FRESH] bdep [..] [FRESH] foo [..] [FINISHED] test [unoptimized + debuginfo] [..] [RUNNING] `[..]/deps/foo-[..]` [RUNNING] `[..]/deps/foo-[..]` [RUNNING] `[..]/deps/test1-[..]` [DOCTEST] foo [RUNNING] `rustdoc [..]--test [..] 
", ) .run(); } #[cargo_test] fn profile_selection_test_release() { let p = all_target_project(); // `test --release` // NOTES: // - Dependency profiles: // Pkg Target Profile Reason // --- ------ ------- ------ // bar lib release For foo-bin // bar lib release-panic For tests/benches and bdep // bdep lib release-panic For foo build.rs // foo custom release-panic // // - `foo` target list is: // Target Profile Mode // ------ ------- ---- // lib release-panic build (for tests) // lib release build (for bins) // lib release test // test release test // example release-panic build // bin release test // bin release build // p.cargo("test --release -vv").with_stderr_unordered("\ [COMPILING] bar [..] [RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=6 [..] [RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..] [RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C opt-level=3[..]-C codegen-units=2[..] [COMPILING] bdep [..] [RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=6 [..] [COMPILING] foo [..] [RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=6 [..] [RUNNING] `[..]/target/release/build/foo-[..]/build-script-build` [foo 0.0.1] foo custom build PROFILE=release DEBUG=false OPT_LEVEL=3 [RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..] [RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3[..]-C codegen-units=2 [..] [RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=2 --test [..] [RUNNING] `[..] 
rustc --crate-name test1 tests/test1.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=2 --test [..] [RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=2 --test [..] [RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=[..]link -C opt-level=3[..]-C codegen-units=2 [..] [RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..] [FINISHED] release [optimized] [..] [RUNNING] `[..]/deps/foo-[..]` [RUNNING] `[..]/deps/foo-[..]` [RUNNING] `[..]/deps/test1-[..]` [DOCTEST] foo [RUNNING] `rustdoc [..]--test [..]` ").run(); p.cargo("test --release -vv") .with_stderr_unordered( "\ [FRESH] bar [..] [FRESH] bdep [..] [FRESH] foo [..] [FINISHED] release [optimized] [..] [RUNNING] `[..]/deps/foo-[..]` [RUNNING] `[..]/deps/foo-[..]` [RUNNING] `[..]/deps/test1-[..]` [DOCTEST] foo [RUNNING] `rustdoc [..]--test [..] ", ) .run(); } #[cargo_test] fn profile_selection_bench() { let p = all_target_project(); // `bench` // NOTES: // - Dependency profiles: // Pkg Target Profile Reason // --- ------ ------- ------ // bar lib bench For foo-bin // bar lib bench-panic For tests/benches and bdep // bdep lib bench-panic For foo build.rs // foo custom bench-panic // // - `foo` target list is: // Target Profile Mode // ------ ------- ---- // lib bench-panic build (for benches) // lib bench build (for bins) // lib bench test(bench) // bench bench test(bench) // bin bench test(bench) // bin bench build // p.cargo("bench -vv").with_stderr_unordered("\ [COMPILING] bar [..] [RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3[..]-C codegen-units=4 [..] [RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=4 [..] [RUNNING] `[..] 
rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=6 [..] [COMPILING] bdep [..] [RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=6 [..] [COMPILING] foo [..] [RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=6 [..] [RUNNING] `[..]target/release/build/foo-[..]/build-script-build` [foo 0.0.1] foo custom build PROFILE=release DEBUG=false OPT_LEVEL=3 [RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=4 [..] [RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3[..]-C codegen-units=4 [..] [RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=4 --test [..] [RUNNING] `[..] rustc --crate-name bench1 benches/bench1.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=4 --test [..] [RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=4 --test [..] [RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=4 [..] [FINISHED] bench [optimized] [..] [RUNNING] `[..]/deps/foo-[..] --bench` [RUNNING] `[..]/deps/foo-[..] --bench` [RUNNING] `[..]/deps/bench1-[..] --bench` ").run(); p.cargo("bench -vv") .with_stderr_unordered( "\ [FRESH] bar [..] [FRESH] bdep [..] [FRESH] foo [..] [FINISHED] bench [optimized] [..] [RUNNING] `[..]/deps/foo-[..] --bench` [RUNNING] `[..]/deps/foo-[..] --bench` [RUNNING] `[..]/deps/bench1-[..] 
--bench` ", ) .run(); } #[cargo_test] fn profile_selection_check_all_targets() { let p = all_target_project(); // `check` // NOTES: // - Dependency profiles: // Pkg Target Profile Action Reason // --- ------ ------- ------ ------ // bar lib dev* link For bdep // bar lib dev-panic metadata For tests/benches // bar lib dev metadata For lib/bins // bdep lib dev* link For foo build.rs // foo custom dev* link For build.rs // // `*` = wants panic, but it is cleared when args are built. // // - foo target list is: // Target Profile Mode // ------ ------- ---- // lib dev check // lib dev-panic check (for tests/benches) // lib dev-panic check-test (checking lib as a unittest) // example dev check // test dev-panic check-test // bench dev-panic check-test // bin dev check // bin dev-panic check-test (checking bin as a unittest) // p.cargo("check --all-targets -vv").with_stderr_unordered("\ [COMPILING] bar [..] [RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] [RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]metadata[..]-C codegen-units=1 -C debuginfo=2 [..] [RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]metadata -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..] [COMPILING] bdep[..] [RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] [COMPILING] foo [..] [RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] [RUNNING] `[..]target/debug/build/foo-[..]/build-script-build` [foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0 [RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]metadata -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..] [RUNNING] `[..] 
rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]metadata[..]-C codegen-units=1 -C debuginfo=2 [..] [RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]metadata[..]-C codegen-units=1 -C debuginfo=2 --test [..] [RUNNING] `[..] rustc --crate-name test1 tests/test1.rs [..]--emit=[..]metadata[..]-C codegen-units=1 -C debuginfo=2 --test [..] [RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]metadata[..]-C codegen-units=1 -C debuginfo=2 --test [..] [RUNNING] `[..] rustc --crate-name bench1 benches/bench1.rs [..]--emit=[..]metadata[..]-C codegen-units=1 -C debuginfo=2 --test [..] [RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=[..]metadata -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..] [RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]metadata -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..] [FINISHED] dev [unoptimized + debuginfo] [..] ").run(); // Starting with Rust 1.27, rustc emits `rmeta` files for bins, so // everything should be completely fresh. Previously, bins were being // rechecked. // See PR rust-lang/rust#49289 and issue rust-lang/cargo#3624. p.cargo("check --all-targets -vv") .with_stderr_unordered( "\ [FRESH] bar [..] [FRESH] bdep [..] [FRESH] foo [..] [FINISHED] dev [unoptimized + debuginfo] [..] ", ) .run(); } #[cargo_test] fn profile_selection_check_all_targets_release() { let p = all_target_project(); // `check --release` // See issue rust-lang/cargo#5218. // This is a pretty straightforward variant of // `profile_selection_check_all_targets` that uses `release` instead of // `dev` for all targets. p.cargo("check --all-targets --release -vv").with_stderr_unordered("\ [COMPILING] bar [..] [RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=6 [..] [RUNNING] `[..] 
rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]metadata -C opt-level=3[..]-C codegen-units=2 [..] [RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]metadata -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..] [COMPILING] bdep[..] [RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link [..]-C codegen-units=6 [..] [COMPILING] foo [..] [RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=6 [..] [RUNNING] `[..]target/release/build/foo-[..]/build-script-build` [foo 0.0.1] foo custom build PROFILE=release DEBUG=false OPT_LEVEL=3 [RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]metadata -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..] [RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]metadata -C opt-level=3[..]-C codegen-units=2 [..] [RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]metadata -C opt-level=3[..]-C codegen-units=2 --test [..] [RUNNING] `[..] rustc --crate-name test1 tests/test1.rs [..]--emit=[..]metadata -C opt-level=3[..]-C codegen-units=2 --test [..] [RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]metadata -C opt-level=3[..]-C codegen-units=2 --test [..] [RUNNING] `[..] rustc --crate-name bench1 benches/bench1.rs [..]--emit=[..]metadata -C opt-level=3[..]-C codegen-units=2 --test [..] [RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=[..]metadata -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..] [RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]metadata -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..] [FINISHED] release [optimized] [..] ").run(); p.cargo("check --all-targets --release -vv") .with_stderr_unordered( "\ [FRESH] bar [..] [FRESH] bdep [..] [FRESH] foo [..] [FINISHED] release [optimized] [..] 
", ) .run(); } #[cargo_test] fn profile_selection_check_all_targets_test() { let p = all_target_project(); // `check --profile=test` // - Dependency profiles: // Pkg Target Profile Action Reason // --- ------ ------- ------ ------ // bar lib test* link For bdep // bar lib test-panic metadata For tests/benches // bdep lib test* link For foo build.rs // foo custom test* link For build.rs // // `*` = wants panic, but it is cleared when args are built. // // - foo target list is: // Target Profile Mode // ------ ------- ---- // lib test-panic check-test (for tests/benches) // lib test-panic check-test (checking lib as a unittest) // example test-panic check-test // test test-panic check-test // bench test-panic check-test // bin test-panic check-test // p.cargo("check --all-targets --profile=test -vv").with_stderr_unordered("\ [COMPILING] bar [..] [RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] [RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]metadata[..]-C codegen-units=3 -C debuginfo=2 [..] [COMPILING] bdep[..] [RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] [COMPILING] foo [..] [RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] [RUNNING] `[..]target/debug/build/foo-[..]/build-script-build` [foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0 [RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]metadata[..]-C codegen-units=3 -C debuginfo=2 [..] [RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]metadata[..]-C codegen-units=3 -C debuginfo=2 --test [..] [RUNNING] `[..] rustc --crate-name test1 tests/test1.rs [..]--emit=[..]metadata[..]-C codegen-units=3 -C debuginfo=2 --test [..] [RUNNING] `[..] 
rustc --crate-name foo src/main.rs [..]--emit=[..]metadata[..]-C codegen-units=3 -C debuginfo=2 --test [..] [RUNNING] `[..] rustc --crate-name bench1 benches/bench1.rs [..]--emit=[..]metadata[..]-C codegen-units=3 -C debuginfo=2 --test [..] [RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--emit=[..]metadata[..]-C codegen-units=3 -C debuginfo=2 --test [..] [FINISHED] test [unoptimized + debuginfo] [..] ").run(); p.cargo("check --all-targets --profile=test -vv") .with_stderr_unordered( "\ [FRESH] bar [..] [FRESH] bdep [..] [FRESH] foo [..] [FINISHED] test [unoptimized + debuginfo] [..] ", ) .run(); } #[cargo_test] fn profile_selection_doc() { let p = all_target_project(); // `doc` // NOTES: // - Dependency profiles: // Pkg Target Profile Action Reason // --- ------ ------- ------ ------ // bar lib dev* link For bdep // bar lib dev metadata For rustdoc // bdep lib dev* link For foo build.rs // foo custom dev* link For build.rs // // `*` = wants panic, but it is cleared when args are built. p.cargo("doc -vv").with_stderr_unordered("\ [COMPILING] bar [..] [DOCUMENTING] bar [..] [RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] [RUNNING] `rustdoc [..]--crate-name bar bar/src/lib.rs [..] [RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]metadata -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..] [COMPILING] bdep [..] [RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] [COMPILING] foo [..] [RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] [RUNNING] `[..]target/debug/build/foo-[..]/build-script-build` [foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0 [DOCUMENTING] foo [..] [RUNNING] `rustdoc [..]--crate-name foo src/lib.rs [..] 
[FINISHED] dev [unoptimized + debuginfo] [..] ").run(); } cargo-0.66.0/tests/testsuite/profiles.rs000066400000000000000000000427601432416201200202660ustar00rootroot00000000000000//! Tests for profiles. use cargo_test_support::project; use cargo_test_support::registry::Package; use std::env; #[cargo_test] fn profile_overrides() { let p = project() .file( "Cargo.toml", r#" [package] name = "test" version = "0.0.0" authors = [] [profile.dev] opt-level = 1 debug = false rpath = true "#, ) .file("src/lib.rs", "") .build(); p.cargo("build -v") .with_stderr( "\ [COMPILING] test v0.0.0 ([CWD]) [RUNNING] `rustc --crate-name test src/lib.rs [..]--crate-type lib \ --emit=[..]link[..]\ -C opt-level=1[..]\ -C debug-assertions=on \ -C metadata=[..] \ -C rpath \ --out-dir [..] \ -L dependency=[CWD]/target/debug/deps` [FINISHED] dev [optimized] target(s) in [..] ", ) .run(); } #[cargo_test] fn opt_level_override_0() { let p = project() .file( "Cargo.toml", r#" [package] name = "test" version = "0.0.0" authors = [] [profile.dev] opt-level = 0 "#, ) .file("src/lib.rs", "") .build(); p.cargo("build -v") .with_stderr( "\ [COMPILING] test v0.0.0 ([CWD]) [RUNNING] `rustc --crate-name test src/lib.rs [..]--crate-type lib \ --emit=[..]link[..]\ -C debuginfo=2 \ -C metadata=[..] \ --out-dir [..] \ -L dependency=[CWD]/target/debug/deps` [FINISHED] [..] target(s) in [..] ", ) .run(); } #[cargo_test] fn debug_override_1() { let p = project() .file( "Cargo.toml", r#" [package] name = "test" version = "0.0.0" authors = [] [profile.dev] debug = 1 "#, ) .file("src/lib.rs", "") .build(); p.cargo("build -v") .with_stderr( "\ [COMPILING] test v0.0.0 ([CWD]) [RUNNING] `rustc --crate-name test src/lib.rs [..]--crate-type lib \ --emit=[..]link[..]\ -C debuginfo=1 \ -C metadata=[..] \ --out-dir [..] \ -L dependency=[CWD]/target/debug/deps` [FINISHED] [..] target(s) in [..] 
", ) .run(); } fn check_opt_level_override(profile_level: &str, rustc_level: &str) { let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "test" version = "0.0.0" authors = [] [profile.dev] opt-level = {level} "#, level = profile_level ), ) .file("src/lib.rs", "") .build(); p.cargo("build -v") .with_stderr(&format!( "\ [COMPILING] test v0.0.0 ([CWD]) [RUNNING] `rustc --crate-name test src/lib.rs [..]--crate-type lib \ --emit=[..]link \ -C opt-level={level}[..]\ -C debuginfo=2 \ -C debug-assertions=on \ -C metadata=[..] \ --out-dir [..] \ -L dependency=[CWD]/target/debug/deps` [FINISHED] [..] target(s) in [..] ", level = rustc_level )) .run(); } #[cargo_test] fn opt_level_overrides() { for &(profile_level, rustc_level) in &[ ("1", "1"), ("2", "2"), ("3", "3"), ("\"s\"", "s"), ("\"z\"", "z"), ] { check_opt_level_override(profile_level, rustc_level) } } #[cargo_test] fn top_level_overrides_deps() { let p = project() .file( "Cargo.toml", r#" [package] name = "test" version = "0.0.0" authors = [] [profile.release] opt-level = 1 debug = true [dependencies.foo] path = "foo" "#, ) .file("src/lib.rs", "") .file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [profile.release] opt-level = 0 debug = false [lib] name = "foo" crate_type = ["dylib", "rlib"] "#, ) .file("foo/src/lib.rs", "") .build(); p.cargo("build -v --release") .with_stderr(&format!( "\ [COMPILING] foo v0.0.0 ([CWD]/foo) [RUNNING] `rustc --crate-name foo foo/src/lib.rs [..]\ --crate-type dylib --crate-type rlib \ --emit=[..]link \ -C prefer-dynamic \ -C opt-level=1[..]\ -C debuginfo=2 \ -C metadata=[..] \ --out-dir [CWD]/target/release/deps \ -L dependency=[CWD]/target/release/deps` [COMPILING] test v0.0.0 ([CWD]) [RUNNING] `rustc --crate-name test src/lib.rs [..]--crate-type lib \ --emit=[..]link \ -C opt-level=1[..]\ -C debuginfo=2 \ -C metadata=[..] \ --out-dir [..] 
\ -L dependency=[CWD]/target/release/deps \ --extern foo=[CWD]/target/release/deps/\ {prefix}foo[..]{suffix} \ --extern foo=[CWD]/target/release/deps/libfoo.rlib` [FINISHED] release [optimized + debuginfo] target(s) in [..] ", prefix = env::consts::DLL_PREFIX, suffix = env::consts::DLL_SUFFIX )) .run(); } #[cargo_test] fn profile_in_non_root_manifest_triggers_a_warning() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [workspace] members = ["bar"] [profile.dev] debug = false "#, ) .file("src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.1.0" authors = [] workspace = ".." [profile.dev] opt-level = 1 "#, ) .file("bar/src/main.rs", "fn main() {}") .build(); p.cargo("build -v") .cwd("bar") .with_stderr( "\ [WARNING] profiles for the non root package will be ignored, specify profiles at the workspace root: package: [..] workspace: [..] [COMPILING] bar v0.1.0 ([..]) [RUNNING] `rustc [..]` [FINISHED] dev [unoptimized] target(s) in [..]", ) .run(); } #[cargo_test] fn profile_in_virtual_manifest_works() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar"] [profile.dev] opt-level = 1 debug = false "#, ) .file("src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.1.0" authors = [] workspace = ".." 
"#, ) .file("bar/src/main.rs", "fn main() {}") .build(); p.cargo("build -v") .cwd("bar") .with_stderr( "\ [COMPILING] bar v0.1.0 ([..]) [RUNNING] `rustc [..]` [FINISHED] dev [optimized] target(s) in [..]", ) .run(); } #[cargo_test] fn profile_lto_string_bool_dev() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [profile.dev] lto = "true" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[ROOT]/foo/Cargo.toml` Caused by: `lto` setting of string `\"true\"` for `dev` profile is not a valid setting, \ must be a boolean (`true`/`false`) or a string (`\"thin\"`/`\"fat\"`/`\"off\"`) or omitted. ", ) .run(); } #[cargo_test] fn profile_panic_test_bench() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [profile.test] panic = "abort" [profile.bench] panic = "abort" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_stderr_contains( "\ [WARNING] `panic` setting is ignored for `bench` profile [WARNING] `panic` setting is ignored for `test` profile ", ) .run(); } #[cargo_test] fn profile_doc_deprecated() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [profile.doc] opt-level = 0 "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_stderr_contains("[WARNING] profile `doc` is deprecated and has no effect") .run(); } #[cargo_test] fn panic_unwind_does_not_build_twice() { // Check for a bug where `lib` was built twice, once with panic set and // once without. Since "unwind" is the default, they are the same and // should only be built once. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [profile.dev] panic = "unwind" "#, ) .file("src/lib.rs", "") .file("src/main.rs", "fn main() {}") .file("tests/t1.rs", "") .build(); p.cargo("test -v --tests --no-run") .with_stderr_unordered( "\ [COMPILING] foo [..] 
[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib [..] [RUNNING] `rustc --crate-name foo src/lib.rs [..] --test [..] [RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin [..] [RUNNING] `rustc --crate-name foo src/main.rs [..] --test [..] [RUNNING] `rustc --crate-name t1 tests/t1.rs [..] [FINISHED] [..] [EXECUTABLE] `[..]/target/debug/deps/t1-[..][EXE]` [EXECUTABLE] `[..]/target/debug/deps/foo-[..][EXE]` [EXECUTABLE] `[..]/target/debug/deps/foo-[..][EXE]` ", ) .run(); } #[cargo_test] fn debug_0_report() { // The finished line handles 0 correctly. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [profile.dev] debug = 0 "#, ) .file("src/lib.rs", "") .build(); p.cargo("build -v") .with_stderr( "\ [COMPILING] foo v0.1.0 [..] [RUNNING] `rustc --crate-name foo src/lib.rs [..]-C debuginfo=0 [..] [FINISHED] dev [unoptimized] target(s) in [..] ", ) .run(); } #[cargo_test] fn thin_lto_works() { let p = project() .file( "Cargo.toml", r#" [project] name = "top" version = "0.5.0" authors = [] [profile.release] lto = 'thin' "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build --release -v") .with_stderr( "\ [COMPILING] top [..] [RUNNING] `rustc [..] -C lto=thin [..]` [FINISHED] [..] ", ) .run(); } #[cargo_test] fn strip_works() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [profile.release] strip = 'symbols' "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build --release -v") .with_stderr( "\ [COMPILING] foo [..] [RUNNING] `rustc [..] -C strip=symbols [..]` [FINISHED] [..] ", ) .run(); } #[cargo_test] fn strip_passes_unknown_option_to_rustc() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [profile.release] strip = 'unknown' "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build --release -v") .with_status(101) .with_stderr_contains( "\ [COMPILING] foo [..] [RUNNING] `rustc [..] 
-C strip=unknown [..]` error: incorrect value `unknown` for [..] `strip` [..] was expected ", ) .run(); } #[cargo_test] fn strip_accepts_true_to_strip_symbols() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [profile.release] strip = true "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build --release -v") .with_stderr( "\ [COMPILING] foo [..] [RUNNING] `rustc [..] -C strip=symbols [..]` [FINISHED] [..] ", ) .run(); } #[cargo_test] fn strip_accepts_false_to_disable_strip() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [profile.release] strip = false "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build --release -v") .with_stderr_does_not_contain("-C strip") .run(); } #[cargo_test] fn rustflags_works() { let p = project() .file( "Cargo.toml", r#" cargo-features = ["profile-rustflags"] [profile.dev] rustflags = ["-C", "link-dead-code=yes"] [package] name = "foo" version = "0.0.1" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build -v") .masquerade_as_nightly_cargo(&["profile-rustflags"]) .with_stderr( "\ [COMPILING] foo [..] [RUNNING] `rustc --crate-name foo [..] -C link-dead-code=yes [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn rustflags_works_with_env() { let p = project() .file( "Cargo.toml", r#" cargo-features = ["profile-rustflags"] [package] name = "foo" version = "0.0.1" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build -v") .env("CARGO_PROFILE_DEV_RUSTFLAGS", "-C link-dead-code=yes") .masquerade_as_nightly_cargo(&["profile-rustflags"]) .with_stderr( "\ [COMPILING] foo [..] [RUNNING] `rustc --crate-name foo [..] -C link-dead-code=yes [..] [FINISHED] [..] 
", ) .run(); } #[cargo_test] fn rustflags_requires_cargo_feature() { let p = project() .file( "Cargo.toml", r#" [profile.dev] rustflags = ["-C", "link-dead-code=yes"] [package] name = "foo" version = "0.0.1" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build -v") .masquerade_as_nightly_cargo(&["profile-rustflags"]) .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[CWD]/Cargo.toml` Caused by: feature `profile-rustflags` is required The package requires the Cargo feature called `profile-rustflags`, but that feature is \ not stabilized in this version of Cargo (1.[..]). Consider adding `cargo-features = [\"profile-rustflags\"]` to the top of Cargo.toml \ (above the [package] table) to tell Cargo you are opting in to use this unstable feature. See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#profile-rustflags-option \ for more information about the status of this feature. ", ) .run(); Package::new("bar", "1.0.0").publish(); p.change_file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [dependencies] bar = "1.0" [profile.dev.package.bar] rustflags = ["-C", "link-dead-code=yes"] "#, ); p.cargo("check") .masquerade_as_nightly_cargo(&["profile-rustflags"]) .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[ROOT]/foo/Cargo.toml` Caused by: feature `profile-rustflags` is required The package requires the Cargo feature called `profile-rustflags`, but that feature is \ not stabilized in this version of Cargo (1.[..]). Consider adding `cargo-features = [\"profile-rustflags\"]` to the top of Cargo.toml \ (above the [package] table) to tell Cargo you are opting in to use this unstable feature. See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#profile-rustflags-option \ for more information about the status of this feature. ", ) .run(); } cargo-0.66.0/tests/testsuite/progress.rs000066400000000000000000000067371432416201200203130ustar00rootroot00000000000000//! 
Tests for progress bar. use cargo_test_support::project; use cargo_test_support::registry::Package; #[cargo_test] fn bad_progress_config_unknown_when() { let p = project() .file( ".cargo/config", r#" [term] progress = { when = 'unknown' } "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] error in [..].cargo/config: \ could not load config key `term.progress.when` Caused by: unknown variant `unknown`, expected one of `auto`, `never`, `always` ", ) .run(); } #[cargo_test] fn bad_progress_config_missing_width() { let p = project() .file( ".cargo/config", r#" [term] progress = { when = 'always' } "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] \"always\" progress requires a `width` key ", ) .run(); } #[cargo_test] fn bad_progress_config_missing_when() { let p = project() .file( ".cargo/config", r#" [term] progress = { width = 1000 } "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: missing field `when` ", ) .run(); } #[cargo_test] fn always_shows_progress() { const N: usize = 3; let mut deps = String::new(); for i in 1..=N { Package::new(&format!("dep{}", i), "1.0.0").publish(); deps.push_str(&format!("dep{} = \"1.0\"\n", i)); } let p = project() .file( ".cargo/config", r#" [term] progress = { when = 'always', width = 100 } "#, ) .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.1.0" [dependencies] {} "#, deps ), ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_stderr_contains("[DOWNLOADING] [..] crates [..]") .with_stderr_contains("[..][DOWNLOADED] 3 crates ([..]) in [..]") .with_stderr_contains("[BUILDING] [..] 
[..]/4: [..]") .run(); } #[cargo_test] fn never_progress() { const N: usize = 3; let mut deps = String::new(); for i in 1..=N { Package::new(&format!("dep{}", i), "1.0.0").publish(); deps.push_str(&format!("dep{} = \"1.0\"\n", i)); } let p = project() .file( ".cargo/config", r#" [term] progress = { when = 'never' } "#, ) .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.1.0" [dependencies] {} "#, deps ), ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_stderr_does_not_contain("[DOWNLOADING] [..] crates [..]") .with_stderr_does_not_contain("[..][DOWNLOADED] 3 crates ([..]) in [..]") .with_stderr_does_not_contain("[BUILDING] [..] [..]/4: [..]") .run(); } cargo-0.66.0/tests/testsuite/pub_priv.rs000066400000000000000000000122651432416201200202660ustar00rootroot00000000000000//! Tests for public/private dependencies. use cargo_test_support::project; use cargo_test_support::registry::Package; #[cargo_test(nightly, reason = "exported_private_dependencies lint is unstable")] fn exported_priv_warning() { Package::new("priv_dep", "0.1.0") .file("src/lib.rs", "pub struct FromPriv;") .publish(); let p = project() .file( "Cargo.toml", r#" cargo-features = ["public-dependency"] [package] name = "foo" version = "0.0.1" [dependencies] priv_dep = "0.1.0" "#, ) .file( "src/lib.rs", " extern crate priv_dep; pub fn use_priv(_: priv_dep::FromPriv) {} ", ) .build(); p.cargo("build --message-format=short") .masquerade_as_nightly_cargo(&["public-dependency"]) .with_stderr_contains( "\ src/lib.rs:3:13: warning: type `[..]FromPriv` from private dependency 'priv_dep' in public interface ", ) .run() } #[cargo_test(nightly, reason = "exported_private_dependencies lint is unstable")] fn exported_pub_dep() { Package::new("pub_dep", "0.1.0") .file("src/lib.rs", "pub struct FromPub;") .publish(); let p = project() .file( "Cargo.toml", r#" cargo-features = ["public-dependency"] [package] name = "foo" version = "0.0.1" [dependencies] pub_dep = {version = "0.1.0", 
public = true} "#, ) .file( "src/lib.rs", " extern crate pub_dep; pub fn use_pub(_: pub_dep::FromPub) {} ", ) .build(); p.cargo("build --message-format=short") .masquerade_as_nightly_cargo(&["public-dependency"]) .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... [DOWNLOADED] pub_dep v0.1.0 ([..]) [COMPILING] pub_dep v0.1.0 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run() } #[cargo_test] pub fn requires_nightly_cargo() { let p = project() .file( "Cargo.toml", r#" cargo-features = ["public-dependency"] "#, ) .file("src/lib.rs", "") .build(); p.cargo("build --message-format=short") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[..]` Caused by: the cargo feature `public-dependency` requires a nightly version of Cargo, but this is the `stable` channel See https://doc.rust-lang.org/book/appendix-07-nightly-rust.html for more information about Rust release channels. See https://doc.rust-lang.org/[..]cargo/reference/unstable.html#public-dependency for more information about using this feature. " ) .run() } #[cargo_test] fn requires_feature() { Package::new("pub_dep", "0.1.0") .file("src/lib.rs", "") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [dependencies] pub_dep = { version = "0.1.0", public = true } "#, ) .file("src/lib.rs", "") .build(); p.cargo("build --message-format=short") .masquerade_as_nightly_cargo(&["public-dependency"]) .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[..]` Caused by: feature `public-dependency` is required The package requires the Cargo feature called `public-dependency`, \ but that feature is not stabilized in this version of Cargo (1.[..]). Consider adding `cargo-features = [\"public-dependency\"]` to the top of Cargo.toml \ (above the [package] table) to tell Cargo you are opting in to use this unstable feature. 
See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#public-dependency \ for more information about the status of this feature. ", ) .run() } #[cargo_test] fn pub_dev_dependency() { Package::new("pub_dep", "0.1.0") .file("src/lib.rs", "pub struct FromPub;") .publish(); let p = project() .file( "Cargo.toml", r#" cargo-features = ["public-dependency"] [package] name = "foo" version = "0.0.1" [dev-dependencies] pub_dep = {version = "0.1.0", public = true} "#, ) .file( "src/lib.rs", " extern crate pub_dep; pub fn use_pub(_: pub_dep::FromPub) {} ", ) .build(); p.cargo("build --message-format=short") .masquerade_as_nightly_cargo(&["public-dependency"]) .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[..]` Caused by: 'public' specifier can only be used on regular dependencies, not Development dependencies ", ) .run() } cargo-0.66.0/tests/testsuite/publish.rs000066400000000000000000001432601432416201200201060ustar00rootroot00000000000000//! Tests for the `cargo publish` command. 
use cargo_test_support::git::{self, repo}; use cargo_test_support::paths; use cargo_test_support::registry::{self, Package, Response}; use cargo_test_support::{basic_manifest, no_such_file_err_msg, project, publish}; use std::fs; const CLEAN_FOO_JSON: &str = r#" { "authors": [], "badges": {}, "categories": [], "deps": [], "description": "foo", "documentation": "foo", "features": {}, "homepage": "foo", "keywords": [], "license": "MIT", "license_file": null, "links": null, "name": "foo", "readme": null, "readme_file": null, "repository": "foo", "vers": "0.0.1" } "#; fn validate_upload_foo() { publish::validate_upload( r#" { "authors": [], "badges": {}, "categories": [], "deps": [], "description": "foo", "documentation": null, "features": {}, "homepage": null, "keywords": [], "license": "MIT", "license_file": null, "links": null, "name": "foo", "readme": null, "readme_file": null, "repository": null, "vers": "0.0.1" } "#, "foo-0.0.1.crate", &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], ); } fn validate_upload_li() { publish::validate_upload( r#" { "authors": [], "badges": {}, "categories": [], "deps": [], "description": "li", "documentation": null, "features": {}, "homepage": null, "keywords": [], "license": "MIT", "license_file": null, "links": null, "name": "li", "readme": null, "readme_file": null, "repository": null, "vers": "0.0.1" } "#, "li-0.0.1.crate", &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], ); } fn validate_upload_foo_clean() { publish::validate_upload( CLEAN_FOO_JSON, "foo-0.0.1.crate", &[ "Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs", ".cargo_vcs_info.json", ], ); } #[cargo_test] fn simple() { registry::init(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("publish --no-verify --token sekrit") .with_stderr( "\ [UPDATING] `dummy-registry` index [WARNING] 
manifest has no documentation, [..] See [..] [PACKAGING] foo v0.0.1 ([CWD]) [UPLOADING] foo v0.0.1 ([CWD]) ", ) .run(); validate_upload_foo(); } #[cargo_test] fn old_token_location() { // Check that the `token` key works at the root instead of under a // `[registry]` table. registry::init(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" "#, ) .file("src/main.rs", "fn main() {}") .build(); let credentials = paths::home().join(".cargo/credentials"); fs::remove_file(&credentials).unwrap(); // Verify can't publish without a token. p.cargo("publish --no-verify") .with_status(101) .with_stderr_contains( "[ERROR] no upload token found, \ please run `cargo login` or pass `--token`", ) .run(); fs::write(&credentials, r#"token = "api-token""#).unwrap(); p.cargo("publish --no-verify") .with_stderr( "\ [UPDATING] `dummy-registry` index [WARNING] using `registry.token` config value with source replacement is deprecated This may become a hard error in the future[..] Use the --token command-line flag to remove this warning. [WARNING] manifest has no documentation, [..] See [..] 
[PACKAGING] foo v0.0.1 ([CWD]) [UPLOADING] foo v0.0.1 ([CWD]) ", ) .run(); validate_upload_foo(); } #[cargo_test] fn simple_with_index() { let registry = registry::init(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("publish --no-verify --token sekrit --index") .arg(registry.index_url().as_str()) .run(); validate_upload_foo(); } #[cargo_test] fn git_deps() { registry::init(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" [dependencies.foo] git = "git://path/to/nowhere" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("publish -v --no-verify --token sekrit") .with_status(101) .with_stderr( "\ [UPDATING] [..] index [ERROR] all dependencies must have a version specified when publishing. dependency `foo` does not specify a version Note: The published dependency will use the version from crates.io, the `git` specification will be removed from the dependency declaration. ", ) .run(); } #[cargo_test] fn path_dependency_no_version() { registry::init(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" [dependencies.bar] path = "bar" "#, ) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "") .build(); p.cargo("publish --token sekrit") .with_status(101) .with_stderr( "\ [UPDATING] [..] index [ERROR] all dependencies must have a version specified when publishing. dependency `bar` does not specify a version Note: The published dependency will use the version from crates.io, the `path` specification will be removed from the dependency declaration. 
", ) .run(); } #[cargo_test] fn unpublishable_crate() { let registry = registry::init(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" publish = false "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("publish --index") .arg(registry.index_url().as_str()) .with_status(101) .with_stderr( "\ [ERROR] `foo` cannot be published. The registry `crates-io` is not listed in the `publish` value in Cargo.toml. ", ) .run(); } #[cargo_test] fn dont_publish_dirty() { registry::init(); let p = project().file("bar", "").build(); let _ = git::repo(&paths::root().join("foo")) .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" documentation = "foo" homepage = "foo" repository = "foo" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("publish --token sekrit") .with_status(101) .with_stderr( "\ [UPDATING] `[..]` index error: 1 files in the working directory contain changes that were not yet \ committed into git: bar to proceed despite this and include the uncommitted changes, pass the `--allow-dirty` flag ", ) .run(); } #[cargo_test] fn publish_clean() { registry::init(); let p = project().build(); let _ = repo(&paths::root().join("foo")) .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" documentation = "foo" homepage = "foo" repository = "foo" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("publish --token sekrit").run(); validate_upload_foo_clean(); } #[cargo_test] fn publish_in_sub_repo() { registry::init(); let p = project().no_manifest().file("baz", "").build(); let _ = repo(&paths::root().join("foo")) .file( "bar/Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" documentation = "foo" homepage = "foo" repository = "foo" "#, ) .file("bar/src/main.rs", "fn main() {}") .build(); 
p.cargo("publish --token sekrit").cwd("bar").run(); validate_upload_foo_clean(); } #[cargo_test] fn publish_when_ignored() { registry::init(); let p = project().file("baz", "").build(); let _ = repo(&paths::root().join("foo")) .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" documentation = "foo" homepage = "foo" repository = "foo" "#, ) .file("src/main.rs", "fn main() {}") .file(".gitignore", "baz") .build(); p.cargo("publish --token sekrit").run(); publish::validate_upload( CLEAN_FOO_JSON, "foo-0.0.1.crate", &[ "Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs", ".gitignore", ".cargo_vcs_info.json", ], ); } #[cargo_test] fn ignore_when_crate_ignored() { registry::init(); let p = project().no_manifest().file("bar/baz", "").build(); let _ = repo(&paths::root().join("foo")) .file(".gitignore", "bar") .nocommit_file( "bar/Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" documentation = "foo" homepage = "foo" repository = "foo" "#, ) .nocommit_file("bar/src/main.rs", "fn main() {}"); p.cargo("publish --token sekrit").cwd("bar").run(); publish::validate_upload( CLEAN_FOO_JSON, "foo-0.0.1.crate", &[ "Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs", "baz", ], ); } #[cargo_test] fn new_crate_rejected() { registry::init(); let p = project().file("baz", "").build(); let _ = repo(&paths::root().join("foo")) .nocommit_file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" documentation = "foo" homepage = "foo" repository = "foo" "#, ) .nocommit_file("src/main.rs", "fn main() {}"); p.cargo("publish --token sekrit") .with_status(101) .with_stderr_contains( "[ERROR] 3 files in the working directory contain \ changes that were not yet committed into git:", ) .run(); } #[cargo_test] fn dry_run() { let registry = registry::init(); let p = project() .file( "Cargo.toml", r#" 
[project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("publish --dry-run --index") .arg(registry.index_url().as_str()) .with_stderr( "\ [UPDATING] `[..]` index [WARNING] manifest has no documentation, [..] See [..] [PACKAGING] foo v0.0.1 ([CWD]) [VERIFYING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [UPLOADING] foo v0.0.1 ([CWD]) [WARNING] aborting upload due to dry run ", ) .run(); // Ensure the API request wasn't actually made assert!(registry::api_path().join("api/v1/crates").exists()); assert!(!registry::api_path().join("api/v1/crates/new").exists()); } #[cargo_test] fn registry_not_in_publish_list() { registry::init(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" publish = [ "test" ] "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("publish") .arg("--registry") .arg("alternative") .with_status(101) .with_stderr( "\ [ERROR] `foo` cannot be published. The registry `alternative` is not listed in the `publish` value in Cargo.toml. ", ) .run(); } #[cargo_test] fn publish_empty_list() { registry::init(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" publish = [] "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("publish --registry alternative") .with_status(101) .with_stderr( "\ [ERROR] `foo` cannot be published. The registry `alternative` is not listed in the `publish` value in Cargo.toml. 
", ) .run(); } #[cargo_test] fn publish_allowed_registry() { registry::alt_init(); let p = project().build(); let _ = repo(&paths::root().join("foo")) .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" documentation = "foo" homepage = "foo" repository = "foo" publish = ["alternative"] "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("publish --registry alternative").run(); publish::validate_alt_upload( CLEAN_FOO_JSON, "foo-0.0.1.crate", &[ "Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs", ".cargo_vcs_info.json", ], ); } #[cargo_test] fn publish_implicitly_to_only_allowed_registry() { registry::alt_init(); let p = project().build(); let _ = repo(&paths::root().join("foo")) .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" documentation = "foo" homepage = "foo" repository = "foo" publish = ["alternative"] "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("publish").run(); publish::validate_alt_upload( CLEAN_FOO_JSON, "foo-0.0.1.crate", &[ "Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs", ".cargo_vcs_info.json", ], ); } #[cargo_test] fn publish_fail_with_no_registry_specified() { registry::init(); let p = project().build(); let _ = repo(&paths::root().join("foo")) .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" documentation = "foo" homepage = "foo" repository = "foo" publish = ["alternative", "test"] "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("publish") .with_status(101) .with_stderr( "\ [ERROR] `foo` cannot be published. The registry `crates-io` is not listed in the `publish` value in Cargo.toml. 
", ) .run(); } #[cargo_test] fn block_publish_no_registry() { registry::init(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" publish = [] "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("publish --registry alternative") .with_status(101) .with_stderr( "\ [ERROR] `foo` cannot be published. The registry `alternative` is not listed in the `publish` value in Cargo.toml. ", ) .run(); } #[cargo_test] fn publish_with_crates_io_explicit() { // Explicitly setting `crates-io` in the publish list. registry::init(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" publish = ["crates-io"] "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("publish --registry alternative") .with_status(101) .with_stderr( "\ [ERROR] `foo` cannot be published. The registry `alternative` is not listed in the `publish` value in Cargo.toml. 
", ) .run(); p.cargo("publish").run(); } #[cargo_test] fn publish_with_select_features() { registry::init(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" [features] required = [] optional = [] "#, ) .file( "src/main.rs", "#[cfg(not(feature = \"required\"))] compile_error!(\"This crate requires `required` feature!\"); fn main() {}", ) .build(); p.cargo("publish --features required --token sekrit") .with_stderr_contains("[UPLOADING] foo v0.0.1 ([CWD])") .run(); } #[cargo_test] fn publish_with_all_features() { registry::init(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" [features] required = [] optional = [] "#, ) .file( "src/main.rs", "#[cfg(not(feature = \"required\"))] compile_error!(\"This crate requires `required` feature!\"); fn main() {}", ) .build(); p.cargo("publish --all-features --token sekrit") .with_stderr_contains("[UPLOADING] foo v0.0.1 ([CWD])") .run(); } #[cargo_test] fn publish_with_no_default_features() { registry::init(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" [features] default = ["required"] required = [] "#, ) .file( "src/main.rs", "#[cfg(not(feature = \"required\"))] compile_error!(\"This crate requires `required` feature!\"); fn main() {}", ) .build(); p.cargo("publish --no-default-features --token sekrit") .with_stderr_contains("error: This crate requires `required` feature!") .with_status(101) .run(); } #[cargo_test] fn publish_with_patch() { Package::new("bar", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" [dependencies] bar = "1.0" [patch.crates-io] bar = { path = "bar" } "#, ) .file( "src/main.rs", "extern crate bar; fn main() { bar::newfunc(); }", ) 
.file("bar/Cargo.toml", &basic_manifest("bar", "1.0.0")) .file("bar/src/lib.rs", "pub fn newfunc() {}") .build(); // Check that it works with the patched crate. p.cargo("build").run(); // Check that verify fails with patched crate which has new functionality. p.cargo("publish --token sekrit") .with_stderr_contains("[..]newfunc[..]") .with_status(101) .run(); // Remove the usage of new functionality and try again. p.change_file("src/main.rs", "extern crate bar; pub fn main() {}"); p.cargo("publish --token sekrit").run(); // Note, use of `registry` in the deps here is an artifact that this // publishes to a fake, local registry that is pretending to be crates.io. // Normal publishes would set it to null. publish::validate_upload( r#" { "authors": [], "badges": {}, "categories": [], "deps": [ { "default_features": true, "features": [], "kind": "normal", "name": "bar", "optional": false, "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^1.0" } ], "description": "foo", "documentation": null, "features": {}, "homepage": null, "keywords": [], "license": "MIT", "license_file": null, "links": null, "name": "foo", "readme": null, "readme_file": null, "repository": null, "vers": "0.0.1" } "#, "foo-0.0.1.crate", &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], ); } #[cargo_test] fn publish_checks_for_token_before_verify() { registry::init(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" "#, ) .file("src/main.rs", "fn main() {}") .build(); let credentials = paths::home().join(".cargo/credentials"); fs::remove_file(&credentials).unwrap(); // Assert upload token error before the package is verified p.cargo("publish") .with_status(101) .with_stderr_contains( "[ERROR] no upload token found, \ please run `cargo login` or pass `--token`", ) .with_stderr_does_not_contain("[VERIFYING] foo v0.0.1 ([CWD])") .run(); // Assert package verified 
successfully on dry run p.cargo("publish --dry-run") .with_status(0) .with_stderr_contains("[VERIFYING] foo v0.0.1 ([CWD])") .run(); } #[cargo_test] fn publish_with_bad_source() { let p = project() .file( ".cargo/config", r#" [source.crates-io] replace-with = 'local-registry' [source.local-registry] local-registry = 'registry' "#, ) .file("src/lib.rs", "") .build(); p.cargo("publish --token sekrit") .with_status(101) .with_stderr( "\ [ERROR] registry `[..]/foo/registry` does not support API commands. Check for a source-replacement in .cargo/config. ", ) .run(); p.change_file( ".cargo/config", r#" [source.crates-io] replace-with = "vendored-sources" [source.vendored-sources] directory = "vendor" "#, ); p.cargo("publish --token sekrit") .with_status(101) .with_stderr( "\ [ERROR] dir [..]/foo/vendor does not support API commands. Check for a source-replacement in .cargo/config. ", ) .run(); } #[cargo_test] fn publish_git_with_version() { // A dependency with both `git` and `version`. Package::new("dep1", "1.0.1") .file("src/lib.rs", "pub fn f() -> i32 {1}") .publish(); let git_project = git::new("dep1", |project| { project .file("Cargo.toml", &basic_manifest("dep1", "1.0.0")) .file("src/lib.rs", "pub fn f() -> i32 {2}") }); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.1.0" authors = [] edition = "2018" license = "MIT" description = "foo" [dependencies] dep1 = {{version = "1.0", git="{}"}} "#, git_project.url() ), ) .file( "src/main.rs", r#" pub fn main() { println!("{}", dep1::f()); } "#, ) .build(); p.cargo("run").with_stdout("2").run(); p.cargo("publish --no-verify --token sekrit").run(); publish::validate_upload_with_contents( r#" { "authors": [], "badges": {}, "categories": [], "deps": [ { "default_features": true, "features": [], "kind": "normal", "name": "dep1", "optional": false, "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^1.0" } ], "description": "foo", 
"documentation": null, "features": {}, "homepage": null, "keywords": [], "license": "MIT", "license_file": null, "links": null, "name": "foo", "readme": null, "readme_file": null, "repository": null, "vers": "0.1.0" } "#, "foo-0.1.0.crate", &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], &[ ( "Cargo.toml", // Check that only `version` is included in Cargo.toml. &format!( "{}\n\ [package]\n\ edition = \"2018\"\n\ name = \"foo\"\n\ version = \"0.1.0\"\n\ authors = []\n\ description = \"foo\"\n\ license = \"MIT\"\n\ \n\ [dependencies.dep1]\n\ version = \"1.0\"\n\ ", cargo::core::package::MANIFEST_PREAMBLE ), ), ( "Cargo.lock", // The important check here is that it is 1.0.1 in the registry. "# This file is automatically @generated by Cargo.\n\ # It is not intended for manual editing.\n\ version = 3\n\ \n\ [[package]]\n\ name = \"dep1\"\n\ version = \"1.0.1\"\n\ source = \"registry+https://github.com/rust-lang/crates.io-index\"\n\ checksum = \"[..]\"\n\ \n\ [[package]]\n\ name = \"foo\"\n\ version = \"0.1.0\"\n\ dependencies = [\n\ \x20\"dep1\",\n\ ]\n\ ", ), ], ); } #[cargo_test] fn publish_dev_dep_no_version() { registry::init(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] license = "MIT" description = "foo" documentation = "foo" homepage = "foo" repository = "foo" [dev-dependencies] bar = { path = "bar" } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "") .build(); p.cargo("publish --no-verify --token sekrit") .with_stderr( "\ [UPDATING] [..] [PACKAGING] foo v0.1.0 [..] [UPLOADING] foo v0.1.0 [..] 
", ) .run(); publish::validate_upload_with_contents( r#" { "authors": [], "badges": {}, "categories": [], "deps": [], "description": "foo", "documentation": "foo", "features": {}, "homepage": "foo", "keywords": [], "license": "MIT", "license_file": null, "links": null, "name": "foo", "readme": null, "readme_file": null, "repository": "foo", "vers": "0.1.0" } "#, "foo-0.1.0.crate", &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"], &[( "Cargo.toml", &format!( r#"{} [package] name = "foo" version = "0.1.0" authors = [] description = "foo" homepage = "foo" documentation = "foo" license = "MIT" repository = "foo" [dev-dependencies] "#, cargo::core::package::MANIFEST_PREAMBLE ), )], ); } #[cargo_test] fn credentials_ambiguous_filename() { registry::init(); let credentials_toml = paths::home().join(".cargo/credentials.toml"); fs::write(credentials_toml, r#"token = "api-token""#).unwrap(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("publish --no-verify --token sekrit") .with_stderr_contains( "\ [WARNING] Both `[..]/credentials` and `[..]/credentials.toml` exist. Using `[..]/credentials` ", ) .run(); validate_upload_foo(); } #[cargo_test] fn index_requires_token() { // --index will not load registry.token to avoid possibly leaking // crates.io token to another server. let registry = registry::init(); let credentials = paths::home().join(".cargo/credentials"); fs::remove_file(&credentials).unwrap(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" "#, ) .file("src/lib.rs", "") .build(); p.cargo("publish --no-verify --index") .arg(registry.index_url().as_str()) .with_status(101) .with_stderr( "\ [UPDATING] [..] 
[ERROR] command-line argument --index requires --token to be specified ", ) .run(); } #[cargo_test] fn registry_token_with_source_replacement() { // publish with source replacement without --token registry::init(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" "#, ) .file("src/lib.rs", "") .build(); p.cargo("publish --no-verify") .with_stderr( "\ [UPDATING] [..] [WARNING] using `registry.token` config value with source replacement is deprecated This may become a hard error in the future[..] Use the --token command-line flag to remove this warning. [WARNING] manifest has no documentation, [..] See [..] [PACKAGING] foo v0.0.1 ([CWD]) [UPLOADING] foo v0.0.1 ([CWD]) ", ) .run(); } #[cargo_test] fn publish_with_missing_readme() { registry::init(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] license = "MIT" description = "foo" homepage = "https://example.com/" readme = "foo.md" "#, ) .file("src/lib.rs", "") .build(); p.cargo("publish --no-verify --token sekrit") .with_status(101) .with_stderr(&format!( "\ [UPDATING] [..] [PACKAGING] foo v0.1.0 [..] [UPLOADING] foo v0.1.0 [..] [ERROR] failed to read `readme` file for package `foo v0.1.0 ([ROOT]/foo)` Caused by: failed to read `[ROOT]/foo/foo.md` Caused by: {} ", no_such_file_err_msg() )) .run(); } #[cargo_test] fn api_error_json() { // Registry returns an API error. 
let _registry = registry::RegistryBuilder::new() .alternative() .http_api() .add_responder("/api/v1/crates/new", |_| Response { body: br#"{"errors": [{"detail": "you must be logged in"}]}"#.to_vec(), code: 403, headers: vec![], }) .build(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" documentation = "foo" homepage = "foo" repository = "foo" "#, ) .file("src/lib.rs", "") .build(); p.cargo("publish --no-verify --registry alternative") .with_status(101) .with_stderr( "\ [UPDATING] [..] [PACKAGING] foo v0.0.1 [..] [UPLOADING] foo v0.0.1 [..] [ERROR] failed to publish to registry at http://127.0.0.1:[..]/ Caused by: the remote server responded with an error (status 403 Forbidden): you must be logged in ", ) .run(); } #[cargo_test] fn api_error_200() { // Registry returns an API error with a 200 status code. let _registry = registry::RegistryBuilder::new() .alternative() .http_api() .add_responder("/api/v1/crates/new", |_| Response { body: br#"{"errors": [{"detail": "max upload size is 123"}]}"#.to_vec(), code: 200, headers: vec![], }) .build(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" documentation = "foo" homepage = "foo" repository = "foo" "#, ) .file("src/lib.rs", "") .build(); p.cargo("publish --no-verify --registry alternative") .with_status(101) .with_stderr( "\ [UPDATING] [..] [PACKAGING] foo v0.0.1 [..] [UPLOADING] foo v0.0.1 [..] [ERROR] failed to publish to registry at http://127.0.0.1:[..]/ Caused by: the remote server responded with an error: max upload size is 123 ", ) .run(); } #[cargo_test] fn api_error_code() { // Registry returns an error code without a JSON message. 
let _registry = registry::RegistryBuilder::new() .alternative() .http_api() .add_responder("/api/v1/crates/new", |_| Response { body: br#"go away"#.to_vec(), code: 400, headers: vec![], }) .build(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" documentation = "foo" homepage = "foo" repository = "foo" "#, ) .file("src/lib.rs", "") .build(); p.cargo("publish --no-verify --registry alternative") .with_status(101) .with_stderr( "\ [UPDATING] [..] [PACKAGING] foo v0.0.1 [..] [UPLOADING] foo v0.0.1 [..] [ERROR] failed to publish to registry at http://127.0.0.1:[..]/ Caused by: failed to get a 200 OK response, got 400 headers: HTTP/1.1 400 Content-Length: 7 body: go away ", ) .run(); } #[cargo_test] fn api_curl_error() { // Registry has a network error. let _registry = registry::RegistryBuilder::new() .alternative() .http_api() .add_responder("/api/v1/crates/new", |_| { panic!("broke"); }) .build(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" documentation = "foo" homepage = "foo" repository = "foo" "#, ) .file("src/lib.rs", "") .build(); // This doesn't check for the exact text of the error in the remote // possibility that cargo is linked with a weird version of libcurl, or // curl changes the text of the message. Currently the message 52 // (CURLE_GOT_NOTHING) is: // Server returned nothing (no headers, no data) (Empty reply from server) p.cargo("publish --no-verify --registry alternative") .with_status(101) .with_stderr( "\ [UPDATING] [..] [PACKAGING] foo v0.0.1 [..] [UPLOADING] foo v0.0.1 [..] [ERROR] failed to publish to registry at http://127.0.0.1:[..]/ Caused by: [52] [..] ", ) .run(); } #[cargo_test] fn api_other_error() { // Registry returns an invalid response. 
let _registry = registry::RegistryBuilder::new() .alternative() .http_api() .add_responder("/api/v1/crates/new", |_| Response { body: b"\xff".to_vec(), code: 200, headers: vec![], }) .build(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" documentation = "foo" homepage = "foo" repository = "foo" "#, ) .file("src/lib.rs", "") .build(); p.cargo("publish --no-verify --registry alternative") .with_status(101) .with_stderr( "\ [UPDATING] [..] [PACKAGING] foo v0.0.1 [..] [UPLOADING] foo v0.0.1 [..] [ERROR] failed to publish to registry at http://127.0.0.1:[..]/ Caused by: invalid response from server Caused by: response body was not valid utf-8 ", ) .run(); } #[cargo_test] fn in_package_workspace() { registry::init(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2021" [workspace] members = ["li"] "#, ) .file("src/main.rs", "fn main() {}") .file( "li/Cargo.toml", r#" [package] name = "li" version = "0.0.1" description = "li" license = "MIT" "#, ) .file("li/src/main.rs", "fn main() {}") .build(); p.cargo("publish -p li --no-verify --token sekrit") .with_stderr( "\ [UPDATING] [..] [WARNING] manifest has no documentation, homepage or repository. See [..] 
[PACKAGING] li v0.0.1 ([CWD]/li) [UPLOADING] li v0.0.1 ([CWD]/li) ", ) .run(); validate_upload_li(); } #[cargo_test] fn with_duplicate_spec_in_members() { registry::init(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [workspace] resolver = "2" members = ["li","bar"] default-members = ["li","bar"] "#, ) .file("src/main.rs", "fn main() {}") .file( "li/Cargo.toml", r#" [package] name = "li" version = "0.0.1" description = "li" license = "MIT" "#, ) .file("li/src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" description = "bar" license = "MIT" "#, ) .file("bar/src/main.rs", "fn main() {}") .build(); p.cargo("publish --no-verify --token sekrit") .with_status(101) .with_stderr( "error: the `-p` argument must be specified to select a single package to publish", ) .run(); } #[cargo_test] fn in_package_workspace_with_members_with_features_old() { registry::init(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [workspace] members = ["li"] "#, ) .file("src/main.rs", "fn main() {}") .file( "li/Cargo.toml", r#" [package] name = "li" version = "0.0.1" description = "li" license = "MIT" "#, ) .file("li/src/main.rs", "fn main() {}") .build(); p.cargo("publish -p li --no-verify --token sekrit") .with_stderr( "\ [UPDATING] [..] [WARNING] manifest has no documentation, homepage or repository. See [..] 
[PACKAGING] li v0.0.1 ([CWD]/li) [UPLOADING] li v0.0.1 ([CWD]/li) ", ) .run(); validate_upload_li(); } #[cargo_test] fn in_virtual_workspace() { registry::init(); let p = project() .file( "Cargo.toml", r#" [workspace] members = ["foo"] "#, ) .file( "foo/Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" "#, ) .file("foo/src/main.rs", "fn main() {}") .build(); p.cargo("publish --no-verify --token sekrit") .with_status(101) .with_stderr( "error: the `-p` argument must be specified in the root of a virtual workspace", ) .run(); } #[cargo_test] fn in_virtual_workspace_with_p() { registry::init(); let p = project() .file( "Cargo.toml", r#" [workspace] members = ["foo","li"] "#, ) .file( "foo/Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" "#, ) .file("foo/src/main.rs", "fn main() {}") .file( "li/Cargo.toml", r#" [package] name = "li" version = "0.0.1" description = "li" license = "MIT" "#, ) .file("li/src/main.rs", "fn main() {}") .build(); p.cargo("publish -p li --no-verify --token sekrit") .with_stderr( "\ [UPDATING] [..] [WARNING] manifest has no documentation, homepage or repository. See [..] [PACKAGING] li v0.0.1 ([CWD]/li) [UPLOADING] li v0.0.1 ([CWD]/li) ", ) .run(); } #[cargo_test] fn in_package_workspace_not_found() { registry::init(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2021" [workspace] "#, ) .file("src/main.rs", "fn main() {}") .file( "li/Cargo.toml", r#" [package] name = "li" version = "0.0.1" edition = "2021" authors = [] license = "MIT" description = "li" "#, ) .file("li/src/main.rs", "fn main() {}") .build(); p.cargo("publish -p li --no-verify --token sekrit ") .with_status(101) .with_stderr( "\ error: package ID specification `li` did not match any packages Did you mean `foo`? 
", ) .run(); } #[cargo_test] fn in_package_workspace_found_multiple() { registry::init(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2021" [workspace] members = ["li","lii"] "#, ) .file("src/main.rs", "fn main() {}") .file( "li/Cargo.toml", r#" [package] name = "li" version = "0.0.1" edition = "2021" authors = [] license = "MIT" description = "li" "#, ) .file("li/src/main.rs", "fn main() {}") .file( "lii/Cargo.toml", r#" [package] name = "lii" version = "0.0.1" edition = "2021" authors = [] license = "MIT" description = "lii" "#, ) .file("lii/src/main.rs", "fn main() {}") .build(); p.cargo("publish -p li* --no-verify --token sekrit ") .with_status(101) .with_stderr( "\ error: the `-p` argument must be specified to select a single package to publish ", ) .run(); } #[cargo_test] // https://github.com/rust-lang/cargo/issues/10536 fn publish_path_dependency_without_workspace() { registry::init(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2021" [dependencies.bar] path = "bar" "#, ) .file("src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" edition = "2021" authors = [] license = "MIT" description = "bar" "#, ) .file("bar/src/main.rs", "fn main() {}") .build(); p.cargo("publish -p bar --no-verify --token sekrit ") .with_status(101) .with_stderr( "\ error: package ID specification `bar` did not match any packages Did you mean `foo`? ", ) .run(); } cargo-0.66.0/tests/testsuite/publish_lockfile.rs000066400000000000000000000304701432416201200217540ustar00rootroot00000000000000//! Tests for including `Cargo.lock` when publishing/packaging. 
use std::fs::File; use cargo_test_support::registry::Package; use cargo_test_support::{ basic_manifest, cargo_process, git, paths, project, publish::validate_crate_contents, }; fn pl_manifest(name: &str, version: &str, extra: &str) -> String { format!( r#" [package] name = "{}" version = "{}" authors = [] license = "MIT" description = "foo" documentation = "foo" homepage = "foo" repository = "foo" {} "#, name, version, extra ) } #[cargo_test] fn removed() { let p = project() .file( "Cargo.toml", r#" cargo-features = ["publish-lockfile"] [package] name = "foo" version = "0.1.0" publish-lockfile = true license = "MIT" description = "foo" documentation = "foo" homepage = "foo" repository = "foo" "#, ) .file("src/lib.rs", "") .build(); p.cargo("package") .masquerade_as_nightly_cargo(&["publish-lockfile"]) .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at [..] Caused by: the cargo feature `publish-lockfile` has been removed in the 1.37 release Remove the feature from Cargo.toml to remove this error. See https://doc.rust-lang.org/[..]cargo/reference/unstable.html#publish-lockfile [..] ", ) .run(); } #[cargo_test] fn package_lockfile() { let p = project() .file("Cargo.toml", &pl_manifest("foo", "0.0.1", "")) .file("src/main.rs", "fn main() {}") .build(); p.cargo("package") .with_stderr( "\ [PACKAGING] foo v0.0.1 ([CWD]) [VERIFYING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD][..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); assert!(p.root().join("target/package/foo-0.0.1.crate").is_file()); p.cargo("package -l") .with_stdout( "\ Cargo.lock Cargo.toml Cargo.toml.orig src/main.rs ", ) .run(); p.cargo("package").with_stdout("").run(); let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); validate_crate_contents( f, "foo-0.0.1.crate", &["Cargo.toml", "Cargo.toml.orig", "Cargo.lock", "src/main.rs"], &[], ); } #[cargo_test] fn package_lockfile_git_repo() { // Create a Git repository containing a minimal Rust project. let g = git::repo(&paths::root().join("foo")) .file("Cargo.toml", &pl_manifest("foo", "0.0.1", "")) .file("src/main.rs", "fn main() {}") .build(); cargo_process("package -l") .cwd(g.root()) .with_stdout( "\ .cargo_vcs_info.json Cargo.lock Cargo.toml Cargo.toml.orig src/main.rs ", ) .run(); cargo_process("package -v") .cwd(g.root()) .with_stderr( "\ [PACKAGING] foo v0.0.1 ([..]) [ARCHIVING] .cargo_vcs_info.json [ARCHIVING] Cargo.lock [ARCHIVING] Cargo.toml [ARCHIVING] Cargo.toml.orig [ARCHIVING] src/main.rs [VERIFYING] foo v0.0.1 ([..]) [COMPILING] foo v0.0.1 ([..]) [RUNNING] `rustc --crate-name foo src/main.rs [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn no_lock_file_with_library() { let p = project() .file("Cargo.toml", &pl_manifest("foo", "0.0.1", "")) .file("src/lib.rs", "") .build(); p.cargo("package").run(); let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); validate_crate_contents( f, "foo-0.0.1.crate", &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"], &[], ); } #[cargo_test] fn lock_file_and_workspace() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["foo"] "#, ) .file("foo/Cargo.toml", &pl_manifest("foo", "0.0.1", "")) .file("foo/src/main.rs", "fn main() {}") .build(); p.cargo("package").cwd("foo").run(); let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); validate_crate_contents( f, "foo-0.0.1.crate", &["Cargo.toml", "Cargo.toml.orig", "src/main.rs", "Cargo.lock"], &[], ); } #[cargo_test] fn note_resolve_changes() { // `multi` has multiple sources (path and registry). Package::new("multi", "0.1.0").publish(); // `updated` is always from registry, but should not change. Package::new("updated", "1.0.0").publish(); // `patched` is [patch]ed. Package::new("patched", "1.0.0").publish(); let p = project() .file( "Cargo.toml", &pl_manifest( "foo", "0.0.1", r#" [dependencies] multi = { path = "multi", version = "0.1" } updated = "1.0" patched = "1.0" [patch.crates-io] patched = { path = "patched" } "#, ), ) .file("src/main.rs", "fn main() {}") .file("multi/Cargo.toml", &basic_manifest("multi", "0.1.0")) .file("multi/src/lib.rs", "") .file("patched/Cargo.toml", &basic_manifest("patched", "1.0.0")) .file("patched/src/lib.rs", "") .build(); p.cargo("generate-lockfile").run(); // Make sure this does not change or warn. 
Package::new("updated", "1.0.1").publish(); p.cargo("package --no-verify -v --allow-dirty") .with_stderr_unordered( "\ [PACKAGING] foo v0.0.1 ([..]) [ARCHIVING] Cargo.lock [ARCHIVING] Cargo.toml [ARCHIVING] Cargo.toml.orig [ARCHIVING] src/main.rs [UPDATING] `[..]` index [NOTE] package `multi v0.1.0` added to the packaged Cargo.lock file, was originally sourced from `[..]/foo/multi` [NOTE] package `patched v1.0.0` added to the packaged Cargo.lock file, was originally sourced from `[..]/foo/patched` ", ) .run(); } #[cargo_test] fn outdated_lock_version_change_does_not_warn() { // If the version of the package being packaged changes, but Cargo.lock is // not updated, don't bother warning about it. let p = project() .file("Cargo.toml", &pl_manifest("foo", "0.1.0", "")) .file("src/main.rs", "fn main() {}") .build(); p.cargo("generate-lockfile").run(); p.change_file("Cargo.toml", &pl_manifest("foo", "0.2.0", "")); p.cargo("package --no-verify") .with_stderr("[PACKAGING] foo v0.2.0 ([..])") .run(); } #[cargo_test] fn no_warn_workspace_extras() { // Other entries in workspace lock file should be ignored. 
Package::new("dep1", "1.0.0").publish(); Package::new("dep2", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a", "b"] "#, ) .file( "a/Cargo.toml", &pl_manifest( "a", "0.1.0", r#" [dependencies] dep1 = "1.0" "#, ), ) .file("a/src/main.rs", "fn main() {}") .file( "b/Cargo.toml", &pl_manifest( "b", "0.1.0", r#" [dependencies] dep2 = "1.0" "#, ), ) .file("b/src/main.rs", "fn main() {}") .build(); p.cargo("generate-lockfile").run(); p.cargo("package --no-verify") .cwd("a") .with_stderr( "\ [PACKAGING] a v0.1.0 ([..]) [UPDATING] `[..]` index ", ) .run(); } #[cargo_test] fn warn_package_with_yanked() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", &pl_manifest( "foo", "0.0.1", r#" [dependencies] bar = "0.1" "#, ), ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("generate-lockfile").run(); Package::new("bar", "0.1.0").yanked(true).publish(); // Make sure it sticks with the locked (yanked) version. Package::new("bar", "0.1.1").publish(); p.cargo("package --no-verify") .with_stderr( "\ [PACKAGING] foo v0.0.1 ([..]) [UPDATING] `[..]` index [WARNING] package `bar v0.1.0` in Cargo.lock is yanked in registry \ `crates-io`, consider updating to a version that is not yanked ", ) .run(); } #[cargo_test] fn warn_install_with_yanked() { Package::new("bar", "0.1.0").yanked(true).publish(); Package::new("bar", "0.1.1").publish(); Package::new("foo", "0.1.0") .dep("bar", "0.1") .file("src/main.rs", "fn main() {}") .file( "Cargo.lock", r#" [[package]] name = "bar" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "foo" version = "0.1.0" dependencies = [ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] "#, ) .publish(); cargo_process("install --locked foo") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... 
[DOWNLOADED] foo v0.1.0 (registry `[..]`) [INSTALLING] foo v0.1.0 [WARNING] package `bar v0.1.0` in Cargo.lock is yanked in registry \ `crates-io`, consider running without --locked [DOWNLOADING] crates ... [DOWNLOADED] bar v0.1.0 (registry `[..]`) [COMPILING] bar v0.1.0 [COMPILING] foo v0.1.0 [FINISHED] release [optimized] target(s) in [..] [INSTALLING] [..]/.cargo/bin/foo[EXE] [INSTALLED] package `foo v0.1.0` (executable `foo[EXE]`) [WARNING] be sure to add [..] ", ) .run(); // Try again without --locked, make sure it uses 0.1.1 and does not warn. cargo_process("install --force foo") .with_stderr( "\ [UPDATING] `[..]` index [INSTALLING] foo v0.1.0 [DOWNLOADING] crates ... [DOWNLOADED] bar v0.1.1 (registry `[..]`) [COMPILING] bar v0.1.1 [COMPILING] foo v0.1.0 [FINISHED] release [optimized] target(s) in [..] [REPLACING] [..]/.cargo/bin/foo[EXE] [REPLACED] package `foo v0.1.0` with `foo v0.1.0` (executable `foo[EXE]`) [WARNING] be sure to add [..] ", ) .run(); } #[cargo_test] fn ignore_lockfile() { // With an explicit `include` list, but Cargo.lock in .gitignore, don't // complain about `Cargo.lock` being ignored. Note that it is still // included in the packaged regardless. let p = git::new("foo", |p| { p.file( "Cargo.toml", &pl_manifest( "foo", "0.0.1", r#" include = [ "src/main.rs" ] "#, ), ) .file("src/main.rs", "fn main() {}") .file(".gitignore", "Cargo.lock") }); p.cargo("package -l") .with_stdout( "\ .cargo_vcs_info.json Cargo.lock Cargo.toml Cargo.toml.orig src/main.rs ", ) .run(); p.cargo("generate-lockfile").run(); p.cargo("package -v") .with_stderr( "\ [PACKAGING] foo v0.0.1 ([..]) [ARCHIVING] .cargo_vcs_info.json [ARCHIVING] Cargo.lock [ARCHIVING] Cargo.toml [ARCHIVING] Cargo.toml.orig [ARCHIVING] src/main.rs [VERIFYING] foo v0.0.1 ([..]) [COMPILING] foo v0.0.1 ([..]) [RUNNING] `rustc --crate-name foo src/main.rs [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn ignore_lockfile_inner() { // Ignore `Cargo.lock` if in .gitignore in a git subdirectory. let p = git::new("foo", |p| { p.no_manifest() .file("bar/Cargo.toml", &pl_manifest("bar", "0.0.1", "")) .file("bar/src/main.rs", "fn main() {}") .file("bar/.gitignore", "Cargo.lock") }); p.cargo("generate-lockfile").cwd("bar").run(); p.cargo("package -v --no-verify") .cwd("bar") .with_stderr( "\ [PACKAGING] bar v0.0.1 ([..]) [ARCHIVING] .cargo_vcs_info.json [ARCHIVING] .gitignore [ARCHIVING] Cargo.lock [ARCHIVING] Cargo.toml [ARCHIVING] Cargo.toml.orig [ARCHIVING] src/main.rs ", ) .run(); } cargo-0.66.0/tests/testsuite/read_manifest.rs000066400000000000000000000126231432416201200212370ustar00rootroot00000000000000//! Tests for the `cargo read-manifest` command. use cargo_test_support::{basic_bin_manifest, main_file, project}; fn manifest_output(readme_value: &str) -> String { format!( r#" {{ "authors": [ "wycats@example.com" ], "categories": [], "default_run": null, "name":"foo", "readme": {}, "homepage": null, "documentation": null, "repository": null, "rust_version": null, "version":"0.5.0", "id":"foo[..]0.5.0[..](path+file://[..]/foo)", "keywords": [], "license": null, "license_file": null, "links": null, "description": null, "edition": "2015", "source":null, "dependencies":[], "targets":[{{ "kind":["bin"], "crate_types":["bin"], "doc": true, "doctest": false, "test": true, "edition": "2015", "name":"foo", "src_path":"[..]/foo/src/foo.rs" }}], "features":{{}}, "manifest_path":"[..]Cargo.toml", "metadata": null, "publish": null }}"#, readme_value ) } fn manifest_output_no_readme() -> String { manifest_output("null") } pub fn basic_bin_manifest_with_readme(name: &str, readme_filename: &str) -> String { format!( r#" [package] name = "{}" version = "0.5.0" authors = ["wycats@example.com"] readme = {} [[bin]] name = "{}" "#, name, readme_filename, name ) } #[cargo_test] fn cargo_read_manifest_path_to_cargo_toml_relative() { let p = project() 
.file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("read-manifest --manifest-path foo/Cargo.toml") .cwd(p.root().parent().unwrap()) .with_json(&manifest_output_no_readme()) .run(); } #[cargo_test] fn cargo_read_manifest_path_to_cargo_toml_absolute() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("read-manifest --manifest-path") .arg(p.root().join("Cargo.toml")) .cwd(p.root().parent().unwrap()) .with_json(&manifest_output_no_readme()) .run(); } #[cargo_test] fn cargo_read_manifest_path_to_cargo_toml_parent_relative() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("read-manifest --manifest-path foo") .cwd(p.root().parent().unwrap()) .with_status(101) .with_stderr( "[ERROR] the manifest-path must be \ a path to a Cargo.toml file", ) .run(); } #[cargo_test] fn cargo_read_manifest_path_to_cargo_toml_parent_absolute() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("read-manifest --manifest-path") .arg(p.root()) .cwd(p.root().parent().unwrap()) .with_status(101) .with_stderr( "[ERROR] the manifest-path must be \ a path to a Cargo.toml file", ) .run(); } #[cargo_test] fn cargo_read_manifest_cwd() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("read-manifest") .with_json(&manifest_output_no_readme()) .run(); } #[cargo_test] fn cargo_read_manifest_with_specified_readme() { let p = project() .file( "Cargo.toml", &basic_bin_manifest_with_readme("foo", r#""SomeReadme.txt""#), ) .file("SomeReadme.txt", "Sample Project") .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("read-manifest") 
.with_json(&manifest_output(&format!(r#""{}""#, "SomeReadme.txt"))) .run(); } #[cargo_test] fn cargo_read_manifest_default_readme() { let readme_filenames = ["README.md", "README.txt", "README"]; for readme in readme_filenames.iter() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file(readme, "Sample project") .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("read-manifest") .with_json(&manifest_output(&format!(r#""{}""#, readme))) .run(); } } #[cargo_test] fn cargo_read_manifest_suppress_default_readme() { let p = project() .file( "Cargo.toml", &basic_bin_manifest_with_readme("foo", "false"), ) .file("README.txt", "Sample project") .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("read-manifest") .with_json(&manifest_output_no_readme()) .run(); } // If a file named README.md exists, and `readme = true`, the value `README.md` should be defaulted in. #[cargo_test] fn cargo_read_manifest_defaults_readme_if_true() { let p = project() .file("Cargo.toml", &basic_bin_manifest_with_readme("foo", "true")) .file("README.md", "Sample project") .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("read-manifest") .with_json(&manifest_output(r#""README.md""#)) .run(); } cargo-0.66.0/tests/testsuite/registry.rs000066400000000000000000001731671432416201200203210ustar00rootroot00000000000000//! Tests for normal registry dependencies. 
use cargo::core::SourceId; use cargo_test_support::cargo_process; use cargo_test_support::paths::{self, CargoPathExt}; use cargo_test_support::registry::{ self, registry_path, Dependency, Package, RegistryBuilder, TestRegistry, }; use cargo_test_support::{basic_manifest, project, Execs, Project}; use cargo_test_support::{git, install::cargo_home, t}; use cargo_util::paths::remove_dir_all; use std::fs::{self, File}; use std::path::Path; fn cargo_http(p: &Project, s: &str) -> Execs { let mut e = p.cargo(s); e.arg("-Zsparse-registry") .masquerade_as_nightly_cargo(&["sparse-registry"]); e } fn cargo_stable(p: &Project, s: &str) -> Execs { p.cargo(s) } fn setup_http() -> TestRegistry { RegistryBuilder::new().http_index().build() } #[cargo_test] fn simple_http() { let _server = setup_http(); simple(cargo_http); } #[cargo_test] fn simple_git() { simple(cargo_stable); } fn simple(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = ">= 0.0.0" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("bar", "0.0.1").publish(); cargo(&p, "build") .with_stderr( "\ [UPDATING] `dummy-registry` index [DOWNLOADING] crates ... 
[DOWNLOADED] bar v0.0.1 (registry `dummy-registry`) [COMPILING] bar v0.0.1 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s ", ) .run(); cargo(&p, "clean").run(); assert!(paths::home().join(".cargo/registry/CACHEDIR.TAG").is_file()); // Don't download a second time cargo(&p, "build") .with_stderr( "\ [COMPILING] bar v0.0.1 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s ", ) .run(); } #[cargo_test] fn deps_http() { let _server = setup_http(); deps(cargo_http); } #[cargo_test] fn deps_git() { deps(cargo_stable); } fn deps(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = ">= 0.0.0" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("baz", "0.0.1").publish(); Package::new("bar", "0.0.1").dep("baz", "*").publish(); cargo(&p, "build") .with_stderr( "\ [UPDATING] `dummy-registry` index [DOWNLOADING] crates ... [DOWNLOADED] [..] v0.0.1 (registry `dummy-registry`) [DOWNLOADED] [..] v0.0.1 (registry `dummy-registry`) [COMPILING] baz v0.0.1 [COMPILING] bar v0.0.1 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s ", ) .run(); assert!(paths::home().join(".cargo/registry/CACHEDIR.TAG").is_file()); } #[cargo_test] fn nonexistent_http() { let _server = setup_http(); nonexistent(cargo_http); } #[cargo_test] fn nonexistent_git() { nonexistent(cargo_stable); } fn nonexistent(cargo: fn(&Project, &str) -> Execs) { Package::new("init", "0.0.1").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] nonexistent = ">= 0.0.0" "#, ) .file("src/main.rs", "fn main() {}") .build(); cargo(&p, "build") .with_status(101) .with_stderr( "\ [UPDATING] [..] index error: no matching package named `nonexistent` found location searched: registry [..] 
required by package `foo v0.0.1 ([..])` ", ) .run(); } #[cargo_test] fn wrong_case_http() { let _server = setup_http(); wrong_case(cargo_http); } #[cargo_test] fn wrong_case_git() { wrong_case(cargo_stable); } fn wrong_case(cargo: fn(&Project, &str) -> Execs) { Package::new("init", "0.0.1").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] Init = ">= 0.0.0" "#, ) .file("src/main.rs", "fn main() {}") .build(); // #5678 to make this work cargo(&p, "build") .with_status(101) .with_stderr( "\ [UPDATING] [..] index error: no matching package found searched package name: `Init` perhaps you meant: init location searched: registry [..] required by package `foo v0.0.1 ([..])` ", ) .run(); } #[cargo_test] fn mis_hyphenated_http() { let _server = setup_http(); mis_hyphenated(cargo_http); } #[cargo_test] fn mis_hyphenated_git() { mis_hyphenated(cargo_stable); } fn mis_hyphenated(cargo: fn(&Project, &str) -> Execs) { Package::new("mis-hyphenated", "0.0.1").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] mis_hyphenated = ">= 0.0.0" "#, ) .file("src/main.rs", "fn main() {}") .build(); // #2775 to make this work cargo(&p, "build") .with_status(101) .with_stderr( "\ [UPDATING] [..] index error: no matching package found searched package name: `mis_hyphenated` perhaps you meant: mis-hyphenated location searched: registry [..] 
required by package `foo v0.0.1 ([..])` ", ) .run(); } #[cargo_test] fn wrong_version_http() { let _server = setup_http(); wrong_version(cargo_http); } #[cargo_test] fn wrong_version_git() { wrong_version(cargo_stable); } fn wrong_version(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] foo = ">= 1.0.0" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("foo", "0.0.1").publish(); Package::new("foo", "0.0.2").publish(); cargo(&p, "build") .with_status(101) .with_stderr_contains( "\ error: failed to select a version for the requirement `foo = \">=1.0.0\"` candidate versions found which didn't match: 0.0.2, 0.0.1 location searched: `[..]` index (which is replacing registry `[..]`) required by package `foo v0.0.1 ([..])` ", ) .run(); Package::new("foo", "0.0.3").publish(); Package::new("foo", "0.0.4").publish(); cargo(&p, "build") .with_status(101) .with_stderr_contains( "\ error: failed to select a version for the requirement `foo = \">=1.0.0\"` candidate versions found which didn't match: 0.0.4, 0.0.3, 0.0.2, ... location searched: `[..]` index (which is replacing registry `[..]`) required by package `foo v0.0.1 ([..])` ", ) .run(); } #[cargo_test] fn bad_cksum_http() { let _server = setup_http(); bad_cksum(cargo_http); } #[cargo_test] fn bad_cksum_git() { bad_cksum(cargo_stable); } fn bad_cksum(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bad-cksum = ">= 0.0.0" "#, ) .file("src/main.rs", "fn main() {}") .build(); let pkg = Package::new("bad-cksum", "0.0.1"); pkg.publish(); t!(File::create(&pkg.archive_dst())); cargo(&p, "build -v") .with_status(101) .with_stderr( "\ [UPDATING] [..] index [DOWNLOADING] crates ... [DOWNLOADED] bad-cksum [..] 
[ERROR] failed to download replaced source registry `crates-io` Caused by: failed to verify the checksum of `bad-cksum v0.0.1 (registry `dummy-registry`)` ", ) .run(); } #[cargo_test] fn update_registry_http() { let _server = setup_http(); update_registry(cargo_http); } #[cargo_test] fn update_registry_git() { update_registry(cargo_stable); } fn update_registry(cargo: fn(&Project, &str) -> Execs) { Package::new("init", "0.0.1").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] notyet = ">= 0.0.0" "#, ) .file("src/main.rs", "fn main() {}") .build(); cargo(&p, "build") .with_status(101) .with_stderr_contains( "\ error: no matching package named `notyet` found location searched: registry `[..]` required by package `foo v0.0.1 ([..])` ", ) .run(); Package::new("notyet", "0.0.1").publish(); cargo(&p, "build") .with_stderr( "\ [UPDATING] `dummy-registry` index [DOWNLOADING] crates ... [DOWNLOADED] notyet v0.0.1 (registry `dummy-registry`) [COMPILING] notyet v0.0.1 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s ", ) .run(); } #[cargo_test] fn package_with_path_deps_http() { let _server = setup_http(); package_with_path_deps(cargo_http); } #[cargo_test] fn package_with_path_deps_git() { package_with_path_deps(cargo_stable); } fn package_with_path_deps(cargo: fn(&Project, &str) -> Execs) { Package::new("init", "0.0.1").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" repository = "bar" [dependencies.notyet] version = "0.0.1" path = "notyet" "#, ) .file("src/main.rs", "fn main() {}") .file("notyet/Cargo.toml", &basic_manifest("notyet", "0.0.1")) .file("notyet/src/lib.rs", "") .build(); cargo(&p, "package") .with_status(101) .with_stderr_contains( "\ [PACKAGING] foo [..] [UPDATING] [..] 
[ERROR] failed to prepare local package for uploading Caused by: no matching package named `notyet` found location searched: registry `crates-io` required by package `foo v0.0.1 [..]` ", ) .run(); Package::new("notyet", "0.0.1").publish(); cargo(&p, "package") .with_stderr( "\ [PACKAGING] foo v0.0.1 ([CWD]) [UPDATING] `[..]` index [VERIFYING] foo v0.0.1 ([CWD]) [DOWNLOADING] crates ... [DOWNLOADED] notyet v0.0.1 (registry `dummy-registry`) [COMPILING] notyet v0.0.1 [COMPILING] foo v0.0.1 ([CWD][..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s ", ) .run(); } #[cargo_test] fn lockfile_locks_http() { let _server = setup_http(); lockfile_locks(cargo_http); } #[cargo_test] fn lockfile_locks_git() { lockfile_locks(cargo_stable); } fn lockfile_locks(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("bar", "0.0.1").publish(); cargo(&p, "build") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... 
[DOWNLOADED] bar v0.0.1 (registry `dummy-registry`) [COMPILING] bar v0.0.1 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s ", ) .run(); p.root().move_into_the_past(); Package::new("bar", "0.0.2").publish(); cargo(&p, "build").with_stdout("").run(); } #[cargo_test] fn lockfile_locks_transitively_http() { let _server = setup_http(); lockfile_locks_transitively(cargo_http); } #[cargo_test] fn lockfile_locks_transitively_git() { lockfile_locks_transitively(cargo_stable); } fn lockfile_locks_transitively(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("baz", "0.0.1").publish(); Package::new("bar", "0.0.1").dep("baz", "*").publish(); cargo(&p, "build") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... [DOWNLOADED] [..] v0.0.1 (registry `dummy-registry`) [DOWNLOADED] [..] 
v0.0.1 (registry `dummy-registry`) [COMPILING] baz v0.0.1 [COMPILING] bar v0.0.1 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s ", ) .run(); p.root().move_into_the_past(); Package::new("baz", "0.0.2").publish(); Package::new("bar", "0.0.2").dep("baz", "*").publish(); cargo(&p, "build").with_stdout("").run(); } #[cargo_test] fn yanks_are_not_used_http() { let _server = setup_http(); yanks_are_not_used(cargo_http); } #[cargo_test] fn yanks_are_not_used_git() { yanks_are_not_used(cargo_stable); } fn yanks_are_not_used(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("baz", "0.0.1").publish(); Package::new("baz", "0.0.2").yanked(true).publish(); Package::new("bar", "0.0.1").dep("baz", "*").publish(); Package::new("bar", "0.0.2") .dep("baz", "*") .yanked(true) .publish(); cargo(&p, "build") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... [DOWNLOADED] [..] v0.0.1 (registry `dummy-registry`) [DOWNLOADED] [..] 
v0.0.1 (registry `dummy-registry`) [COMPILING] baz v0.0.1 [COMPILING] bar v0.0.1 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s ", ) .run(); } #[cargo_test] fn relying_on_a_yank_is_bad_http() { let _server = setup_http(); relying_on_a_yank_is_bad(cargo_http); } #[cargo_test] fn relying_on_a_yank_is_bad_git() { relying_on_a_yank_is_bad(cargo_stable); } fn relying_on_a_yank_is_bad(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("baz", "0.0.1").publish(); Package::new("baz", "0.0.2").yanked(true).publish(); Package::new("bar", "0.0.1").dep("baz", "=0.0.2").publish(); cargo(&p, "build") .with_status(101) .with_stderr_contains( "\ error: failed to select a version for the requirement `baz = \"=0.0.2\"` candidate versions found which didn't match: 0.0.1 location searched: `[..]` index (which is replacing registry `[..]`) required by package `bar v0.0.1` ... which satisfies dependency `bar = \"*\"` of package `foo [..]` ", ) .run(); } #[cargo_test] fn yanks_in_lockfiles_are_ok_http() { let _server = setup_http(); yanks_in_lockfiles_are_ok(cargo_http); } #[cargo_test] fn yanks_in_lockfiles_are_ok_git() { yanks_in_lockfiles_are_ok(cargo_stable); } fn yanks_in_lockfiles_are_ok(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("bar", "0.0.1").publish(); cargo(&p, "build").run(); registry_path().join("3").rm_rf(); Package::new("bar", "0.0.1").yanked(true).publish(); cargo(&p, "build").with_stdout("").run(); cargo(&p, "update") .with_status(101) .with_stderr_contains( "\ error: no matching package named `bar` found location searched: registry [..] 
required by package `foo v0.0.1 ([..])` ", ) .run(); } #[cargo_test] fn yanks_in_lockfiles_are_ok_for_other_update_http() { let _server = setup_http(); yanks_in_lockfiles_are_ok_for_other_update(cargo_http); } #[cargo_test] fn yanks_in_lockfiles_are_ok_for_other_update_git() { yanks_in_lockfiles_are_ok_for_other_update(cargo_stable); } fn yanks_in_lockfiles_are_ok_for_other_update(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" baz = "*" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("bar", "0.0.1").publish(); Package::new("baz", "0.0.1").publish(); cargo(&p, "build").run(); registry_path().join("3").rm_rf(); Package::new("bar", "0.0.1").yanked(true).publish(); Package::new("baz", "0.0.1").publish(); cargo(&p, "build").with_stdout("").run(); Package::new("baz", "0.0.2").publish(); cargo(&p, "update") .with_status(101) .with_stderr_contains( "\ error: no matching package named `bar` found location searched: registry [..] 
required by package `foo v0.0.1 ([..])` ", ) .run(); cargo(&p, "update -p baz") .with_stderr_contains( "\ [UPDATING] `[..]` index [UPDATING] baz v0.0.1 -> v0.0.2 ", ) .run(); } #[cargo_test] fn yanks_in_lockfiles_are_ok_with_new_dep_http() { let _server = setup_http(); yanks_in_lockfiles_are_ok_with_new_dep(cargo_http); } #[cargo_test] fn yanks_in_lockfiles_are_ok_with_new_dep_git() { yanks_in_lockfiles_are_ok_with_new_dep(cargo_stable); } fn yanks_in_lockfiles_are_ok_with_new_dep(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("bar", "0.0.1").publish(); cargo(&p, "build").run(); registry_path().join("3").rm_rf(); Package::new("bar", "0.0.1").yanked(true).publish(); Package::new("baz", "0.0.1").publish(); p.change_file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" baz = "*" "#, ); cargo(&p, "build").with_stdout("").run(); } #[cargo_test] fn update_with_lockfile_if_packages_missing_http() { let _server = setup_http(); update_with_lockfile_if_packages_missing(cargo_http); } #[cargo_test] fn update_with_lockfile_if_packages_missing_git() { update_with_lockfile_if_packages_missing(cargo_stable); } fn update_with_lockfile_if_packages_missing(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("bar", "0.0.1").publish(); cargo(&p, "build").run(); p.root().move_into_the_past(); paths::home().join(".cargo/registry").rm_rf(); cargo(&p, "build") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... 
[DOWNLOADED] bar v0.0.1 (registry `dummy-registry`) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s ", ) .run(); } #[cargo_test] fn update_lockfile_http() { let _server = setup_http(); update_lockfile(cargo_http); } #[cargo_test] fn update_lockfile_git() { update_lockfile(cargo_stable); } fn update_lockfile(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" "#, ) .file("src/main.rs", "fn main() {}") .build(); println!("0.0.1"); Package::new("bar", "0.0.1").publish(); cargo(&p, "build").run(); Package::new("bar", "0.0.2").publish(); Package::new("bar", "0.0.3").publish(); paths::home().join(".cargo/registry").rm_rf(); println!("0.0.2 update"); cargo(&p, "update -p bar --precise 0.0.2") .with_stderr( "\ [UPDATING] `[..]` index [UPDATING] bar v0.0.1 -> v0.0.2 ", ) .run(); println!("0.0.2 build"); cargo(&p, "build") .with_stderr( "\ [DOWNLOADING] crates ... [DOWNLOADED] [..] v0.0.2 (registry `dummy-registry`) [COMPILING] bar v0.0.2 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s ", ) .run(); println!("0.0.3 update"); cargo(&p, "update -p bar") .with_stderr( "\ [UPDATING] `[..]` index [UPDATING] bar v0.0.2 -> v0.0.3 ", ) .run(); println!("0.0.3 build"); cargo(&p, "build") .with_stderr( "\ [DOWNLOADING] crates ... [DOWNLOADED] [..] 
v0.0.3 (registry `dummy-registry`) [COMPILING] bar v0.0.3 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s ", ) .run(); println!("new dependencies update"); Package::new("bar", "0.0.4").dep("spam", "0.2.5").publish(); Package::new("spam", "0.2.5").publish(); cargo(&p, "update -p bar") .with_stderr( "\ [UPDATING] `[..]` index [UPDATING] bar v0.0.3 -> v0.0.4 [ADDING] spam v0.2.5 ", ) .run(); println!("new dependencies update"); Package::new("bar", "0.0.5").publish(); cargo(&p, "update -p bar") .with_stderr( "\ [UPDATING] `[..]` index [UPDATING] bar v0.0.4 -> v0.0.5 [REMOVING] spam v0.2.5 ", ) .run(); } #[cargo_test] fn dev_dependency_not_used_http() { let _server = setup_http(); dev_dependency_not_used(cargo_http); } #[cargo_test] fn dev_dependency_not_used_git() { dev_dependency_not_used(cargo_stable); } fn dev_dependency_not_used(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("baz", "0.0.1").publish(); Package::new("bar", "0.0.1").dev_dep("baz", "*").publish(); cargo(&p, "build") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... [DOWNLOADED] [..] v0.0.1 (registry `dummy-registry`) [COMPILING] bar v0.0.1 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s ", ) .run(); } #[cargo_test] fn login_with_no_cargo_dir() { // Create a config in the root directory because `login` requires the // index to be updated, and we don't want to hit crates.io. 
registry::init(); fs::rename(paths::home().join(".cargo"), paths::root().join(".cargo")).unwrap(); paths::home().rm_rf(); cargo_process("login foo -v").run(); let credentials = fs::read_to_string(paths::home().join(".cargo/credentials")).unwrap(); assert_eq!(credentials, "[registry]\ntoken = \"foo\"\n"); } #[cargo_test] fn login_with_differently_sized_token() { // Verify that the configuration file gets properly truncated. registry::init(); let credentials = paths::home().join(".cargo/credentials"); fs::remove_file(&credentials).unwrap(); cargo_process("login lmaolmaolmao -v").run(); cargo_process("login lmao -v").run(); cargo_process("login lmaolmaolmao -v").run(); let credentials = fs::read_to_string(&credentials).unwrap(); assert_eq!(credentials, "[registry]\ntoken = \"lmaolmaolmao\"\n"); } #[cargo_test] fn login_with_token_on_stdin() { registry::init(); let credentials = paths::home().join(".cargo/credentials"); fs::remove_file(&credentials).unwrap(); cargo_process("login lmao -v").run(); cargo_process("login") .with_stdout("please paste the API Token found on [..]/me below") .with_stdin("some token") .run(); let credentials = fs::read_to_string(&credentials).unwrap(); assert_eq!(credentials, "[registry]\ntoken = \"some token\"\n"); } #[cargo_test] fn bad_license_file_http() { let _server = setup_http(); bad_license_file(cargo_http); } #[cargo_test] fn bad_license_file_git() { bad_license_file(cargo_stable); } fn bad_license_file(cargo: fn(&Project, &str) -> Execs) { Package::new("foo", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license-file = "foo" description = "bar" repository = "baz" "#, ) .file("src/main.rs", "fn main() {}") .build(); cargo(&p, "publish -v --token sekrit") .with_status(101) .with_stderr_contains("[ERROR] the license file `foo` does not exist") .run(); } #[cargo_test] fn updating_a_dep_http() { let _server = setup_http(); updating_a_dep(cargo_http); } #[cargo_test] fn 
updating_a_dep_git() { updating_a_dep(cargo_stable); } fn updating_a_dep(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.a] path = "a" "#, ) .file("src/main.rs", "fn main() {}") .file( "a/Cargo.toml", r#" [project] name = "a" version = "0.0.1" authors = [] [dependencies] bar = "*" "#, ) .file("a/src/lib.rs", "") .build(); Package::new("bar", "0.0.1").publish(); cargo(&p, "build") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... [DOWNLOADED] bar v0.0.1 (registry `dummy-registry`) [COMPILING] bar v0.0.1 [COMPILING] a v0.0.1 ([CWD]/a) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s ", ) .run(); assert!(paths::home().join(".cargo/registry/CACHEDIR.TAG").is_file()); // Now delete the CACHEDIR.TAG file: this is the situation we'll be in after // upgrading from a version of Cargo that doesn't mark this directory, to one that // does. It should be recreated. fs::remove_file(paths::home().join(".cargo/registry/CACHEDIR.TAG")) .expect("remove CACHEDIR.TAG"); p.change_file( "a/Cargo.toml", r#" [project] name = "a" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" "#, ); Package::new("bar", "0.1.0").publish(); println!("second"); cargo(&p, "build") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... 
[DOWNLOADED] bar v0.1.0 (registry `dummy-registry`) [COMPILING] bar v0.1.0 [COMPILING] a v0.0.1 ([CWD]/a) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s ", ) .run(); assert!( paths::home().join(".cargo/registry/CACHEDIR.TAG").is_file(), "CACHEDIR.TAG recreated in existing registry" ); } #[cargo_test] fn git_and_registry_dep_http() { let _server = setup_http(); git_and_registry_dep(cargo_http); } #[cargo_test] fn git_and_registry_dep_git() { git_and_registry_dep(cargo_stable); } fn git_and_registry_dep(cargo: fn(&Project, &str) -> Execs) { let b = git::repo(&paths::root().join("b")) .file( "Cargo.toml", r#" [project] name = "b" version = "0.0.1" authors = [] [dependencies] a = "0.0.1" "#, ) .file("src/lib.rs", "") .build(); let p = project() .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] a = "0.0.1" [dependencies.b] git = '{}' "#, b.url() ), ) .file("src/main.rs", "fn main() {}") .build(); Package::new("a", "0.0.1").publish(); p.root().move_into_the_past(); cargo(&p, "build") .with_stderr( "\ [UPDATING] [..] [UPDATING] [..] [DOWNLOADING] crates ... [DOWNLOADED] a v0.0.1 (registry `dummy-registry`) [COMPILING] a v0.0.1 [COMPILING] b v0.0.1 ([..]) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s ", ) .run(); p.root().move_into_the_past(); println!("second"); cargo(&p, "build").with_stdout("").run(); } #[cargo_test] fn update_publish_then_update_http() { let _server = setup_http(); update_publish_then_update(cargo_http); } #[cargo_test] fn update_publish_then_update_git() { update_publish_then_update(cargo_stable); } fn update_publish_then_update(cargo: fn(&Project, &str) -> Execs) { // First generate a Cargo.lock and a clone of the registry index at the // "head" of the current registry. 
let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] a = "0.1.0" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("a", "0.1.0").publish(); cargo(&p, "build").run(); // Next, publish a new package and back up the copy of the registry we just // created. Package::new("a", "0.1.1").publish(); let registry = paths::home().join(".cargo/registry"); let backup = paths::root().join("registry-backup"); t!(fs::rename(®istry, &backup)); // Generate a Cargo.lock with the newer version, and then move the old copy // of the registry back into place. let p2 = project() .at("foo2") .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] a = "0.1.1" "#, ) .file("src/main.rs", "fn main() {}") .build(); cargo(&p2, "build").run(); registry.rm_rf(); t!(fs::rename(&backup, ®istry)); t!(fs::rename( p2.root().join("Cargo.lock"), p.root().join("Cargo.lock") )); // Finally, build the first project again (with our newer Cargo.lock) which // should force an update of the old registry, download the new crate, and // then build everything again. cargo(&p, "build") .with_stderr( "\ [UPDATING] [..] [DOWNLOADING] crates ... [DOWNLOADED] a v0.1.1 (registry `dummy-registry`) [COMPILING] a v0.1.1 [COMPILING] foo v0.5.0 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s ", ) .run(); } #[cargo_test] fn fetch_downloads_http() { let _server = setup_http(); fetch_downloads(cargo_http); } #[cargo_test] fn fetch_downloads_git() { fetch_downloads(cargo_stable); } fn fetch_downloads(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] a = "0.1.0" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("a", "0.1.0").publish(); cargo(&p, "fetch") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... 
[DOWNLOADED] a v0.1.0 (registry [..]) ", ) .run(); } #[cargo_test] fn update_transitive_dependency_http() { let _server = setup_http(); update_transitive_dependency(cargo_http); } #[cargo_test] fn update_transitive_dependency_git() { update_transitive_dependency(cargo_stable); } fn update_transitive_dependency(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] a = "0.1.0" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("a", "0.1.0").dep("b", "*").publish(); Package::new("b", "0.1.0").publish(); cargo(&p, "fetch").run(); Package::new("b", "0.1.1").publish(); cargo(&p, "update -pb") .with_stderr( "\ [UPDATING] `[..]` index [UPDATING] b v0.1.0 -> v0.1.1 ", ) .run(); cargo(&p, "build") .with_stderr( "\ [DOWNLOADING] crates ... [DOWNLOADED] b v0.1.1 (registry `dummy-registry`) [COMPILING] b v0.1.1 [COMPILING] a v0.1.0 [COMPILING] foo v0.5.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s ", ) .run(); } #[cargo_test] fn update_backtracking_ok_http() { let _server = setup_http(); update_backtracking_ok(cargo_http); } #[cargo_test] fn update_backtracking_ok_git() { update_backtracking_ok(cargo_stable); } fn update_backtracking_ok(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] webdriver = "0.1" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("webdriver", "0.1.0") .dep("hyper", "0.6") .publish(); Package::new("hyper", "0.6.5") .dep("openssl", "0.1") .dep("cookie", "0.1") .publish(); Package::new("cookie", "0.1.0") .dep("openssl", "0.1") .publish(); Package::new("openssl", "0.1.0").publish(); cargo(&p, "generate-lockfile").run(); Package::new("openssl", "0.1.1").publish(); Package::new("hyper", "0.6.6") .dep("openssl", "0.1.1") .dep("cookie", "0.1.0") .publish(); cargo(&p, "update -p hyper") .with_stderr( "\ [UPDATING] 
`[..]` index [UPDATING] hyper v0.6.5 -> v0.6.6 [UPDATING] openssl v0.1.0 -> v0.1.1 ", ) .run(); } #[cargo_test] fn update_multiple_packages_http() { let _server = setup_http(); update_multiple_packages(cargo_http); } #[cargo_test] fn update_multiple_packages_git() { update_multiple_packages(cargo_stable); } fn update_multiple_packages(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] a = "*" b = "*" c = "*" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("a", "0.1.0").publish(); Package::new("b", "0.1.0").publish(); Package::new("c", "0.1.0").publish(); cargo(&p, "fetch").run(); Package::new("a", "0.1.1").publish(); Package::new("b", "0.1.1").publish(); Package::new("c", "0.1.1").publish(); cargo(&p, "update -pa -pb") .with_stderr( "\ [UPDATING] `[..]` index [UPDATING] a v0.1.0 -> v0.1.1 [UPDATING] b v0.1.0 -> v0.1.1 ", ) .run(); cargo(&p, "update -pb -pc") .with_stderr( "\ [UPDATING] `[..]` index [UPDATING] c v0.1.0 -> v0.1.1 ", ) .run(); cargo(&p, "build") .with_stderr_contains("[DOWNLOADED] a v0.1.1 (registry `dummy-registry`)") .with_stderr_contains("[DOWNLOADED] b v0.1.1 (registry `dummy-registry`)") .with_stderr_contains("[DOWNLOADED] c v0.1.1 (registry `dummy-registry`)") .with_stderr_contains("[COMPILING] a v0.1.1") .with_stderr_contains("[COMPILING] b v0.1.1") .with_stderr_contains("[COMPILING] c v0.1.1") .with_stderr_contains("[COMPILING] foo v0.5.0 ([..])") .run(); } #[cargo_test] fn bundled_crate_in_registry_http() { let _server = setup_http(); bundled_crate_in_registry(cargo_http); } #[cargo_test] fn bundled_crate_in_registry_git() { bundled_crate_in_registry(cargo_stable); } fn bundled_crate_in_registry(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] bar = "0.1" baz = "0.1" "#, ) .file("src/main.rs", "fn main() {}") .build(); 
Package::new("bar", "0.1.0").publish(); Package::new("baz", "0.1.0") .dep("bar", "0.1.0") .file( "Cargo.toml", r#" [package] name = "baz" version = "0.1.0" authors = [] [dependencies] bar = { path = "bar", version = "0.1.0" } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "") .publish(); cargo(&p, "run").run(); } #[cargo_test] fn update_same_prefix_oh_my_how_was_this_a_bug_http() { let _server = setup_http(); update_same_prefix_oh_my_how_was_this_a_bug(cargo_http); } #[cargo_test] fn update_same_prefix_oh_my_how_was_this_a_bug_git() { update_same_prefix_oh_my_how_was_this_a_bug(cargo_stable); } fn update_same_prefix_oh_my_how_was_this_a_bug(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [project] name = "ugh" version = "0.5.0" authors = [] [dependencies] foo = "0.1" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("foobar", "0.2.0").publish(); Package::new("foo", "0.1.0") .dep("foobar", "0.2.0") .publish(); cargo(&p, "generate-lockfile").run(); cargo(&p, "update -pfoobar --precise=0.2.0").run(); } #[cargo_test] fn use_semver_http() { let _server = setup_http(); use_semver(cargo_http); } #[cargo_test] fn use_semver_git() { use_semver(cargo_stable); } fn use_semver(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = [] [dependencies] foo = "1.2.3-alpha.0" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("foo", "1.2.3-alpha.0").publish(); cargo(&p, "build").run(); } #[cargo_test] fn use_semver_package_incorrectly_http() { let _server = setup_http(); use_semver_package_incorrectly(cargo_http); } #[cargo_test] fn use_semver_package_incorrectly_git() { use_semver_package_incorrectly(cargo_stable); } fn use_semver_package_incorrectly(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a", "b"] "#, ) .file( 
"a/Cargo.toml", r#" [project] name = "a" version = "0.1.1-alpha.0" authors = [] "#, ) .file( "b/Cargo.toml", r#" [project] name = "b" version = "0.1.0" authors = [] [dependencies] a = { version = "^0.1", path = "../a" } "#, ) .file("a/src/main.rs", "fn main() {}") .file("b/src/main.rs", "fn main() {}") .build(); cargo(&p, "build") .with_status(101) .with_stderr( "\ error: no matching package found searched package name: `a` prerelease package needs to be specified explicitly a = { version = \"0.1.1-alpha.0\" } location searched: [..] required by package `b v0.1.0 ([..])` ", ) .run(); } #[cargo_test] fn only_download_relevant_http() { let _server = setup_http(); only_download_relevant(cargo_http); } #[cargo_test] fn only_download_relevant_git() { only_download_relevant(cargo_stable); } fn only_download_relevant(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = [] [target.foo.dependencies] foo = "*" [dev-dependencies] bar = "*" [dependencies] baz = "*" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("foo", "0.1.0").publish(); Package::new("bar", "0.1.0").publish(); Package::new("baz", "0.1.0").publish(); cargo(&p, "build") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... 
[DOWNLOADED] baz v0.1.0 ([..]) [COMPILING] baz v0.1.0 [COMPILING] bar v0.5.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s ", ) .run(); } #[cargo_test] fn resolve_and_backtracking_http() { let _server = setup_http(); resolve_and_backtracking(cargo_http); } #[cargo_test] fn resolve_and_backtracking_git() { resolve_and_backtracking(cargo_stable); } fn resolve_and_backtracking(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = [] [dependencies] foo = "*" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("foo", "0.1.1") .feature_dep("bar", "0.1", &["a", "b"]) .publish(); Package::new("foo", "0.1.0").publish(); cargo(&p, "build").run(); } #[cargo_test] fn upstream_warnings_on_extra_verbose_http() { let _server = setup_http(); upstream_warnings_on_extra_verbose(cargo_http); } #[cargo_test] fn upstream_warnings_on_extra_verbose_git() { upstream_warnings_on_extra_verbose(cargo_stable); } fn upstream_warnings_on_extra_verbose(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = [] [dependencies] foo = "*" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("foo", "0.1.0") .file("src/lib.rs", "fn unused() {}") .publish(); cargo(&p, "build -vv") .with_stderr_contains("[WARNING] [..]unused[..]") .run(); } #[cargo_test] fn disallow_network_http() { let _server = setup_http(); let p = project() .file( "Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = [] [dependencies] foo = "*" "#, ) .file("src/main.rs", "fn main() {}") .build(); cargo_http(&p, "build --frozen") .with_status(101) .with_stderr( "\ [UPDATING] [..] 
[ERROR] failed to get `foo` as a dependency of package `bar v0.5.0 ([..])` Caused by: failed to query replaced source registry `crates-io` Caused by: attempting to make an HTTP request, but --frozen was specified ", ) .run(); } #[cargo_test] fn disallow_network_git() { let p = project() .file( "Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = [] [dependencies] foo = "*" "#, ) .file("src/main.rs", "fn main() {}") .build(); cargo_stable(&p, "build --frozen") .with_status(101) .with_stderr( "\ [ERROR] failed to get `foo` as a dependency of package `bar v0.5.0 [..]` Caused by: failed to load source for dependency `foo` Caused by: Unable to update registry [..] Caused by: attempting to make an HTTP request, but --frozen was specified ", ) .run(); } #[cargo_test] fn add_dep_dont_update_registry_http() { let _server = setup_http(); add_dep_dont_update_registry(cargo_http); } #[cargo_test] fn add_dep_dont_update_registry_git() { add_dep_dont_update_registry(cargo_stable); } fn add_dep_dont_update_registry(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = [] [dependencies] baz = { path = "baz" } "#, ) .file("src/main.rs", "fn main() {}") .file( "baz/Cargo.toml", r#" [project] name = "baz" version = "0.5.0" authors = [] [dependencies] remote = "0.3" "#, ) .file("baz/src/lib.rs", "") .build(); Package::new("remote", "0.3.4").publish(); cargo(&p, "build").run(); p.change_file( "Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = [] [dependencies] baz = { path = "baz" } remote = "0.3" "#, ); cargo(&p, "build") .with_stderr( "\ [COMPILING] bar v0.5.0 ([..]) [FINISHED] [..] 
", ) .run(); } #[cargo_test] fn bump_version_dont_update_registry_http() { let _server = setup_http(); bump_version_dont_update_registry(cargo_http); } #[cargo_test] fn bump_version_dont_update_registry_git() { bump_version_dont_update_registry(cargo_stable); } fn bump_version_dont_update_registry(cargo: fn(&Project, &str) -> Execs) { let p = project() .file( "Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = [] [dependencies] baz = { path = "baz" } "#, ) .file("src/main.rs", "fn main() {}") .file( "baz/Cargo.toml", r#" [project] name = "baz" version = "0.5.0" authors = [] [dependencies] remote = "0.3" "#, ) .file("baz/src/lib.rs", "") .build(); Package::new("remote", "0.3.4").publish(); cargo(&p, "build").run(); p.change_file( "Cargo.toml", r#" [project] name = "bar" version = "0.6.0" authors = [] [dependencies] baz = { path = "baz" } "#, ); cargo(&p, "build") .with_stderr( "\ [COMPILING] bar v0.6.0 ([..]) [FINISHED] [..] ", ) .run(); } #[cargo_test] fn toml_lies_but_index_is_truth_http() { let _server = setup_http(); toml_lies_but_index_is_truth(cargo_http); } #[cargo_test] fn toml_lies_but_index_is_truth_git() { toml_lies_but_index_is_truth(cargo_stable); } fn toml_lies_but_index_is_truth(cargo: fn(&Project, &str) -> Execs) { Package::new("foo", "0.2.0").publish(); Package::new("bar", "0.3.0") .dep("foo", "0.2.0") .file( "Cargo.toml", r#" [project] name = "bar" version = "0.3.0" authors = [] [dependencies] foo = "0.1.0" "#, ) .file("src/lib.rs", "extern crate foo;") .publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = [] [dependencies] bar = "0.3" "#, ) .file("src/main.rs", "fn main() {}") .build(); cargo(&p, "build -v").run(); } #[cargo_test] fn vv_prints_warnings_http() { let _server = setup_http(); vv_prints_warnings(cargo_http); } #[cargo_test] fn vv_prints_warnings_git() { vv_prints_warnings(cargo_stable); } fn vv_prints_warnings(cargo: fn(&Project, &str) -> Execs) { Package::new("foo", 
"0.2.0") .file( "src/lib.rs", "#![deny(warnings)] fn foo() {} // unused function", ) .publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "fo" version = "0.5.0" authors = [] [dependencies] foo = "0.2" "#, ) .file("src/main.rs", "fn main() {}") .build(); cargo(&p, "build -vv").run(); } #[cargo_test] fn bad_and_or_malicious_packages_rejected_http() { let _server = setup_http(); bad_and_or_malicious_packages_rejected(cargo_http); } #[cargo_test] fn bad_and_or_malicious_packages_rejected_git() { bad_and_or_malicious_packages_rejected(cargo_stable); } fn bad_and_or_malicious_packages_rejected(cargo: fn(&Project, &str) -> Execs) { Package::new("foo", "0.2.0") .extra_file("foo-0.1.0/src/lib.rs", "") .publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "fo" version = "0.5.0" authors = [] [dependencies] foo = "0.2" "#, ) .file("src/main.rs", "fn main() {}") .build(); cargo(&p, "build -vv") .with_status(101) .with_stderr( "\ [UPDATING] [..] [DOWNLOADING] crates ... [DOWNLOADED] [..] error: failed to download [..] Caused by: failed to unpack [..] Caused by: [..] 
contains a file at \"foo-0.1.0/src/lib.rs\" which isn't under \"foo-0.2.0\" ", ) .run(); } #[cargo_test] fn git_init_templatedir_missing_http() { let _server = setup_http(); git_init_templatedir_missing(cargo_http); } #[cargo_test] fn git_init_templatedir_missing_git() { git_init_templatedir_missing(cargo_stable); } fn git_init_templatedir_missing(cargo: fn(&Project, &str) -> Execs) { Package::new("foo", "0.2.0").dep("bar", "*").publish(); Package::new("bar", "0.2.0").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "fo" version = "0.5.0" authors = [] [dependencies] foo = "0.2" "#, ) .file("src/main.rs", "fn main() {}") .build(); cargo(&p, "build").run(); remove_dir_all(paths::home().join(".cargo/registry")).unwrap(); fs::write( paths::home().join(".gitconfig"), r#" [init] templatedir = nowhere "#, ) .unwrap(); cargo(&p, "build").run(); cargo(&p, "build").run(); } #[cargo_test] fn rename_deps_and_features_http() { let _server = setup_http(); rename_deps_and_features(cargo_http); } #[cargo_test] fn rename_deps_and_features_git() { rename_deps_and_features(cargo_stable); } fn rename_deps_and_features(cargo: fn(&Project, &str) -> Execs) { Package::new("foo", "0.1.0") .file("src/lib.rs", "pub fn f1() {}") .publish(); Package::new("foo", "0.2.0") .file("src/lib.rs", "pub fn f2() {}") .publish(); Package::new("bar", "0.2.0") .add_dep( Dependency::new("foo01", "0.1.0") .package("foo") .optional(true), ) .add_dep(Dependency::new("foo02", "0.2.0").package("foo")) .feature("another", &["foo01"]) .file( "src/lib.rs", r#" extern crate foo02; #[cfg(feature = "foo01")] extern crate foo01; pub fn foo() { foo02::f2(); #[cfg(feature = "foo01")] foo01::f1(); } "#, ) .publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] [dependencies] bar = "0.2" "#, ) .file( "src/main.rs", " extern crate bar; fn main() { bar::foo(); } ", ) .build(); cargo(&p, "build").run(); cargo(&p, "build --features bar/foo01").run(); 
cargo(&p, "build --features bar/another").run(); } #[cargo_test] fn ignore_invalid_json_lines_http() { let _server = setup_http(); ignore_invalid_json_lines(cargo_http); } #[cargo_test] fn ignore_invalid_json_lines_git() { ignore_invalid_json_lines(cargo_stable); } fn ignore_invalid_json_lines(cargo: fn(&Project, &str) -> Execs) { Package::new("foo", "0.1.0").publish(); Package::new("foo", "0.1.1").invalid_json(true).publish(); Package::new("foo", "0.2.0").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] [dependencies] foo = '0.1.0' foo02 = { version = '0.2.0', package = 'foo' } "#, ) .file("src/lib.rs", "") .build(); cargo(&p, "build").run(); } #[cargo_test] fn readonly_registry_still_works_http() { let _server = setup_http(); readonly_registry_still_works(cargo_http); } #[cargo_test] fn readonly_registry_still_works_git() { readonly_registry_still_works(cargo_stable); } fn readonly_registry_still_works(cargo: fn(&Project, &str) -> Execs) { Package::new("foo", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] [dependencies] foo = '0.1.0' "#, ) .file("src/lib.rs", "") .build(); cargo(&p, "generate-lockfile").run(); cargo(&p, "fetch --locked").run(); chmod_readonly(&paths::home(), true); cargo(&p, "build").run(); // make sure we un-readonly the files afterwards so "cargo clean" can remove them (#6934) chmod_readonly(&paths::home(), false); fn chmod_readonly(path: &Path, readonly: bool) { for entry in t!(path.read_dir()) { let entry = t!(entry); let path = entry.path(); if t!(entry.file_type()).is_dir() { chmod_readonly(&path, readonly); } else { set_readonly(&path, readonly); } } set_readonly(path, readonly); } fn set_readonly(path: &Path, readonly: bool) { let mut perms = t!(path.metadata()).permissions(); perms.set_readonly(readonly); t!(fs::set_permissions(path, perms)); } } #[cargo_test] fn registry_index_rejected_http() { let _server = 
setup_http(); registry_index_rejected(cargo_http); } #[cargo_test] fn registry_index_rejected_git() { registry_index_rejected(cargo_stable); } fn registry_index_rejected(cargo: fn(&Project, &str) -> Execs) { Package::new("dep", "0.1.0").publish(); let p = project() .file( ".cargo/config", r#" [registry] index = "https://example.com/" "#, ) .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] dep = "0.1" "#, ) .file("src/lib.rs", "") .build(); cargo(&p, "check") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]/foo/Cargo.toml` Caused by: the `registry.index` config value is no longer supported Use `[source]` replacement to alter the default index for crates.io. ", ) .run(); cargo(&p, "login") .with_status(101) .with_stderr( "\ [ERROR] the `registry.index` config value is no longer supported Use `[source]` replacement to alter the default index for crates.io. ", ) .run(); } #[cargo_test] fn package_lock_inside_package_is_overwritten() { let registry = registry::init(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = ">= 0.0.0" "#, ) .file("src/main.rs", "fn main() {}") .build(); Package::new("bar", "0.0.1") .file("src/lib.rs", "") .file(".cargo-ok", "") .publish(); p.cargo("build").run(); let id = SourceId::for_registry(registry.index_url()).unwrap(); let hash = cargo::util::hex::short_hash(&id); let ok = cargo_home() .join("registry") .join("src") .join(format!("-{}", hash)) .join("bar-0.0.1") .join(".cargo-ok"); assert_eq!(ok.metadata().unwrap().len(), 2); } #[cargo_test] fn ignores_unknown_index_version_http() { let _server = setup_http(); ignores_unknown_index_version(cargo_http); } #[cargo_test] fn ignores_unknown_index_version_git() { ignores_unknown_index_version(cargo_stable); } fn ignores_unknown_index_version(cargo: fn(&Project, &str) -> Execs) { // If the version field is not understood, it is ignored. 
Package::new("bar", "1.0.0").publish(); Package::new("bar", "1.0.1").schema_version(9999).publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "1.0" "#, ) .file("src/lib.rs", "") .build(); cargo(&p, "tree") .with_stdout( "foo v0.1.0 [..]\n\ └── bar v1.0.0\n\ ", ) .run(); } #[cargo_test] fn http_requires_z_flag() { let _server = setup_http(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = ">= 0.0.0" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build") .with_status(101) .with_stderr_contains(" usage of sparse registries requires `-Z sparse-registry`") .run(); } #[cargo_test] fn http_requires_trailing_slash() { cargo_process("-Z sparse-registry install bar --index sparse+https://index.crates.io") .masquerade_as_nightly_cargo(&["sparse-registry"]) .with_status(101) .with_stderr("[ERROR] registry url must end in a slash `/`: sparse+https://index.crates.io") .run() } cargo-0.66.0/tests/testsuite/rename_deps.rs000066400000000000000000000226411432416201200207210ustar00rootroot00000000000000//! Tests for renaming dependencies. 
use cargo_test_support::git; use cargo_test_support::paths; use cargo_test_support::registry::{self, Package}; use cargo_test_support::{basic_manifest, project}; #[cargo_test] fn rename_dependency() { Package::new("bar", "0.1.0").publish(); Package::new("bar", "0.2.0").publish(); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = { version = "0.1.0" } baz = { version = "0.2.0", package = "bar" } "#, ) .file("src/lib.rs", "extern crate bar; extern crate baz;") .build(); p.cargo("build").run(); } #[cargo_test] fn rename_with_different_names() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] baz = { path = "bar", package = "bar" } "#, ) .file("src/lib.rs", "extern crate baz;") .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.0.1" authors = [] [lib] name = "random_name" "#, ) .file("bar/src/lib.rs", "") .build(); p.cargo("build").run(); } #[cargo_test] fn lots_of_names() { registry::alt_init(); Package::new("foo", "0.1.0") .file("src/lib.rs", "pub fn foo1() {}") .publish(); Package::new("foo", "0.2.0") .file("src/lib.rs", "pub fn foo() {}") .publish(); Package::new("foo", "0.1.0") .file("src/lib.rs", "pub fn foo2() {}") .alternative(true) .publish(); let g = git::repo(&paths::root().join("another")) .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("src/lib.rs", "pub fn foo3() {}") .build(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "test" version = "0.1.0" authors = [] [dependencies] foo = "0.2" foo1 = {{ version = "0.1", package = "foo" }} foo2 = {{ version = "0.1", registry = "alternative", package = "foo" }} foo3 = {{ git = '{}', package = "foo" }} foo4 = {{ path = "foo", package = "foo" }} "#, g.url() ), ) .file( "src/lib.rs", " extern crate foo; extern crate foo1; extern crate foo2; extern crate foo3; extern crate foo4; pub fn foo() { foo::foo(); foo1::foo1(); foo2::foo2(); 
foo3::foo3(); foo4::foo4(); } ", ) .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("foo/src/lib.rs", "pub fn foo4() {}") .build(); p.cargo("build -v").run(); } #[cargo_test] fn rename_and_patch() { Package::new("foo", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "test" version = "0.1.0" authors = [] [dependencies] bar = { version = "0.1", package = "foo" } [patch.crates-io] foo = { path = "foo" } "#, ) .file( "src/lib.rs", "extern crate bar; pub fn foo() { bar::foo(); }", ) .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("foo/src/lib.rs", "pub fn foo() {}") .build(); p.cargo("build -v").run(); } #[cargo_test] fn rename_twice() { Package::new("foo", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "test" version = "0.1.0" authors = [] [dependencies] bar = { version = "0.1", package = "foo" } [build-dependencies] foo = { version = "0.1" } "#, ) .file("src/lib.rs", "") .build(); p.cargo("build -v") .with_status(101) .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... 
[DOWNLOADED] foo v0.1.0 (registry [..]) error: the crate `test v0.1.0 ([CWD])` depends on crate `foo v0.1.0` multiple times with different names ", ) .run(); } #[cargo_test] fn rename_affects_fingerprint() { Package::new("foo", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "test" version = "0.1.0" authors = [] [dependencies] foo = { version = "0.1", package = "foo" } "#, ) .file("src/lib.rs", "extern crate foo;") .build(); p.cargo("build -v").run(); p.change_file( "Cargo.toml", r#" [package] name = "test" version = "0.1.0" authors = [] [dependencies] bar = { version = "0.1", package = "foo" } "#, ); p.cargo("build -v") .with_status(101) .with_stderr_contains("[..]can't find crate for `foo`") .run(); } #[cargo_test] fn can_run_doc_tests() { Package::new("bar", "0.1.0").publish(); Package::new("bar", "0.2.0").publish(); let foo = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" [dependencies] bar = { version = "0.1.0" } baz = { version = "0.2.0", package = "bar" } "#, ) .file( "src/lib.rs", " extern crate bar; extern crate baz; ", ) .build(); foo.cargo("test -v") .with_stderr_contains( "\ [DOCTEST] foo [RUNNING] `rustdoc [..]--test [..]src/lib.rs \ [..] 
\ --extern bar=[CWD]/target/debug/deps/libbar-[..].rlib \ --extern baz=[CWD]/target/debug/deps/libbar-[..].rlib \ [..]` ", ) .run(); } #[cargo_test] fn features_still_work() { Package::new("foo", "0.1.0").publish(); Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "test" version = "0.1.0" authors = [] [dependencies] p1 = { path = 'a', features = ['b'] } p2 = { path = 'b' } "#, ) .file("src/lib.rs", "") .file( "a/Cargo.toml", r#" [package] name = "p1" version = "0.1.0" authors = [] [dependencies] b = { version = "0.1", package = "foo", optional = true } "#, ) .file("a/src/lib.rs", "extern crate b;") .file( "b/Cargo.toml", r#" [package] name = "p2" version = "0.1.0" authors = [] [dependencies] b = { version = "0.1", package = "bar", optional = true } [features] default = ['b'] "#, ) .file("b/src/lib.rs", "extern crate b;") .build(); p.cargo("build -v").run(); } #[cargo_test] fn features_not_working() { Package::new("foo", "0.1.0").publish(); Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "test" version = "0.1.0" authors = [] [dependencies] a = { path = 'a', package = 'p1', optional = true } [features] default = ['p1'] "#, ) .file("src/lib.rs", "") .file("a/Cargo.toml", &basic_manifest("p1", "0.1.0")) .build(); p.cargo("build -v") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[..]` Caused by: feature `default` includes `p1` which is neither a dependency nor another feature ", ) .run(); } #[cargo_test] fn rename_with_dash() { let p = project() .file( "Cargo.toml", r#" [package] name = "qwerty" version = "0.1.0" [dependencies] foo-bar = { path = 'a', package = 'a' } "#, ) .file("src/lib.rs", "extern crate foo_bar;") .file("a/Cargo.toml", &basic_manifest("a", "0.1.0")) .file("a/src/lib.rs", "") .build(); p.cargo("build").run(); } 
cargo-0.66.0/tests/testsuite/replace.rs000066400000000000000000000760131432416201200200540ustar00rootroot00000000000000//! Tests for `[replace]` table source replacement. use cargo_test_support::git; use cargo_test_support::paths; use cargo_test_support::registry::Package; use cargo_test_support::{basic_manifest, project}; #[cargo_test] fn override_simple() { Package::new("bar", "0.1.0").publish(); let bar = git::repo(&paths::root().join("override")) .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "pub fn bar() {}") .build(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" [replace] "bar:0.1.0" = {{ git = '{}' }} "#, bar.url() ), ) .file( "src/lib.rs", "extern crate bar; pub fn foo() { bar::bar(); }", ) .build(); p.cargo("build") .with_stderr( "\ [UPDATING] `dummy-registry` index [UPDATING] git repository `[..]` [COMPILING] bar v0.1.0 (file://[..]) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn override_with_features() { Package::new("bar", "0.1.0").publish(); let bar = git::repo(&paths::root().join("override")) .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "pub fn bar() {}") .build(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" [replace] "bar:0.1.0" = {{ git = '{}', features = ["some_feature"] }} "#, bar.url() ), ) .file( "src/lib.rs", "extern crate bar; pub fn foo() { bar::bar(); }", ) .build(); p.cargo("build") .with_stderr( "\ [UPDATING] [..] index [UPDATING] git repository `[..]` [WARNING] replacement for `bar` uses the features mechanism. 
default-features and features \ will not take effect because the replacement dependency does not support this mechanism [COMPILING] bar v0.1.0 (file://[..]) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn override_with_setting_default_features() { Package::new("bar", "0.1.0").publish(); let bar = git::repo(&paths::root().join("override")) .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "pub fn bar() {}") .build(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" [replace] "bar:0.1.0" = {{ git = '{}', default-features = false, features = ["none_default_feature"] }} "#, bar.url() ), ) .file( "src/lib.rs", "extern crate bar; pub fn foo() { bar::bar(); }", ) .build(); p.cargo("build") .with_stderr( "\ [UPDATING] [..] index [UPDATING] git repository `[..]` [WARNING] replacement for `bar` uses the features mechanism. default-features and features \ will not take effect because the replacement dependency does not support this mechanism [COMPILING] bar v0.1.0 (file://[..]) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn missing_version() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" [replace] bar = { git = 'https://example.com' } "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[..]` Caused by: replacements must specify a version to replace, but `[..]bar` does not ", ) .run(); } #[cargo_test] fn invalid_semver_version() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" [replace] "bar:*" = { git = 'https://example.com' } "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr_contains( "\ error: failed to parse manifest at `[..]` Caused by: replacements must specify a valid semver version to replace, but `bar:*` does not ", ) .run(); } #[cargo_test] fn different_version() { Package::new("bar", "0.2.0").publish(); Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" [replace] "bar:0.1.0" = "0.2.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[..]` Caused by: replacements cannot specify a version requirement, but found one for [..] 
", ) .run(); } #[cargo_test] fn transitive() { Package::new("bar", "0.1.0").publish(); Package::new("baz", "0.2.0") .dep("bar", "0.1.0") .file("src/lib.rs", "extern crate bar; fn baz() { bar::bar(); }") .publish(); let foo = git::repo(&paths::root().join("override")) .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "pub fn bar() {}") .build(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] baz = "0.2.0" [replace] "bar:0.1.0" = {{ git = '{}' }} "#, foo.url() ), ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_stderr( "\ [UPDATING] `dummy-registry` index [UPDATING] git repository `[..]` [DOWNLOADING] crates ... [DOWNLOADED] baz v0.2.0 (registry [..]) [COMPILING] bar v0.1.0 (file://[..]) [COMPILING] baz v0.2.0 [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("build").with_stdout("").run(); } #[cargo_test] fn persists_across_rebuilds() { Package::new("bar", "0.1.0").publish(); let foo = git::repo(&paths::root().join("override")) .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "pub fn bar() {}") .build(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" [replace] "bar:0.1.0" = {{ git = '{}' }} "#, foo.url() ), ) .file( "src/lib.rs", "extern crate bar; pub fn foo() { bar::bar(); }", ) .build(); p.cargo("build") .with_stderr( "\ [UPDATING] `dummy-registry` index [UPDATING] git repository `file://[..]` [COMPILING] bar v0.1.0 (file://[..]) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); p.cargo("build").with_stdout("").run(); } #[cargo_test] fn replace_registry_with_path() { Package::new("bar", "0.1.0").publish(); let _ = project() .at("bar") .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "pub fn bar() {}") .build(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" [replace] "bar:0.1.0" = { path = "../bar" } "#, ) .file( "src/lib.rs", "extern crate bar; pub fn foo() { bar::bar(); }", ) .build(); p.cargo("build") .with_stderr( "\ [UPDATING] `dummy-registry` index [COMPILING] bar v0.1.0 ([ROOT][..]/bar) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn use_a_spec_to_select() { Package::new("baz", "0.1.1") .file("src/lib.rs", "pub fn baz1() {}") .publish(); Package::new("baz", "0.2.0").publish(); Package::new("bar", "0.1.1") .dep("baz", "0.2") .file( "src/lib.rs", "extern crate baz; pub fn bar() { baz::baz3(); }", ) .publish(); let foo = git::repo(&paths::root().join("override")) .file("Cargo.toml", &basic_manifest("baz", "0.2.0")) .file("src/lib.rs", "pub fn baz3() {}") .build(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1" baz = "0.1" [replace] "baz:0.2.0" = {{ git = '{}' }} "#, foo.url() ), ) .file( "src/lib.rs", " extern crate bar; extern crate baz; pub fn local() { baz::baz1(); bar::bar(); } ", ) .build(); p.cargo("build") .with_stderr( "\ [UPDATING] `dummy-registry` index [UPDATING] git repository `[..]` [DOWNLOADING] crates ... [DOWNLOADED] [..] [DOWNLOADED] [..] [COMPILING] [..] [COMPILING] [..] [COMPILING] [..] [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn override_adds_some_deps() { Package::new("baz", "0.1.1").publish(); Package::new("bar", "0.1.0").publish(); let foo = git::repo(&paths::root().join("override")) .file( "Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] [dependencies] baz = "0.1" "#, ) .file("src/lib.rs", "") .build(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1" [replace] "bar:0.1.0" = {{ git = '{}' }} "#, foo.url() ), ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_stderr( "\ [UPDATING] `dummy-registry` index [UPDATING] git repository `[..]` [DOWNLOADING] crates ... [DOWNLOADED] baz v0.1.1 (registry [..]) [COMPILING] baz v0.1.1 [COMPILING] bar v0.1.0 ([..]) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("build").with_stdout("").run(); Package::new("baz", "0.1.2").publish(); p.cargo("update -p") .arg(&format!("{}#bar", foo.url())) .with_stderr( "\ [UPDATING] git repository `file://[..]` [UPDATING] `dummy-registry` index ", ) .run(); p.cargo("update -p https://github.com/rust-lang/crates.io-index#bar") .with_stderr( "\ [UPDATING] `dummy-registry` index ", ) .run(); p.cargo("build").with_stdout("").run(); } #[cargo_test] fn locked_means_locked_yes_no_seriously_i_mean_locked() { // this in theory exercises #2041 Package::new("baz", "0.1.0").publish(); Package::new("baz", "0.2.0").publish(); Package::new("bar", "0.1.0").publish(); let foo = git::repo(&paths::root().join("override")) .file( "Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] [dependencies] baz = "*" "#, ) .file("src/lib.rs", "") .build(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1" baz = "0.1" [replace] "bar:0.1.0" = {{ git = '{}' }} "#, foo.url() ), ) .file("src/lib.rs", "") .build(); p.cargo("build").run(); 
p.cargo("build").with_stdout("").run(); p.cargo("build").with_stdout("").run(); } #[cargo_test] fn override_wrong_name() { Package::new("baz", "0.1.0").publish(); let foo = git::repo(&paths::root().join("override")) .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "") .build(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] baz = "0.1" [replace] "baz:0.1.0" = {{ git = '{}' }} "#, foo.url() ), ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [UPDATING] [..] index [UPDATING] git repository [..] [ERROR] failed to get `baz` as a dependency of package `foo v0.0.1 ([..])` Caused by: no matching package for override `[..]baz@0.1.0` found location searched: file://[..] version required: =0.1.0 ", ) .run(); } #[cargo_test] fn override_with_nothing() { Package::new("bar", "0.1.0").publish(); let foo = git::repo(&paths::root().join("override")) .file("src/lib.rs", "") .build(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1" [replace] "bar:0.1.0" = {{ git = '{}' }} "#, foo.url() ), ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [UPDATING] [..] index [UPDATING] git repository [..] [ERROR] failed to get `bar` as a dependency of package `foo v0.0.1 ([..])` Caused by: failed to load source for dependency `bar` Caused by: Unable to update file://[..] 
Caused by: Could not find Cargo.toml in `[..]` ", ) .run(); } #[cargo_test] fn override_wrong_version() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [replace] "bar:0.1.0" = { git = 'https://example.com', version = '0.2.0' } "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: failed to parse manifest at `[..]` Caused by: replacements cannot specify a version requirement, but found one for `[..]bar@0.1.0` ", ) .run(); } #[cargo_test] fn multiple_specs() { Package::new("bar", "0.1.0").publish(); let bar = git::repo(&paths::root().join("override")) .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "pub fn bar() {}") .build(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" [replace] "bar:0.1.0" = {{ git = '{0}' }} [replace."https://github.com/rust-lang/crates.io-index#bar:0.1.0"] git = '{0}' "#, bar.url() ), ) .file("src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr( "\ [UPDATING] [..] index [UPDATING] git repository [..] [ERROR] failed to get `bar` as a dependency of package `foo v0.0.1 ([..])` Caused by: overlapping replacement specifications found: * [..] * [..] both specifications match: bar v0.1.0 ", ) .run(); } #[cargo_test] fn test_override_dep() { Package::new("bar", "0.1.0").publish(); let bar = git::repo(&paths::root().join("override")) .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "pub fn bar() {}") .build(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" [replace] "bar:0.1.0" = {{ git = '{0}' }} "#, bar.url() ), ) .file("src/lib.rs", "") .build(); p.cargo("test -p bar") .with_status(101) .with_stderr_contains( "\ error: There are multiple `bar` packages in your project, and the [..] 
Please re-run this command with [..] [..]#bar@0.1.0 [..]#bar@0.1.0 ", ) .run(); } #[cargo_test] fn update() { Package::new("bar", "0.1.0").publish(); let bar = git::repo(&paths::root().join("override")) .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "pub fn bar() {}") .build(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" [replace] "bar:0.1.0" = {{ git = '{0}' }} "#, bar.url() ), ) .file("src/lib.rs", "") .build(); p.cargo("generate-lockfile").run(); p.cargo("update") .with_stderr( "\ [UPDATING] `[..]` index [UPDATING] git repository `[..]` ", ) .run(); } // foo -> near -> far // near is overridden with itself #[cargo_test] fn no_override_self() { let deps = git::repo(&paths::root().join("override")) .file("far/Cargo.toml", &basic_manifest("far", "0.1.0")) .file("far/src/lib.rs", "") .file( "near/Cargo.toml", r#" [package] name = "near" version = "0.1.0" authors = [] [dependencies] far = { path = "../far" } "#, ) .file("near/src/lib.rs", "#![no_std] pub extern crate far;") .build(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] near = {{ git = '{0}' }} [replace] "near:0.1.0" = {{ git = '{0}' }} "#, deps.url() ), ) .file("src/lib.rs", "#![no_std] pub extern crate near;") .build(); p.cargo("build --verbose").run(); } #[cargo_test] fn override_an_override() { Package::new("chrono", "0.2.0") .dep("serde", "< 0.9") .publish(); Package::new("serde", "0.7.0") .file("src/lib.rs", "pub fn serde07() {}") .publish(); Package::new("serde", "0.8.0") .file("src/lib.rs", "pub fn serde08() {}") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] chrono = "0.2" serde = "0.8" [replace] "chrono:0.2.0" = { path = "chrono" } "serde:0.8.0" = { path = "serde" } "#, ) .file( "Cargo.lock", r#" [[package]] name = "foo" version = 
"0.0.1" dependencies = [ "chrono 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "serde 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "chrono" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" replace = "chrono 0.2.0" [[package]] name = "chrono" version = "0.2.0" dependencies = [ "serde 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "serde" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "serde" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" replace = "serde 0.8.0" [[package]] name = "serde" version = "0.8.0" "#, ) .file( "src/lib.rs", " extern crate chrono; extern crate serde; pub fn foo() { chrono::chrono(); serde::serde08_override(); } ", ) .file( "chrono/Cargo.toml", r#" [package] name = "chrono" version = "0.2.0" authors = [] [dependencies] serde = "< 0.9" "#, ) .file( "chrono/src/lib.rs", " extern crate serde; pub fn chrono() { serde::serde07(); } ", ) .file("serde/Cargo.toml", &basic_manifest("serde", "0.8.0")) .file("serde/src/lib.rs", "pub fn serde08_override() {}") .build(); p.cargo("build -v").run(); } #[cargo_test] fn overriding_nonexistent_no_spurious() { Package::new("bar", "0.1.0").dep("baz", "0.1").publish(); Package::new("baz", "0.1.0").publish(); let bar = git::repo(&paths::root().join("override")) .file( "Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] [dependencies] baz = { path = "baz" } "#, ) .file("src/lib.rs", "pub fn bar() {}") .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/lib.rs", "pub fn baz() {}") .build(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" [replace] "bar:0.1.0" = {{ git = '{url}' }} "baz:0.1.0" = {{ git = '{url}' }} "#, url = bar.url() ), ) .file("src/lib.rs", "") .build(); 
p.cargo("build").run(); p.cargo("build") .with_stderr( "\ [WARNING] package replacement is not used: [..]baz@0.1.0 [FINISHED] [..] ", ) .with_stdout("") .run(); } #[cargo_test] fn no_warnings_when_replace_is_used_in_another_workspace_member() { Package::new("bar", "0.1.0").publish(); Package::new("baz", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [workspace] members = [ "first_crate", "second_crate"] [replace] "bar:0.1.0" = { path = "local_bar" } "#, ) .file( "first_crate/Cargo.toml", r#" [package] name = "first_crate" version = "0.1.0" [dependencies] bar = "0.1.0" "#, ) .file("first_crate/src/lib.rs", "") .file( "second_crate/Cargo.toml", &basic_manifest("second_crate", "0.1.0"), ) .file("second_crate/src/lib.rs", "") .file("local_bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("local_bar/src/lib.rs", "") .build(); p.cargo("build") .cwd("first_crate") .with_stdout("") .with_stderr( "\ [UPDATING] `[..]` index [COMPILING] bar v0.1.0 ([..]) [COMPILING] first_crate v0.1.0 ([..]) [FINISHED] [..]", ) .run(); p.cargo("build") .cwd("second_crate") .with_stdout("") .with_stderr( "\ [COMPILING] second_crate v0.1.0 ([..]) [FINISHED] [..]", ) .run(); } #[cargo_test] fn replace_to_path_dep() { Package::new("bar", "0.1.0").dep("baz", "0.1").publish(); Package::new("baz", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" [replace] "bar:0.1.0" = { path = "bar" } "#, ) .file("src/lib.rs", "extern crate bar;") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] [dependencies] baz = { path = "baz" } "#, ) .file( "bar/src/lib.rs", "extern crate baz; pub fn bar() { baz::baz(); }", ) .file("bar/baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("bar/baz/src/lib.rs", "pub fn baz() {}") .build(); p.cargo("build").run(); } #[cargo_test] fn override_with_default_feature() { Package::new("another", "0.1.0").publish(); 
Package::new("another", "0.1.1").dep("bar", "0.1").publish(); Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = { path = "bar", default-features = false } another = "0.1" another2 = { path = "another2" } [replace] 'bar:0.1.0' = { path = "bar" } "#, ) .file("src/main.rs", "extern crate bar; fn main() { bar::bar(); }") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] [features] default = [] "#, ) .file( "bar/src/lib.rs", r#" #[cfg(feature = "default")] pub fn bar() {} "#, ) .file( "another2/Cargo.toml", r#" [package] name = "another2" version = "0.1.0" authors = [] [dependencies] bar = { version = "0.1", default-features = false } "#, ) .file("another2/src/lib.rs", "") .build(); p.cargo("run").run(); } #[cargo_test] fn override_plus_dep() { Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1" [replace] 'bar:0.1.0' = { path = "bar" } "#, ) .file("src/lib.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] [dependencies] foo = { path = ".." } "#, ) .file("bar/src/lib.rs", "") .build(); p.cargo("build") .with_status(101) .with_stderr_contains("error: cyclic package dependency: [..]") .run(); } cargo-0.66.0/tests/testsuite/required_features.rs000066400000000000000000001037131432416201200221550ustar00rootroot00000000000000//! Tests for targets with `required-features`. 
use cargo_test_support::install::{ assert_has_installed_exe, assert_has_not_installed_exe, cargo_home, }; use cargo_test_support::is_nightly; use cargo_test_support::paths::CargoPathExt; use cargo_test_support::project; #[cargo_test] fn build_bin_default_features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] default = ["a"] a = [] [[bin]] name = "foo" required-features = ["a"] "#, ) .file( "src/main.rs", r#" extern crate foo; #[cfg(feature = "a")] fn test() { foo::foo(); } fn main() {} "#, ) .file("src/lib.rs", r#"#[cfg(feature = "a")] pub fn foo() {}"#) .build(); p.cargo("build").run(); assert!(p.bin("foo").is_file()); p.cargo("build --no-default-features").run(); p.cargo("build --bin=foo").run(); assert!(p.bin("foo").is_file()); p.cargo("build --bin=foo --no-default-features") .with_status(101) .with_stderr( "\ error: target `foo` in package `foo` requires the features: `a` Consider enabling them by passing, e.g., `--features=\"a\"` ", ) .run(); } #[cargo_test] fn build_bin_arg_features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] a = [] [[bin]] name = "foo" required-features = ["a"] "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build --features a").run(); assert!(p.bin("foo").is_file()); } #[cargo_test] fn build_bin_multiple_required_features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] default = ["a", "b"] a = [] b = ["a"] c = [] [[bin]] name = "foo_1" path = "src/foo_1.rs" required-features = ["b", "c"] [[bin]] name = "foo_2" path = "src/foo_2.rs" required-features = ["a"] "#, ) .file("src/foo_1.rs", "fn main() {}") .file("src/foo_2.rs", "fn main() {}") .build(); p.cargo("build").run(); assert!(!p.bin("foo_1").is_file()); assert!(p.bin("foo_2").is_file()); p.cargo("build --features c").run(); assert!(p.bin("foo_1").is_file()); 
assert!(p.bin("foo_2").is_file()); p.cargo("build --no-default-features").run(); } #[cargo_test] fn build_example_default_features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] default = ["a"] a = [] [[example]] name = "foo" required-features = ["a"] "#, ) .file("examples/foo.rs", "fn main() {}") .build(); p.cargo("build --example=foo").run(); assert!(p.bin("examples/foo").is_file()); p.cargo("build --example=foo --no-default-features") .with_status(101) .with_stderr( "\ error: target `foo` in package `foo` requires the features: `a` Consider enabling them by passing, e.g., `--features=\"a\"` ", ) .run(); } #[cargo_test] fn build_example_arg_features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] a = [] [[example]] name = "foo" required-features = ["a"] "#, ) .file("examples/foo.rs", "fn main() {}") .build(); p.cargo("build --example=foo --features a").run(); assert!(p.bin("examples/foo").is_file()); } #[cargo_test] fn build_example_multiple_required_features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] default = ["a", "b"] a = [] b = ["a"] c = [] [[example]] name = "foo_1" required-features = ["b", "c"] [[example]] name = "foo_2" required-features = ["a"] "#, ) .file("examples/foo_1.rs", "fn main() {}") .file("examples/foo_2.rs", "fn main() {}") .build(); p.cargo("build --example=foo_1") .with_status(101) .with_stderr( "\ error: target `foo_1` in package `foo` requires the features: `b`, `c` Consider enabling them by passing, e.g., `--features=\"b c\"` ", ) .run(); p.cargo("build --example=foo_2").run(); assert!(!p.bin("examples/foo_1").is_file()); assert!(p.bin("examples/foo_2").is_file()); p.cargo("build --example=foo_1 --features c").run(); p.cargo("build --example=foo_2 --features c").run(); assert!(p.bin("examples/foo_1").is_file()); 
assert!(p.bin("examples/foo_2").is_file()); p.cargo("build --example=foo_1 --no-default-features") .with_status(101) .with_stderr( "\ error: target `foo_1` in package `foo` requires the features: `b`, `c` Consider enabling them by passing, e.g., `--features=\"b c\"` ", ) .run(); p.cargo("build --example=foo_2 --no-default-features") .with_status(101) .with_stderr( "\ error: target `foo_2` in package `foo` requires the features: `a` Consider enabling them by passing, e.g., `--features=\"a\"` ", ) .run(); } #[cargo_test] fn test_default_features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] default = ["a"] a = [] [[test]] name = "foo" required-features = ["a"] "#, ) .file("tests/foo.rs", "#[test]\nfn test() {}") .build(); p.cargo("test") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/foo-[..][EXE])", ) .with_stdout_contains("test test ... ok") .run(); p.cargo("test --no-default-features") .with_stderr("[FINISHED] test [unoptimized + debuginfo] target(s) in [..]") .with_stdout("") .run(); p.cargo("test --test=foo") .with_stderr( "\ [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/foo-[..][EXE])", ) .with_stdout_contains("test test ... ok") .run(); p.cargo("test --test=foo --no-default-features") .with_status(101) .with_stderr( "\ error: target `foo` in package `foo` requires the features: `a` Consider enabling them by passing, e.g., `--features=\"a\"` ", ) .run(); } #[cargo_test] fn test_arg_features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] a = [] [[test]] name = "foo" required-features = ["a"] "#, ) .file("tests/foo.rs", "#[test]\nfn test() {}") .build(); p.cargo("test --features a") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] 
[RUNNING] [..] (target/debug/deps/foo-[..][EXE])", ) .with_stdout_contains("test test ... ok") .run(); } #[cargo_test] fn test_multiple_required_features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] default = ["a", "b"] a = [] b = ["a"] c = [] [[test]] name = "foo_1" required-features = ["b", "c"] [[test]] name = "foo_2" required-features = ["a"] "#, ) .file("tests/foo_1.rs", "#[test]\nfn test() {}") .file("tests/foo_2.rs", "#[test]\nfn test() {}") .build(); p.cargo("test") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/foo_2-[..][EXE])", ) .with_stdout_contains("test test ... ok") .run(); p.cargo("test --features c") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/foo_1-[..][EXE]) [RUNNING] [..] (target/debug/deps/foo_2-[..][EXE])", ) .with_stdout_contains_n("test test ... ok", 2) .run(); p.cargo("test --no-default-features") .with_stderr("[FINISHED] test [unoptimized + debuginfo] target(s) in [..]") .with_stdout("") .run(); } #[cargo_test(nightly, reason = "bench")] fn bench_default_features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] default = ["a"] a = [] [[bench]] name = "foo" required-features = ["a"] "#, ) .file( "benches/foo.rs", r#" #![feature(test)] extern crate test; #[bench] fn bench(_: &mut test::Bencher) { } "#, ) .build(); p.cargo("bench") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] bench [optimized] target(s) in [..] [RUNNING] [..] (target/release/deps/foo-[..][EXE])", ) .with_stdout_contains("test bench ... 
bench: [..]") .run(); p.cargo("bench --no-default-features") .with_stderr("[FINISHED] bench [optimized] target(s) in [..]".to_string()) .with_stdout("") .run(); p.cargo("bench --bench=foo") .with_stderr( "\ [FINISHED] bench [optimized] target(s) in [..] [RUNNING] [..] (target/release/deps/foo-[..][EXE])", ) .with_stdout_contains("test bench ... bench: [..]") .run(); p.cargo("bench --bench=foo --no-default-features") .with_status(101) .with_stderr( "\ error: target `foo` in package `foo` requires the features: `a` Consider enabling them by passing, e.g., `--features=\"a\"` ", ) .run(); } #[cargo_test(nightly, reason = "bench")] fn bench_arg_features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] a = [] [[bench]] name = "foo" required-features = ["a"] "#, ) .file( "benches/foo.rs", r#" #![feature(test)] extern crate test; #[bench] fn bench(_: &mut test::Bencher) { } "#, ) .build(); p.cargo("bench --features a") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] bench [optimized] target(s) in [..] [RUNNING] [..] (target/release/deps/foo-[..][EXE])", ) .with_stdout_contains("test bench ... bench: [..]") .run(); } #[cargo_test(nightly, reason = "bench")] fn bench_multiple_required_features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] default = ["a", "b"] a = [] b = ["a"] c = [] [[bench]] name = "foo_1" required-features = ["b", "c"] [[bench]] name = "foo_2" required-features = ["a"] "#, ) .file( "benches/foo_1.rs", r#" #![feature(test)] extern crate test; #[bench] fn bench(_: &mut test::Bencher) { } "#, ) .file( "benches/foo_2.rs", r#" #![feature(test)] extern crate test; #[bench] fn bench(_: &mut test::Bencher) { } "#, ) .build(); p.cargo("bench") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] bench [optimized] target(s) in [..] [RUNNING] [..] 
(target/release/deps/foo_2-[..][EXE])", ) .with_stdout_contains("test bench ... bench: [..]") .run(); p.cargo("bench --features c") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] bench [optimized] target(s) in [..] [RUNNING] [..] (target/release/deps/foo_1-[..][EXE]) [RUNNING] [..] (target/release/deps/foo_2-[..][EXE])", ) .with_stdout_contains_n("test bench ... bench: [..]", 2) .run(); p.cargo("bench --no-default-features") .with_stderr("[FINISHED] bench [optimized] target(s) in [..]") .with_stdout("") .run(); } #[cargo_test] fn install_default_features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] default = ["a"] a = [] [[bin]] name = "foo" required-features = ["a"] [[example]] name = "foo" required-features = ["a"] "#, ) .file("src/main.rs", "fn main() {}") .file("examples/foo.rs", "fn main() {}") .build(); p.cargo("install --path .").run(); assert_has_installed_exe(cargo_home(), "foo"); p.cargo("uninstall foo").run(); p.cargo("install --path . --no-default-features") .with_stderr( "\ [INSTALLING] foo v0.0.1 ([..]) [FINISHED] release [optimized] target(s) in [..] [WARNING] none of the package's binaries are available for install using the selected features ", ) .run(); assert_has_not_installed_exe(cargo_home(), "foo"); p.cargo("install --path . --bin=foo").run(); assert_has_installed_exe(cargo_home(), "foo"); p.cargo("uninstall foo").run(); p.cargo("install --path . --bin=foo --no-default-features") .with_status(101) .with_stderr( "\ [INSTALLING] foo v0.0.1 ([..]) [ERROR] failed to compile `foo v0.0.1 ([..])`, intermediate artifacts can be found at \ `[..]target` Caused by: target `foo` in package `foo` requires the features: `a` Consider enabling them by passing, e.g., `--features=\"a\"` ", ) .run(); assert_has_not_installed_exe(cargo_home(), "foo"); p.cargo("install --path . 
--example=foo").run(); assert_has_installed_exe(cargo_home(), "foo"); p.cargo("uninstall foo").run(); p.cargo("install --path . --example=foo --no-default-features") .with_status(101) .with_stderr( "\ [INSTALLING] foo v0.0.1 ([..]) [ERROR] failed to compile `foo v0.0.1 ([..])`, intermediate artifacts can be found at \ `[..]target` Caused by: target `foo` in package `foo` requires the features: `a` Consider enabling them by passing, e.g., `--features=\"a\"` ", ) .run(); assert_has_not_installed_exe(cargo_home(), "foo"); } #[cargo_test] fn install_arg_features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] a = [] [[bin]] name = "foo" required-features = ["a"] "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("install --features a").run(); assert_has_installed_exe(cargo_home(), "foo"); p.cargo("uninstall foo").run(); } #[cargo_test] fn install_multiple_required_features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] default = ["a", "b"] a = [] b = ["a"] c = [] [[bin]] name = "foo_1" path = "src/foo_1.rs" required-features = ["b", "c"] [[bin]] name = "foo_2" path = "src/foo_2.rs" required-features = ["a"] [[example]] name = "foo_3" path = "src/foo_3.rs" required-features = ["b", "c"] [[example]] name = "foo_4" path = "src/foo_4.rs" required-features = ["a"] "#, ) .file("src/foo_1.rs", "fn main() {}") .file("src/foo_2.rs", "fn main() {}") .file("src/foo_3.rs", "fn main() {}") .file("src/foo_4.rs", "fn main() {}") .build(); p.cargo("install --path .").run(); assert_has_not_installed_exe(cargo_home(), "foo_1"); assert_has_installed_exe(cargo_home(), "foo_2"); assert_has_not_installed_exe(cargo_home(), "foo_3"); assert_has_not_installed_exe(cargo_home(), "foo_4"); p.cargo("uninstall foo").run(); p.cargo("install --path . 
--bins --examples").run(); assert_has_not_installed_exe(cargo_home(), "foo_1"); assert_has_installed_exe(cargo_home(), "foo_2"); assert_has_not_installed_exe(cargo_home(), "foo_3"); assert_has_installed_exe(cargo_home(), "foo_4"); p.cargo("uninstall foo").run(); p.cargo("install --path . --features c").run(); assert_has_installed_exe(cargo_home(), "foo_1"); assert_has_installed_exe(cargo_home(), "foo_2"); assert_has_not_installed_exe(cargo_home(), "foo_3"); assert_has_not_installed_exe(cargo_home(), "foo_4"); p.cargo("uninstall foo").run(); p.cargo("install --path . --features c --bins --examples") .run(); assert_has_installed_exe(cargo_home(), "foo_1"); assert_has_installed_exe(cargo_home(), "foo_2"); assert_has_installed_exe(cargo_home(), "foo_3"); assert_has_installed_exe(cargo_home(), "foo_4"); p.cargo("uninstall foo").run(); p.cargo("install --path . --no-default-features") .with_stderr( "\ [INSTALLING] foo v0.0.1 ([..]) [FINISHED] release [optimized] target(s) in [..] [WARNING] none of the package's binaries are available for install using the selected features ", ) .run(); p.cargo("install --path . --no-default-features --bins") .with_stderr( "\ [INSTALLING] foo v0.0.1 ([..]) [WARNING] Target filter `bins` specified, but no targets matched. This is a no-op [FINISHED] release [optimized] target(s) in [..] [WARNING] none of the package's binaries are available for install using the selected features ", ) .run(); p.cargo("install --path . --no-default-features --examples") .with_stderr( "\ [INSTALLING] foo v0.0.1 ([..]) [WARNING] Target filter `examples` specified, but no targets matched. This is a no-op [FINISHED] release [optimized] target(s) in [..] [WARNING] none of the package's binaries are available for install using the selected features ", ) .run(); p.cargo("install --path . --no-default-features --bins --examples") .with_stderr( "\ [INSTALLING] foo v0.0.1 ([..]) [WARNING] Target filters `bins`, `examples` specified, but no targets matched. 
This is a no-op [FINISHED] release [optimized] target(s) in [..] [WARNING] none of the package's binaries are available for install using the selected features ", ) .run(); assert_has_not_installed_exe(cargo_home(), "foo_1"); assert_has_not_installed_exe(cargo_home(), "foo_2"); assert_has_not_installed_exe(cargo_home(), "foo_3"); assert_has_not_installed_exe(cargo_home(), "foo_4"); } #[cargo_test] fn dep_feature_in_toml() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = { path = "bar", features = ["a"] } [[bin]] name = "foo" required-features = ["bar/a"] [[example]] name = "foo" required-features = ["bar/a"] [[test]] name = "foo" required-features = ["bar/a"] [[bench]] name = "foo" required-features = ["bar/a"] "#, ) .file("src/main.rs", "fn main() {}") .file("examples/foo.rs", "fn main() {}") .file("tests/foo.rs", "#[test]\nfn test() {}") .file( "benches/foo.rs", r#" #![feature(test)] extern crate test; #[bench] fn bench(_: &mut test::Bencher) { } "#, ) .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.0.1" authors = [] [features] a = [] "#, ) .file("bar/src/lib.rs", "") .build(); p.cargo("build").run(); // bin p.cargo("build --bin=foo").run(); assert!(p.bin("foo").is_file()); // example p.cargo("build --example=foo").run(); assert!(p.bin("examples/foo").is_file()); // test p.cargo("test --test=foo") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/foo-[..][EXE])", ) .with_stdout_contains("test test ... ok") .run(); // bench if is_nightly() { p.cargo("bench --bench=foo") .with_stderr( "\ [COMPILING] bar v0.0.1 ([CWD]/bar) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] bench [optimized] target(s) in [..] [RUNNING] [..] (target/release/deps/foo-[..][EXE])", ) .with_stdout_contains("test bench ... 
bench: [..]") .run(); } // install p.cargo("install").run(); assert_has_installed_exe(cargo_home(), "foo"); p.cargo("uninstall foo").run(); } #[cargo_test] fn dep_feature_in_cmd_line() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = { path = "bar" } [[bin]] name = "foo" required-features = ["bar/a"] [[example]] name = "foo" required-features = ["bar/a"] [[test]] name = "foo" required-features = ["bar/a"] [[bench]] name = "foo" required-features = ["bar/a"] "#, ) .file("src/main.rs", "fn main() {}") .file("examples/foo.rs", "fn main() {}") .file( "tests/foo.rs", r#" #[test] fn bin_is_built() { let s = format!("target/debug/foo{}", std::env::consts::EXE_SUFFIX); let p = std::path::Path::new(&s); assert!(p.exists(), "foo does not exist"); } "#, ) .file( "benches/foo.rs", r#" #![feature(test)] extern crate test; #[bench] fn bench(_: &mut test::Bencher) { } "#, ) .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.0.1" authors = [] [features] a = [] "#, ) .file("bar/src/lib.rs", "") .build(); // This is a no-op p.cargo("build").with_stderr("[FINISHED] dev [..]").run(); assert!(!p.bin("foo").is_file()); // bin p.cargo("build --bin=foo") .with_status(101) .with_stderr( "\ error: target `foo` in package `foo` requires the features: `bar/a` Consider enabling them by passing, e.g., `--features=\"bar/a\"` ", ) .run(); p.cargo("build --bin=foo --features bar/a").run(); assert!(p.bin("foo").is_file()); // example p.cargo("build --example=foo") .with_status(101) .with_stderr( "\ error: target `foo` in package `foo` requires the features: `bar/a` Consider enabling them by passing, e.g., `--features=\"bar/a\"` ", ) .run(); p.cargo("build --example=foo --features bar/a").run(); assert!(p.bin("examples/foo").is_file()); // test // This is a no-op, since no tests are enabled p.cargo("test") .with_stderr("[FINISHED] test [unoptimized + debuginfo] target(s) in [..]") .with_stdout("") .run(); // Delete 
the target directory so this can check if the main.rs gets built. p.build_dir().rm_rf(); p.cargo("test --test=foo --features bar/a") .with_stderr( "\ [COMPILING] bar v0.0.1 ([CWD]/bar) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/foo-[..][EXE])", ) .with_stdout_contains("test bin_is_built ... ok") .run(); // bench if is_nightly() { p.cargo("bench") .with_stderr("[FINISHED] bench [optimized] target(s) in [..]") .with_stdout("") .run(); p.cargo("bench --bench=foo --features bar/a") .with_stderr( "\ [COMPILING] bar v0.0.1 ([CWD]/bar) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] bench [optimized] target(s) in [..] [RUNNING] [..] (target/release/deps/foo-[..][EXE])", ) .with_stdout_contains("test bench ... bench: [..]") .run(); } // install p.cargo("install --path .") .with_stderr( "\ [INSTALLING] foo v0.0.1 ([..]) [FINISHED] release [optimized] target(s) in [..] [WARNING] none of the package's binaries are available for install using the selected features ", ) .run(); assert_has_not_installed_exe(cargo_home(), "foo"); p.cargo("install --features bar/a").run(); assert_has_installed_exe(cargo_home(), "foo"); p.cargo("uninstall foo").run(); } #[cargo_test] fn test_skips_compiling_bin_with_missing_required_features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] a = [] [[bin]] name = "bin_foo" path = "src/bin/foo.rs" required-features = ["a"] "#, ) .file("src/bin/foo.rs", "extern crate bar; fn main() {}") .file("tests/foo.rs", "") .file("benches/foo.rs", "") .build(); p.cargo("test") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] 
(target/debug/deps/foo-[..][EXE])", ) .with_stdout_contains("running 0 tests") .run(); p.cargo("test --features a -j 1") .with_status(101) .with_stderr_contains( "\ [COMPILING] foo v0.0.1 ([CWD]) error[E0463]: can't find crate for `bar`", ) .run(); if is_nightly() { p.cargo("bench") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] bench [optimized] target(s) in [..] [RUNNING] [..] (target/release/deps/foo-[..][EXE])", ) .with_stdout_contains("running 0 tests") .run(); p.cargo("bench --features a -j 1") .with_status(101) .with_stderr_contains( "\ [COMPILING] foo v0.0.1 ([CWD]) error[E0463]: can't find crate for `bar`", ) .run(); } } #[cargo_test] fn run_default() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] default = [] a = [] [[bin]] name = "foo" required-features = ["a"] "#, ) .file("src/lib.rs", "") .file("src/main.rs", "extern crate foo; fn main() {}") .build(); p.cargo("run") .with_status(101) .with_stderr( "\ error: target `foo` in package `foo` requires the features: `a` Consider enabling them by passing, e.g., `--features=\"a\"` ", ) .run(); p.cargo("run --features a").run(); } #[cargo_test] fn run_default_multiple_required_features() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] default = ["a"] a = [] b = [] [[bin]] name = "foo1" path = "src/foo1.rs" required-features = ["a"] [[bin]] name = "foo3" path = "src/foo3.rs" required-features = ["b"] [[bin]] name = "foo2" path = "src/foo2.rs" required-features = ["b"] "#, ) .file("src/lib.rs", "") .file("src/foo1.rs", "extern crate foo; fn main() {}") .file("src/foo3.rs", "extern crate foo; fn main() {}") .file("src/foo2.rs", "extern crate foo; fn main() {}") .build(); p.cargo("run") .with_status(101) .with_stderr( "\ error: `cargo run` could not determine which binary to run[..] 
available binaries: foo1, foo2, foo3", ) .run(); } #[cargo_test] fn renamed_required_features() { // Test that required-features uses renamed package feature names. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [[bin]] name = "x" required-features = ["a1/f1"] [dependencies] a1 = {path="a1", package="a"} a2 = {path="a2", package="a"} "#, ) .file( "src/bin/x.rs", r#" fn main() { a1::f(); a2::f(); } "#, ) .file( "a1/Cargo.toml", r#" [package] name = "a" version = "0.1.0" [features] f1 = [] "#, ) .file( "a1/src/lib.rs", r#" pub fn f() { if cfg!(feature="f1") { println!("a1 f1"); } } "#, ) .file( "a2/Cargo.toml", r#" [package] name = "a" version = "0.2.0" [features] f2 = [] "#, ) .file( "a2/src/lib.rs", r#" pub fn f() { if cfg!(feature="f2") { println!("a2 f2"); } } "#, ) .build(); p.cargo("run") .with_status(101) .with_stderr( "\ [ERROR] target `x` in package `foo` requires the features: `a1/f1` Consider enabling them by passing, e.g., `--features=\"a1/f1\"` ", ) .run(); p.cargo("build --features a1/f1").run(); p.rename_run("x", "x_with_f1").with_stdout("a1 f1").run(); p.cargo("build --features a1/f1,a2/f2").run(); p.rename_run("x", "x_with_f1_f2") .with_stdout("a1 f1\na2 f2") .run(); } cargo-0.66.0/tests/testsuite/run.rs000066400000000000000000001140451432416201200172430ustar00rootroot00000000000000//! Tests for the `cargo run` command. use cargo_test_support::{basic_bin_manifest, basic_lib_manifest, project, Project}; use cargo_util::paths::dylib_path_envvar; #[cargo_test] fn simple() { let p = project() .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .build(); p.cargo("run") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
[RUNNING] `target/debug/foo[EXE]`", ) .with_stdout("hello") .run(); assert!(p.bin("foo").is_file()); } #[cargo_test] fn quiet_arg() { let p = project() .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .build(); p.cargo("run -q").with_stderr("").with_stdout("hello").run(); p.cargo("run --quiet") .with_stderr("") .with_stdout("hello") .run(); } #[cargo_test] fn quiet_arg_and_verbose_arg() { let p = project() .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .build(); p.cargo("run -q -v") .with_status(101) .with_stderr("[ERROR] cannot set both --verbose and --quiet") .run(); } #[cargo_test] fn quiet_arg_and_verbose_config() { let p = project() .file( ".cargo/config", r#" [term] verbose = true "#, ) .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .build(); p.cargo("run -q").with_stderr("").with_stdout("hello").run(); } #[cargo_test] fn verbose_arg_and_quiet_config() { let p = project() .file( ".cargo/config", r#" [term] quiet = true "#, ) .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .build(); p.cargo("run -v") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [RUNNING] `target/debug/foo[EXE]`", ) .with_stdout("hello") .run(); } #[cargo_test] fn quiet_config_alone() { let p = project() .file( ".cargo/config", r#" [term] quiet = true "#, ) .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .build(); p.cargo("run").with_stderr("").with_stdout("hello").run(); } #[cargo_test] fn verbose_config_alone() { let p = project() .file( ".cargo/config", r#" [term] verbose = true "#, ) .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .build(); p.cargo("run") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
[RUNNING] `target/debug/foo[EXE]`", ) .with_stdout("hello") .run(); } #[cargo_test] fn quiet_config_and_verbose_config() { let p = project() .file( ".cargo/config", r#" [term] verbose = true quiet = true "#, ) .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .build(); p.cargo("run") .with_status(101) .with_stderr("[ERROR] cannot set both `term.verbose` and `term.quiet`") .run(); } #[cargo_test] fn simple_with_args() { let p = project() .file( "src/main.rs", r#" fn main() { assert_eq!(std::env::args().nth(1).unwrap(), "hello"); assert_eq!(std::env::args().nth(2).unwrap(), "world"); } "#, ) .build(); p.cargo("run hello world").run(); } #[cfg(unix)] #[cargo_test] fn simple_with_non_utf8_args() { use std::os::unix::ffi::OsStrExt; let p = project() .file( "src/main.rs", r#" use std::ffi::OsStr; use std::os::unix::ffi::OsStrExt; fn main() { assert_eq!(std::env::args_os().nth(1).unwrap(), OsStr::from_bytes(b"hello")); assert_eq!(std::env::args_os().nth(2).unwrap(), OsStr::from_bytes(b"ab\xffcd")); } "#, ) .build(); p.cargo("run") .arg("hello") .arg(std::ffi::OsStr::from_bytes(b"ab\xFFcd")) .run(); } #[cargo_test] fn exit_code() { let p = project() .file("src/main.rs", "fn main() { std::process::exit(2); }") .build(); let mut output = String::from( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [RUNNING] `target[..]` ", ); if !cfg!(unix) { output.push_str( "[ERROR] process didn't exit successfully: `target[..]foo[..]` (exit [..]: 2)", ); } p.cargo("run").with_status(2).with_stderr(output).run(); } #[cargo_test] fn exit_code_verbose() { let p = project() .file("src/main.rs", "fn main() { std::process::exit(2); }") .build(); let mut output = String::from( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
[RUNNING] `target[..]` ", ); if !cfg!(unix) { output.push_str( "[ERROR] process didn't exit successfully: `target[..]foo[..]` (exit [..]: 2)", ); } p.cargo("run -v").with_status(2).with_stderr(output).run(); } #[cargo_test] fn no_main_file() { let p = project().file("src/lib.rs", "").build(); p.cargo("run") .with_status(101) .with_stderr( "[ERROR] a bin target must be available \ for `cargo run`\n", ) .run(); } #[cargo_test] fn too_many_bins() { let p = project() .file("src/lib.rs", "") .file("src/bin/a.rs", "") .file("src/bin/b.rs", "") .build(); // Using [..] here because the order is not stable p.cargo("run") .with_status(101) .with_stderr( "[ERROR] `cargo run` could not determine which binary to run. \ Use the `--bin` option to specify a binary, or the \ `default-run` manifest key.\ \navailable binaries: [..]\n", ) .run(); } #[cargo_test] fn specify_name() { let p = project() .file("src/lib.rs", "") .file( "src/bin/a.rs", r#" #[allow(unused_extern_crates)] extern crate foo; fn main() { println!("hello a.rs"); } "#, ) .file( "src/bin/b.rs", r#" #[allow(unused_extern_crates)] extern crate foo; fn main() { println!("hello b.rs"); } "#, ) .build(); p.cargo("run --bin a -v") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc [..] src/lib.rs [..]` [RUNNING] `rustc [..] src/bin/a.rs [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [RUNNING] `target/debug/a[EXE]`", ) .with_stdout("hello a.rs") .run(); p.cargo("run --bin b -v") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [RUNNING] `rustc [..] src/bin/b.rs [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
[RUNNING] `target/debug/b[EXE]`", ) .with_stdout("hello b.rs") .run(); } #[cargo_test] fn specify_default_run() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] default-run = "a" "#, ) .file("src/lib.rs", "") .file("src/bin/a.rs", r#"fn main() { println!("hello A"); }"#) .file("src/bin/b.rs", r#"fn main() { println!("hello B"); }"#) .build(); p.cargo("run").with_stdout("hello A").run(); p.cargo("run --bin a").with_stdout("hello A").run(); p.cargo("run --bin b").with_stdout("hello B").run(); } #[cargo_test] fn bogus_default_run() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] default-run = "b" "#, ) .file("src/lib.rs", "") .file("src/bin/a.rs", r#"fn main() { println!("hello A"); }"#) .build(); p.cargo("run") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]/foo/Cargo.toml` Caused by: default-run target `b` not found Did you mean `a`? ", ) .run(); } #[cargo_test] fn run_example() { let p = project() .file("src/lib.rs", "") .file("examples/a.rs", r#"fn main() { println!("example"); }"#) .file("src/bin/a.rs", r#"fn main() { println!("bin"); }"#) .build(); p.cargo("run --example a") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
[RUNNING] `target/debug/examples/a[EXE]`", ) .with_stdout("example") .run(); } #[cargo_test] fn run_library_example() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [[example]] name = "bar" crate_type = ["lib"] "#, ) .file("src/lib.rs", "") .file("examples/bar.rs", "fn foo() {}") .build(); p.cargo("run --example bar") .with_status(101) .with_stderr("[ERROR] example target `bar` is a library and cannot be executed") .run(); } #[cargo_test] fn run_bin_example() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [[example]] name = "bar" crate_type = ["bin"] "#, ) .file("src/lib.rs", "") .file("examples/bar.rs", r#"fn main() { println!("example"); }"#) .build(); p.cargo("run --example bar") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [RUNNING] `target/debug/examples/bar[EXE]`", ) .with_stdout("example") .run(); } fn autodiscover_examples_project(rust_edition: &str, autoexamples: Option) -> Project { let autoexamples = match autoexamples { None => "".to_string(), Some(bool) => format!("autoexamples = {}", bool), }; project() .file( "Cargo.toml", &format!( r#" [project] name = "foo" version = "0.0.1" authors = [] edition = "{rust_edition}" {autoexamples} [features] magic = [] [[example]] name = "do_magic" required-features = ["magic"] "#, rust_edition = rust_edition, autoexamples = autoexamples ), ) .file("examples/a.rs", r#"fn main() { println!("example"); }"#) .file( "examples/do_magic.rs", r#" fn main() { println!("magic example"); } "#, ) .build() } #[cargo_test] fn run_example_autodiscover_2015() { let p = autodiscover_examples_project("2015", None); p.cargo("run --example a") .with_status(101) .with_stderr( "warning: \ An explicit [[example]] section is specified in Cargo.toml which currently disables Cargo from automatically inferring other example targets. 
This inference behavior will change in the Rust 2018 edition and the following files will be included as a example target: * [..]a.rs This is likely to break cargo build or cargo test as these files may not be ready to be compiled as a example target today. You can future-proof yourself and disable this warning by adding `autoexamples = false` to your [package] section. You may also move the files to a location where Cargo would not automatically infer them to be a target, such as in subfolders. For more information on this warning you can consult https://github.com/rust-lang/cargo/issues/5330 error: no example target named `a`. Available example targets: do_magic ", ) .run(); } #[cargo_test] fn run_example_autodiscover_2015_with_autoexamples_enabled() { let p = autodiscover_examples_project("2015", Some(true)); p.cargo("run --example a") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [RUNNING] `target/debug/examples/a[EXE]`", ) .with_stdout("example") .run(); } #[cargo_test] fn run_example_autodiscover_2015_with_autoexamples_disabled() { let p = autodiscover_examples_project("2015", Some(false)); p.cargo("run --example a") .with_status(101) .with_stderr( "\ error: no example target named `a`. Available example targets: do_magic ", ) .run(); } #[cargo_test] fn run_example_autodiscover_2018() { let p = autodiscover_examples_project("2018", None); p.cargo("run --example a") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
[RUNNING] `target/debug/examples/a[EXE]`", ) .with_stdout("example") .run(); } #[cargo_test] fn autobins_disables() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" autobins = false "#, ) .file("src/lib.rs", "pub mod bin;") .file("src/bin/mod.rs", "// empty") .build(); p.cargo("run") .with_status(101) .with_stderr("[ERROR] a bin target must be available for `cargo run`") .run(); } #[cargo_test] fn run_bins() { let p = project() .file("src/lib.rs", "") .file("examples/a.rs", r#"fn main() { println!("example"); }"#) .file("src/bin/a.rs", r#"fn main() { println!("bin"); }"#) .build(); p.cargo("run --bins") .with_status(1) .with_stderr_contains( "error: Found argument '--bins' which wasn't expected, or isn't valid in this context", ) .run(); } #[cargo_test] fn run_with_filename() { let p = project() .file("src/lib.rs", "") .file( "src/bin/a.rs", r#" extern crate foo; fn main() { println!("hello a.rs"); } "#, ) .file("examples/a.rs", r#"fn main() { println!("example"); }"#) .build(); p.cargo("run --bin bin.rs") .with_status(101) .with_stderr( "\ [ERROR] no bin target named `bin.rs`. Available bin targets: a ", ) .run(); p.cargo("run --bin a.rs") .with_status(101) .with_stderr( "\ [ERROR] no bin target named `a.rs` Did you mean `a`?", ) .run(); p.cargo("run --example example.rs") .with_status(101) .with_stderr( "\ [ERROR] no example target named `example.rs`. 
Available example targets: a ", ) .run(); p.cargo("run --example a.rs") .with_status(101) .with_stderr( "\ [ERROR] no example target named `a.rs` Did you mean `a`?", ) .run(); } #[cargo_test] fn either_name_or_example() { let p = project() .file("src/bin/a.rs", r#"fn main() { println!("hello a.rs"); }"#) .file("examples/b.rs", r#"fn main() { println!("hello b.rs"); }"#) .build(); p.cargo("run --bin a --example b") .with_status(101) .with_stderr( "[ERROR] `cargo run` can run at most one \ executable, but multiple were \ specified", ) .run(); } #[cargo_test] fn one_bin_multiple_examples() { let p = project() .file("src/lib.rs", "") .file( "src/bin/main.rs", r#"fn main() { println!("hello main.rs"); }"#, ) .file("examples/a.rs", r#"fn main() { println!("hello a.rs"); }"#) .file("examples/b.rs", r#"fn main() { println!("hello b.rs"); }"#) .build(); p.cargo("run") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [RUNNING] `target/debug/main[EXE]`", ) .with_stdout("hello main.rs") .run(); } #[cargo_test] fn example_with_release_flag() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] version = "*" path = "bar" "#, ) .file( "examples/a.rs", r#" extern crate bar; fn main() { if cfg!(debug_assertions) { println!("slow1") } else { println!("fast1") } bar::baz(); } "#, ) .file("bar/Cargo.toml", &basic_lib_manifest("bar")) .file( "bar/src/bar.rs", r#" pub fn baz() { if cfg!(debug_assertions) { println!("slow2") } else { println!("fast2") } } "#, ) .build(); p.cargo("run -v --release --example a") .with_stderr( "\ [COMPILING] bar v0.5.0 ([CWD]/bar) [RUNNING] `rustc --crate-name bar bar/src/bar.rs [..]--crate-type lib \ --emit=[..]link \ -C opt-level=3[..]\ -C metadata=[..] 
\ --out-dir [CWD]/target/release/deps \ -L dependency=[CWD]/target/release/deps` [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name a examples/a.rs [..]--crate-type bin \ --emit=[..]link \ -C opt-level=3[..]\ -C metadata=[..] \ --out-dir [CWD]/target/release/examples \ -L dependency=[CWD]/target/release/deps \ --extern bar=[CWD]/target/release/deps/libbar-[..].rlib` [FINISHED] release [optimized] target(s) in [..] [RUNNING] `target/release/examples/a[EXE]` ", ) .with_stdout( "\ fast1 fast2", ) .run(); p.cargo("run -v --example a") .with_stderr( "\ [COMPILING] bar v0.5.0 ([CWD]/bar) [RUNNING] `rustc --crate-name bar bar/src/bar.rs [..]--crate-type lib \ --emit=[..]link[..]\ -C debuginfo=2 \ -C metadata=[..] \ --out-dir [CWD]/target/debug/deps \ -L dependency=[CWD]/target/debug/deps` [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name a examples/a.rs [..]--crate-type bin \ --emit=[..]link[..]\ -C debuginfo=2 \ -C metadata=[..] \ --out-dir [CWD]/target/debug/examples \ -L dependency=[CWD]/target/debug/deps \ --extern bar=[CWD]/target/debug/deps/libbar-[..].rlib` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
[RUNNING] `target/debug/examples/a[EXE]` ", ) .with_stdout( "\ slow1 slow2", ) .run(); } #[cargo_test] fn run_dylib_dep() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" "#, ) .file( "src/main.rs", r#"extern crate bar; fn main() { bar::bar(); }"#, ) .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [lib] name = "bar" crate-type = ["dylib"] "#, ) .file("bar/src/lib.rs", "pub fn bar() {}") .build(); p.cargo("run hello world").run(); } #[cargo_test] fn run_with_bin_dep() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [dependencies.bar] path = "bar" "#, ) .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [[bin]] name = "bar" "#, ) .file("bar/src/main.rs", r#"fn main() { println!("bar"); }"#) .build(); p.cargo("run") .with_stderr( "\ [WARNING] foo v0.0.1 ([CWD]) ignoring invalid dependency `bar` which is missing a lib target [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
[RUNNING] `target/debug/foo[EXE]`", ) .with_stdout("hello") .run(); } #[cargo_test] fn run_with_bin_deps() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [dependencies.bar1] path = "bar1" [dependencies.bar2] path = "bar2" "#, ) .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .file( "bar1/Cargo.toml", r#" [package] name = "bar1" version = "0.0.1" authors = [] [[bin]] name = "bar1" "#, ) .file("bar1/src/main.rs", r#"fn main() { println!("bar1"); }"#) .file( "bar2/Cargo.toml", r#" [package] name = "bar2" version = "0.0.1" authors = [] [[bin]] name = "bar2" "#, ) .file("bar2/src/main.rs", r#"fn main() { println!("bar2"); }"#) .build(); p.cargo("run") .with_stderr( "\ [WARNING] foo v0.0.1 ([CWD]) ignoring invalid dependency `bar1` which is missing a lib target [WARNING] foo v0.0.1 ([CWD]) ignoring invalid dependency `bar2` which is missing a lib target [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [RUNNING] `target/debug/foo[EXE]`", ) .with_stdout("hello") .run(); } #[cargo_test] fn run_with_bin_dep_in_workspace() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["foo1", "foo2"] "#, ) .file( "foo1/Cargo.toml", r#" [package] name = "foo1" version = "0.0.1" [dependencies.bar1] path = "bar1" "#, ) .file("foo1/src/main.rs", r#"fn main() { println!("hello"); }"#) .file( "foo1/bar1/Cargo.toml", r#" [package] name = "bar1" version = "0.0.1" authors = [] [[bin]] name = "bar1" "#, ) .file( "foo1/bar1/src/main.rs", r#"fn main() { println!("bar1"); }"#, ) .file( "foo2/Cargo.toml", r#" [package] name = "foo2" version = "0.0.1" [dependencies.bar2] path = "bar2" "#, ) .file("foo2/src/main.rs", r#"fn main() { println!("hello"); }"#) .file( "foo2/bar2/Cargo.toml", r#" [package] name = "bar2" version = "0.0.1" authors = [] [[bin]] name = "bar2" "#, ) .file( "foo2/bar2/src/main.rs", r#"fn main() { println!("bar2"); }"#, ) .build(); p.cargo("run") .with_status(101) 
.with_stderr( "\ [ERROR] `cargo run` could not determine which binary to run[..] available binaries: bar1, bar2, foo1, foo2", ) .run(); p.cargo("run --bin foo1") .with_stderr( "\ [WARNING] foo1 v0.0.1 ([CWD]/foo1) ignoring invalid dependency `bar1` which is missing a lib target [WARNING] foo2 v0.0.1 ([CWD]/foo2) ignoring invalid dependency `bar2` which is missing a lib target [COMPILING] foo1 v0.0.1 ([CWD]/foo1) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [RUNNING] `target/debug/foo1[EXE]`", ) .with_stdout("hello") .run(); } #[cargo_test] fn release_works() { let p = project() .file( "src/main.rs", r#" fn main() { if cfg!(debug_assertions) { panic!() } } "#, ) .build(); p.cargo("run --release") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] release [optimized] target(s) in [..] [RUNNING] `target/release/foo[EXE]` ", ) .run(); assert!(p.release_bin("foo").is_file()); } #[cargo_test] fn release_short_works() { let p = project() .file( "src/main.rs", r#" fn main() { if cfg!(debug_assertions) { panic!() } } "#, ) .build(); p.cargo("run -r") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] release [optimized] target(s) in [..] 
[RUNNING] `target/release/foo[EXE]` ", ) .run(); assert!(p.release_bin("foo").is_file()); } #[cargo_test] fn run_bin_different_name() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [[bin]] name = "bar" "#, ) .file("src/bar.rs", "fn main() {}") .build(); p.cargo("run").run(); } #[cargo_test] fn dashes_are_forwarded() { let p = project() .file( "src/bin/bar.rs", r#" fn main() { let s: Vec = std::env::args().collect(); assert_eq!(s[1], "--"); assert_eq!(s[2], "a"); assert_eq!(s[3], "--"); assert_eq!(s[4], "b"); } "#, ) .build(); p.cargo("run -- -- a -- b").run(); } #[cargo_test] fn run_from_executable_folder() { let p = project() .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .build(); let cwd = p.root().join("target").join("debug"); p.cargo("build").run(); p.cargo("run") .cwd(cwd) .with_stderr( "[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n\ [RUNNING] `./foo[EXE]`", ) .with_stdout("hello") .run(); } #[cargo_test] fn run_with_library_paths() { let p = project(); // Only link search directories within the target output directory are // propagated through to dylib_path_envvar() (see #3366). 
let mut dir1 = p.target_debug_dir(); dir1.push("foo\\backslash"); let mut dir2 = p.target_debug_dir(); dir2.push("dir=containing=equal=signs"); let p = p .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] build = "build.rs" "#, ) .file( "build.rs", &format!( r##" fn main() {{ println!(r#"cargo:rustc-link-search=native={}"#); println!(r#"cargo:rustc-link-search={}"#); }} "##, dir1.display(), dir2.display() ), ) .file( "src/main.rs", &format!( r##" fn main() {{ let search_path = std::env::var_os("{}").unwrap(); let paths = std::env::split_paths(&search_path).collect::>(); println!("{{:#?}}", paths); assert!(paths.contains(&r#"{}"#.into())); assert!(paths.contains(&r#"{}"#.into())); }} "##, dylib_path_envvar(), dir1.display(), dir2.display() ), ) .build(); p.cargo("run").run(); } #[cargo_test] fn library_paths_sorted_alphabetically() { let p = project(); let mut dir1 = p.target_debug_dir(); dir1.push("zzzzzzz"); let mut dir2 = p.target_debug_dir(); dir2.push("BBBBBBB"); let mut dir3 = p.target_debug_dir(); dir3.push("aaaaaaa"); let p = p .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] build = "build.rs" "#, ) .file( "build.rs", &format!( r##" fn main() {{ println!(r#"cargo:rustc-link-search=native={}"#); println!(r#"cargo:rustc-link-search=native={}"#); println!(r#"cargo:rustc-link-search=native={}"#); }} "##, dir1.display(), dir2.display(), dir3.display() ), ) .file( "src/main.rs", &format!( r##" fn main() {{ let search_path = std::env::var_os("{}").unwrap(); let paths = std::env::split_paths(&search_path).collect::>(); // ASCII case-sensitive sort assert_eq!("BBBBBBB", paths[0].file_name().unwrap().to_string_lossy()); assert_eq!("aaaaaaa", paths[1].file_name().unwrap().to_string_lossy()); assert_eq!("zzzzzzz", paths[2].file_name().unwrap().to_string_lossy()); }} "##, dylib_path_envvar() ), ) .build(); p.cargo("run").run(); } #[cargo_test] fn fail_no_extra_verbose() { let p = project() .file("src/main.rs", 
"fn main() { std::process::exit(1); }") .build(); p.cargo("run -q") .with_status(1) .with_stdout("") .with_stderr("") .run(); } #[cargo_test] fn run_multiple_packages() { let p = project() .no_manifest() .file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [workspace] [dependencies] d1 = { path = "d1" } d2 = { path = "d2" } d3 = { path = "../d3" } # outside of the workspace [[bin]] name = "foo" "#, ) .file("foo/src/foo.rs", "fn main() { println!(\"foo\"); }") .file("foo/d1/Cargo.toml", &basic_bin_manifest("d1")) .file("foo/d1/src/lib.rs", "") .file("foo/d1/src/main.rs", "fn main() { println!(\"d1\"); }") .file("foo/d2/Cargo.toml", &basic_bin_manifest("d2")) .file("foo/d2/src/main.rs", "fn main() { println!(\"d2\"); }") .file("d3/Cargo.toml", &basic_bin_manifest("d3")) .file("d3/src/main.rs", "fn main() { println!(\"d2\"); }") .build(); let cargo = || { let mut process_builder = p.cargo("run"); process_builder.cwd("foo"); process_builder }; cargo().arg("-p").arg("d1").with_stdout("d1").run(); cargo() .arg("-p") .arg("d2") .arg("--bin") .arg("d2") .with_stdout("d2") .run(); cargo().with_stdout("foo").run(); cargo().arg("-p").arg("d1").arg("-p").arg("d2") .with_status(1) .with_stderr_contains("error: The argument '--package [...]' was provided more than once, but cannot be used multiple times").run(); cargo() .arg("-p") .arg("d3") .with_status(101) .with_stderr_contains("[ERROR] package(s) `d3` not found in workspace [..]") .run(); cargo() .arg("-p") .arg("d*") .with_status(101) .with_stderr_contains( "[ERROR] `cargo run` does not support glob pattern `d*` on package selection", ) .run(); } #[cargo_test] fn explicit_bin_with_args() { let p = project() .file( "src/main.rs", r#" fn main() { assert_eq!(std::env::args().nth(1).unwrap(), "hello"); assert_eq!(std::env::args().nth(2).unwrap(), "world"); } "#, ) .build(); p.cargo("run --bin foo hello world").run(); } #[cargo_test] fn run_workspace() { let p = project() .file( "Cargo.toml", r#" 
[workspace] members = ["a", "b"] "#, ) .file("a/Cargo.toml", &basic_bin_manifest("a")) .file("a/src/main.rs", r#"fn main() {println!("run-a");}"#) .file("b/Cargo.toml", &basic_bin_manifest("b")) .file("b/src/main.rs", r#"fn main() {println!("run-b");}"#) .build(); p.cargo("run") .with_status(101) .with_stderr( "\ [ERROR] `cargo run` could not determine which binary to run[..] available binaries: a, b", ) .run(); p.cargo("run --bin a").with_stdout("run-a").run(); } #[cargo_test] fn default_run_workspace() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a", "b"] "#, ) .file( "a/Cargo.toml", r#" [project] name = "a" version = "0.0.1" default-run = "a" "#, ) .file("a/src/main.rs", r#"fn main() {println!("run-a");}"#) .file("b/Cargo.toml", &basic_bin_manifest("b")) .file("b/src/main.rs", r#"fn main() {println!("run-b");}"#) .build(); p.cargo("run").with_stdout("run-a").run(); } #[cargo_test] #[cfg(target_os = "macos")] fn run_link_system_path_macos() { use cargo_test_support::paths::{self, CargoPathExt}; use std::fs; // Check that the default system library path is honored. // First, build a shared library that will be accessed from // DYLD_FALLBACK_LIBRARY_PATH. let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" [lib] crate-type = ["cdylib"] "#, ) .file( "src/lib.rs", "#[no_mangle] pub extern fn something_shared() {}", ) .build(); p.cargo("build").run(); // This is convoluted. Since this test can't modify things in /usr, // this needs to dance around to check that things work. // // The default DYLD_FALLBACK_LIBRARY_PATH is: // $(HOME)/lib:/usr/local/lib:/lib:/usr/lib // // This will make use of ~/lib in the path, but the default cc link // path is /usr/lib:/usr/local/lib. So first need to build in one // location, and then move it to ~/lib. // // 1. Build with rustc-link-search pointing to libfoo so the initial // binary can be linked. // 2. Move the library to ~/lib // 3. 
Run `cargo run` to make sure it can still find the library in // ~/lib. // // This should be equivalent to having the library in /usr/local/lib. let p2 = project() .at("bar") .file("Cargo.toml", &basic_bin_manifest("bar")) .file( "src/main.rs", r#" extern { fn something_shared(); } fn main() { unsafe { something_shared(); } } "#, ) .file( "build.rs", &format!( r#" fn main() {{ println!("cargo:rustc-link-lib=foo"); println!("cargo:rustc-link-search={}"); }} "#, p.target_debug_dir().display() ), ) .build(); p2.cargo("build").run(); p2.cargo("test").run(); let libdir = paths::home().join("lib"); fs::create_dir(&libdir).unwrap(); fs::rename( p.target_debug_dir().join("libfoo.dylib"), libdir.join("libfoo.dylib"), ) .unwrap(); p.root().rm_rf(); const VAR: &str = "DYLD_FALLBACK_LIBRARY_PATH"; // Reset DYLD_FALLBACK_LIBRARY_PATH so that we don't inherit anything that // was set by the cargo that invoked the test. p2.cargo("run").env_remove(VAR).run(); p2.cargo("test").env_remove(VAR).run(); // Ensure this still works when DYLD_FALLBACK_LIBRARY_PATH has // a value set. p2.cargo("run").env(VAR, &libdir).run(); p2.cargo("test").env(VAR, &libdir).run(); } cargo-0.66.0/tests/testsuite/rust_version.rs000066400000000000000000000115271432416201200212020ustar00rootroot00000000000000//! Tests for targets with `rust-version`. 
use cargo_test_support::{project, registry::Package}; #[cargo_test] fn rust_version_satisfied() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] rust-version = "1.1.1" [[bin]] name = "foo" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build").run(); p.cargo("build --ignore-rust-version").run(); } #[cargo_test] fn rust_version_bad_caret() { project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] rust-version = "^1.43" [[bin]] name = "foo" "#, ) .file("src/main.rs", "fn main() {}") .build() .cargo("build") .with_status(101) .with_stderr( "error: failed to parse manifest at `[..]`\n\n\ Caused by:\n `rust-version` must be a value like \"1.32\"", ) .run(); } #[cargo_test] fn rust_version_bad_pre_release() { project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] rust-version = "1.43-beta.1" [[bin]] name = "foo" "#, ) .file("src/main.rs", "fn main() {}") .build() .cargo("build") .with_status(101) .with_stderr( "error: failed to parse manifest at `[..]`\n\n\ Caused by:\n `rust-version` must be a value like \"1.32\"", ) .run(); } #[cargo_test] fn rust_version_bad_nonsense() { project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] rust-version = "foodaddle" [[bin]] name = "foo" "#, ) .file("src/main.rs", "fn main() {}") .build() .cargo("build") .with_status(101) .with_stderr( "error: failed to parse manifest at `[..]`\n\n\ Caused by:\n `rust-version` must be a value like \"1.32\"", ) .run(); } #[cargo_test] fn rust_version_too_high() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] rust-version = "1.9876.0" [[bin]] name = "foo" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("build") .with_status(101) .with_stderr( "error: package `foo v0.0.1 ([..])` cannot be built because it requires \ rustc 1.9876.0 or newer, while the currently active rustc 
version is [..]\n\n", ) .run(); p.cargo("build --ignore-rust-version").run(); } #[cargo_test] fn rust_version_dependency_fails() { Package::new("bar", "0.0.1") .rust_version("1.2345.0") .file("src/lib.rs", "fn other_stuff() {}") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.0.1" "#, ) .file("src/main.rs", "fn main(){}") .build(); p.cargo("build") .with_status(101) .with_stderr( " Updating `[..]` index\n \ Downloading crates ...\n \ Downloaded bar v0.0.1 (registry `[..]`)\n\ error: package `bar v0.0.1` cannot be built because it requires \ rustc 1.2345.0 or newer, while the currently active rustc version is [..]\n\ Either upgrade to rustc 1.2345.0 or newer, or use\n\ cargo update -p bar@0.0.1 --precise ver\n\ where `ver` is the latest version of `bar` supporting rustc [..]", ) .run(); p.cargo("build --ignore-rust-version").run(); } #[cargo_test] fn rust_version_older_than_edition() { project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] rust-version = "1.1" edition = "2018" [[bin]] name = "foo" "#, ) .file("src/main.rs", "fn main() {}") .build() .cargo("build") .with_status(101) .with_stderr_contains(" rust-version 1.1 is older than first version (1.31.0) required by the specified edition (2018)", ) .run(); } cargo-0.66.0/tests/testsuite/rustc.rs000066400000000000000000000537441432416201200176070ustar00rootroot00000000000000//! Tests for the `cargo rustc` command. 
use cargo_test_support::{basic_bin_manifest, basic_lib_manifest, basic_manifest, project}; const CARGO_RUSTC_ERROR: &str = "[ERROR] extra arguments to `rustc` can only be passed to one target, consider filtering the package by passing, e.g., `--lib` or `--bin NAME` to specify a single target"; #[cargo_test] fn build_lib_for_foo() { let p = project() .file("src/main.rs", "fn main() {}") .file("src/lib.rs", r#" "#) .build(); p.cargo("rustc --lib -v") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \ --emit=[..]link[..]-C debuginfo=2 \ -C metadata=[..] \ --out-dir [..] \ -L dependency=[CWD]/target/debug/deps` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn lib() { let p = project() .file("src/main.rs", "fn main() {}") .file("src/lib.rs", r#" "#) .build(); p.cargo("rustc --lib -v -- -C debug-assertions=off") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \ --emit=[..]link[..]-C debuginfo=2 \ -C debug-assertions=off \ -C metadata=[..] \ --out-dir [..] \ -L dependency=[CWD]/target/debug/deps` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn build_main_and_allow_unstable_options() { let p = project() .file("src/main.rs", "fn main() {}") .file("src/lib.rs", r#" "#) .build(); p.cargo("rustc -v --bin foo -- -C debug-assertions") .with_stderr(format!( "\ [COMPILING] {name} v{version} ([CWD]) [RUNNING] `rustc --crate-name {name} src/lib.rs [..]--crate-type lib \ --emit=[..]link[..]-C debuginfo=2 \ -C metadata=[..] \ --out-dir [..] \ -L dependency=[CWD]/target/debug/deps` [RUNNING] `rustc --crate-name {name} src/main.rs [..]--crate-type bin \ --emit=[..]link[..]-C debuginfo=2 \ -C debug-assertions \ -C metadata=[..] \ --out-dir [..] 
\ -L dependency=[CWD]/target/debug/deps \ --extern {name}=[CWD]/target/debug/deps/lib{name}-[..].rlib` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", name = "foo", version = "0.0.1" )) .run(); } #[cargo_test] fn fails_when_trying_to_build_main_and_lib_with_args() { let p = project() .file("src/main.rs", "fn main() {}") .file("src/lib.rs", r#" "#) .build(); p.cargo("rustc -v -- -C debug-assertions") .with_status(101) .with_stderr(CARGO_RUSTC_ERROR) .run(); } #[cargo_test] fn build_with_args_to_one_of_multiple_binaries() { let p = project() .file("src/bin/foo.rs", "fn main() {}") .file("src/bin/bar.rs", "fn main() {}") .file("src/bin/baz.rs", "fn main() {}") .file("src/lib.rs", r#" "#) .build(); p.cargo("rustc -v --bin bar -- -C debug-assertions") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link[..]\ -C debuginfo=2 -C metadata=[..] \ --out-dir [..]` [RUNNING] `rustc --crate-name bar src/bin/bar.rs [..]--crate-type bin --emit=[..]link[..]\ -C debuginfo=2 -C debug-assertions [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn fails_with_args_to_all_binaries() { let p = project() .file("src/bin/foo.rs", "fn main() {}") .file("src/bin/bar.rs", "fn main() {}") .file("src/bin/baz.rs", "fn main() {}") .file("src/lib.rs", r#" "#) .build(); p.cargo("rustc -v -- -C debug-assertions") .with_status(101) .with_stderr(CARGO_RUSTC_ERROR) .run(); } #[cargo_test] fn fails_with_crate_type_to_multi_binaries() { let p = project() .file("src/bin/foo.rs", "fn main() {}") .file("src/bin/bar.rs", "fn main() {}") .file("src/bin/baz.rs", "fn main() {}") .file("src/lib.rs", r#" "#) .build(); p.cargo("rustc --crate-type lib") .with_status(101) .with_stderr( "[ERROR] crate types to rustc can only be passed to one target, consider filtering the package by passing, e.g., `--lib` or `--example` to specify a single target", ) .run(); } #[cargo_test] fn fails_with_crate_type_to_multi_examples() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [[example]] name = "ex1" crate-type = ["rlib"] [[example]] name = "ex2" crate-type = ["rlib"] "#, ) .file("src/lib.rs", "") .file("examples/ex1.rs", "") .file("examples/ex2.rs", "") .build(); p.cargo("rustc -v --example ex1 --example ex2 --crate-type lib,cdylib") .with_status(101) .with_stderr( "[ERROR] crate types to rustc can only be passed to one target, consider filtering the package by passing, e.g., `--lib` or `--example` to specify a single target", ) .run(); } #[cargo_test] fn fails_with_crate_type_to_binary() { let p = project().file("src/bin/foo.rs", "fn main() {}").build(); p.cargo("rustc --crate-type lib") .with_status(101) .with_stderr( "[ERROR] crate types can only be specified for libraries and example libraries. 
Binaries, tests, and benchmarks are always the `bin` crate type", ) .run(); } #[cargo_test] fn build_with_crate_type_for_foo() { let p = project().file("src/lib.rs", "").build(); p.cargo("rustc -v --crate-type cdylib") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type cdylib [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn build_with_crate_type_for_foo_with_deps() { let p = project() .file( "src/lib.rs", r#" extern crate a; pub fn foo() { a::hello(); } "#, ) .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] a = { path = "a" } "#, ) .file("a/Cargo.toml", &basic_manifest("a", "0.1.0")) .file("a/src/lib.rs", "pub fn hello() {}") .build(); p.cargo("rustc -v --crate-type cdylib") .with_stderr( "\ [COMPILING] a v0.1.0 ([CWD]/a) [RUNNING] `rustc --crate-name a a/src/lib.rs [..]--crate-type lib [..] [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type cdylib [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn build_with_crate_types_for_foo() { let p = project().file("src/lib.rs", "").build(); p.cargo("rustc -v --crate-type lib,cdylib") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib,cdylib [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn build_with_crate_type_to_example() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [[example]] name = "ex" crate-type = ["rlib"] "#, ) .file("src/lib.rs", "") .file("examples/ex.rs", "") .build(); p.cargo("rustc -v --example ex --crate-type cdylib") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib [..] [RUNNING] `rustc --crate-name ex examples/ex.rs [..]--crate-type cdylib [..] 
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn build_with_crate_types_to_example() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [[example]] name = "ex" crate-type = ["rlib"] "#, ) .file("src/lib.rs", "") .file("examples/ex.rs", "") .build(); p.cargo("rustc -v --example ex --crate-type lib,cdylib") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib [..] [RUNNING] `rustc --crate-name ex examples/ex.rs [..]--crate-type lib,cdylib [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn build_with_crate_types_to_one_of_multi_examples() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [[example]] name = "ex1" crate-type = ["rlib"] [[example]] name = "ex2" crate-type = ["rlib"] "#, ) .file("src/lib.rs", "") .file("examples/ex1.rs", "") .file("examples/ex2.rs", "") .build(); p.cargo("rustc -v --example ex1 --crate-type lib,cdylib") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib [..] [RUNNING] `rustc --crate-name ex1 examples/ex1.rs [..]--crate-type lib,cdylib [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn build_with_args_to_one_of_multiple_tests() { let p = project() .file("tests/foo.rs", r#" "#) .file("tests/bar.rs", r#" "#) .file("tests/baz.rs", r#" "#) .file("src/lib.rs", r#" "#) .build(); p.cargo("rustc -v --test bar -- -C debug-assertions") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link[..]\ -C debuginfo=2 -C metadata=[..] \ --out-dir [..]` [RUNNING] `rustc --crate-name bar tests/bar.rs [..]--emit=[..]link[..]-C debuginfo=2 \ -C debug-assertions [..]--test[..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn build_foo_with_bar_dependency() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" "#, ) .file("src/main.rs", "extern crate bar; fn main() { bar::baz() }") .build(); let _bar = project() .at("bar") .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "pub fn baz() {}") .build(); foo.cargo("rustc -v -- -C debug-assertions") .with_stderr( "\ [COMPILING] bar v0.1.0 ([..]) [RUNNING] `[..] -C debuginfo=2 [..]` [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `[..] -C debuginfo=2 -C debug-assertions [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn build_only_bar_dependency() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" "#, ) .file("src/main.rs", "extern crate bar; fn main() { bar::baz() }") .build(); let _bar = project() .at("bar") .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("src/lib.rs", "pub fn baz() {}") .build(); foo.cargo("rustc -v -p bar -- -C debug-assertions") .with_stderr( "\ [COMPILING] bar v0.1.0 ([..]) [RUNNING] `rustc --crate-name bar [..]--crate-type lib [..] -C debug-assertions [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn targets_selected_default() { let p = project().file("src/main.rs", "fn main() {}").build(); p.cargo("rustc -v") // bin .with_stderr_contains( "[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin \ --emit=[..]link[..]", ) // bench .with_stderr_does_not_contain( "[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=[..]link \ -C opt-level=3 --test [..]", ) // unit test .with_stderr_does_not_contain( "[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=[..]link \ -C debuginfo=2 --test [..]", ) .run(); } #[cargo_test] fn targets_selected_all() { let p = project().file("src/main.rs", "fn main() {}").build(); p.cargo("rustc -v --all-targets") // bin .with_stderr_contains( "[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin \ --emit=[..]link[..]", ) // unit test .with_stderr_contains( "[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=[..]link[..]\ -C debuginfo=2 --test [..]", ) .run(); } #[cargo_test] fn fail_with_multiple_packages() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" [dependencies.baz] path = "../baz" "#, ) .file("src/main.rs", "fn main() {}") .build(); let _bar = project() .at("bar") .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) .file( "src/main.rs", r#" fn main() { if cfg!(flag = "1") { println!("Yeah from bar!"); } } "#, ) .build(); let _baz = project() .at("baz") .file("Cargo.toml", &basic_manifest("baz", "0.1.0")) .file( "src/main.rs", r#" fn main() { if cfg!(flag = "1") { println!("Yeah from baz!"); } } "#, ) .build(); foo.cargo("rustc -v -p bar -p baz") .with_status(1) .with_stderr_contains( "\ error: The argument '--package [...]' was provided more than once, \ but cannot be used multiple times ", ) .run(); } #[cargo_test] fn fail_with_glob() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) 
.file("bar/src/lib.rs", "pub fn bar() { break_the_build(); }") .build(); p.cargo("rustc -p '*z'") .with_status(101) .with_stderr("[ERROR] Glob patterns on package selection are not supported.") .run(); } #[cargo_test] fn rustc_with_other_profile() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dev-dependencies] a = { path = "a" } "#, ) .file( "src/main.rs", r#" #[cfg(test)] extern crate a; #[test] fn foo() {} "#, ) .file("a/Cargo.toml", &basic_manifest("a", "0.1.0")) .file("a/src/lib.rs", "") .build(); p.cargo("rustc --profile test").run(); } #[cargo_test] fn rustc_fingerprint() { // Verify that the fingerprint includes the rustc args. let p = project() .file("Cargo.toml", &basic_lib_manifest("foo")) .file("src/lib.rs", "") .build(); p.cargo("rustc -v -- -C debug-assertions") .with_stderr( "\ [COMPILING] foo [..] [RUNNING] `rustc [..]-C debug-assertions [..] [FINISHED] [..] ", ) .run(); p.cargo("rustc -v -- -C debug-assertions") .with_stderr( "\ [FRESH] foo [..] [FINISHED] [..] ", ) .run(); p.cargo("rustc -v") .with_stderr_does_not_contain("-C debug-assertions") .with_stderr( "\ [COMPILING] foo [..] [RUNNING] `rustc [..] [FINISHED] [..] ", ) .run(); p.cargo("rustc -v") .with_stderr( "\ [FRESH] foo [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn rustc_test_with_implicit_bin() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file( "src/main.rs", r#" #[cfg(foo)] fn f() { compile_fail!("Foo shouldn't be set."); } fn main() {} "#, ) .file( "tests/test1.rs", r#" #[cfg(not(foo))] fn f() { compile_fail!("Foo should be set."); } "#, ) .build(); p.cargo("rustc --test test1 -v -- --cfg foo") .with_stderr_contains( "\ [RUNNING] `rustc --crate-name test1 tests/test1.rs [..] --cfg foo [..] ", ) .with_stderr_contains( "\ [RUNNING] `rustc --crate-name foo src/main.rs [..] 
", ) .run(); } #[cargo_test] fn rustc_with_print_cfg_single_target() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/main.rs", r#"fn main() {} "#) .build(); p.cargo("rustc -Z unstable-options --target x86_64-pc-windows-msvc --print cfg") .masquerade_as_nightly_cargo(&["print"]) .with_stdout_contains("debug_assertions") .with_stdout_contains("target_arch=\"x86_64\"") .with_stdout_contains("target_endian=\"little\"") .with_stdout_contains("target_env=\"msvc\"") .with_stdout_contains("target_family=\"windows\"") .with_stdout_contains("target_os=\"windows\"") .with_stdout_contains("target_pointer_width=\"64\"") .with_stdout_contains("target_vendor=\"pc\"") .with_stdout_contains("windows") .run(); } #[cargo_test] fn rustc_with_print_cfg_multiple_targets() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/main.rs", r#"fn main() {} "#) .build(); p.cargo("rustc -Z unstable-options --target x86_64-pc-windows-msvc --target i686-unknown-linux-gnu --print cfg") .masquerade_as_nightly_cargo(&["print"]) .with_stdout_contains("debug_assertions") .with_stdout_contains("target_arch=\"x86_64\"") .with_stdout_contains("target_endian=\"little\"") .with_stdout_contains("target_env=\"msvc\"") .with_stdout_contains("target_family=\"windows\"") .with_stdout_contains("target_os=\"windows\"") .with_stdout_contains("target_pointer_width=\"64\"") .with_stdout_contains("target_vendor=\"pc\"") .with_stdout_contains("windows") .with_stdout_contains("target_env=\"gnu\"") .with_stdout_contains("target_family=\"unix\"") .with_stdout_contains("target_pointer_width=\"32\"") .with_stdout_contains("target_vendor=\"unknown\"") .with_stdout_contains("target_os=\"linux\"") .with_stdout_contains("unix") .run(); } #[cargo_test] fn rustc_with_print_cfg_rustflags_env_var() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/main.rs", r#"fn main() {} "#) .build(); p.cargo("rustc -Z unstable-options --target 
x86_64-pc-windows-msvc --print cfg") .masquerade_as_nightly_cargo(&["print"]) .env("RUSTFLAGS", "-C target-feature=+crt-static") .with_stdout_contains("debug_assertions") .with_stdout_contains("target_arch=\"x86_64\"") .with_stdout_contains("target_endian=\"little\"") .with_stdout_contains("target_env=\"msvc\"") .with_stdout_contains("target_family=\"windows\"") .with_stdout_contains("target_feature=\"crt-static\"") .with_stdout_contains("target_os=\"windows\"") .with_stdout_contains("target_pointer_width=\"64\"") .with_stdout_contains("target_vendor=\"pc\"") .with_stdout_contains("windows") .run(); } #[cargo_test] fn rustc_with_print_cfg_config_toml() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file( ".cargo/config.toml", r#" [target.x86_64-pc-windows-msvc] rustflags = ["-C", "target-feature=+crt-static"] "#, ) .file("src/main.rs", r#"fn main() {} "#) .build(); p.cargo("rustc -Z unstable-options --target x86_64-pc-windows-msvc --print cfg") .masquerade_as_nightly_cargo(&["print"]) .env("RUSTFLAGS", "-C target-feature=+crt-static") .with_stdout_contains("debug_assertions") .with_stdout_contains("target_arch=\"x86_64\"") .with_stdout_contains("target_endian=\"little\"") .with_stdout_contains("target_env=\"msvc\"") .with_stdout_contains("target_family=\"windows\"") .with_stdout_contains("target_feature=\"crt-static\"") .with_stdout_contains("target_os=\"windows\"") .with_stdout_contains("target_pointer_width=\"64\"") .with_stdout_contains("target_vendor=\"pc\"") .with_stdout_contains("windows") .run(); } cargo-0.66.0/tests/testsuite/rustc_info_cache.rs000066400000000000000000000147021432416201200217340ustar00rootroot00000000000000//! Tests for the cache file for the rustc version info. use cargo_test_support::{basic_bin_manifest, paths::CargoPathExt}; use cargo_test_support::{basic_manifest, project}; use std::env; const MISS: &str = "[..] 
rustc info cache miss[..]"; const HIT: &str = "[..]rustc info cache hit[..]"; const UPDATE: &str = "[..]updated rustc info cache[..]"; #[cargo_test] fn rustc_info_cache() { let p = project() .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .build(); p.cargo("build") .env("CARGO_LOG", "cargo::util::rustc=debug") .with_stderr_contains("[..]failed to read rustc info cache[..]") .with_stderr_contains(MISS) .with_stderr_does_not_contain(HIT) .with_stderr_contains(UPDATE) .run(); p.cargo("build") .env("CARGO_LOG", "cargo::util::rustc=debug") .with_stderr_contains("[..]reusing existing rustc info cache[..]") .with_stderr_contains(HIT) .with_stderr_does_not_contain(MISS) .with_stderr_does_not_contain(UPDATE) .run(); p.cargo("build") .env("CARGO_LOG", "cargo::util::rustc=debug") .env("CARGO_CACHE_RUSTC_INFO", "0") .with_stderr_contains("[..]rustc info cache disabled[..]") .with_stderr_does_not_contain(UPDATE) .run(); let other_rustc = { let p = project() .at("compiler") .file("Cargo.toml", &basic_manifest("compiler", "0.1.0")) .file( "src/main.rs", r#" use std::process::Command; use std::env; fn main() { let mut cmd = Command::new("rustc"); for arg in env::args_os().skip(1) { cmd.arg(arg); } std::process::exit(cmd.status().unwrap().code().unwrap()); } "#, ) .build(); p.cargo("build").run(); p.root() .join("target/debug/compiler") .with_extension(env::consts::EXE_EXTENSION) }; p.cargo("build") .env("CARGO_LOG", "cargo::util::rustc=debug") .env("RUSTC", other_rustc.display().to_string()) .with_stderr_contains("[..]different compiler, creating new rustc info cache[..]") .with_stderr_contains(MISS) .with_stderr_does_not_contain(HIT) .with_stderr_contains(UPDATE) .run(); p.cargo("build") .env("CARGO_LOG", "cargo::util::rustc=debug") .env("RUSTC", other_rustc.display().to_string()) .with_stderr_contains("[..]reusing existing rustc info cache[..]") .with_stderr_contains(HIT) .with_stderr_does_not_contain(MISS) .with_stderr_does_not_contain(UPDATE) .run(); 
other_rustc.move_into_the_future(); p.cargo("build") .env("CARGO_LOG", "cargo::util::rustc=debug") .env("RUSTC", other_rustc.display().to_string()) .with_stderr_contains("[..]different compiler, creating new rustc info cache[..]") .with_stderr_contains(MISS) .with_stderr_does_not_contain(HIT) .with_stderr_contains(UPDATE) .run(); p.cargo("build") .env("CARGO_LOG", "cargo::util::rustc=debug") .env("RUSTC", other_rustc.display().to_string()) .with_stderr_contains("[..]reusing existing rustc info cache[..]") .with_stderr_contains(HIT) .with_stderr_does_not_contain(MISS) .with_stderr_does_not_contain(UPDATE) .run(); } #[cargo_test] fn rustc_info_cache_with_wrappers() { let wrapper_project = project() .at("wrapper") .file("Cargo.toml", &basic_bin_manifest("wrapper")) .file("src/main.rs", r#"fn main() { }"#) .build(); let wrapper = wrapper_project.bin("wrapper"); let p = project() .file( "Cargo.toml", r#" [package] name = "test" version = "0.0.0" authors = [] [workspace] "#, ) .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .build(); for &wrapper_env in ["RUSTC_WRAPPER", "RUSTC_WORKSPACE_WRAPPER"].iter() { p.cargo("clean").with_status(0).run(); wrapper_project.change_file( "src/main.rs", r#" fn main() { let mut args = std::env::args_os(); let _me = args.next().unwrap(); let rustc = args.next().unwrap(); let status = std::process::Command::new(rustc).args(args).status().unwrap(); std::process::exit(if status.success() { 0 } else { 1 }) } "#, ); wrapper_project.cargo("build").with_status(0).run(); p.cargo("build") .env("CARGO_LOG", "cargo::util::rustc=debug") .env(wrapper_env, &wrapper) .with_stderr_contains("[..]failed to read rustc info cache[..]") .with_stderr_contains(MISS) .with_stderr_contains(UPDATE) .with_stderr_does_not_contain(HIT) .with_status(0) .run(); p.cargo("build") .env("CARGO_LOG", "cargo::util::rustc=debug") .env(wrapper_env, &wrapper) .with_stderr_contains("[..]reusing existing rustc info cache[..]") .with_stderr_contains(HIT) 
.with_stderr_does_not_contain(UPDATE) .with_stderr_does_not_contain(MISS) .with_status(0) .run(); wrapper_project.change_file("src/main.rs", r#"fn main() { panic!() }"#); wrapper_project.cargo("build").with_status(0).run(); p.cargo("build") .env("CARGO_LOG", "cargo::util::rustc=debug") .env(wrapper_env, &wrapper) .with_stderr_contains("[..]different compiler, creating new rustc info cache[..]") .with_stderr_contains(MISS) .with_stderr_contains(UPDATE) .with_stderr_does_not_contain(HIT) .with_status(101) .run(); p.cargo("build") .env("CARGO_LOG", "cargo::util::rustc=debug") .env(wrapper_env, &wrapper) .with_stderr_contains("[..]reusing existing rustc info cache[..]") .with_stderr_contains(HIT) .with_stderr_does_not_contain(UPDATE) .with_stderr_does_not_contain(MISS) .with_status(101) .run(); } } cargo-0.66.0/tests/testsuite/rustdoc.rs000066400000000000000000000141141432416201200201160ustar00rootroot00000000000000//! Tests for the `cargo rustdoc` command. use cargo_test_support::{basic_manifest, cross_compile, project}; #[cargo_test] fn rustdoc_simple() { let p = project().file("src/lib.rs", "").build(); p.cargo("rustdoc -v") .with_stderr( "\ [DOCUMENTING] foo v0.0.1 ([CWD]) [RUNNING] `rustdoc [..]--crate-name foo src/lib.rs [..]\ -o [CWD]/target/doc \ [..] \ -L dependency=[CWD]/target/debug/deps [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn rustdoc_args() { let p = project().file("src/lib.rs", "").build(); p.cargo("rustdoc -v -- --cfg=foo") .with_stderr( "\ [DOCUMENTING] foo v0.0.1 ([CWD]) [RUNNING] `rustdoc [..]--crate-name foo src/lib.rs [..]\ -o [CWD]/target/doc \ [..] \ --cfg=foo \ -C metadata=[..] \ -L dependency=[CWD]/target/debug/deps [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn rustdoc_binary_args_passed() { let p = project().file("src/main.rs", "").build(); p.cargo("rustdoc -v") .arg("--") .arg("--markdown-no-toc") .with_stderr_contains("[RUNNING] `rustdoc [..] --markdown-no-toc[..]`") .run(); } #[cargo_test] fn rustdoc_foo_with_bar_dependency() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" "#, ) .file("src/lib.rs", "extern crate bar; pub fn foo() {}") .build(); let _bar = project() .at("bar") .file("Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("src/lib.rs", "pub fn baz() {}") .build(); foo.cargo("rustdoc -v -- --cfg=foo") .with_stderr( "\ [CHECKING] bar v0.0.1 ([..]) [RUNNING] `rustc [..]bar/src/lib.rs [..]` [DOCUMENTING] foo v0.0.1 ([CWD]) [RUNNING] `rustdoc [..]--crate-name foo src/lib.rs [..]\ -o [CWD]/target/doc \ [..] \ --cfg=foo \ -C metadata=[..] \ -L dependency=[CWD]/target/debug/deps \ --extern [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn rustdoc_only_bar_dependency() { let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" "#, ) .file("src/main.rs", "extern crate bar; fn main() { bar::baz() }") .build(); let _bar = project() .at("bar") .file("Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("src/lib.rs", "pub fn baz() {}") .build(); foo.cargo("rustdoc -v -p bar -- --cfg=foo") .with_stderr( "\ [DOCUMENTING] bar v0.0.1 ([..]) [RUNNING] `rustdoc [..]--crate-name bar [..]bar/src/lib.rs [..]\ -o [CWD]/target/doc \ [..] \ --cfg=foo \ -C metadata=[..] \ -L dependency=[CWD]/target/debug/deps [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn rustdoc_same_name_documents_lib() { let p = project() .file("src/main.rs", "fn main() {}") .file("src/lib.rs", r#" "#) .build(); p.cargo("rustdoc -v -- --cfg=foo") .with_stderr( "\ [DOCUMENTING] foo v0.0.1 ([..]) [RUNNING] `rustdoc [..]--crate-name foo src/lib.rs [..]\ -o [CWD]/target/doc \ [..] \ --cfg=foo \ -C metadata=[..] \ -L dependency=[CWD]/target/debug/deps [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn features() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [features] quux = [] "#, ) .file("src/lib.rs", "") .build(); p.cargo("rustdoc --verbose --features quux") .with_stderr_contains("[..]feature=[..]quux[..]") .run(); } #[cargo_test] fn proc_macro_crate_type() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [lib] proc-macro = true "#, ) .file("src/lib.rs", "") .build(); p.cargo("rustdoc --verbose") .with_stderr_contains( "\ [RUNNING] `rustdoc --crate-type proc-macro [..]` ", ) .run(); } #[cargo_test] fn rustdoc_target() { if cross_compile::disabled() { return; } let p = project().file("src/lib.rs", "").build(); p.cargo("rustdoc --verbose --target") .arg(cross_compile::alternate()) .with_stderr(format!( "\ [DOCUMENTING] foo v0.0.1 ([..]) [RUNNING] `rustdoc [..]--crate-name foo src/lib.rs [..]\ --target {target} \ -o [CWD]/target/{target}/doc \ [..] 
\ -L dependency=[CWD]/target/{target}/debug/deps \ -L dependency=[CWD]/target/debug/deps[..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", target = cross_compile::alternate() )) .run(); } #[cargo_test] fn fail_with_glob() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() { break_the_build(); }") .build(); p.cargo("rustdoc -p '*z'") .with_status(101) .with_stderr("[ERROR] Glob patterns on package selection are not supported.") .run(); } cargo-0.66.0/tests/testsuite/rustdoc_extern_html.rs000066400000000000000000000256461432416201200225430ustar00rootroot00000000000000//! Tests for the -Zrustdoc-map feature. use cargo_test_support::registry::{self, Package}; use cargo_test_support::{paths, project, Project}; fn basic_project() -> Project { Package::new("bar", "1.0.0") .file("src/lib.rs", "pub struct Straw;") .publish(); project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [dependencies] bar = "1.0" "#, ) .file( "src/lib.rs", r#" pub fn myfun() -> Option { None } "#, ) .build() } #[cargo_test] fn ignores_on_stable() { // Requires -Zrustdoc-map to use. let p = basic_project(); p.cargo("doc -v --no-deps") .with_stderr_does_not_contain("[..]--extern-html-root-url[..]") .run(); } #[cargo_test(nightly, reason = "--extern-html-root-url is unstable")] fn simple() { // Basic test that it works with crates.io. 
let p = basic_project(); p.cargo("doc -v --no-deps -Zrustdoc-map") .masquerade_as_nightly_cargo(&["rustdoc-map"]) .with_stderr_contains( "[RUNNING] `rustdoc [..]--crate-name foo [..]bar=https://docs.rs/bar/1.0.0/[..]", ) .run(); let myfun = p.read_file("target/doc/foo/fn.myfun.html"); assert!(myfun.contains(r#"href="https://docs.rs/bar/1.0.0/bar/struct.Straw.html""#)); } #[ignore = "Broken, temporarily disabled until https://github.com/rust-lang/rust/pull/82776 is resolved."] #[cargo_test] // #[cargo_test(nightly, reason = "--extern-html-root-url is unstable")] fn std_docs() { // Mapping std docs somewhere else. // For local developers, skip this test if docs aren't installed. let docs = std::path::Path::new(&paths::sysroot()).join("share/doc/rust/html"); if !docs.exists() { if cargo_util::is_ci() { panic!("std docs are not installed, check that the rust-docs component is installed"); } else { eprintln!( "documentation not found at {}, \ skipping test (run `rustdoc component add rust-docs` to install", docs.display() ); return; } } let p = basic_project(); p.change_file( ".cargo/config", r#" [doc.extern-map] std = "local" "#, ); p.cargo("doc -v --no-deps -Zrustdoc-map") .masquerade_as_nightly_cargo(&["rustdoc-map"]) .with_stderr_contains("[RUNNING] `rustdoc [..]--crate-name foo [..]std=file://[..]") .run(); let myfun = p.read_file("target/doc/foo/fn.myfun.html"); assert!(myfun.contains(r#"share/doc/rust/html/core/option/enum.Option.html""#)); p.change_file( ".cargo/config", r#" [doc.extern-map] std = "https://example.com/rust/" "#, ); p.cargo("doc -v --no-deps -Zrustdoc-map") .masquerade_as_nightly_cargo(&["rustdoc-map"]) .with_stderr_contains( "[RUNNING] `rustdoc [..]--crate-name foo [..]std=https://example.com/rust/[..]", ) .run(); let myfun = p.read_file("target/doc/foo/fn.myfun.html"); assert!(myfun.contains(r#"href="https://example.com/rust/core/option/enum.Option.html""#)); } #[cargo_test(nightly, reason = "--extern-html-root-url is unstable")] fn 
renamed_dep() { // Handles renamed dependencies. Package::new("bar", "1.0.0") .file("src/lib.rs", "pub struct Straw;") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [dependencies] groovy = { version = "1.0", package = "bar" } "#, ) .file( "src/lib.rs", r#" pub fn myfun() -> Option { None } "#, ) .build(); p.cargo("doc -v --no-deps -Zrustdoc-map") .masquerade_as_nightly_cargo(&["rustdoc-map"]) .with_stderr_contains( "[RUNNING] `rustdoc [..]--crate-name foo [..]bar=https://docs.rs/bar/1.0.0/[..]", ) .run(); let myfun = p.read_file("target/doc/foo/fn.myfun.html"); assert!(myfun.contains(r#"href="https://docs.rs/bar/1.0.0/bar/struct.Straw.html""#)); } #[cargo_test(nightly, reason = "--extern-html-root-url is unstable")] fn lib_name() { // Handles lib name != package name. Package::new("bar", "1.0.0") .file( "Cargo.toml", r#" [package] name = "bar" version = "1.0.0" [lib] name = "rumpelstiltskin" "#, ) .file("src/lib.rs", "pub struct Straw;") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "1.0" "#, ) .file( "src/lib.rs", r#" pub fn myfun() -> Option { None } "#, ) .build(); p.cargo("doc -v --no-deps -Zrustdoc-map") .masquerade_as_nightly_cargo(&["rustdoc-map"]) .with_stderr_contains( "[RUNNING] `rustdoc [..]--crate-name foo [..]rumpelstiltskin=https://docs.rs/bar/1.0.0/[..]", ) .run(); let myfun = p.read_file("target/doc/foo/fn.myfun.html"); assert!(myfun.contains(r#"href="https://docs.rs/bar/1.0.0/rumpelstiltskin/struct.Straw.html""#)); } #[cargo_test(nightly, reason = "--extern-html-root-url is unstable")] fn alt_registry() { // Supports other registry names. 
registry::alt_init(); Package::new("bar", "1.0.0") .alternative(true) .file( "src/lib.rs", r#" extern crate baz; pub struct Queen; pub use baz::King; "#, ) .registry_dep("baz", "1.0") .publish(); Package::new("baz", "1.0.0") .alternative(true) .file("src/lib.rs", "pub struct King;") .publish(); Package::new("grimm", "1.0.0") .file("src/lib.rs", "pub struct Gold;") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [dependencies] bar = { version = "1.0", registry="alternative" } grimm = "1.0" "#, ) .file( "src/lib.rs", r#" pub fn queen() -> bar::Queen { bar::Queen } pub fn king() -> bar::King { bar::King } pub fn gold() -> grimm::Gold { grimm::Gold } "#, ) .file( ".cargo/config", r#" [doc.extern-map.registries] alternative = "https://example.com/{pkg_name}/{version}/" crates-io = "https://docs.rs/" "#, ) .build(); p.cargo("doc -v --no-deps -Zrustdoc-map") .masquerade_as_nightly_cargo(&["rustdoc-map"]) .with_stderr_contains( "[RUNNING] `rustdoc [..]--crate-name foo \ [..]bar=https://example.com/bar/1.0.0/[..]grimm=https://docs.rs/grimm/1.0.0/[..]", ) .run(); let queen = p.read_file("target/doc/foo/fn.queen.html"); assert!(queen.contains(r#"href="https://example.com/bar/1.0.0/bar/struct.Queen.html""#)); // The king example fails to link. Rustdoc seems to want the origin crate // name (baz) for re-exports. There are many issues in the issue tracker // for rustdoc re-exports, so I'm not sure, but I think this is maybe a // rustdoc issue. Alternatively, Cargo could provide mappings for all // transitive dependencies to fix this. let king = p.read_file("target/doc/foo/fn.king.html"); assert!(king.contains(r#"-> King"#)); let gold = p.read_file("target/doc/foo/fn.gold.html"); assert!(gold.contains(r#"href="https://docs.rs/grimm/1.0.0/grimm/struct.Gold.html""#)); } #[cargo_test(nightly, reason = "--extern-html-root-url is unstable")] fn multiple_versions() { // What happens when there are multiple versions. 
// NOTE: This is currently broken behavior. Rustdoc does not provide a way // to match renamed dependencies. Package::new("bar", "1.0.0") .file("src/lib.rs", "pub struct Spin;") .publish(); Package::new("bar", "2.0.0") .file("src/lib.rs", "pub struct Straw;") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [dependencies] bar = "1.0" bar2 = {version="2.0", package="bar"} "#, ) .file( "src/lib.rs", " pub fn fn1() -> bar::Spin {bar::Spin} pub fn fn2() -> bar2::Straw {bar2::Straw} ", ) .build(); p.cargo("doc -v --no-deps -Zrustdoc-map") .masquerade_as_nightly_cargo(&["rustdoc-map"]) .with_stderr_contains( "[RUNNING] `rustdoc [..]--crate-name foo \ [..]bar=https://docs.rs/bar/1.0.0/[..]bar=https://docs.rs/bar/2.0.0/[..]", ) .run(); let fn1 = p.read_file("target/doc/foo/fn.fn1.html"); // This should be 1.0.0, rustdoc seems to use the last entry when there // are duplicates. assert!(fn1.contains(r#"href="https://docs.rs/bar/2.0.0/bar/struct.Spin.html""#)); let fn2 = p.read_file("target/doc/foo/fn.fn2.html"); assert!(fn2.contains(r#"href="https://docs.rs/bar/2.0.0/bar/struct.Straw.html""#)); } #[cargo_test(nightly, reason = "--extern-html-root-url is unstable")] fn rebuilds_when_changing() { // Make sure it rebuilds if the map changes. let p = basic_project(); p.cargo("doc -v --no-deps -Zrustdoc-map") .masquerade_as_nightly_cargo(&["rustdoc-map"]) .with_stderr_contains("[..]--extern-html-root-url[..]") .run(); // This also tests that the map for docs.rs can be overridden. 
p.change_file( ".cargo/config", r#" [doc.extern-map.registries] crates-io = "https://example.com/" "#, ); p.cargo("doc -v --no-deps -Zrustdoc-map") .masquerade_as_nightly_cargo(&["rustdoc-map"]) .with_stderr_contains( "[RUNNING] `rustdoc [..]--extern-html-root-url [..]bar=https://example.com/bar/1.0.0/[..]", ) .run(); } cargo-0.66.0/tests/testsuite/rustdocflags.rs000066400000000000000000000061221432416201200211330ustar00rootroot00000000000000//! Tests for setting custom rustdoc flags. use cargo_test_support::project; #[cargo_test] fn parses_env() { let p = project().file("src/lib.rs", "").build(); p.cargo("doc -v") .env("RUSTDOCFLAGS", "--cfg=foo") .with_stderr_contains("[RUNNING] `rustdoc [..] --cfg=foo[..]`") .run(); } #[cargo_test] fn parses_config() { let p = project() .file("src/lib.rs", "") .file( ".cargo/config", r#" [build] rustdocflags = ["--cfg", "foo"] "#, ) .build(); p.cargo("doc -v") .with_stderr_contains("[RUNNING] `rustdoc [..] --cfg foo[..]`") .run(); } #[cargo_test] fn bad_flags() { let p = project().file("src/lib.rs", "").build(); p.cargo("doc") .env("RUSTDOCFLAGS", "--bogus") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); } #[cargo_test] fn rerun() { let p = project().file("src/lib.rs", "").build(); p.cargo("doc").env("RUSTDOCFLAGS", "--cfg=foo").run(); p.cargo("doc") .env("RUSTDOCFLAGS", "--cfg=foo") .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") .run(); p.cargo("doc") .env("RUSTDOCFLAGS", "--cfg=bar") .with_stderr( "\ [DOCUMENTING] foo v0.0.1 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn rustdocflags_passed_to_rustdoc_through_cargo_test() { let p = project() .file( "src/lib.rs", r#" //! ``` //! assert!(cfg!(do_not_choke)); //! 
``` "#, ) .build(); p.cargo("test --doc") .env("RUSTDOCFLAGS", "--cfg do_not_choke") .run(); } #[cargo_test] fn rustdocflags_passed_to_rustdoc_through_cargo_test_only_once() { let p = project().file("src/lib.rs", "").build(); p.cargo("test --doc") .env("RUSTDOCFLAGS", "--markdown-no-toc") .run(); } #[cargo_test] fn rustdocflags_misspelled() { let p = project().file("src/main.rs", "fn main() { }").build(); p.cargo("doc") .env("RUSTDOC_FLAGS", "foo") .with_stderr_contains("[WARNING] Cargo does not read `RUSTDOC_FLAGS` environment variable. Did you mean `RUSTDOCFLAGS`?") .run(); } #[cargo_test] fn whitespace() { // Checks behavior of different whitespace characters. let p = project().file("src/lib.rs", "").build(); // "too many operands" p.cargo("doc") .env("RUSTDOCFLAGS", "--crate-version this has spaces") .with_stderr_contains("[ERROR] could not document `foo`") .with_status(101) .run(); const SPACED_VERSION: &str = "a\nb\tc\u{00a0}d"; p.cargo("doc") .env_remove("__CARGO_TEST_FORCE_ARGFILE") // Not applicable for argfile. .env( "RUSTDOCFLAGS", format!("--crate-version {}", SPACED_VERSION), ) .run(); let contents = p.read_file("target/doc/foo/index.html"); assert!(contents.contains(SPACED_VERSION)); } cargo-0.66.0/tests/testsuite/rustflags.rs000066400000000000000000001234131432416201200204500ustar00rootroot00000000000000//! Tests for setting custom rustc flags. 
use cargo_test_support::registry::Package; use cargo_test_support::{ basic_lib_manifest, basic_manifest, paths, project, project_in_home, rustc_host, }; use std::fs; #[cargo_test] fn env_rustflags_normal_source() { let p = project() .file("src/lib.rs", "") .file("src/bin/a.rs", "fn main() {}") .file("examples/b.rs", "fn main() {}") .file("tests/c.rs", "#[test] fn f() { }") .file( "benches/d.rs", r#" #![feature(test)] extern crate test; #[bench] fn run1(_ben: &mut test::Bencher) { } "#, ) .build(); // Use RUSTFLAGS to pass an argument that will generate an error p.cargo("build --lib") .env("RUSTFLAGS", "-Z bogus") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); p.cargo("build --bin=a") .env("RUSTFLAGS", "-Z bogus") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); p.cargo("build --example=b") .env("RUSTFLAGS", "-Z bogus") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); p.cargo("test") .env("RUSTFLAGS", "-Z bogus") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); p.cargo("bench") .env("RUSTFLAGS", "-Z bogus") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); } #[cargo_test] fn env_rustflags_build_script() { // RUSTFLAGS should be passed to rustc for build scripts // when --target is not specified. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" build = "build.rs" "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" fn main() { assert!(cfg!(foo)); } "#, ) .build(); p.cargo("build").env("RUSTFLAGS", "--cfg foo").run(); } #[cargo_test] fn env_rustflags_build_script_dep() { // RUSTFLAGS should be passed to rustc for build scripts // when --target is not specified. // In this test if --cfg foo is not passed the build will fail. 
let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" build = "build.rs" [build-dependencies.bar] path = "../bar" "#, ) .file("src/lib.rs", "") .file("build.rs", "fn main() {}") .build(); let _bar = project() .at("bar") .file("Cargo.toml", &basic_manifest("bar", "0.0.1")) .file( "src/lib.rs", r#" fn bar() { } #[cfg(not(foo))] fn bar() { } "#, ) .build(); foo.cargo("build").env("RUSTFLAGS", "--cfg foo").run(); } #[cargo_test] fn env_rustflags_plugin() { // RUSTFLAGS should be passed to rustc for plugins // when --target is not specified. // In this test if --cfg foo is not passed the build will fail. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [lib] name = "foo" plugin = true "#, ) .file( "src/lib.rs", r#" fn main() { } #[cfg(not(foo))] fn main() { } "#, ) .build(); p.cargo("build").env("RUSTFLAGS", "--cfg foo").run(); } #[cargo_test] fn env_rustflags_plugin_dep() { // RUSTFLAGS should be passed to rustc for plugins // when --target is not specified. // In this test if --cfg foo is not passed the build will fail. 
let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [lib] name = "foo" plugin = true [dependencies.bar] path = "../bar" "#, ) .file("src/lib.rs", "fn foo() {}") .build(); let _bar = project() .at("bar") .file("Cargo.toml", &basic_lib_manifest("bar")) .file( "src/lib.rs", r#" fn bar() { } #[cfg(not(foo))] fn bar() { } "#, ) .build(); foo.cargo("build").env("RUSTFLAGS", "--cfg foo").run(); } #[cargo_test] fn env_rustflags_normal_source_with_target() { let p = project() .file("src/lib.rs", "") .file("src/bin/a.rs", "fn main() {}") .file("examples/b.rs", "fn main() {}") .file("tests/c.rs", "#[test] fn f() { }") .file( "benches/d.rs", r#" #![feature(test)] extern crate test; #[bench] fn run1(_ben: &mut test::Bencher) { } "#, ) .build(); let host = &rustc_host(); // Use RUSTFLAGS to pass an argument that will generate an error p.cargo("build --lib --target") .arg(host) .env("RUSTFLAGS", "-Z bogus") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); p.cargo("build --bin=a --target") .arg(host) .env("RUSTFLAGS", "-Z bogus") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); p.cargo("build --example=b --target") .arg(host) .env("RUSTFLAGS", "-Z bogus") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); p.cargo("test --target") .arg(host) .env("RUSTFLAGS", "-Z bogus") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); p.cargo("bench --target") .arg(host) .env("RUSTFLAGS", "-Z bogus") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); } #[cargo_test] fn env_rustflags_build_script_with_target() { // RUSTFLAGS should not be passed to rustc for build scripts // when --target is specified. 
let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" build = "build.rs" "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" fn main() { assert!(!cfg!(foo)); } "#, ) .build(); let host = rustc_host(); p.cargo("build --target") .arg(host) .env("RUSTFLAGS", "--cfg foo") .run(); } #[cargo_test] fn env_rustflags_build_script_with_target_doesnt_apply_to_host_kind() { // RUSTFLAGS should *not* be passed to rustc for build scripts when --target is specified as the // host triple even if target-applies-to-host-kind is enabled, to match legacy Cargo behavior. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" build = "build.rs" "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" fn main() { assert!(!cfg!(foo)); } "#, ) .file( ".cargo/config.toml", r#" target-applies-to-host = true "#, ) .build(); let host = rustc_host(); p.cargo("build --target") .masquerade_as_nightly_cargo(&["target-applies-to-host"]) .arg(host) .arg("-Ztarget-applies-to-host") .env("RUSTFLAGS", "--cfg foo") .run(); } #[cargo_test] fn env_rustflags_build_script_dep_with_target() { // RUSTFLAGS should not be passed to rustc for build scripts // when --target is specified. // In this test if --cfg foo is passed the build will fail. let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" build = "build.rs" [build-dependencies.bar] path = "../bar" "#, ) .file("src/lib.rs", "") .file("build.rs", "fn main() {}") .build(); let _bar = project() .at("bar") .file("Cargo.toml", &basic_manifest("bar", "0.0.1")) .file( "src/lib.rs", r#" fn bar() { } #[cfg(foo)] fn bar() { } "#, ) .build(); let host = rustc_host(); foo.cargo("build --target") .arg(host) .env("RUSTFLAGS", "--cfg foo") .run(); } #[cargo_test] fn env_rustflags_plugin_with_target() { // RUSTFLAGS should not be passed to rustc for plugins // when --target is specified. // In this test if --cfg foo is passed the build will fail. 
let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [lib] name = "foo" plugin = true "#, ) .file( "src/lib.rs", r#" fn main() { } #[cfg(foo)] fn main() { } "#, ) .build(); let host = rustc_host(); p.cargo("build --target") .arg(host) .env("RUSTFLAGS", "--cfg foo") .run(); } #[cargo_test] fn env_rustflags_plugin_dep_with_target() { // RUSTFLAGS should not be passed to rustc for plugins // when --target is specified. // In this test if --cfg foo is passed the build will fail. let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [lib] name = "foo" plugin = true [dependencies.bar] path = "../bar" "#, ) .file("src/lib.rs", "fn foo() {}") .build(); let _bar = project() .at("bar") .file("Cargo.toml", &basic_lib_manifest("bar")) .file( "src/lib.rs", r#" fn bar() { } #[cfg(foo)] fn bar() { } "#, ) .build(); let host = rustc_host(); foo.cargo("build --target") .arg(host) .env("RUSTFLAGS", "--cfg foo") .run(); } #[cargo_test] fn env_rustflags_recompile() { let p = project().file("src/lib.rs", "").build(); p.cargo("build").run(); // Setting RUSTFLAGS forces a recompile p.cargo("build") .env("RUSTFLAGS", "-Z bogus") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); } #[cargo_test] fn env_rustflags_recompile2() { let p = project().file("src/lib.rs", "").build(); p.cargo("build").env("RUSTFLAGS", "--cfg foo").run(); // Setting RUSTFLAGS forces a recompile p.cargo("build") .env("RUSTFLAGS", "-Z bogus") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); } #[cargo_test] fn env_rustflags_no_recompile() { let p = project().file("src/lib.rs", "").build(); p.cargo("build").env("RUSTFLAGS", "--cfg foo").run(); p.cargo("build") .env("RUSTFLAGS", "--cfg foo") .with_stdout("") .run(); } #[cargo_test] fn build_rustflags_normal_source() { let p = project() .file("src/lib.rs", "") .file("src/bin/a.rs", "fn main() {}") .file("examples/b.rs", "fn main() {}") .file("tests/c.rs", "#[test] fn f() { }") 
.file( "benches/d.rs", r#" #![feature(test)] extern crate test; #[bench] fn run1(_ben: &mut test::Bencher) { } "#, ) .file( ".cargo/config", r#" [build] rustflags = ["-Z", "bogus"] "#, ) .build(); p.cargo("build --lib") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); p.cargo("build --bin=a") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); p.cargo("build --example=b") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); p.cargo("test") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); p.cargo("bench") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); } #[cargo_test] fn build_rustflags_build_script() { // RUSTFLAGS should be passed to rustc for build scripts // when --target is not specified. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" build = "build.rs" "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" fn main() { assert!(cfg!(foo)); } "#, ) .file( ".cargo/config", r#" [build] rustflags = ["--cfg", "foo"] "#, ) .build(); p.cargo("build").run(); } #[cargo_test] fn build_rustflags_build_script_dep() { // RUSTFLAGS should be passed to rustc for build scripts // when --target is not specified. // In this test if --cfg foo is not passed the build will fail. let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" build = "build.rs" [build-dependencies.bar] path = "../bar" "#, ) .file("src/lib.rs", "") .file("build.rs", "fn main() {}") .file( ".cargo/config", r#" [build] rustflags = ["--cfg", "foo"] "#, ) .build(); let _bar = project() .at("bar") .file("Cargo.toml", &basic_manifest("bar", "0.0.1")) .file( "src/lib.rs", r#" fn bar() { } #[cfg(not(foo))] fn bar() { } "#, ) .build(); foo.cargo("build").run(); } #[cargo_test] fn build_rustflags_plugin() { // RUSTFLAGS should be passed to rustc for plugins // when --target is not specified. // In this test if --cfg foo is not passed the build will fail. 
let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [lib] name = "foo" plugin = true "#, ) .file( "src/lib.rs", r#" fn main() { } #[cfg(not(foo))] fn main() { } "#, ) .file( ".cargo/config", r#" [build] rustflags = ["--cfg", "foo"] "#, ) .build(); p.cargo("build").run(); } #[cargo_test] fn build_rustflags_plugin_dep() { // RUSTFLAGS should be passed to rustc for plugins // when --target is not specified. // In this test if --cfg foo is not passed the build will fail. let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [lib] name = "foo" plugin = true [dependencies.bar] path = "../bar" "#, ) .file("src/lib.rs", "fn foo() {}") .file( ".cargo/config", r#" [build] rustflags = ["--cfg", "foo"] "#, ) .build(); let _bar = project() .at("bar") .file("Cargo.toml", &basic_lib_manifest("bar")) .file( "src/lib.rs", r#" fn bar() { } #[cfg(not(foo))] fn bar() { } "#, ) .build(); foo.cargo("build").run(); } #[cargo_test] fn build_rustflags_normal_source_with_target() { let p = project() .file("src/lib.rs", "") .file("src/bin/a.rs", "fn main() {}") .file("examples/b.rs", "fn main() {}") .file("tests/c.rs", "#[test] fn f() { }") .file( "benches/d.rs", r#" #![feature(test)] extern crate test; #[bench] fn run1(_ben: &mut test::Bencher) { } "#, ) .file( ".cargo/config", r#" [build] rustflags = ["-Z", "bogus"] "#, ) .build(); let host = &rustc_host(); // Use build.rustflags to pass an argument that will generate an error p.cargo("build --lib --target") .arg(host) .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); p.cargo("build --bin=a --target") .arg(host) .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); p.cargo("build --example=b --target") .arg(host) .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); p.cargo("test --target") .arg(host) .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); p.cargo("bench --target") .arg(host) .with_status(101) 
.with_stderr_contains("[..]bogus[..]") .run(); } #[cargo_test] fn build_rustflags_build_script_with_target() { // RUSTFLAGS should not be passed to rustc for build scripts // when --target is specified. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" build = "build.rs" "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" fn main() { assert!(!cfg!(foo)); } "#, ) .file( ".cargo/config", r#" [build] rustflags = ["--cfg", "foo"] "#, ) .build(); let host = rustc_host(); p.cargo("build --target").arg(host).run(); } #[cargo_test] fn build_rustflags_build_script_dep_with_target() { // RUSTFLAGS should not be passed to rustc for build scripts // when --target is specified. // In this test if --cfg foo is passed the build will fail. let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" build = "build.rs" [build-dependencies.bar] path = "../bar" "#, ) .file("src/lib.rs", "") .file("build.rs", "fn main() {}") .file( ".cargo/config", r#" [build] rustflags = ["--cfg", "foo"] "#, ) .build(); let _bar = project() .at("bar") .file("Cargo.toml", &basic_manifest("bar", "0.0.1")) .file( "src/lib.rs", r#" fn bar() { } #[cfg(foo)] fn bar() { } "#, ) .build(); let host = rustc_host(); foo.cargo("build --target").arg(host).run(); } #[cargo_test] fn build_rustflags_plugin_with_target() { // RUSTFLAGS should not be passed to rustc for plugins // when --target is specified. // In this test if --cfg foo is passed the build will fail. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [lib] name = "foo" plugin = true "#, ) .file( "src/lib.rs", r#" fn main() { } #[cfg(foo)] fn main() { } "#, ) .file( ".cargo/config", r#" [build] rustflags = ["--cfg", "foo"] "#, ) .build(); let host = rustc_host(); p.cargo("build --target").arg(host).run(); } #[cargo_test] fn build_rustflags_plugin_dep_with_target() { // RUSTFLAGS should not be passed to rustc for plugins // when --target is specified. 
// In this test if --cfg foo is passed the build will fail. let foo = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [lib] name = "foo" plugin = true [dependencies.bar] path = "../bar" "#, ) .file("src/lib.rs", "fn foo() {}") .file( ".cargo/config", r#" [build] rustflags = ["--cfg", "foo"] "#, ) .build(); let _bar = project() .at("bar") .file("Cargo.toml", &basic_lib_manifest("bar")) .file( "src/lib.rs", r#" fn bar() { } #[cfg(foo)] fn bar() { } "#, ) .build(); let host = rustc_host(); foo.cargo("build --target").arg(host).run(); } #[cargo_test] fn build_rustflags_recompile() { let p = project().file("src/lib.rs", "").build(); p.cargo("build").run(); // Setting RUSTFLAGS forces a recompile let config = r#" [build] rustflags = ["-Z", "bogus"] "#; let config_file = paths::root().join("foo/.cargo/config"); fs::create_dir_all(config_file.parent().unwrap()).unwrap(); fs::write(config_file, config).unwrap(); p.cargo("build") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); } #[cargo_test] fn build_rustflags_recompile2() { let p = project().file("src/lib.rs", "").build(); p.cargo("build").env("RUSTFLAGS", "--cfg foo").run(); // Setting RUSTFLAGS forces a recompile let config = r#" [build] rustflags = ["-Z", "bogus"] "#; let config_file = paths::root().join("foo/.cargo/config"); fs::create_dir_all(config_file.parent().unwrap()).unwrap(); fs::write(config_file, config).unwrap(); p.cargo("build") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); } #[cargo_test] fn build_rustflags_no_recompile() { let p = project() .file("src/lib.rs", "") .file( ".cargo/config", r#" [build] rustflags = ["--cfg", "foo"] "#, ) .build(); p.cargo("build").env("RUSTFLAGS", "--cfg foo").run(); p.cargo("build") .env("RUSTFLAGS", "--cfg foo") .with_stdout("") .run(); } #[cargo_test] fn build_rustflags_with_home_config() { // We need a config file inside the home directory let home = paths::home(); let home_config = home.join(".cargo"); 
fs::create_dir(&home_config).unwrap(); fs::write( &home_config.join("config"), r#" [build] rustflags = ["-Cllvm-args=-x86-asm-syntax=intel"] "#, ) .unwrap(); // And we need the project to be inside the home directory // so the walking process finds the home project twice. let p = project_in_home("foo").file("src/lib.rs", "").build(); p.cargo("build -v").run(); } #[cargo_test] fn target_rustflags_normal_source() { let p = project() .file("src/lib.rs", "") .file("src/bin/a.rs", "fn main() {}") .file("examples/b.rs", "fn main() {}") .file("tests/c.rs", "#[test] fn f() { }") .file( "benches/d.rs", r#" #![feature(test)] extern crate test; #[bench] fn run1(_ben: &mut test::Bencher) { } "#, ) .file( ".cargo/config", &format!( " [target.{}] rustflags = [\"-Z\", \"bogus\"] ", rustc_host() ), ) .build(); p.cargo("build --lib") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); p.cargo("build --bin=a") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); p.cargo("build --example=b") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); p.cargo("test") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); p.cargo("bench") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); } #[cargo_test] fn target_rustflags_also_for_build_scripts() { let p = project() .file("src/lib.rs", "") .file( "build.rs", r#" fn main() { assert!(cfg!(foo)); } "#, ) .file( ".cargo/config", &format!( " [target.{}] rustflags = [\"--cfg=foo\"] ", rustc_host() ), ) .build(); p.cargo("build").run(); } #[cargo_test] fn target_rustflags_not_for_build_scripts_with_target() { let host = rustc_host(); let p = project() .file("src/lib.rs", "") .file( "build.rs", r#" fn main() { assert!(!cfg!(foo)); } "#, ) .file( ".cargo/config", &format!( " [target.{}] rustflags = [\"--cfg=foo\"] ", host ), ) .build(); p.cargo("build --target").arg(host).run(); // Enabling -Ztarget-applies-to-host should not make a difference without the config setting p.cargo("build 
--target") .arg(host) .masquerade_as_nightly_cargo(&["target-applies-to-host"]) .arg("-Ztarget-applies-to-host") .run(); // Even with the setting, the rustflags from `target.` should not apply, to match the legacy // Cargo behavior. p.change_file( ".cargo/config", &format!( " target-applies-to-host = true [target.{}] rustflags = [\"--cfg=foo\"] ", host ), ); p.cargo("build --target") .arg(host) .masquerade_as_nightly_cargo(&["target-applies-to-host"]) .arg("-Ztarget-applies-to-host") .run(); } #[cargo_test] fn build_rustflags_for_build_scripts() { let host = rustc_host(); let p = project() .file("src/lib.rs", "") .file( "build.rs", r#" fn main() { assert!(cfg!(foo)); } "#, ) .file( ".cargo/config", " [build] rustflags = [\"--cfg=foo\"] ", ) .build(); // With "legacy" behavior, build.rustflags should apply to build scripts without --target p.cargo("build").run(); // But should _not_ apply _with_ --target p.cargo("build --target") .arg(host) .with_status(101) .with_stderr_contains("[..]assertion failed[..]") .run(); // Enabling -Ztarget-applies-to-host should not make a difference without the config setting p.cargo("build") .masquerade_as_nightly_cargo(&["target-applies-to-host"]) .arg("-Ztarget-applies-to-host") .run(); p.cargo("build --target") .arg(host) .masquerade_as_nightly_cargo(&["target-applies-to-host"]) .arg("-Ztarget-applies-to-host") .with_status(101) .with_stderr_contains("[..]assertion failed[..]") .run(); // When set to false though, the "proper" behavior where host artifacts _only_ pick up on // [host] should be applied. 
p.change_file( ".cargo/config", " target-applies-to-host = false [build] rustflags = [\"--cfg=foo\"] ", ); p.cargo("build") .masquerade_as_nightly_cargo(&["target-applies-to-host"]) .arg("-Ztarget-applies-to-host") .with_status(101) .with_stderr_contains("[..]assertion failed[..]") .run(); p.cargo("build --target") .arg(host) .masquerade_as_nightly_cargo(&["target-applies-to-host"]) .arg("-Ztarget-applies-to-host") .with_status(101) .with_stderr_contains("[..]assertion failed[..]") .run(); } #[cargo_test] fn host_rustflags_for_build_scripts() { let host = rustc_host(); let p = project() .file("src/lib.rs", "") .file( "build.rs", r#" // Ensure that --cfg=foo is passed. fn main() { assert!(cfg!(foo)); } "#, ) .file( ".cargo/config", &format!( " target-applies-to-host = false [host.{}] rustflags = [\"--cfg=foo\"] ", host ), ) .build(); p.cargo("build --target") .arg(host) .masquerade_as_nightly_cargo(&["target-applies-to-host", "host-config"]) .arg("-Ztarget-applies-to-host") .arg("-Zhost-config") .run(); } // target.{}.rustflags takes precedence over build.rustflags #[cargo_test] fn target_rustflags_precedence() { let p = project() .file("src/lib.rs", "") .file("src/bin/a.rs", "fn main() {}") .file("examples/b.rs", "fn main() {}") .file("tests/c.rs", "#[test] fn f() { }") .file( ".cargo/config", &format!( " [build] rustflags = [\"--cfg\", \"foo\"] [target.{}] rustflags = [\"-Z\", \"bogus\"] ", rustc_host() ), ) .build(); p.cargo("build --lib") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); p.cargo("build --bin=a") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); p.cargo("build --example=b") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); p.cargo("test") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); p.cargo("bench") .with_status(101) .with_stderr_contains("[..]bogus[..]") .run(); } #[cargo_test] fn cfg_rustflags_normal_source() { let p = project() .file("src/lib.rs", "pub fn t() {}") 
.file("src/bin/a.rs", "fn main() {}") .file("examples/b.rs", "fn main() {}") .file("tests/c.rs", "#[test] fn f() { }") .file( ".cargo/config", &format!( r#" [target.'cfg({})'] rustflags = ["--cfg", "bar"] "#, if rustc_host().contains("-windows-") { "windows" } else { "not(windows)" } ), ) .build(); p.cargo("build --lib -v") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [RUNNING] `rustc [..] --cfg bar[..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("build --bin=a -v") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [RUNNING] `rustc [..] --cfg bar[..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("build --example=b -v") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [RUNNING] `rustc [..] --cfg bar[..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("test --no-run -v") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [RUNNING] `rustc [..] --cfg bar[..]` [RUNNING] `rustc [..] --cfg bar[..]` [RUNNING] `rustc [..] --cfg bar[..]` [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [EXECUTABLE] `[..]/target/debug/deps/foo-[..][EXE]` [EXECUTABLE] `[..]/target/debug/deps/a-[..][EXE]` [EXECUTABLE] `[..]/target/debug/deps/c-[..][EXE]` ", ) .run(); p.cargo("bench --no-run -v") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [RUNNING] `rustc [..] --cfg bar[..]` [RUNNING] `rustc [..] --cfg bar[..]` [RUNNING] `rustc [..] --cfg bar[..]` [FINISHED] bench [optimized] target(s) in [..] 
[EXECUTABLE] `[..]/target/release/deps/foo-[..][EXE]` [EXECUTABLE] `[..]/target/release/deps/a-[..][EXE]` ", ) .run(); } // target.'cfg(...)'.rustflags takes precedence over build.rustflags #[cargo_test] fn cfg_rustflags_precedence() { let p = project() .file("src/lib.rs", "pub fn t() {}") .file("src/bin/a.rs", "fn main() {}") .file("examples/b.rs", "fn main() {}") .file("tests/c.rs", "#[test] fn f() { }") .file( ".cargo/config", &format!( r#" [build] rustflags = ["--cfg", "foo"] [target.'cfg({})'] rustflags = ["--cfg", "bar"] "#, if rustc_host().contains("-windows-") { "windows" } else { "not(windows)" } ), ) .build(); p.cargo("build --lib -v") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [RUNNING] `rustc [..] --cfg bar[..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("build --bin=a -v") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [RUNNING] `rustc [..] --cfg bar[..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("build --example=b -v") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [RUNNING] `rustc [..] --cfg bar[..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("test --no-run -v") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [RUNNING] `rustc [..] --cfg bar[..]` [RUNNING] `rustc [..] --cfg bar[..]` [RUNNING] `rustc [..] --cfg bar[..]` [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [EXECUTABLE] `[..]/target/debug/deps/foo-[..][EXE]` [EXECUTABLE] `[..]/target/debug/deps/a-[..][EXE]` [EXECUTABLE] `[..]/target/debug/deps/c-[..][EXE]` ", ) .run(); p.cargo("bench --no-run -v") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [RUNNING] `rustc [..] --cfg bar[..]` [RUNNING] `rustc [..] --cfg bar[..]` [RUNNING] `rustc [..] --cfg bar[..]` [FINISHED] bench [optimized] target(s) in [..] 
[EXECUTABLE] `[..]/target/release/deps/foo-[..][EXE]` [EXECUTABLE] `[..]/target/release/deps/a-[..][EXE]` ", ) .run(); } #[cargo_test] fn target_rustflags_string_and_array_form1() { let p1 = project() .file("src/lib.rs", "") .file( ".cargo/config", r#" [build] rustflags = ["--cfg", "foo"] "#, ) .build(); p1.cargo("build -v") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [RUNNING] `rustc [..] --cfg foo[..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); let p2 = project() .file("src/lib.rs", "") .file( ".cargo/config", r#" [build] rustflags = "--cfg foo" "#, ) .build(); p2.cargo("build -v") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [RUNNING] `rustc [..] --cfg foo[..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn target_rustflags_string_and_array_form2() { let p1 = project() .file( ".cargo/config", &format!( r#" [target.{}] rustflags = ["--cfg", "foo"] "#, rustc_host() ), ) .file("src/lib.rs", "") .build(); p1.cargo("build -v") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [RUNNING] `rustc [..] --cfg foo[..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); let p2 = project() .file( ".cargo/config", &format!( r#" [target.{}] rustflags = "--cfg foo" "#, rustc_host() ), ) .file("src/lib.rs", "") .build(); p2.cargo("build -v") .with_stderr( "\ [COMPILING] foo v0.0.1 ([..]) [RUNNING] `rustc [..] --cfg foo[..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn two_matching_in_config() { let p1 = project() .file( ".cargo/config", r#" [target.'cfg(unix)'] rustflags = ["--cfg", 'foo="a"'] [target.'cfg(windows)'] rustflags = ["--cfg", 'foo="a"'] [target.'cfg(target_pointer_width = "32")'] rustflags = ["--cfg", 'foo="b"'] [target.'cfg(target_pointer_width = "64")'] rustflags = ["--cfg", 'foo="b"'] "#, ) .file( "src/main.rs", r#" fn main() { if cfg!(foo = "a") { println!("a"); } else if cfg!(foo = "b") { println!("b"); } else { panic!() } } "#, ) .build(); p1.cargo("run").run(); p1.cargo("build").with_stderr("[FINISHED] [..]").run(); } #[cargo_test] fn env_rustflags_misspelled() { let p = project().file("src/main.rs", "fn main() { }").build(); for cmd in &["check", "build", "run", "test", "bench"] { p.cargo(cmd) .env("RUST_FLAGS", "foo") .with_stderr_contains("[WARNING] Cargo does not read `RUST_FLAGS` environment variable. Did you mean `RUSTFLAGS`?") .run(); } } #[cargo_test] fn env_rustflags_misspelled_build_script() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" build = "build.rs" "#, ) .file("src/lib.rs", "") .file("build.rs", "fn main() { }") .build(); p.cargo("build") .env("RUST_FLAGS", "foo") .with_stderr_contains("[WARNING] Cargo does not read `RUST_FLAGS` environment variable. Did you mean `RUSTFLAGS`?") .run(); } #[cargo_test] fn remap_path_prefix_ignored() { // Ensure that --remap-path-prefix does not affect metadata hash. 
let p = project().file("src/lib.rs", "").build(); p.cargo("build").run(); let rlibs = p .glob("target/debug/deps/*.rlib") .collect::, _>>() .unwrap(); assert_eq!(rlibs.len(), 1); p.cargo("clean").run(); let check_metadata_same = || { let rlibs2 = p .glob("target/debug/deps/*.rlib") .collect::, _>>() .unwrap(); assert_eq!(rlibs, rlibs2); }; p.cargo("build") .env( "RUSTFLAGS", "--remap-path-prefix=/abc=/zoo --remap-path-prefix /spaced=/zoo", ) .run(); check_metadata_same(); p.cargo("clean").run(); p.cargo("rustc -- --remap-path-prefix=/abc=/zoo --remap-path-prefix /spaced=/zoo") .run(); check_metadata_same(); } #[cargo_test] fn remap_path_prefix_works() { // Check that remap-path-prefix works. Package::new("bar", "0.1.0") .file("src/lib.rs", "pub fn f() -> &'static str { file!() }") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "0.1" "#, ) .file( "src/main.rs", r#" fn main() { println!("{}", bar::f()); } "#, ) .build(); p.cargo("run") .env( "RUSTFLAGS", format!("--remap-path-prefix={}=/foo", paths::root().display()), ) .with_stdout("/foo/home/.cargo/registry/src/[..]/bar-0.1.0/src/lib.rs") .run(); } #[cargo_test] fn host_config_rustflags_with_target() { // regression test for https://github.com/rust-lang/cargo/issues/10206 let p = project() .file("src/lib.rs", "") .file("build.rs.rs", "fn main() { assert!(cfg!(foo)); }") .file(".cargo/config.toml", "target-applies-to-host = false") .build(); p.cargo("build") .masquerade_as_nightly_cargo(&["target-applies-to-host", "host-config"]) .arg("-Zhost-config") .arg("-Ztarget-applies-to-host") .arg("-Zunstable-options") .arg("--config") .arg("host.rustflags=[\"--cfg=foo\"]") .run(); } cargo-0.66.0/tests/testsuite/search.rs000066400000000000000000000112321432416201200176760ustar00rootroot00000000000000//! Tests for the `cargo search` command. 
use cargo_test_support::cargo_process; use cargo_test_support::paths; use cargo_test_support::registry::{RegistryBuilder, Response}; use std::collections::HashSet; const SEARCH_API_RESPONSE: &[u8] = br#" { "crates": [{ "created_at": "2014-11-16T20:17:35Z", "description": "Design by contract style assertions for Rust", "documentation": null, "downloads": 2, "homepage": null, "id": "hoare", "keywords": [], "license": null, "links": { "owners": "/api/v1/crates/hoare/owners", "reverse_dependencies": "/api/v1/crates/hoare/reverse_dependencies", "version_downloads": "/api/v1/crates/hoare/downloads", "versions": "/api/v1/crates/hoare/versions" }, "max_version": "0.1.1", "name": "hoare", "repository": "https://github.com/nick29581/libhoare", "updated_at": "2014-11-20T21:49:21Z", "versions": null }, { "id": "postgres", "name": "postgres", "updated_at": "2020-05-01T23:17:54.335921+00:00", "versions": null, "keywords": null, "categories": null, "badges": [ { "badge_type": "circle-ci", "attributes": { "repository": "sfackler/rust-postgres", "branch": null } } ], "created_at": "2014-11-24T02:34:44.756689+00:00", "downloads": 535491, "recent_downloads": 88321, "max_version": "0.17.3", "newest_version": "0.17.3", "description": "A native, synchronous PostgreSQL client", "homepage": null, "documentation": null, "repository": "https://github.com/sfackler/rust-postgres", "links": { "version_downloads": "/api/v1/crates/postgres/downloads", "versions": "/api/v1/crates/postgres/versions", "owners": "/api/v1/crates/postgres/owners", "owner_team": "/api/v1/crates/postgres/owner_team", "owner_user": "/api/v1/crates/postgres/owner_user", "reverse_dependencies": "/api/v1/crates/postgres/reverse_dependencies" }, "exact_match": true } ], "meta": { "total": 2 } }"#; const SEARCH_RESULTS: &str = "\ hoare = \"0.1.1\" # Design by contract style assertions for Rust postgres = \"0.17.3\" # A native, synchronous PostgreSQL client "; #[must_use] fn setup() -> RegistryBuilder { RegistryBuilder::new() 
.http_api() .add_responder("/api/v1/crates", |_| Response { code: 200, headers: vec![], body: SEARCH_API_RESPONSE.to_vec(), }) } #[cargo_test] fn not_update() { let registry = setup().build(); use cargo::core::{Shell, Source, SourceId}; use cargo::sources::RegistrySource; use cargo::util::Config; let sid = SourceId::for_registry(registry.index_url()).unwrap(); let cfg = Config::new( Shell::from_write(Box::new(Vec::new())), paths::root(), paths::home().join(".cargo"), ); let lock = cfg.acquire_package_cache_lock().unwrap(); let mut regsrc = RegistrySource::remote(sid, &HashSet::new(), &cfg).unwrap(); regsrc.invalidate_cache(); regsrc.block_until_ready().unwrap(); drop(lock); cargo_process("search postgres") .with_stdout_contains(SEARCH_RESULTS) .with_stderr("") // without "Updating ... index" .run(); } #[cargo_test] fn replace_default() { let _server = setup().build(); cargo_process("search postgres") .with_stdout_contains(SEARCH_RESULTS) .with_stderr_contains("[..]Updating [..] index") .run(); } #[cargo_test] fn simple() { let registry = setup().build(); cargo_process("search postgres --index") .arg(registry.index_url().as_str()) .with_stdout_contains(SEARCH_RESULTS) .run(); } #[cargo_test] fn multiple_query_params() { let registry = setup().build(); cargo_process("search postgres sql --index") .arg(registry.index_url().as_str()) .with_stdout_contains(SEARCH_RESULTS) .run(); } #[cargo_test] fn ignore_quiet() { let _server = setup().build(); cargo_process("search -q postgres") .with_stdout_contains(SEARCH_RESULTS) .run(); } #[cargo_test] fn colored_results() { let _server = setup().build(); cargo_process("search --color=never postgres") .with_stdout_does_not_contain("[..]\x1b[[..]") .run(); cargo_process("search --color=always postgres") .with_stdout_contains("[..]\x1b[[..]") .run(); } cargo-0.66.0/tests/testsuite/shell_quoting.rs000066400000000000000000000020471432416201200213120ustar00rootroot00000000000000//! 
This file tests that when the commands being run are shown //! in the output, their arguments are quoted properly //! so that the command can be run in a terminal. use cargo_test_support::project; #[cargo_test] fn features_are_quoted() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = ["mikeyhew@example.com"] [features] some_feature = [] default = ["some_feature"] "#, ) .file("src/main.rs", "fn main() {error}") .build(); p.cargo("check -v") .env("MSYSTEM", "1") .with_status(101) .with_stderr_contains( r#"[RUNNING] `rustc [..] --cfg 'feature="default"' --cfg 'feature="some_feature"' [..]`"# ).with_stderr_contains( r#" Caused by: process didn't exit successfully: [..] --cfg 'feature="default"' --cfg 'feature="some_feature"' [..]"# ) .run(); } cargo-0.66.0/tests/testsuite/standard_lib.rs000066400000000000000000000421771432416201200210730ustar00rootroot00000000000000//! Tests for building the standard library (-Zbuild-std). //! //! These tests all use a "mock" standard library so that we don't have to //! rebuild the real one. There is a separate integration test `build-std` //! which builds the real thing, but that should be avoided if possible. use cargo_test_support::registry::{Dependency, Package}; use cargo_test_support::ProjectBuilder; use cargo_test_support::{paths, project, rustc_host, Execs}; use std::path::{Path, PathBuf}; struct Setup { rustc_wrapper: PathBuf, real_sysroot: String, } fn setup() -> Setup { // Our mock sysroot requires a few packages from crates.io, so make sure // they're "published" to crates.io. Also edit their code a bit to make sure // that they have access to our custom crates with custom apis. 
Package::new("registry-dep-using-core", "1.0.0") .file( "src/lib.rs", " #![no_std] #[cfg(feature = \"mockbuild\")] pub fn custom_api() { } #[cfg(not(feature = \"mockbuild\"))] pub fn non_sysroot_api() { core::custom_api(); } ", ) .add_dep(Dependency::new("rustc-std-workspace-core", "*").optional(true)) .feature("mockbuild", &["rustc-std-workspace-core"]) .publish(); Package::new("registry-dep-using-alloc", "1.0.0") .file( "src/lib.rs", " #![no_std] extern crate alloc; #[cfg(feature = \"mockbuild\")] pub fn custom_api() { } #[cfg(not(feature = \"mockbuild\"))] pub fn non_sysroot_api() { core::custom_api(); alloc::custom_api(); } ", ) .add_dep(Dependency::new("rustc-std-workspace-core", "*").optional(true)) .add_dep(Dependency::new("rustc-std-workspace-alloc", "*").optional(true)) .feature( "mockbuild", &["rustc-std-workspace-core", "rustc-std-workspace-alloc"], ) .publish(); Package::new("registry-dep-using-std", "1.0.0") .file( "src/lib.rs", " #[cfg(feature = \"mockbuild\")] pub fn custom_api() { } #[cfg(not(feature = \"mockbuild\"))] pub fn non_sysroot_api() { std::custom_api(); } ", ) .add_dep(Dependency::new("rustc-std-workspace-std", "*").optional(true)) .feature("mockbuild", &["rustc-std-workspace-std"]) .publish(); let p = ProjectBuilder::new(paths::root().join("rustc-wrapper")) .file( "src/main.rs", r#" use std::process::Command; use std::env; fn main() { let mut args = env::args().skip(1).collect::>(); let is_sysroot_crate = env::var_os("RUSTC_BOOTSTRAP").is_some(); if is_sysroot_crate { args.push("--sysroot".to_string()); args.push(env::var("REAL_SYSROOT").unwrap()); } else if args.iter().any(|arg| arg == "--target") { // build-std target unit args.push("--sysroot".to_string()); args.push("/path/to/nowhere".to_string()); } else { // host unit, do not use sysroot } let ret = Command::new(&args[0]).args(&args[1..]).status().unwrap(); std::process::exit(ret.code().unwrap_or(1)); } "#, ) .build(); p.cargo("build").run(); Setup { rustc_wrapper: p.bin("foo"), 
real_sysroot: paths::sysroot(), } } fn enable_build_std(e: &mut Execs, setup: &Setup) { // First up, force Cargo to use our "mock sysroot" which mimics what // libstd looks like upstream. let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("tests/testsuite/mock-std"); e.env("__CARGO_TESTS_ONLY_SRC_ROOT", &root); e.masquerade_as_nightly_cargo(&["build-std"]); // We do various shenanigans to ensure our "mock sysroot" actually links // with the real sysroot, so we don't have to actually recompile std for // each test. Perform all that logic here, namely: // // * RUSTC_WRAPPER - uses our shim executable built above to control rustc // * REAL_SYSROOT - used by the shim executable to swap out to the real // sysroot temporarily for some compilations // * RUST{,DOC}FLAGS - an extra `-L` argument to ensure we can always load // crates from the sysroot, but only indirectly through other crates. e.env("RUSTC_WRAPPER", &setup.rustc_wrapper); e.env("REAL_SYSROOT", &setup.real_sysroot); let libdir = format!("/lib/rustlib/{}/lib", rustc_host()); e.env( "RUSTFLAGS", format!("-Ldependency={}{}", setup.real_sysroot, libdir), ); e.env( "RUSTDOCFLAGS", format!("-Ldependency={}{}", setup.real_sysroot, libdir), ); } // Helper methods used in the tests below trait BuildStd: Sized { fn build_std(&mut self, setup: &Setup) -> &mut Self; fn build_std_arg(&mut self, setup: &Setup, arg: &str) -> &mut Self; fn target_host(&mut self) -> &mut Self; } impl BuildStd for Execs { fn build_std(&mut self, setup: &Setup) -> &mut Self { enable_build_std(self, setup); self.arg("-Zbuild-std"); self } fn build_std_arg(&mut self, setup: &Setup, arg: &str) -> &mut Self { enable_build_std(self, setup); self.arg(format!("-Zbuild-std={}", arg)); self } fn target_host(&mut self) -> &mut Self { self.arg("--target").arg(rustc_host()); self } } #[cargo_test(build_std_mock)] fn basic() { let setup = setup(); let p = project() .file( "src/main.rs", " fn main() { std::custom_api(); foo::f(); } #[test] fn 
smoke_bin_unit() { std::custom_api(); foo::f(); } ", ) .file( "src/lib.rs", " extern crate alloc; extern crate proc_macro; /// ``` /// foo::f(); /// ``` pub fn f() { core::custom_api(); std::custom_api(); alloc::custom_api(); proc_macro::custom_api(); } #[test] fn smoke_lib_unit() { std::custom_api(); f(); } ", ) .file( "tests/smoke.rs", " #[test] fn smoke_integration() { std::custom_api(); foo::f(); } ", ) .build(); p.cargo("check -v").build_std(&setup).target_host().run(); p.cargo("build").build_std(&setup).target_host().run(); p.cargo("run").build_std(&setup).target_host().run(); p.cargo("test").build_std(&setup).target_host().run(); } #[cargo_test(build_std_mock)] fn simple_lib_std() { let setup = setup(); let p = project().file("src/lib.rs", "").build(); p.cargo("build -v") .build_std(&setup) .target_host() .with_stderr_contains("[RUNNING] `[..]--crate-name std [..]`") .run(); // Check freshness. p.change_file("src/lib.rs", " "); p.cargo("build -v") .build_std(&setup) .target_host() .with_stderr_contains("[FRESH] std[..]") .run(); } #[cargo_test(build_std_mock)] fn simple_bin_std() { let setup = setup(); let p = project().file("src/main.rs", "fn main() {}").build(); p.cargo("run -v").build_std(&setup).target_host().run(); } #[cargo_test(build_std_mock)] fn lib_nostd() { let setup = setup(); let p = project() .file( "src/lib.rs", r#" #![no_std] pub fn foo() { assert_eq!(u8::MIN, 0); } "#, ) .build(); p.cargo("build -v --lib") .build_std_arg(&setup, "core") .target_host() .with_stderr_does_not_contain("[..]libstd[..]") .run(); } #[cargo_test(build_std_mock)] fn check_core() { let setup = setup(); let p = project() .file("src/lib.rs", "#![no_std] fn unused_fn() {}") .build(); p.cargo("check -v") .build_std_arg(&setup, "core") .target_host() .with_stderr_contains("[WARNING] [..]unused_fn[..]") .run(); } #[cargo_test(build_std_mock)] fn depend_same_as_std() { let setup = setup(); let p = project() .file( "src/lib.rs", r#" pub fn f() { 
registry_dep_using_core::non_sysroot_api(); registry_dep_using_alloc::non_sysroot_api(); registry_dep_using_std::non_sysroot_api(); } "#, ) .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [dependencies] registry-dep-using-core = "1.0" registry-dep-using-alloc = "1.0" registry-dep-using-std = "1.0" "#, ) .build(); p.cargo("build -v").build_std(&setup).target_host().run(); } #[cargo_test(build_std_mock)] fn test() { let setup = setup(); let p = project() .file( "src/lib.rs", r#" #[cfg(test)] mod tests { #[test] fn it_works() { assert_eq!(2 + 2, 4); } } "#, ) .build(); p.cargo("test -v") .build_std(&setup) .target_host() .with_stdout_contains("test tests::it_works ... ok") .run(); } #[cargo_test(build_std_mock)] fn target_proc_macro() { let setup = setup(); let p = project() .file( "src/lib.rs", r#" extern crate proc_macro; pub fn f() { let _ts = proc_macro::TokenStream::new(); } "#, ) .build(); p.cargo("build -v").build_std(&setup).target_host().run(); } #[cargo_test(build_std_mock)] fn bench() { let setup = setup(); let p = project() .file( "src/lib.rs", r#" #![feature(test)] extern crate test; #[bench] fn b1(b: &mut test::Bencher) { b.iter(|| ()) } "#, ) .build(); p.cargo("bench -v").build_std(&setup).target_host().run(); } #[cargo_test(build_std_mock)] fn doc() { let setup = setup(); let p = project() .file( "src/lib.rs", r#" /// Doc pub fn f() -> Result<(), ()> {Ok(())} "#, ) .build(); p.cargo("doc -v").build_std(&setup).target_host().run(); } #[cargo_test(build_std_mock)] fn check_std() { let setup = setup(); let p = project() .file( "src/lib.rs", " extern crate core; extern crate alloc; extern crate proc_macro; pub fn f() {} ", ) .file("src/main.rs", "fn main() {}") .file( "tests/t1.rs", r#" #[test] fn t1() { assert_eq!(1, 2); } "#, ) .build(); p.cargo("check -v --all-targets") .build_std(&setup) .target_host() .run(); p.cargo("check -v --all-targets --profile=test") .build_std(&setup) .target_host() .run(); } 
#[cargo_test(build_std_mock)] fn doctest() { let setup = setup(); let p = project() .file( "src/lib.rs", r#" /// Doc /// ``` /// std::custom_api(); /// ``` pub fn f() {} "#, ) .build(); p.cargo("test --doc -v -Zdoctest-xcompile") .build_std(&setup) .with_stdout_contains("test src/lib.rs - f [..] ... ok") .target_host() .run(); } #[cargo_test(build_std_mock)] fn no_implicit_alloc() { // Demonstrate that alloc is not implicitly in scope. let setup = setup(); let p = project() .file( "src/lib.rs", r#" pub fn f() { let _: Vec = alloc::vec::Vec::new(); } "#, ) .build(); p.cargo("build -v") .build_std(&setup) .target_host() .with_stderr_contains("[..]use of undeclared [..]`alloc`") .with_status(101) .run(); } #[cargo_test(build_std_mock)] fn macro_expanded_shadow() { // This tests a bug caused by the previous use of `--extern` to directly // load sysroot crates. This necessitated the switch to `--sysroot` to // retain existing behavior. See // https://github.com/rust-lang/wg-cargo-std-aware/issues/40 for more // detail. let setup = setup(); let p = project() .file( "src/lib.rs", r#" macro_rules! a { () => (extern crate std as alloc;) } a!(); "#, ) .build(); p.cargo("build -v").build_std(&setup).target_host().run(); } #[cargo_test(build_std_mock)] fn ignores_incremental() { // Incremental is not really needed for std, make sure it is disabled. // Incremental also tends to have bugs that affect std libraries more than // any other crate. 
let setup = setup(); let p = project().file("src/lib.rs", "").build(); p.cargo("build") .env("CARGO_INCREMENTAL", "1") .build_std(&setup) .target_host() .run(); let incremental: Vec<_> = p .glob(format!("target/{}/debug/incremental/*", rustc_host())) .map(|e| e.unwrap()) .collect(); assert_eq!(incremental.len(), 1); assert!(incremental[0] .file_name() .unwrap() .to_str() .unwrap() .starts_with("foo-")); } #[cargo_test(build_std_mock)] fn cargo_config_injects_compiler_builtins() { let setup = setup(); let p = project() .file( "src/lib.rs", r#" #![no_std] pub fn foo() { assert_eq!(u8::MIN, 0); } "#, ) .file( ".cargo/config.toml", r#" [unstable] build-std = ['core'] "#, ) .build(); let mut build = p.cargo("build -v --lib"); enable_build_std(&mut build, &setup); build .target_host() .with_stderr_does_not_contain("[..]libstd[..]") .run(); } #[cargo_test(build_std_mock)] fn different_features() { let setup = setup(); let p = project() .file( "src/lib.rs", " pub fn foo() { std::conditional_function(); } ", ) .build(); p.cargo("build") .build_std(&setup) .arg("-Zbuild-std-features=feature1") .target_host() .run(); } #[cargo_test(build_std_mock)] fn no_roots() { // Checks for a bug where it would panic if there are no roots. 
let setup = setup(); let p = project().file("tests/t1.rs", "").build(); p.cargo("build") .build_std(&setup) .target_host() .with_stderr_contains("[FINISHED] [..]") .run(); } #[cargo_test(build_std_mock)] fn proc_macro_only() { // Checks for a bug where it would panic if building a proc-macro only let setup = setup(); let p = project() .file( "Cargo.toml", r#" [package] name = "pm" version = "0.1.0" [lib] proc-macro = true "#, ) .file("src/lib.rs", "") .build(); p.cargo("build") .build_std(&setup) .target_host() .with_stderr_contains("[FINISHED] [..]") .run(); } #[cargo_test(build_std_mock)] fn fetch() { let setup = setup(); let p = project().file("src/main.rs", "fn main() {}").build(); p.cargo("fetch") .build_std(&setup) .target_host() .with_stderr_contains("[DOWNLOADED] [..]") .run(); p.cargo("build") .build_std(&setup) .target_host() .with_stderr_does_not_contain("[DOWNLOADED] [..]") .run(); } cargo-0.66.0/tests/testsuite/test.rs000066400000000000000000003525601432416201200174240ustar00rootroot00000000000000//! Tests for the `cargo test` command. use cargo_test_support::paths::CargoPathExt; use cargo_test_support::registry::Package; use cargo_test_support::{ basic_bin_manifest, basic_lib_manifest, basic_manifest, cargo_exe, project, }; use cargo_test_support::{cross_compile, paths}; use cargo_test_support::{rustc_host, rustc_host_env, sleep_ms}; use std::fs; #[cargo_test] fn cargo_test_simple() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file( "src/main.rs", r#" fn hello() -> &'static str { "hello" } pub fn main() { println!("{}", hello()) } #[test] fn test_hello() { assert_eq!(hello(), "hello") } "#, ) .build(); p.cargo("build").run(); assert!(p.bin("foo").is_file()); p.process(&p.bin("foo")).with_stdout("hello\n").run(); p.cargo("test") .with_stderr( "\ [COMPILING] foo v0.5.0 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/foo-[..][EXE])", ) .with_stdout_contains("test test_hello ... 
ok") .run(); } #[cargo_test] fn cargo_test_release() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.1.0" [dependencies] bar = { path = "bar" } "#, ) .file( "src/lib.rs", r#" extern crate bar; pub fn foo() { bar::bar(); } #[test] fn test() { foo(); } "#, ) .file( "tests/test.rs", r#" extern crate foo; #[test] fn test() { foo::foo(); } "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("bar/src/lib.rs", "pub fn bar() {}") .build(); p.cargo("test -v --release") .with_stderr( "\ [COMPILING] bar v0.0.1 ([CWD]/bar) [RUNNING] [..] -C opt-level=3 [..] [COMPILING] foo v0.1.0 ([CWD]) [RUNNING] [..] -C opt-level=3 [..] [RUNNING] [..] -C opt-level=3 [..] [RUNNING] [..] -C opt-level=3 [..] [FINISHED] release [optimized] target(s) in [..] [RUNNING] `[..]target/release/deps/foo-[..][EXE]` [RUNNING] `[..]target/release/deps/test-[..][EXE]` [DOCTEST] foo [RUNNING] `rustdoc [..]--test [..]lib.rs[..]`", ) .with_stdout_contains_n("test test ... ok", 2) .with_stdout_contains("running 0 tests") .run(); } #[cargo_test] fn cargo_test_overflow_checks() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.5.0" authors = [] [[bin]] name = "foo" [profile.release] overflow-checks = true "#, ) .file( "src/foo.rs", r#" use std::panic; pub fn main() { let r = panic::catch_unwind(|| { [1, i32::MAX].iter().sum::(); }); assert!(r.is_err()); } "#, ) .build(); p.cargo("build --release").run(); assert!(p.release_bin("foo").is_file()); p.process(&p.release_bin("foo")).with_stdout("").run(); } #[cargo_test] fn cargo_test_quiet_with_harness() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [[test]] name = "foo" path = "src/foo.rs" harness = true "#, ) .file( "src/foo.rs", r#" fn main() {} #[test] fn test_hello() {} "#, ) .build(); p.cargo("test -q") .with_stdout( " running 1 test . test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..] 
", ) .with_stderr("") .run(); } #[cargo_test] fn cargo_test_quiet_no_harness() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [[bin]] name = "foo" test = false [[test]] name = "foo" path = "src/main.rs" harness = false "#, ) .file( "src/main.rs", r#" fn main() {} #[test] fn test_hello() {} "#, ) .build(); p.cargo("test -q").with_stdout("").with_stderr("").run(); } #[cargo_test] fn cargo_doc_test_quiet() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] "#, ) .file( "src/lib.rs", r#" /// ``` /// let result = foo::add(2, 3); /// assert_eq!(result, 5); /// ``` pub fn add(a: i32, b: i32) -> i32 { a + b } /// ``` /// let result = foo::div(10, 2); /// assert_eq!(result, 5); /// ``` /// /// # Panics /// /// The function panics if the second argument is zero. /// /// ```rust,should_panic /// // panics on division by zero /// foo::div(10, 0); /// ``` pub fn div(a: i32, b: i32) -> i32 { if b == 0 { panic!("Divide-by-zero error"); } a / b } #[test] fn test_hello() {} "#, ) .build(); p.cargo("test -q") .with_stdout( " running 1 test . test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..] running 3 tests ... test result: ok. 3 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..] ", ) .with_stderr("") .run(); } #[cargo_test] fn cargo_test_verbose() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file( "src/main.rs", r#" fn main() {} #[test] fn test_hello() {} "#, ) .build(); p.cargo("test -v hello") .with_stderr( "\ [COMPILING] foo v0.5.0 ([CWD]) [RUNNING] `rustc [..] src/main.rs [..]` [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] `[CWD]/target/debug/deps/foo-[..] hello` ", ) .with_stdout_contains("test test_hello ... 
ok") .run(); } #[cargo_test] fn many_similar_names() { let p = project() .file( "src/lib.rs", " pub fn foo() {} #[test] fn lib_test() {} ", ) .file( "src/main.rs", " extern crate foo; fn main() {} #[test] fn bin_test() { foo::foo() } ", ) .file( "tests/foo.rs", r#" extern crate foo; #[test] fn test_test() { foo::foo() } "#, ) .build(); p.cargo("test -v") .with_stdout_contains("test bin_test ... ok") .with_stdout_contains("test lib_test ... ok") .with_stdout_contains("test test_test ... ok") .run(); } #[cargo_test] fn cargo_test_failing_test_in_bin() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file( "src/main.rs", r#" fn hello() -> &'static str { "hello" } pub fn main() { println!("{}", hello()) } #[test] fn test_hello() { assert_eq!(hello(), "nope") } "#, ) .build(); p.cargo("build").run(); assert!(p.bin("foo").is_file()); p.process(&p.bin("foo")).with_stdout("hello\n").run(); p.cargo("test") .with_stderr( "\ [COMPILING] foo v0.5.0 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/foo-[..][EXE]) [ERROR] test failed, to rerun pass `--bin foo`", ) .with_stdout_contains( " running 1 test test test_hello ... 
FAILED failures: ---- test_hello stdout ---- [..]thread '[..]' panicked at 'assertion failed:[..]", ) .with_stdout_contains("[..]`(left == right)`[..]") .with_stdout_contains("[..]left: `\"hello\"`,[..]") .with_stdout_contains("[..]right: `\"nope\"`[..]") .with_stdout_contains("[..]src/main.rs:12[..]") .with_stdout_contains( "\ failures: test_hello ", ) .with_status(101) .run(); } #[cargo_test] fn cargo_test_failing_test_in_test() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/main.rs", r#"pub fn main() { println!("hello"); }"#) .file( "tests/footest.rs", "#[test] fn test_hello() { assert!(false) }", ) .build(); p.cargo("build").run(); assert!(p.bin("foo").is_file()); p.process(&p.bin("foo")).with_stdout("hello\n").run(); p.cargo("test") .with_stderr( "\ [COMPILING] foo v0.5.0 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/foo-[..][EXE]) [RUNNING] [..] (target/debug/deps/footest-[..][EXE]) [ERROR] test failed, to rerun pass `--test footest`", ) .with_stdout_contains("running 0 tests") .with_stdout_contains( "\ running 1 test test test_hello ... FAILED failures: ---- test_hello stdout ---- [..]thread '[..]' panicked at 'assertion failed: false', \ tests/footest.rs:1[..] ", ) .with_stdout_contains( "\ failures: test_hello ", ) .with_status(101) .run(); } #[cargo_test] fn cargo_test_failing_test_in_lib() { let p = project() .file("Cargo.toml", &basic_lib_manifest("foo")) .file("src/lib.rs", "#[test] fn test_hello() { assert!(false) }") .build(); p.cargo("test") .with_stderr( "\ [COMPILING] foo v0.5.0 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/foo-[..][EXE]) [ERROR] test failed, to rerun pass `--lib`", ) .with_stdout_contains( "\ test test_hello ... FAILED failures: ---- test_hello stdout ---- [..]thread '[..]' panicked at 'assertion failed: false', \ src/lib.rs:1[..] 
", ) .with_stdout_contains( "\ failures: test_hello ", ) .with_status(101) .run(); } #[cargo_test] fn test_with_lib_dep() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [[bin]] name = "baz" path = "src/main.rs" "#, ) .file( "src/lib.rs", r#" /// /// ```rust /// extern crate foo; /// fn main() { /// println!("{:?}", foo::foo()); /// } /// ``` /// pub fn foo(){} #[test] fn lib_test() {} "#, ) .file( "src/main.rs", " #[allow(unused_extern_crates)] extern crate foo; fn main() {} #[test] fn bin_test() {} ", ) .build(); p.cargo("test") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/foo-[..][EXE]) [RUNNING] [..] (target/debug/deps/baz-[..][EXE]) [DOCTEST] foo", ) .with_stdout_contains("test lib_test ... ok") .with_stdout_contains("test bin_test ... ok") .with_stdout_contains_n("test [..] ... ok", 3) .run(); } #[cargo_test] fn test_with_deep_lib_dep() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" "#, ) .file( "src/lib.rs", " #[cfg(test)] extern crate bar; /// ``` /// foo::foo(); /// ``` pub fn foo() {} #[test] fn bar_test() { bar::bar(); } ", ) .build(); let _p2 = project() .at("bar") .file("Cargo.toml", &basic_manifest("bar", "0.0.1")) .file("src/lib.rs", "pub fn bar() {} #[test] fn foo_test() {}") .build(); p.cargo("test") .with_stderr( "\ [COMPILING] bar v0.0.1 ([..]) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target[..]) [DOCTEST] foo", ) .with_stdout_contains("test bar_test ... ok") .with_stdout_contains_n("test [..] ... 
ok", 2) .run(); } #[cargo_test] fn external_test_explicit() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [[test]] name = "test" path = "src/test.rs" "#, ) .file( "src/lib.rs", r#" pub fn get_hello() -> &'static str { "Hello" } #[test] fn internal_test() {} "#, ) .file( "src/test.rs", r#" extern crate foo; #[test] fn external_test() { assert_eq!(foo::get_hello(), "Hello") } "#, ) .build(); p.cargo("test") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/foo-[..][EXE]) [RUNNING] [..] (target/debug/deps/test-[..][EXE]) [DOCTEST] foo", ) .with_stdout_contains("test internal_test ... ok") .with_stdout_contains("test external_test ... ok") .with_stdout_contains("running 0 tests") .run(); } #[cargo_test] fn external_test_named_test() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [[test]] name = "test" "#, ) .file("src/lib.rs", "") .file("tests/test.rs", "#[test] fn foo() {}") .build(); p.cargo("test").run(); } #[cargo_test] fn external_test_implicit() { let p = project() .file( "src/lib.rs", r#" pub fn get_hello() -> &'static str { "Hello" } #[test] fn internal_test() {} "#, ) .file( "tests/external.rs", r#" extern crate foo; #[test] fn external_test() { assert_eq!(foo::get_hello(), "Hello") } "#, ) .build(); p.cargo("test") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/foo-[..][EXE]) [RUNNING] [..] (target/debug/deps/external-[..][EXE]) [DOCTEST] foo", ) .with_stdout_contains("test internal_test ... ok") .with_stdout_contains("test external_test ... 
ok") .with_stdout_contains("running 0 tests") .run(); } #[cargo_test] fn dont_run_examples() { let p = project() .file("src/lib.rs", "") .file( "examples/dont-run-me-i-will-fail.rs", r#" fn main() { panic!("Examples should not be run by 'cargo test'"); } "#, ) .build(); p.cargo("test").run(); } #[cargo_test] fn pass_through_command_line() { let p = project() .file( "src/lib.rs", " #[test] fn foo() {} #[test] fn bar() {} ", ) .build(); p.cargo("test bar") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/foo-[..][EXE]) ", ) .with_stdout_contains("running 1 test") .with_stdout_contains("test bar ... ok") .run(); p.cargo("test foo") .with_stderr( "\ [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/foo-[..][EXE]) ", ) .with_stdout_contains("running 1 test") .with_stdout_contains("test foo ... ok") .run(); } // Regression test for running cargo-test twice with // tests in an rlib #[cargo_test] fn cargo_test_twice() { let p = project() .file("Cargo.toml", &basic_lib_manifest("foo")) .file( "src/foo.rs", r#" #![crate_type = "rlib"] #[test] fn dummy_test() { } "#, ) .build(); for _ in 0..2 { p.cargo("test").run(); } } #[cargo_test] fn lib_bin_same_name() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [lib] name = "foo" [[bin]] name = "foo" "#, ) .file("src/lib.rs", "#[test] fn lib_test() {}") .file( "src/main.rs", " #[allow(unused_extern_crates)] extern crate foo; #[test] fn bin_test() {} ", ) .build(); p.cargo("test") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/foo-[..][EXE]) [RUNNING] [..] (target/debug/deps/foo-[..][EXE]) [DOCTEST] foo", ) .with_stdout_contains_n("test [..] ... 
ok", 2) .with_stdout_contains("running 0 tests") .run(); } #[cargo_test] fn lib_with_standard_name() { let p = project() .file("Cargo.toml", &basic_manifest("syntax", "0.0.1")) .file( "src/lib.rs", " /// ``` /// syntax::foo(); /// ``` pub fn foo() {} #[test] fn foo_test() {} ", ) .file( "tests/test.rs", " extern crate syntax; #[test] fn test() { syntax::foo() } ", ) .build(); p.cargo("test") .with_stderr( "\ [COMPILING] syntax v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/syntax-[..][EXE]) [RUNNING] [..] (target/debug/deps/test-[..][EXE]) [DOCTEST] syntax", ) .with_stdout_contains("test foo_test ... ok") .with_stdout_contains("test test ... ok") .with_stdout_contains_n("test [..] ... ok", 3) .run(); } #[cargo_test] fn lib_with_standard_name2() { let p = project() .file( "Cargo.toml", r#" [package] name = "syntax" version = "0.0.1" authors = [] [lib] name = "syntax" test = false doctest = false "#, ) .file("src/lib.rs", "pub fn foo() {}") .file( "src/main.rs", " extern crate syntax; fn main() {} #[test] fn test() { syntax::foo() } ", ) .build(); p.cargo("test") .with_stderr( "\ [COMPILING] syntax v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/syntax-[..][EXE])", ) .with_stdout_contains("test test ... ok") .run(); } #[cargo_test] fn lib_without_name() { let p = project() .file( "Cargo.toml", r#" [package] name = "syntax" version = "0.0.1" authors = [] [lib] test = false doctest = false "#, ) .file("src/lib.rs", "pub fn foo() {}") .file( "src/main.rs", " extern crate syntax; fn main() {} #[test] fn test() { syntax::foo() } ", ) .build(); p.cargo("test") .with_stderr( "\ [COMPILING] syntax v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/syntax-[..][EXE])", ) .with_stdout_contains("test test ... 
ok") .run(); } #[cargo_test] fn bin_without_name() { let p = project() .file( "Cargo.toml", r#" [package] name = "syntax" version = "0.0.1" authors = [] [lib] test = false doctest = false [[bin]] path = "src/main.rs" "#, ) .file("src/lib.rs", "pub fn foo() {}") .file( "src/main.rs", " extern crate syntax; fn main() {} #[test] fn test() { syntax::foo() } ", ) .build(); p.cargo("test") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: binary target bin.name is required", ) .run(); } #[cargo_test] fn bench_without_name() { let p = project() .file( "Cargo.toml", r#" [package] name = "syntax" version = "0.0.1" authors = [] [lib] test = false doctest = false [[bench]] path = "src/bench.rs" "#, ) .file("src/lib.rs", "pub fn foo() {}") .file( "src/main.rs", " extern crate syntax; fn main() {} #[test] fn test() { syntax::foo() } ", ) .file( "src/bench.rs", " #![feature(test)] extern crate syntax; extern crate test; #[bench] fn external_bench(_b: &mut test::Bencher) {} ", ) .build(); p.cargo("test") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: benchmark target bench.name is required", ) .run(); } #[cargo_test] fn test_without_name() { let p = project() .file( "Cargo.toml", r#" [package] name = "syntax" version = "0.0.1" authors = [] [lib] test = false doctest = false [[test]] path = "src/test.rs" "#, ) .file( "src/lib.rs", r#" pub fn foo() {} pub fn get_hello() -> &'static str { "Hello" } "#, ) .file( "src/main.rs", " extern crate syntax; fn main() {} #[test] fn test() { syntax::foo() } ", ) .file( "src/test.rs", r#" extern crate syntax; #[test] fn external_test() { assert_eq!(syntax::get_hello(), "Hello") } "#, ) .build(); p.cargo("test") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: test target test.name is required", ) .run(); } #[cargo_test] fn example_without_name() { let p = project() .file( "Cargo.toml", r#" [package] name = "syntax" version = 
"0.0.1" authors = [] [lib] test = false doctest = false [[example]] path = "examples/example.rs" "#, ) .file("src/lib.rs", "pub fn foo() {}") .file( "src/main.rs", " extern crate syntax; fn main() {} #[test] fn test() { syntax::foo() } ", ) .file( "examples/example.rs", r#" extern crate syntax; fn main() { println!("example1"); } "#, ) .build(); p.cargo("test") .with_status(101) .with_stderr( "\ [ERROR] failed to parse manifest at `[..]` Caused by: example target example.name is required", ) .run(); } #[cargo_test] fn bin_there_for_integration() { let p = project() .file( "src/main.rs", " fn main() { std::process::exit(101); } #[test] fn main_test() {} ", ) .file( "tests/foo.rs", r#" use std::process::Command; #[test] fn test_test() { let status = Command::new("target/debug/foo").status().unwrap(); assert_eq!(status.code(), Some(101)); } "#, ) .build(); p.cargo("test -v") .with_stdout_contains("test main_test ... ok") .with_stdout_contains("test test_test ... ok") .run(); } #[cargo_test] fn test_dylib() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [lib] name = "foo" crate_type = ["dylib"] [dependencies.bar] path = "bar" "#, ) .file( "src/lib.rs", r#" extern crate bar as the_bar; pub fn bar() { the_bar::baz(); } #[test] fn foo() { bar(); } "#, ) .file( "tests/test.rs", r#" extern crate foo as the_foo; #[test] fn foo() { the_foo::bar(); } "#, ) .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [lib] name = "bar" crate_type = ["dylib"] "#, ) .file("bar/src/lib.rs", "pub fn baz() {}") .build(); p.cargo("test") .with_stderr( "\ [COMPILING] bar v0.0.1 ([CWD]/bar) [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/foo-[..][EXE]) [RUNNING] [..] (target/debug/deps/test-[..][EXE])", ) .with_stdout_contains_n("test foo ... 
ok", 2) .run(); p.root().move_into_the_past(); p.cargo("test") .with_stderr( "\ [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/foo-[..][EXE]) [RUNNING] [..] (target/debug/deps/test-[..][EXE])", ) .with_stdout_contains_n("test foo ... ok", 2) .run(); } #[cargo_test] fn test_twice_with_build_cmd() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" "#, ) .file("build.rs", "fn main() {}") .file("src/lib.rs", "#[test] fn foo() {}") .build(); p.cargo("test") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/foo-[..][EXE]) [DOCTEST] foo", ) .with_stdout_contains("test foo ... ok") .with_stdout_contains("running 0 tests") .run(); p.cargo("test") .with_stderr( "\ [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/foo-[..][EXE]) [DOCTEST] foo", ) .with_stdout_contains("test foo ... ok") .with_stdout_contains("running 0 tests") .run(); } #[cargo_test] fn test_then_build() { let p = project().file("src/lib.rs", "#[test] fn foo() {}").build(); p.cargo("test") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/foo-[..][EXE]) [DOCTEST] foo", ) .with_stdout_contains("test foo ... ok") .with_stdout_contains("running 0 tests") .run(); p.cargo("build").with_stdout("").run(); } #[cargo_test] fn test_no_run() { let p = project() .file("src/lib.rs", "#[test] fn foo() { panic!() }") .build(); p.cargo("test --no-run") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] 
[EXECUTABLE] unittests src/lib.rs (target/debug/deps/foo-[..][EXE]) ", ) .run(); } #[cargo_test] fn test_no_run_emit_json() { let p = project() .file("src/lib.rs", "#[test] fn foo() { panic!() }") .build(); p.cargo("test --no-run --message-format json") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn test_run_specific_bin_target() { let prj = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [[bin]] name="bin1" path="src/bin1.rs" [[bin]] name="bin2" path="src/bin2.rs" "#, ) .file("src/bin1.rs", "#[test] fn test1() { }") .file("src/bin2.rs", "#[test] fn test2() { }") .build(); prj.cargo("test --bin bin2") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/bin2-[..][EXE])", ) .with_stdout_contains("test test2 ... ok") .run(); } #[cargo_test] fn test_run_implicit_bin_target() { let prj = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [[bin]] name="mybin" path="src/mybin.rs" "#, ) .file( "src/mybin.rs", "#[test] fn test_in_bin() { } fn main() { panic!(\"Don't execute me!\"); }", ) .file("tests/mytest.rs", "#[test] fn test_in_test() { }") .file("benches/mybench.rs", "#[test] fn test_in_bench() { }") .file( "examples/myexm.rs", "#[test] fn test_in_exm() { } fn main() { panic!(\"Don't execute me!\"); }", ) .build(); prj.cargo("test --bins") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/mybin-[..][EXE])", ) .with_stdout_contains("test test_in_bin ... 
ok") .run(); } #[cargo_test] fn test_run_specific_test_target() { let prj = project() .file("src/bin/a.rs", "fn main() { }") .file("src/bin/b.rs", "#[test] fn test_b() { } fn main() { }") .file("tests/a.rs", "#[test] fn test_a() { }") .file("tests/b.rs", "#[test] fn test_b() { }") .build(); prj.cargo("test --test b") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/b-[..][EXE])", ) .with_stdout_contains("test test_b ... ok") .run(); } #[cargo_test] fn test_run_implicit_test_target() { let prj = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [[bin]] name="mybin" path="src/mybin.rs" "#, ) .file( "src/mybin.rs", "#[test] fn test_in_bin() { } fn main() { panic!(\"Don't execute me!\"); }", ) .file("tests/mytest.rs", "#[test] fn test_in_test() { }") .file("benches/mybench.rs", "#[test] fn test_in_bench() { }") .file( "examples/myexm.rs", "fn main() { compile_error!(\"Don't build me!\"); }", ) .build(); prj.cargo("test --tests") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/mybin-[..][EXE]) [RUNNING] [..] (target/debug/deps/mytest-[..][EXE])", ) .with_stdout_contains("test test_in_test ... ok") .run(); } #[cargo_test] fn test_run_implicit_bench_target() { let prj = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [[bin]] name="mybin" path="src/mybin.rs" "#, ) .file( "src/mybin.rs", "#[test] fn test_in_bin() { } fn main() { panic!(\"Don't execute me!\"); }", ) .file("tests/mytest.rs", "#[test] fn test_in_test() { }") .file("benches/mybench.rs", "#[test] fn test_in_bench() { }") .file( "examples/myexm.rs", "fn main() { compile_error!(\"Don't build me!\"); }", ) .build(); prj.cargo("test --benches") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] 
[RUNNING] [..] (target/debug/deps/mybin-[..][EXE]) [RUNNING] [..] (target/debug/deps/mybench-[..][EXE])", ) .with_stdout_contains("test test_in_bench ... ok") .run(); } #[cargo_test] fn test_run_implicit_example_target() { let prj = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [[bin]] name = "mybin" path = "src/mybin.rs" [[example]] name = "myexm1" [[example]] name = "myexm2" test = true "#, ) .file( "src/mybin.rs", "#[test] fn test_in_bin() { } fn main() { panic!(\"Don't execute me!\"); }", ) .file("tests/mytest.rs", "#[test] fn test_in_test() { }") .file("benches/mybench.rs", "#[test] fn test_in_bench() { }") .file( "examples/myexm1.rs", "#[test] fn test_in_exm() { } fn main() { panic!(\"Don't execute me!\"); }", ) .file( "examples/myexm2.rs", "#[test] fn test_in_exm() { } fn main() { panic!(\"Don't execute me!\"); }", ) .build(); // Compiles myexm1 as normal, but does not run it. prj.cargo("test -v") .with_stderr_contains("[RUNNING] `rustc [..]myexm1.rs [..]--crate-type bin[..]") .with_stderr_contains("[RUNNING] `rustc [..]myexm2.rs [..]--test[..]") .with_stderr_does_not_contain("[RUNNING] [..]myexm1-[..]") .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm2-[..]") .run(); // Only tests myexm2. prj.cargo("test --tests") .with_stderr_does_not_contain("[RUNNING] [..]myexm1-[..]") .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm2-[..]") .run(); // Tests all examples. prj.cargo("test --examples") .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm1-[..]") .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm2-[..]") .run(); // Test an example, even without `test` set. prj.cargo("test --example myexm1") .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm1-[..]") .run(); // Tests all examples. 
prj.cargo("test --all-targets") .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm1-[..]") .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm2-[..]") .run(); } #[cargo_test] fn test_filtered_excludes_compiling_examples() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [[bin]] name = "mybin" test = false "#, ) .file( "src/lib.rs", "#[cfg(test)] mod tests { #[test] fn test_in_lib() { } }", ) .file( "src/bin/mybin.rs", "#[test] fn test_in_bin() { } fn main() { panic!(\"Don't execute me!\"); }", ) .file("tests/mytest.rs", "#[test] fn test_in_test() { }") .file( "benches/mybench.rs", "#[test] fn test_in_bench() { assert!(false) }", ) .file( "examples/myexm1.rs", "#[test] fn test_in_exm() { assert!(false) } fn main() { panic!(\"Don't execute me!\"); }", ) .build(); p.cargo("test -v test_in_") .with_stdout( " running 1 test test tests::test_in_lib ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..] running 1 test test test_in_test ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..] ", ) .with_stderr_unordered( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc --crate-name foo src/lib.rs [..] --crate-type lib [..]` [RUNNING] `rustc --crate-name foo src/lib.rs [..] --test [..]` [RUNNING] `rustc --crate-name mybin src/bin/mybin.rs [..] --crate-type bin [..]` [RUNNING] `rustc --crate-name mytest tests/mytest.rs [..] --test [..]` [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] `[CWD]/target/debug/deps/foo-[..] test_in_` [RUNNING] `[CWD]/target/debug/deps/mytest-[..] test_in_` ", ) .with_stderr_does_not_contain("[RUNNING][..]rustc[..]myexm1[..]") .with_stderr_does_not_contain("[RUNNING][..]deps/mybin-[..] 
// NOTE(review): this chunk was recovered from a whitespace-collapsed archive
// dump. Code tokens are preserved byte-for-byte; indentation and the line
// breaks inside expected-output string literals are reconstructed from the
// `\`-continuation markers and cargo-testsuite conventions — confirm against
// the upstream cargo 0.66 `tests/testsuite/test.rs` before relying on exact
// whitespace inside fixture strings.

test_in_")
        .run();
}

// A `[[test]]` target with `harness = false` runs as a plain binary:
// no libtest harness output, just the test binary itself.
#[cargo_test]
fn test_no_harness() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.0.1"
                authors = []

                [[bin]]
                name = "foo"
                test = false

                [[test]]
                name = "bar"
                path = "foo.rs"
                harness = false
            "#,
        )
        .file("src/main.rs", "fn main() {}")
        .file("foo.rs", "fn main() {}")
        .build();

    p.cargo("test -- --nocapture")
        .with_stderr(
            "\
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
[RUNNING] [..] (target/debug/deps/bar-[..][EXE])
",
        )
        .run();
}

// `cargo test -p <pkg>` compiles and tests only the selected path
// dependency; a bare `cargo test` tests only the workspace root.
#[cargo_test]
fn selective_testing() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.0.1"
                authors = []

                [dependencies.d1]
                path = "d1"
                [dependencies.d2]
                path = "d2"

                [lib]
                name = "foo"
                doctest = false
            "#,
        )
        .file("src/lib.rs", "")
        .file(
            "d1/Cargo.toml",
            r#"
                [package]
                name = "d1"
                version = "0.0.1"
                authors = []

                [lib]
                name = "d1"
                doctest = false
            "#,
        )
        .file("d1/src/lib.rs", "")
        .file(
            "d1/src/main.rs",
            "#[allow(unused_extern_crates)] extern crate d1; fn main() {}",
        )
        .file(
            "d2/Cargo.toml",
            r#"
                [package]
                name = "d2"
                version = "0.0.1"
                authors = []

                [lib]
                name = "d2"
                doctest = false
            "#,
        )
        .file("d2/src/lib.rs", "")
        .file(
            "d2/src/main.rs",
            "#[allow(unused_extern_crates)] extern crate d2; fn main() {}",
        );
    let p = p.build();

    println!("d1");
    p.cargo("test -p d1")
        .with_stderr(
            "\
[COMPILING] d1 v0.0.1 ([CWD]/d1)
[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
[RUNNING] [..] (target/debug/deps/d1-[..][EXE])
[RUNNING] [..] (target/debug/deps/d1-[..][EXE])",
        )
        .with_stdout_contains_n("running 0 tests", 2)
        .run();

    println!("d2");
    p.cargo("test -p d2")
        .with_stderr(
            "\
[COMPILING] d2 v0.0.1 ([CWD]/d2)
[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
[RUNNING] [..] (target/debug/deps/d2-[..][EXE])
[RUNNING] [..] (target/debug/deps/d2-[..][EXE])",
        )
        .with_stdout_contains_n("running 0 tests", 2)
        .run();

    println!("whole");
    p.cargo("test")
        .with_stderr(
            "\
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
[RUNNING] [..] (target/debug/deps/foo-[..][EXE])",
        )
        .with_stdout_contains("running 0 tests")
        .run();
}

// A dev-dependency may depend back on the package under test without
// forming a true cycle (dev-deps are only needed for the test units).
#[cargo_test]
fn almost_cyclic_but_not_quite() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.0.1"
                authors = []

                [dev-dependencies.b]
                path = "b"
                [dev-dependencies.c]
                path = "c"
            "#,
        )
        .file(
            "src/lib.rs",
            r#"
                #[cfg(test)] extern crate b;
                #[cfg(test)] extern crate c;
            "#,
        )
        .file(
            "b/Cargo.toml",
            r#"
                [package]
                name = "b"
                version = "0.0.1"
                authors = []

                [dependencies.foo]
                path = ".."
            "#,
        )
        .file(
            "b/src/lib.rs",
            r#"
                #[allow(unused_extern_crates)]
                extern crate foo;
            "#,
        )
        .file("c/Cargo.toml", &basic_manifest("c", "0.0.1"))
        .file("c/src/lib.rs", "")
        .build();

    p.cargo("build").run();
    p.cargo("test").run();
}

// Selective testing of a dependency still works after a plain `build`
// (with timestamps pushed into the past to exercise freshness checks).
#[cargo_test]
fn build_then_selective_test() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.0.1"
                authors = []

                [dependencies.b]
                path = "b"
            "#,
        )
        .file(
            "src/lib.rs",
            "#[allow(unused_extern_crates)] extern crate b;",
        )
        .file(
            "src/main.rs",
            r#"
                #[allow(unused_extern_crates)]
                extern crate b;
                #[allow(unused_extern_crates)]
                extern crate foo;
                fn main() {}
            "#,
        )
        .file("b/Cargo.toml", &basic_manifest("b", "0.0.1"))
        .file("b/src/lib.rs", "")
        .build();

    p.cargo("build").run();
    p.root().move_into_the_past();
    p.cargo("test -p b").run();
}

// An example may use a dev-dependency, and `cargo run --example` links it.
// The dep's macro tower just makes its compilation measurably slow.
#[cargo_test]
fn example_dev_dep() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [project]
                name = "foo"
                version = "0.0.1"
                authors = []

                [dev-dependencies.bar]
                path = "bar"
            "#,
        )
        .file("src/lib.rs", "")
        .file("examples/e1.rs", "extern crate bar; fn main() {}")
        .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
        .file(
            "bar/src/lib.rs",
            r#"
                // make sure this file takes awhile to compile
                macro_rules! f0( () => (1) );
                macro_rules! f1( () => ({(f0!()) + (f0!())}) );
                macro_rules! f2( () => ({(f1!()) + (f1!())}) );
                macro_rules! f3( () => ({(f2!()) + (f2!())}) );
                macro_rules! f4( () => ({(f3!()) + (f3!())}) );
                macro_rules! f5( () => ({(f4!()) + (f4!())}) );
                macro_rules! f6( () => ({(f5!()) + (f5!())}) );
                macro_rules! f7( () => ({(f6!()) + (f6!())}) );
                macro_rules! f8( () => ({(f7!()) + (f7!())}) );
                pub fn bar() {
                    f8!();
                }
            "#,
        )
        .build();
    p.cargo("test").run();
    p.cargo("run --example e1 --release -v").run();
}

// `-p d1` runs the dependency's doctests only; the root's (deliberately
// invalid) doctest is never compiled.
#[cargo_test]
fn selective_testing_with_docs() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.0.1"
                authors = []

                [dependencies.d1]
                path = "d1"
            "#,
        )
        .file(
            "src/lib.rs",
            r#"
                /// ```
                /// not valid rust
                /// ```
                pub fn foo() {}
            "#,
        )
        .file(
            "d1/Cargo.toml",
            r#"
                [package]
                name = "d1"
                version = "0.0.1"
                authors = []

                [lib]
                name = "d1"
                path = "d1.rs"
            "#,
        )
        .file("d1/d1.rs", "");
    let p = p.build();

    p.cargo("test -p d1")
        .with_stderr(
            "\
[COMPILING] d1 v0.0.1 ([CWD]/d1)
[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
[RUNNING] [..] (target/debug/deps/d1[..][EXE])
[DOCTEST] d1",
        )
        .with_stdout_contains_n("running 0 tests", 2)
        .run();
}

// A bin and an example with the same name don't clobber each other:
// the example lands in target/debug/examples, the bin in target/debug.
#[cargo_test]
fn example_bin_same_name() {
    let p = project()
        .file("src/bin/foo.rs", r#"fn main() { println!("bin"); }"#)
        .file("examples/foo.rs", r#"fn main() { println!("example"); }"#)
        .build();

    p.cargo("test --no-run -v")
        .with_stderr(
            "\
[COMPILING] foo v0.0.1 ([CWD])
[RUNNING] `rustc [..]`
[RUNNING] `rustc [..]`
[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
[EXECUTABLE] `[..]/target/debug/deps/foo-[..][EXE]`
",
        )
        .run();

    assert!(!p.bin("foo").is_file());
    assert!(p.bin("examples/foo").is_file());

    p.process(&p.bin("examples/foo"))
        .with_stdout("example\n")
        .run();

    p.cargo("run")
        .with_stderr(
            "\
[COMPILING] foo v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] [..]",
        )
        .with_stdout("bin")
        .run();
    assert!(p.bin("foo").is_file());
}

// Running `cargo test` twice keeps the built example binary around.
#[cargo_test]
fn test_with_example_twice() {
    let p = project()
        .file("src/bin/foo.rs", r#"fn main() { println!("bin"); }"#)
        .file("examples/foo.rs", r#"fn main() { println!("example"); }"#)
        .build();

    println!("first");
    p.cargo("test -v").run();
    assert!(p.bin("examples/foo").is_file());
    println!("second");
    p.cargo("test -v").run();
    assert!(p.bin("examples/foo").is_file());
}

// Even with lib tests/doctests disabled, `cargo test` still builds
// examples against dev-dependencies.
#[cargo_test]
fn example_with_dev_dep() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.0.1"
                authors = []

                [lib]
                name = "foo"
                test = false
                doctest = false

                [dev-dependencies.a]
                path = "a"
            "#,
        )
        .file("src/lib.rs", "")
        .file(
            "examples/ex.rs",
            "#[allow(unused_extern_crates)] extern crate a; fn main() {}",
        )
        .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
        .file("a/src/lib.rs", "")
        .build();

    p.cargo("test -v")
        .with_stderr(
            "\
[..]
[..]
[..]
[..]
[RUNNING] `rustc --crate-name ex [..] --extern a=[..]`
[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
",
        )
        .run();
}

// `cargo test` must not delete the binary produced by a prior `build`.
#[cargo_test]
fn bin_is_preserved() {
    let p = project()
        .file("src/lib.rs", "")
        .file("src/main.rs", "fn main() {}")
        .build();

    p.cargo("build -v").run();
    assert!(p.bin("foo").is_file());

    println!("test");
    p.cargo("test -v").run();
    assert!(p.bin("foo").is_file());
}

// `run --example`/`run --bin` with a nonexistent target name reports a
// clean error instead of panicking.
#[cargo_test]
fn bad_example() {
    let p = project().file("src/lib.rs", "");
    let p = p.build();

    p.cargo("run --example foo")
        .with_status(101)
        .with_stderr(
            "\
[ERROR] no example target named `foo`.
",
        )
        .run();
    p.cargo("run --bin foo")
        .with_status(101)
        .with_stderr(
            "\
[ERROR] no bin target named `foo`.
",
        )
        .run();
}

// Doctests see the feature flags passed on the command line.
#[cargo_test]
fn doctest_feature() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.0.1"
                authors = []

                [features]
                bar = []
            "#,
        )
        .file(
            "src/lib.rs",
            r#"
                /// ```rust
                /// assert_eq!(foo::foo(), 1);
                /// ```
                #[cfg(feature = "bar")]
                pub fn foo() -> i32 { 1 }
            "#,
        )
        .build();

    p.cargo("test --features bar")
        .with_stderr(
            "\
[COMPILING] foo [..]
[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
[RUNNING] [..] (target/debug/deps/foo[..][EXE])
[DOCTEST] foo",
        )
        .with_stdout_contains("running 0 tests")
        .with_stdout_contains("test [..] ... ok")
        .run();
}

// Doctests refer to a `foo-bar` package via the `foo_bar` crate name.
#[cargo_test]
fn dashes_to_underscores() {
    let p = project()
        .file("Cargo.toml", &basic_manifest("foo-bar", "0.0.1"))
        .file(
            "src/lib.rs",
            r#"
                /// ```
                /// assert_eq!(foo_bar::foo(), 1);
                /// ```
                pub fn foo() -> i32 { 1 }
            "#,
        )
        .build();

    p.cargo("test -v").run();
}

// Doctests can `extern crate` a dev-dependency.
#[cargo_test]
fn doctest_dev_dep() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.0.1"
                authors = []

                [dev-dependencies]
                b = { path = "b" }
            "#,
        )
        .file(
            "src/lib.rs",
            r#"
                /// ```
                /// extern crate b;
                /// ```
                pub fn foo() {}
            "#,
        )
        .file("b/Cargo.toml", &basic_manifest("b", "0.0.1"))
        .file("b/src/lib.rs", "")
        .build();

    p.cargo("test -v").run();
}

// `--test=<name>` runs only that integration test — the lib's doctest
// (which would fail to compile, since `b` doesn't exist) is skipped.
#[cargo_test]
fn filter_no_doc_tests() {
    let p = project()
        .file(
            "src/lib.rs",
            r#"
                /// ```
                /// extern crate b;
                /// ```
                pub fn foo() {}
            "#,
        )
        .file("tests/foo.rs", "")
        .build();

    p.cargo("test --test=foo")
        .with_stderr(
            "\
[COMPILING] foo v0.0.1 ([..])
[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
[RUNNING] [..] (target/debug/deps/foo[..][EXE])",
        )
        .with_stdout_contains("running 0 tests")
        .run();
}

// A lib that is both rlib and dylib still gets its doctests run
// (the rlib is used for doctest linking).
#[cargo_test]
fn dylib_doctest() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.0.1"
                authors = []

                [lib]
                name = "foo"
                crate-type = ["rlib", "dylib"]
                test = false
            "#,
        )
        .file(
            "src/lib.rs",
            r#"
                /// ```
                /// foo::foo();
                /// ```
                pub fn foo() {}
            "#,
        )
        .build();

    p.cargo("test")
        .with_stderr(
            "\
[COMPILING] foo v0.0.1 ([..])
[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
[DOCTEST] foo",
        )
        .with_stdout_contains("test [..] ... ok")
        .run();
}

#[cargo_test]
fn dylib_doctest2() {
    // Can't doc-test dylibs, as they're statically linked together.
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.0.1"
                authors = []

                [lib]
                name = "foo"
                crate-type = ["dylib"]
                test = false
            "#,
        )
        .file(
            "src/lib.rs",
            r#"
                /// ```
                /// foo::foo();
                /// ```
                pub fn foo() {}
            "#,
        )
        .build();

    // No output at all: the dylib-only lib produces no doctests.
    p.cargo("test").with_stdout("").run();
}

// A dev-dep cycle (bar -> foo, foo dev-deps bar) still allows foo's
// module-level doctest to `extern crate bar`.
#[cargo_test]
fn cyclic_dev_dep_doc_test() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.0.1"
                authors = []

                [dev-dependencies]
                bar = { path = "bar" }
            "#,
        )
        .file(
            "src/lib.rs",
            r#"
                //! ```
                //! extern crate bar;
                //! ```
            "#,
        )
        .file(
            "bar/Cargo.toml",
            r#"
                [package]
                name = "bar"
                version = "0.0.1"
                authors = []

                [dependencies]
                foo = { path = ".." }
            "#,
        )
        .file(
            "bar/src/lib.rs",
            r#"
                #[allow(unused_extern_crates)]
                extern crate foo;
            "#,
        )
        .build();
    p.cargo("test")
        .with_stderr(
            "\
[COMPILING] foo v0.0.1 ([..])
[COMPILING] bar v0.0.1 ([..])
[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
[RUNNING] [..] (target/debug/deps/foo[..][EXE])
[DOCTEST] foo",
        )
        .with_stdout_contains("running 0 tests")
        .with_stdout_contains("test [..] ... ok")
        .run();
}

// A dev-dependency with a build script is built (and its script run)
// when testing the downstream package's examples.
#[cargo_test]
fn dev_dep_with_build_script() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.0.1"
                authors = []

                [dev-dependencies]
                bar = { path = "bar" }
            "#,
        )
        .file("src/lib.rs", "")
        .file("examples/foo.rs", "fn main() {}")
        .file(
            "bar/Cargo.toml",
            r#"
                [package]
                name = "bar"
                version = "0.0.1"
                authors = []
                build = "build.rs"
            "#,
        )
        .file("bar/src/lib.rs", "")
        .file("bar/build.rs", "fn main() {}")
        .build();
    p.cargo("test").run();
}

// `--no-fail-fast` keeps running the remaining test targets (and the
// doctests) after one integration test fails, then summarizes failures.
#[cargo_test]
fn no_fail_fast() {
    let p = project()
        .file(
            "src/lib.rs",
            r#"
                pub fn add_one(x: i32) -> i32{
                    x + 1
                }

                /// ```rust
                /// use foo::sub_one;
                /// assert_eq!(sub_one(101), 100);
                /// ```
                pub fn sub_one(x: i32) -> i32{
                    x - 1
                }
            "#,
        )
        .file(
            "tests/test_add_one.rs",
            r#"
                extern crate foo;
                use foo::*;

                #[test]
                fn add_one_test() {
                    assert_eq!(add_one(1), 2);
                }

                #[test]
                fn fail_add_one_test() {
                    assert_eq!(add_one(1), 1);
                }
            "#,
        )
        .file(
            "tests/test_sub_one.rs",
            r#"
                extern crate foo;
                use foo::*;

                #[test]
                fn sub_one_test() {
                    assert_eq!(sub_one(1), 0);
                }
            "#,
        )
        .build();
    p.cargo("test --no-fail-fast")
        .with_status(101)
        .with_stderr(
            "\
[COMPILING] foo v0.0.1 [..]
[FINISHED] test [..]
[RUNNING] unittests src/lib.rs (target/debug/deps/foo[..])
[RUNNING] tests/test_add_one.rs (target/debug/deps/test_add_one[..])
[ERROR] test failed, to rerun pass `--test test_add_one`
[RUNNING] tests/test_sub_one.rs (target/debug/deps/test_sub_one[..])
[DOCTEST] foo
[ERROR] 1 target failed:
    `--test test_add_one`
",
        )
        .with_stdout_contains("running 0 tests")
        .with_stdout_contains("test result: FAILED. [..]")
        .with_stdout_contains("test sub_one_test ... ok")
        .with_stdout_contains_n("test [..] ... ok", 3)
        .run();
}

// `cargo test -p d1 -p d2` runs both selected packages' test binaries.
#[cargo_test]
fn test_multiple_packages() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.0.1"
                authors = []

                [dependencies.d1]
                path = "d1"
                [dependencies.d2]
                path = "d2"

                [lib]
                name = "foo"
                doctest = false
            "#,
        )
        .file("src/lib.rs", "")
        .file(
            "d1/Cargo.toml",
            r#"
                [package]
                name = "d1"
                version = "0.0.1"
                authors = []

                [lib]
                name = "d1"
                doctest = false
            "#,
        )
        .file("d1/src/lib.rs", "")
        .file(
            "d2/Cargo.toml",
            r#"
                [package]
                name = "d2"
                version = "0.0.1"
                authors = []

                [lib]
                name = "d2"
                doctest = false
            "#,
        )
        .file("d2/src/lib.rs", "");
    let p = p.build();

    p.cargo("test -p d1 -p d2")
        .with_stderr_contains("[RUNNING] [..] (target/debug/deps/d1-[..][EXE])")
        .with_stderr_contains("[RUNNING] [..] (target/debug/deps/d2-[..][EXE])")
        .with_stdout_contains_n("running 0 tests", 2)
        .run();
}

// Touching src/main.rs only rebuilds the bin's two test units, not the
// library tests. sleep_ms guards against mtime granularity.
#[cargo_test]
fn bin_does_not_rebuild_tests() {
    let p = project()
        .file("src/lib.rs", "")
        .file("src/main.rs", "fn main() {}")
        .file("tests/foo.rs", "");
    let p = p.build();

    p.cargo("test -v").run();

    sleep_ms(1000);
    fs::write(p.root().join("src/main.rs"), "fn main() { 3; }").unwrap();

    p.cargo("test -v --no-run")
        .with_stderr(
            "\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..] src/main.rs [..]`
[RUNNING] `rustc [..] src/main.rs [..]`
[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
[EXECUTABLE] `[..]/target/debug/deps/foo-[..][EXE]`
[EXECUTABLE] `[..]/target/debug/deps/foo-[..][EXE]`
[EXECUTABLE] `[..]/target/debug/deps/foo-[..][EXE]`
",
        )
        .run();
}

// Release-profile test selection works with a customized release profile.
#[cargo_test]
fn selective_test_wonky_profile() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.0.1"
                authors = []

                [profile.release]
                opt-level = 2

                [dependencies]
                a = { path = "a" }
            "#,
        )
        .file("src/lib.rs", "")
        .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
        .file("a/src/lib.rs", "");
    let p = p.build();

    p.cargo("test -v --no-run --release -p foo -p a").run();
}

// Selecting an optional dependency with `--features a -p a` builds and
// tests only that dependency.
#[cargo_test]
fn selective_test_optional_dep() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.0.1"
                authors = []

                [dependencies]
                a = { path = "a", optional = true }
            "#,
        )
        .file("src/lib.rs", "")
        .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
        .file("a/src/lib.rs", "");
    let p = p.build();

    p.cargo("test -v --no-run --features a -p a")
        .with_stderr(
            "\
[COMPILING] a v0.0.1 ([..])
[RUNNING] `rustc [..] a/src/lib.rs [..]`
[RUNNING] `rustc [..] a/src/lib.rs [..]`
[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
[EXECUTABLE] `[..]/target/debug/deps/a-[..][EXE]`
",
        )
        .run();
}

// `--doc` runs only doctests: the broken unit test and the non-Rust
// integration test are never compiled.
#[cargo_test]
fn only_test_docs() {
    let p = project()
        .file(
            "src/lib.rs",
            r#"
                #[test]
                fn foo() {
                    let a: u32 = "hello";
                }

                /// ```
                /// foo::bar();
                /// println!("ok");
                /// ```
                pub fn bar() {
                }
            "#,
        )
        .file("tests/foo.rs", "this is not rust");
    let p = p.build();

    p.cargo("test --doc")
        .with_stderr(
            "\
[COMPILING] foo v0.0.1 ([..])
[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
[DOCTEST] foo",
        )
        .with_stdout_contains("test [..] ... ok")
        .run();
}

// `panic = 'abort'` in the dev profile doesn't break testing a package
// with dependencies (tests are built with unwinding).
#[cargo_test]
fn test_panic_abort_with_dep() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.0.1"
                authors = []

                [dependencies]
                bar = { path = "bar" }

                [profile.dev]
                panic = 'abort'
            "#,
        )
        .file(
            "src/lib.rs",
            r#"
                extern crate bar;

                #[test]
                fn foo() {}
            "#,
        )
        .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
        .file("bar/src/lib.rs", "")
        .build();
    p.cargo("test -v").run();
}

// With `harness = false`, the lib is still compiled with `cfg(test)` so
// its custom `main` runs and prints directly.
#[cargo_test]
fn cfg_test_even_with_no_harness() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.0.1"
                authors = []

                [lib]
                harness = false
                doctest = false
            "#,
        )
        .file(
            "src/lib.rs",
            r#"#[cfg(test)] fn main() { println!("hello!"); }"#,
        )
        .build();
    p.cargo("test -v")
        .with_stdout("hello!\n")
        .with_stderr(
            "\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..]`
[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `[..]`
",
        )
        .run();
}

// `panic = 'abort'` in the release profile with multiple selected
// packages compiles cleanly under `test --release`.
#[cargo_test]
fn panic_abort_multiple() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.0.1"
                authors = []

                [dependencies]
                a = { path = "a" }

                [profile.release]
                panic = 'abort'
            "#,
        )
        .file(
            "src/lib.rs",
            "#[allow(unused_extern_crates)] extern crate a;",
        )
        .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
        .file("a/src/lib.rs", "")
        .build();
    p.cargo("test --release -v -p foo -p a").run();
}

// rustdoc receives the same `--cfg` feature flags rustc did, so the
// doctest selects between the build-dep- and dep-enabled constants.
#[cargo_test]
fn pass_correct_cfgs_flags_to_rustdoc() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.1.0"
                authors = []

                [features]
                default = ["feature_a/default"]
                nightly = ["feature_a/nightly"]

                [dependencies.feature_a]
                path = "libs/feature_a"
                default-features = false
            "#,
        )
        .file(
            "src/lib.rs",
            r#"
                #[cfg(test)]
                mod tests {
                    #[test]
                    fn it_works() {
                        assert!(true);
                    }
                }
            "#,
        )
        .file(
            "libs/feature_a/Cargo.toml",
            r#"
                [package]
                name = "feature_a"
                version = "0.1.0"
                authors = []

                [features]
                default = ["mock_serde_codegen"]
                nightly = ["mock_serde_derive"]

                [dependencies]
                mock_serde_derive = { path = "../mock_serde_derive", optional = true }

                [build-dependencies]
                mock_serde_codegen = { path = "../mock_serde_codegen", optional = true }
            "#,
        )
        .file(
            "libs/feature_a/src/lib.rs",
            r#"
                #[cfg(feature = "mock_serde_derive")]
                const MSG: &'static str = "This is safe";

                #[cfg(feature = "mock_serde_codegen")]
                const MSG: &'static str = "This is risky";

                pub fn get() -> &'static str {
                    MSG
                }
            "#,
        )
        .file(
            "libs/mock_serde_derive/Cargo.toml",
            &basic_manifest("mock_serde_derive", "0.1.0"),
        )
        .file("libs/mock_serde_derive/src/lib.rs", "")
        .file(
            "libs/mock_serde_codegen/Cargo.toml",
            &basic_manifest("mock_serde_codegen", "0.1.0"),
        )
        .file("libs/mock_serde_codegen/src/lib.rs", "");
    let p = p.build();

    p.cargo("test --package feature_a --verbose")
        .with_stderr_contains(
            "\
[DOCTEST] feature_a
[RUNNING] `rustdoc [..]--test [..]mock_serde_codegen[..]`",
        )
        .run();

    p.cargo("test --verbose")
        .with_stderr_contains(
            "\
[DOCTEST] foo
[RUNNING] `rustdoc [..]--test [..]feature_a[..]`",
        )
        .run();
}

// `panic = 'abort'` in the test/release profiles is tolerated by both
// `cargo test` and `cargo bench`.
#[cargo_test]
fn test_release_ignore_panic() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.0.1"
                authors = []

                [dependencies]
                a = { path = "a" }

                [profile.test]
                panic = 'abort'
                [profile.release]
                panic = 'abort'
            "#,
        )
        .file(
            "src/lib.rs",
            "#[allow(unused_extern_crates)] extern crate a;",
        )
        .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
        .file("a/src/lib.rs", "");
    let p = p.build();
    println!("test");
    p.cargo("test -v").run();
    println!("bench");
    p.cargo("bench -v").run();
}

// Features can be combined with multiple `-p` selections in a workspace.
#[cargo_test]
fn test_many_with_features() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.0.1"
                authors = []

                [dependencies]
                a = { path = "a" }

                [features]
                foo = []

                [workspace]
            "#,
        )
        .file("src/lib.rs", "")
        .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
        .file("a/src/lib.rs", "")
        .build();

    p.cargo("test -v -p a -p foo --features foo").run();
}

// `--workspace` runs every member's tests.
#[cargo_test]
fn test_all_workspace() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [project]
                name = "foo"
                version = "0.1.0"

                [dependencies]
                bar = { path = "bar" }

                [workspace]
            "#,
        )
        .file("src/main.rs", "#[test] fn foo_test() {}")
        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
        .file("bar/src/lib.rs", "#[test] fn bar_test() {}")
        .build();

    p.cargo("test --workspace")
        .with_stdout_contains("test foo_test ... ok")
        .with_stdout_contains("test bar_test ... ok")
        .run();
}

// `--exclude` removes a member from the `--workspace` selection; the
// excluded member's (failing) test never runs.
#[cargo_test]
fn test_all_exclude() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [project]
                name = "foo"
                version = "0.1.0"

                [workspace]
                members = ["bar", "baz"]
            "#,
        )
        .file("src/main.rs", "fn main() {}")
        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
        .file("bar/src/lib.rs", "#[test] pub fn bar() {}")
        .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
        .file("baz/src/lib.rs", "#[test] pub fn baz() { assert!(false); }")
        .build();

    p.cargo("test --workspace --exclude baz")
        .with_stdout_contains(
            "\
running 1 test
test bar ... ok",
        )
        .run();
}

// Excluding a nonexistent package is a warning, not an error.
#[cargo_test]
fn test_all_exclude_not_found() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [project]
                name = "foo"
                version = "0.1.0"

                [workspace]
                members = ["bar"]
            "#,
        )
        .file("src/main.rs", "fn main() {}")
        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
        .file("bar/src/lib.rs", "#[test] pub fn bar() {}")
        .build();

    p.cargo("test --workspace --exclude baz")
        .with_stderr_contains("[WARNING] excluded package(s) `baz` not found in workspace [..]")
        .with_stdout_contains(
            "\
running 1 test
test bar ... ok",
        )
        .run();
}

// `--exclude` accepts glob patterns.
#[cargo_test]
fn test_all_exclude_glob() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [project]
                name = "foo"
                version = "0.1.0"

                [workspace]
                members = ["bar", "baz"]
            "#,
        )
        .file("src/main.rs", "fn main() {}")
        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
        .file("bar/src/lib.rs", "#[test] pub fn bar() {}")
        .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
        .file("baz/src/lib.rs", "#[test] pub fn baz() { assert!(false); }")
        .build();

    p.cargo("test --workspace --exclude '*z'")
        .with_stdout_contains(
            "\
running 1 test
test bar ... ok",
        )
        .run();
}

// A glob exclude that matches nothing warns with the pattern wording.
#[cargo_test]
fn test_all_exclude_glob_not_found() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [project]
                name = "foo"
                version = "0.1.0"

                [workspace]
                members = ["bar"]
            "#,
        )
        .file("src/main.rs", "fn main() {}")
        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
        .file("bar/src/lib.rs", "#[test] pub fn bar() {}")
        .build();

    p.cargo("test --workspace --exclude '*z'")
        .with_stderr_contains(
            "[WARNING] excluded package pattern(s) `*z` not found in workspace [..]",
        )
        .with_stdout_contains(
            "\
running 1 test
test bar ... ok",
        )
        .run();
}

// A syntactically invalid glob in `--exclude` is a hard error.
#[cargo_test]
fn test_all_exclude_broken_glob() {
    let p = project().file("src/main.rs", "fn main() {}").build();

    p.cargo("test --workspace --exclude '[*z'")
        .with_status(101)
        .with_stderr_contains("[ERROR] cannot build glob pattern from `[*z`")
        .run();
}

// `--workspace` in a virtual manifest tests every member.
#[cargo_test]
fn test_all_virtual_manifest() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [workspace]
                members = ["a", "b"]
            "#,
        )
        .file("a/Cargo.toml", &basic_manifest("a", "0.1.0"))
        .file("a/src/lib.rs", "#[test] fn a() {}")
        .file("b/Cargo.toml", &basic_manifest("b", "0.1.0"))
        .file("b/src/lib.rs", "#[test] fn b() {}")
        .build();

    p.cargo("test --workspace")
        .with_stdout_contains("running 1 test\ntest a ... ok")
        .with_stdout_contains("running 1 test\ntest b ... ok")
        .run();
}

// In a virtual manifest, a bare `cargo test` implies `--workspace`.
#[cargo_test]
fn test_virtual_manifest_all_implied() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [workspace]
                members = ["a", "b"]
            "#,
        )
        .file("a/Cargo.toml", &basic_manifest("a", "0.1.0"))
        .file("a/src/lib.rs", "#[test] fn a() {}")
        .file("b/Cargo.toml", &basic_manifest("b", "0.1.0"))
        .file("b/src/lib.rs", "#[test] fn b() {}")
        .build();

    p.cargo("test")
        .with_stdout_contains("running 1 test\ntest a ... ok")
        .with_stdout_contains("running 1 test\ntest b ... ok")
        .run();
}

// `-p <member>` in a virtual manifest tests only that member.
#[cargo_test]
fn test_virtual_manifest_one_project() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [workspace]
                members = ["bar", "baz"]
            "#,
        )
        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
        .file("bar/src/lib.rs", "#[test] fn bar() {}")
        .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
        .file("baz/src/lib.rs", "#[test] fn baz() { assert!(false); }")
        .build();

    p.cargo("test -p bar")
        .with_stdout_contains("running 1 test\ntest bar ... ok")
        .with_stdout_does_not_contain("running 1 test\ntest baz ... ok")
        .run();
}

// `-p` also accepts glob patterns over member names.
#[cargo_test]
fn test_virtual_manifest_glob() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [workspace]
                members = ["bar", "baz"]
            "#,
        )
        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
        .file("bar/src/lib.rs", "#[test] fn bar() { assert!(false); }")
        .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
        .file("baz/src/lib.rs", "#[test] fn baz() {}")
        .build();

    p.cargo("test -p '*z'")
        .with_stdout_does_not_contain("running 1 test\ntest bar ... ok")
        .with_stdout_contains("running 1 test\ntest baz ... ok")
        .run();
}

// A `-p` glob that matches no member is an error (unlike `--exclude`).
#[cargo_test]
fn test_virtual_manifest_glob_not_found() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [workspace]
                members = ["bar"]
            "#,
        )
        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
        .file("bar/src/lib.rs", "#[test] fn bar() {}")
        .build();

    p.cargo("test -p bar -p '*z'")
        .with_status(101)
        .with_stderr("[ERROR] package pattern(s) `*z` not found in workspace [..]")
        .run();
}

// An invalid `-p` glob is a hard error.
#[cargo_test]
fn test_virtual_manifest_broken_glob() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [workspace]
                members = ["bar"]
            "#,
        )
        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
        .file("bar/src/lib.rs", "#[test] fn bar() {}")
        .build();

    p.cargo("test -p '[*z'")
        .with_status(101)
        .with_stderr_contains("[ERROR] cannot build glob pattern from `[*z`")
        .run();
}

// A member that depends on a registry package with its own name still
// tests correctly under `--workspace`.
#[cargo_test]
fn test_all_member_dependency_same_name() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [workspace]
                members = ["a"]
            "#,
        )
        .file(
            "a/Cargo.toml",
            r#"
                [project]
                name = "a"
                version = "0.1.0"

                [dependencies]
                a = "0.1.0"
            "#,
        )
        .file("a/src/lib.rs", "#[test] fn a() {}")
        .build();

    Package::new("a", "0.1.0").publish();

    p.cargo("test --workspace")
        .with_stdout_contains("test a ... ok")
        .run();
}

// `--doc` links doctests against dev-dependencies.
#[cargo_test]
fn doctest_only_with_dev_dep() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [project]
                name = "a"
                version = "0.1.0"

                [dev-dependencies]
                b = { path = "b" }
            "#,
        )
        .file(
            "src/lib.rs",
            r#"
                /// ```
                /// extern crate b;
                ///
                /// b::b();
                /// ```
                pub fn a() {}
            "#,
        )
        .file("b/Cargo.toml", &basic_manifest("b", "0.1.0"))
        .file("b/src/lib.rs", "pub fn b() {}")
        .build();

    p.cargo("test --doc -v").run();
}

// Explicit --bin/--example/--test filters select exactly those targets;
// the broken/failing targets (c variants) are never built or run.
#[cargo_test]
fn test_many_targets() {
    let p = project()
        .file(
            "src/bin/a.rs",
            r#"
                fn main() {}
                #[test] fn bin_a() {}
            "#,
        )
        .file(
            "src/bin/b.rs",
            r#"
                fn main() {}
                #[test] fn bin_b() {}
            "#,
        )
        .file(
            "src/bin/c.rs",
            r#"
                fn main() {}
                #[test] fn bin_c() { panic!(); }
            "#,
        )
        .file(
            "examples/a.rs",
            r#"
                fn main() {}
                #[test] fn example_a() {}
            "#,
        )
        .file(
            "examples/b.rs",
            r#"
                fn main() {}
                #[test] fn example_b() {}
            "#,
        )
        .file("examples/c.rs", "#[test] fn example_c() { panic!(); }")
        .file("tests/a.rs", "#[test] fn test_a() {}")
        .file("tests/b.rs", "#[test] fn test_b() {}")
        .file("tests/c.rs", "does not compile")
        .build();

    p.cargo("test --verbose --bin a --bin b --example a --example b --test a --test b")
        .with_stdout_contains("test bin_a ... ok")
        .with_stdout_contains("test bin_b ... ok")
        .with_stdout_contains("test test_a ... ok")
        .with_stdout_contains("test test_b ... ok")
        .with_stderr_contains("[RUNNING] `rustc --crate-name a examples/a.rs [..]`")
        .with_stderr_contains("[RUNNING] `rustc --crate-name b examples/b.rs [..]`")
        .run();
}

// Doctests work when a path dep's name collides with a registry dep
// used elsewhere in the workspace.
#[cargo_test]
fn doctest_and_registry() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [project]
                name = "a"
                version = "0.1.0"

                [dependencies]
                b = { path = "b" }
                c = { path = "c" }

                [workspace]
            "#,
        )
        .file("src/lib.rs", "")
        .file("b/Cargo.toml", &basic_manifest("b", "0.1.0"))
        .file(
            "b/src/lib.rs",
            "
            /// ```
            /// b::foo();
            /// ```
            pub fn foo() {}
        ",
        )
        .file(
            "c/Cargo.toml",
            r#"
                [project]
                name = "c"
                version = "0.1.0"

                [dependencies]
                b = "0.1"
            "#,
        )
        .file("c/src/lib.rs", "")
        .build();

    Package::new("b", "0.1.0").publish();

    p.cargo("test --workspace -v").run();
}

// Tests see the `CARGO` environment variable pointing at the cargo
// binary that launched them.
#[cargo_test]
fn cargo_test_env() {
    let src = format!(
        r#"
        #![crate_type = "rlib"]

        #[test]
        fn env_test() {{
            use std::env;
            eprintln!("{{}}", env::var("{}").unwrap());
        }}
        "#,
        cargo::CARGO_ENV
    );

    let p = project()
        .file("Cargo.toml", &basic_lib_manifest("foo"))
        .file("src/lib.rs", &src)
        .build();

    let cargo = cargo_exe().canonicalize().unwrap();
    p.cargo("test --lib -- --nocapture")
        .with_stderr_contains(cargo.to_str().unwrap())
        .with_stdout_contains("test env_test ... ok")
        .run();
}

// Test targets run in a stable order: lib first, then integration
// tests alphabetically.
#[cargo_test]
fn test_order() {
    let p = project()
        .file("src/lib.rs", "#[test] fn test_lib() {}")
        .file("tests/a.rs", "#[test] fn test_a() {}")
        .file("tests/z.rs", "#[test] fn test_z() {}")
        .build();

    p.cargo("test --workspace")
        .with_stdout_contains(
            "
running 1 test
test test_lib ... ok

test result: ok. [..]


running 1 test
test test_a ... ok

test result: ok. [..]


running 1 test
test test_z ... ok

test result: ok. [..]
",
        )
        .run();
}

// A package that dev-depends on itself is accepted and testable.
#[cargo_test]
fn cyclic_dev() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [project]
                name = "foo"
                version = "0.1.0"

                [dev-dependencies]
                foo = { path = "." }
            "#,
        )
        .file("src/lib.rs", "#[test] fn test_lib() {}")
        .file("tests/foo.rs", "extern crate foo;")
        .build();

    p.cargo("test --workspace").run();
}

#[cargo_test]
fn publish_a_crate_without_tests() {
    // A published crate whose manifest declares a [[test]] target that was
    // excluded from the .crate file must not break downstream `cargo test`.
    Package::new("testless", "0.1.0")
        .file(
            "Cargo.toml",
            r#"
                [project]
                name = "testless"
                version = "0.1.0"
                exclude = ["tests/*"]

                [[test]]
                name = "a_test"
            "#,
        )
        .file("src/lib.rs", "")
        // In real life, the package will have a test,
        // which would be excluded from .crate file by the
        // `exclude` field. Our test harness does not honor
        // exclude though, so let's just not add the file!
        // .file("tests/a_test.rs", "")
        .publish();

    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [project]
                name = "foo"
                version = "0.1.0"

                [dependencies]
                testless = "0.1.0"
            "#,
        )
        .file("src/lib.rs", "")
        .build();

    p.cargo("test").run();
    p.cargo("test --package testless").run();
}

// With an explicit `--target`, the transitive (host-side) dependency of
// a proc-macro dependency is still resolved and built.
#[cargo_test]
fn find_dependency_of_proc_macro_dependency_with_target() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [workspace]
                members = ["root", "proc_macro_dep"]
            "#,
        )
        .file(
            "root/Cargo.toml",
            r#"
                [project]
                name = "root"
                version = "0.1.0"
                authors = []

                [dependencies]
                proc_macro_dep = { path = "../proc_macro_dep" }
            "#,
        )
        .file(
            "root/src/lib.rs",
            r#"
                #[macro_use]
                extern crate proc_macro_dep;

                #[derive(Noop)]
                pub struct X;
            "#,
        )
        .file(
            "proc_macro_dep/Cargo.toml",
            r#"
                [project]
                name = "proc_macro_dep"
                version = "0.1.0"
                authors = []

                [lib]
                proc-macro = true

                [dependencies]
                baz = "^0.1"
            "#,
        )
        .file(
            "proc_macro_dep/src/lib.rs",
            r#"
                extern crate baz;
                extern crate proc_macro;
                use proc_macro::TokenStream;

                #[proc_macro_derive(Noop)]
                pub fn noop(_input: TokenStream) -> TokenStream {
                    "".parse().unwrap()
                }
            "#,
        )
        .build();
    Package::new("bar", "0.1.0").publish();
    Package::new("baz", "0.1.0")
        .dep("bar", "0.1")
        .file("src/lib.rs", "extern crate bar;")
        .publish();
    p.cargo("test --workspace --target").arg(rustc_host()).run();
}

// A passing doctest's output doesn't mask the rerun hint emitted for a
// failing integration test.
#[cargo_test]
fn test_hint_not_masked_by_doctest() {
    let p = project()
        .file(
            "src/lib.rs",
            r#"
                /// ```
                /// assert_eq!(1, 1);
                /// ```
                pub fn this_works() {}
            "#,
        )
        .file(
            "tests/integ.rs",
            r#"
                #[test]
                fn this_fails() {
                    panic!();
                }
            "#,
        )
        .build();
    p.cargo("test --no-fail-fast")
        .with_status(101)
        .with_stdout_contains("test this_fails ... FAILED")
        .with_stdout_contains("[..]this_works (line [..]ok")
        .with_stderr_contains("[ERROR] test failed, to rerun pass `--test integ`")
        .run();
}

// Rerun hints in a virtual workspace include `-p <pkg>` plus the exact
// target flag, for every kind of target (lib/bin/test/doctest/bench/example).
#[cargo_test]
fn test_hint_workspace_virtual() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [workspace]
                members = ["a", "b", "c"]
            "#,
        )
        .file("a/Cargo.toml", &basic_manifest("a", "0.1.0"))
        .file("a/src/lib.rs", "#[test] fn t1() {}")
        .file("b/Cargo.toml", &basic_manifest("b", "0.1.0"))
        .file("b/src/lib.rs", "#[test] fn t1() {assert!(false)}")
        .file("c/Cargo.toml", &basic_manifest("c", "0.1.0"))
        .file(
            "c/src/lib.rs",
            r#"
                /// ```rust
                /// assert_eq!(1, 2);
                /// ```
                pub fn foo() {}
            "#,
        )
        .file(
            "c/src/main.rs",
            r#"
                fn main() {}
                #[test]
                fn from_main() { assert_eq!(1, 2); }
            "#,
        )
        .file(
            "c/tests/t1.rs",
            r#"
                #[test]
                fn from_int_test() { assert_eq!(1, 2); }
            "#,
        )
        .file(
            "c/examples/ex1.rs",
            r#"
                fn main() {}
                #[test]
                fn from_example() { assert_eq!(1, 2); }
            "#,
        )
        // This does not use #[bench] since it is unstable. #[test] works just
        // the same for our purpose of checking the hint.
        .file(
            "c/benches/b1.rs",
            r#"
                #[test]
                fn from_bench() { assert_eq!(1, 2); }
            "#,
        )
        .build();

    // This depends on Units being sorted so that `b` fails first.
    p.cargo("test")
        .with_stderr_unordered(
            "\
[COMPILING] c v0.1.0 [..]
[COMPILING] a v0.1.0 [..]
[COMPILING] b v0.1.0 [..]
[FINISHED] test [..]
[RUNNING] unittests src/lib.rs (target/debug/deps/a[..])
[RUNNING] unittests src/lib.rs (target/debug/deps/b[..])
[ERROR] test failed, to rerun pass `-p b --lib`
",
        )
        .with_status(101)
        .run();
    p.cargo("test")
        .cwd("b")
        .with_stderr(
            "\
[FINISHED] test [..]
[RUNNING] unittests src/lib.rs ([ROOT]/foo/target/debug/deps/b[..])
[ERROR] test failed, to rerun pass `--lib`
",
        )
        .with_status(101)
        .run();
    p.cargo("test --no-fail-fast")
        .with_stderr(
            "\
[FINISHED] test [..]
[RUNNING] unittests src/lib.rs (target/debug/deps/a[..])
[RUNNING] unittests src/lib.rs (target/debug/deps/b[..])
[ERROR] test failed, to rerun pass `-p b --lib`
[RUNNING] unittests src/lib.rs (target/debug/deps/c[..])
[RUNNING] unittests src/main.rs (target/debug/deps/c[..])
[ERROR] test failed, to rerun pass `-p c --bin c`
[RUNNING] tests/t1.rs (target/debug/deps/t1[..])
[ERROR] test failed, to rerun pass `-p c --test t1`
[DOCTEST] a
[DOCTEST] b
[DOCTEST] c
[ERROR] doctest failed, to rerun pass `-p c --doc`
[ERROR] 4 targets failed:
    `-p b --lib`
    `-p c --bin c`
    `-p c --test t1`
    `-p c --doc`
",
        )
        .with_status(101)
        .run();
    // Check others that are not in the default set.
    p.cargo("test -p c --examples --benches --no-fail-fast")
        .with_stderr(
            "\
[COMPILING] c v0.1.0 [..]
[FINISHED] test [..]
[RUNNING] unittests src/lib.rs (target/debug/deps/c[..])
[RUNNING] unittests src/main.rs (target/debug/deps/c[..])
[ERROR] test failed, to rerun pass `-p c --bin c`
[RUNNING] benches/b1.rs (target/debug/deps/b1[..])
[ERROR] test failed, to rerun pass `-p c --bench b1`
[RUNNING] unittests examples/ex1.rs (target/debug/examples/ex1[..])
[ERROR] test failed, to rerun pass `-p c --example ex1`
[ERROR] 3 targets failed:
    `-p c --bin c`
    `-p c --bench b1`
    `-p c --example ex1`
",
        )
        .with_status(101)
        .run()
}

// In a non-virtual workspace the rerun hint still carries `-p <member>`,
// whether selected via --workspace or -p.
#[cargo_test]
fn test_hint_workspace_nonvirtual() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.1.0"

                [workspace]
                members = ["a"]
            "#,
        )
        .file("src/lib.rs", "")
        .file("a/Cargo.toml", &basic_manifest("a", "0.1.0"))
        .file("a/src/lib.rs", "#[test] fn t1() {assert!(false)}")
        .build();

    p.cargo("test --workspace")
        .with_stderr_contains("[ERROR] test failed, to rerun pass `-p a --lib`")
        .with_status(101)
        .run();
    p.cargo("test -p a")
        .with_stderr_contains("[ERROR] test failed, to rerun pass `-p a --lib`")
        .with_status(101)
        .run();
}

#[cargo_test]
fn json_artifact_includes_test_flag() {
    // Verify that the JSON artifact output includes `test` flag.
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.0.1"
                authors = []

                [profile.test]
                opt-level = 1
            "#,
        )
        .file("src/lib.rs", "")
        .build();

    p.cargo("test --lib -v --message-format=json")
        .with_json(
            r#"
    {
        "reason":"compiler-artifact",
        "profile": {
            "debug_assertions": true,
            "debuginfo": 2,
            "opt_level": "1",
            "overflow_checks": true,
            "test": true
        },
        "executable": "[..]/foo-[..]",
        "features": [],
        "package_id":"foo 0.0.1 ([..])",
        "manifest_path": "[..]",
        "target":{
            "kind":["lib"],
            "crate_types":["lib"],
            "doc": true,
            "doctest": true,
            "edition": "2015",
            "name":"foo",
            "src_path":"[..]lib.rs",
            "test": true
        },
        "filenames":"{...}",
        "fresh": false
    }

    {"reason": "build-finished", "success": true}
"#,
        )
        .run();
}

// The JSON message for a library's test unit carries an `executable` path.
#[cargo_test]
fn json_artifact_includes_executable_for_library_tests() {
    let p = project()
        .file("src/main.rs", "fn main() { }")
        .file("src/lib.rs", r#"#[test] fn lib_test() {}"#)
        .build();

    p.cargo("test --lib -v --no-run --message-format=json")
        .with_json(
            r#"
                {
                    "executable": "[..]/foo/target/debug/deps/foo-[..][EXE]",
                    "features": [],
                    "filenames": "{...}",
                    "fresh": false,
                    "package_id": "foo 0.0.1 ([..])",
                    "manifest_path": "[..]",
                    "profile": "{...}",
                    "reason": "compiler-artifact",
                    "target": {
                        "crate_types": [ "lib" ],
                        "kind": [ "lib" ],
                        "doc": true,
                        "doctest": true,
                        "edition": "2015",
                        "name": "foo",
                        "src_path": "[..]/foo/src/lib.rs",
                        "test": true
                    }
                }

                {"reason": "build-finished", "success": true}
            "#,
        )
        .run();
}

// NOTE(review): this final test is truncated at the end of the visible
// chunk (mid-JSON); the remainder lies outside this view and is
// reproduced only up to the cut.
#[cargo_test]
fn json_artifact_includes_executable_for_integration_tests() {
    let p = project()
        .file(
            "tests/integration_test.rs",
            r#"#[test] fn integration_test() {}"#,
        )
        .build();

    p.cargo("test -v --no-run --message-format=json --test integration_test")
        .with_json(
            r#"
                {
                    "executable": "[..]/foo/target/debug/deps/integration_test-[..][EXE]",
                    "features": [],
                    "filenames": "{...}",
                    "fresh": false,
                    "package_id": "foo 0.0.1 ([..])",
                    "manifest_path": "[..]",
                    "profile": "{...}",
                    "reason": "compiler-artifact",
                    "target": {
                        "crate_types": [ "bin"
], "kind": [ "test" ], "doc": false, "doctest": false, "edition": "2015", "name": "integration_test", "src_path": "[..]/foo/tests/integration_test.rs", "test": true } } {"reason": "build-finished", "success": true} "#, ) .run(); } #[cargo_test] fn test_build_script_links() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" links = 'something' [lib] test = false "#, ) .file("build.rs", "fn main() {}") .file("src/lib.rs", "") .build(); p.cargo("test --no-run").run(); } #[cargo_test] fn doctest_skip_staticlib() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [lib] crate-type = ["staticlib"] "#, ) .file( "src/lib.rs", r#" //! ``` //! assert_eq!(1,2); //! ``` "#, ) .build(); p.cargo("test --doc") .with_status(101) .with_stderr( "\ [WARNING] doc tests are not supported for crate type(s) `staticlib` in package `foo` [ERROR] no library targets found in package `foo`", ) .run(); p.cargo("test") .with_stderr( "\ [COMPILING] foo [..] [FINISHED] test [..] [RUNNING] [..] (target/debug/deps/foo-[..])", ) .run(); } #[cargo_test] fn can_not_mix_doc_tests_and_regular_tests() { let p = project() .file( "src/lib.rs", "\ /// ``` /// assert_eq!(1, 1) /// ``` pub fn foo() -> u8 { 1 } #[cfg(test)] mod tests { #[test] fn it_works() { assert_eq!(2 + 2, 4); } } ", ) .build(); p.cargo("test") .with_stderr( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] (target/debug/deps/foo-[..]) [DOCTEST] foo ", ) .with_stdout( " running 1 test test tests::it_works ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..] running 1 test test src/lib.rs - foo (line 1) ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..] \n", ) .run(); p.cargo("test --lib") .with_stderr( "\ [FINISHED] test [unoptimized + debuginfo] target(s) in [..] [RUNNING] [..] 
(target/debug/deps/foo-[..])\n", ) .with_stdout( " running 1 test test tests::it_works ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..] \n", ) .run(); // This has been modified to attempt to diagnose spurious errors on CI. // For some reason, this is recompiling the lib when it shouldn't. If the // root cause is ever found, the changes here should be reverted. // See https://github.com/rust-lang/cargo/issues/6887 p.cargo("test --doc -vv") .with_stderr_does_not_contain("[COMPILING] foo [..]") .with_stderr_contains("[DOCTEST] foo") .with_stdout( " running 1 test test src/lib.rs - foo (line 1) ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..] ", ) .env("CARGO_LOG", "cargo=trace") .run(); p.cargo("test --lib --doc") .with_status(101) .with_stderr("[ERROR] Can't mix --doc with other target selecting options\n") .run(); } #[cargo_test] fn can_not_no_run_doc_tests() { let p = project() .file( "src/lib.rs", r#" /// ``` /// let _x = 1 + "foo"; /// ``` pub fn foo() -> u8 { 1 } "#, ) .build(); p.cargo("test --doc --no-run") .with_status(101) .with_stderr("[ERROR] Can't skip running doc tests with --no-run") .run(); } #[cargo_test] fn test_all_targets_lib() { let p = project().file("src/lib.rs", "").build(); p.cargo("test --all-targets") .with_stderr( "\ [COMPILING] foo [..] [FINISHED] test [..] [RUNNING] [..]foo[..] 
", ) .run(); } #[cargo_test] fn test_dep_with_dev() { Package::new("devdep", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" [dependencies] bar = { path = "bar" } "#, ) .file("src/lib.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" [dev-dependencies] devdep = "0.1" "#, ) .file("bar/src/lib.rs", "") .build(); p.cargo("test -p bar") .with_status(101) .with_stderr( "[ERROR] package `bar` cannot be tested because it requires dev-dependencies \ and is not a member of the workspace", ) .run(); } #[cargo_test(nightly, reason = "-Zdoctest-xcompile is unstable")] fn cargo_test_doctest_xcompile_ignores() { // -Zdoctest-xcompile also enables --enable-per-target-ignores which // allows the ignore-TARGET syntax. let p = project() .file("Cargo.toml", &basic_lib_manifest("foo")) .file( "src/lib.rs", r#" ///```ignore-x86_64 ///assert!(cfg!(not(target_arch = "x86_64"))); ///``` pub fn foo() -> u8 { 4 } "#, ) .build(); p.cargo("build").run(); #[cfg(not(target_arch = "x86_64"))] p.cargo("test") .with_stdout_contains( "test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..]", ) .run(); #[cfg(target_arch = "x86_64")] p.cargo("test") .with_status(101) .with_stdout_contains( "test result: FAILED. 0 passed; 1 failed; 0 ignored; 0 measured; 0 filtered out[..]", ) .run(); #[cfg(not(target_arch = "x86_64"))] p.cargo("test -Zdoctest-xcompile") .masquerade_as_nightly_cargo(&["doctest-xcompile"]) .with_stdout_contains( "test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..]", ) .run(); #[cfg(target_arch = "x86_64")] p.cargo("test -Zdoctest-xcompile") .masquerade_as_nightly_cargo(&["doctest-xcompile"]) .with_stdout_contains( "test result: ok. 
0 passed; 0 failed; 1 ignored; 0 measured; 0 filtered out[..]", ) .run(); } #[cargo_test(nightly, reason = "-Zdoctest-xcompile is unstable")] fn cargo_test_doctest_xcompile() { if !cross_compile::can_run_on_host() { return; } let p = project() .file("Cargo.toml", &basic_lib_manifest("foo")) .file( "src/lib.rs", r#" ///``` ///assert!(1 == 1); ///``` pub fn foo() -> u8 { 4 } "#, ) .build(); p.cargo("build").run(); p.cargo(&format!("test --target {}", cross_compile::alternate())) .with_stdout_contains("running 0 tests") .run(); p.cargo(&format!( "test --target {} -Zdoctest-xcompile", cross_compile::alternate() )) .masquerade_as_nightly_cargo(&["doctest-xcompile"]) .with_stdout_contains( "test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..]", ) .run(); } #[cargo_test(nightly, reason = "-Zdoctest-xcompile is unstable")] fn cargo_test_doctest_xcompile_runner() { if !cross_compile::can_run_on_host() { return; } let runner = project() .file("Cargo.toml", &basic_bin_manifest("runner")) .file( "src/main.rs", r#" pub fn main() { eprintln!("this is a runner"); let args: Vec = std::env::args().collect(); std::process::Command::new(&args[1]).spawn(); } "#, ) .build(); runner.cargo("build").run(); assert!(runner.bin("runner").is_file()); let runner_path = paths::root().join("runner"); fs::copy(&runner.bin("runner"), &runner_path).unwrap(); let config = paths::root().join(".cargo/config"); fs::create_dir_all(config.parent().unwrap()).unwrap(); // Escape Windows backslashes for TOML config. 
let runner_str = runner_path.to_str().unwrap().replace('\\', "\\\\"); fs::write( config, format!( r#" [target.'cfg(target_arch = "{}")'] runner = "{}" "#, cross_compile::alternate_arch(), runner_str ), ) .unwrap(); let p = project() .file("Cargo.toml", &basic_lib_manifest("foo")) .file( "src/lib.rs", &format!( r#" ///``` ///assert!(cfg!(target_arch = "{}")); ///``` pub fn foo() -> u8 {{ 4 }} "#, cross_compile::alternate_arch() ), ) .build(); p.cargo("build").run(); p.cargo(&format!("test --target {}", cross_compile::alternate())) .with_stdout_contains("running 0 tests") .run(); p.cargo(&format!( "test --target {} -Zdoctest-xcompile", cross_compile::alternate() )) .masquerade_as_nightly_cargo(&["doctest-xcompile"]) .with_stdout_contains( "test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..]", ) .with_stderr_contains("this is a runner") .run(); } #[cargo_test(nightly, reason = "-Zdoctest-xcompile is unstable")] fn cargo_test_doctest_xcompile_no_runner() { if !cross_compile::can_run_on_host() { return; } let p = project() .file("Cargo.toml", &basic_lib_manifest("foo")) .file( "src/lib.rs", &format!( r#" ///``` ///assert!(cfg!(target_arch = "{}")); ///``` pub fn foo() -> u8 {{ 4 }} "#, cross_compile::alternate_arch() ), ) .build(); p.cargo("build").run(); p.cargo(&format!("test --target {}", cross_compile::alternate())) .with_stdout_contains("running 0 tests") .run(); p.cargo(&format!( "test --target {} -Zdoctest-xcompile", cross_compile::alternate() )) .masquerade_as_nightly_cargo(&["doctest-xcompile"]) .with_stdout_contains( "test result: ok. 
1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..]", ) .run(); } #[cargo_test(nightly, reason = "-Zpanic-abort-tests in rustc is unstable")] fn panic_abort_tests() { let p = project() .file( "Cargo.toml", r#" [package] name = 'foo' version = '0.1.0' [dependencies] a = { path = 'a' } [profile.dev] panic = 'abort' [profile.test] panic = 'abort' "#, ) .file( "src/lib.rs", r#" #[test] fn foo() { a::foo(); } "#, ) .file("a/Cargo.toml", &basic_lib_manifest("a")) .file("a/src/lib.rs", "pub fn foo() {}") .build(); p.cargo("test -Z panic-abort-tests -v") .with_stderr_contains("[..]--crate-name a [..]-C panic=abort[..]") .with_stderr_contains("[..]--crate-name foo [..]-C panic=abort[..]") .with_stderr_contains("[..]--crate-name foo [..]-C panic=abort[..]--test[..]") .masquerade_as_nightly_cargo(&["panic-abort-tests"]) .run(); } #[cargo_test(nightly, reason = "-Zpanic-abort-tests in rustc is unstable")] fn panic_abort_only_test() { let p = project() .file( "Cargo.toml", r#" [package] name = 'foo' version = '0.1.0' [dependencies] a = { path = 'a' } [profile.test] panic = 'abort' "#, ) .file( "src/lib.rs", r#" #[test] fn foo() { a::foo(); } "#, ) .file("a/Cargo.toml", &basic_lib_manifest("a")) .file("a/src/lib.rs", "pub fn foo() {}") .build(); p.cargo("test -Z panic-abort-tests -v") .with_stderr_contains("warning: `panic` setting is ignored for `test` profile") .masquerade_as_nightly_cargo(&["panic-abort-tests"]) .run(); } #[cargo_test(nightly, reason = "-Zpanic-abort-tests in rustc is unstable")] fn panic_abort_test_profile_inherits() { let p = project() .file( "Cargo.toml", r#" [package] name = 'foo' version = '0.1.0' [dependencies] a = { path = 'a' } [profile.dev] panic = 'abort' "#, ) .file( "src/lib.rs", r#" #[test] fn foo() { a::foo(); } "#, ) .file("a/Cargo.toml", &basic_lib_manifest("a")) .file("a/src/lib.rs", "pub fn foo() {}") .build(); p.cargo("test -Z panic-abort-tests -v") .masquerade_as_nightly_cargo(&["panic-abort-tests"]) .with_status(0) .run(); } 
#[cargo_test] fn bin_env_for_test() { // Test for the `CARGO_BIN_` environment variables for tests. // // Note: The Unicode binary uses a `[[bin]]` definition because different // filesystems normalize utf-8 in different ways. For example, HFS uses // "gru\u{308}ßen" and APFS uses "gr\u{fc}ßen". Defining it in TOML forces // one form to be used. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" edition = "2018" [[bin]] name = 'grüßen' path = 'src/bin/grussen.rs' "#, ) .file("src/bin/foo.rs", "fn main() {}") .file("src/bin/with-dash.rs", "fn main() {}") .file("src/bin/grussen.rs", "fn main() {}") .build(); let bin_path = |name| p.bin(name).to_string_lossy().replace("\\", "\\\\"); p.change_file( "tests/check_env.rs", &r#" #[test] fn run_bins() { assert_eq!(env!("CARGO_BIN_EXE_foo"), ""); assert_eq!(env!("CARGO_BIN_EXE_with-dash"), ""); assert_eq!(env!("CARGO_BIN_EXE_grüßen"), ""); } "# .replace("", &bin_path("foo")) .replace("", &bin_path("with-dash")) .replace("", &bin_path("grüßen")), ); p.cargo("test --test check_env").run(); p.cargo("check --test check_env").run(); } #[cargo_test] fn test_workspaces_cwd() { // This tests that all the different test types are executed from the // crate directory (manifest_dir), and not from the workspace root. let make_lib_file = |expected| { format!( r#" //! ``` //! assert_eq!("{expected}", std::fs::read_to_string("file.txt").unwrap()); //! assert_eq!("{expected}", include_str!("../file.txt")); //! assert_eq!( //! std::path::PathBuf::from(std::env!("CARGO_MANIFEST_DIR")), //! std::env::current_dir().unwrap(), //! ); //! 
``` #[test] fn test_unit_{expected}_cwd() {{ assert_eq!("{expected}", std::fs::read_to_string("file.txt").unwrap()); assert_eq!("{expected}", include_str!("../file.txt")); assert_eq!( std::path::PathBuf::from(std::env!("CARGO_MANIFEST_DIR")), std::env::current_dir().unwrap(), ); }} "#, expected = expected ) }; let make_test_file = |expected| { format!( r#" #[test] fn test_integration_{expected}_cwd() {{ assert_eq!("{expected}", std::fs::read_to_string("file.txt").unwrap()); assert_eq!("{expected}", include_str!("../file.txt")); assert_eq!( std::path::PathBuf::from(std::env!("CARGO_MANIFEST_DIR")), std::env::current_dir().unwrap(), ); }} "#, expected = expected ) }; let p = project() .file( "Cargo.toml", r#" [package] name = "root-crate" version = "0.0.0" [workspace] members = [".", "nested-crate", "very/deeply/nested/deep-crate"] "#, ) .file("file.txt", "root") .file("src/lib.rs", &make_lib_file("root")) .file("tests/integration.rs", &make_test_file("root")) .file( "nested-crate/Cargo.toml", r#" [package] name = "nested-crate" version = "0.0.0" "#, ) .file("nested-crate/file.txt", "nested") .file("nested-crate/src/lib.rs", &make_lib_file("nested")) .file( "nested-crate/tests/integration.rs", &make_test_file("nested"), ) .file( "very/deeply/nested/deep-crate/Cargo.toml", r#" [package] name = "deep-crate" version = "0.0.0" "#, ) .file("very/deeply/nested/deep-crate/file.txt", "deep") .file( "very/deeply/nested/deep-crate/src/lib.rs", &make_lib_file("deep"), ) .file( "very/deeply/nested/deep-crate/tests/integration.rs", &make_test_file("deep"), ) .build(); p.cargo("test --workspace --all") .with_stderr_contains("[DOCTEST] root-crate") .with_stderr_contains("[DOCTEST] nested-crate") .with_stderr_contains("[DOCTEST] deep-crate") .with_stdout_contains("test test_unit_root_cwd ... ok") .with_stdout_contains("test test_unit_nested_cwd ... ok") .with_stdout_contains("test test_unit_deep_cwd ... ok") .with_stdout_contains("test test_integration_root_cwd ... 
ok") .with_stdout_contains("test test_integration_nested_cwd ... ok") .with_stdout_contains("test test_integration_deep_cwd ... ok") .run(); p.cargo("test -p root-crate --all") .with_stderr_contains("[DOCTEST] root-crate") .with_stdout_contains("test test_unit_root_cwd ... ok") .with_stdout_contains("test test_integration_root_cwd ... ok") .run(); p.cargo("test -p nested-crate --all") .with_stderr_contains("[DOCTEST] nested-crate") .with_stdout_contains("test test_unit_nested_cwd ... ok") .with_stdout_contains("test test_integration_nested_cwd ... ok") .run(); p.cargo("test -p deep-crate --all") .with_stderr_contains("[DOCTEST] deep-crate") .with_stdout_contains("test test_unit_deep_cwd ... ok") .with_stdout_contains("test test_integration_deep_cwd ... ok") .run(); p.cargo("test --all") .cwd("nested-crate") .with_stderr_contains("[DOCTEST] nested-crate") .with_stdout_contains("test test_unit_nested_cwd ... ok") .with_stdout_contains("test test_integration_nested_cwd ... ok") .run(); p.cargo("test --all") .cwd("very/deeply/nested/deep-crate") .with_stderr_contains("[DOCTEST] deep-crate") .with_stdout_contains("test test_unit_deep_cwd ... ok") .with_stdout_contains("test test_integration_deep_cwd ... ok") .run(); } #[cargo_test] fn execution_error() { // Checks the behavior when a test fails to launch. let p = project() .file( "tests/t1.rs", r#" #[test] fn foo() {} "#, ) .build(); let key = format!("CARGO_TARGET_{}_RUNNER", rustc_host_env()); p.cargo("test") .env(&key, "does_not_exist") // The actual error is usually "no such file", but on Windows it has a // custom message. Since matching against the error string produced by // Rust is not very reliable, this just uses `[..]`. .with_stderr( "\ [COMPILING] foo v0.0.1 [..] [FINISHED] test [..] [RUNNING] tests/t1.rs (target/debug/deps/t1[..]) error: test failed, to rerun pass `--test t1` Caused by: could not execute process `does_not_exist [ROOT]/foo/target/debug/deps/t1[..]` (never executed) Caused by: [..] 
", ) .with_status(101) .run(); } #[cargo_test] fn nonzero_exit_status() { // Tests for nonzero exit codes from tests. let p = project() .file( "tests/t1.rs", r#" #[test] fn t() { panic!("this is a normal error") } "#, ) .file( "tests/t2.rs", r#" #[test] fn t() { std::process::exit(4) } "#, ) .build(); p.cargo("test --test t1") .with_stderr( "\ [COMPILING] foo [..] [FINISHED] test [..] [RUNNING] tests/t1.rs (target/debug/deps/t1[..]) error: test failed, to rerun pass `--test t1` ", ) .with_stdout_contains("[..]this is a normal error[..]") .with_status(101) .run(); p.cargo("test --test t2") .with_stderr( "\ [COMPILING] foo v0.0.1 [..] [FINISHED] test [..] [RUNNING] tests/t2.rs (target/debug/deps/t2[..]) error: test failed, to rerun pass `--test t2` Caused by: process didn't exit successfully: `[ROOT]/foo/target/debug/deps/t2[..]` (exit [..]: 4) ", ) .with_status(4) .run(); // no-fail-fast always uses 101 p.cargo("test --no-fail-fast") .with_stderr( "\ [FINISHED] test [..] [RUNNING] tests/t1.rs (target/debug/deps/t1[..]) error: test failed, to rerun pass `--test t1` [RUNNING] tests/t2.rs (target/debug/deps/t2[..]) error: test failed, to rerun pass `--test t2` Caused by: process didn't exit successfully: `[ROOT]/foo/target/debug/deps/t2[..]` (exit [..]: 4) error: 2 targets failed: `--test t1` `--test t2` ", ) .with_status(101) .run(); } cargo-0.66.0/tests/testsuite/timings.rs000066400000000000000000000021621432416201200201050ustar00rootroot00000000000000//! Tests for --timings. use cargo_test_support::project; use cargo_test_support::registry::Package; #[cargo_test] fn timings_works() { Package::new("dep", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] dep = "0.1" "#, ) .file("src/lib.rs", "") .file("src/main.rs", "fn main() {}") .file("tests/t1.rs", "") .file("examples/ex1.rs", "fn main() {}") .build(); p.cargo("build --all-targets --timings") .with_stderr_unordered( "\ [UPDATING] [..] 
[DOWNLOADING] crates ... [DOWNLOADED] dep v0.1.0 [..] [COMPILING] dep v0.1.0 [COMPILING] foo v0.1.0 [..] [FINISHED] [..] Timing report saved to [..]/foo/target/cargo-timings/cargo-timing-[..].html ", ) .run(); p.cargo("clean").run(); p.cargo("test --timings").run(); p.cargo("clean").run(); p.cargo("check --timings").run(); p.cargo("clean").run(); p.cargo("doc --timings").run(); } cargo-0.66.0/tests/testsuite/tool_paths.rs000066400000000000000000000252201432416201200206070ustar00rootroot00000000000000//! Tests for configuration values that point to programs. use cargo_test_support::{basic_lib_manifest, project, rustc_host, rustc_host_env}; #[cargo_test] fn pathless_tools() { let target = rustc_host(); let foo = project() .file("Cargo.toml", &basic_lib_manifest("foo")) .file("src/lib.rs", "") .file( ".cargo/config", &format!( r#" [target.{}] linker = "nonexistent-linker" "#, target ), ) .build(); foo.cargo("build --verbose") .with_stderr( "\ [COMPILING] foo v0.5.0 ([CWD]) [RUNNING] `rustc [..] -C linker=nonexistent-linker [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn absolute_tools() { let target = rustc_host(); // Escaped as they appear within a TOML config file let linker = if cfg!(windows) { r#"C:\\bogus\\nonexistent-linker"# } else { r#"/bogus/nonexistent-linker"# }; let foo = project() .file("Cargo.toml", &basic_lib_manifest("foo")) .file("src/lib.rs", "") .file( ".cargo/config", &format!( r#" [target.{target}] linker = "{linker}" "#, target = target, linker = linker ), ) .build(); foo.cargo("build --verbose") .with_stderr( "\ [COMPILING] foo v0.5.0 ([CWD]) [RUNNING] `rustc [..] -C linker=[..]bogus/nonexistent-linker [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn relative_tools() { let target = rustc_host(); // Escaped as they appear within a TOML config file let linker = if cfg!(windows) { r#".\\tools\\nonexistent-linker"# } else { r#"./tools/nonexistent-linker"# }; // Funky directory structure to test that relative tool paths are made absolute // by reference to the `.cargo/..` directory and not to (for example) the CWD. let p = project() .no_manifest() .file("bar/Cargo.toml", &basic_lib_manifest("bar")) .file("bar/src/lib.rs", "") .file( ".cargo/config", &format!( r#" [target.{target}] linker = "{linker}" "#, target = target, linker = linker ), ) .build(); let prefix = p.root().into_os_string().into_string().unwrap(); p.cargo("build --verbose") .cwd("bar") .with_stderr(&format!( "\ [COMPILING] bar v0.5.0 ([CWD]) [RUNNING] `rustc [..] -C linker={prefix}/./tools/nonexistent-linker [..]` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", prefix = prefix, )) .run(); } #[cargo_test] fn custom_runner() { let target = rustc_host(); let p = project() .file("src/main.rs", "fn main() {}") .file("tests/test.rs", "") .file("benches/bench.rs", "") .file( ".cargo/config", &format!( r#" [target.{}] runner = "nonexistent-runner -r" "#, target ), ) .build(); p.cargo("run -- --param") .with_status(101) .with_stderr_contains( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [RUNNING] `nonexistent-runner -r target/debug/foo[EXE] --param` ", ) .run(); p.cargo("test --test test --verbose -- --param") .with_status(101) .with_stderr_contains( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc [..]` [FINISHED] test [unoptimized + debuginfo] target(s) in [..] 
[RUNNING] `nonexistent-runner -r [..]/target/debug/deps/test-[..][EXE] --param` ", ) .run(); p.cargo("bench --bench bench --verbose -- --param") .with_status(101) .with_stderr_contains( "\ [COMPILING] foo v0.0.1 ([CWD]) [RUNNING] `rustc [..]` [RUNNING] `rustc [..]` [FINISHED] bench [optimized] target(s) in [..] [RUNNING] `nonexistent-runner -r [..]/target/release/deps/bench-[..][EXE] --param --bench` ", ) .run(); } // can set a custom runner via `target.'cfg(..)'.runner` #[cargo_test] fn custom_runner_cfg() { let p = project() .file("src/main.rs", "fn main() {}") .file( ".cargo/config", r#" [target.'cfg(not(target_os = "none"))'] runner = "nonexistent-runner -r" "#, ) .build(); p.cargo("run -- --param") .with_status(101) .with_stderr_contains( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [RUNNING] `nonexistent-runner -r target/debug/foo[EXE] --param` ", ) .run(); } // custom runner set via `target.$triple.runner` have precedence over `target.'cfg(..)'.runner` #[cargo_test] fn custom_runner_cfg_precedence() { let target = rustc_host(); let p = project() .file("src/main.rs", "fn main() {}") .file( ".cargo/config", &format!( r#" [target.'cfg(not(target_os = "none"))'] runner = "ignored-runner" [target.{}] runner = "nonexistent-runner -r" "#, target ), ) .build(); p.cargo("run -- --param") .with_status(101) .with_stderr_contains( "\ [COMPILING] foo v0.0.1 ([CWD]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
[RUNNING] `nonexistent-runner -r target/debug/foo[EXE] --param` ", ) .run(); } #[cargo_test] fn custom_runner_cfg_collision() { let p = project() .file("src/main.rs", "fn main() {}") .file( ".cargo/config", r#" [target.'cfg(not(target_arch = "avr"))'] runner = "true" [target.'cfg(not(target_os = "none"))'] runner = "false" "#, ) .build(); p.cargo("run -- --param") .with_status(101) .with_stderr( "\ [ERROR] several matching instances of `target.'cfg(..)'.runner` in configurations first match `cfg(not(target_arch = \"avr\"))` located in [..]/foo/.cargo/config second match `cfg(not(target_os = \"none\"))` located in [..]/foo/.cargo/config ", ) .run(); } #[cargo_test] fn custom_runner_env() { let p = project().file("src/main.rs", "fn main() {}").build(); let key = format!("CARGO_TARGET_{}_RUNNER", rustc_host_env()); p.cargo("run") .env(&key, "nonexistent-runner --foo") .with_status(101) // FIXME: Update "Caused by" error message once rust/pull/87704 is merged. // On Windows, changing to a custom executable resolver has changed the // error messages. .with_stderr(&format!( "\ [COMPILING] foo [..] [FINISHED] dev [..] [RUNNING] `nonexistent-runner --foo target/debug/foo[EXE]` [ERROR] could not execute process `nonexistent-runner --foo target/debug/foo[EXE]` (never executed) Caused by: [..] " )) .run(); } #[cargo_test] fn custom_runner_env_overrides_config() { let target = rustc_host(); let p = project() .file("src/main.rs", "fn main() {}") .file( ".cargo/config.toml", &format!( r#" [target.{}] runner = "should-not-run -r" "#, target ), ) .build(); let key = format!("CARGO_TARGET_{}_RUNNER", rustc_host_env()); p.cargo("run") .env(&key, "should-run --foo") .with_status(101) .with_stderr_contains("[RUNNING] `should-run --foo target/debug/foo[EXE]`") .run(); } #[cargo_test] #[cfg(unix)] // Assumes `true` is in PATH. fn custom_runner_env_true() { // Check for a bug where "true" was interpreted as a boolean instead of // the executable. 
let p = project().file("src/main.rs", "fn main() {}").build(); let key = format!("CARGO_TARGET_{}_RUNNER", rustc_host_env()); p.cargo("run") .env(&key, "true") .with_stderr_contains("[RUNNING] `true target/debug/foo[EXE]`") .run(); } #[cargo_test] fn custom_linker_env() { let p = project().file("src/main.rs", "fn main() {}").build(); let key = format!("CARGO_TARGET_{}_LINKER", rustc_host_env()); p.cargo("build -v") .env(&key, "nonexistent-linker") .with_status(101) .with_stderr_contains("[RUNNING] `rustc [..]-C linker=nonexistent-linker [..]") .run(); } #[cargo_test] fn target_in_environment_contains_lower_case() { let p = project().file("src/main.rs", "fn main() {}").build(); let target = rustc_host(); let env_key = format!( "CARGO_TARGET_{}_LINKER", target.to_lowercase().replace('-', "_") ); p.cargo("build -v --target") .arg(target) .env(&env_key, "nonexistent-linker") .with_stderr_contains(format!( "warning: Environment variables are expected to use uppercase \ letters and underscores, the variable `{}` will be ignored and \ have no effect", env_key )) .run(); } #[cargo_test] fn cfg_ignored_fields() { // Test for some ignored fields in [target.'cfg()'] tables. let p = project() .file( ".cargo/config", r#" # Try some empty tables. [target.'cfg(not(foo))'] [target.'cfg(not(bar))'.somelib] # A bunch of unused fields. 
[target.'cfg(not(target_os = "none"))'] linker = 'false' ar = 'false' foo = {rustc-flags = "-l foo"} invalid = 1 runner = 'false' rustflags = '' "#, ) .file("src/lib.rs", "") .build(); p.cargo("check") .with_stderr( "\ [WARNING] unused key `somelib` in [target] config table `cfg(not(bar))` [WARNING] unused key `ar` in [target] config table `cfg(not(target_os = \"none\"))` [WARNING] unused key `foo` in [target] config table `cfg(not(target_os = \"none\"))` [WARNING] unused key `invalid` in [target] config table `cfg(not(target_os = \"none\"))` [WARNING] unused key `linker` in [target] config table `cfg(not(target_os = \"none\"))` [CHECKING] foo v0.0.1 ([..]) [FINISHED] [..] ", ) .run(); } cargo-0.66.0/tests/testsuite/tree.rs000066400000000000000000001301731432416201200173760ustar00rootroot00000000000000//! Tests for the `cargo tree` command. use super::features2::switch_to_resolver_2; use cargo_test_support::cross_compile::{self, alternate}; use cargo_test_support::registry::{Dependency, Package}; use cargo_test_support::{basic_manifest, git, project, rustc_host, Project}; fn make_simple_proj() -> Project { Package::new("c", "1.0.0").publish(); Package::new("b", "1.0.0").dep("c", "1.0").publish(); Package::new("a", "1.0.0").dep("b", "1.0").publish(); Package::new("bdep", "1.0.0").dep("b", "1.0").publish(); Package::new("devdep", "1.0.0").dep("b", "1.0.0").publish(); project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] a = "1.0" c = "1.0" [build-dependencies] bdep = "1.0" [dev-dependencies] devdep = "1.0" "#, ) .file("src/lib.rs", "") .file("build.rs", "fn main() {}") .build() } #[cargo_test] fn simple() { // A simple test with a few different dependencies. 
let p = make_simple_proj(); p.cargo("tree") .with_stdout( "\ foo v0.1.0 ([..]/foo) β”œβ”€β”€ a v1.0.0 β”‚ └── b v1.0.0 β”‚ └── c v1.0.0 └── c v1.0.0 [build-dependencies] └── bdep v1.0.0 └── b v1.0.0 (*) [dev-dependencies] └── devdep v1.0.0 └── b v1.0.0 (*) ", ) .run(); p.cargo("tree -p bdep") .with_stdout( "\ bdep v1.0.0 └── b v1.0.0 └── c v1.0.0 ", ) .run(); } #[cargo_test] fn virtual_workspace() { // Multiple packages in a virtual workspace. Package::new("somedep", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a", "baz", "c"] "#, ) .file("a/Cargo.toml", &basic_manifest("a", "1.0.0")) .file("a/src/lib.rs", "") .file( "baz/Cargo.toml", r#" [package] name = "baz" version = "0.1.0" [dependencies] c = { path = "../c" } somedep = "1.0" "#, ) .file("baz/src/lib.rs", "") .file("c/Cargo.toml", &basic_manifest("c", "1.0.0")) .file("c/src/lib.rs", "") .build(); p.cargo("tree") .with_stdout( "\ a v1.0.0 ([..]/foo/a) baz v0.1.0 ([..]/foo/baz) β”œβ”€β”€ c v1.0.0 ([..]/foo/c) └── somedep v1.0.0 c v1.0.0 ([..]/foo/c) ", ) .run(); p.cargo("tree -p a").with_stdout("a v1.0.0 [..]").run(); p.cargo("tree") .cwd("baz") .with_stdout( "\ baz v0.1.0 ([..]/foo/baz) β”œβ”€β”€ c v1.0.0 ([..]/foo/c) └── somedep v1.0.0 ", ) .run(); // exclude baz p.cargo("tree --workspace --exclude baz") .with_stdout( "\ a v1.0.0 ([..]/foo/a) c v1.0.0 ([..]/foo/c) ", ) .run(); // exclude glob '*z' p.cargo("tree --workspace --exclude '*z'") .with_stdout( "\ a v1.0.0 ([..]/foo/a) c v1.0.0 ([..]/foo/c) ", ) .run(); // include glob '*z' p.cargo("tree -p '*z'") .with_stdout( "\ baz v0.1.0 ([..]/foo/baz) β”œβ”€β”€ c v1.0.0 ([..]/foo/c) └── somedep v1.0.0 ", ) .run(); } #[cargo_test] fn dedupe_edges() { // Works around https://github.com/rust-lang/cargo/issues/7985 Package::new("bitflags", "1.0.0").publish(); Package::new("manyfeat", "1.0.0") .feature("f1", &[]) .feature("f2", &[]) .feature("f3", &[]) .dep("bitflags", "1.0") .publish(); Package::new("a", "1.0.0") 
.feature_dep("manyfeat", "1.0", &["f1"]) .publish(); Package::new("b", "1.0.0") .feature_dep("manyfeat", "1.0", &["f2"]) .publish(); Package::new("c", "1.0.0") .feature_dep("manyfeat", "1.0", &["f3"]) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] a = "1.0" b = "1.0" c = "1.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("tree") .with_stdout( "\ foo v0.1.0 ([..]/foo) β”œβ”€β”€ a v1.0.0 β”‚ └── manyfeat v1.0.0 β”‚ └── bitflags v1.0.0 β”œβ”€β”€ b v1.0.0 β”‚ └── manyfeat v1.0.0 (*) └── c v1.0.0 └── manyfeat v1.0.0 (*) ", ) .run(); } #[cargo_test] fn renamed_deps() { // Handles renamed dependencies. Package::new("one", "1.0.0").publish(); Package::new("two", "1.0.0").publish(); Package::new("bar", "1.0.0").dep("one", "1.0").publish(); Package::new("bar", "2.0.0").dep("two", "1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.0.0" [dependencies] bar1 = {version = "1.0", package="bar"} bar2 = {version = "2.0", package="bar"} "#, ) .file("src/lib.rs", "") .build(); p.cargo("tree") .with_stdout( "\ foo v1.0.0 ([..]/foo) β”œβ”€β”€ bar v1.0.0 β”‚ └── one v1.0.0 └── bar v2.0.0 └── two v1.0.0 ", ) .run(); } #[cargo_test] fn source_kinds() { // Handles git and path sources. 
Package::new("regdep", "1.0.0").publish(); let git_project = git::new("gitdep", |p| { p.file("Cargo.toml", &basic_manifest("gitdep", "1.0.0")) .file("src/lib.rs", "") }); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.1.0" [dependencies] regdep = "1.0" pathdep = {{ path = "pathdep" }} gitdep = {{ git = "{}" }} "#, git_project.url() ), ) .file("src/lib.rs", "") .file("pathdep/Cargo.toml", &basic_manifest("pathdep", "1.0.0")) .file("pathdep/src/lib.rs", "") .build(); p.cargo("tree") .with_stdout( "\ foo v0.1.0 ([..]/foo) β”œβ”€β”€ gitdep v1.0.0 (file://[..]/gitdep#[..]) β”œβ”€β”€ pathdep v1.0.0 ([..]/foo/pathdep) └── regdep v1.0.0 ", ) .run(); } #[cargo_test] fn features() { // Exercises a variety of feature behaviors. Package::new("optdep_default", "1.0.0").publish(); Package::new("optdep", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "a" version = "0.1.0" [dependencies] optdep_default = { version = "1.0", optional = true } optdep = { version = "1.0", optional = true } [features] default = ["optdep_default"] "#, ) .file("src/lib.rs", "") .build(); p.cargo("tree") .with_stdout( "\ a v0.1.0 ([..]/foo) └── optdep_default v1.0.0 ", ) .run(); p.cargo("tree --no-default-features") .with_stdout( "\ a v0.1.0 ([..]/foo) ", ) .run(); p.cargo("tree --all-features") .with_stdout( "\ a v0.1.0 ([..]/foo) β”œβ”€β”€ optdep v1.0.0 └── optdep_default v1.0.0 ", ) .run(); p.cargo("tree --features optdep") .with_stdout( "\ a v0.1.0 ([..]/foo) β”œβ”€β”€ optdep v1.0.0 └── optdep_default v1.0.0 ", ) .run(); } #[cargo_test] fn filters_target() { // --target flag if cross_compile::disabled() { return; } Package::new("targetdep", "1.0.0").publish(); Package::new("hostdep", "1.0.0").publish(); Package::new("devdep", "1.0.0").publish(); Package::new("build_target_dep", "1.0.0").publish(); Package::new("build_host_dep", "1.0.0") .target_dep("targetdep", "1.0", alternate()) .target_dep("hostdep", "1.0", rustc_host()) 
.publish(); Package::new("pm_target", "1.0.0") .proc_macro(true) .publish(); Package::new("pm_host", "1.0.0").proc_macro(true).publish(); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.1.0" [target.'{alt}'.dependencies] targetdep = "1.0" pm_target = "1.0" [target.'{host}'.dependencies] hostdep = "1.0" pm_host = "1.0" [target.'{alt}'.dev-dependencies] devdep = "1.0" [target.'{alt}'.build-dependencies] build_target_dep = "1.0" [target.'{host}'.build-dependencies] build_host_dep = "1.0" "#, alt = alternate(), host = rustc_host() ), ) .file("src/lib.rs", "") .file("build.rs", "fn main() {}") .build(); p.cargo("tree") .with_stdout( "\ foo v0.1.0 ([..]/foo) β”œβ”€β”€ hostdep v1.0.0 └── pm_host v1.0.0 (proc-macro) [build-dependencies] └── build_host_dep v1.0.0 └── hostdep v1.0.0 ", ) .run(); p.cargo("tree --target") .arg(alternate()) .with_stdout( "\ foo v0.1.0 ([..]/foo) β”œβ”€β”€ pm_target v1.0.0 (proc-macro) └── targetdep v1.0.0 [build-dependencies] └── build_host_dep v1.0.0 └── hostdep v1.0.0 [dev-dependencies] └── devdep v1.0.0 ", ) .run(); p.cargo("tree --target") .arg(rustc_host()) .with_stdout( "\ foo v0.1.0 ([..]/foo) β”œβ”€β”€ hostdep v1.0.0 └── pm_host v1.0.0 (proc-macro) [build-dependencies] └── build_host_dep v1.0.0 └── hostdep v1.0.0 ", ) .run(); p.cargo("tree --target=all") .with_stdout( "\ foo v0.1.0 ([..]/foo) β”œβ”€β”€ hostdep v1.0.0 β”œβ”€β”€ pm_host v1.0.0 (proc-macro) β”œβ”€β”€ pm_target v1.0.0 (proc-macro) └── targetdep v1.0.0 [build-dependencies] β”œβ”€β”€ build_host_dep v1.0.0 β”‚ β”œβ”€β”€ hostdep v1.0.0 β”‚ └── targetdep v1.0.0 └── build_target_dep v1.0.0 [dev-dependencies] └── devdep v1.0.0 ", ) .run(); // no-proc-macro p.cargo("tree --target=all -e no-proc-macro") .with_stdout( "\ foo v0.1.0 ([..]/foo) β”œβ”€β”€ hostdep v1.0.0 └── targetdep v1.0.0 [build-dependencies] β”œβ”€β”€ build_host_dep v1.0.0 β”‚ β”œβ”€β”€ hostdep v1.0.0 β”‚ └── targetdep v1.0.0 └── build_target_dep v1.0.0 [dev-dependencies] └── 
devdep v1.0.0 ", ) .run(); } #[cargo_test] fn dep_kinds() { Package::new("inner-devdep", "1.0.0").publish(); Package::new("inner-builddep", "1.0.0").publish(); Package::new("inner-normal", "1.0.0").publish(); Package::new("inner-pm", "1.0.0").proc_macro(true).publish(); Package::new("inner-buildpm", "1.0.0") .proc_macro(true) .publish(); Package::new("normaldep", "1.0.0") .dep("inner-normal", "1.0") .dev_dep("inner-devdep", "1.0") .build_dep("inner-builddep", "1.0") .publish(); Package::new("devdep", "1.0.0") .dep("inner-normal", "1.0") .dep("inner-pm", "1.0") .dev_dep("inner-devdep", "1.0") .build_dep("inner-builddep", "1.0") .build_dep("inner-buildpm", "1.0") .publish(); Package::new("builddep", "1.0.0") .dep("inner-normal", "1.0") .dev_dep("inner-devdep", "1.0") .build_dep("inner-builddep", "1.0") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] normaldep = "1.0" [dev-dependencies] devdep = "1.0" [build-dependencies] builddep = "1.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("tree") .with_stdout( "\ foo v0.1.0 ([..]/foo) └── normaldep v1.0.0 └── inner-normal v1.0.0 [build-dependencies] └── inner-builddep v1.0.0 [build-dependencies] └── builddep v1.0.0 └── inner-normal v1.0.0 [build-dependencies] └── inner-builddep v1.0.0 [dev-dependencies] └── devdep v1.0.0 β”œβ”€β”€ inner-normal v1.0.0 └── inner-pm v1.0.0 (proc-macro) [build-dependencies] β”œβ”€β”€ inner-builddep v1.0.0 └── inner-buildpm v1.0.0 (proc-macro) ", ) .run(); p.cargo("tree -e no-dev") .with_stdout( "\ foo v0.1.0 ([..]/foo) └── normaldep v1.0.0 └── inner-normal v1.0.0 [build-dependencies] └── inner-builddep v1.0.0 [build-dependencies] └── builddep v1.0.0 └── inner-normal v1.0.0 [build-dependencies] └── inner-builddep v1.0.0 ", ) .run(); p.cargo("tree -e normal") .with_stdout( "\ foo v0.1.0 ([..]/foo) └── normaldep v1.0.0 └── inner-normal v1.0.0 ", ) .run(); p.cargo("tree -e dev,build") .with_stdout( "\ foo v0.1.0 ([..]/foo) 
[build-dependencies] └── builddep v1.0.0 [build-dependencies] └── inner-builddep v1.0.0 [dev-dependencies] └── devdep v1.0.0 [build-dependencies] β”œβ”€β”€ inner-builddep v1.0.0 └── inner-buildpm v1.0.0 (proc-macro) ", ) .run(); p.cargo("tree -e dev,build,no-proc-macro") .with_stdout( "\ foo v0.1.0 ([..]/foo) [build-dependencies] └── builddep v1.0.0 [build-dependencies] └── inner-builddep v1.0.0 [dev-dependencies] └── devdep v1.0.0 [build-dependencies] └── inner-builddep v1.0.0 ", ) .run(); } #[cargo_test] fn cyclic_dev_dep() { // Cyclical dev-dependency and inverse flag. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dev-dependencies] dev-dep = { path = "dev-dep" } "#, ) .file("src/lib.rs", "") .file( "dev-dep/Cargo.toml", r#" [package] name = "dev-dep" version = "0.1.0" [dependencies] foo = { path=".." } "#, ) .file("dev-dep/src/lib.rs", "") .build(); p.cargo("tree") .with_stdout( "\ foo v0.1.0 ([..]/foo) [dev-dependencies] └── dev-dep v0.1.0 ([..]/foo/dev-dep) └── foo v0.1.0 ([..]/foo) (*) ", ) .run(); p.cargo("tree --invert foo") .with_stdout( "\ foo v0.1.0 ([..]/foo) └── dev-dep v0.1.0 ([..]/foo/dev-dep) [dev-dependencies] └── foo v0.1.0 ([..]/foo) (*) ", ) .run(); } #[cargo_test] fn invert() { Package::new("b1", "1.0.0").dep("c", "1.0").publish(); Package::new("b2", "1.0.0").dep("d", "1.0").publish(); Package::new("c", "1.0.0").publish(); Package::new("d", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] b1 = "1.0" b2 = "1.0" c = "1.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("tree") .with_stdout( "\ foo v0.1.0 ([..]/foo) β”œβ”€β”€ b1 v1.0.0 β”‚ └── c v1.0.0 β”œβ”€β”€ b2 v1.0.0 β”‚ └── d v1.0.0 └── c v1.0.0 ", ) .run(); p.cargo("tree --invert c") .with_stdout( "\ c v1.0.0 β”œβ”€β”€ b1 v1.0.0 β”‚ └── foo v0.1.0 ([..]/foo) └── foo v0.1.0 ([..]/foo) ", ) .run(); } #[cargo_test] fn invert_with_build_dep() { // -i for a common dependency between 
normal and build deps. Package::new("common", "1.0.0").publish(); Package::new("bdep", "1.0.0").dep("common", "1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] common = "1.0" [build-dependencies] bdep = "1.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("tree") .with_stdout( "\ foo v0.1.0 ([..]/foo) └── common v1.0.0 [build-dependencies] └── bdep v1.0.0 └── common v1.0.0 ", ) .run(); p.cargo("tree -i common") .with_stdout( "\ common v1.0.0 β”œβ”€β”€ bdep v1.0.0 β”‚ [build-dependencies] β”‚ └── foo v0.1.0 ([..]/foo) └── foo v0.1.0 ([..]/foo) ", ) .run(); } #[cargo_test] fn no_indent() { let p = make_simple_proj(); p.cargo("tree --prefix=none") .with_stdout( "\ foo v0.1.0 ([..]/foo) a v1.0.0 b v1.0.0 c v1.0.0 c v1.0.0 bdep v1.0.0 b v1.0.0 (*) devdep v1.0.0 b v1.0.0 (*) ", ) .run(); } #[cargo_test] fn prefix_depth() { let p = make_simple_proj(); p.cargo("tree --prefix=depth") .with_stdout( "\ 0foo v0.1.0 ([..]/foo) 1a v1.0.0 2b v1.0.0 3c v1.0.0 1c v1.0.0 1bdep v1.0.0 2b v1.0.0 (*) 1devdep v1.0.0 2b v1.0.0 (*) ", ) .run(); } #[cargo_test] fn no_dedupe() { let p = make_simple_proj(); p.cargo("tree --no-dedupe") .with_stdout( "\ foo v0.1.0 ([..]/foo) β”œβ”€β”€ a v1.0.0 β”‚ └── b v1.0.0 β”‚ └── c v1.0.0 └── c v1.0.0 [build-dependencies] └── bdep v1.0.0 └── b v1.0.0 └── c v1.0.0 [dev-dependencies] └── devdep v1.0.0 └── b v1.0.0 └── c v1.0.0 ", ) .run(); } #[cargo_test] fn no_dedupe_cycle() { // --no-dedupe with a dependency cycle let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dev-dependencies] bar = {path = "bar"} "#, ) .file("src/lib.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" [dependencies] foo = {path=".."} "#, ) .file("bar/src/lib.rs", "") .build(); p.cargo("tree") .with_stdout( "\ foo v0.1.0 ([..]/foo) [dev-dependencies] └── bar v0.1.0 ([..]/foo/bar) └── foo v0.1.0 ([..]/foo) (*) ", ) .run(); p.cargo("tree --no-dedupe") 
.with_stdout( "\ foo v0.1.0 ([..]/foo) [dev-dependencies] └── bar v0.1.0 ([..]/foo/bar) └── foo v0.1.0 ([..]/foo) (*) ", ) .run(); } #[cargo_test] fn duplicates() { Package::new("dog", "1.0.0").publish(); Package::new("dog", "2.0.0").publish(); Package::new("cat", "1.0.0").publish(); Package::new("cat", "2.0.0").publish(); Package::new("dep", "1.0.0") .dep("dog", "1.0") .dep("cat", "1.0") .publish(); let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a", "b"] "#, ) .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.1.0" [dependencies] dog1 = { version = "1.0", package = "dog" } dog2 = { version = "2.0", package = "dog" } "#, ) .file("a/src/lib.rs", "") .file( "b/Cargo.toml", r#" [package] name = "b" version = "0.1.0" [dependencies] dep = "1.0" cat = "2.0" "#, ) .file("b/src/lib.rs", "") .build(); p.cargo("tree -p a") .with_stdout( "\ a v0.1.0 ([..]/foo/a) β”œβ”€β”€ dog v1.0.0 └── dog v2.0.0 ", ) .run(); p.cargo("tree -p b") .with_stdout( "\ b v0.1.0 ([..]/foo/b) β”œβ”€β”€ cat v2.0.0 └── dep v1.0.0 β”œβ”€β”€ cat v1.0.0 └── dog v1.0.0 ", ) .run(); p.cargo("tree -p a -d") .with_stdout( "\ dog v1.0.0 └── a v0.1.0 ([..]/foo/a) dog v2.0.0 └── a v0.1.0 ([..]/foo/a) ", ) .run(); p.cargo("tree -p b -d") .with_stdout( "\ cat v1.0.0 └── dep v1.0.0 └── b v0.1.0 ([..]/foo/b) cat v2.0.0 └── b v0.1.0 ([..]/foo/b) ", ) .run(); } #[cargo_test] fn duplicates_with_target() { // --target flag if cross_compile::disabled() { return; } Package::new("a", "1.0.0").publish(); Package::new("dog", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] a = "1.0" dog = "1.0" [build-dependencies] a = "1.0" dog = "1.0" "#, ) .file("src/lib.rs", "") .file("build.rs", "fn main() {}") .build(); p.cargo("tree -d").with_stdout("").run(); p.cargo("tree -d --target") .arg(alternate()) .with_stdout("") .run(); p.cargo("tree -d --target") .arg(rustc_host()) .with_stdout("") .run(); p.cargo("tree -d 
--target=all").with_stdout("").run(); } #[cargo_test] fn charset() { let p = make_simple_proj(); p.cargo("tree --charset ascii") .with_stdout( "\ foo v0.1.0 ([..]/foo) |-- a v1.0.0 | `-- b v1.0.0 | `-- c v1.0.0 `-- c v1.0.0 [build-dependencies] `-- bdep v1.0.0 `-- b v1.0.0 (*) [dev-dependencies] `-- devdep v1.0.0 `-- b v1.0.0 (*) ", ) .run(); } #[cargo_test] fn format() { Package::new("dep", "1.0.0").publish(); Package::new("other-dep", "1.0.0").publish(); Package::new("dep_that_is_awesome", "1.0.0") .file( "Cargo.toml", r#" [package] name = "dep_that_is_awesome" version = "1.0.0" [lib] name = "awesome_dep" "#, ) .file("src/lib.rs", "pub struct Straw;") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" license = "MIT" repository = "https://github.com/rust-lang/cargo" [dependencies] dep = {version="1.0", optional=true} other-dep = {version="1.0", optional=true} dep_that_is_awesome = {version="1.0", optional=true} [features] default = ["foo"] foo = ["bar"] bar = [] "#, ) .file("src/main.rs", "") .build(); p.cargo("tree --format <<<{p}>>>") .with_stdout("<<>>") .run(); p.cargo("tree --format {}") .with_stderr( "\ [ERROR] tree format `{}` not valid Caused by: unsupported pattern `` ", ) .with_status(101) .run(); p.cargo("tree --format {p}-{{hello}}") .with_stdout("foo v0.1.0 ([..]/foo)-{hello}") .run(); p.cargo("tree --format") .arg("{p} {l} {r}") .with_stdout("foo v0.1.0 ([..]/foo) MIT https://github.com/rust-lang/cargo") .run(); p.cargo("tree --format") .arg("{p} {f}") .with_stdout("foo v0.1.0 ([..]/foo) bar,default,foo") .run(); p.cargo("tree --all-features --format") .arg("{p} [{f}]") .with_stdout( "\ foo v0.1.0 ([..]/foo) [bar,default,dep,dep_that_is_awesome,foo,other-dep] β”œβ”€β”€ dep v1.0.0 [] β”œβ”€β”€ dep_that_is_awesome v1.0.0 [] └── other-dep v1.0.0 [] ", ) .run(); p.cargo("tree") .arg("--features=other-dep,dep_that_is_awesome") .arg("--format={lib}") .with_stdout( " β”œβ”€β”€ awesome_dep └── other_dep ", ) 
.run(); } #[cargo_test] fn dev_dep_feature() { // New feature resolver with optional dep Package::new("optdep", "1.0.0").publish(); Package::new("bar", "1.0.0") .add_dep(Dependency::new("optdep", "1.0").optional(true)) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dev-dependencies] bar = { version = "1.0", features = ["optdep"] } [dependencies] bar = "1.0" "#, ) .file("src/lib.rs", "") .build(); // Old behavior. p.cargo("tree") .with_stdout( "\ foo v0.1.0 ([..]/foo) └── bar v1.0.0 └── optdep v1.0.0 [dev-dependencies] └── bar v1.0.0 (*) ", ) .run(); p.cargo("tree -e normal") .with_stdout( "\ foo v0.1.0 ([..]/foo) └── bar v1.0.0 └── optdep v1.0.0 ", ) .run(); // New behavior. switch_to_resolver_2(&p); p.cargo("tree") .with_stdout( "\ foo v0.1.0 ([..]/foo) └── bar v1.0.0 └── optdep v1.0.0 [dev-dependencies] └── bar v1.0.0 (*) ", ) .run(); p.cargo("tree -e normal") .with_stdout( "\ foo v0.1.0 ([..]/foo) └── bar v1.0.0 ", ) .run(); } #[cargo_test] fn host_dep_feature() { // New feature resolver with optional build dep Package::new("optdep", "1.0.0").publish(); Package::new("bar", "1.0.0") .add_dep(Dependency::new("optdep", "1.0").optional(true)) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [build-dependencies] bar = { version = "1.0", features = ["optdep"] } [dependencies] bar = "1.0" "#, ) .file("src/lib.rs", "") .file("build.rs", "fn main() {}") .build(); // Old behavior p.cargo("tree") .with_stdout( "\ foo v0.1.0 ([..]/foo) └── bar v1.0.0 └── optdep v1.0.0 [build-dependencies] └── bar v1.0.0 (*) ", ) .run(); // -p p.cargo("tree -p bar") .with_stdout( "\ bar v1.0.0 └── optdep v1.0.0 ", ) .run(); // invert p.cargo("tree -i optdep") .with_stdout( "\ optdep v1.0.0 └── bar v1.0.0 └── foo v0.1.0 ([..]/foo) [build-dependencies] └── foo v0.1.0 ([..]/foo) ", ) .run(); // New behavior. 
switch_to_resolver_2(&p); p.cargo("tree") .with_stdout( "\ foo v0.1.0 ([..]/foo) └── bar v1.0.0 [build-dependencies] └── bar v1.0.0 └── optdep v1.0.0 ", ) .run(); p.cargo("tree -p bar") .with_stdout( "\ bar v1.0.0 bar v1.0.0 └── optdep v1.0.0 ", ) .run(); p.cargo("tree -i optdep") .with_stdout( "\ optdep v1.0.0 └── bar v1.0.0 [build-dependencies] └── foo v0.1.0 ([..]/foo) ", ) .run(); // Check that -d handles duplicates with features. p.cargo("tree -d") .with_stdout( "\ bar v1.0.0 └── foo v0.1.0 ([..]/foo) bar v1.0.0 [build-dependencies] └── foo v0.1.0 ([..]/foo) ", ) .run(); } #[cargo_test] fn proc_macro_features() { // New feature resolver with a proc-macro Package::new("optdep", "1.0.0").publish(); Package::new("somedep", "1.0.0") .add_dep(Dependency::new("optdep", "1.0").optional(true)) .publish(); Package::new("pm", "1.0.0") .proc_macro(true) .feature_dep("somedep", "1.0", &["optdep"]) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] pm = "1.0" somedep = "1.0" "#, ) .file("src/lib.rs", "") .build(); // Old behavior p.cargo("tree") .with_stdout( "\ foo v0.1.0 ([..]/foo) β”œβ”€β”€ pm v1.0.0 (proc-macro) β”‚ └── somedep v1.0.0 β”‚ └── optdep v1.0.0 └── somedep v1.0.0 (*) ", ) .run(); // Old behavior + no-proc-macro p.cargo("tree -e no-proc-macro") .with_stdout( "\ foo v0.1.0 ([..]/foo) └── somedep v1.0.0 └── optdep v1.0.0 ", ) .run(); // -p p.cargo("tree -p somedep") .with_stdout( "\ somedep v1.0.0 └── optdep v1.0.0 ", ) .run(); // -p -e no-proc-macro p.cargo("tree -p somedep -e no-proc-macro") .with_stdout( "\ somedep v1.0.0 └── optdep v1.0.0 ", ) .run(); // invert p.cargo("tree -i somedep") .with_stdout( "\ somedep v1.0.0 β”œβ”€β”€ foo v0.1.0 ([..]/foo) └── pm v1.0.0 (proc-macro) └── foo v0.1.0 ([..]/foo) ", ) .run(); // invert + no-proc-macro p.cargo("tree -i somedep -e no-proc-macro") .with_stdout( "\ somedep v1.0.0 └── foo v0.1.0 ([..]/foo) ", ) .run(); // New behavior. 
switch_to_resolver_2(&p); // Note the missing (*) p.cargo("tree") .with_stdout( "\ foo v0.1.0 ([..]/foo) β”œβ”€β”€ pm v1.0.0 (proc-macro) β”‚ └── somedep v1.0.0 β”‚ └── optdep v1.0.0 └── somedep v1.0.0 ", ) .run(); p.cargo("tree -e no-proc-macro") .with_stdout( "\ foo v0.1.0 ([..]/foo) └── somedep v1.0.0 ", ) .run(); p.cargo("tree -p somedep") .with_stdout( "\ somedep v1.0.0 somedep v1.0.0 └── optdep v1.0.0 ", ) .run(); p.cargo("tree -i somedep") .with_stdout( "\ somedep v1.0.0 └── foo v0.1.0 ([..]/foo) somedep v1.0.0 └── pm v1.0.0 (proc-macro) └── foo v0.1.0 ([..]/foo) ", ) .run(); p.cargo("tree -i somedep -e no-proc-macro") .with_stdout( "\ somedep v1.0.0 └── foo v0.1.0 ([..]/foo) somedep v1.0.0 ", ) .run(); } #[cargo_test] fn itarget_opt_dep() { // New feature resolver with optional target dep Package::new("optdep", "1.0.0").publish(); Package::new("common", "1.0.0") .add_dep(Dependency::new("optdep", "1.0").optional(true)) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.0.0" [dependencies] common = "1.0" [target.'cfg(whatever)'.dependencies] common = { version = "1.0", features = ["optdep"] } "#, ) .file("src/lib.rs", "") .build(); // Old behavior p.cargo("tree") .with_stdout( "\ foo v1.0.0 ([..]/foo) └── common v1.0.0 └── optdep v1.0.0 ", ) .run(); // New behavior. switch_to_resolver_2(&p); p.cargo("tree") .with_stdout( "\ foo v1.0.0 ([..]/foo) └── common v1.0.0 ", ) .run(); } #[cargo_test] fn ambiguous_name() { // -p that is ambiguous. Package::new("dep", "1.0.0").publish(); Package::new("dep", "2.0.0").publish(); Package::new("bar", "1.0.0").dep("dep", "2.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] dep = "1.0" bar = "1.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("tree -p dep") .with_stderr_contains( "\ error: There are multiple `dep` packages in your project, and the specification `dep` is ambiguous. 
Please re-run this command with `-p ` where `` is one of the following: dep@1.0.0 dep@2.0.0 ", ) .with_status(101) .run(); } #[cargo_test] fn workspace_features_are_local() { // The features for workspace packages should be the same as `cargo build` // (i.e., the features selected depend on the "current" package). Package::new("optdep", "1.0.0").publish(); Package::new("somedep", "1.0.0") .add_dep(Dependency::new("optdep", "1.0").optional(true)) .publish(); let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a", "b"] "#, ) .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.1.0" [dependencies] somedep = {version="1.0", features=["optdep"]} "#, ) .file("a/src/lib.rs", "") .file( "b/Cargo.toml", r#" [package] name = "b" version = "0.1.0" [dependencies] somedep = "1.0" "#, ) .file("b/src/lib.rs", "") .build(); p.cargo("tree") .with_stdout( "\ a v0.1.0 ([..]/foo/a) └── somedep v1.0.0 └── optdep v1.0.0 b v0.1.0 ([..]/foo/b) └── somedep v1.0.0 (*) ", ) .run(); p.cargo("tree -p a") .with_stdout( "\ a v0.1.0 ([..]/foo/a) └── somedep v1.0.0 └── optdep v1.0.0 ", ) .run(); p.cargo("tree -p b") .with_stdout( "\ b v0.1.0 ([..]/foo/b) └── somedep v1.0.0 ", ) .run(); } #[cargo_test] fn unknown_edge_kind() { let p = project() .file("Cargo.toml", "") .file("src/lib.rs", "") .build(); p.cargo("tree -e unknown") .with_stderr( "\ [ERROR] unknown edge kind `unknown`, valid values are \ \"normal\", \"build\", \"dev\", \ \"no-normal\", \"no-build\", \"no-dev\", \"no-proc-macro\", \ \"features\", or \"all\" ", ) .with_status(101) .run(); } #[cargo_test] fn mixed_no_edge_kinds() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("tree -e no-build,normal") .with_stderr( "\ [ERROR] `normal` dependency kind cannot be mixed with \ \"no-normal\", \"no-build\", or \"no-dev\" dependency kinds ", ) .with_status(101) .run(); // `no-proc-macro` can be mixed with others p.cargo("tree -e 
no-proc-macro,normal") .with_stdout( "\ foo v0.1.0 ([..]/foo) ", ) .run(); } #[cargo_test] fn depth_limit() { let p = make_simple_proj(); p.cargo("tree --depth 0") .with_stdout( "\ foo v0.1.0 ([..]/foo) [build-dependencies] [dev-dependencies] ", ) .run(); p.cargo("tree --depth 1") .with_stdout( "\ foo v0.1.0 ([..]/foo) β”œβ”€β”€ a v1.0.0 └── c v1.0.0 [build-dependencies] └── bdep v1.0.0 [dev-dependencies] └── devdep v1.0.0 ", ) .run(); p.cargo("tree --depth 2") .with_stdout( "\ foo v0.1.0 ([..]/foo) β”œβ”€β”€ a v1.0.0 β”‚ └── b v1.0.0 └── c v1.0.0 [build-dependencies] └── bdep v1.0.0 └── b v1.0.0 (*) [dev-dependencies] └── devdep v1.0.0 └── b v1.0.0 (*) ", ) .run(); // specify a package p.cargo("tree -p bdep --depth 1") .with_stdout( "\ bdep v1.0.0 └── b v1.0.0 ", ) .run(); // different prefix p.cargo("tree --depth 1 --prefix depth") .with_stdout( "\ 0foo v0.1.0 ([..]/foo) 1a v1.0.0 1c v1.0.0 1bdep v1.0.0 1devdep v1.0.0 ", ) .run(); // with edge-kinds p.cargo("tree --depth 1 -e no-dev") .with_stdout( "\ foo v0.1.0 ([..]/foo) β”œβ”€β”€ a v1.0.0 └── c v1.0.0 [build-dependencies] └── bdep v1.0.0 ", ) .run(); // invert p.cargo("tree --depth 1 --invert c") .with_stdout( "\ c v1.0.0 β”œβ”€β”€ b v1.0.0 └── foo v0.1.0 ([..]/foo) ", ) .run(); } #[cargo_test] fn prune() { let p = make_simple_proj(); p.cargo("tree --prune c") .with_stdout( "\ foo v0.1.0 ([..]/foo) └── a v1.0.0 └── b v1.0.0 [build-dependencies] └── bdep v1.0.0 └── b v1.0.0 (*) [dev-dependencies] └── devdep v1.0.0 └── b v1.0.0 (*) ", ) .run(); // multiple prune p.cargo("tree --prune c --prune bdep") .with_stdout( "\ foo v0.1.0 ([..]/foo) └── a v1.0.0 └── b v1.0.0 [build-dependencies] [dev-dependencies] └── devdep v1.0.0 └── b v1.0.0 (*) ", ) .run(); // with edge-kinds p.cargo("tree --prune c -e normal") .with_stdout( "\ foo v0.1.0 ([..]/foo) └── a v1.0.0 └── b v1.0.0 ", ) .run(); // pruning self does not works p.cargo("tree --prune foo") .with_stdout( "\ foo v0.1.0 ([..]/foo) β”œβ”€β”€ a v1.0.0 β”‚ └── b v1.0.0 
β”‚ └── c v1.0.0 └── c v1.0.0 [build-dependencies] └── bdep v1.0.0 └── b v1.0.0 (*) [dev-dependencies] └── devdep v1.0.0 └── b v1.0.0 (*) ", ) .run(); // dep not exist p.cargo("tree --prune no-dep") .with_stderr( "\ [ERROR] package ID specification `no-dep` did not match any packages Did you mean `bdep`? ", ) .with_status(101) .run(); } #[cargo_test] fn cyclic_features() { // Check for stack overflow with cyclic features (oops!). let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.0.0" [features] a = ["b"] b = ["a"] default = ["a"] "#, ) .file("src/lib.rs", "") .build(); p.cargo("tree -e features") .with_stdout("foo v1.0.0 ([ROOT]/foo)") .run(); p.cargo("tree -e features -i foo") .with_stdout( "\ foo v1.0.0 ([ROOT]/foo) β”œβ”€β”€ foo feature \"a\" β”‚ β”œβ”€β”€ foo feature \"b\" β”‚ β”‚ └── foo feature \"a\" (*) β”‚ └── foo feature \"default\" (command-line) β”œβ”€β”€ foo feature \"b\" (*) └── foo feature \"default\" (command-line) ", ) .run(); } #[cargo_test] fn dev_dep_cycle_with_feature() { // Cycle with features and a dev-dependency. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.0.0" [dev-dependencies] bar = { path = "bar" } [features] a = ["bar/feat1"] "#, ) .file("src/lib.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "1.0.0" [dependencies] foo = { path = ".." 
} [features] feat1 = ["foo/a"] "#, ) .file("bar/src/lib.rs", "") .build(); p.cargo("tree -e features --features a") .with_stdout( "\ foo v1.0.0 ([ROOT]/foo) [dev-dependencies] └── bar feature \"default\" └── bar v1.0.0 ([ROOT]/foo/bar) └── foo feature \"default\" (command-line) └── foo v1.0.0 ([ROOT]/foo) (*) ", ) .run(); p.cargo("tree -e features --features a -i foo") .with_stdout( "\ foo v1.0.0 ([ROOT]/foo) β”œβ”€β”€ foo feature \"a\" (command-line) β”‚ └── bar feature \"feat1\" β”‚ └── foo feature \"a\" (command-line) (*) └── foo feature \"default\" (command-line) └── bar v1.0.0 ([ROOT]/foo/bar) β”œβ”€β”€ bar feature \"default\" β”‚ [dev-dependencies] β”‚ └── foo v1.0.0 ([ROOT]/foo) (*) └── bar feature \"feat1\" (*) ", ) .run(); } #[cargo_test] fn dev_dep_cycle_with_feature_nested() { // Checks for an issue where a cyclic dev dependency tries to activate a // feature on its parent that tries to activate the feature back on the // dev-dependency. let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.0.0" [dev-dependencies] bar = { path = "bar" } [features] a = ["bar/feat1"] b = ["a"] "#, ) .file("src/lib.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "1.0.0" [dependencies] foo = { path = ".." 
} [features] feat1 = ["foo/b"] "#, ) .file("bar/src/lib.rs", "") .build(); p.cargo("tree -e features") .with_stdout( "\ foo v1.0.0 ([ROOT]/foo) [dev-dependencies] └── bar feature \"default\" └── bar v1.0.0 ([ROOT]/foo/bar) └── foo feature \"default\" (command-line) └── foo v1.0.0 ([ROOT]/foo) (*) ", ) .run(); p.cargo("tree -e features --features a -i foo") .with_stdout( "\ foo v1.0.0 ([ROOT]/foo) β”œβ”€β”€ foo feature \"a\" (command-line) β”‚ └── foo feature \"b\" β”‚ └── bar feature \"feat1\" β”‚ └── foo feature \"a\" (command-line) (*) β”œβ”€β”€ foo feature \"b\" (*) └── foo feature \"default\" (command-line) └── bar v1.0.0 ([ROOT]/foo/bar) β”œβ”€β”€ bar feature \"default\" β”‚ [dev-dependencies] β”‚ └── foo v1.0.0 ([ROOT]/foo) (*) └── bar feature \"feat1\" (*) ", ) .run(); p.cargo("tree -e features --features b -i foo") .with_stdout( "\ foo v1.0.0 ([ROOT]/foo) β”œβ”€β”€ foo feature \"a\" β”‚ └── foo feature \"b\" (command-line) β”‚ └── bar feature \"feat1\" β”‚ └── foo feature \"a\" (*) β”œβ”€β”€ foo feature \"b\" (command-line) (*) └── foo feature \"default\" (command-line) └── bar v1.0.0 ([ROOT]/foo/bar) β”œβ”€β”€ bar feature \"default\" β”‚ [dev-dependencies] β”‚ └── foo v1.0.0 ([ROOT]/foo) (*) └── bar feature \"feat1\" (*) ", ) .run(); p.cargo("tree -e features --features bar/feat1 -i foo") .with_stdout( "\ foo v1.0.0 ([ROOT]/foo) β”œβ”€β”€ foo feature \"a\" β”‚ └── foo feature \"b\" β”‚ └── bar feature \"feat1\" (command-line) β”‚ └── foo feature \"a\" (*) β”œβ”€β”€ foo feature \"b\" (*) └── foo feature \"default\" (command-line) └── bar v1.0.0 ([ROOT]/foo/bar) β”œβ”€β”€ bar feature \"default\" β”‚ [dev-dependencies] β”‚ └── foo v1.0.0 ([ROOT]/foo) (*) └── bar feature \"feat1\" (command-line) (*) ", ) .run(); } cargo-0.66.0/tests/testsuite/tree_graph_features.rs000066400000000000000000000235141432416201200224550ustar00rootroot00000000000000//! Tests for the `cargo tree` command with -e features option. 
use cargo_test_support::project; use cargo_test_support::registry::{Dependency, Package}; #[cargo_test] fn dep_feature_various() { // Checks different ways of setting features via dependencies. Package::new("optdep", "1.0.0") .feature("default", &["cat"]) .feature("cat", &[]) .publish(); Package::new("defaultdep", "1.0.0") .feature("default", &["f1"]) .feature("f1", &["optdep"]) .add_dep(Dependency::new("optdep", "1.0").optional(true)) .publish(); Package::new("nodefaultdep", "1.0.0") .feature("default", &["f1"]) .feature("f1", &[]) .publish(); Package::new("nameddep", "1.0.0") .add_dep(Dependency::new("serde", "1.0").optional(true)) .feature("default", &["serde-stuff"]) .feature("serde-stuff", &["serde/derive"]) .feature("vehicle", &["car"]) .feature("car", &[]) .publish(); Package::new("serde_derive", "1.0.0").publish(); Package::new("serde", "1.0.0") .feature("derive", &["serde_derive"]) .add_dep(Dependency::new("serde_derive", "1.0").optional(true)) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] defaultdep = "1.0" nodefaultdep = {version="1.0", default-features = false} nameddep = {version="1.0", features = ["vehicle", "serde"]} "#, ) .file("src/lib.rs", "") .build(); p.cargo("tree -e features") .with_stdout( "\ foo v0.1.0 ([..]/foo) β”œβ”€β”€ nodefaultdep v1.0.0 β”œβ”€β”€ defaultdep feature \"default\" β”‚ β”œβ”€β”€ defaultdep v1.0.0 β”‚ β”‚ └── optdep feature \"default\" β”‚ β”‚ β”œβ”€β”€ optdep v1.0.0 β”‚ β”‚ └── optdep feature \"cat\" β”‚ β”‚ └── optdep v1.0.0 β”‚ └── defaultdep feature \"f1\" β”‚ β”œβ”€β”€ defaultdep v1.0.0 (*) β”‚ └── defaultdep feature \"optdep\" β”‚ └── defaultdep v1.0.0 (*) β”œβ”€β”€ nameddep feature \"default\" β”‚ β”œβ”€β”€ nameddep v1.0.0 β”‚ β”‚ └── serde feature \"default\" β”‚ β”‚ └── serde v1.0.0 β”‚ β”‚ └── serde_derive feature \"default\" β”‚ β”‚ └── serde_derive v1.0.0 β”‚ └── nameddep feature \"serde-stuff\" β”‚ β”œβ”€β”€ nameddep v1.0.0 (*) β”‚ β”œβ”€β”€ 
nameddep feature \"serde\" β”‚ β”‚ └── nameddep v1.0.0 (*) β”‚ └── serde feature \"derive\" β”‚ β”œβ”€β”€ serde v1.0.0 (*) β”‚ └── serde feature \"serde_derive\" β”‚ └── serde v1.0.0 (*) β”œβ”€β”€ nameddep feature \"serde\" (*) └── nameddep feature \"vehicle\" β”œβ”€β”€ nameddep v1.0.0 (*) └── nameddep feature \"car\" └── nameddep v1.0.0 (*) ", ) .run(); } #[cargo_test] fn graph_features_ws_interdependent() { // A workspace with interdependent crates. let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a", "b"] "#, ) .file( "a/Cargo.toml", r#" [package] name = "a" version = "0.1.0" [dependencies] b = {path="../b", features=["feat2"]} [features] default = ["a1"] a1 = [] a2 = [] "#, ) .file("a/src/lib.rs", "") .file( "b/Cargo.toml", r#" [package] name = "b" version = "0.1.0" [features] default = ["feat1"] feat1 = [] feat2 = [] "#, ) .file("b/src/lib.rs", "") .build(); p.cargo("tree -e features") .with_stdout( "\ a v0.1.0 ([..]/foo/a) β”œβ”€β”€ b feature \"default\" (command-line) β”‚ β”œβ”€β”€ b v0.1.0 ([..]/foo/b) β”‚ └── b feature \"feat1\" β”‚ └── b v0.1.0 ([..]/foo/b) └── b feature \"feat2\" └── b v0.1.0 ([..]/foo/b) b v0.1.0 ([..]/foo/b) ", ) .run(); p.cargo("tree -e features -i a -i b") .with_stdout( "\ a v0.1.0 ([..]/foo/a) β”œβ”€β”€ a feature \"a1\" β”‚ └── a feature \"default\" (command-line) └── a feature \"default\" (command-line) b v0.1.0 ([..]/foo/b) β”œβ”€β”€ b feature \"default\" (command-line) β”‚ └── a v0.1.0 ([..]/foo/a) (*) β”œβ”€β”€ b feature \"feat1\" β”‚ └── b feature \"default\" (command-line) (*) └── b feature \"feat2\" └── a v0.1.0 ([..]/foo/a) (*) ", ) .run(); } #[cargo_test] fn slash_feature_name() { // dep_name/feat_name syntax Package::new("opt", "1.0.0").feature("feat1", &[]).publish(); Package::new("notopt", "1.0.0") .feature("cat", &[]) .feature("animal", &["cat"]) .publish(); Package::new("opt2", "1.0.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] opt = 
{version = "1.0", optional=true} opt2 = {version = "1.0", optional=true} notopt = "1.0" [features] f1 = ["opt/feat1", "notopt/animal"] f2 = ["f1"] "#, ) .file("src/lib.rs", "") .build(); p.cargo("tree -e features --features f1") .with_stdout( "\ foo v0.1.0 ([..]/foo) β”œβ”€β”€ notopt feature \"default\" β”‚ └── notopt v1.0.0 └── opt feature \"default\" └── opt v1.0.0 ", ) .run(); p.cargo("tree -e features --features f1 -i foo") .with_stdout( "\ foo v0.1.0 ([..]/foo) β”œβ”€β”€ foo feature \"default\" (command-line) β”œβ”€β”€ foo feature \"f1\" (command-line) └── foo feature \"opt\" └── foo feature \"f1\" (command-line) ", ) .run(); p.cargo("tree -e features --features f1 -i notopt") .with_stdout( "\ notopt v1.0.0 β”œβ”€β”€ notopt feature \"animal\" β”‚ └── foo feature \"f1\" (command-line) β”œβ”€β”€ notopt feature \"cat\" β”‚ └── notopt feature \"animal\" (*) └── notopt feature \"default\" └── foo v0.1.0 ([..]/foo) β”œβ”€β”€ foo feature \"default\" (command-line) β”œβ”€β”€ foo feature \"f1\" (command-line) └── foo feature \"opt\" └── foo feature \"f1\" (command-line) ", ) .run(); p.cargo("tree -e features --features notopt/animal -i notopt") .with_stdout( "\ notopt v1.0.0 β”œβ”€β”€ notopt feature \"animal\" (command-line) β”œβ”€β”€ notopt feature \"cat\" β”‚ └── notopt feature \"animal\" (command-line) └── notopt feature \"default\" └── foo v0.1.0 ([..]/foo) └── foo feature \"default\" (command-line) ", ) .run(); p.cargo("tree -e features --all-features") .with_stdout( "\ foo v0.1.0 ([..]/foo) β”œβ”€β”€ notopt feature \"default\" β”‚ └── notopt v1.0.0 β”œβ”€β”€ opt feature \"default\" β”‚ └── opt v1.0.0 └── opt2 feature \"default\" └── opt2 v1.0.0 ", ) .run(); p.cargo("tree -e features --all-features -i opt2") .with_stdout( "\ opt2 v1.0.0 └── opt2 feature \"default\" └── foo v0.1.0 ([..]/foo) β”œβ”€β”€ foo feature \"default\" (command-line) β”œβ”€β”€ foo feature \"f1\" (command-line) β”‚ └── foo feature \"f2\" (command-line) β”œβ”€β”€ foo feature \"f2\" 
(command-line) β”œβ”€β”€ foo feature \"opt\" (command-line) β”‚ └── foo feature \"f1\" (command-line) (*) └── foo feature \"opt2\" (command-line) ", ) .run(); } #[cargo_test] fn features_enables_inactive_target() { // Features that enable things on targets that are not enabled. Package::new("optdep", "1.0.0") .feature("feat1", &[]) .publish(); Package::new("dep1", "1.0.0") .feature("somefeat", &[]) .publish(); Package::new("dep2", "1.0.0") .add_dep( Dependency::new("optdep", "1.0.0") .optional(true) .target("cfg(whatever)"), ) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [target.'cfg(whatever)'.dependencies] optdep = {version="1.0", optional=true} dep1 = "1.0" [dependencies] dep2 = "1.0" [features] f1 = ["optdep"] f2 = ["optdep/feat1"] f3 = ["dep1/somefeat"] f4 = ["dep2/optdep"] "#, ) .file("src/lib.rs", "") .build(); p.cargo("tree -e features") .with_stdout( "\ foo v0.1.0 ([..]/foo) └── dep2 feature \"default\" └── dep2 v1.0.0 ", ) .run(); p.cargo("tree -e features --all-features") .with_stdout( "\ foo v0.1.0 ([..]/foo) └── dep2 feature \"default\" └── dep2 v1.0.0 ", ) .run(); p.cargo("tree -e features --all-features --target=all") .with_stdout( "\ foo v0.1.0 ([..]/foo) β”œβ”€β”€ dep1 feature \"default\" β”‚ └── dep1 v1.0.0 β”œβ”€β”€ dep2 feature \"default\" β”‚ └── dep2 v1.0.0 β”‚ └── optdep feature \"default\" β”‚ └── optdep v1.0.0 └── optdep feature \"default\" (*) ", ) .run(); } cargo-0.66.0/tests/testsuite/unit_graph.rs000066400000000000000000000162651432416201200206040ustar00rootroot00000000000000//! Tests for --unit-graph option. use cargo_test_support::project; use cargo_test_support::registry::Package; #[cargo_test] fn gated() { let p = project().file("src/lib.rs", "").build(); p.cargo("build --unit-graph") .with_status(101) .with_stderr( "\ [ERROR] the `--unit-graph` flag is unstable[..] See [..] See [..] 
", ) .run(); } #[cargo_test] fn simple() { Package::new("a", "1.0.0") .dep("b", "1.0") .feature("feata", &["b/featb"]) .publish(); Package::new("b", "1.0.0") .dep("c", "1.0") .feature("featb", &["c/featc"]) .publish(); Package::new("c", "1.0.0").feature("featc", &[]).publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] a = "1.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build --features a/feata --unit-graph -Zunstable-options") .masquerade_as_nightly_cargo(&["unit-graph"]) .with_json( r#"{ "roots": [ 3 ], "units": [ { "dependencies": [ { "extern_crate_name": "b", "index": 1, "noprelude": false, "public": false } ], "features": [ "feata" ], "mode": "build", "pkg_id": "a 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "platform": null, "profile": { "codegen_backend": null, "codegen_units": null, "debug_assertions": true, "debuginfo": 2, "incremental": false, "lto": "false", "name": "dev", "opt_level": "0", "overflow_checks": true, "panic": "unwind", "rpath": false, "split_debuginfo": "{...}", "strip": "none" }, "target": { "crate_types": [ "lib" ], "doc": true, "doctest": true, "edition": "2015", "kind": [ "lib" ], "name": "a", "src_path": "[..]/a-1.0.0/src/lib.rs", "test": true } }, { "dependencies": [ { "extern_crate_name": "c", "index": 2, "noprelude": false, "public": false } ], "features": [ "featb" ], "mode": "build", "pkg_id": "b 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "platform": null, "profile": { "codegen_backend": null, "codegen_units": null, "debug_assertions": true, "debuginfo": 2, "incremental": false, "lto": "false", "name": "dev", "opt_level": "0", "overflow_checks": true, "panic": "unwind", "rpath": false, "split_debuginfo": "{...}", "strip": "none" }, "target": { "crate_types": [ "lib" ], "doc": true, "doctest": true, "edition": "2015", "kind": [ "lib" ], "name": "b", "src_path": "[..]/b-1.0.0/src/lib.rs", "test": true } }, { "dependencies": [], 
"features": [ "featc" ], "mode": "build", "pkg_id": "c 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "platform": null, "profile": { "codegen_backend": null, "codegen_units": null, "debug_assertions": true, "debuginfo": 2, "incremental": false, "lto": "false", "name": "dev", "opt_level": "0", "overflow_checks": true, "panic": "unwind", "rpath": false, "split_debuginfo": "{...}", "strip": "none" }, "target": { "crate_types": [ "lib" ], "doc": true, "doctest": true, "edition": "2015", "kind": [ "lib" ], "name": "c", "src_path": "[..]/c-1.0.0/src/lib.rs", "test": true } }, { "dependencies": [ { "extern_crate_name": "a", "index": 0, "noprelude": false, "public": false } ], "features": [], "mode": "build", "pkg_id": "foo 0.1.0 (path+file://[..]/foo)", "platform": null, "profile": { "codegen_backend": null, "codegen_units": null, "debug_assertions": true, "debuginfo": 2, "incremental": false, "lto": "false", "name": "dev", "opt_level": "0", "overflow_checks": true, "panic": "unwind", "rpath": false, "split_debuginfo": "{...}", "strip": "none" }, "target": { "crate_types": [ "lib" ], "doc": true, "doctest": true, "edition": "2015", "kind": [ "lib" ], "name": "foo", "src_path": "[..]/foo/src/lib.rs", "test": true } } ], "version": 1 } "#, ) .run(); } cargo-0.66.0/tests/testsuite/update.rs000066400000000000000000000523141432416201200177210ustar00rootroot00000000000000//! Tests for the `cargo update` command. 
use cargo_test_support::registry::Package; use cargo_test_support::{basic_manifest, project}; #[cargo_test] fn minor_update_two_places() { Package::new("log", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [dependencies] log = "0.1" foo = { path = "foo" } "#, ) .file("src/lib.rs", "") .file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] log = "0.1" "#, ) .file("foo/src/lib.rs", "") .build(); p.cargo("build").run(); Package::new("log", "0.1.1").publish(); p.change_file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] log = "0.1.1" "#, ); p.cargo("build").run(); } #[cargo_test] fn transitive_minor_update() { Package::new("log", "0.1.0").publish(); Package::new("serde", "0.1.0").dep("log", "0.1").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [dependencies] serde = "0.1" log = "0.1" foo = { path = "foo" } "#, ) .file("src/lib.rs", "") .file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] serde = "0.1" "#, ) .file("foo/src/lib.rs", "") .build(); p.cargo("build").run(); Package::new("log", "0.1.1").publish(); Package::new("serde", "0.1.1").dep("log", "0.1.1").publish(); // Note that `serde` isn't actually updated here! The default behavior for // `update` right now is to as conservatively as possible attempt to satisfy // an update. In this case we previously locked the dependency graph to `log // 0.1.0`, but nothing on the command line says we're allowed to update // that. As a result the update of `serde` here shouldn't update to `serde // 0.1.1` as that would also force an update to `log 0.1.1`. // // Also note that this is probably counterintuitive and weird. We may wish // to change this one day. 
p.cargo("update -p serde") .with_stderr( "\ [UPDATING] `[..]` index ", ) .run(); } #[cargo_test] fn conservative() { Package::new("log", "0.1.0").publish(); Package::new("serde", "0.1.0").dep("log", "0.1").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [dependencies] serde = "0.1" log = "0.1" foo = { path = "foo" } "#, ) .file("src/lib.rs", "") .file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] serde = "0.1" "#, ) .file("foo/src/lib.rs", "") .build(); p.cargo("build").run(); Package::new("log", "0.1.1").publish(); Package::new("serde", "0.1.1").dep("log", "0.1").publish(); p.cargo("update -p serde") .with_stderr( "\ [UPDATING] `[..]` index [UPDATING] serde v0.1.0 -> v0.1.1 ", ) .run(); } #[cargo_test] fn update_via_new_dep() { Package::new("log", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [dependencies] log = "0.1" # foo = { path = "foo" } "#, ) .file("src/lib.rs", "") .file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] log = "0.1.1" "#, ) .file("foo/src/lib.rs", "") .build(); p.cargo("build").run(); Package::new("log", "0.1.1").publish(); p.uncomment_root_manifest(); p.cargo("build").env("CARGO_LOG", "cargo=trace").run(); } #[cargo_test] fn update_via_new_member() { Package::new("log", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [workspace] # members = [ "foo" ] [dependencies] log = "0.1" "#, ) .file("src/lib.rs", "") .file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] log = "0.1.1" "#, ) .file("foo/src/lib.rs", "") .build(); p.cargo("build").run(); Package::new("log", "0.1.1").publish(); p.uncomment_root_manifest(); p.cargo("build").run(); } #[cargo_test] fn add_dep_deep_new_requirement() { Package::new("log", 
"0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [dependencies] log = "0.1" # bar = "0.1" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build").run(); Package::new("log", "0.1.1").publish(); Package::new("bar", "0.1.0").dep("log", "0.1.1").publish(); p.uncomment_root_manifest(); p.cargo("build").run(); } #[cargo_test] fn everything_real_deep() { Package::new("log", "0.1.0").publish(); Package::new("foo", "0.1.0").dep("log", "0.1").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [dependencies] foo = "0.1" # bar = "0.1" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build").run(); Package::new("log", "0.1.1").publish(); Package::new("bar", "0.1.0").dep("log", "0.1.1").publish(); p.uncomment_root_manifest(); p.cargo("build").run(); } #[cargo_test] fn change_package_version() { let p = project() .file( "Cargo.toml", r#" [package] name = "a-foo" version = "0.2.0-alpha" authors = [] [dependencies] bar = { path = "bar", version = "0.2.0-alpha" } "#, ) .file("src/lib.rs", "") .file("bar/Cargo.toml", &basic_manifest("bar", "0.2.0-alpha")) .file("bar/src/lib.rs", "") .file( "Cargo.lock", r#" [[package]] name = "foo" version = "0.2.0" dependencies = ["bar 0.2.0"] [[package]] name = "bar" version = "0.2.0" "#, ) .build(); p.cargo("build").run(); } #[cargo_test] fn update_precise() { Package::new("serde", "0.1.0").publish(); Package::new("serde", "0.2.1").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [dependencies] serde = "0.2" foo = { path = "foo" } "#, ) .file("src/lib.rs", "") .file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] serde = "0.1" "#, ) .file("foo/src/lib.rs", "") .build(); p.cargo("build").run(); Package::new("serde", "0.2.0").publish(); p.cargo("update -p serde:0.2.1 --precise 0.2.0") .with_stderr( "\ [UPDATING] `[..]` index 
[UPDATING] serde v0.2.1 -> v0.2.0 ", ) .run(); } #[cargo_test] fn update_precise_do_not_force_update_deps() { Package::new("log", "0.1.0").publish(); Package::new("serde", "0.2.1").dep("log", "0.1").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [dependencies] serde = "0.2" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build").run(); Package::new("log", "0.1.1").publish(); Package::new("serde", "0.2.2").dep("log", "0.1").publish(); p.cargo("update -p serde:0.2.1 --precise 0.2.2") .with_stderr( "\ [UPDATING] `[..]` index [UPDATING] serde v0.2.1 -> v0.2.2 ", ) .run(); } #[cargo_test] fn update_precise_without_package() { Package::new("serde", "0.2.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [dependencies] serde = "0.2" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build").run(); Package::new("serde", "0.2.1").publish(); Package::new("serde", "0.3.0").publish(); p.cargo("update --precise 0.3.0") .with_stderr( "\ [WARNING] precise is only supported with \"--package \", this will become a hard error in a future release. 
[UPDATING] `[..]` index [UPDATING] serde v0.2.0 -> v0.2.1 ", ) .run(); } #[cargo_test] fn update_aggressive() { Package::new("log", "0.1.0").publish(); Package::new("serde", "0.2.1").dep("log", "0.1").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [dependencies] serde = "0.2" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build").run(); Package::new("log", "0.1.1").publish(); Package::new("serde", "0.2.2").dep("log", "0.1").publish(); p.cargo("update -p serde:0.2.1 --aggressive") .with_stderr( "\ [UPDATING] `[..]` index [UPDATING] log v0.1.0 -> v0.1.1 [UPDATING] serde v0.2.1 -> v0.2.2 ", ) .run(); } #[cargo_test] fn update_aggressive_without_package() { Package::new("serde", "0.2.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [dependencies] serde = "0.2" "#, ) .file("src/lib.rs", "") .build(); p.cargo("build").run(); Package::new("serde", "0.2.1").publish(); p.cargo("update --aggressive") .with_stderr( "\ [WARNING] aggressive is only supported with \"--package \", this will become a hard error in a future release. [UPDATING] `[..]` index [UPDATING] serde v0.2.0 -> v0.2.1 ", ) .run(); } // cargo update should respect its arguments even without a lockfile. // See issue "Running cargo update without a Cargo.lock ignores arguments" // at . 
#[cargo_test] fn update_precise_first_run() { Package::new("serde", "0.1.0").publish(); Package::new("serde", "0.2.0").publish(); Package::new("serde", "0.2.1").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "bar" version = "0.0.1" [dependencies] serde = "0.2" "#, ) .file("src/lib.rs", "") .build(); p.cargo("update -p serde --precise 0.2.0") .with_stderr( "\ [UPDATING] `[..]` index [UPDATING] serde v0.2.1 -> v0.2.0 ", ) .run(); // Assert `cargo metadata` shows serde 0.2.0 p.cargo("metadata") .with_json( r#"{ "packages": [ { "authors": [], "categories": [], "default_run": null, "dependencies": [ { "features": [], "kind": null, "name": "serde", "optional": false, "registry": null, "rename": null, "req": "^0.2", "source": "registry+https://github.com/rust-lang/crates.io-index", "target": null, "uses_default_features": true } ], "description": null, "documentation": null, "edition": "2015", "features": {}, "homepage": null, "id": "bar 0.0.1 (path+file://[..]/foo)", "keywords": [], "license": null, "license_file": null, "links": null, "manifest_path": "[..]/foo/Cargo.toml", "metadata": null, "publish": null, "name": "bar", "readme": null, "repository": null, "rust_version": null, "source": null, "targets": [ { "crate_types": [ "lib" ], "doc": true, "doctest": true, "test": true, "edition": "2015", "kind": [ "lib" ], "name": "bar", "src_path": "[..]/foo/src/lib.rs" } ], "version": "0.0.1" }, { "authors": [], "categories": [], "default_run": null, "dependencies": [], "description": null, "documentation": null, "edition": "2015", "features": {}, "homepage": null, "id": "serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "keywords": [], "license": null, "license_file": null, "links": null, "manifest_path": "[..]/home/.cargo/registry/src/-[..]/serde-0.2.0/Cargo.toml", "metadata": null, "publish": null, "name": "serde", "readme": null, "repository": null, "rust_version": null, "source": 
"registry+https://github.com/rust-lang/crates.io-index", "targets": [ { "crate_types": [ "lib" ], "doc": true, "doctest": true, "edition": "2015", "kind": [ "lib" ], "name": "serde", "src_path": "[..]/home/.cargo/registry/src/-[..]/serde-0.2.0/src/lib.rs", "test": true } ], "version": "0.2.0" } ], "resolve": { "nodes": [ { "dependencies": [ "serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" ], "deps": [ { "dep_kinds": [ { "kind": null, "target": null } ], "name": "serde", "pkg": "serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" } ], "features": [], "id": "bar 0.0.1 (path+file://[..]/foo)" }, { "dependencies": [], "deps": [], "features": [], "id": "serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" } ], "root": "bar 0.0.1 (path+file://[..]/foo)" }, "target_directory": "[..]/foo/target", "version": 1, "workspace_members": [ "bar 0.0.1 (path+file://[..]/foo)" ], "workspace_root": "[..]/foo", "metadata": null }"#, ) .run(); p.cargo("update -p serde --precise 0.2.0") .with_stderr( "\ [UPDATING] `[..]` index ", ) .run(); } #[cargo_test] fn preserve_top_comment() { let p = project().file("src/lib.rs", "").build(); p.cargo("update").run(); let lockfile = p.read_lockfile(); assert!(lockfile.starts_with("# This file is automatically @generated by Cargo.\n# It is not intended for manual editing.\n")); let mut lines = lockfile.lines().collect::>(); lines.insert(2, "# some other comment"); let mut lockfile = lines.join("\n"); lockfile.push('\n'); // .lines/.join loses the last newline println!("saving Cargo.lock contents:\n{}", lockfile); p.change_file("Cargo.lock", &lockfile); p.cargo("update").run(); let lockfile2 = p.read_lockfile(); println!("loaded Cargo.lock contents:\n{}", lockfile2); assert_eq!(lockfile, lockfile2); } #[cargo_test] fn dry_run_update() { Package::new("log", "0.1.0").publish(); Package::new("serde", "0.1.0").dep("log", "0.1").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = 
"bar" version = "0.0.1" authors = [] [dependencies] serde = "0.1" log = "0.1" foo = { path = "foo" } "#, ) .file("src/lib.rs", "") .file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] serde = "0.1" "#, ) .file("foo/src/lib.rs", "") .build(); p.cargo("build").run(); let old_lockfile = p.read_lockfile(); Package::new("log", "0.1.1").publish(); Package::new("serde", "0.1.1").dep("log", "0.1").publish(); p.cargo("update -p serde --dry-run") .with_stderr( "\ [UPDATING] `[..]` index [UPDATING] serde v0.1.0 -> v0.1.1 [WARNING] not updating lockfile due to dry run ", ) .run(); let new_lockfile = p.read_lockfile(); assert_eq!(old_lockfile, new_lockfile) } #[cargo_test] fn workspace_only() { let p = project().file("src/main.rs", "fn main() {}").build(); p.cargo("generate-lockfile").run(); let lock1 = p.read_lockfile(); p.change_file( "Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.2" "#, ); p.cargo("update --workspace").run(); let lock2 = p.read_lockfile(); assert_ne!(lock1, lock2); assert!(lock1.contains("0.0.1")); assert!(lock2.contains("0.0.2")); assert!(!lock1.contains("0.0.2")); assert!(!lock2.contains("0.0.1")); } #[cargo_test] fn precise_with_build_metadata() { // +foo syntax shouldn't be necessary with --precise Package::new("bar", "0.1.0+extra-stuff.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "0.1" "#, ) .file("src/lib.rs", "") .build(); p.cargo("generate-lockfile").run(); Package::new("bar", "0.1.1+extra-stuff.1").publish(); Package::new("bar", "0.1.2+extra-stuff.2").publish(); p.cargo("update -p bar --precise 0.1") .with_status(101) .with_stderr( "\ error: invalid version format for precise version `0.1` Caused by: unexpected end of input while parsing minor version number ", ) .run(); p.cargo("update -p bar --precise 0.1.1+does-not-match") .with_status(101) .with_stderr( "\ [UPDATING] [..] 
index error: no matching package named `bar` found location searched: registry `crates-io` required by package `foo v0.1.0 ([ROOT]/foo)` ", ) .run(); p.cargo("update -p bar --precise 0.1.1") .with_stderr( "\ [UPDATING] [..] index [UPDATING] bar v0.1.0+extra-stuff.0 -> v0.1.1+extra-stuff.1 ", ) .run(); Package::new("bar", "0.1.3").publish(); p.cargo("update -p bar --precise 0.1.3+foo") .with_status(101) .with_stderr( "\ [UPDATING] [..] index error: no matching package named `bar` found location searched: registry `crates-io` required by package `foo v0.1.0 ([ROOT]/foo)` ", ) .run(); p.cargo("update -p bar --precise 0.1.3") .with_stderr( "\ [UPDATING] [..] index [UPDATING] bar v0.1.1+extra-stuff.1 -> v0.1.3 ", ) .run(); } cargo-0.66.0/tests/testsuite/vendor.rs000066400000000000000000000554521432416201200177420ustar00rootroot00000000000000//! Tests for the `cargo vendor` command. //! //! Note that every test here uses `--respect-source-config` so that the //! "fake" crates.io is used. Otherwise `vendor` would download the crates.io //! index from the network. 
use std::fs; use cargo_test_support::git; use cargo_test_support::registry::{self, Package}; use cargo_test_support::{basic_lib_manifest, paths, project, Project}; #[cargo_test] fn vendor_simple() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] log = "0.3.5" "#, ) .file("src/lib.rs", "") .build(); Package::new("log", "0.3.5").publish(); p.cargo("vendor --respect-source-config").run(); let lock = p.read_file("vendor/log/Cargo.toml"); assert!(lock.contains("version = \"0.3.5\"")); add_vendor_config(&p); p.cargo("build").run(); } #[cargo_test] fn vendor_sample_config() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] log = "0.3.5" "#, ) .file("src/lib.rs", "") .build(); Package::new("log", "0.3.5").publish(); p.cargo("vendor --respect-source-config") .with_stdout( r#" [source.crates-io] replace-with = "vendored-sources" [source.vendored-sources] directory = "vendor" "#, ) .run(); } #[cargo_test] fn vendor_path_specified() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] log = "0.3.5" "#, ) .file("src/lib.rs", "") .build(); Package::new("log", "0.3.5").publish(); let path = if cfg!(windows) { r#"deps\.vendor"# } else { "deps/.vendor" }; let output = p .cargo("vendor --respect-source-config") .arg(path) .exec_with_output() .unwrap(); // Assert against original output to ensure that // path is normalized by `ops::vendor` on Windows. 
assert_eq!( &String::from_utf8(output.stdout).unwrap(), r#" [source.crates-io] replace-with = "vendored-sources" [source.vendored-sources] directory = "deps/.vendor" "# ); let lock = p.read_file("deps/.vendor/log/Cargo.toml"); assert!(lock.contains("version = \"0.3.5\"")); } fn add_vendor_config(p: &Project) { p.change_file( ".cargo/config", r#" [source.crates-io] replace-with = 'vendor' [source.vendor] directory = 'vendor' "#, ); } #[cargo_test] fn package_exclude() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "0.1.0" "#, ) .file("src/lib.rs", "") .build(); Package::new("bar", "0.1.0") .file( "Cargo.toml", r#" [package] name = "bar" version = "0.1.0" exclude = [".*", "!.include", "!.dotdir/include"] "#, ) .file("src/lib.rs", "") .file(".exclude", "") .file(".include", "") .file(".dotdir/exclude", "") .file(".dotdir/include", "") .publish(); p.cargo("vendor --respect-source-config").run(); let csum = p.read_file("vendor/bar/.cargo-checksum.json"); assert!(csum.contains(".include")); assert!(!csum.contains(".exclude")); assert!(!csum.contains(".dotdir/exclude")); // Gitignore doesn't re-include a file in an excluded parent directory, // even if negating it explicitly. 
assert!(!csum.contains(".dotdir/include")); } #[cargo_test] fn two_versions() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bitflags = "0.8.0" bar = { path = "bar" } "#, ) .file("src/lib.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" [dependencies] bitflags = "0.7.0" "#, ) .file("bar/src/lib.rs", "") .build(); Package::new("bitflags", "0.7.0").publish(); Package::new("bitflags", "0.8.0").publish(); p.cargo("vendor --respect-source-config").run(); let lock = p.read_file("vendor/bitflags/Cargo.toml"); assert!(lock.contains("version = \"0.8.0\"")); let lock = p.read_file("vendor/bitflags-0.7.0/Cargo.toml"); assert!(lock.contains("version = \"0.7.0\"")); add_vendor_config(&p); p.cargo("build").run(); } #[cargo_test] fn two_explicit_versions() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bitflags = "0.8.0" bar = { path = "bar" } "#, ) .file("src/lib.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" [dependencies] bitflags = "0.7.0" "#, ) .file("bar/src/lib.rs", "") .build(); Package::new("bitflags", "0.7.0").publish(); Package::new("bitflags", "0.8.0").publish(); p.cargo("vendor --respect-source-config --versioned-dirs") .run(); let lock = p.read_file("vendor/bitflags-0.8.0/Cargo.toml"); assert!(lock.contains("version = \"0.8.0\"")); let lock = p.read_file("vendor/bitflags-0.7.0/Cargo.toml"); assert!(lock.contains("version = \"0.7.0\"")); add_vendor_config(&p); p.cargo("build").run(); } #[cargo_test] fn help() { let p = project().build(); p.cargo("vendor -h").run(); } #[cargo_test] fn update_versions() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bitflags = "0.7.0" "#, ) .file("src/lib.rs", "") .build(); Package::new("bitflags", "0.7.0").publish(); Package::new("bitflags", "0.8.0").publish(); p.cargo("vendor --respect-source-config").run(); let 
lock = p.read_file("vendor/bitflags/Cargo.toml"); assert!(lock.contains("version = \"0.7.0\"")); p.change_file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bitflags = "0.8.0" "#, ); p.cargo("vendor --respect-source-config").run(); let lock = p.read_file("vendor/bitflags/Cargo.toml"); assert!(lock.contains("version = \"0.8.0\"")); } #[cargo_test] fn two_lockfiles() { let p = project() .no_manifest() .file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bitflags = "=0.7.0" "#, ) .file("foo/src/lib.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" [dependencies] bitflags = "=0.8.0" "#, ) .file("bar/src/lib.rs", "") .build(); Package::new("bitflags", "0.7.0").publish(); Package::new("bitflags", "0.8.0").publish(); p.cargo("vendor --respect-source-config -s bar/Cargo.toml --manifest-path foo/Cargo.toml") .run(); let lock = p.read_file("vendor/bitflags/Cargo.toml"); assert!(lock.contains("version = \"0.8.0\"")); let lock = p.read_file("vendor/bitflags-0.7.0/Cargo.toml"); assert!(lock.contains("version = \"0.7.0\"")); add_vendor_config(&p); p.cargo("build").cwd("foo").run(); p.cargo("build").cwd("bar").run(); } #[cargo_test] fn test_sync_argument() { let p = project() .no_manifest() .file( "foo/Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bitflags = "=0.7.0" "#, ) .file("foo/src/lib.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" [dependencies] bitflags = "=0.8.0" "#, ) .file("bar/src/lib.rs", "") .file( "baz/Cargo.toml", r#" [package] name = "baz" version = "0.1.0" [dependencies] bitflags = "=0.8.0" "#, ) .file("baz/src/lib.rs", "") .build(); Package::new("bitflags", "0.7.0").publish(); Package::new("bitflags", "0.8.0").publish(); p.cargo("vendor --respect-source-config --manifest-path foo/Cargo.toml -s bar/Cargo.toml baz/Cargo.toml test_vendor") .with_stderr("\ error: Found argument 'test_vendor' which wasn't expected, or 
isn't valid in this context USAGE: cargo[EXE] vendor [OPTIONS] [path] For more information try --help", ) .with_status(1) .run(); p.cargo("vendor --respect-source-config --manifest-path foo/Cargo.toml -s bar/Cargo.toml -s baz/Cargo.toml test_vendor") .run(); let lock = p.read_file("test_vendor/bitflags/Cargo.toml"); assert!(lock.contains("version = \"0.8.0\"")); let lock = p.read_file("test_vendor/bitflags-0.7.0/Cargo.toml"); assert!(lock.contains("version = \"0.7.0\"")); } #[cargo_test] fn delete_old_crates() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bitflags = "=0.7.0" "#, ) .file("src/lib.rs", "") .build(); Package::new("bitflags", "0.7.0").publish(); Package::new("log", "0.3.5").publish(); p.cargo("vendor --respect-source-config").run(); p.read_file("vendor/bitflags/Cargo.toml"); p.change_file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] log = "=0.3.5" "#, ); p.cargo("vendor --respect-source-config").run(); let lock = p.read_file("vendor/log/Cargo.toml"); assert!(lock.contains("version = \"0.3.5\"")); assert!(!p.root().join("vendor/bitflags/Cargo.toml").exists()); } #[cargo_test] fn ignore_files() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] url = "1.4.1" "#, ) .file("src/lib.rs", "") .build(); Package::new("url", "1.4.1") .file("src/lib.rs", "") .file("foo.orig", "") .file(".gitignore", "") .file(".gitattributes", "") .file("foo.rej", "") .publish(); p.cargo("vendor --respect-source-config").run(); let csum = p.read_file("vendor/url/.cargo-checksum.json"); assert!(!csum.contains("foo.orig")); assert!(!csum.contains(".gitignore")); assert!(!csum.contains(".gitattributes")); assert!(!csum.contains(".cargo-ok")); assert!(!csum.contains("foo.rej")); } #[cargo_test] fn included_files_only() { let git = git::new("a", |p| { p.file("Cargo.toml", &basic_lib_manifest("a")) .file("src/lib.rs", "") .file(".gitignore", "a") 
.file("a/b.md", "") }); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.1.0" [dependencies] a = {{ git = '{}' }} "#, git.url() ), ) .file("src/lib.rs", "") .build(); p.cargo("vendor --respect-source-config").run(); let csum = p.read_file("vendor/a/.cargo-checksum.json"); assert!(!csum.contains("a/b.md")); } #[cargo_test] fn dependent_crates_in_crates() { let git = git::new("a", |p| { p.file( "Cargo.toml", r#" [package] name = "a" version = "0.1.0" [dependencies] b = { path = 'b' } "#, ) .file("src/lib.rs", "") .file("b/Cargo.toml", &basic_lib_manifest("b")) .file("b/src/lib.rs", "") }); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.1.0" [dependencies] a = {{ git = '{}' }} "#, git.url() ), ) .file("src/lib.rs", "") .build(); p.cargo("vendor --respect-source-config").run(); p.read_file("vendor/a/.cargo-checksum.json"); p.read_file("vendor/b/.cargo-checksum.json"); } #[cargo_test] fn vendoring_git_crates() { let git = git::new("git", |p| { p.file("Cargo.toml", &basic_lib_manifest("serde_derive")) .file("src/lib.rs", "") .file("src/wut.rs", "") }); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.1.0" [dependencies.serde] version = "0.5.0" [dependencies.serde_derive] version = "0.5.0" [patch.crates-io] serde_derive = {{ git = '{}' }} "#, git.url() ), ) .file("src/lib.rs", "") .build(); Package::new("serde", "0.5.0") .dep("serde_derive", "0.5") .publish(); Package::new("serde_derive", "0.5.0").publish(); p.cargo("vendor --respect-source-config").run(); p.read_file("vendor/serde_derive/src/wut.rs"); add_vendor_config(&p); p.cargo("build").run(); } #[cargo_test] fn git_simple() { let git = git::new("git", |p| { p.file("Cargo.toml", &basic_lib_manifest("a")) .file("src/lib.rs", "") }); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.1.0" [dependencies] a = {{ git = '{}' }} "#, git.url() ), ) .file("src/lib.rs", 
"") .build(); p.cargo("vendor --respect-source-config").run(); let csum = p.read_file("vendor/a/.cargo-checksum.json"); assert!(csum.contains("\"package\":null")); } #[cargo_test] fn git_duplicate() { let git = git::new("a", |p| { p.file( "Cargo.toml", r#" [package] name = "a" version = "0.1.0" [dependencies] b = { path = 'b' } "#, ) .file("src/lib.rs", "") .file("b/Cargo.toml", &basic_lib_manifest("b")) .file("b/src/lib.rs", "") }); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.1.0" [dependencies] a = {{ git = '{}' }} b = '0.5.0' "#, git.url() ), ) .file("src/lib.rs", "") .build(); Package::new("b", "0.5.0").publish(); p.cargo("vendor --respect-source-config") .with_stderr( "\ [UPDATING] [..] [UPDATING] [..] [DOWNLOADING] [..] [DOWNLOADED] [..] error: failed to sync Caused by: found duplicate version of package `b v0.5.0` vendored from two sources: source 1: [..] source 2: [..] ", ) .with_status(101) .run(); } #[cargo_test] fn depend_on_vendor_dir_not_deleted() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] libc = "0.2.30" "#, ) .file("src/lib.rs", "") .build(); Package::new("libc", "0.2.30").publish(); p.cargo("vendor --respect-source-config").run(); assert!(p.root().join("vendor/libc").is_dir()); p.change_file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] libc = "0.2.30" [patch.crates-io] libc = { path = 'vendor/libc' } "#, ); p.cargo("vendor --respect-source-config").run(); assert!(p.root().join("vendor/libc").is_dir()); } #[cargo_test] fn ignore_hidden() { // Don't delete files starting with `.` Package::new("bar", "0.1.0").publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "1.0.0" [dependencies] bar = "0.1.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("vendor --respect-source-config").run(); // Add a `.git` directory. 
let repo = git::init(&p.root().join("vendor")); git::add(&repo); git::commit(&repo); assert!(p.root().join("vendor/.git").exists()); // Vendor again, shouldn't change anything. p.cargo("vendor --respect-source-config").run(); // .git should not be removed. assert!(p.root().join("vendor/.git").exists()); // And just for good measure, make sure no files changed. let mut opts = git2::StatusOptions::new(); assert!(repo .statuses(Some(&mut opts)) .unwrap() .iter() .all(|status| status.status() == git2::Status::CURRENT)); } #[cargo_test] fn config_instructions_works() { // Check that the config instructions work for all dependency kinds. registry::alt_init(); Package::new("dep", "0.1.0").publish(); Package::new("altdep", "0.1.0").alternative(true).publish(); let git_project = git::new("gitdep", |project| { project .file("Cargo.toml", &basic_lib_manifest("gitdep")) .file("src/lib.rs", "") }); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.1.0" [dependencies] dep = "0.1" altdep = {{version="0.1", registry="alternative"}} gitdep = {{git='{}'}} "#, git_project.url() ), ) .file("src/lib.rs", "") .build(); let output = p .cargo("vendor --respect-source-config") .exec_with_output() .unwrap(); let output = String::from_utf8(output.stdout).unwrap(); p.change_file(".cargo/config", &output); p.cargo("check -v") .with_stderr_contains("[..]foo/vendor/dep/src/lib.rs[..]") .with_stderr_contains("[..]foo/vendor/altdep/src/lib.rs[..]") .with_stderr_contains("[..]foo/vendor/gitdep/src/lib.rs[..]") .run(); } #[cargo_test] fn git_crlf_preservation() { // Check that newlines don't get changed when you vendor // (will only fail if your system is setup with core.autocrlf=true on windows) let input = "hello \nthere\nmy newline\nfriends"; let git_project = git::new("git", |p| { p.file("Cargo.toml", &basic_lib_manifest("a")) .file("src/lib.rs", input) }); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.1.0" 
[dependencies] a = {{ git = '{}' }} "#, git_project.url() ), ) .file("src/lib.rs", "") .build(); fs::write( paths::home().join(".gitconfig"), r#" [core] autocrlf = true "#, ) .unwrap(); p.cargo("vendor --respect-source-config").run(); let output = p.read_file("vendor/a/src/lib.rs"); assert_eq!(input, output); } #[cargo_test] #[cfg(unix)] fn vendor_preserves_permissions() { use std::os::unix::fs::MetadataExt; Package::new("bar", "1.0.0") .file_with_mode("example.sh", 0o755, "#!/bin/sh") .file("src/lib.rs", "") .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = "1.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("vendor --respect-source-config").run(); let metadata = fs::metadata(p.root().join("vendor/bar/src/lib.rs")).unwrap(); assert_eq!(metadata.mode() & 0o777, 0o644); let metadata = fs::metadata(p.root().join("vendor/bar/example.sh")).unwrap(); assert_eq!(metadata.mode() & 0o777, 0o755); } #[cargo_test] fn no_remote_dependency_no_vendor() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = { path = "bar" } "#, ) .file("src/lib.rs", "") .file( "bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" "#, ) .file("bar/src/lib.rs", "") .build(); p.cargo("vendor") .with_stderr("There is no dependency to vendor in this project.") .run(); assert!(!p.root().join("vendor").exists()); } cargo-0.66.0/tests/testsuite/verify_project.rs000066400000000000000000000037671432416201200215010ustar00rootroot00000000000000//! Tests for the `cargo verify-project` command. 
use cargo_test_support::{basic_bin_manifest, main_file, project}; fn verify_project_success_output() -> String { r#"{"success":"true"}"#.into() } #[cargo_test] fn cargo_verify_project_path_to_cargo_toml_relative() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("verify-project --manifest-path foo/Cargo.toml") .cwd(p.root().parent().unwrap()) .with_stdout(verify_project_success_output()) .run(); } #[cargo_test] fn cargo_verify_project_path_to_cargo_toml_absolute() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("verify-project --manifest-path") .arg(p.root().join("Cargo.toml")) .cwd(p.root().parent().unwrap()) .with_stdout(verify_project_success_output()) .run(); } #[cargo_test] fn cargo_verify_project_cwd() { let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .build(); p.cargo("verify-project") .with_stdout(verify_project_success_output()) .run(); } #[cargo_test] fn cargo_verify_project_honours_unstable_features() { let p = project() .file( "Cargo.toml", r#" cargo-features = ["test-dummy-unstable"] [package] name = "foo" version = "0.0.1" "#, ) .file("src/lib.rs", "") .build(); p.cargo("verify-project") .masquerade_as_nightly_cargo(&["test-dummy-unstable"]) .with_stdout(verify_project_success_output()) .run(); p.cargo("verify-project") .with_status(1) .with_json(r#"{"invalid":"failed to parse manifest at `[CWD]/Cargo.toml`"}"#) .run(); } cargo-0.66.0/tests/testsuite/version.rs000066400000000000000000000024231432416201200201200ustar00rootroot00000000000000//! Tests for displaying the cargo version. 
use cargo_test_support::{cargo_process, project}; #[cargo_test] fn simple() { let p = project().build(); p.cargo("version") .with_stdout(&format!("cargo {}\n", cargo::version())) .run(); p.cargo("--version") .with_stdout(&format!("cargo {}\n", cargo::version())) .run(); } #[cargo_test] fn version_works_without_rustc() { let p = project().build(); p.cargo("version").env("PATH", "").run(); } #[cargo_test] fn version_works_with_bad_config() { let p = project().file(".cargo/config", "this is not toml").build(); p.cargo("version").run(); } #[cargo_test] fn version_works_with_bad_target_dir() { let p = project() .file( ".cargo/config", r#" [build] target-dir = 4 "#, ) .build(); p.cargo("version").run(); } #[cargo_test] fn verbose() { // This is mainly to check that it doesn't explode. cargo_process("-vV") .with_stdout_contains(&format!("cargo {}", cargo::version())) .with_stdout_contains("host: [..]") .with_stdout_contains("libgit2: [..]") .with_stdout_contains("libcurl: [..]") .with_stdout_contains("os: [..]") .run(); } cargo-0.66.0/tests/testsuite/warn_on_failure.rs000066400000000000000000000063721432416201200216140ustar00rootroot00000000000000//! Tests for whether or not warnings are displayed for build scripts. use cargo_test_support::registry::Package; use cargo_test_support::{project, Project}; static WARNING1: &str = "Hello! I'm a warning. 
:)"; static WARNING2: &str = "And one more!"; fn make_lib(lib_src: &str) { Package::new("bar", "0.0.1") .file( "Cargo.toml", r#" [package] name = "bar" authors = [] version = "0.0.1" build = "build.rs" "#, ) .file( "build.rs", &format!( r#" fn main() {{ use std::io::Write; println!("cargo:warning={{}}", "{}"); println!("hidden stdout"); write!(&mut ::std::io::stderr(), "hidden stderr"); println!("cargo:warning={{}}", "{}"); }} "#, WARNING1, WARNING2 ), ) .file("src/lib.rs", &format!("fn f() {{ {} }}", lib_src)) .publish(); } fn make_upstream(main_src: &str) -> Project { project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" "#, ) .file("src/main.rs", &format!("fn main() {{ {} }}", main_src)) .build() } #[cargo_test] fn no_warning_on_success() { make_lib(""); let upstream = make_upstream(""); upstream .cargo("build") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... [DOWNLOADED] bar v0.0.1 ([..]) [COMPILING] bar v0.0.1 [COMPILING] foo v0.0.1 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn no_warning_on_bin_failure() { make_lib(""); let upstream = make_upstream("hi()"); upstream .cargo("build") .with_status(101) .with_stdout_does_not_contain("hidden stdout") .with_stderr_does_not_contain("hidden stderr") .with_stderr_does_not_contain(&format!("[WARNING] {}", WARNING1)) .with_stderr_does_not_contain(&format!("[WARNING] {}", WARNING2)) .with_stderr_contains("[UPDATING] `[..]` index") .with_stderr_contains("[DOWNLOADED] bar v0.0.1 ([..])") .with_stderr_contains("[COMPILING] bar v0.0.1") .with_stderr_contains("[COMPILING] foo v0.0.1 ([..])") .run(); } #[cargo_test] fn warning_on_lib_failure() { make_lib("err()"); let upstream = make_upstream(""); upstream .cargo("build") .with_status(101) .with_stdout_does_not_contain("hidden stdout") .with_stderr_does_not_contain("hidden stderr") .with_stderr_does_not_contain("[COMPILING] foo v0.0.1 ([..])") .with_stderr_contains("[UPDATING] `[..]` index") .with_stderr_contains("[DOWNLOADED] bar v0.0.1 ([..])") .with_stderr_contains("[COMPILING] bar v0.0.1") .with_stderr_contains(&format!("[WARNING] {}", WARNING1)) .with_stderr_contains(&format!("[WARNING] {}", WARNING2)) .run(); } cargo-0.66.0/tests/testsuite/weak_dep_features.rs000066400000000000000000000355001432416201200221120ustar00rootroot00000000000000//! Tests for weak-dep-features. use super::features2::switch_to_resolver_2; use cargo_test_support::paths::CargoPathExt; use cargo_test_support::registry::{Dependency, Package}; use cargo_test_support::{project, publish}; use std::fmt::Write; // Helper to create lib.rs files that check features. 
fn require(enabled_features: &[&str], disabled_features: &[&str]) -> String { let mut s = String::new(); for feature in enabled_features { writeln!(s, "#[cfg(not(feature=\"{feature}\"))] compile_error!(\"expected feature {feature} to be enabled\");", feature=feature).unwrap(); } for feature in disabled_features { writeln!(s, "#[cfg(feature=\"{feature}\")] compile_error!(\"did not expect feature {feature} to be enabled\");", feature=feature).unwrap(); } s } #[cargo_test] fn simple() { Package::new("bar", "1.0.0") .feature("feat", &[]) .file("src/lib.rs", &require(&["feat"], &[])) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = { version = "1.0", optional = true } [features] f1 = ["bar?/feat"] "#, ) .file("src/lib.rs", &require(&["f1"], &[])) .build(); // It's a bit unfortunate that this has to download `bar`, but avoiding // that is extremely difficult. p.cargo("check --features f1") .with_stderr( "\ [UPDATING] [..] [DOWNLOADING] crates ... [DOWNLOADED] bar v1.0.0 [..] [CHECKING] foo v0.1.0 [..] [FINISHED] [..] ", ) .run(); p.cargo("check --features f1,bar") .with_stderr( "\ [CHECKING] bar v1.0.0 [CHECKING] foo v0.1.0 [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn deferred() { // A complex chain that requires deferring enabling the feature due to // another dependency getting enabled. Package::new("bar", "1.0.0") .feature("feat", &[]) .file("src/lib.rs", &require(&["feat"], &[])) .publish(); Package::new("dep", "1.0.0") .add_dep(Dependency::new("bar", "1.0").optional(true)) .feature("feat", &["bar?/feat"]) .publish(); Package::new("bar_activator", "1.0.0") .feature_dep("dep", "1.0", &["bar"]) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] dep = { version = "1.0", features = ["feat"] } bar_activator = "1.0" "#, ) .file("src/lib.rs", "") .build(); p.cargo("check") .with_stderr( "\ [UPDATING] [..] [DOWNLOADING] crates ... 
[DOWNLOADED] dep v1.0.0 [..] [DOWNLOADED] bar_activator v1.0.0 [..] [DOWNLOADED] bar v1.0.0 [..] [CHECKING] bar v1.0.0 [CHECKING] dep v1.0.0 [CHECKING] bar_activator v1.0.0 [CHECKING] foo v0.1.0 [..] [FINISHED] [..] ", ) .run(); } #[cargo_test] fn not_optional_dep() { // Attempt to use dep_name?/feat where dep_name is not optional. Package::new("dep", "1.0.0").feature("feat", &[]).publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] dep = "1.0" [features] feat = ["dep?/feat"] "#, ) .file("src/lib.rs", "") .build(); p.cargo("check") .with_status(101) .with_stderr("\ error: failed to parse manifest at `[ROOT]/foo/Cargo.toml` Caused by: feature `feat` includes `dep?/feat` with a `?`, but `dep` is not an optional dependency A non-optional dependency of the same name is defined; consider removing the `?` or changing the dependency to be optional ") .run(); } #[cargo_test] fn optional_cli_syntax() { // --features bar?/feat Package::new("bar", "1.0.0") .feature("feat", &[]) .file("src/lib.rs", &require(&["feat"], &[])) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = { version = "1.0", optional = true } "#, ) .file("src/lib.rs", "") .build(); // Does not build bar. p.cargo("check --features bar?/feat") .with_stderr( "\ [UPDATING] [..] [DOWNLOADING] crates ... [DOWNLOADED] bar v1.0.0 [..] [CHECKING] foo v0.1.0 [..] [FINISHED] [..] ", ) .run(); // Builds bar. p.cargo("check --features bar?/feat,bar") .with_stderr( "\ [CHECKING] bar v1.0.0 [CHECKING] foo v0.1.0 [..] [FINISHED] [..] ", ) .run(); eprintln!("check V2 resolver"); switch_to_resolver_2(&p); p.build_dir().rm_rf(); // Does not build bar. p.cargo("check --features bar?/feat") .with_stderr( "\ [CHECKING] foo v0.1.0 [..] [FINISHED] [..] ", ) .run(); // Builds bar. p.cargo("check --features bar?/feat,bar") .with_stderr( "\ [CHECKING] bar v1.0.0 [CHECKING] foo v0.1.0 [..] [FINISHED] [..] 
", ) .run(); } #[cargo_test] fn required_features() { // required-features doesn't allow ? Package::new("bar", "1.0.0").feature("feat", &[]).publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = { version = "1.0", optional = true } [[bin]] name = "foo" required-features = ["bar?/feat"] "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("check") .with_status(101) .with_stderr( "\ [UPDATING] [..] [ERROR] invalid feature `bar?/feat` in required-features of target `foo`: \ optional dependency with `?` is not allowed in required-features ", ) .run(); } #[cargo_test] fn weak_with_host_decouple() { // weak-dep-features with new resolver // // foo v0.1.0 // └── common v1.0.0 // └── bar v1.0.0 <-- does not have `feat` enabled // [build-dependencies] // └── bar_activator v1.0.0 // └── common v1.0.0 // └── bar v1.0.0 <-- does have `feat` enabled Package::new("bar", "1.0.0") .feature("feat", &[]) .file( "src/lib.rs", r#" pub fn feat() -> bool { cfg!(feature = "feat") } "#, ) .publish(); Package::new("common", "1.0.0") .add_dep(Dependency::new("bar", "1.0").optional(true)) .feature("feat", &["bar?/feat"]) .file( "src/lib.rs", r#" #[cfg(feature = "bar")] pub fn feat() -> bool { bar::feat() } #[cfg(not(feature = "bar"))] pub fn feat() -> bool { false } "#, ) .publish(); Package::new("bar_activator", "1.0.0") .feature_dep("common", "1.0", &["bar", "feat"]) .file( "src/lib.rs", r#" pub fn feat() -> bool { common::feat() } "#, ) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" resolver = "2" [dependencies] common = { version = "1.0", features = ["feat"] } [build-dependencies] bar_activator = "1.0" "#, ) .file( "src/main.rs", r#" fn main() { assert!(!common::feat()); } "#, ) .file( "build.rs", r#" fn main() { assert!(bar_activator::feat()); } "#, ) .build(); p.cargo("run") .with_stderr( "\ [UPDATING] [..] [DOWNLOADING] crates ... [DOWNLOADED] [..] [DOWNLOADED] [..] 
[DOWNLOADED] [..] [COMPILING] bar v1.0.0 [COMPILING] common v1.0.0 [COMPILING] bar_activator v1.0.0 [COMPILING] foo v0.1.0 [..] [FINISHED] [..] [RUNNING] `target/debug/foo[EXE]` ", ) .run(); } #[cargo_test] fn weak_namespaced() { // Behavior with a dep: dependency. Package::new("bar", "1.0.0") .feature("feat", &[]) .file("src/lib.rs", &require(&["feat"], &[])) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = { version = "1.0", optional = true } [features] f1 = ["bar?/feat"] f2 = ["dep:bar"] "#, ) .file("src/lib.rs", &require(&["f1"], &["f2", "bar"])) .build(); p.cargo("check --features f1") .with_stderr( "\ [UPDATING] [..] [DOWNLOADING] crates ... [DOWNLOADED] bar v1.0.0 [..] [CHECKING] foo v0.1.0 [..] [FINISHED] [..] ", ) .run(); p.cargo("tree -f") .arg("{p} feats:{f}") .with_stdout("foo v0.1.0 ([ROOT]/foo) feats:") .run(); p.cargo("tree --features f1 -f") .arg("{p} feats:{f}") .with_stdout("foo v0.1.0 ([ROOT]/foo) feats:f1") .run(); p.cargo("tree --features f1,f2 -f") .arg("{p} feats:{f}") .with_stdout( "\ foo v0.1.0 ([ROOT]/foo) feats:f1,f2 └── bar v1.0.0 feats:feat ", ) .run(); // "bar" remains not-a-feature p.change_file("src/lib.rs", &require(&["f1", "f2"], &["bar"])); p.cargo("check --features f1,f2") .with_stderr( "\ [CHECKING] bar v1.0.0 [CHECKING] foo v0.1.0 [..] [FINISHED] [..] 
", ) .run(); } #[cargo_test] fn tree() { Package::new("bar", "1.0.0") .feature("feat", &[]) .file("src/lib.rs", &require(&["feat"], &[])) .publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] bar = { version = "1.0", optional = true } [features] f1 = ["bar?/feat"] "#, ) .file("src/lib.rs", &require(&["f1"], &[])) .build(); p.cargo("tree --features f1") .with_stdout("foo v0.1.0 ([ROOT]/foo)") .run(); p.cargo("tree --features f1,bar") .with_stdout( "\ foo v0.1.0 ([ROOT]/foo) └── bar v1.0.0 ", ) .run(); p.cargo("tree --features f1,bar -e features") .with_stdout( "\ foo v0.1.0 ([ROOT]/foo) └── bar feature \"default\" └── bar v1.0.0 ", ) .run(); p.cargo("tree --features f1,bar -e features -i bar") .with_stdout( "\ bar v1.0.0 β”œβ”€β”€ bar feature \"default\" β”‚ └── foo v0.1.0 ([ROOT]/foo) β”‚ β”œβ”€β”€ foo feature \"bar\" (command-line) β”‚ β”œβ”€β”€ foo feature \"default\" (command-line) β”‚ └── foo feature \"f1\" (command-line) └── bar feature \"feat\" └── foo feature \"f1\" (command-line) ", ) .run(); p.cargo("tree -e features --features bar?/feat") .with_stdout("foo v0.1.0 ([ROOT]/foo)") .run(); // This is a little strange in that it produces no output. // Maybe `cargo tree` should print a note about why? p.cargo("tree -e features -i bar --features bar?/feat") .with_stdout("") .run(); p.cargo("tree -e features -i bar --features bar?/feat,bar") .with_stdout( "\ bar v1.0.0 β”œβ”€β”€ bar feature \"default\" β”‚ └── foo v0.1.0 ([ROOT]/foo) β”‚ β”œβ”€β”€ foo feature \"bar\" (command-line) β”‚ └── foo feature \"default\" (command-line) └── bar feature \"feat\" (command-line) ", ) .run(); } #[cargo_test] fn publish() { // Publish behavior with /? syntax. 
Package::new("bar", "1.0.0").feature("feat", &[]).publish(); let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" description = "foo" license = "MIT" homepage = "https://example.com/" [dependencies] bar = { version = "1.0", optional = true } [features] feat1 = [] feat2 = ["bar?/feat"] "#, ) .file("src/lib.rs", "") .build(); p.cargo("publish --token sekrit") .with_stderr( "\ [UPDATING] [..] [PACKAGING] foo v0.1.0 [..] [VERIFYING] foo v0.1.0 [..] [COMPILING] foo v0.1.0 [..] [FINISHED] [..] [UPLOADING] foo v0.1.0 [..] ", ) .run(); publish::validate_upload_with_contents( r#" { "authors": [], "badges": {}, "categories": [], "deps": [ { "default_features": true, "features": [], "kind": "normal", "name": "bar", "optional": true, "registry": "https://github.com/rust-lang/crates.io-index", "target": null, "version_req": "^1.0" } ], "description": "foo", "documentation": null, "features": { "feat1": [], "feat2": ["bar?/feat"] }, "homepage": "https://example.com/", "keywords": [], "license": "MIT", "license_file": null, "links": null, "name": "foo", "readme": null, "readme_file": null, "repository": null, "vers": "0.1.0" } "#, "foo-0.1.0.crate", &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"], &[( "Cargo.toml", &format!( r#"{} [package] name = "foo" version = "0.1.0" description = "foo" homepage = "https://example.com/" license = "MIT" [dependencies.bar] version = "1.0" optional = true [features] feat1 = [] feat2 = ["bar?/feat"] "#, cargo::core::package::MANIFEST_PREAMBLE ), )], ); } cargo-0.66.0/tests/testsuite/workspaces.rs000066400000000000000000001736471432416201200206350ustar00rootroot00000000000000//! Tests for workspaces. 
use cargo_test_support::registry::Package; use cargo_test_support::{basic_lib_manifest, basic_manifest, git, project, sleep_ms}; use std::env; use std::fs; #[cargo_test] fn simple_explicit() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [workspace] members = ["bar"] "#, ) .file("src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.1.0" authors = [] workspace = ".." "#, ) .file("bar/src/main.rs", "fn main() {}"); let p = p.build(); p.cargo("build").run(); assert!(p.bin("foo").is_file()); assert!(!p.bin("bar").is_file()); p.cargo("build").cwd("bar").run(); assert!(p.bin("foo").is_file()); assert!(p.bin("bar").is_file()); assert!(p.root().join("Cargo.lock").is_file()); assert!(!p.root().join("bar/Cargo.lock").is_file()); } #[cargo_test] fn simple_explicit_default_members() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [workspace] members = ["bar"] default-members = ["bar"] "#, ) .file("src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.1.0" authors = [] workspace = ".." "#, ) .file("bar/src/main.rs", "fn main() {}"); let p = p.build(); p.cargo("build").run(); assert!(p.bin("bar").is_file()); assert!(!p.bin("foo").is_file()); } #[cargo_test] fn non_virtual_default_members_build_other_member() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [workspace] members = [".", "bar", "baz"] default-members = ["baz"] "#, ) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/lib.rs", "pub fn baz() {}") .build(); p.cargo("build") .with_stderr( "[..] Compiling baz v0.1.0 ([..])\n\ [..] 
Finished dev [unoptimized + debuginfo] target(s) in [..]\n", ) .run(); p.cargo("build --manifest-path bar/Cargo.toml") .with_stderr( "[..] Compiling bar v0.1.0 ([..])\n\ [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n", ) .run(); } #[cargo_test] fn non_virtual_default_members_build_root_project() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [workspace] members = ["bar"] default-members = ["."] "#, ) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .build(); p.cargo("build") .with_stderr( "[..] Compiling foo v0.1.0 ([..])\n\ [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n", ) .run(); } #[cargo_test] fn inferred_root() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [workspace] members = ["bar"] "#, ) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/main.rs", "fn main() {}"); let p = p.build(); p.cargo("build").run(); assert!(p.bin("foo").is_file()); assert!(!p.bin("bar").is_file()); p.cargo("build").cwd("bar").run(); assert!(p.bin("foo").is_file()); assert!(p.bin("bar").is_file()); assert!(p.root().join("Cargo.lock").is_file()); assert!(!p.root().join("bar/Cargo.lock").is_file()); } #[cargo_test] fn inferred_path_dep() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [dependencies] bar = { path = "bar" } [workspace] "#, ) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/main.rs", "fn main() {}") .file("bar/src/lib.rs", ""); let p = p.build(); p.cargo("build").run(); assert!(p.bin("foo").is_file()); assert!(!p.bin("bar").is_file()); p.cargo("build").cwd("bar").run(); assert!(p.bin("foo").is_file()); assert!(p.bin("bar").is_file()); assert!(p.root().join("Cargo.lock").is_file()); 
assert!(!p.root().join("bar/Cargo.lock").is_file()); } #[cargo_test] fn transitive_path_dep() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [dependencies] bar = { path = "bar" } [workspace] "#, ) .file("src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.1.0" authors = [] [dependencies] baz = { path = "../baz" } "#, ) .file("bar/src/main.rs", "fn main() {}") .file("bar/src/lib.rs", "") .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/main.rs", "fn main() {}") .file("baz/src/lib.rs", ""); let p = p.build(); p.cargo("build").run(); assert!(p.bin("foo").is_file()); assert!(!p.bin("bar").is_file()); assert!(!p.bin("baz").is_file()); p.cargo("build").cwd("bar").run(); assert!(p.bin("foo").is_file()); assert!(p.bin("bar").is_file()); assert!(!p.bin("baz").is_file()); p.cargo("build").cwd("baz").run(); assert!(p.bin("foo").is_file()); assert!(p.bin("bar").is_file()); assert!(p.bin("baz").is_file()); assert!(p.root().join("Cargo.lock").is_file()); assert!(!p.root().join("bar/Cargo.lock").is_file()); assert!(!p.root().join("baz/Cargo.lock").is_file()); } #[cargo_test] fn parent_pointer_works() { let p = project() .file( "foo/Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [dependencies] bar = { path = "../bar" } [workspace] "#, ) .file("foo/src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.1.0" authors = [] workspace = "../foo" "#, ) .file("bar/src/main.rs", "fn main() {}") .file("bar/src/lib.rs", ""); let p = p.build(); p.cargo("build").cwd("foo").run(); p.cargo("build").cwd("bar").run(); assert!(p.root().join("foo/Cargo.lock").is_file()); assert!(!p.root().join("bar/Cargo.lock").is_file()); } #[cargo_test] fn same_names_in_workspace() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [workspace] members = ["bar"] "#, ) .file("src/main.rs", 
"fn main() {}") .file( "bar/Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] workspace = ".." "#, ) .file("bar/src/main.rs", "fn main() {}"); let p = p.build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: two packages named `foo` in this workspace: - [..]Cargo.toml - [..]Cargo.toml ", ) .run(); } #[cargo_test] fn parent_doesnt_point_to_child() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [workspace] "#, ) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/main.rs", "fn main() {}"); let p = p.build(); p.cargo("build") .cwd("bar") .with_status(101) .with_stderr( "\ error: current package believes it's in a workspace when it's not: current: [..]Cargo.toml workspace: [..]Cargo.toml this may be fixable [..] [..] ", ) .run(); } #[cargo_test] fn invalid_parent_pointer() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] workspace = "foo" "#, ) .file("src/main.rs", "fn main() {}"); let p = p.build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: failed to read `[..]Cargo.toml` Caused by: [..] ", ) .run(); } #[cargo_test] fn invalid_members() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [workspace] members = ["foo"] "#, ) .file("src/main.rs", "fn main() {}"); let p = p.build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to load manifest for workspace member `[..]/foo` Caused by: failed to read `[..]foo/foo/Cargo.toml` Caused by: [..] 
", ) .run(); } #[cargo_test] fn bare_workspace_ok() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [workspace] "#, ) .file("src/main.rs", "fn main() {}"); let p = p.build(); p.cargo("build").run(); } #[cargo_test] fn two_roots() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [workspace] members = ["bar"] "#, ) .file("src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.1.0" authors = [] [workspace] members = [".."] "#, ) .file("bar/src/main.rs", "fn main() {}"); let p = p.build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: multiple workspace roots found in the same workspace: [..] [..] ", ) .run(); } #[cargo_test] fn workspace_isnt_root() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] workspace = "bar" "#, ) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/main.rs", "fn main() {}"); let p = p.build(); p.cargo("build") .with_status(101) .with_stderr("error: root of a workspace inferred but wasn't a root: [..]") .run(); } #[cargo_test] fn dangling_member() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [workspace] members = ["bar"] "#, ) .file("src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.1.0" authors = [] workspace = "../baz" "#, ) .file("bar/src/main.rs", "fn main() {}") .file( "baz/Cargo.toml", r#" [project] name = "baz" version = "0.1.0" authors = [] workspace = "../baz" "#, ) .file("baz/src/main.rs", "fn main() {}"); let p = p.build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: package `[..]` is a member of the wrong workspace expected: [..] actual: [..] 
", ) .run(); } #[cargo_test] fn cycle() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] workspace = "bar" "#, ) .file("src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.1.0" authors = [] workspace = ".." "#, ) .file("bar/src/main.rs", "fn main() {}"); let p = p.build(); p.cargo("build") .with_status(101) .with_stderr( "[ERROR] root of a workspace inferred but wasn't a root: [..]/foo/bar/Cargo.toml", ) .run(); } #[cargo_test] fn share_dependencies() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [dependencies] dep1 = "0.1" [workspace] members = ["bar"] "#, ) .file("src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.1.0" authors = [] [dependencies] dep1 = "< 0.1.5" "#, ) .file("bar/src/main.rs", "fn main() {}"); let p = p.build(); Package::new("dep1", "0.1.3").publish(); Package::new("dep1", "0.1.8").publish(); p.cargo("build") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... [DOWNLOADED] dep1 v0.1.3 ([..]) [COMPILING] dep1 v0.1.3 [COMPILING] foo v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); } #[cargo_test] fn fetch_fetches_all() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [workspace] members = ["bar"] "#, ) .file("src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.1.0" authors = [] [dependencies] dep1 = "*" "#, ) .file("bar/src/main.rs", "fn main() {}"); let p = p.build(); Package::new("dep1", "0.1.3").publish(); p.cargo("fetch") .with_stderr( "\ [UPDATING] `[..]` index [DOWNLOADING] crates ... 
[DOWNLOADED] dep1 v0.1.3 ([..]) ", ) .run(); } #[cargo_test] fn lock_works_for_everyone() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [dependencies] dep2 = "0.1" [workspace] members = ["bar"] "#, ) .file("src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.1.0" authors = [] [dependencies] dep1 = "0.1" "#, ) .file("bar/src/main.rs", "fn main() {}"); let p = p.build(); Package::new("dep1", "0.1.0").publish(); Package::new("dep2", "0.1.0").publish(); p.cargo("generate-lockfile") .with_stderr("[UPDATING] `[..]` index") .run(); Package::new("dep1", "0.1.1").publish(); Package::new("dep2", "0.1.1").publish(); p.cargo("build") .with_stderr( "\ [DOWNLOADING] crates ... [DOWNLOADED] dep2 v0.1.0 ([..]) [COMPILING] dep2 v0.1.0 [COMPILING] foo v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); p.cargo("build") .cwd("bar") .with_stderr( "\ [DOWNLOADING] crates ... [DOWNLOADED] dep1 v0.1.0 ([..]) [COMPILING] dep1 v0.1.0 [COMPILING] bar v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) .run(); } #[cargo_test] fn virtual_works() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/main.rs", "fn main() {}"); let p = p.build(); p.cargo("build").cwd("bar").run(); assert!(p.root().join("Cargo.lock").is_file()); assert!(p.bin("bar").is_file()); assert!(!p.root().join("bar/Cargo.lock").is_file()); } #[cargo_test] fn explicit_package_argument_works_with_virtual_manifest() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/main.rs", "fn main() {}"); let p = p.build(); p.cargo("build --package bar").run(); assert!(p.root().join("Cargo.lock").is_file()); assert!(p.bin("bar").is_file()); assert!(!p.root().join("bar/Cargo.lock").is_file()); } #[cargo_test] fn virtual_misconfigure() { let p = project() .file( "Cargo.toml", r#" [workspace] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/main.rs", "fn main() {}"); let p = p.build(); p.cargo("build") .cwd("bar") .with_status(101) .with_stderr( "\ error: current package believes it's in a workspace when it's not: current: [CWD]/Cargo.toml workspace: [..]Cargo.toml this may be fixable by adding `bar` to the `workspace.members` array of the \ manifest located at: [..] [..] 
", ) .run(); } #[cargo_test] fn virtual_build_all_implied() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/main.rs", "fn main() {}"); let p = p.build(); p.cargo("build").run(); } #[cargo_test] fn virtual_default_members() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar", "baz"] default-members = ["bar"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("bar/src/main.rs", "fn main() {}") .file("baz/src/main.rs", "fn main() {}"); let p = p.build(); p.cargo("build").run(); assert!(p.bin("bar").is_file()); assert!(!p.bin("baz").is_file()); } #[cargo_test] fn virtual_default_member_is_not_a_member() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar"] default-members = ["something-else"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/main.rs", "fn main() {}"); let p = p.build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: package `[..]something-else` is listed in workspace’s default-members \ but is not a member. ", ) .run(); } #[cargo_test] fn virtual_default_members_build_other_member() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["bar", "baz"] default-members = ["baz"] "#, ) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", "pub fn bar() {}") .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("baz/src/lib.rs", "pub fn baz() {}") .build(); p.cargo("build --manifest-path bar/Cargo.toml") .with_stderr( "[..] Compiling bar v0.1.0 ([..])\n\ [..] 
Finished dev [unoptimized + debuginfo] target(s) in [..]\n", ) .run(); } #[cargo_test] fn virtual_build_no_members() { let p = project().file( "Cargo.toml", r#" [workspace] "#, ); let p = p.build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: manifest path `[..]` contains no package: The manifest is virtual, \ and the workspace has no members. ", ) .run(); } #[cargo_test] fn include_virtual() { let p = project() .file( "Cargo.toml", r#" [project] name = "bar" version = "0.1.0" authors = [] [workspace] members = ["bar"] "#, ) .file("src/main.rs", "") .file( "bar/Cargo.toml", r#" [workspace] "#, ); let p = p.build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: multiple workspace roots found in the same workspace: [..] [..] ", ) .run(); } #[cargo_test] fn members_include_path_deps() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [workspace] members = ["p1"] [dependencies] p3 = { path = "p3" } "#, ) .file("src/lib.rs", "") .file( "p1/Cargo.toml", r#" [project] name = "p1" version = "0.1.0" authors = [] [dependencies] p2 = { path = "../p2" } "#, ) .file("p1/src/lib.rs", "") .file("p2/Cargo.toml", &basic_manifest("p2", "0.1.0")) .file("p2/src/lib.rs", "") .file("p3/Cargo.toml", &basic_manifest("p3", "0.1.0")) .file("p3/src/lib.rs", ""); let p = p.build(); p.cargo("build").cwd("p1").run(); p.cargo("build").cwd("p2").run(); p.cargo("build").cwd("p3").run(); p.cargo("build").run(); assert!(p.root().join("target").is_dir()); assert!(!p.root().join("p1/target").is_dir()); assert!(!p.root().join("p2/target").is_dir()); assert!(!p.root().join("p3/target").is_dir()); } #[cargo_test] fn new_warns_you_this_will_not_work() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [workspace] "#, ) .file("src/lib.rs", ""); let p = p.build(); p.cargo("new --lib bar") .with_stderr( "\ warning: compiling this new package may not work due to invalid workspace 
configuration current package believes it's in a workspace when it's not: current: [..] workspace: [..] this may be fixable by ensuring that this crate is depended on by the workspace \ root: [..] [..] [CREATED] library `bar` package ", ) .run(); } #[cargo_test] fn new_warning_with_corrupt_ws() { let p = project().file("Cargo.toml", "asdf").build(); p.cargo("new bar") .with_stderr( "\ [WARNING] compiling this new package may not work due to invalid workspace configuration failed to parse manifest at `[..]foo/Cargo.toml` Caused by: could not parse input as TOML Caused by: TOML parse error at line 1, column 5 | 1 | asdf | ^ Unexpected end of input Expected `.` or `=` Created binary (application) `bar` package ", ) .run(); } #[cargo_test] fn lock_doesnt_change_depending_on_crate() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [workspace] members = ['baz'] [dependencies] foo = "*" "#, ) .file("src/lib.rs", "") .file( "baz/Cargo.toml", r#" [project] name = "baz" version = "0.1.0" authors = [] [dependencies] bar = "*" "#, ) .file("baz/src/lib.rs", ""); let p = p.build(); Package::new("foo", "1.0.0").publish(); Package::new("bar", "1.0.0").publish(); p.cargo("build").run(); let lockfile = p.read_lockfile(); p.cargo("build").cwd("baz").run(); let lockfile2 = p.read_lockfile(); assert_eq!(lockfile, lockfile2); } #[cargo_test] fn rebuild_please() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ['lib', 'bin'] "#, ) .file("lib/Cargo.toml", &basic_manifest("lib", "0.1.0")) .file( "lib/src/lib.rs", r#" pub fn foo() -> u32 { 0 } "#, ) .file( "bin/Cargo.toml", r#" [package] name = "bin" version = "0.1.0" [dependencies] lib = { path = "../lib" } "#, ) .file( "bin/src/main.rs", r#" extern crate lib; fn main() { assert_eq!(lib::foo(), 0); } "#, ); let p = p.build(); p.cargo("run").cwd("bin").run(); sleep_ms(1000); p.change_file("lib/src/lib.rs", "pub fn foo() -> u32 { 1 }"); p.cargo("build").cwd("lib").run(); 
p.cargo("run") .cwd("bin") .with_status(101) .with_stderr_contains("[..]assertion[..]") .run(); } #[cargo_test] fn workspace_in_git() { let git_project = git::new("dep1", |project| { project .file( "Cargo.toml", r#" [workspace] members = ["foo"] "#, ) .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("foo/src/lib.rs", "") }); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "lib" version = "0.1.0" [dependencies.foo] git = '{}' "#, git_project.url() ), ) .file( "src/lib.rs", r#" pub fn foo() -> u32 { 0 } "#, ); let p = p.build(); p.cargo("build").run(); } #[cargo_test] fn lockfile_can_specify_nonexistent_members() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a"] "#, ) .file("a/Cargo.toml", &basic_manifest("a", "0.1.0")) .file("a/src/main.rs", "fn main() {}") .file( "Cargo.lock", r#" [[package]] name = "a" version = "0.1.0" [[package]] name = "b" version = "0.1.0" "#, ); let p = p.build(); p.cargo("build").cwd("a").run(); } #[cargo_test] fn you_cannot_generate_lockfile_for_empty_workspaces() { let p = project() .file( "Cargo.toml", r#" [workspace] "#, ) .file("bar/Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("bar/src/main.rs", "fn main() {}"); let p = p.build(); p.cargo("update") .with_status(101) .with_stderr("error: you can't generate a lockfile for an empty workspace.") .run(); } #[cargo_test] fn workspace_with_transitive_dev_deps() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["mbrubeck@example.com"] [dependencies.bar] path = "bar" [workspace] "#, ) .file("src/main.rs", r#"fn main() {}"#) .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["mbrubeck@example.com"] [dev-dependencies.baz] path = "../baz" "#, ) .file( "bar/src/lib.rs", r#" pub fn init() {} #[cfg(test)] #[test] fn test() { extern crate baz; baz::do_stuff(); } "#, ) .file("baz/Cargo.toml", &basic_manifest("baz", "0.5.0")) .file("baz/src/lib.rs", r#"pub fn 
do_stuff() {}"#); let p = p.build(); p.cargo("test -p bar").run(); } #[cargo_test] fn error_if_parent_cargo_toml_is_invalid() { let p = project() .file("Cargo.toml", "Totally not a TOML file") .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/main.rs", "fn main() {}"); let p = p.build(); p.cargo("build") .cwd("bar") .with_status(101) .with_stderr_contains("[ERROR] failed to parse manifest at `[..]`") .run(); } #[cargo_test] fn relative_path_for_member_works() { let p = project() .file( "foo/Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [workspace] members = ["../bar"] "#, ) .file("foo/src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.1.0" authors = [] workspace = "../foo" "#, ) .file("bar/src/main.rs", "fn main() {}"); let p = p.build(); p.cargo("build").cwd("foo").run(); p.cargo("build").cwd("bar").run(); } #[cargo_test] fn relative_path_for_root_works() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [workspace] [dependencies] subproj = { path = "./subproj" } "#, ) .file("src/main.rs", "fn main() {}") .file("subproj/Cargo.toml", &basic_manifest("subproj", "0.1.0")) .file("subproj/src/main.rs", "fn main() {}"); let p = p.build(); p.cargo("build --manifest-path ./Cargo.toml").run(); p.cargo("build --manifest-path ../Cargo.toml") .cwd("subproj") .run(); } #[cargo_test] fn path_dep_outside_workspace_is_not_member() { let p = project() .no_manifest() .file( "ws/Cargo.toml", r#" [project] name = "ws" version = "0.1.0" authors = [] [dependencies] foo = { path = "../foo" } [workspace] "#, ) .file("ws/src/lib.rs", "extern crate foo;") .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("foo/src/lib.rs", ""); let p = p.build(); p.cargo("build").cwd("ws").run(); } #[cargo_test] fn test_in_and_out_of_workspace() { let p = project() .no_manifest() .file( "ws/Cargo.toml", r#" [project] name = "ws" version = "0.1.0" authors = 
[] [dependencies] foo = { path = "../foo" } [workspace] members = [ "../bar" ] "#, ) .file("ws/src/lib.rs", "extern crate foo; pub fn f() { foo::f() }") .file( "foo/Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [dependencies] bar = { path = "../bar" } "#, ) .file( "foo/src/lib.rs", "extern crate bar; pub fn f() { bar::f() }", ) .file( "bar/Cargo.toml", r#" [project] workspace = "../ws" name = "bar" version = "0.1.0" authors = [] "#, ) .file("bar/src/lib.rs", "pub fn f() { }"); let p = p.build(); p.cargo("build").cwd("ws").run(); assert!(p.root().join("ws/Cargo.lock").is_file()); assert!(p.root().join("ws/target").is_dir()); assert!(!p.root().join("foo/Cargo.lock").is_file()); assert!(!p.root().join("foo/target").is_dir()); assert!(!p.root().join("bar/Cargo.lock").is_file()); assert!(!p.root().join("bar/target").is_dir()); p.cargo("build").cwd("foo").run(); assert!(p.root().join("foo/Cargo.lock").is_file()); assert!(p.root().join("foo/target").is_dir()); assert!(!p.root().join("bar/Cargo.lock").is_file()); assert!(!p.root().join("bar/target").is_dir()); } #[cargo_test] fn test_path_dependency_under_member() { let p = project() .file( "ws/Cargo.toml", r#" [project] name = "ws" version = "0.1.0" authors = [] [dependencies] foo = { path = "../foo" } [workspace] "#, ) .file("ws/src/lib.rs", "extern crate foo; pub fn f() { foo::f() }") .file( "foo/Cargo.toml", r#" [project] workspace = "../ws" name = "foo" version = "0.1.0" authors = [] [dependencies] bar = { path = "./bar" } "#, ) .file( "foo/src/lib.rs", "extern crate bar; pub fn f() { bar::f() }", ) .file("foo/bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("foo/bar/src/lib.rs", "pub fn f() { }"); let p = p.build(); p.cargo("build").cwd("ws").run(); assert!(!p.root().join("foo/bar/Cargo.lock").is_file()); assert!(!p.root().join("foo/bar/target").is_dir()); p.cargo("build").cwd("foo/bar").run(); assert!(!p.root().join("foo/bar/Cargo.lock").is_file()); 
assert!(!p.root().join("foo/bar/target").is_dir()); } #[cargo_test] fn excluded_simple() { let p = project() .file( "Cargo.toml", r#" [project] name = "ws" version = "0.1.0" authors = [] [workspace] exclude = ["foo"] "#, ) .file("src/lib.rs", "") .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("foo/src/lib.rs", ""); let p = p.build(); p.cargo("build").run(); assert!(p.root().join("target").is_dir()); p.cargo("build").cwd("foo").run(); assert!(p.root().join("foo/target").is_dir()); } #[cargo_test] fn exclude_members_preferred() { let p = project() .file( "Cargo.toml", r#" [project] name = "ws" version = "0.1.0" authors = [] [workspace] members = ["foo/bar"] exclude = ["foo"] "#, ) .file("src/lib.rs", "") .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("foo/src/lib.rs", "") .file("foo/bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("foo/bar/src/lib.rs", ""); let p = p.build(); p.cargo("build").run(); assert!(p.root().join("target").is_dir()); p.cargo("build").cwd("foo").run(); assert!(p.root().join("foo/target").is_dir()); p.cargo("build").cwd("foo/bar").run(); assert!(!p.root().join("foo/bar/target").is_dir()); } #[cargo_test] fn exclude_but_also_depend() { let p = project() .file( "Cargo.toml", r#" [project] name = "ws" version = "0.1.0" authors = [] [dependencies] bar = { path = "foo/bar" } [workspace] exclude = ["foo"] "#, ) .file("src/lib.rs", "") .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("foo/src/lib.rs", "") .file("foo/bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("foo/bar/src/lib.rs", ""); let p = p.build(); p.cargo("build").run(); assert!(p.root().join("target").is_dir()); p.cargo("build").cwd("foo").run(); assert!(p.root().join("foo/target").is_dir()); p.cargo("build").cwd("foo/bar").run(); assert!(p.root().join("foo/bar/target").is_dir()); } #[cargo_test] fn excluded_default_members_still_must_be_members() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["foo"] 
default-members = ["foo", "bar"] exclude = ["bar"] "#, ) .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("foo/src/lib.rs", "") .file("bar/something.txt", ""); let p = p.build(); p.cargo("build") .with_status(101) .with_stderr( "\ error: package `[..]bar` is listed in workspace’s default-members \ but is not a member. ", ) .run(); } #[cargo_test] fn excluded_default_members_crate_glob() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["foo", "bar/*"] default-members = ["bar/*"] exclude = ["bar/quux"] "#, ) .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("foo/src/main.rs", "fn main() {}") .file("bar/baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("bar/baz/src/main.rs", "fn main() {}") .file("bar/quux/Cargo.toml", &basic_manifest("quux", "0.1.0")) .file("bar/quux/src/main.rs", "fn main() {}"); let p = p.build(); p.cargo("build").run(); assert!(p.root().join("target").is_dir()); assert!(!p.bin("foo").is_file()); assert!(p.bin("baz").is_file()); assert!(!p.bin("quux").exists()); p.cargo("build --workspace").run(); assert!(p.root().join("target").is_dir()); assert!(p.bin("foo").is_file()); assert!(!p.bin("quux").exists()); p.cargo("build").cwd("bar/quux").run(); assert!(p.root().join("bar/quux/target").is_dir()); } #[cargo_test] fn excluded_default_members_not_crate_glob() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["foo", "bar/*"] default-members = ["bar/*"] exclude = ["bar/docs"] "#, ) .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("foo/src/main.rs", "fn main() {}") .file("bar/baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) .file("bar/baz/src/main.rs", "fn main() {}") .file("bar/docs/readme.txt", "This folder is not a crate!"); let p = p.build(); p.cargo("build").run(); assert!(!p.bin("foo").is_file()); assert!(p.bin("baz").is_file()); p.cargo("build --workspace").run(); assert!(p.bin("foo").is_file()); } #[cargo_test] fn glob_syntax() { let p = project() .file( 
"Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [workspace] members = ["crates/*"] exclude = ["crates/qux"] "#, ) .file("src/main.rs", "fn main() {}") .file( "crates/bar/Cargo.toml", r#" [project] name = "bar" version = "0.1.0" authors = [] workspace = "../.." "#, ) .file("crates/bar/src/main.rs", "fn main() {}") .file( "crates/baz/Cargo.toml", r#" [project] name = "baz" version = "0.1.0" authors = [] workspace = "../.." "#, ) .file("crates/baz/src/main.rs", "fn main() {}") .file( "crates/qux/Cargo.toml", r#" [project] name = "qux" version = "0.1.0" authors = [] "#, ) .file("crates/qux/src/main.rs", "fn main() {}"); let p = p.build(); p.cargo("build").run(); assert!(p.bin("foo").is_file()); assert!(!p.bin("bar").is_file()); assert!(!p.bin("baz").is_file()); p.cargo("build").cwd("crates/bar").run(); assert!(p.bin("foo").is_file()); assert!(p.bin("bar").is_file()); p.cargo("build").cwd("crates/baz").run(); assert!(p.bin("foo").is_file()); assert!(p.bin("baz").is_file()); p.cargo("build").cwd("crates/qux").run(); assert!(!p.bin("qux").is_file()); assert!(p.root().join("Cargo.lock").is_file()); assert!(!p.root().join("crates/bar/Cargo.lock").is_file()); assert!(!p.root().join("crates/baz/Cargo.lock").is_file()); assert!(p.root().join("crates/qux/Cargo.lock").is_file()); } /*FIXME: This fails because of how workspace.exclude and workspace.members are working. #[cargo_test] fn glob_syntax_2() { let p = project() .file("Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [workspace] members = ["crates/b*"] exclude = ["crates/q*"] "#) .file("src/main.rs", "fn main() {}") .file("crates/bar/Cargo.toml", r#" [project] name = "bar" version = "0.1.0" authors = [] workspace = "../.." "#) .file("crates/bar/src/main.rs", "fn main() {}") .file("crates/baz/Cargo.toml", r#" [project] name = "baz" version = "0.1.0" authors = [] workspace = "../.." 
"#) .file("crates/baz/src/main.rs", "fn main() {}") .file("crates/qux/Cargo.toml", r#" [project] name = "qux" version = "0.1.0" authors = [] "#) .file("crates/qux/src/main.rs", "fn main() {}"); p.build(); p.cargo("build").run(); assert!(p.bin("foo").is_file()); assert!(!p.bin("bar").is_file()); assert!(!p.bin("baz").is_file()); p.cargo("build").cwd("crates/bar").run(); assert!(p.bin("foo").is_file()); assert!(p.bin("bar").is_file()); p.cargo("build").cwd("crates/baz").run(); assert!(p.bin("foo").is_file()); assert!(p.bin("baz").is_file()); p.cargo("build").cwd("crates/qux").run(); assert!(!p.bin("qux").is_file()); assert!(p.root().join("Cargo.lock").is_file()); assert!(!p.root().join("crates/bar/Cargo.lock").is_file()); assert!(!p.root().join("crates/baz/Cargo.lock").is_file()); assert!(p.root().join("crates/qux/Cargo.lock").is_file()); } */ #[cargo_test] fn glob_syntax_invalid_members() { let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [workspace] members = ["crates/*"] "#, ) .file("src/main.rs", "fn main() {}") .file("crates/bar/src/main.rs", "fn main() {}"); let p = p.build(); p.cargo("build") .with_status(101) .with_stderr( "\ [ERROR] failed to load manifest for workspace member `[..]/crates/bar` Caused by: failed to read `[..]foo/crates/bar/Cargo.toml` Caused by: [..] ", ) .run(); } /// This is a freshness test for feature use with workspaces. /// /// `feat_lib` is used by `caller1` and `caller2`, but with different features enabled. /// This test ensures that alternating building `caller1`, `caller2` doesn't force /// recompile of `feat_lib`. /// /// Ideally, once we solve rust-lang/cargo#3620, then a single Cargo build at the top level /// will be enough. 
#[cargo_test] fn dep_used_with_separate_features() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["feat_lib", "caller1", "caller2"] "#, ) .file( "feat_lib/Cargo.toml", r#" [project] name = "feat_lib" version = "0.1.0" authors = [] [features] myfeature = [] "#, ) .file("feat_lib/src/lib.rs", "") .file( "caller1/Cargo.toml", r#" [project] name = "caller1" version = "0.1.0" authors = [] [dependencies] feat_lib = { path = "../feat_lib" } "#, ) .file("caller1/src/main.rs", "fn main() {}") .file("caller1/src/lib.rs", "") .file( "caller2/Cargo.toml", r#" [project] name = "caller2" version = "0.1.0" authors = [] [dependencies] feat_lib = { path = "../feat_lib", features = ["myfeature"] } caller1 = { path = "../caller1" } "#, ) .file("caller2/src/main.rs", "fn main() {}") .file("caller2/src/lib.rs", ""); let p = p.build(); // Build the entire workspace. p.cargo("build --workspace") .with_stderr( "\ [..]Compiling feat_lib v0.1.0 ([..]) [..]Compiling caller1 v0.1.0 ([..]) [..]Compiling caller2 v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); assert!(p.bin("caller1").is_file()); assert!(p.bin("caller2").is_file()); // Build `caller1`. Should build the dep library. Because the features // are different than the full workspace, it rebuilds. // Ideally once we solve rust-lang/cargo#3620, then a single Cargo build at the top level // will be enough. p.cargo("build") .cwd("caller1") .with_stderr( "\ [..]Compiling feat_lib v0.1.0 ([..]) [..]Compiling caller1 v0.1.0 ([..]) [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) .run(); // Alternate building `caller2`/`caller1` a few times, just to make sure // features are being built separately. Should not rebuild anything. 
p.cargo("build") .cwd("caller2") .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") .run(); p.cargo("build") .cwd("caller1") .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") .run(); p.cargo("build") .cwd("caller2") .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") .run(); } #[cargo_test] fn dont_recurse_out_of_cargo_home() { let git_project = git::new("dep", |project| { project .file("Cargo.toml", &basic_manifest("dep", "0.1.0")) .file("src/lib.rs", "") .file( "build.rs", r#" use std::env; use std::path::Path; use std::process::{self, Command}; fn main() { let cargo = env::var_os("CARGO").unwrap(); let cargo_manifest_dir = env::var_os("CARGO_MANIFEST_DIR").unwrap(); let output = Command::new(cargo) .args(&["metadata", "--format-version", "1", "--manifest-path"]) .arg(&Path::new(&cargo_manifest_dir).join("Cargo.toml")) .output() .unwrap(); if !output.status.success() { eprintln!("{}", String::from_utf8(output.stderr).unwrap()); process::exit(1); } } "#, ) }); let p = project() .file( "Cargo.toml", &format!( r#" [package] name = "foo" version = "0.1.0" [dependencies.dep] git = "{}" [workspace] "#, git_project.url() ), ) .file("src/lib.rs", ""); let p = p.build(); p.cargo("build") .env("CARGO_HOME", p.root().join(".cargo")) .run(); } // FIXME: this fails because of how workspace.exclude and workspace.members are working. 
/* #[cargo_test] fn include_and_exclude() { let p = project() .file("Cargo.toml", r#" [workspace] members = ["foo"] exclude = ["foo/bar"] "#) .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("foo/src/lib.rs", "") .file("foo/bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("foo/bar/src/lib.rs", ""); p.build(); p.cargo("build").cwd("foo").run(); assert!(p.root().join("target").is_dir()); assert!(!p.root().join("foo/target").is_dir()); p.cargo("build").cwd("foo/bar").run(); assert!(p.root().join("foo/bar/target").is_dir()); } */ #[cargo_test] fn cargo_home_at_root_works() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [workspace] members = ["a"] "#, ) .file("src/lib.rs", "") .file("a/Cargo.toml", &basic_manifest("a", "0.1.0")) .file("a/src/lib.rs", ""); let p = p.build(); p.cargo("build").run(); p.cargo("build --frozen").env("CARGO_HOME", p.root()).run(); } #[cargo_test] fn relative_rustc() { let p = project() .file( "src/main.rs", r#" use std::process::Command; use std::env; fn main() { let mut cmd = Command::new("rustc"); for arg in env::args_os().skip(1) { cmd.arg(arg); } std::process::exit(cmd.status().unwrap().code().unwrap()); } "#, ) .build(); p.cargo("build").run(); let src = p .root() .join("target/debug/foo") .with_extension(env::consts::EXE_EXTENSION); Package::new("a", "0.1.0").publish(); let p = project() .at("lib") .file( "Cargo.toml", r#" [package] name = "lib" version = "0.1.0" [dependencies] a = "0.1" "#, ) .file("src/lib.rs", "") .build(); fs::copy(&src, p.root().join(src.file_name().unwrap())).unwrap(); let file = format!("./foo{}", env::consts::EXE_SUFFIX); p.cargo("build").env("RUSTC", &file).run(); } #[cargo_test] fn ws_rustc_err() { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a"] "#, ) .file("a/Cargo.toml", &basic_lib_manifest("a")) .file("a/src/lib.rs", "") .build(); p.cargo("rustc") .with_status(101) .with_stderr("[ERROR] [..]against an actual package[..]") 
.run(); p.cargo("rustdoc") .with_status(101) .with_stderr("[ERROR] [..]against an actual package[..]") .run(); } #[cargo_test] fn ws_err_unused() { for key in &[ "[lib]", "[[bin]]", "[[example]]", "[[test]]", "[[bench]]", "[dependencies]", "[dev-dependencies]", "[build-dependencies]", "[features]", "[target]", "[badges]", ] { let p = project() .file( "Cargo.toml", &format!( r#" [workspace] members = ["a"] {} "#, key ), ) .file("a/Cargo.toml", &basic_lib_manifest("a")) .file("a/src/lib.rs", "") .build(); p.cargo("check") .with_status(101) .with_stderr(&format!( "\ [ERROR] failed to parse manifest at `[..]/foo/Cargo.toml` Caused by: this virtual manifest specifies a {} section, which is not allowed ", key )) .run(); } } #[cargo_test] fn ws_warn_unused() { for (key, name) in &[ ("[profile.dev]\nopt-level = 1", "profiles"), ("[replace]\n\"bar:0.1.0\" = { path = \"bar\" }", "replace"), ("[patch.crates-io]\nbar = { path = \"bar\" }", "patch"), ] { let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a"] "#, ) .file( "a/Cargo.toml", &format!( r#" [package] name = "a" version = "0.1.0" {} "#, key ), ) .file("a/src/lib.rs", "") .build(); p.cargo("check") .with_stderr_contains(&format!( "\ [WARNING] {} for the non root package will be ignored, specify {} at the workspace root: package: [..]/foo/a/Cargo.toml workspace: [..]/foo/Cargo.toml ", name, name )) .run(); } } #[cargo_test] fn ws_warn_path() { // Warnings include path to manifest. let p = project() .file( "Cargo.toml", r#" [workspace] members = ["a"] "#, ) .file( "a/Cargo.toml", r#" cargo-features = ["edition"] [package] name = "foo" version = "0.1.0" "#, ) .file("a/src/lib.rs", "") .build(); p.cargo("check") .with_stderr_contains("[WARNING] [..]/foo/a/Cargo.toml: the cargo feature `edition`[..]") .run(); } #[cargo_test] fn invalid_missing() { // Make sure errors are not suppressed with -q. 
let p = project() .file( "Cargo.toml", r#" [package] name = "foo" version = "0.1.0" [dependencies] x = { path = 'x' } "#, ) .file("src/lib.rs", "") .build(); p.cargo("build -q") .with_status(101) .with_stderr( "\ [ERROR] failed to get `x` as a dependency of package `foo v0.1.0 [..]` Caused by: failed to load source for dependency `x` Caused by: Unable to update [..]/foo/x Caused by: failed to read `[..]foo/x/Cargo.toml` Caused by: [..] ", ) .run(); } #[cargo_test] fn member_dep_missing() { // Make sure errors are not suppressed with -q. let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" [workspace] members = ["bar"] "#, ) .file("src/main.rs", "fn main() {}") .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.1.0" [dependencies] baz = { path = "baz" } "#, ) .file("bar/src/main.rs", "fn main() {}") .build(); p.cargo("build -q") .with_status(101) .with_stderr( "\ [ERROR] failed to load manifest for workspace member `[..]/bar` Caused by: failed to load manifest for dependency `baz` Caused by: failed to read `[..]foo/bar/baz/Cargo.toml` Caused by: [..] ", ) .run(); } #[cargo_test] fn simple_primary_package_env_var() { let is_primary_package = r#" #[test] fn verify_primary_package() {{ assert!(option_env!("CARGO_PRIMARY_PACKAGE").is_some()); }} "#; let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [workspace] members = ["bar"] "#, ) .file("src/lib.rs", is_primary_package) .file( "bar/Cargo.toml", r#" [project] name = "bar" version = "0.1.0" authors = [] workspace = ".." 
"#, ) .file("bar/src/lib.rs", is_primary_package); let p = p.build(); p.cargo("test").run(); // Again, this time selecting a specific crate p.cargo("clean").run(); p.cargo("test -p bar").run(); // Again, this time selecting all crates p.cargo("clean").run(); p.cargo("test --all").run(); } #[cargo_test] fn virtual_primary_package_env_var() { let is_primary_package = r#" #[test] fn verify_primary_package() {{ assert!(option_env!("CARGO_PRIMARY_PACKAGE").is_some()); }} "#; let p = project() .file( "Cargo.toml", r#" [workspace] members = ["foo", "bar"] "#, ) .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) .file("foo/src/lib.rs", is_primary_package) .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) .file("bar/src/lib.rs", is_primary_package); let p = p.build(); p.cargo("test").run(); // Again, this time selecting a specific crate p.cargo("clean").run(); p.cargo("test -p foo").run(); } #[cargo_test] fn ensure_correct_workspace_when_nested() { let p = project() .file( "Cargo.toml", r#" [workspace] [project] name = "bar" version = "0.1.0" authors = [] "#, ) .file("src/lib.rs", "") .file( "sub/Cargo.toml", r#" [workspace] members = ["foo"] "#, ) .file( "sub/foo/Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] [dependencies] bar = { path = "../.."} "#, ) .file("sub/foo/src/main.rs", "fn main() {}"); let p = p.build(); p.cargo("tree") .cwd("sub/foo") .with_stdout( "\ foo v0.1.0 ([..]/foo/sub/foo) └── bar v0.1.0 ([..]/foo)\ ", ) .run(); } cargo-0.66.0/tests/testsuite/yank.rs000066400000000000000000000073661432416201200174100ustar00rootroot00000000000000//! Tests for the `cargo yank` command. 
use std::fs; use cargo_test_support::paths::CargoPathExt; use cargo_test_support::project; use cargo_test_support::registry; fn setup(name: &str, version: &str) { let dir = registry::api_path().join(format!("api/v1/crates/{}/{}", name, version)); dir.mkdir_p(); fs::write(dir.join("yank"), r#"{"ok": true}"#).unwrap(); } #[cargo_test] fn explicit_version() { registry::init(); setup("foo", "0.0.1"); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("yank --version 0.0.1 --token sekrit").run(); p.cargo("yank --undo --version 0.0.1 --token sekrit") .with_status(101) .with_stderr( " Updating `[..]` index Unyank foo@0.0.1 error: failed to undo a yank from the registry at file:///[..] Caused by: EOF while parsing a value at line 1 column 0", ) .run(); } #[cargo_test] fn inline_version() { registry::init(); setup("foo", "0.0.1"); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("yank foo@0.0.1 --token sekrit").run(); p.cargo("yank --undo foo@0.0.1 --token sekrit") .with_status(101) .with_stderr( " Updating `[..]` index Unyank foo@0.0.1 error: failed to undo a yank from the registry at file:///[..] 
Caused by: EOF while parsing a value at line 1 column 0", ) .run(); } #[cargo_test] fn version_required() { registry::init(); setup("foo", "0.0.1"); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("yank foo --token sekrit") .with_status(101) .with_stderr("error: `--version` is required") .run(); } #[cargo_test] fn inline_version_without_name() { registry::init(); setup("foo", "0.0.1"); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("yank @0.0.1 --token sekrit") .with_status(101) .with_stderr("error: missing crate name for `@0.0.1`") .run(); } #[cargo_test] fn inline_and_explicit_version() { registry::init(); setup("foo", "0.0.1"); let p = project() .file( "Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" "#, ) .file("src/main.rs", "fn main() {}") .build(); p.cargo("yank foo@0.0.1 --version 0.0.1 --token sekrit") .with_status(101) .with_stderr("error: cannot specify both `@0.0.1` and `--version`") .run(); } cargo-0.66.0/triagebot.toml000066400000000000000000000005461432416201200155530ustar00rootroot00000000000000[assign] [ping.windows] message = """\ Hey Windows Group! This bug has been identified as a good "Windows candidate". In case it's useful, here are some [instructions] for tackling these sorts of bugs. Maybe take a look? Thanks! <3 [instructions]: https://rustc-dev-guide.rust-lang.org/notification-groups/windows.html """ label = "O-windows" [shortcut]