pax_global_header00006660000000000000000000000064126465633320014524gustar00rootroot0000000000000052 comment=28a0cbb2212c295264a7a3031a4be0113a17aa91 cargo-0.8.0/000077500000000000000000000000001264656333200126245ustar00rootroot00000000000000cargo-0.8.0/.gitignore000066400000000000000000000002141264656333200146110ustar00rootroot00000000000000/target .cargo /config.stamp /Makefile /config.mk src/doc/build src/etc/*.pyc src/registry/target src/registry/Cargo.lock rustc __pycache__ cargo-0.8.0/.gitmodules000066400000000000000000000001631264656333200150010ustar00rootroot00000000000000[submodule "src/rust-installer"] path = src/rust-installer url = https://github.com/rust-lang/rust-installer.git cargo-0.8.0/.travis.install.deps.sh000077500000000000000000000000631264656333200171470ustar00rootroot00000000000000#!/bin/sh set -ex python src/etc/install-deps.py cargo-0.8.0/.travis.yml000066400000000000000000000016301264656333200147350ustar00rootroot00000000000000language: rust rust: - 1.2.0 - stable - beta - nightly sudo: false script: - ./configure --prefix=$HOME/cargo-install --disable-cross-tests --disable-optimize - make - make test - make distcheck - make doc - make install - make uninstall after_success: | [ $TRAVIS_BRANCH = master ] && [ $TRAVIS_PULL_REQUEST = false ] && [ $(uname -s) = Linux ] && pip install ghp-import --user $USER && $HOME/.local/bin/ghp-import -n target/doc && git push -qf https://${TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages env: global: - secure: scGpeetUfba5RWyuS4yt10bPoFAI9wpHEReIFqEx7eH5vr2Anajk6+70jW6GdrWVdUvdINiArlQ3An2DeB9vEUWcBjw8WvuPtOH0tDMoSsuVloPlFD8yn1Ac0Bx9getAO5ofxqtoNg+OV4MDVuGabEesqAOWqURNrBC7XK+ntC8= os: - linux - osx branches: only: - master addons: apt: sources: - kalakris-cmake packages: - cmake - g++-multilib - lib32stdc++6 cargo-0.8.0/Cargo.lock000066400000000000000000000373511264656333200145420ustar00rootroot00000000000000[root] name = "cargo" version = "0.8.0" dependencies = [ "advapi32-sys 0.1.2 
(registry+https://github.com/rust-lang/crates.io-index)", "bufstream 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "crates-io 0.1.0", "crossbeam 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "curl 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", "docopt 0.6.78 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "filetime 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", "flate2 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", "git2 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "git2-curl 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "glob 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", "hamcrest 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", "libgit2-sys 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", "regex 0.1.44 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", "semver 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "tar 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "tempdir 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "term 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)", "toml 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)", "url 0.2.38 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = 
"advapi32-sys" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "aho-corasick" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "memchr 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "bitflags" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "bufstream" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "cmake" version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "gcc 0.3.21 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "crates-io" version = "0.1.0" dependencies = [ "curl 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "crossbeam" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "curl" version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "curl-sys 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-sys 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", "url 0.2.38 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "curl-sys" version = "0.1.29" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "gcc 0.3.21 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", 
"libz-sys 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-sys 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "docopt" version = "0.6.78" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "regex 0.1.44 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", "strsim 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "env_logger" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "log 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "regex 0.1.44 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "filetime" version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "kernel32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "flate2" version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "libc 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", "miniz-sys 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "gcc" version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "advapi32-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "gdi32-sys" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "git2" 
version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bitflags 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", "libgit2-sys 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "url 0.2.38 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "git2-curl" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "curl 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", "git2 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "url 0.2.38 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "glob" version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "hamcrest" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "num 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "kernel32-sys" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "kernel32-sys" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "libc" version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "libgit2-sys" version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cmake 0.1.12 
(registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", "libssh2-sys 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)", "libz-sys 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-sys 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "libressl-pnacl-sys" version = "2.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "pnacl-build-helper 1.4.10 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "libssh2-sys" version = "0.1.34" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cmake 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", "libz-sys 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-sys 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "libz-sys" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "gcc 0.3.21 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "log" version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "libc 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "matches" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "memchr" 
version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "libc 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "miniz-sys" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "gcc 0.3.21 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "num" version = "0.1.29" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "rand 0.3.13 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "num_cpus" version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "kernel32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "openssl-sys" version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "gdi32-sys 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", "libressl-pnacl-sys 2.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "user32-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "pkg-config" version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "pnacl-build-helper" version = "1.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "tempdir 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "rand" version = "0.3.13" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "advapi32-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "regex" version = "0.1.44" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "aho-corasick 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", "regex-syntax 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "regex-syntax" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "rustc-serialize" version = "0.3.16" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "semver" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "strsim" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "tar" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "filetime 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "tempdir" version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "rand 0.3.13 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "term" version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "kernel32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "time" version = "0.1.34" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "kernel32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "toml" version = "0.1.25" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "rustc-serialize 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "url" version = "0.2.38" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", "uuid 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "user32-sys" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "uuid" version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "rand 0.3.13 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "winapi" version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "winapi-build" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "ws2_32-sys" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", ] 
cargo-0.8.0/Cargo.toml000066400000000000000000000020651264656333200145570ustar00rootroot00000000000000[project] name = "cargo" version = "0.8.0" authors = ["Yehuda Katz ", "Carl Lerche ", "Alex Crichton "] license = "MIT/Apache-2.0" homepage = "https://crates.io" repository = "https://github.com/rust-lang/cargo" documentation = "http://doc.crates.io" description = """ Cargo, a package manager for Rust. """ [lib] name = "cargo" path = "src/cargo/lib.rs" [dependencies] advapi32-sys = "0.1" crates-io = { path = "src/crates-io", version = "0.1" } crossbeam = "0.1" curl = "0.2" docopt = "0.6" env_logger = "0.3" filetime = "0.1" flate2 = "0.2" git2 = "0.3" git2-curl = "0.3" glob = "0.2" kernel32-sys = "0.1" libc = "0.2" libgit2-sys = "0.3" log = "0.3" num_cpus = "0.2" regex = "0.1" rustc-serialize = "0.3" semver = "0.2.0" tar = "0.3" term = "0.2" time = "0.1" toml = "0.1" url = "0.2" winapi = "0.2" [dev-dependencies] tempdir = "0.3" hamcrest = "0.1" bufstream = "0.1" filetime = "0.1" [[bin]] name = "cargo" test = false doc = false [[test]] name = "tests" [[test]] name = "resolve" cargo-0.8.0/LICENSE-APACHE000066400000000000000000000251371264656333200145600ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. 
For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. 
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. 
If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 
You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. 
Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. cargo-0.8.0/LICENSE-MIT000066400000000000000000000020571264656333200142640ustar00rootroot00000000000000Copyright (c) 2014 The Rust Project Developers Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
cargo-0.8.0/LICENSE-THIRD-PARTY000066400000000000000000002055101264656333200154010ustar00rootroot00000000000000The Cargo source code itself does not bundle any third party libraries, but it depends on a number of libraries which carry their own copyright notices and license terms. These libraries are normally all linked static into the binary distributions of Cargo: * OpenSSL - http://www.openssl.org/source/license.html Copyright (c) 1998-2011 The OpenSSL Project. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. All advertising materials mentioning features or use of this software must display the following acknowledgment: "This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit. (http://www.openssl.org/)" 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to endorse or promote products derived from this software without prior written permission. For written permission, please contact openssl-core@openssl.org. 5. Products derived from this software may not be called "OpenSSL" nor may "OpenSSL" appear in their names without prior written permission of the OpenSSL Project. 6. 
Redistributions of any form whatsoever must retain the following acknowledgment: "This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/)" THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ==================================================================== This product includes cryptographic software written by Eric Young (eay@cryptsoft.com). This product includes software written by Tim Hudson (tjh@cryptsoft.com). --- Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) All rights reserved. This package is an SSL implementation written by Eric Young (eay@cryptsoft.com). The implementation was written so as to conform with Netscapes SSL. This library is free for commercial and non-commercial use as long as the following conditions are aheared to. The following conditions apply to all code found in this distribution, be it the RC4, RSA, lhash, DES, etc., code; not just the SSL code. The SSL documentation included with this distribution is covered by the same copyright terms except that the holder is Tim Hudson (tjh@cryptsoft.com). Copyright remains Eric Young's, and as such any Copyright notices in the code are not to be removed. 
If this package is used in a product, Eric Young should be given attribution as the author of the parts of the library used. This can be in the form of a textual message at program startup or in documentation (online or textual) provided with the package. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. All advertising materials mentioning features or use of this software must display the following acknowledgement: "This product includes cryptographic software written by Eric Young (eay@cryptsoft.com)" The word 'cryptographic' can be left out if the rouines from the library being used are not cryptographic related :-). 4. If you include any Windows specific code (or a derivative thereof) from the apps directory (application code) you must include an acknowledgement: "This product includes software written by Tim Hudson (tjh@cryptsoft.com)" THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
The licence and distribution terms for any publically available version or derivative of this code cannot be changed. i.e. this code cannot simply be copied and put under another distribution licence [including the GNU Public Licence.] * libgit2 - https://github.com/libgit2/libgit2/blob/master/COPYING libgit2 is Copyright (C) the libgit2 contributors, unless otherwise stated. See the AUTHORS file for details. Note that the only valid version of the GPL as far as this project is concerned is _this_ particular version of the license (ie v2, not v2.2 or v3.x or whatever), unless explicitly otherwise stated. ---------------------------------------------------------------------- LINKING EXCEPTION In addition to the permissions in the GNU General Public License, the authors give you unlimited permission to link the compiled version of this library into combinations with other programs, and to distribute those combinations without any restriction coming from the use of this file. (The General Public License restrictions do apply in other respects; for example, they cover modification of the file, and distribution when not linked into a combined executable.) ---------------------------------------------------------------------- GNU GENERAL PUBLIC LICENSE Version 2, June 1991 Copyright (C) 1989, 1991 Free Software Foundation, Inc. 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. 
(Some other Free Software Foundation software is covered by the GNU Library General Public License instead.) You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things. To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it. For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software. Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations. Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all. 
The precise terms and conditions for copying, distribution and modification follow. GNU GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you". Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does. 1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. 
You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change. b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License. c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. 
Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program. In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following: a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.) The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. 
However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code. 4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it. 6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License. 7. 
If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 8. 
If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation. 10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. 
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. 
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA Also add information on how to contact you by electronic and paper mail. If the program is interactive, make it output a short notice like this when it starts in an interactive mode: Gnomovision version 69, Copyright (C) year name of author Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program. You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here is a sample; alter the names: Yoyodyne, Inc., hereby disclaims all copyright interest in the program `Gnomovision' (which makes passes at compilers) written by James Hacker. , 1 April 1989 Ty Coon, President of Vice This General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Library General Public License instead of this License. 
---------------------------------------------------------------------- The bundled ZLib code is licensed under the ZLib license: Copyright (C) 1995-2010 Jean-loup Gailly and Mark Adler This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software. Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions: 1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required. 2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software. 3. This notice may not be removed or altered from any source distribution. Jean-loup Gailly Mark Adler jloup@gzip.org madler@alumni.caltech.edu ---------------------------------------------------------------------- The Clar framework is licensed under the MIT license: Copyright (C) 2011 by Vicent Marti Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ---------------------------------------------------------------------- The regex library (deps/regex/) is licensed under the GNU LGPL GNU LESSER GENERAL PUBLIC LICENSE Version 2.1, February 1999 Copyright (C) 1991, 1999 Free Software Foundation, Inc. 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. [This is the first released version of the Lesser GPL. It also counts as the successor of the GNU Library Public License, version 2, hence the version number 2.1.] Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This license, the Lesser General Public License, applies to some specially designated software packages--typically libraries--of the Free Software Foundation and other authors who decide to use it. You can use it too, but we suggest you first think carefully about whether this license or the ordinary General Public License is the better strategy to use in any particular case, based on the explanations below. When we speak of free software, we are referring to freedom of use, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish); that you receive source code or can get it if you want it; that you can change the software and use pieces of it in new free programs; and that you are informed that you can do these things. 
To protect your rights, we need to make restrictions that forbid distributors to deny you these rights or to ask you to surrender these rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library or if you modify it. For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. If you link other code with the library, you must provide complete object files to the recipients, so that they can relink them with the library after making changes to the library and recompiling it. And you must show them these terms so they know their rights. We protect your rights with a two-step method: (1) we copyright the library, and (2) we offer you this license, which gives you legal permission to copy, distribute and/or modify the library. To protect each distributor, we want to make it very clear that there is no warranty for the free library. Also, if the library is modified by someone else and passed on, the recipients should know that what they have is not the original version, so that the original author's reputation will not be affected by problems that might be introduced by others. Finally, software patents pose a constant threat to the existence of any free program. We wish to make sure that a company cannot effectively restrict the users of a free program by obtaining a restrictive license from a patent holder. Therefore, we insist that any patent license obtained for a version of the library must be consistent with the full freedom of use specified in this license. Most GNU software, including some libraries, is covered by the ordinary GNU General Public License. This license, the GNU Lesser General Public License, applies to certain designated libraries, and is quite different from the ordinary General Public License. 
We use this license for certain libraries in order to permit linking those libraries into non-free programs. When a program is linked with a library, whether statically or using a shared library, the combination of the two is legally speaking a combined work, a derivative of the original library. The ordinary General Public License therefore permits such linking only if the entire combination fits its criteria of freedom. The Lesser General Public License permits more lax criteria for linking other code with the library. We call this license the "Lesser" General Public License because it does Less to protect the user's freedom than the ordinary General Public License. It also provides other free software developers Less of an advantage over competing non-free programs. These disadvantages are the reason we use the ordinary General Public License for many libraries. However, the Lesser license provides advantages in certain special circumstances. For example, on rare occasions, there may be a special need to encourage the widest possible use of a certain library, so that it becomes a de-facto standard. To achieve this, non-free programs must be allowed to use the library. A more frequent case is that a free library does the same job as widely used non-free libraries. In this case, there is little to gain by limiting the free library to free software only, so we use the Lesser General Public License. In other cases, permission to use a particular library in non-free programs enables a greater number of people to use a large body of free software. For example, permission to use the GNU C Library in non-free programs enables many more people to use the whole GNU operating system, as well as its variant, the GNU/Linux operating system. 
Although the Lesser General Public License is Less protective of the users' freedom, it does ensure that the user of a program that is linked with the Library has the freedom and the wherewithal to run that program using a modified version of the Library. The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a "work based on the library" and a "work that uses the library". The former contains code derived from the library, whereas the latter must be combined with the library in order to run. GNU LESSER GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. This License Agreement applies to any software library or other program which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Lesser General Public License (also called "this License"). Each licensee is addressed as "you". A "library" means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables. The "Library", below, refers to any such software library or work which has been distributed under these terms. A "work based on the Library" means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term "modification".) "Source code" for a work means the preferred form of the work for making modifications to it. For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library. 
Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does. 1. You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) The modified work must itself be a software library. b) You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change. c) You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License. d) If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful. 
(For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library. In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. (If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices. 
Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy. This option is useful when you wish to copy part of the code of the Library into a program that is not a library. 4. You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange. If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code. 5. A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a "work that uses the Library". Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License. However, linking a "work that uses the Library" with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a "work that uses the library". The executable is therefore covered by this License. Section 6 states terms for distribution of such executables. When a "work that uses the Library" uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. 
The threshold for this to be true is not precisely defined by law. If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.) Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself. 6. As an exception to the Sections above, you may also combine or link a "work that uses the Library" with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications. You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. Also, you must do one of these things: a) Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable "work that uses the Library", as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. 
(It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.) b) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (1) uses at run time a copy of the library already present on the user's computer system, rather than copying library functions into the executable, and (2) will operate properly with a modified version of the library, if the user installs one, as long as the modified version is interface-compatible with the version that the work was made with. c) Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution. d) If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place. e) Verify that the user has already received a copy of these materials or that you have already sent this user a copy. For an executable, the required form of the "work that uses the Library" must include any data and utility programs needed for reproducing the executable from it. However, as a special exception, the materials to be distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute. 7. 
You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things: a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above. b) Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work. 8. You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 9. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it. 10. Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. 
You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties with this License. 11. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 12. 
If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 13. The Free Software Foundation may publish revised and/or new versions of the Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Library specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation. 14. If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. 
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Libraries If you develop a new library, and you want it to be of the greatest possible use to the public, we recommend making it free software that everyone can redistribute and change. You can do so by permitting redistribution under these terms (or, alternatively, under the terms of the ordinary General Public License). To apply these terms, attach the following notices to the library. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. 
Copyright (C) This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA Also add information on how to contact you by electronic and paper mail. You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the library, if necessary. Here is a sample; alter the names: Yoyodyne, Inc., hereby disclaims all copyright interest in the library `Frob' (a library for tweaking knobs) written by James Random Hacker. , 1 April 1990 Ty Coon, President of Vice That's all there is to it! ---------------------------------------------------------------------- * libssh2 - http://www.libssh2.org/license.html Copyright (c) 2004-2007 Sara Golemon Copyright (c) 2005,2006 Mikhail Gusarov Copyright (c) 2006-2007 The Written Word, Inc. Copyright (c) 2007 Eli Fant Copyright (c) 2009 Daniel Stenberg Copyright (C) 2008, 2009 Simon Josefsson All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. Neither the name of the copyright holder nor the names of any other contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * libcurl - http://curl.haxx.se/docs/copyright.html COPYRIGHT AND PERMISSION NOTICE Copyright (c) 1996 - 2014, Daniel Stenberg, daniel@haxx.se. All rights reserved. Permission to use, copy, modify, and distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. Except as contained in this notice, the name of a copyright holder shall not be used in advertising or otherwise to promote the sale, use or other dealings in this Software without prior written authorization of the copyright holder. * flate2-rs - https://github.com/alexcrichton/flate2-rs/blob/master/LICENSE-MIT * link-config - https://github.com/alexcrichton/link-config/blob/master/LICENSE-MIT * openssl-static-sys - https://github.com/alexcrichton/openssl-static-sys/blob/master/LICENSE-MIT * toml-rs - https://github.com/alexcrichton/toml-rs/blob/master/LICENSE-MIT * libssh2-static-sys - https://github.com/alexcrichton/libssh2-static-sys/blob/master/LICENSE-MIT * git2-rs - https://github.com/alexcrichton/git2-rs/blob/master/LICENSE-MIT * tar-rs - https://github.com/alexcrichton/tar-rs/blob/master/LICENSE-MIT Copyright (c) 2014 Alex Crichton Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * glob - https://github.com/rust-lang/glob/blob/master/LICENSE-MIT * semver - https://github.com/rust-lang/semver/blob/master/LICENSE-MIT Copyright (c) 2014 The Rust Project Developers Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
* rust-url - https://github.com/servo/rust-url/blob/master/LICENSE-MIT Copyright (c) 2006-2009 Graydon Hoare Copyright (c) 2009-2013 Mozilla Foundation Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * rust-encoding - https://github.com/lifthrasiir/rust-encoding/blob/master/LICENSE.txt The MIT License (MIT) Copyright (c) 2013, Kang Seonghoon. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * curl-rust - https://github.com/carllerche/curl-rust/blob/master/LICENSE Copyright (c) 2014 Carl Lerche Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * docopt.rs - https://github.com/docopt/docopt.rs/blob/master/UNLICENSE This is free and unencumbered software released into the public domain. Anyone is free to copy, modify, publish, use, compile, sell, or distribute this software, either in source code form or as a compiled binary, for any purpose, commercial or non-commercial, and by any means. 
In jurisdictions that recognize copyright laws, the author or authors of this software dedicate any and all copyright interest in the software to the public domain. We make this dedication for the benefit of the public at large and to the detriment of our heirs and successors. We intend this dedication to be an overt act of relinquishment in perpetuity of all present and future rights to this software under copyright law. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. For more information, please refer to cargo-0.8.0/Makefile.in000066400000000000000000000160761264656333200147030ustar00rootroot00000000000000CFG_RELEASE_NUM=0.8.0 CFG_RELEASE_LABEL= include config.mk ifneq ($(CFG_LOCAL_RUST_ROOT),) export LD_LIBRARY_PATH := $(CFG_LOCAL_RUST_ROOT)/lib:$(LD_LIBRARY_PATH) export DYLD_LIBRARY_PATH := $(CFG_LOCAL_RUST_ROOT)/lib:$(DYLD_LIBRARY_PATH) endif export PATH := $(dir $(CFG_RUSTC)):$(PATH) ifdef CFG_ENABLE_NIGHTLY CFG_RELEASE=$(CFG_RELEASE_NUM)$(CFG_RELEASE_LABEL)-nightly CFG_PACKAGE_VERS = nightly else CFG_RELEASE=$(CFG_RELEASE_NUM)$(CFG_RELEASE_LABEL) CFG_PACKAGE_VERS=$(CFG_RELEASE) endif CFG_BUILD_DATE = $(shell date +%F) ifeq ($(wildcard .git),) CFG_VERSION = $(CFG_RELEASE) (built $(CFG_BUILD_DATE)) else CFG_VER_DATE = $(shell git log -1 --date=short --pretty=format:'%cd') CFG_VER_HASH = $(shell git rev-parse --short HEAD) CFG_VERSION = $(CFG_RELEASE) ($(CFG_VER_HASH) $(CFG_VER_DATE)) endif PKG_NAME = cargo-$(CFG_PACKAGE_VERS) ifdef CFG_DISABLE_VERIFY_INSTALL MAYBE_DISABLE_VERIFY=--disable-verify else MAYBE_DISABLE_VERIFY= endif ifdef CFG_DISABLE_OPTIMIZE OPT_FLAG= else 
OPT_FLAG=--release endif ifdef VERBOSE VERBOSE_FLAG=--verbose else VERBOSE_FLAG= endif export CFG_VERSION export CFG_DISABLE_CROSS_TESTS ifeq ($(OS),Windows_NT) X = .exe endif TARGET_ROOT = target BIN_TARGETS := cargo BIN_TARGETS := $(BIN_TARGETS:src/bin/%.rs=%) BIN_TARGETS := $(filter-out cargo,$(BIN_TARGETS)) define DIST_TARGET ifdef CFG_DISABLE_OPTIMIZE TARGET_$(1) = $$(TARGET_ROOT)/$(1)/debug else TARGET_$(1) = $$(TARGET_ROOT)/$(1)/release endif DISTDIR_$(1) = $$(TARGET_$(1))/dist IMGDIR_$(1) = $$(DISTDIR_$(1))/$$(PKG_NAME)-$(1)-image OVERLAYDIR_$(1) = $$(DISTDIR_$(1))/$$(PKG_NAME)-$(1)-overlay PKGDIR_$(1) = $$(DISTDIR_$(1))/$$(PKG_NAME)-$(1) BIN_TARGETS_$(1) := $$(BIN_TARGETS:%=$$(TARGET_$(1))/%$$(X)) endef $(foreach target,$(CFG_TARGET),$(eval $(call DIST_TARGET,$(target)))) ifdef CFG_LOCAL_CARGO CARGO := $(CFG_LOCAL_CARGO) else CARGO := $(TARGET_ROOT)/snapshot/bin/cargo$(X) endif all: $(foreach target,$(CFG_TARGET),cargo-$(target)) define CARGO_TARGET cargo-$(1): $$(CARGO) $$(CFG_RUSTC) -V $$(CARGO) --version $$(CARGO) build --target $(1) $$(OPT_FLAG) $$(VERBOSE_FLAG) $$(ARGS) test-unit-$(1): $$(CARGO) @mkdir -p target/$(1)/cit $$(CARGO) test --target $(1) $$(VERBOSE_FLAG) $$(only) endef $(foreach target,$(CFG_TARGET),$(eval $(call CARGO_TARGET,$(target)))) $(TARGET_ROOT)/snapshot/bin/cargo$(X): src/snapshots.txt $(CFG_PYTHON) src/etc/dl-snapshot.py $(CFG_BUILD) touch $@ # === Tests test: style no-exes $(foreach target,$(CFG_TARGET),test-unit-$(target)) style: sh tests/check-style.sh no-exes: find $$(git ls-files) -type f \ \( -perm -u+x -or -perm -g+x -or -perm -o+x \) \ -not -name configure -not -name '*.sh' -not -name '*.rs' \ -not -name '*.py' -not -wholename "*/rust-installer/*" | \ grep '.*' \ && exit 1 || exit 0 # === Misc Makefile config.mk: config.stamp config.stamp: $(CFG_SRC_DIR)configure $(CFG_SRC_DIR)Makefile.in $(CFG_SRC_DIR)configure $(CFG_CONFIGURE_ARGS) clean-all: clean clean: rm -rf $(TARGET_ROOT) # === Documentation DOCS := index faq 
config guide manifest build-script pkgid-spec crates-io \ environment-variables DOC_DIR := target/doc DOC_OPTS := --markdown-no-toc \ --markdown-css stylesheets/normalize.css \ --markdown-css stylesheets/all.css \ --markdown-css stylesheets/prism.css \ --html-before-content src/doc/header.html \ --html-after-content src/doc/footer.html ASSETS := CNAME images/noise.png images/forkme.png images/Cargo-Logo-Small.png \ stylesheets/all.css stylesheets/normalize.css javascripts/prism.js \ javascripts/all.js stylesheets/prism.css images/circle-with-i.png \ images/search.png images/org-level-acl.png images/auth-level-acl.png doc: $(foreach doc,$(DOCS),target/doc/$(doc).html) \ $(foreach asset,$(ASSETS),target/doc/$(asset)) \ target/doc/cargo/index.html target/doc/cargo/index.html: $(CARGO) doc --no-deps $(DOC_DIR)/%.html: src/doc/%.md src/doc/header.html src/doc/footer.html @mkdir -p $(@D) $(CFG_RUSTDOC) $< -o $(@D) $(DOC_OPTS) $(DOC_DIR)/%: src/doc/% @mkdir -p $(@D) cp $< $@ # === Distribution define DO_DIST_TARGET dist-$(1): $$(DISTDIR_$(1))/$$(PKG_NAME)-$(1).tar.gz # One may wonder why some of the commands here are prefixed with `cd -P .`, and # that's a good question! On some of the windows bots, PWD has a windows-style # path, and that ends up choking the installation script in various ways. # Prefixing commands with this `cd -P .` helps the bots to set the right PWD env # var. distcheck-$(1): dist-$(1) rm -rf $$(TARGET_$(1))/distcheck mkdir -p $$(TARGET_$(1))/distcheck (cd $$(TARGET_$(1))/distcheck && tar xf ../dist/$$(PKG_NAME)-$(1).tar.gz) cd -P . && $$(TARGET_$(1))/distcheck/$$(PKG_NAME)-$(1)/install.sh \ --prefix=$$(TARGET_$(1))/distcheck/install $$(TARGET_$(1))/distcheck/install/bin/cargo -V > /dev/null cd -P . 
&& $$(TARGET_$(1))/distcheck/$$(PKG_NAME)-$(1)/install.sh \ --prefix=$$(TARGET_$(1))/distcheck/install --uninstall [ -f $$(TARGET_$(1))/distcheck/install/bin/cargo$(X) ] && exit 1 || exit 0 prepare-image-$(1): @[ -f $$(TARGET_$(1))/cargo$$(X) ] || echo 'Please run `make` first' @[ -f $$(TARGET_$(1))/cargo$$(X) ] rm -rf $$(IMGDIR_$(1)) mkdir -p $$(IMGDIR_$(1))/bin $$(IMGDIR_$(1))/lib/cargo \ $$(IMGDIR_$(1))/share/man/man1 \ $$(IMGDIR_$(1))/share/doc/cargo \ $$(IMGDIR_$(1))/share/zsh/site-functions \ $$(IMGDIR_$(1))/etc/bash_completion.d cp $$(TARGET_$(1))/cargo$$(X) $$(IMGDIR_$(1))/bin cp src/etc/cargo.1 $$(IMGDIR_$(1))/share/man/man1 cp src/etc/_cargo $$(IMGDIR_$(1))/share/zsh/site-functions/_cargo cp src/etc/cargo.bashcomp.sh $$(IMGDIR_$(1))/etc/bash_completion.d/cargo cp README.md LICENSE-MIT LICENSE-APACHE LICENSE-THIRD-PARTY \ $$(IMGDIR_$(1))/share/doc/cargo prepare-overlay-$(1): rm -Rf $$(OVERLAYDIR_$(1)) mkdir -p $$(OVERLAYDIR_$(1)) cp README.md LICENSE-MIT LICENSE-APACHE LICENSE-THIRD-PARTY \ $$(OVERLAYDIR_$(1)) echo "$(CFG_VERSION)" > $$(OVERLAYDIR_$(1))/version $$(DISTDIR_$(1))/$$(PKG_NAME)-$(1).tar.gz: prepare-image-$(1) prepare-overlay-$(1) sh src/rust-installer/gen-installer.sh \ --product-name=Rust \ --rel-manifest-dir=rustlib \ --success-message=Rust-is-ready-to-roll. 
\ --image-dir=$$(IMGDIR_$(1)) \ --work-dir=./$$(DISTDIR_$(1)) \ --output-dir=./$$(DISTDIR_$(1)) \ --non-installed-overlay=$$(OVERLAYDIR_$(1)) \ --package-name=$$(PKG_NAME)-$(1) \ --component-name=cargo \ --legacy-manifest-dirs=rustlib,cargo rm -Rf $$(IMGDIR_$(1)) install-$(1): $$(DISTDIR_$(1))/$$(PKG_NAME)-$(1).tar.gz $$(PKGDIR_$(1))/install.sh \ --prefix="$$(CFG_PREFIX)" \ --destdir="$$(DESTDIR)" $$(MAYBE_DISABLE_VERIFY) uninstall-$(1): $$(DISTDIR_$(1))/$$(PKG_NAME)-$(1).tar.gz $$(PKGDIR_$(1))/install.sh \ --prefix="$$(CFG_PREFIX)" \ --destdir="$$(DESTDIR)" \ --uninstall endef $(foreach target,$(CFG_TARGET),$(eval $(call DO_DIST_TARGET,$(target)))) dist: $(foreach target,$(CFG_TARGET),dist-$(target)) distcheck: $(foreach target,$(CFG_TARGET),distcheck-$(target)) install: $(foreach target,$(CFG_TARGET),install-$(target)) uninstall: $(foreach target,$(CFG_TARGET), uninstall-$(target)) # Setup phony tasks .PHONY: all clean clean-all dist distcheck install uninstall test test-unit style # Disable unnecessary built-in rules .SUFFIXES: cargo-0.8.0/README.md000066400000000000000000000066471264656333200141200ustar00rootroot00000000000000Cargo downloads your Rust project’s dependencies and compiles your project. Learn more at http://doc.crates.io/ ## Installing Cargo Cargo is distributed by default with Rust, so if you've got `rustc` installed locally you probably also have `cargo` installed locally. If, however, you would like to install Cargo from the nightly binaries that are generated, you may also do so! Note that these nightlies are not official binaries, so they are only provided in one format with one installation method. Each tarball below contains a top-level `install.sh` script to install Cargo. 
* [`x86_64-unknown-linux-gnu`](https://static.rust-lang.org/cargo-dist/cargo-nightly-x86_64-unknown-linux-gnu.tar.gz) * [`i686-unknown-linux-gnu`](https://static.rust-lang.org/cargo-dist/cargo-nightly-i686-unknown-linux-gnu.tar.gz) * [`x86_64-apple-darwin`](https://static.rust-lang.org/cargo-dist/cargo-nightly-x86_64-apple-darwin.tar.gz) * [`i686-apple-darwin`](https://static.rust-lang.org/cargo-dist/cargo-nightly-i686-apple-darwin.tar.gz) * [`x86_64-pc-windows-gnu`](https://static.rust-lang.org/cargo-dist/cargo-nightly-x86_64-pc-windows-gnu.tar.gz) * [`i686-pc-windows-gnu`](https://static.rust-lang.org/cargo-dist/cargo-nightly-i686-pc-windows-gnu.tar.gz) * [`x86_64-pc-windows-msvc`](https://static.rust-lang.org/cargo-dist/cargo-nightly-x86_64-pc-windows-msvc.tar.gz) Note that if you're on Windows you will have to run the `install.sh` script from inside an MSYS shell, likely from a MinGW-64 installation. ## Compiling from Source Cargo requires the following tools and packages to build: * `rustc` * `python` * `curl` (on Unix) * `cmake` * OpenSSL headers (only for Unix, this is the `libssl-dev` package on ubuntu) Cargo can then be compiled like many other standard unix-like projects: ```sh git clone https://github.com/rust-lang/cargo cd cargo git submodule update --init python -B src/etc/install-deps.py ./configure --local-rust-root="$PWD"/rustc make make install ``` More options can be discovered through `./configure`, such as compiling cargo for more than one target. For example, if you'd like to compile both 32 and 64 bit versions of cargo on unix you would use: ``` $ ./configure --target=i686-unknown-linux-gnu,x86_64-unknown-linux-gnu ``` ## Adding new subcommands to Cargo Cargo is designed to be extensible with new subcommands without having to modify Cargo itself. See [the Wiki page][third-party-subcommands] for more details and a list of known community-developed subcommands. 
[third-party-subcommands]: https://github.com/rust-lang/cargo/wiki/Third-party-cargo-subcommands ## Contributing to the Docs To contribute to the docs, all you need to do is change the markdown files in the `src/doc` directory. ## Reporting Issues Found a bug? We'd love to know about it! Please report all issues on the github [issue tracker][issues]. [issues]: https://github.com/rust-lang/cargo/issues ## License Cargo is primarily distributed under the terms of both the MIT license and the Apache License (Version 2.0). See LICENSE-APACHE and LICENSE-MIT for details. ### Third party software This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/). In binary form, this product includes software that is licensed under the terms of the GNU General Public License, version 2, with a linking exception, which can be obtained from the [upstream repository][1]. [1]: https://github.com/libgit2/libgit2 cargo-0.8.0/appveyor.yml000066400000000000000000000012101264656333200152060ustar00rootroot00000000000000environment: CFG_DISABLE_CROSS_TESTS: 1 matrix: - MSVC: 1 BITS: 32 TARGET: i686-pc-windows-msvc ARCH: x86 NEEDS_LIBGCC: 1 - MSVC: 1 BITS: 64 TARGET: x86_64-pc-windows-msvc ARCH: amd64 install: - python src/etc/install-deps.py - python src/etc/dl-snapshot.py %TARGET% - call "C:\Program Files (x86)\Microsoft Visual Studio 12.0\VC\vcvarsall.bat" %ARCH% - SET PATH=%PATH%;%cd%/rustc/bin - SET PATH=%PATH%;%cd%/target/snapshot/bin - if defined NEEDS_LIBGCC set PATH=%PATH%;C:\MinGW\bin - rustc -V - cargo -V build: false test_script: - cargo test -- --nocapture branches: only: - master cargo-0.8.0/configure000077500000000000000000000215511264656333200145370ustar00rootroot00000000000000#!/bin/sh msg() { echo "configure: $1" } step_msg() { msg msg "$1" msg } warn() { echo "configure: WARNING: $1" } err() { echo "configure: error: $1" exit 1 } need_ok() { if [ $? 
-ne 0 ] then err "$1" fi } need_cmd() { if command -v $1 >/dev/null 2>&1 then msg "found $1" else err "need $1" fi } make_dir() { if [ ! -d $1 ] then msg "mkdir -p $1" mkdir -p $1 fi } copy_if_changed() { if cmp -s $1 $2 then msg "leaving $2 unchanged" else msg "cp $1 $2" cp -f $1 $2 chmod u-w $2 # make copied artifact read-only fi } move_if_changed() { if cmp -s $1 $2 then msg "leaving $2 unchanged" else msg "mv $1 $2" mv -f $1 $2 chmod u-w $2 # make moved artifact read-only fi } putvar() { local T eval T=\$$1 eval TLEN=\${#$1} if [ $TLEN -gt 35 ] then printf "configure: %-20s := %.35s ...\n" $1 "$T" else printf "configure: %-20s := %s %s\n" $1 "$T" "$2" fi printf "%-20s := %s\n" $1 "$T" >>config.tmp } probe() { local V=$1 shift local P local T for P do T=$(command -v $P 2>&1) if [ $? -eq 0 ] then VER0=$($P --version 2>/dev/null | head -1 \ | sed -e 's/[^0-9]*\([vV]\?[0-9.]\+[^ ]*\).*/\1/' ) if [ $? -eq 0 -a "x${VER0}" != "x" ] then VER="($VER0)" else VER="" fi break else VER="" T="" fi done eval $V=\$T putvar $V "$VER" } probe_need() { local V=$1 probe $* eval VV=\$$V if [ -z "$VV" ] then err "needed, but unable to find any of: $*" fi } validate_opt () { for arg in $CFG_CONFIGURE_ARGS do isArgValid=0 for option in $BOOL_OPTIONS do if test --disable-$option = $arg then isArgValid=1 fi if test --enable-$option = $arg then isArgValid=1 fi done for option in $VAL_OPTIONS do if echo "$arg" | grep -q -- "--$option=" then isArgValid=1 fi done if [ "$arg" = "--help" ] then echo echo "No more help available for Configure options," echo "check the Wiki or join our IRC channel" break else if test $isArgValid -eq 0 then err "Option '$arg' is not recognized" fi fi done } valopt() { VAL_OPTIONS="$VAL_OPTIONS $1" local OP=$1 local DEFAULT=$2 shift shift local DOC="$*" if [ $HELP -eq 0 ] then local UOP=$(echo $OP | tr '[:lower:]' '[:upper:]' | tr '\-' '\_') local V="CFG_${UOP}" eval $V="$DEFAULT" for arg in $CFG_CONFIGURE_ARGS do if echo "$arg" | grep -q -- "--$OP=" then 
val=$(echo "$arg" | cut -f2 -d=) eval $V=$val fi done putvar $V else if [ -z "$DEFAULT" ] then DEFAULT="" fi OP="${OP}=[${DEFAULT}]" printf " --%-30s %s\n" "$OP" "$DOC" fi } opt() { BOOL_OPTIONS="$BOOL_OPTIONS $1" local OP=$1 local DEFAULT=$2 shift shift local DOC="$*" local FLAG="" if [ $DEFAULT -eq 0 ] then FLAG="enable" else FLAG="disable" DOC="don't $DOC" fi if [ $HELP -eq 0 ] then for arg in $CFG_CONFIGURE_ARGS do if [ "$arg" = "--${FLAG}-${OP}" ] then OP=$(echo $OP | tr 'a-z-' 'A-Z_') FLAG=$(echo $FLAG | tr 'a-z' 'A-Z') local V="CFG_${FLAG}_${OP}" eval $V=1 putvar $V fi done else if [ ! -z "$META" ] then OP="$OP=<$META>" fi printf " --%-30s %s\n" "$FLAG-$OP" "$DOC" fi } envopt() { local NAME=$1 local V="CFG_${NAME}" eval VV=\$$V # If configure didn't set a value already, then check environment. # # (It is recommended that the configure script always check the # environment before setting any values to envopt variables; see # e.g. how CFG_CC is handled, where it first checks `-z "$CC"`, # and issues msg if it ends up employing that provided value.) if [ -z "$VV" ] then eval $V=\$$NAME eval VV=\$$V fi # If script or environment provided a value, save it. if [ ! 
-z "$VV" ] then putvar $V fi } msg "looking for configure programs" need_cmd cmp need_cmd mkdir need_cmd printf need_cmd cut need_cmd head need_cmd grep need_cmd xargs need_cmd cp need_cmd find need_cmd uname need_cmd date need_cmd tr need_cmd sed need_cmd cmake if [ "${OS}" != "Windows_NT" ]; then need_cmd curl fi CFG_SRC_DIR="$(cd $(dirname $0) && pwd)/" CFG_BUILD_DIR="$(pwd)/" CFG_SELF="$0" CFG_CONFIGURE_ARGS="$@" OPTIONS="" HELP=0 if [ "$1" = "--help" ] then HELP=1 shift echo echo "Usage: $CFG_SELF [options]" echo echo "Options:" echo else msg "recreating config.tmp" echo '' >config.tmp step_msg "processing $CFG_SELF args" fi BOOL_OPTIONS="" VAL_OPTIONS="" opt debug 1 "build with extra debug fun" opt optimize 1 "build with optimizations" opt nightly 0 "build nightly packages" opt verify-install 1 "verify installed binaries work" opt cross-tests 1 "run cross-compilation tests" valopt prefix "/usr/local" "set installation prefix" valopt local-rust-root "" "set prefix for local rust binary" if [ $HELP -eq 0 ]; then if [ ! -z "${CFG_LOCAL_RUST_ROOT}" ]; then export LD_LIBRARY_PATH="${CFG_LOCAL_RUST_ROOT}/lib:$LD_LIBRARY_PATH" export DYLD_LIBRARY_PATH="${CFG_LOCAL_RUST_ROOT}/lib:$DYLD_LIBRARY_PATH" LRV=`${CFG_LOCAL_RUST_ROOT}/bin/rustc --version` if [ $? 
-eq 0 ]; then step_msg "using rustc at: ${CFG_LOCAL_RUST_ROOT} with version: $LRV" else err "failed to run rustc at: ${CFG_LOCAL_RUST_ROOT}" fi CFG_RUSTC="${CFG_LOCAL_RUST_ROOT}/bin/rustc" else probe_need CFG_RUSTC rustc fi DEFAULT_BUILD=$("${CFG_RUSTC}" -vV | grep 'host: ' | sed 's/host: //') fi valopt build "${DEFAULT_BUILD}" "GNUs ./configure syntax LLVM build triple" valopt host "${CFG_BUILD}" "GNUs ./configure syntax LLVM host triples" valopt target "${CFG_HOST}" "GNUs ./configure syntax LLVM target triples" valopt localstatedir "/var/lib" "local state directory" valopt sysconfdir "/etc" "install system configuration files" valopt datadir "${CFG_PREFIX}/share" "install data" valopt infodir "${CFG_PREFIX}/share/info" "install additional info" valopt mandir "${CFG_PREFIX}/share/man" "install man pages in PATH" valopt libdir "${CFG_PREFIX}/lib" "install libraries" valopt local-cargo "" "local cargo to bootstrap from" if [ $HELP -eq 1 ] then echo exit 0 fi # Validate Options step_msg "validating $CFG_SELF args" validate_opt step_msg "looking for build programs" probe_need CFG_CURLORWGET curl wget probe_need CFG_PYTHON python probe_need CFG_CC cc gcc clang if [ ! -z "${CFG_LOCAL_RUST_ROOT}" ]; then CFG_RUSTDOC="${CFG_LOCAL_RUST_ROOT}/bin/rustdoc" else probe_need CFG_RUSTDOC rustdoc fi # a little post-processing of various config values CFG_PREFIX=${CFG_PREFIX%/} CFG_MANDIR=${CFG_MANDIR%/} CFG_HOST="$(echo $CFG_HOST | tr ',' ' ')" CFG_TARGET="$(echo $CFG_TARGET | tr ',' ' ')" # copy host-triples to target-triples so that hosts are a subset of targets V_TEMP="" for i in $CFG_HOST $CFG_TARGET; do echo "$V_TEMP" | grep -qF $i || V_TEMP="$V_TEMP${V_TEMP:+ }$i" done CFG_TARGET=$V_TEMP if [ "$CFG_SRC_DIR" != "$CFG_BUILD_DIR" ]; then err "cargo does not currently support an out-of-tree build dir" fi if [ ! -z "$CFG_ENABLE_NIGHTLY" ]; then if [ ! 
-f .cargo/config ]; then mkdir -p .cargo cat > .cargo/config <<-EOF [target.x86_64-unknown-linux-gnu.openssl] rustc-flags = "-l static=ssl -l static=crypto -l dl -L /home/rustbuild/root64/lib" root = "/home/rustbuild/root64" include = "/home/rustbuild/root64/include" [target.i686-unknown-linux-gnu.openssl] rustc-flags = "-l static=ssl -l static=crypto -l dl -L /home/rustbuild/root32/lib" root = "/home/rustbuild/root32" include = "/home/rustbuild/root32/include" EOF fi fi step_msg "writing configuration" putvar CFG_SRC_DIR putvar CFG_BUILD_DIR putvar CFG_CONFIGURE_ARGS putvar CFG_PREFIX putvar CFG_BUILD putvar CFG_HOST putvar CFG_TARGET putvar CFG_LIBDIR putvar CFG_MANDIR putvar CFG_RUSTC putvar CFG_RUSTDOC msg copy_if_changed ${CFG_SRC_DIR}Makefile.in ./Makefile move_if_changed config.tmp config.mk rm -f config.tmp touch config.stamp step_msg "complete" msg cargo-0.8.0/src/000077500000000000000000000000001264656333200134135ustar00rootroot00000000000000cargo-0.8.0/src/bin/000077500000000000000000000000001264656333200141635ustar00rootroot00000000000000cargo-0.8.0/src/bin/bench.rs000066400000000000000000000100501264656333200156040ustar00rootroot00000000000000use cargo::ops; use cargo::util::{CliResult, CliError, Human, Config}; use cargo::util::important_paths::{find_root_manifest_for_wd}; #[derive(RustcDecodable)] struct Options { flag_no_run: bool, flag_package: Vec, flag_jobs: Option, flag_features: Vec, flag_no_default_features: bool, flag_target: Option, flag_manifest_path: Option, flag_verbose: bool, flag_quiet: bool, flag_color: Option, flag_lib: bool, flag_bin: Vec, flag_example: Vec, flag_test: Vec, flag_bench: Vec, arg_args: Vec, } pub const USAGE: &'static str = " Execute all benchmarks of a local package Usage: cargo bench [options] [--] [...] 
Options: -h, --help Print this message --lib Benchmark only this package's library --bin NAME Benchmark only the specified binary --example NAME Benchmark only the specified example --test NAME Benchmark only the specified test target --bench NAME Benchmark only the specified bench target --no-run Compile, but don't run benchmarks -p SPEC, --package SPEC ... Package to run benchmarks for -j N, --jobs N The number of jobs to run in parallel --features FEATURES Space-separated list of features to also build --no-default-features Do not build the `default` feature --target TRIPLE Build for the target triple --manifest-path PATH Path to the manifest to build benchmarks for -v, --verbose Use verbose output -q, --quiet No output printed to stdout --color WHEN Coloring: auto, always, never All of the trailing arguments are passed to the benchmark binaries generated for filtering benchmarks and generally providing options configuring how they run. If the --package argument is given, then SPEC is a package id specification which indicates which package should be benchmarked. If it is not given, then the current package is benchmarked. For more information on SPEC and its format, see the `cargo help pkgid` command. The --jobs argument affects the building of the benchmark executable but does not affect how many jobs are used when running the benchmarks. Compilation can be customized with the `bench` profile in the manifest. 
"; pub fn execute(options: Options, config: &Config) -> CliResult> { let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); let ops = ops::TestOptions { no_run: options.flag_no_run, no_fail_fast: false, compile_opts: ops::CompileOptions { config: config, jobs: options.flag_jobs, target: options.flag_target.as_ref().map(|s| &s[..]), features: &options.flag_features, no_default_features: options.flag_no_default_features, spec: &options.flag_package, exec_engine: None, release: true, mode: ops::CompileMode::Bench, filter: ops::CompileFilter::new(options.flag_lib, &options.flag_bin, &options.flag_test, &options.flag_example, &options.flag_bench), target_rustdoc_args: None, target_rustc_args: None, }, }; let err = try!(ops::run_benches(&root, &ops, &options.arg_args)); match err { None => Ok(None), Some(err) => { Err(match err.exit.as_ref().and_then(|e| e.code()) { Some(i) => CliError::new("", i), None => CliError::from_error(Human(err), 101) }) } } } cargo-0.8.0/src/bin/build.rs000066400000000000000000000067611264656333200156420ustar00rootroot00000000000000use std::env; use cargo::ops::CompileOptions; use cargo::ops; use cargo::util::important_paths::{find_root_manifest_for_wd}; use cargo::util::{CliResult, Config}; #[derive(RustcDecodable)] struct Options { flag_package: Vec, flag_jobs: Option, flag_features: Vec, flag_no_default_features: bool, flag_target: Option, flag_manifest_path: Option, flag_verbose: bool, flag_quiet: bool, flag_color: Option, flag_release: bool, flag_lib: bool, flag_bin: Vec, flag_example: Vec, flag_test: Vec, flag_bench: Vec, } pub const USAGE: &'static str = " Compile a local package and all of its dependencies Usage: cargo build [options] Options: -h, --help Print this message -p SPEC, --package SPEC ... 
Package to build -j N, --jobs N The number of jobs to run in parallel --lib Build only this package's library --bin NAME Build only the specified binary --example NAME Build only the specified example --test NAME Build only the specified test target --bench NAME Build only the specified benchmark target --release Build artifacts in release mode, with optimizations --features FEATURES Space-separated list of features to also build --no-default-features Do not build the `default` feature --target TRIPLE Build for the target triple --manifest-path PATH Path to the manifest to compile -v, --verbose Use verbose output -q, --quiet No output printed to stdout --color WHEN Coloring: auto, always, never If the --package argument is given, then SPEC is a package id specification which indicates which package should be built. If it is not given, then the current package is built. For more information on SPEC and its format, see the `cargo help pkgid` command. Compilation can be configured via the use of profiles which are configured in the manifest. The default profile for this command is `dev`, but passing the --release flag will use the `release` profile instead. 
"; pub fn execute(options: Options, config: &Config) -> CliResult> { debug!("executing; cmd=cargo-build; args={:?}", env::args().collect::>()); try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); let opts = CompileOptions { config: config, jobs: options.flag_jobs, target: options.flag_target.as_ref().map(|t| &t[..]), features: &options.flag_features, no_default_features: options.flag_no_default_features, spec: &options.flag_package, exec_engine: None, mode: ops::CompileMode::Build, release: options.flag_release, filter: ops::CompileFilter::new(options.flag_lib, &options.flag_bin, &options.flag_test, &options.flag_example, &options.flag_bench), target_rustdoc_args: None, target_rustc_args: None, }; try!(ops::compile(&root, &opts)); Ok(None) } cargo-0.8.0/src/bin/cargo.rs000066400000000000000000000221431264656333200156260ustar00rootroot00000000000000extern crate cargo; extern crate env_logger; extern crate git2_curl; extern crate rustc_serialize; extern crate toml; #[macro_use] extern crate log; use std::collections::BTreeSet; use std::env; use std::fs; use std::path::PathBuf; use cargo::execute_main_without_stdin; use cargo::util::{self, CliResult, lev_distance, Config, human, CargoResult}; #[derive(RustcDecodable)] struct Flags { flag_list: bool, flag_verbose: bool, flag_quiet: bool, flag_color: Option, arg_command: String, arg_args: Vec, } const USAGE: &'static str = " Rust's package manager Usage: cargo [...] 
cargo [options] Options: -h, --help Display this message -V, --version Print version info and exit --list List installed commands -v, --verbose Use verbose output -q, --quiet No output printed to stdout --color WHEN Coloring: auto, always, never Some common cargo commands are: build Compile the current project clean Remove the target directory doc Build this project's and its dependencies' documentation new Create a new cargo project run Build and execute src/main.rs test Run the tests bench Run the benchmarks update Update dependencies listed in Cargo.lock search Search registry for crates publish Package and upload this project to the registry install Install a Rust binary See 'cargo help ' for more information on a specific command. "; fn main() { env_logger::init().unwrap(); execute_main_without_stdin(execute, true, USAGE) } macro_rules! each_subcommand{ ($mac:ident) => ({ $mac!(bench); $mac!(build); $mac!(clean); $mac!(doc); $mac!(fetch); $mac!(generate_lockfile); $mac!(git_checkout); $mac!(help); $mac!(install); $mac!(locate_project); $mac!(login); $mac!(new); $mac!(owner); $mac!(package); $mac!(pkgid); $mac!(publish); $mac!(read_manifest); $mac!(run); $mac!(rustc); $mac!(rustdoc); $mac!(search); $mac!(test); $mac!(uninstall); $mac!(update); $mac!(verify_project); $mac!(version); $mac!(yank); }) } /** The top-level `cargo` command handles configuration and project location because they are fundamental (and intertwined). Other commands can rely on this top-level information. */ fn execute(flags: Flags, config: &Config) -> CliResult> { try!(config.shell().set_verbosity(flags.flag_verbose, flags.flag_quiet)); try!(config.shell().set_color_config(flags.flag_color.as_ref().map(|s| &s[..]))); init_git_transports(config); if flags.flag_list { println!("Installed Commands:"); for command in list_commands(config) { println!(" {}", command); }; return Ok(None) } let args = match &flags.arg_command[..] 
{ // For the commands `cargo` and `cargo help`, re-execute ourselves as // `cargo -h` so we can go through the normal process of printing the // help message. "" | "help" if flags.arg_args.is_empty() => { config.shell().set_verbose(true); let args = &["cargo".to_string(), "-h".to_string()]; let r = cargo::call_main_without_stdin(execute, config, USAGE, args, false); cargo::process_executed(r, &mut config.shell()); return Ok(None) } // For `cargo help -h` and `cargo help --help`, print out the help // message for `cargo help` "help" if flags.arg_args[0] == "-h" || flags.arg_args[0] == "--help" => { vec!["cargo".to_string(), "help".to_string(), "-h".to_string()] } // For `cargo help foo`, print out the usage message for the specified // subcommand by executing the command with the `-h` flag. "help" => vec!["cargo".to_string(), flags.arg_args[0].clone(), "-h".to_string()], // For all other invocations, we're of the form `cargo foo args...`. We // use the exact environment arguments to preserve tokens like `--` for // example. _ => env::args().collect(), }; macro_rules! cmd{ ($name:ident) => (if args[1] == stringify!($name).replace("_", "-") { mod $name; config.shell().set_verbose(true); let r = cargo::call_main_without_stdin($name::execute, config, $name::USAGE, &args, false); cargo::process_executed(r, &mut config.shell()); return Ok(None) }) } each_subcommand!(cmd); try!(execute_subcommand(config, &args[1], &args)); Ok(None) } fn find_closest(config: &Config, cmd: &str) -> Option { let cmds = list_commands(config); // Only consider candidates with a lev_distance of 3 or less so we don't // suggest out-of-the-blue options. 
let mut filtered = cmds.iter().map(|c| (lev_distance(&c, cmd), c)) .filter(|&(d, _)| d < 4) .collect::>(); filtered.sort_by(|a, b| a.0.cmp(&b.0)); filtered.get(0).map(|slot| slot.1.clone()) } fn execute_subcommand(config: &Config, cmd: &str, args: &[String]) -> CargoResult<()> { let command_exe = format!("cargo-{}{}", cmd, env::consts::EXE_SUFFIX); let path = search_directories(config) .iter() .map(|dir| dir.join(&command_exe)) .filter_map(|dir| fs::metadata(&dir).ok().map(|m| (dir, m))) .find(|&(_, ref meta)| is_executable(meta)); let command = match path { Some((command, _)) => command, None => { return Err(human(match find_closest(config, cmd) { Some(closest) => format!("no such subcommand\n\n\t\ Did you mean `{}`?\n", closest), None => "no such subcommand".to_string() })) } }; try!(util::process(&command).args(&args[1..]).exec()); Ok(()) } /// List all runnable commands. find_command should always succeed /// if given one of returned command. fn list_commands(config: &Config) -> BTreeSet { let prefix = "cargo-"; let suffix = env::consts::EXE_SUFFIX; let mut commands = BTreeSet::new(); for dir in search_directories(config) { let entries = match fs::read_dir(dir) { Ok(entries) => entries, _ => continue }; for entry in entries.filter_map(|e| e.ok()) { let path = entry.path(); let filename = match path.file_name().and_then(|s| s.to_str()) { Some(filename) => filename, _ => continue }; if !filename.starts_with(prefix) || !filename.ends_with(suffix) { continue } if let Ok(meta) = entry.metadata() { if is_executable(&meta) { let end = filename.len() - suffix.len(); commands.insert(filename[prefix.len()..end].to_string()); } } } } macro_rules! 
add_cmd { ($cmd:ident) => (commands.insert(stringify!($cmd).replace("_", "-"))) } each_subcommand!(add_cmd); commands } #[cfg(unix)] fn is_executable(metadata: &fs::Metadata) -> bool { use std::os::unix::prelude::*; metadata.is_file() && metadata.permissions().mode() & 0o111 != 0 } #[cfg(windows)] fn is_executable(metadata: &fs::Metadata) -> bool { metadata.is_file() } fn search_directories(config: &Config) -> Vec { let mut dirs = vec![config.home().join("bin")]; if let Some(val) = env::var_os("PATH") { dirs.extend(env::split_paths(&val)); } dirs } fn init_git_transports(config: &Config) { // Only use a custom transport if a proxy is configured, right now libgit2 // doesn't support proxies and we have to use a custom transport in this // case. The custom transport, however, is not as well battle-tested. match cargo::ops::http_proxy_exists(config) { Ok(true) => {} _ => return } let handle = match cargo::ops::http_handle(config) { Ok(handle) => handle, Err(..) => return, }; // The unsafety of the registration function derives from two aspects: // // 1. This call must be synchronized with all other registration calls as // well as construction of new transports. // 2. The argument is leaked. 
// // We're clear on point (1) because this is only called at the start of this // binary (we know what the state of the world looks like) and we're mostly // clear on point (2) because we'd only free it after everything is done // anyway unsafe { git2_curl::register(handle); } } cargo-0.8.0/src/bin/clean.rs000066400000000000000000000037331264656333200156210ustar00rootroot00000000000000use std::env; use cargo::ops; use cargo::util::{CliResult, Config}; use cargo::util::important_paths::{find_root_manifest_for_wd}; #[derive(RustcDecodable)] struct Options { flag_package: Vec, flag_target: Option, flag_manifest_path: Option, flag_verbose: bool, flag_quiet: bool, flag_color: Option, flag_release: bool, } pub const USAGE: &'static str = " Remove artifacts that cargo has generated in the past Usage: cargo clean [options] Options: -h, --help Print this message -p SPEC, --package SPEC ... Package to clean artifacts for --manifest-path PATH Path to the manifest to the package to clean --target TRIPLE Target triple to clean output for (default all) --release Whether or not to clean release artifacts -v, --verbose Use verbose output -q, --quiet No output printed to stdout --color WHEN Coloring: auto, always, never If the --package argument is given, then SPEC is a package id specification which indicates which package's artifacts should be cleaned out. If it is not given, then all packages' artifacts are removed. For more information on SPEC and its format, see the `cargo help pkgid` command. 
"; pub fn execute(options: Options, config: &Config) -> CliResult> { try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); debug!("executing; cmd=cargo-clean; args={:?}", env::args().collect::>()); let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); let opts = ops::CleanOptions { config: config, spec: &options.flag_package, target: options.flag_target.as_ref().map(|s| &s[..]), release: options.flag_release, }; try!(ops::clean(&root, &opts)); Ok(None) } cargo-0.8.0/src/bin/doc.rs000066400000000000000000000057321264656333200153050ustar00rootroot00000000000000use cargo::ops; use cargo::util::{CliResult, Config}; use cargo::util::important_paths::{find_root_manifest_for_wd}; #[derive(RustcDecodable)] struct Options { flag_target: Option, flag_features: Vec, flag_jobs: Option, flag_manifest_path: Option, flag_no_default_features: bool, flag_no_deps: bool, flag_open: bool, flag_verbose: bool, flag_release: bool, flag_quiet: bool, flag_color: Option, flag_package: Vec, } pub const USAGE: &'static str = " Build a package's documentation Usage: cargo doc [options] Options: -h, --help Print this message --open Opens the docs in a browser after the operation -p SPEC, --package SPEC ... Package to document --no-deps Don't build documentation for dependencies -j N, --jobs N The number of jobs to run in parallel --release Build artifacts in release mode, with optimizations --features FEATURES Space-separated list of features to also build --no-default-features Do not build the `default` feature --target TRIPLE Build for the target triple --manifest-path PATH Path to the manifest to document -v, --verbose Use verbose output -q, --quiet No output printed to stdout --color WHEN Coloring: auto, always, never By default the documentation for the local package and all dependencies is built. 
The output is all placed in `target/doc` in rustdoc's usual format. If the --package argument is given, then SPEC is a package id specification which indicates which package should be documented. If it is not given, then the current package is documented. For more information on SPEC and its format, see the `cargo help pkgid` command. "; pub fn execute(options: Options, config: &Config) -> CliResult> { try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); let doc_opts = ops::DocOptions { open_result: options.flag_open, compile_opts: ops::CompileOptions { config: config, jobs: options.flag_jobs, target: options.flag_target.as_ref().map(|t| &t[..]), features: &options.flag_features, no_default_features: options.flag_no_default_features, spec: &options.flag_package, exec_engine: None, filter: ops::CompileFilter::Everything, release: options.flag_release, mode: ops::CompileMode::Doc { deps: !options.flag_no_deps, }, target_rustc_args: None, target_rustdoc_args: None, }, }; try!(ops::doc(&root, &doc_opts)); Ok(None) } cargo-0.8.0/src/bin/fetch.rs000066400000000000000000000027121264656333200156240ustar00rootroot00000000000000use cargo::ops; use cargo::util::{CliResult, Config}; use cargo::util::important_paths::find_root_manifest_for_wd; #[derive(RustcDecodable)] struct Options { flag_manifest_path: Option, flag_verbose: bool, flag_quiet: bool, flag_color: Option, } pub const USAGE: &'static str = " Fetch dependencies of a package from the network. 
Usage: cargo fetch [options] Options: -h, --help Print this message --manifest-path PATH Path to the manifest to fetch dependencies for -v, --verbose Use verbose output -q, --quiet No output printed to stdout --color WHEN Coloring: auto, always, never If a lockfile is available, this command will ensure that all of the git dependencies and/or registries dependencies are downloaded and locally available. The network is never touched after a `cargo fetch` unless the lockfile changes. If the lockfile is not available, then this is the equivalent of `cargo generate-lockfile`. A lockfile is generated and dependencies are also all updated. "; pub fn execute(options: Options, config: &Config) -> CliResult> { try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); try!(ops::fetch(&root, config)); Ok(None) } cargo-0.8.0/src/bin/generate_lockfile.rs000066400000000000000000000022761264656333200202020ustar00rootroot00000000000000use std::env; use cargo::ops; use cargo::util::{CliResult, Config}; use cargo::util::important_paths::find_root_manifest_for_wd; #[derive(RustcDecodable)] struct Options { flag_manifest_path: Option, flag_verbose: bool, flag_quiet: bool, flag_color: Option, } pub const USAGE: &'static str = " Generate the lockfile for a project Usage: cargo generate-lockfile [options] Options: -h, --help Print this message --manifest-path PATH Path to the manifest to generate a lockfile for -v, --verbose Use verbose output -q, --quiet No output printed to stdout --color WHEN Coloring: auto, always, never "; pub fn execute(options: Options, config: &Config) -> CliResult> { debug!("executing; cmd=cargo-generate-lockfile; args={:?}", env::args().collect::>()); try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); 
try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); try!(ops::generate_lockfile(&root, config)); Ok(None) } cargo-0.8.0/src/bin/git_checkout.rs000066400000000000000000000031501264656333200172000ustar00rootroot00000000000000use cargo::core::source::{Source, SourceId, GitReference}; use cargo::sources::git::{GitSource}; use cargo::util::{Config, CliResult, CliError, human, ToUrl}; #[derive(RustcDecodable)] struct Options { flag_url: String, flag_reference: String, flag_verbose: bool, flag_quiet: bool, flag_color: Option, } pub const USAGE: &'static str = " Checkout a copy of a Git repository Usage: cargo git-checkout [options] --url=URL --reference=REF cargo git-checkout -h | --help Options: -h, --help Print this message -v, --verbose Use verbose output -q, --quiet No output printed to stdout --color WHEN Coloring: auto, always, never "; pub fn execute(options: Options, config: &Config) -> CliResult> { try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); let Options { flag_url: url, flag_reference: reference, .. } = options; let url = try!(url.to_url().map_err(|e| { human(format!("The URL `{}` you passed was \ not a valid URL: {}", url, e)) }) .map_err(|e| CliError::from_boxed(e, 1))); let reference = GitReference::Branch(reference.clone()); let source_id = SourceId::for_git(&url, reference); let mut source = GitSource::new(&source_id, config); try!(source.update().map_err(|e| { CliError::new(&format!("Couldn't update {:?}: {:?}", source, e), 1) })); Ok(None) } cargo-0.8.0/src/bin/help.rs000066400000000000000000000011161264656333200154600ustar00rootroot00000000000000use cargo::util::{CliResult, CliError, Config}; #[derive(RustcDecodable)] struct Options; pub const USAGE: &'static str = " Get some help with a cargo command. 
Usage: cargo help cargo help -h | --help Options: -h, --help Print this message "; pub fn execute(_: Options, _: &Config) -> CliResult> { // This is a dummy command just so that `cargo help help` works. // The actual delegation of help flag to subcommands is handled by the // cargo command. Err(CliError::new("Help command should not be executed directly.", 101)) } cargo-0.8.0/src/bin/install.rs000066400000000000000000000120301264656333200161730ustar00rootroot00000000000000use cargo::ops; use cargo::core::{SourceId, GitReference}; use cargo::util::{CliResult, Config, ToUrl, human}; #[derive(RustcDecodable)] struct Options { flag_jobs: Option, flag_features: Vec, flag_no_default_features: bool, flag_debug: bool, flag_bin: Vec, flag_example: Vec, flag_verbose: bool, flag_quiet: bool, flag_color: Option, flag_root: Option, flag_list: bool, arg_crate: Option, flag_vers: Option, flag_git: Option, flag_branch: Option, flag_tag: Option, flag_rev: Option, flag_path: Option, } pub const USAGE: &'static str = " Install a Rust binary Usage: cargo install [options] [] cargo install [options] --list Specifying what crate to install: --vers VERS Specify a version to install from crates.io --git URL Git URL to install the specified crate from --branch BRANCH Branch to use when installing from git --tag TAG Tag to use when installing from git --rev SHA Specific commit to use when installing from git --path PATH Filesystem path to local crate to install Build and install options: -h, --help Print this message -j N, --jobs N The number of jobs to run in parallel --features FEATURES Space-separated list of features to activate --no-default-features Do not build the `default` feature --debug Build in debug mode instead of release mode --bin NAME Only install the binary NAME --example EXAMPLE Install the example EXAMPLE instead of binaries --root DIR Directory to install packages into -v, --verbose Use verbose output -q, --quiet Less output printed to stdout --color WHEN Coloring: auto, 
always, never This command manages Cargo's local set of install binary crates. Only packages which have [[bin]] targets can be installed, and all binaries are installed into the installation root's `bin` folder. The installation root is determined, in order of precedence, by `--root`, `$CARGO_INSTALL_ROOT`, the `install.root` configuration key, and finally the home directory (which is either `$CARGO_HOME` if set or `$HOME/.cargo` by default). There are multiple sources from which a crate can be installed. The default location is crates.io but the `--git` and `--path` flags can change this source. If the source contains more than one package (such as crates.io or a git repository with multiple crates) the `` argument is required to indicate which crate should be installed. Crates from crates.io can optionally specify the version they wish to install via the `--vers` flags, and similarly packages from git repositories can optionally specify the branch, tag, or revision that should be installed. If a crate has multiple binaries, the `--bin` argument can selectively install only one of them, and if you'd rather install examples the `--example` argument can be used as well. The `--list` option will list all installed packages (and their versions). 
"; pub fn execute(options: Options, config: &Config) -> CliResult> { try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); let compile_opts = ops::CompileOptions { config: config, jobs: options.flag_jobs, target: None, features: &options.flag_features, no_default_features: options.flag_no_default_features, spec: &[], exec_engine: None, mode: ops::CompileMode::Build, release: !options.flag_debug, filter: ops::CompileFilter::new(false, &options.flag_bin, &[], &options.flag_example, &[]), target_rustc_args: None, target_rustdoc_args: None, }; let source = if let Some(url) = options.flag_git { let url = try!(url.to_url().map_err(human)); let gitref = if let Some(branch) = options.flag_branch { GitReference::Branch(branch) } else if let Some(tag) = options.flag_tag { GitReference::Tag(tag) } else if let Some(rev) = options.flag_rev { GitReference::Rev(rev) } else { GitReference::Branch("master".to_string()) }; SourceId::for_git(&url, gitref) } else if let Some(path) = options.flag_path { try!(SourceId::for_path(&config.cwd().join(path))) } else { try!(SourceId::for_central(config)) }; let krate = options.arg_crate.as_ref().map(|s| &s[..]); let vers = options.flag_vers.as_ref().map(|s| &s[..]); let root = options.flag_root.as_ref().map(|s| &s[..]); if options.flag_list { try!(ops::install_list(root, config)); } else { try!(ops::install(root, krate, &source, vers, &compile_opts)); } Ok(None) } cargo-0.8.0/src/bin/locate_project.rs000066400000000000000000000021531264656333200175270ustar00rootroot00000000000000use cargo::util::{CliResult, CliError, human, ChainError, Config}; use cargo::util::important_paths::{find_root_manifest_for_wd}; #[derive(RustcDecodable)] struct LocateProjectFlags { flag_manifest_path: Option, } pub const USAGE: &'static str = " Print a JSON representation of a Cargo.toml file's location Usage: cargo locate-project [options] Options: 
--manifest-path PATH Path to the manifest to locate -h, --help Print this message "; #[derive(RustcEncodable)] struct ProjectLocation { root: String } pub fn execute(flags: LocateProjectFlags, config: &Config) -> CliResult> { let root = try!(find_root_manifest_for_wd(flags.flag_manifest_path, config.cwd())); let string = try!(root.to_str() .chain_error(|| human("Your project path contains \ characters not representable in \ Unicode")) .map_err(|e| CliError::from_boxed(e, 1))); Ok(Some(ProjectLocation { root: string.to_string() })) } cargo-0.8.0/src/bin/login.rs000066400000000000000000000034321264656333200156430ustar00rootroot00000000000000use std::io::prelude::*; use std::io; use cargo::ops; use cargo::core::{SourceId, Source}; use cargo::sources::RegistrySource; use cargo::util::{CliResult, Config, human, ChainError}; #[derive(RustcDecodable)] struct Options { flag_host: Option, arg_token: Option, flag_verbose: bool, flag_quiet: bool, flag_color: Option, } pub const USAGE: &'static str = " Save an api token from the registry locally Usage: cargo login [options] [] Options: -h, --help Print this message --host HOST Host to set the token for -v, --verbose Use verbose output -q, --quiet No output printed to stdout --color WHEN Coloring: auto, always, never "; pub fn execute(options: Options, config: &Config) -> CliResult> { try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); let token = match options.arg_token.clone() { Some(token) => token, None => { let src = try!(SourceId::for_central(config)); let mut src = RegistrySource::new(&src, config); try!(src.update()); let config = try!(src.config()); let host = options.flag_host.clone().unwrap_or(config.api); println!("please visit {}me and paste the API Token below", host); let mut line = String::new(); let input = io::stdin(); try!(input.lock().read_line(&mut line).chain_error(|| { human("failed to read 
stdin") })); line } }; let token = token.trim().to_string(); try!(ops::registry_login(config, token)); Ok(None) } cargo-0.8.0/src/bin/new.rs000066400000000000000000000031611264656333200153230ustar00rootroot00000000000000use std::env; use cargo::ops; use cargo::util::{CliResult, Config}; #[derive(RustcDecodable)] struct Options { flag_verbose: bool, flag_quiet: bool, flag_color: Option, flag_bin: bool, arg_path: String, flag_name: Option, flag_vcs: Option, } pub const USAGE: &'static str = " Create a new cargo package at Usage: cargo new [options] cargo new -h | --help Options: -h, --help Print this message --vcs VCS Initialize a new repository for the given version control system (git or hg) or do not initialize any version control at all (none) overriding a global configuration. --bin Use a binary instead of a library template --name NAME Set the resulting package name -v, --verbose Use verbose output -q, --quiet No output printed to stdout --color WHEN Coloring: auto, always, never "; pub fn execute(options: Options, config: &Config) -> CliResult> { debug!("executing; cmd=cargo-new; args={:?}", env::args().collect::>()); try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); let Options { flag_bin, arg_path, flag_name, flag_vcs, .. 
} = options; let opts = ops::NewOptions { version_control: flag_vcs, bin: flag_bin, path: &arg_path, name: flag_name.as_ref().map(|s| s.as_ref()), }; try!(ops::new(opts, config)); Ok(None) } cargo-0.8.0/src/bin/owner.rs000066400000000000000000000036351264656333200156720ustar00rootroot00000000000000use cargo::ops; use cargo::util::{CliResult, Config}; #[derive(RustcDecodable)] struct Options { arg_crate: Option, flag_token: Option, flag_add: Option>, flag_remove: Option>, flag_index: Option, flag_verbose: bool, flag_quiet: bool, flag_color: Option, flag_list: bool, } pub const USAGE: &'static str = " Manage the owners of a crate on the registry Usage: cargo owner [options] [] Options: -h, --help Print this message -a, --add LOGIN Name of a user or team to add as an owner -r, --remove LOGIN Name of a user or team to remove as an owner -l, --list List owners of a crate --index INDEX Registry index to modify owners for --token TOKEN API token to use when authenticating -v, --verbose Use verbose output -q, --quiet No output printed to stdout --color WHEN Coloring: auto, always, never This command will modify the owners for a package on the specified registry (or default). Note that owners of a package can upload new versions, yank old versions. Explicitly named owners can also modify the set of owners, so take caution! See http://doc.crates.io/crates-io.html#cargo-owner for detailed documentation and troubleshooting. 
"; pub fn execute(options: Options, config: &Config) -> CliResult> { try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); let opts = ops::OwnersOptions { krate: options.arg_crate, token: options.flag_token, index: options.flag_index, to_add: options.flag_add, to_remove: options.flag_remove, list: options.flag_list, }; try!(ops::modify_owners(config, &opts)); Ok(None) } cargo-0.8.0/src/bin/package.rs000066400000000000000000000027661264656333200161370ustar00rootroot00000000000000use cargo::ops; use cargo::util::{CliResult, Config}; use cargo::util::important_paths::find_root_manifest_for_wd; #[derive(RustcDecodable)] struct Options { flag_verbose: bool, flag_quiet: bool, flag_color: Option, flag_manifest_path: Option, flag_no_verify: bool, flag_no_metadata: bool, flag_list: bool, } pub const USAGE: &'static str = " Assemble the local package into a distributable tarball Usage: cargo package [options] Options: -h, --help Print this message -l, --list Print files included in a package without making one --no-verify Don't verify the contents by building them --no-metadata Ignore warnings about a lack of human-usable metadata --manifest-path PATH Path to the manifest to compile -v, --verbose Use verbose output -q, --quiet No output printed to stdout --color WHEN Coloring: auto, always, never "; pub fn execute(options: Options, config: &Config) -> CliResult> { try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); try!(ops::package(&root, config, !options.flag_no_verify, options.flag_list, !options.flag_no_metadata)); Ok(None) } cargo-0.8.0/src/bin/pkgid.rs000066400000000000000000000042261264656333200156330ustar00rootroot00000000000000use cargo::ops; use 
cargo::util::{CliResult, Config}; use cargo::util::important_paths::{find_root_manifest_for_wd}; #[derive(RustcDecodable)] struct Options { flag_verbose: bool, flag_quiet: bool, flag_color: Option, flag_manifest_path: Option, arg_spec: Option, } pub const USAGE: &'static str = " Print a fully qualified package specification Usage: cargo pkgid [options] [] Options: -h, --help Print this message --manifest-path PATH Path to the manifest to the package to clean -v, --verbose Use verbose output -q, --quiet No output printed to stdout --color WHEN Coloring: auto, always, never Given a argument, print out the fully qualified package id specifier. This command will generate an error if is ambiguous as to which package it refers to in the dependency graph. If no is given, then the pkgid for the local package is printed. This command requires that a lockfile is available and dependencies have been fetched. Example Package IDs pkgid | name | version | url |-----------------------------|--------|-----------|---------------------| foo | foo | * | * foo:1.2.3 | foo | 1.2.3 | * crates.io/foo | foo | * | *://crates.io/foo crates.io/foo#1.2.3 | foo | 1.2.3 | *://crates.io/foo crates.io/bar#foo:1.2.3 | foo | 1.2.3 | *://crates.io/bar http://crates.io/foo#1.2.3 | foo | 1.2.3 | http://crates.io/foo "; pub fn execute(options: Options, config: &Config) -> CliResult> { try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); let root = try!(find_root_manifest_for_wd(options.flag_manifest_path.clone(), config.cwd())); let spec = options.arg_spec.as_ref().map(|s| &s[..]); let spec = try!(ops::pkgid(&root, spec, config)); println!("{}", spec); Ok(None) } cargo-0.8.0/src/bin/publish.rs000066400000000000000000000030061264656333200161760ustar00rootroot00000000000000use cargo::ops; use cargo::util::{CliResult, Config}; use cargo::util::important_paths::find_root_manifest_for_wd; 
#[derive(RustcDecodable)] struct Options { flag_host: Option, flag_token: Option, flag_manifest_path: Option, flag_verbose: bool, flag_quiet: bool, flag_color: Option, flag_no_verify: bool, } pub const USAGE: &'static str = " Upload a package to the registry Usage: cargo publish [options] Options: -h, --help Print this message --host HOST Host to upload the package to --token TOKEN Token to use when uploading --no-verify Don't verify package tarball before publish --manifest-path PATH Path to the manifest of the package to publish -v, --verbose Use verbose output -q, --quiet No output printed to stdout --color WHEN Coloring: auto, always, never "; pub fn execute(options: Options, config: &Config) -> CliResult> { try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); let Options { flag_token: token, flag_host: host, flag_manifest_path, flag_no_verify: no_verify, .. } = options; let root = try!(find_root_manifest_for_wd(flag_manifest_path.clone(), config.cwd())); try!(ops::publish(&root, config, token, host, !no_verify)); Ok(None) } cargo-0.8.0/src/bin/read_manifest.rs000066400000000000000000000023141264656333200173320ustar00rootroot00000000000000use std::env; use cargo::core::{Package, Source}; use cargo::util::{CliResult, Config}; use cargo::util::important_paths::{find_root_manifest_for_wd}; use cargo::sources::{PathSource}; #[derive(RustcDecodable)] struct Options { flag_manifest_path: Option, flag_color: Option, } pub const USAGE: &'static str = " Print a JSON representation of a Cargo.toml manifest Usage: cargo read-manifest [options] cargo read-manifest -h | --help Options: -h, --help Print this message -v, --verbose Use verbose output --manifest-path PATH Path to the manifest --color WHEN Coloring: auto, always, never "; pub fn execute(options: Options, config: &Config) -> CliResult> { debug!("executing; cmd=cargo-read-manifest; args={:?}", 
env::args().collect::>()); try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); let mut source = try!(PathSource::for_path(root.parent().unwrap(), config)); try!(source.update()); let pkg = try!(source.root_package()); Ok(Some(pkg)) } cargo-0.8.0/src/bin/run.rs000066400000000000000000000067011264656333200153410ustar00rootroot00000000000000use cargo::ops; use cargo::util::{CliResult, CliError, Config, Human}; use cargo::util::important_paths::{find_root_manifest_for_wd}; #[derive(RustcDecodable)] struct Options { flag_bin: Option, flag_example: Option, flag_jobs: Option, flag_features: Vec, flag_no_default_features: bool, flag_target: Option, flag_manifest_path: Option, flag_verbose: bool, flag_quiet: bool, flag_color: Option, flag_release: bool, arg_args: Vec, } pub const USAGE: &'static str = " Run the main binary of the local package (src/main.rs) Usage: cargo run [options] [--] [...] Options: -h, --help Print this message --bin NAME Name of the bin target to run --example NAME Name of the example target to run -j N, --jobs N The number of jobs to run in parallel --release Build artifacts in release mode, with optimizations --features FEATURES Space-separated list of features to also build --no-default-features Do not build the `default` feature --target TRIPLE Build for the target triple --manifest-path PATH Path to the manifest to execute -v, --verbose Use verbose output -q, --quiet No output printed to stdout --color WHEN Coloring: auto, always, never If neither `--bin` nor `--example` are given, then if the project only has one bin target it will be run. Otherwise `--bin` specifies the bin target to run, and `--example` specifies the example target to run. At most one of `--bin` or `--example` can be provided. All of the trailing arguments are passed to the binary to run. 
If you're passing arguments to both Cargo and the binary, the ones after `--` go to the binary, the ones before go to Cargo. "; pub fn execute(options: Options, config: &Config) -> CliResult> { try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); let (mut examples, mut bins) = (Vec::new(), Vec::new()); if let Some(s) = options.flag_bin { bins.push(s); } if let Some(s) = options.flag_example { examples.push(s); } let compile_opts = ops::CompileOptions { config: config, jobs: options.flag_jobs, target: options.flag_target.as_ref().map(|t| &t[..]), features: &options.flag_features, no_default_features: options.flag_no_default_features, spec: &[], exec_engine: None, release: options.flag_release, mode: ops::CompileMode::Build, filter: if examples.is_empty() && bins.is_empty() { ops::CompileFilter::Everything } else { ops::CompileFilter::Only { lib: false, tests: &[], benches: &[], bins: &bins, examples: &examples, } }, target_rustdoc_args: None, target_rustc_args: None, }; match try!(ops::run(&root, &compile_opts, &options.arg_args)) { None => Ok(None), Some(err) => { Err(match err.exit.as_ref().and_then(|e| e.code()) { Some(code) => CliError::from_error(Human(err), code), None => CliError::from_error(err, 101), }) } } } cargo-0.8.0/src/bin/rustc.rs000066400000000000000000000104511264656333200156720ustar00rootroot00000000000000use std::env; use cargo::ops::{CompileOptions, CompileMode}; use cargo::ops; use cargo::util::important_paths::{find_root_manifest_for_wd}; use cargo::util::{CliResult, CliError, Config}; #[derive(RustcDecodable)] struct Options { arg_opts: Option>, flag_package: Option, flag_jobs: Option, flag_features: Vec, flag_no_default_features: bool, flag_target: Option, flag_manifest_path: Option, flag_verbose: bool, flag_quiet: bool, flag_color: Option, 
flag_release: bool, flag_lib: bool, flag_bin: Vec, flag_example: Vec, flag_test: Vec, flag_bench: Vec, flag_profile: Option, } pub const USAGE: &'static str = " Compile a package and all of its dependencies Usage: cargo rustc [options] [--] [...] Options: -h, --help Print this message -p SPEC, --package SPEC The profile to compile for -j N, --jobs N The number of jobs to run in parallel --lib Build only this package's library --bin NAME Build only the specified binary --example NAME Build only the specified example --test NAME Build only the specified test target --bench NAME Build only the specified benchmark target --release Build artifacts in release mode, with optimizations --profile PROFILE Profile to build the selected target for --features FEATURES Features to compile for the package --no-default-features Do not compile default features for the package --target TRIPLE Target triple which compiles will be for --manifest-path PATH Path to the manifest to fetch dependencies for -v, --verbose Use verbose output -q, --quiet No output printed to stdout --color WHEN Coloring: auto, always, never The specified target for the current package (or package specified by SPEC if provided) will be compiled along with all of its dependencies. The specified ... will all be passed to the final compiler invocation, not any of the dependencies. Note that the compiler will still unconditionally receive arguments such as -L, --extern, and --crate-type, and the specified ... will simply be added to the compiler invocation. This command requires that only one target is being compiled. If more than one target is available for the current package the filters of --lib, --bin, etc, must be used to select which target is compiled. 
"; pub fn execute(options: Options, config: &Config) -> CliResult> { debug!("executing; cmd=cargo-rustc; args={:?}", env::args().collect::>()); try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); let mode = match options.flag_profile.as_ref().map(|t| &t[..]) { Some("dev") | None => CompileMode::Build, Some("test") => CompileMode::Test, Some("bench") => CompileMode::Bench, Some(mode) => { return Err(CliError::new(&format!("unknown profile: `{}`, use dev, test, or bench", mode), 101)) } }; let opts = CompileOptions { config: config, jobs: options.flag_jobs, target: options.flag_target.as_ref().map(|t| &t[..]), features: &options.flag_features, no_default_features: options.flag_no_default_features, spec: &options.flag_package.map_or(Vec::new(), |s| vec![s]), exec_engine: None, mode: mode, release: options.flag_release, filter: ops::CompileFilter::new(options.flag_lib, &options.flag_bin, &options.flag_test, &options.flag_example, &options.flag_bench), target_rustdoc_args: None, target_rustc_args: options.arg_opts.as_ref().map(|a| &a[..]), }; try!(ops::compile(&root, &opts)); Ok(None) } cargo-0.8.0/src/bin/rustdoc.rs000066400000000000000000000076671264656333200162340ustar00rootroot00000000000000use cargo::ops; use cargo::util::{CliResult, Config}; use cargo::util::important_paths::{find_root_manifest_for_wd}; #[derive(RustcDecodable)] struct Options { arg_opts: Vec, flag_target: Option, flag_features: Vec, flag_jobs: Option, flag_manifest_path: Option, flag_no_default_features: bool, flag_open: bool, flag_verbose: bool, flag_release: bool, flag_quiet: bool, flag_color: Option, flag_package: Option, flag_lib: bool, flag_bin: Vec, flag_example: Vec, flag_test: Vec, flag_bench: Vec, } pub const USAGE: &'static str = " Build a package's documentation, using specified custom flags. 
Usage: cargo rustdoc [options] [--] [...] Options: -h, --help Print this message --open Opens the docs in a browser after the operation -p SPEC, --package SPEC Package to document -j N, --jobs N The number of jobs to run in parallel --lib Build only this package's library --bin NAME Build only the specified binary --example NAME Build only the specified example --test NAME Build only the specified test target --bench NAME Build only the specified benchmark target --release Build artifacts in release mode, with optimizations --features FEATURES Space-separated list of features to also build --no-default-features Do not build the `default` feature --target TRIPLE Build for the target triple --manifest-path PATH Path to the manifest to document -v, --verbose Use verbose output -q, --quiet No output printed to stdout --color WHEN Coloring: auto, always, never The specified target for the current package (or package specified by SPEC if provided) will be documented with the specified ... being passed to the final rustdoc invocation. Dependencies will not be documented as part of this command. Note that rustdoc will still unconditionally receive arguments such as -L, --extern, and --crate-type, and the specified ... will simply be added to the rustdoc invocation. If the --package argument is given, then SPEC is a package id specification which indicates which package should be documented. If it is not given, then the current package is documented. For more information on SPEC and its format, see the `cargo help pkgid` command. 
"; pub fn execute(options: Options, config: &Config) -> CliResult> { try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); let doc_opts = ops::DocOptions { open_result: options.flag_open, compile_opts: ops::CompileOptions { config: config, jobs: options.flag_jobs, target: options.flag_target.as_ref().map(|t| &t[..]), features: &options.flag_features, no_default_features: options.flag_no_default_features, spec: &options.flag_package.map_or(Vec::new(), |s| vec![s]), exec_engine: None, release: options.flag_release, filter: ops::CompileFilter::new(options.flag_lib, &options.flag_bin, &options.flag_test, &options.flag_example, &options.flag_bench), mode: ops::CompileMode::Doc { deps: false }, target_rustdoc_args: Some(&options.arg_opts), target_rustc_args: None, }, }; try!(ops::doc(&root, &doc_opts)); Ok(None) } cargo-0.8.0/src/bin/search.rs000066400000000000000000000020451264656333200157770ustar00rootroot00000000000000use cargo::ops; use cargo::util::{CliResult, Config}; #[derive(RustcDecodable)] struct Options { flag_host: Option, flag_verbose: bool, flag_quiet: bool, flag_color: Option, arg_query: String } pub const USAGE: &'static str = " Search packages in crates.io Usage: cargo search [options] cargo search [-h | --help] Options: -h, --help Print this message --host HOST Host of a registry to search in -v, --verbose Use verbose output -q, --quiet No output printed to stdout --color WHEN Coloring: auto, always, never "; pub fn execute(options: Options, config: &Config) -> CliResult> { try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); let Options { flag_host: host, arg_query: query, .. 
} = options; try!(ops::search(&query, config, host)); Ok(None) } cargo-0.8.0/src/bin/test.rs000066400000000000000000000110461264656333200155120ustar00rootroot00000000000000use cargo::ops; use cargo::util::{CliResult, CliError, Human, Config}; use cargo::util::important_paths::{find_root_manifest_for_wd}; #[derive(RustcDecodable)] struct Options { arg_args: Vec, flag_features: Vec, flag_jobs: Option, flag_manifest_path: Option, flag_no_default_features: bool, flag_no_run: bool, flag_package: Vec, flag_target: Option, flag_lib: bool, flag_bin: Vec, flag_example: Vec, flag_test: Vec, flag_bench: Vec, flag_verbose: bool, flag_quiet: bool, flag_color: Option, flag_release: bool, flag_no_fail_fast: bool, } pub const USAGE: &'static str = " Execute all unit and integration tests of a local package Usage: cargo test [options] [--] [...] Options: -h, --help Print this message --lib Test only this package's library --bin NAME Test only the specified binary --example NAME Test only the specified example --test NAME Test only the specified integration test target --bench NAME Test only the specified benchmark target --no-run Compile, but don't run tests -p SPEC, --package SPEC ... Package to run tests for -j N, --jobs N The number of jobs to run in parallel --release Build artifacts in release mode, with optimizations --features FEATURES Space-separated list of features to also build --no-default-features Do not build the `default` feature --target TRIPLE Build for the target triple --manifest-path PATH Path to the manifest to build tests for -v, --verbose Use verbose output -q, --quiet No output printed to stdout --color WHEN Coloring: auto, always, never --no-fail-fast Run all tests regardless of failure All of the trailing arguments are passed to the test binaries generated for filtering tests and generally providing options configuring how they run. 
For example, this will run all tests with the name `foo` in their name: cargo test foo If the --package argument is given, then SPEC is a package id specification which indicates which package should be tested. If it is not given, then the current package is tested. For more information on SPEC and its format, see the `cargo help pkgid` command. The --jobs argument affects the building of the test executable but does not affect how many jobs are used when running the tests. Compilation can be configured via the `test` profile in the manifest. By default the rust test harness hides output from test execution to keep results readable. Test output can be recovered (e.g. for debugging) by passing `--nocapture` to the test binaries: cargo test -- --nocapture "; pub fn execute(options: Options, config: &Config) -> CliResult> { let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); let ops = ops::TestOptions { no_run: options.flag_no_run, no_fail_fast: options.flag_no_fail_fast, compile_opts: ops::CompileOptions { config: config, jobs: options.flag_jobs, target: options.flag_target.as_ref().map(|s| &s[..]), features: &options.flag_features, no_default_features: options.flag_no_default_features, spec: &options.flag_package, exec_engine: None, release: options.flag_release, mode: ops::CompileMode::Test, filter: ops::CompileFilter::new(options.flag_lib, &options.flag_bin, &options.flag_test, &options.flag_example, &options.flag_bench), target_rustdoc_args: None, target_rustc_args: None, }, }; let err = try!(ops::run_tests(&root, &ops, &options.arg_args)); match err { None => Ok(None), Some(err) => { Err(match err.exit.as_ref().and_then(|e| e.code()) { Some(i) => CliError::new("", i), None => CliError::from_error(Human(err), 101) }) } } } 
cargo-0.8.0/src/bin/uninstall.rs000066400000000000000000000026101264656333200165410ustar00rootroot00000000000000use cargo::ops; use cargo::util::{CliResult, Config}; #[derive(RustcDecodable)] struct Options { flag_bin: Vec, flag_root: Option, flag_verbose: bool, flag_quiet: bool, flag_color: Option, arg_spec: String, } pub const USAGE: &'static str = " Remove a Rust binary Usage: cargo uninstall [options] cargo uninstall (-h | --help) Options: -h, --help Print this message --root DIR Directory to uninstall packages from --bin NAME Only uninstall the binary NAME -v, --verbose Use verbose output -q, --quiet Less output printed to stdout --color WHEN Coloring: auto, always, never The argument SPEC is a package id specification (see `cargo help pkgid`) to specify which crate should be uninstalled. By default all binaries are uninstalled for a crate but the `--bin` and `--example` flags can be used to only uninstall particular binaries. "; pub fn execute(options: Options, config: &Config) -> CliResult> { try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); let root = options.flag_root.as_ref().map(|s| &s[..]); try!(ops::uninstall(root, &options.arg_spec, &options.flag_bin, config)); Ok(None) } cargo-0.8.0/src/bin/update.rs000066400000000000000000000051731264656333200160210ustar00rootroot00000000000000use std::env; use cargo::ops; use cargo::util::{CliResult, Config}; use cargo::util::important_paths::find_root_manifest_for_wd; #[derive(RustcDecodable)] struct Options { flag_package: Vec, flag_aggressive: bool, flag_precise: Option, flag_manifest_path: Option, flag_verbose: bool, flag_quiet: bool, flag_color: Option, } pub const USAGE: &'static str = " Update dependencies as recorded in the local lock file. Usage: cargo update [options] Options: -h, --help Print this message -p SPEC, --package SPEC ... 
Package to update --aggressive Force updating all dependencies of as well --precise PRECISE Update a single dependency to exactly PRECISE --manifest-path PATH Path to the crate's manifest -v, --verbose Use verbose output -q, --quiet No output printed to stdout --color WHEN Coloring: auto, always, never This command requires that a `Cargo.lock` already exists as generated by `cargo build` or related commands. If SPEC is given, then a conservative update of the lockfile will be performed. This means that only the dependency specified by SPEC will be updated. Its transitive dependencies will be updated only if SPEC cannot be updated without updating dependencies. All other dependencies will remain locked at their currently recorded versions. If PRECISE is specified, then --aggressive must not also be specified. The argument PRECISE is a string representing a precise revision that the package being updated should be updated to. For example, if the package comes from a git repository, then PRECISE would be the exact revision that the repository should be updated to. If SPEC is not given, then all dependencies will be re-resolved and updated. For more information about package id specifications, see `cargo help pkgid`. 
"; pub fn execute(options: Options, config: &Config) -> CliResult> { debug!("executing; cmd=cargo-update; args={:?}", env::args().collect::>()); try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); let update_opts = ops::UpdateOptions { aggressive: options.flag_aggressive, precise: options.flag_precise.as_ref().map(|s| &s[..]), to_update: &options.flag_package, config: config, }; try!(ops::update_lockfile(&root, &update_opts)); Ok(None) } cargo-0.8.0/src/bin/verify_project.rs000066400000000000000000000037771264656333200176010ustar00rootroot00000000000000use std::collections::HashMap; use std::fs::File; use std::io::prelude::*; use std::process; use cargo::util::important_paths::{find_root_manifest_for_wd}; use cargo::util::{CliResult, Config}; use rustc_serialize::json; use toml; pub type Error = HashMap; #[derive(RustcDecodable)] struct Flags { flag_manifest_path: Option, flag_verbose: bool, flag_quiet: bool, flag_color: Option, } pub const USAGE: &'static str = " Check correctness of crate manifest Usage: cargo verify-project [options] cargo verify-project -h | --help Options: -h, --help Print this message --manifest-path PATH Path to the manifest to verify -v, --verbose Use verbose output -q, --quiet No output printed to stdout --color WHEN Coloring: auto, always, never "; pub fn execute(args: Flags, config: &Config) -> CliResult> { try!(config.shell().set_verbosity(args.flag_verbose, args.flag_quiet)); try!(config.shell().set_color_config(args.flag_color.as_ref().map(|s| &s[..]))); let mut contents = String::new(); let filename = args.flag_manifest_path.unwrap_or("Cargo.toml".into()); let filename = match find_root_manifest_for_wd(Some(filename), config.cwd()) { Ok(manifest_path) => manifest_path, Err(e) => fail("invalid", &e.to_string()), }; let file = 
File::open(&filename); match file.and_then(|mut f| f.read_to_string(&mut contents)) { Ok(_) => {}, Err(e) => fail("invalid", &format!("error reading file: {}", e)) }; match toml::Parser::new(&contents).parse() { None => fail("invalid", "invalid-format"), Some(..) => {} }; let mut h = HashMap::new(); h.insert("success".to_string(), "true".to_string()); Ok(Some(h)) } fn fail(reason: &str, value: &str) -> ! { let mut h = HashMap::new(); h.insert(reason.to_string(), value.to_string()); println!("{}", json::encode(&h).unwrap()); process::exit(1) } cargo-0.8.0/src/bin/version.rs000066400000000000000000000011051264656333200162130ustar00rootroot00000000000000use std::env; use cargo; use cargo::util::{CliResult, Config}; #[derive(RustcDecodable)] struct Options; pub const USAGE: &'static str = " Show version information Usage: cargo version [options] Options: -h, --help Print this message -v, --verbose Use verbose output --color WHEN Coloring: auto, always, never "; pub fn execute(_: Options, _: &Config) -> CliResult> { debug!("executing; cmd=cargo-version; args={:?}", env::args().collect::>()); println!("{}", cargo::version()); Ok(None) } cargo-0.8.0/src/bin/yank.rs000066400000000000000000000033661264656333200155030ustar00rootroot00000000000000use cargo::ops; use cargo::util::{CliResult, Config}; #[derive(RustcDecodable)] struct Options { arg_crate: Option, flag_token: Option, flag_vers: Option, flag_index: Option, flag_verbose: bool, flag_quiet: bool, flag_color: Option, flag_undo: bool, } pub static USAGE: &'static str = " Remove a pushed crate from the index Usage: cargo yank [options] [] Options: -h, --help Print this message --vers VERSION The version to yank or un-yank --undo Undo a yank, putting a version back into the index --index INDEX Registry index to yank from --token TOKEN API token to use when authenticating -v, --verbose Use verbose output -q, --quiet No output printed to stdout --color WHEN Coloring: auto, always, never The yank command removes a 
previously pushed crate's version from the server's index. This command does not delete any data, and the crate will still be available for download via the registry's download link. Note that existing crates locked to a yanked version will still be able to download the yanked version to use it. Cargo will, however, not allow any new crates to be locked to any yanked version. "; pub fn execute(options: Options, config: &Config) -> CliResult> { try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); try!(ops::yank(config, options.arg_crate, options.flag_vers, options.flag_token, options.flag_index, options.flag_undo)); Ok(None) } cargo-0.8.0/src/cargo/000077500000000000000000000000001264656333200145065ustar00rootroot00000000000000cargo-0.8.0/src/cargo/core/000077500000000000000000000000001264656333200154365ustar00rootroot00000000000000cargo-0.8.0/src/cargo/core/dependency.rs000066400000000000000000000210371264656333200201250ustar00rootroot00000000000000use semver::VersionReq; use rustc_serialize::{Encoder, Encodable}; use core::{SourceId, Summary, PackageId}; use std::rc::Rc; use util::CargoResult; /// The data underlying a Dependency. #[derive(PartialEq,Clone,Debug)] pub struct DependencyInner { name: String, source_id: SourceId, req: VersionReq, specified_req: Option, kind: Kind, only_match_name: bool, optional: bool, default_features: bool, features: Vec, // This dependency should be used only for this platform. // `None` means *all platforms*. only_for_platform: Option, } /// Information about a dependency requested by a Cargo manifest. /// Cheap to copy. 
#[derive(PartialEq,Clone,Debug)] pub struct Dependency { inner: Rc, } #[derive(RustcEncodable)] struct SerializedDependency<'a> { name: &'a str, source: &'a SourceId, req: String, kind: Kind, optional: bool, uses_default_features: bool, features: &'a [String], target: &'a Option<&'a str>, } impl Encodable for Dependency { fn encode(&self, s: &mut S) -> Result<(), S::Error> { SerializedDependency { name: self.name(), source: &self.source_id(), req: self.version_req().to_string(), kind: self.kind(), optional: self.is_optional(), uses_default_features: self.uses_default_features(), features: self.features(), target: &self.only_for_platform(), }.encode(s) } } #[derive(PartialEq, Clone, Debug, Copy)] pub enum Kind { Normal, Development, Build, } impl Encodable for Kind { fn encode(&self, s: &mut S) -> Result<(), S::Error> { match *self { Kind::Normal => None, Kind::Development => Some("dev"), Kind::Build => Some("build"), }.encode(s) } } impl DependencyInner { /// Attempt to create a `Dependency` from an entry in the manifest. pub fn parse(name: &str, version: Option<&str>, source_id: &SourceId) -> CargoResult { let version_req = match version { Some(v) => try!(VersionReq::parse(v)), None => VersionReq::any() }; Ok(DependencyInner { only_match_name: false, req: version_req, specified_req: version.map(|s| s.to_string()), .. 
DependencyInner::new_override(name, source_id) }) } pub fn new_override(name: &str, source_id: &SourceId) -> DependencyInner { DependencyInner { name: name.to_string(), source_id: source_id.clone(), req: VersionReq::any(), kind: Kind::Normal, only_match_name: true, optional: false, features: Vec::new(), default_features: true, specified_req: None, only_for_platform: None, } } pub fn version_req(&self) -> &VersionReq { &self.req } pub fn name(&self) -> &str { &self.name } pub fn source_id(&self) -> &SourceId { &self.source_id } pub fn kind(&self) -> Kind { self.kind } pub fn specified_req(&self) -> Option<&str> { self.specified_req.as_ref().map(|s| &s[..]) } /// If none, this dependencies must be built for all platforms. /// If some, it must only be built for the specified platform. pub fn only_for_platform(&self) -> Option<&str> { self.only_for_platform.as_ref().map(|s| &s[..]) } pub fn set_kind(mut self, kind: Kind) -> DependencyInner { self.kind = kind; self } /// Sets the list of features requested for the package. pub fn set_features(mut self, features: Vec) -> DependencyInner { self.features = features; self } /// Sets whether the dependency requests default features of the package. pub fn set_default_features(mut self, default_features: bool) -> DependencyInner { self.default_features = default_features; self } /// Sets whether the dependency is optional. 
pub fn set_optional(mut self, optional: bool) -> DependencyInner { self.optional = optional; self } /// Set the source id for this dependency pub fn set_source_id(mut self, id: SourceId) -> DependencyInner { self.source_id = id; self } /// Set the version requirement for this dependency pub fn set_version_req(mut self, req: VersionReq) -> DependencyInner { self.req = req; self } pub fn set_only_for_platform(mut self, platform: Option) -> DependencyInner { self.only_for_platform = platform; self } /// Lock this dependency to depending on the specified package id pub fn lock_to(self, id: &PackageId) -> DependencyInner { assert_eq!(self.source_id, *id.source_id()); assert!(self.req.matches(id.version())); self.set_version_req(VersionReq::exact(id.version())) .set_source_id(id.source_id().clone()) } /// Returns false if the dependency is only used to build the local package. pub fn is_transitive(&self) -> bool { match self.kind { Kind::Normal | Kind::Build => true, Kind::Development => false, } } pub fn is_build(&self) -> bool { match self.kind { Kind::Build => true, _ => false } } pub fn is_optional(&self) -> bool { self.optional } /// Returns true if the default features of the dependency are requested. pub fn uses_default_features(&self) -> bool { self.default_features } /// Returns the list of features that are requested by the dependency. pub fn features(&self) -> &[String] { &self.features } /// Returns true if the package (`sum`) can fulfill this dependency request. pub fn matches(&self, sum: &Summary) -> bool { self.matches_id(sum.package_id()) } /// Returns true if the package (`id`) can fulfill this dependency request. pub fn matches_id(&self, id: &PackageId) -> bool { self.name == id.name() && (self.only_match_name || (self.req.matches(id.version()) && &self.source_id == id.source_id())) } pub fn into_dependency(self) -> Dependency { Dependency {inner: Rc::new(self)} } } impl Dependency { /// Attempt to create a `Dependency` from an entry in the manifest. 
pub fn parse(name: &str, version: Option<&str>, source_id: &SourceId) -> CargoResult { DependencyInner::parse(name, version, source_id).map(|di| { di.into_dependency() }) } pub fn new_override(name: &str, source_id: &SourceId) -> Dependency { DependencyInner::new_override(name, source_id).into_dependency() } pub fn clone_inner(&self) -> DependencyInner { (*self.inner).clone() } pub fn version_req(&self) -> &VersionReq { self.inner.version_req() } pub fn name(&self) -> &str { self.inner.name() } pub fn source_id(&self) -> &SourceId { self.inner.source_id() } pub fn kind(&self) -> Kind { self.inner.kind() } pub fn specified_req(&self) -> Option<&str> { self.inner.specified_req() } /// If none, this dependencies must be built for all platforms. /// If some, it must only be built for the specified platform. pub fn only_for_platform(&self) -> Option<&str> { self.inner.only_for_platform() } /// Lock this dependency to depending on the specified package id pub fn lock_to(self, id: &PackageId) -> Dependency { self.clone_inner().lock_to(id).into_dependency() } /// Returns false if the dependency is only used to build the local package. pub fn is_transitive(&self) -> bool { self.inner.is_transitive() } pub fn is_build(&self) -> bool { self.inner.is_build() } pub fn is_optional(&self) -> bool { self.inner.is_optional() } /// Returns true if the default features of the dependency are requested. pub fn uses_default_features(&self) -> bool { self.inner.uses_default_features() } /// Returns the list of features that are requested by the dependency. pub fn features(&self) -> &[String] { self.inner.features() } /// Returns true if the package (`sum`) can fulfill this dependency request. pub fn matches(&self, sum: &Summary) -> bool { self.inner.matches(sum) } /// Returns true if the package (`id`) can fulfill this dependency request. 
pub fn matches_id(&self, id: &PackageId) -> bool { self.inner.matches_id(id) } } cargo-0.8.0/src/cargo/core/manifest.rs000066400000000000000000000334751264656333200176260ustar00rootroot00000000000000use std::default::Default; use std::fmt; use std::path::{PathBuf, Path}; use semver::Version; use rustc_serialize::{Encoder, Encodable}; use core::{Dependency, PackageId, Summary}; use core::package_id::Metadata; use util::{CargoResult, human}; /// Contains all the information about a package, as loaded from a Cargo.toml. #[derive(Clone, Debug)] pub struct Manifest { summary: Summary, targets: Vec, links: Option, warnings: Vec, exclude: Vec, include: Vec, metadata: ManifestMetadata, profiles: Profiles, } /// General metadata about a package which is just blindly uploaded to the /// registry. /// /// Note that many of these fields can contain invalid values such as the /// homepage, repository, documentation, or license. These fields are not /// validated by cargo itself, but rather it is up to the registry when uploaded /// to validate these fields. Cargo will itself accept any valid TOML /// specification for these values. #[derive(PartialEq, Clone, Debug)] pub struct ManifestMetadata { pub authors: Vec, pub keywords: Vec, pub license: Option, pub license_file: Option, pub description: Option, // not markdown pub readme: Option, // file, not contents pub homepage: Option, // url pub repository: Option, // url pub documentation: Option, // url } #[derive(Debug, Clone, PartialEq, Eq, Hash, RustcEncodable, Copy)] pub enum LibKind { Lib, Rlib, Dylib, StaticLib } impl LibKind { pub fn from_str(string: &str) -> CargoResult { match string { "lib" => Ok(LibKind::Lib), "rlib" => Ok(LibKind::Rlib), "dylib" => Ok(LibKind::Dylib), "staticlib" => Ok(LibKind::StaticLib), _ => Err(human(format!("crate-type \"{}\" was not one of lib|rlib|dylib|staticlib", string))) } } /// Returns the argument suitable for `--crate-type` to pass to rustc. 
pub fn crate_type(&self) -> &'static str { match *self { LibKind::Lib => "lib", LibKind::Rlib => "rlib", LibKind::Dylib => "dylib", LibKind::StaticLib => "staticlib" } } } #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub enum TargetKind { Lib(Vec), Bin, Test, Bench, Example, CustomBuild, } impl Encodable for TargetKind { fn encode(&self, s: &mut S) -> Result<(), S::Error> { match *self { TargetKind::Lib(ref kinds) => { kinds.iter().map(|k| k.crate_type()).collect() } TargetKind::Bin => vec!["bin"], TargetKind::Example => vec!["example"], TargetKind::Test => vec!["test"], TargetKind::CustomBuild => vec!["custom-build"], TargetKind::Bench => vec!["bench"], }.encode(s) } } #[derive(RustcEncodable, RustcDecodable, Clone, PartialEq, Eq, Debug, Hash)] pub struct Profile { pub opt_level: u32, pub lto: bool, pub codegen_units: Option, // None = use rustc default pub rustc_args: Option>, pub rustdoc_args: Option>, pub debuginfo: bool, pub debug_assertions: bool, pub rpath: bool, pub test: bool, pub doc: bool, pub run_custom_build: bool, } #[derive(Default, Clone, Debug)] pub struct Profiles { pub release: Profile, pub dev: Profile, pub test: Profile, pub bench: Profile, pub doc: Profile, pub custom_build: Profile, } /// Information about a binary, a library, an example, etc. that is part of the /// package. 
#[derive(Clone, Hash, PartialEq, Eq, Debug)] pub struct Target { kind: TargetKind, name: String, src_path: PathBuf, metadata: Option, tested: bool, benched: bool, doc: bool, doctest: bool, harness: bool, // whether to use the test harness (--test) for_host: bool, } #[derive(RustcEncodable)] struct SerializedTarget<'a> { kind: &'a TargetKind, name: &'a str, src_path: &'a str, } impl Encodable for Target { fn encode(&self, s: &mut S) -> Result<(), S::Error> { SerializedTarget { kind: &self.kind, name: &self.name, src_path: &self.src_path.display().to_string(), }.encode(s) } } impl Manifest { pub fn new(summary: Summary, targets: Vec, exclude: Vec, include: Vec, links: Option, metadata: ManifestMetadata, profiles: Profiles) -> Manifest { Manifest { summary: summary, targets: targets, warnings: Vec::new(), exclude: exclude, include: include, links: links, metadata: metadata, profiles: profiles, } } pub fn dependencies(&self) -> &[Dependency] { self.summary.dependencies() } pub fn exclude(&self) -> &[String] { &self.exclude } pub fn include(&self) -> &[String] { &self.include } pub fn metadata(&self) -> &ManifestMetadata { &self.metadata } pub fn name(&self) -> &str { self.package_id().name() } pub fn package_id(&self) -> &PackageId { self.summary.package_id() } pub fn summary(&self) -> &Summary { &self.summary } pub fn targets(&self) -> &[Target] { &self.targets } pub fn version(&self) -> &Version { self.package_id().version() } pub fn warnings(&self) -> &[String] { &self.warnings } pub fn profiles(&self) -> &Profiles { &self.profiles } pub fn links(&self) -> Option<&str> { self.links.as_ref().map(|s| &s[..]) } pub fn add_warning(&mut self, s: String) { self.warnings.push(s) } pub fn set_summary(&mut self, summary: Summary) { self.summary = summary; } } impl Target { fn blank() -> Target { Target { kind: TargetKind::Bin, name: String::new(), src_path: PathBuf::new(), metadata: None, doc: false, doctest: false, harness: true, for_host: false, tested: true, benched: 
true, } } pub fn lib_target(name: &str, crate_targets: Vec, src_path: &Path, metadata: Metadata) -> Target { Target { kind: TargetKind::Lib(crate_targets), name: name.to_string(), src_path: src_path.to_path_buf(), metadata: Some(metadata), doctest: true, doc: true, ..Target::blank() } } pub fn bin_target(name: &str, src_path: &Path, metadata: Option) -> Target { Target { kind: TargetKind::Bin, name: name.to_string(), src_path: src_path.to_path_buf(), metadata: metadata, doc: true, ..Target::blank() } } /// Builds a `Target` corresponding to the `build = "build.rs"` entry. pub fn custom_build_target(name: &str, src_path: &Path, metadata: Option) -> Target { Target { kind: TargetKind::CustomBuild, name: name.to_string(), src_path: src_path.to_path_buf(), metadata: metadata, for_host: true, benched: false, tested: false, ..Target::blank() } } pub fn example_target(name: &str, src_path: &Path) -> Target { Target { kind: TargetKind::Example, name: name.to_string(), src_path: src_path.to_path_buf(), benched: false, ..Target::blank() } } pub fn test_target(name: &str, src_path: &Path, metadata: Metadata) -> Target { Target { kind: TargetKind::Test, name: name.to_string(), src_path: src_path.to_path_buf(), metadata: Some(metadata), benched: false, ..Target::blank() } } pub fn bench_target(name: &str, src_path: &Path, metadata: Metadata) -> Target { Target { kind: TargetKind::Bench, name: name.to_string(), src_path: src_path.to_path_buf(), metadata: Some(metadata), tested: false, ..Target::blank() } } pub fn name(&self) -> &str { &self.name } pub fn crate_name(&self) -> String { self.name.replace("-", "_") } pub fn src_path(&self) -> &Path { &self.src_path } pub fn metadata(&self) -> Option<&Metadata> { self.metadata.as_ref() } pub fn kind(&self) -> &TargetKind { &self.kind } pub fn tested(&self) -> bool { self.tested } pub fn harness(&self) -> bool { self.harness } pub fn documented(&self) -> bool { self.doc } pub fn for_host(&self) -> bool { self.for_host } pub fn 
benched(&self) -> bool { self.benched } pub fn doctested(&self) -> bool { self.doctest && match self.kind { TargetKind::Lib(ref kinds) => { kinds.contains(&LibKind::Rlib) || kinds.contains(&LibKind::Lib) } _ => false, } } pub fn allows_underscores(&self) -> bool { self.is_bin() || self.is_example() || self.is_custom_build() } pub fn is_lib(&self) -> bool { match self.kind { TargetKind::Lib(_) => true, _ => false } } pub fn linkable(&self) -> bool { match self.kind { TargetKind::Lib(ref kinds) => { kinds.iter().any(|k| { match *k { LibKind::Lib | LibKind::Rlib | LibKind::Dylib => true, LibKind::StaticLib => false, } }) } _ => false } } pub fn is_bin(&self) -> bool { self.kind == TargetKind::Bin } pub fn is_example(&self) -> bool { self.kind == TargetKind::Example } pub fn is_test(&self) -> bool { self.kind == TargetKind::Test } pub fn is_bench(&self) -> bool { self.kind == TargetKind::Bench } pub fn is_custom_build(&self) -> bool { self.kind == TargetKind::CustomBuild } /// Returns the arguments suitable for `--crate-type` to pass to rustc. 
pub fn rustc_crate_types(&self) -> Vec<&'static str> { match self.kind { TargetKind::Lib(ref kinds) => { kinds.iter().map(|kind| kind.crate_type()).collect() }, TargetKind::CustomBuild | TargetKind::Bench | TargetKind::Test | TargetKind::Example | TargetKind::Bin => vec!["bin"], } } pub fn can_lto(&self) -> bool { match self.kind { TargetKind::Lib(ref v) => *v == [LibKind::StaticLib], _ => true, } } pub fn set_tested(&mut self, tested: bool) -> &mut Target { self.tested = tested; self } pub fn set_benched(&mut self, benched: bool) -> &mut Target { self.benched = benched; self } pub fn set_doctest(&mut self, doctest: bool) -> &mut Target { self.doctest = doctest; self } pub fn set_for_host(&mut self, for_host: bool) -> &mut Target { self.for_host = for_host; self } pub fn set_harness(&mut self, harness: bool) -> &mut Target { self.harness = harness; self } pub fn set_doc(&mut self, doc: bool) -> &mut Target { self.doc = doc; self } } impl fmt::Display for Target { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self.kind { TargetKind::Lib(..) 
=> write!(f, "Target(lib)"), TargetKind::Bin => write!(f, "Target(bin: {})", self.name), TargetKind::Test => write!(f, "Target(test: {})", self.name), TargetKind::Bench => write!(f, "Target(bench: {})", self.name), TargetKind::Example => write!(f, "Target(example: {})", self.name), TargetKind::CustomBuild => write!(f, "Target(script)"), } } } impl Profile { pub fn default_dev() -> Profile { Profile { debuginfo: true, debug_assertions: true, ..Profile::default() } } pub fn default_release() -> Profile { Profile { opt_level: 3, debuginfo: false, ..Profile::default() } } pub fn default_test() -> Profile { Profile { test: true, ..Profile::default_dev() } } pub fn default_bench() -> Profile { Profile { test: true, ..Profile::default_release() } } pub fn default_doc() -> Profile { Profile { doc: true, ..Profile::default_dev() } } pub fn default_custom_build() -> Profile { Profile { run_custom_build: true, ..Profile::default_dev() } } } impl Default for Profile { fn default() -> Profile { Profile { opt_level: 0, lto: false, codegen_units: None, rustc_args: None, rustdoc_args: None, debuginfo: false, debug_assertions: false, rpath: false, test: false, doc: false, run_custom_build: false, } } } impl fmt::Display for Profile { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if self.test { write!(f, "Profile(test)") } else if self.doc { write!(f, "Profile(doc)") } else if self.run_custom_build { write!(f, "Profile(run)") } else { write!(f, "Profile(build)") } } } cargo-0.8.0/src/cargo/core/mod.rs000066400000000000000000000013171264656333200165650ustar00rootroot00000000000000pub use self::dependency::{Dependency, DependencyInner}; pub use self::manifest::{Manifest, Target, TargetKind, Profile, LibKind, Profiles}; pub use self::package::{Package, PackageSet}; pub use self::package_id::{PackageId, Metadata}; pub use self::package_id_spec::PackageIdSpec; pub use self::registry::Registry; pub use self::resolver::Resolve; pub use self::shell::{Shell, MultiShell, ShellConfig, 
Verbosity, ColorConfig}; pub use self::source::{Source, SourceId, SourceMap, SourceSet, GitReference}; pub use self::summary::Summary; pub mod source; pub mod package; pub mod package_id; pub mod dependency; pub mod manifest; pub mod resolver; pub mod summary; pub mod shell; pub mod registry; mod package_id_spec; cargo-0.8.0/src/cargo/core/package.rs000066400000000000000000000133251264656333200174030ustar00rootroot00000000000000use std::collections::HashMap; use std::fmt; use std::hash; use std::slice; use std::path::{Path, PathBuf}; use semver::Version; use core::{Dependency, Manifest, PackageId, SourceId, Registry, Target, Summary, Metadata}; use ops; use util::{CargoResult, graph, Config}; use rustc_serialize::{Encoder,Encodable}; use core::source::Source; /// Information about a package that is available somewhere in the file system. /// /// A package is a `Cargo.toml` file plus all the files that are part of it. // TODO: Is manifest_path a relic? #[derive(Clone, Debug)] pub struct Package { // The package's manifest manifest: Manifest, // The root of the package manifest_path: PathBuf, } #[derive(RustcEncodable)] struct SerializedPackage<'a> { name: &'a str, version: &'a str, id: &'a PackageId, source: &'a SourceId, dependencies: &'a [Dependency], targets: &'a [Target], features: &'a HashMap>, manifest_path: &'a str, } impl Encodable for Package { fn encode(&self, s: &mut S) -> Result<(), S::Error> { let summary = self.manifest.summary(); let package_id = summary.package_id(); SerializedPackage { name: &package_id.name(), version: &package_id.version().to_string(), id: package_id, source: summary.source_id(), dependencies: summary.dependencies(), targets: &self.manifest.targets(), features: summary.features(), manifest_path: &self.manifest_path.display().to_string(), }.encode(s) } } impl Package { pub fn new(manifest: Manifest, manifest_path: &Path) -> Package { Package { manifest: manifest, manifest_path: manifest_path.to_path_buf(), } } pub fn 
for_path(manifest_path: &Path, config: &Config) -> CargoResult { let path = manifest_path.parent().unwrap(); let source_id = try!(SourceId::for_path(path)); let (pkg, _) = try!(ops::read_package(&manifest_path, &source_id, config)); Ok(pkg) } pub fn dependencies(&self) -> &[Dependency] { self.manifest.dependencies() } pub fn manifest(&self) -> &Manifest { &self.manifest } pub fn manifest_path(&self) -> &Path { &self.manifest_path } pub fn name(&self) -> &str { self.package_id().name() } pub fn package_id(&self) -> &PackageId { self.manifest.package_id() } pub fn root(&self) -> &Path { self.manifest_path.parent().unwrap() } pub fn summary(&self) -> &Summary { self.manifest.summary() } pub fn targets(&self) -> &[Target] { self.manifest().targets() } pub fn version(&self) -> &Version { self.package_id().version() } pub fn has_custom_build(&self) -> bool { self.targets().iter().any(|t| t.is_custom_build()) } pub fn generate_metadata(&self) -> Metadata { self.package_id().generate_metadata(self.root()) } } impl fmt::Display for Package { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.summary().package_id()) } } impl PartialEq for Package { fn eq(&self, other: &Package) -> bool { self.package_id() == other.package_id() } } impl Eq for Package {} impl hash::Hash for Package { fn hash(&self, into: &mut H) { // We want to be sure that a path-based package showing up at the same // location always has the same hash. To that effect we don't hash the // vanilla package ID if we're a path, but instead feed in our own root // path. 
if self.package_id().source_id().is_path() { (0, self.root(), self.name(), self.package_id().version()).hash(into) } else { (1, self.package_id()).hash(into) } } } #[derive(PartialEq,Clone,Debug)] pub struct PackageSet { packages: Vec, } impl PackageSet { pub fn new(packages: &[Package]) -> PackageSet { //assert!(packages.len() > 0, // "PackageSet must be created with at least one package") PackageSet { packages: packages.to_vec() } } pub fn len(&self) -> usize { self.packages.len() } pub fn pop(&mut self) -> Package { self.packages.pop().expect("PackageSet.pop: empty set") } /// Get a package by name out of the set pub fn get(&self, name: &str) -> &Package { self.packages.iter().find(|pkg| name == pkg.name()) .expect("PackageSet.get: empty set") } pub fn get_all(&self, names: &[&str]) -> Vec<&Package> { names.iter().map(|name| self.get(*name) ).collect() } pub fn packages(&self) -> &[Package] { &self.packages } // For now, assume that the package set contains only one package with a // given name pub fn sort(&self) -> Option { let mut graph = graph::Graph::new(); for pkg in self.packages.iter() { let deps: Vec<&str> = pkg.dependencies().iter() .map(|dep| dep.name()) .collect(); graph.add(pkg.name(), &deps); } let pkgs = match graph.sort() { Some(pkgs) => pkgs, None => return None, }; let pkgs = pkgs.iter().map(|name| { self.get(*name).clone() }).collect(); Some(PackageSet { packages: pkgs }) } pub fn iter(&self) -> slice::Iter { self.packages.iter() } } impl Registry for PackageSet { fn query(&mut self, name: &Dependency) -> CargoResult> { Ok(self.packages.iter() .filter(|pkg| name.name() == pkg.name()) .map(|pkg| pkg.summary().clone()) .collect()) } } cargo-0.8.0/src/cargo/core/package_id.rs000066400000000000000000000134321264656333200200560ustar00rootroot00000000000000use std::cmp::Ordering; use std::error::Error; use std::fmt::{self, Formatter}; use std::hash::Hash; use std::hash; use std::path::Path; use std::sync::Arc; use regex::Regex; use 
rustc_serialize::{Encodable, Encoder, Decodable, Decoder}; use semver; use util::{CargoResult, CargoError, short_hash, ToSemver}; use core::source::SourceId; /// Identifier for a specific version of a package in a specific source. #[derive(Clone, Debug)] pub struct PackageId { inner: Arc, } #[derive(PartialEq, PartialOrd, Eq, Ord, Debug)] struct PackageIdInner { name: String, version: semver::Version, source_id: SourceId, } impl Encodable for PackageId { fn encode(&self, s: &mut S) -> Result<(), S::Error> { let source = self.inner.source_id.to_url(); let encoded = format!("{} {} ({})", self.inner.name, self.inner.version, source); encoded.encode(s) } } impl Decodable for PackageId { fn decode(d: &mut D) -> Result { let string: String = try!(Decodable::decode(d)); let regex = Regex::new(r"^([^ ]+) ([^ ]+) \(([^\)]+)\)$").unwrap(); let captures = regex.captures(&string).expect("invalid serialized PackageId"); let name = captures.at(1).unwrap(); let version = captures.at(2).unwrap(); let url = captures.at(3).unwrap(); let version = semver::Version::parse(version).ok().expect("invalid version"); let source_id = SourceId::from_url(url.to_string()); Ok(PackageId { inner: Arc::new(PackageIdInner { name: name.to_string(), version: version, source_id: source_id, }), }) } } impl Hash for PackageId { fn hash(&self, state: &mut S) { self.inner.name.hash(state); self.inner.version.hash(state); self.inner.source_id.hash(state); } } impl PartialEq for PackageId { fn eq(&self, other: &PackageId) -> bool { (*self.inner).eq(&*other.inner) } } impl PartialOrd for PackageId { fn partial_cmp(&self, other: &PackageId) -> Option { (*self.inner).partial_cmp(&*other.inner) } } impl Eq for PackageId {} impl Ord for PackageId { fn cmp(&self, other: &PackageId) -> Ordering { (*self.inner).cmp(&*other.inner) } } #[derive(Clone, Debug, PartialEq)] pub enum PackageIdError { InvalidVersion(String), InvalidNamespace(String) } impl Error for PackageIdError { fn description(&self) -> &str { "failed 
to parse package id" } } impl fmt::Display for PackageIdError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { PackageIdError::InvalidVersion(ref v) => { write!(f, "invalid version: {}", *v) } PackageIdError::InvalidNamespace(ref ns) => { write!(f, "invalid namespace: {}", *ns) } } } } impl CargoError for PackageIdError { fn is_human(&self) -> bool { true } } impl From for Box { fn from(t: PackageIdError) -> Box { Box::new(t) } } #[derive(PartialEq, Eq, Hash, Clone, RustcEncodable, Debug)] pub struct Metadata { pub metadata: String, pub extra_filename: String } impl PackageId { pub fn new(name: &str, version: T, sid: &SourceId) -> CargoResult { let v = try!(version.to_semver().map_err(PackageIdError::InvalidVersion)); Ok(PackageId { inner: Arc::new(PackageIdInner { name: name.to_string(), version: v, source_id: sid.clone(), }), }) } pub fn name(&self) -> &str { &self.inner.name } pub fn version(&self) -> &semver::Version { &self.inner.version } pub fn source_id(&self) -> &SourceId { &self.inner.source_id } pub fn generate_metadata(&self, source_root: &Path) -> Metadata { // See comments in Package::hash for why we have this test let metadata = if self.inner.source_id.is_path() { short_hash(&(0, &self.inner.name, &self.inner.version, source_root)) } else { short_hash(&(1, self)) }; let extra_filename = format!("-{}", metadata); Metadata { metadata: metadata, extra_filename: extra_filename } } pub fn with_precise(&self, precise: Option) -> PackageId { PackageId { inner: Arc::new(PackageIdInner { name: self.inner.name.to_string(), version: self.inner.version.clone(), source_id: self.inner.source_id.with_precise(precise), }), } } } impl Metadata { pub fn mix(&mut self, t: &T) { let new_metadata = short_hash(&(&self.metadata, t)); self.extra_filename = format!("-{}", new_metadata); self.metadata = new_metadata; } } impl fmt::Display for PackageId { fn fmt(&self, f: &mut Formatter) -> fmt::Result { try!(write!(f, "{} v{}", self.inner.name, 
self.inner.version)); if !self.inner.source_id.is_default_registry() { try!(write!(f, " ({})", self.inner.source_id)); } Ok(()) } } #[cfg(test)] mod tests { use super::PackageId; use core::source::SourceId; use sources::RegistrySource; use util::ToUrl; #[test] fn invalid_version_handled_nicely() { let loc = RegistrySource::default_url().to_url().unwrap(); let repo = SourceId::for_registry(&loc); assert!(PackageId::new("foo", "1.0", &repo).is_err()); assert!(PackageId::new("foo", "1", &repo).is_err()); assert!(PackageId::new("foo", "bar", &repo).is_err()); assert!(PackageId::new("foo", "", &repo).is_err()); } } cargo-0.8.0/src/cargo/core/package_id_spec.rs000066400000000000000000000242771264656333200211010ustar00rootroot00000000000000use std::collections::HashMap; use std::fmt; use semver::Version; use url::{self, Url, UrlParser}; use core::PackageId; use util::{CargoResult, ToUrl, human, ToSemver, ChainError}; #[derive(Clone, PartialEq, Eq, Debug)] pub struct PackageIdSpec { name: String, version: Option, url: Option, } impl PackageIdSpec { pub fn parse(spec: &str) -> CargoResult { if spec.contains("/") { match spec.to_url() { Ok(url) => return PackageIdSpec::from_url(url), Err(..) => {} } if !spec.contains("://") { match url(&format!("cargo://{}", spec)) { Ok(url) => return PackageIdSpec::from_url(url), Err(..) 
=> {} } } } let mut parts = spec.splitn(2, ':'); let name = parts.next().unwrap(); let version = match parts.next() { Some(version) => Some(try!(Version::parse(version).map_err(human))), None => None, }; for ch in name.chars() { if !ch.is_alphanumeric() && ch != '_' && ch != '-' { bail!("invalid character in pkgid `{}`: `{}`", spec, ch) } } Ok(PackageIdSpec { name: name.to_string(), version: version, url: None, }) } pub fn query_str<'a, I>(spec: &str, i: I) -> CargoResult<&'a PackageId> where I: IntoIterator { let spec = try!(PackageIdSpec::parse(spec).chain_error(|| { human(format!("invalid package id specification: `{}`", spec)) })); spec.query(i) } pub fn from_package_id(package_id: &PackageId) -> PackageIdSpec { PackageIdSpec { name: package_id.name().to_string(), version: Some(package_id.version().clone()), url: Some(package_id.source_id().url().clone()), } } fn from_url(mut url: Url) -> CargoResult { if url.query.is_some() { bail!("cannot have a query string in a pkgid: {}", url) } let frag = url.fragment.take(); let (name, version) = { let path = try!(url.path().chain_error(|| { human(format!("pkgid urls must have a path: {}", url)) })); let path_name = try!(path.last().chain_error(|| { human(format!("pkgid urls must have at least one path \ component: {}", url)) })); match frag { Some(fragment) => { let mut parts = fragment.splitn(2, ':'); let name_or_version = parts.next().unwrap(); match parts.next() { Some(part) => { let version = try!(part.to_semver().map_err(human)); (name_or_version.to_string(), Some(version)) } None => { if name_or_version.chars().next().unwrap() .is_alphabetic() { (name_or_version.to_string(), None) } else { let version = try!(name_or_version.to_semver() .map_err(human)); (path_name.to_string(), Some(version)) } } } } None => (path_name.to_string(), None), } }; Ok(PackageIdSpec { name: name, version: version, url: Some(url), }) } pub fn name(&self) -> &str { &self.name } pub fn version(&self) -> Option<&Version> { 
self.version.as_ref() } pub fn url(&self) -> Option<&Url> { self.url.as_ref() } pub fn matches(&self, package_id: &PackageId) -> bool { if self.name() != package_id.name() { return false } match self.version { Some(ref v) => if v != package_id.version() { return false }, None => {} } match self.url { Some(ref u) => u == package_id.source_id().url(), None => true } } pub fn query<'a, I>(&self, i: I) -> CargoResult<&'a PackageId> where I: IntoIterator { let mut ids = i.into_iter().filter(|p| self.matches(*p)); let ret = match ids.next() { Some(id) => id, None => bail!("package id specification `{}` \ matched no packages", self), }; return match ids.next() { Some(other) => { let mut msg = format!("There are multiple `{}` packages in \ your project, and the specification \ `{}` is ambiguous.\n\ Please re-run this command \ with `-p ` where `` is one \ of the following:", self.name(), self); let mut vec = vec![ret, other]; vec.extend(ids); minimize(&mut msg, vec, self); Err(human(msg)) } None => Ok(ret) }; fn minimize(msg: &mut String, ids: Vec<&PackageId>, spec: &PackageIdSpec) { let mut version_cnt = HashMap::new(); for id in ids.iter() { *version_cnt.entry(id.version()).or_insert(0) += 1; } for id in ids.iter() { if version_cnt[id.version()] == 1 { msg.push_str(&format!("\n {}:{}", spec.name(), id.version())); } else { msg.push_str(&format!("\n {}", PackageIdSpec::from_package_id(*id))); } } } } } fn url(s: &str) -> url::ParseResult { return UrlParser::new().scheme_type_mapper(mapper).parse(s); fn mapper(scheme: &str) -> url::SchemeType { if scheme == "cargo" { url::SchemeType::Relative(1) } else { url::whatwg_scheme_type_mapper(scheme) } } } impl fmt::Display for PackageIdSpec { #[allow(deprecated)] // connect => join in 1.3 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut printed_name = false; match self.url { Some(ref url) => { if url.scheme == "cargo" { try!(write!(f, "{}/{}", url.host().unwrap(), url.path().unwrap().connect("/"))); } else { 
try!(write!(f, "{}", url)); } if url.path().unwrap().last().unwrap() != &self.name { printed_name = true; try!(write!(f, "#{}", self.name)); } } None => { printed_name = true; try!(write!(f, "{}", self.name)) } } match self.version { Some(ref v) => { try!(write!(f, "{}{}", if printed_name {":"} else {"#"}, v)); } None => {} } Ok(()) } } #[cfg(test)] mod tests { use core::{PackageId, SourceId}; use super::{PackageIdSpec, url}; use url::Url; use semver::Version; #[test] fn good_parsing() { fn ok(spec: &str, expected: PackageIdSpec) { let parsed = PackageIdSpec::parse(spec).unwrap(); assert_eq!(parsed, expected); assert_eq!(parsed.to_string(), spec); } ok("http://crates.io/foo#1.2.3", PackageIdSpec { name: "foo".to_string(), version: Some(Version::parse("1.2.3").unwrap()), url: Some(url("http://crates.io/foo").unwrap()), }); ok("http://crates.io/foo#bar:1.2.3", PackageIdSpec { name: "bar".to_string(), version: Some(Version::parse("1.2.3").unwrap()), url: Some(url("http://crates.io/foo").unwrap()), }); ok("crates.io/foo", PackageIdSpec { name: "foo".to_string(), version: None, url: Some(url("cargo://crates.io/foo").unwrap()), }); ok("crates.io/foo#1.2.3", PackageIdSpec { name: "foo".to_string(), version: Some(Version::parse("1.2.3").unwrap()), url: Some(url("cargo://crates.io/foo").unwrap()), }); ok("crates.io/foo#bar", PackageIdSpec { name: "bar".to_string(), version: None, url: Some(url("cargo://crates.io/foo").unwrap()), }); ok("crates.io/foo#bar:1.2.3", PackageIdSpec { name: "bar".to_string(), version: Some(Version::parse("1.2.3").unwrap()), url: Some(url("cargo://crates.io/foo").unwrap()), }); ok("foo", PackageIdSpec { name: "foo".to_string(), version: None, url: None, }); ok("foo:1.2.3", PackageIdSpec { name: "foo".to_string(), version: Some(Version::parse("1.2.3").unwrap()), url: None, }); } #[test] fn bad_parsing() { assert!(PackageIdSpec::parse("baz:").is_err()); assert!(PackageIdSpec::parse("baz:1.0").is_err()); 
assert!(PackageIdSpec::parse("http://baz:1.0").is_err()); assert!(PackageIdSpec::parse("http://#baz:1.0").is_err()); } #[test] fn matching() { let url = Url::parse("http://example.com").unwrap(); let sid = SourceId::for_registry(&url); let foo = PackageId::new("foo", "1.2.3", &sid).unwrap(); let bar = PackageId::new("bar", "1.2.3", &sid).unwrap(); assert!( PackageIdSpec::parse("foo").unwrap().matches(&foo)); assert!(!PackageIdSpec::parse("foo").unwrap().matches(&bar)); assert!( PackageIdSpec::parse("foo:1.2.3").unwrap().matches(&foo)); assert!(!PackageIdSpec::parse("foo:1.2.2").unwrap().matches(&foo)); } } cargo-0.8.0/src/cargo/core/registry.rs000066400000000000000000000334141264656333200176610ustar00rootroot00000000000000use std::collections::HashSet; use std::collections::hash_map::HashMap; use core::{Source, SourceId, SourceMap, Summary, Dependency, PackageId, Package}; use util::{CargoResult, ChainError, Config, human, profile}; /// Source of information about a group of packages. /// /// See also `core::Source`. pub trait Registry { /// Attempt to find the packages that match a dependency request. fn query(&mut self, name: &Dependency) -> CargoResult>; } impl Registry for Vec { fn query(&mut self, dep: &Dependency) -> CargoResult> { Ok(self.iter().filter(|summary| dep.matches(*summary)) .map(|summary| summary.clone()).collect()) } } impl Registry for Vec { fn query(&mut self, dep: &Dependency) -> CargoResult> { Ok(self.iter().filter(|pkg| dep.matches(pkg.summary())) .map(|pkg| pkg.summary().clone()).collect()) } } /// This structure represents a registry of known packages. It internally /// contains a number of `Box` instances which are used to load a /// `Package` from. /// /// The resolution phase of Cargo uses this to drive knowledge about new /// packages as well as querying for lists of new packages. It is here that /// sources are updated (e.g. network operations) and overrides are /// handled. 
/// /// The general idea behind this registry is that it is centered around the /// `SourceMap` structure, contained within which is a mapping of a `SourceId` to /// a `Source`. Each `Source` in the map has been updated (using network /// operations if necessary) and is ready to be queried for packages. pub struct PackageRegistry<'cfg> { sources: SourceMap<'cfg>, config: &'cfg Config, // A list of sources which are considered "overrides" which take precedent // when querying for packages. overrides: Vec, // Note that each SourceId does not take into account its `precise` field // when hashing or testing for equality. When adding a new `SourceId`, we // want to avoid duplicates in the `SourceMap` (to prevent re-updating the // same git repo twice for example), but we also want to ensure that the // loaded source is always updated. // // Sources with a `precise` field normally don't need to be updated because // their contents are already on disk, but sources without a `precise` field // almost always need to be updated. If we have a cached `Source` for a // precise `SourceId`, then when we add a new `SourceId` that is not precise // we want to ensure that the underlying source is updated. // // This is basically a long-winded way of saying that we want to know // precisely what the keys of `sources` are, so this is a mapping of key to // what exactly the key is. 
source_ids: HashMap, locked: HashMap)>>>, } #[derive(PartialEq, Eq, Clone, Copy)] enum Kind { Override, Locked, Normal, } impl<'cfg> PackageRegistry<'cfg> { pub fn new(config: &'cfg Config) -> PackageRegistry<'cfg> { PackageRegistry { sources: SourceMap::new(), source_ids: HashMap::new(), overrides: vec![], config: config, locked: HashMap::new(), } } pub fn get(&mut self, package_ids: &[PackageId]) -> CargoResult> { trace!("getting packages; sources={}", self.sources.len()); // TODO: Only call source with package ID if the package came from the // source let mut ret = Vec::new(); for (_, source) in self.sources.sources_mut() { try!(source.download(package_ids)); let packages = try!(source.get(package_ids)); ret.extend(packages.into_iter()); } // TODO: Return earlier if fail assert!(package_ids.len() == ret.len(), "could not get packages from registry; ids={:?}; ret={:?}", package_ids, ret); Ok(ret) } pub fn move_sources(self) -> SourceMap<'cfg> { self.sources } fn ensure_loaded(&mut self, namespace: &SourceId, kind: Kind) -> CargoResult<()> { match self.source_ids.get(namespace) { // We've previously loaded this source, and we've already locked it, // so we're not allowed to change it even if `namespace` has a // slightly different precise version listed. Some(&(_, Kind::Locked)) => { debug!("load/locked {}", namespace); return Ok(()) } // If the previous source was not a precise source, then we can be // sure that it's already been updated if we've already loaded it. Some(&(ref previous, _)) if previous.precise().is_none() => { debug!("load/precise {}", namespace); return Ok(()) } // If the previous source has the same precise version as we do, // then we're done, otherwise we need to need to move forward // updating this source. 
Some(&(ref previous, _)) => { if previous.precise() == namespace.precise() { debug!("load/match {}", namespace); return Ok(()) } debug!("load/mismatch {}", namespace); } None => { debug!("load/missing {}", namespace); } } try!(self.load(namespace, kind)); Ok(()) } pub fn add_sources(&mut self, ids: &[SourceId]) -> CargoResult<()> { for id in ids.iter() { try!(self.ensure_loaded(id, Kind::Locked)); } Ok(()) } pub fn add_preloaded(&mut self, id: &SourceId, source: Box) { self.add_source(id, source, Kind::Locked); } fn add_source(&mut self, id: &SourceId, source: Box, kind: Kind) { self.sources.insert(id, source); self.source_ids.insert(id.clone(), (id.clone(), kind)); } pub fn add_overrides(&mut self, ids: Vec) -> CargoResult<()> { for id in ids.iter() { try!(self.load(id, Kind::Override)); } Ok(()) } pub fn register_lock(&mut self, id: PackageId, deps: Vec) { let sub_map = self.locked.entry(id.source_id().clone()) .or_insert(HashMap::new()); let sub_vec = sub_map.entry(id.name().to_string()) .or_insert(Vec::new()); sub_vec.push((id, deps)); } fn load(&mut self, source_id: &SourceId, kind: Kind) -> CargoResult<()> { (|| { let mut source = source_id.load(self.config); // Ensure the source has fetched all necessary remote data. let p = profile::start(format!("updating: {}", source_id)); try!(source.update()); drop(p); if kind == Kind::Override { self.overrides.push(source_id.clone()); } // Save off the source self.add_source(source_id, source, kind); Ok(()) }).chain_error(|| human(format!("Unable to update {}", source_id))) } fn query_overrides(&mut self, dep: &Dependency) -> CargoResult> { let mut seen = HashSet::new(); let mut ret = Vec::new(); for s in self.overrides.iter() { let src = self.sources.get_mut(s).unwrap(); let dep = Dependency::new_override(dep.name(), s); ret.extend(try!(src.query(&dep)).into_iter().filter(|s| { seen.insert(s.name().to_string()) })); } Ok(ret) } // This function is used to transform a summary to another locked summary if // possible. 
This is where the concept of a lockfile comes into play. // // If a summary points at a package id which was previously locked, then we // override the summary's id itself, as well as all dependencies, to be // rewritten to the locked versions. This will transform the summary's // source to a precise source (listed in the locked version) as well as // transforming all of the dependencies from range requirements on imprecise // sources to exact requirements on precise sources. // // If a summary does not point at a package id which was previously locked, // we still want to avoid updating as many dependencies as possible to keep // the graph stable. In this case we map all of the summary's dependencies // to be rewritten to a locked version wherever possible. If we're unable to // map a dependency though, we just pass it on through. fn lock(&self, summary: Summary) -> Summary { let pair = self.locked.get(summary.source_id()).and_then(|map| { map.get(summary.name()) }).and_then(|vec| { vec.iter().find(|&&(ref id, _)| id == summary.package_id()) }); // Lock the summary's id if possible let summary = match pair { Some(&(ref precise, _)) => summary.override_id(precise.clone()), None => summary, }; summary.map_dependencies(|dep| { match pair { // If we've got a known set of overrides for this summary, then // one of a few cases can arise: // // 1. We have a lock entry for this dependency from the same // source as it's listed as coming from. In this case we make // sure to lock to precisely the given package id. // // 2. We have a lock entry for this dependency, but it's from a // different source than what's listed, or the version // requirement has changed. In this case we must discard the // locked version because the dependency needs to be // re-resolved. // // 3. We don't have a lock entry for this dependency, in which // case it was likely an optional dependency which wasn't // included previously so we just pass it through anyway. 
Some(&(_, ref deps)) => { match deps.iter().find(|d| d.name() == dep.name()) { Some(lock) => { if dep.matches_id(lock) { dep.lock_to(lock) } else { dep } } None => dep, } } // If this summary did not have a locked version, then we query // all known locked packages to see if they match this // dependency. If anything does then we lock it to that and move // on. None => { let v = self.locked.get(dep.source_id()).and_then(|map| { map.get(dep.name()) }).and_then(|vec| { vec.iter().find(|&&(ref id, _)| dep.matches_id(id)) }); match v { Some(&(ref id, _)) => dep.lock_to(id), None => dep } } } }) } } impl<'cfg> Registry for PackageRegistry<'cfg> { fn query(&mut self, dep: &Dependency) -> CargoResult> { let overrides = try!(self.query_overrides(dep)); let ret = if overrides.len() == 0 { // Ensure the requested source_id is loaded try!(self.ensure_loaded(dep.source_id(), Kind::Normal)); let mut ret = Vec::new(); for (id, src) in self.sources.sources_mut() { if id == dep.source_id() { ret.extend(try!(src.query(dep)).into_iter()); } } ret } else { overrides }; // post-process all returned summaries to ensure that we lock all // relevant summaries to the right versions and sources Ok(ret.into_iter().map(|summary| self.lock(summary)).collect()) } } #[cfg(test)] pub mod test { use core::{Summary, Registry, Dependency}; use util::{CargoResult}; pub struct RegistryBuilder { summaries: Vec, overrides: Vec } impl RegistryBuilder { pub fn new() -> RegistryBuilder { RegistryBuilder { summaries: vec![], overrides: vec![] } } pub fn summary(mut self, summary: Summary) -> RegistryBuilder { self.summaries.push(summary); self } pub fn summaries(mut self, summaries: Vec) -> RegistryBuilder { self.summaries.extend(summaries.into_iter()); self } pub fn add_override(mut self, summary: Summary) -> RegistryBuilder { self.overrides.push(summary); self } pub fn overrides(mut self, summaries: Vec) -> RegistryBuilder { self.overrides.extend(summaries.into_iter()); self } fn query_overrides(&self, 
dep: &Dependency) -> Vec { self.overrides.iter() .filter(|s| s.name() == dep.name()) .map(|s| s.clone()) .collect() } } impl Registry for RegistryBuilder { fn query(&mut self, dep: &Dependency) -> CargoResult> { debug!("querying; dep={:?}", dep); let overrides = self.query_overrides(dep); if overrides.is_empty() { self.summaries.query(dep) } else { Ok(overrides) } } } } cargo-0.8.0/src/cargo/core/resolver/000077500000000000000000000000001264656333200172775ustar00rootroot00000000000000cargo-0.8.0/src/cargo/core/resolver/encode.rs000066400000000000000000000140761264656333200211120ustar00rootroot00000000000000use std::collections::{HashMap, BTreeMap}; use regex::Regex; use rustc_serialize::{Encodable, Encoder, Decodable, Decoder}; use core::{PackageId, SourceId}; use util::{CargoResult, Graph}; use super::Resolve; #[derive(RustcEncodable, RustcDecodable, Debug)] pub struct EncodableResolve { package: Option>, root: EncodableDependency, metadata: Option, } pub type Metadata = BTreeMap; impl EncodableResolve { pub fn to_resolve(&self, default: &SourceId) -> CargoResult { let mut g = Graph::new(); let mut tmp = HashMap::new(); let packages = Vec::new(); let packages = self.package.as_ref().unwrap_or(&packages); { let mut register_pkg = |pkg: &EncodableDependency| -> CargoResult<()> { let pkgid = try!(pkg.to_package_id(default)); let precise = pkgid.source_id().precise() .map(|s| s.to_string()); assert!(tmp.insert(pkgid.clone(), precise).is_none(), "a package was referenced twice in the lockfile"); g.add(try!(pkg.to_package_id(default)), &[]); Ok(()) }; try!(register_pkg(&self.root)); for pkg in packages.iter() { try!(register_pkg(pkg)); } } { let mut add_dependencies = |pkg: &EncodableDependency| -> CargoResult<()> { let package_id = try!(pkg.to_package_id(default)); let deps = match pkg.dependencies { Some(ref deps) => deps, None => return Ok(()), }; for edge in deps.iter() { let to_depend_on = try!(edge.to_package_id(default)); let precise_pkgid = 
tmp.get(&to_depend_on) .map(|p| to_depend_on.with_precise(p.clone())) .unwrap_or(to_depend_on.clone()); g.link(package_id.clone(), precise_pkgid); } Ok(()) }; try!(add_dependencies(&self.root)); for pkg in packages.iter() { try!(add_dependencies(pkg)); } } Ok(Resolve { graph: g, root: try!(self.root.to_package_id(default)), features: HashMap::new(), metadata: self.metadata.clone(), }) } } #[derive(RustcEncodable, RustcDecodable, Debug, PartialOrd, Ord, PartialEq, Eq)] pub struct EncodableDependency { name: String, version: String, source: Option, dependencies: Option> } impl EncodableDependency { fn to_package_id(&self, default_source: &SourceId) -> CargoResult { PackageId::new( &self.name, &self.version, self.source.as_ref().unwrap_or(default_source)) } } #[derive(Debug, PartialOrd, Ord, PartialEq, Eq)] pub struct EncodablePackageId { name: String, version: String, source: Option } impl Encodable for EncodablePackageId { fn encode(&self, s: &mut S) -> Result<(), S::Error> { let mut out = format!("{} {}", self.name, self.version); if let Some(ref s) = self.source { out.push_str(&format!(" ({})", s.to_url())); } out.encode(s) } } impl Decodable for EncodablePackageId { fn decode(d: &mut D) -> Result { let string: String = try!(Decodable::decode(d)); let regex = Regex::new(r"^([^ ]+) ([^ ]+)(?: \(([^\)]+)\))?$").unwrap(); let captures = regex.captures(&string) .expect("invalid serialized PackageId"); let name = captures.at(1).unwrap(); let version = captures.at(2).unwrap(); let source = captures.at(3); let source_id = source.map(|s| SourceId::from_url(s.to_string())); Ok(EncodablePackageId { name: name.to_string(), version: version.to_string(), source: source_id }) } } impl EncodablePackageId { fn to_package_id(&self, default_source: &SourceId) -> CargoResult { PackageId::new( &self.name, &self.version, self.source.as_ref().unwrap_or(default_source)) } } impl Encodable for Resolve { fn encode(&self, s: &mut S) -> Result<(), S::Error> { let mut ids: Vec<&PackageId> = 
self.graph.iter().collect(); ids.sort(); let encodable = ids.iter().filter_map(|&id| { if self.root == *id { return None; } Some(encodable_resolve_node(id, &self.root, &self.graph)) }).collect::>(); EncodableResolve { package: Some(encodable), root: encodable_resolve_node(&self.root, &self.root, &self.graph), metadata: self.metadata.clone(), }.encode(s) } } fn encodable_resolve_node(id: &PackageId, root: &PackageId, graph: &Graph) -> EncodableDependency { let deps = graph.edges(id).map(|edge| { let mut deps = edge.map(|e| { encodable_package_id(e, root) }).collect::>(); deps.sort(); deps }); let source = if id.source_id() == root.source_id() { None } else { Some(id.source_id().clone()) }; EncodableDependency { name: id.name().to_string(), version: id.version().to_string(), source: source, dependencies: deps, } } fn encodable_package_id(id: &PackageId, root: &PackageId) -> EncodablePackageId { let source = if id.source_id() == root.source_id() { None } else { Some(id.source_id().with_precise(None)) }; EncodablePackageId { name: id.name().to_string(), version: id.version().to_string(), source: source, } } cargo-0.8.0/src/cargo/core/resolver/mod.rs000066400000000000000000000700061264656333200204270ustar00rootroot00000000000000//! Resolution of the entire dependency graph for a crate //! //! This module implements the core logic in taking the world of crates and //! constraints and creating a resolved graph with locked versions for all //! crates and their dependencies. This is separate from the registry module //! which is more worried about discovering crates from various sources, this //! module just uses the Registry trait as a source to learn about crates from. //! //! Actually solving a constraint graph is an NP-hard (or NP-complete, I forget //! which) problem, this the algorithm is basically a nice heuristic to make //! sure we get roughly the best answer most of the time. The constraints that //! we're working with are: //! //! 1. 
Each crate can have any number of dependencies. Each dependency can //! declare a version range that it is compatible with. //! 2. Crates can be activated with multiple version (e.g. show up in the //! dependency graph twice) so long as each pairwise instance have //! semver-incompatible versions. //! //! The algorithm employed here is fairly simple, we simply do a DFS, activating //! the "newest crate" (highest version) first and then going to the next //! option. The heuristics we employ are: //! //! * Never try to activate a crate version which is incompatible. This means we //! only try crates which will actually satisfy a dependency and we won't ever //! try to activate a crate that's semver compatible with something else //! activated (as we're only allowed to have one). //! * Always try to activate the highest version crate first. The default //! dependency in Cargo (e.g. when you write `foo = "0.1.2"`) is //! semver-compatible, so selecting the highest version possible will allow us //! to hopefully satisfy as many dependencies at once. //! //! Beyond that, what's implemented below is just a naive backtracking version //! which should in theory try all possible combinations of dependencies and //! versions to see if one works. The first resolution that works causes //! everything to bail out immediately and return success, and only if *nothing* //! works do we actually return an error up the stack. //! //! ## Performance //! //! Note that this is a relatively performance-critical portion of Cargo. The //! data that we're processing is proportional to the size of the dependency //! graph, which can often be quite large (e.g. take a look at Servo). To make //! matters worse the DFS algorithm we're implemented is inherently quite //! inefficient. When we add the requirement of backtracking on top it means //! that we're implementing something that probably shouldn't be allocating all //! over the place. 
use std::collections::HashSet; use std::collections::hash_map::HashMap; use std::fmt; use std::ops::Range; use std::rc::Rc; use semver; use core::{PackageId, Registry, SourceId, Summary, Dependency}; use core::PackageIdSpec; use util::{CargoResult, Graph, human, CargoError}; use util::profile; use util::graph::{Nodes, Edges}; pub use self::encode::{EncodableResolve, EncodableDependency, EncodablePackageId}; pub use self::encode::Metadata; mod encode; /// Represents a fully resolved package dependency graph. Each node in the graph /// is a package and edges represent dependencies between packages. /// /// Each instance of `Resolve` also understands the full set of features used /// for each package as well as what the root package is. #[derive(PartialEq, Eq, Clone)] pub struct Resolve { graph: Graph, features: HashMap>, root: PackageId, metadata: Option, } #[derive(Clone, Copy)] pub enum Method<'a> { Everything, Required { dev_deps: bool, features: &'a [String], uses_default_features: bool, }, } // Err(..) == standard transient error (e.g. 
I/O error) // Ok(Err(..)) == resolve error, but is human readable // Ok(Ok(..)) == success in resolving type ResolveResult = CargoResult>>; // Information about the dependencies for a crate, a tuple of: // // (dependency info, candidates, features activated) type DepInfo = (Dependency, Vec>, Vec); impl Resolve { fn new(root: PackageId) -> Resolve { let mut g = Graph::new(); g.add(root.clone(), &[]); Resolve { graph: g, root: root, features: HashMap::new(), metadata: None } } pub fn copy_metadata(&mut self, other: &Resolve) { self.metadata = other.metadata.clone(); } pub fn iter(&self) -> Nodes { self.graph.iter() } pub fn root(&self) -> &PackageId { &self.root } pub fn deps(&self, pkg: &PackageId) -> Option> { self.graph.edges(pkg) } pub fn features(&self, pkg: &PackageId) -> Option<&HashSet> { self.features.get(pkg) } pub fn query(&self, spec: &str) -> CargoResult<&PackageId> { PackageIdSpec::query_str(spec, self.iter()) } } impl fmt::Debug for Resolve { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { try!(write!(fmt, "graph: {:?}\n", self.graph)); try!(write!(fmt, "\nfeatures: {{\n")); for (pkg, features) in &self.features { try!(write!(fmt, " {}: {:?}\n", pkg, features)); } write!(fmt, "}}") } } #[derive(Clone)] struct Context { activations: HashMap<(String, SourceId), Vec>>, resolve: Resolve, visited: HashSet, } /// Builds the list of all packages required to build the first argument. pub fn resolve(summary: &Summary, method: &Method, registry: &mut Registry) -> CargoResult { trace!("resolve; summary={}", summary.package_id()); let summary = Rc::new(summary.clone()); let cx = Context { resolve: Resolve::new(summary.package_id().clone()), activations: HashMap::new(), visited: HashSet::new(), }; let _p = profile::start(format!("resolving: {}", summary.package_id())); activate_deps_loop(cx, registry, summary, method) } /// Attempts to activate the summary `parent` in the context `cx`. 
/// /// This function will pull dependency summaries from the registry provided, and /// the dependencies of the package will be determined by the `method` provided. /// If `parent` was activated, this function returns the dependency frame to /// iterate through next. fn activate(cx: &mut Context, registry: &mut Registry, parent: Rc, method: &Method) -> CargoResult> { // Dependency graphs are required to be a DAG, so we keep a set of // packages we're visiting and bail if we hit a dupe. let id = parent.package_id().clone(); if !cx.visited.insert(id.clone()) { bail!("cyclic package dependency: package `{}` depends on itself", id) } // If we're already activated, then that was easy! if cx.flag_activated(&parent, method) { cx.visited.remove(&id); return Ok(None); } trace!("activating {}", parent.package_id()); let deps = try!(cx.build_deps(registry, &parent, method)); Ok(Some(DepsFrame{ parent: parent, remaining_siblings: RcVecIter::new(deps), id: id, })) } #[derive(Clone)] struct RcVecIter { vec: Rc>, rest: Range, } impl RcVecIter { fn new(vec: Vec) -> RcVecIter { RcVecIter { rest: 0..vec.len(), vec: Rc::new(vec), } } fn cur_index(&self) -> usize { self.rest.start - 1 } } impl Iterator for RcVecIter where T: Clone { type Item = (usize, T); fn next(&mut self) -> Option<(usize, T)> { self.rest.next().and_then(|i| { self.vec.get(i).map(|val| (i, val.clone())) }) } } #[derive(Clone)] struct DepsFrame { parent: Rc, remaining_siblings: RcVecIter, id: PackageId, } struct BacktrackFrame { context_backup: Context, deps_backup: Vec, remaining_candidates: RcVecIter>, parent: Rc, dep: Dependency, } /// Recursively activates the dependencies for `top`, in depth-first order, /// backtracking across possible candidates for each dependency as necessary. /// /// If all dependencies can be activated and resolved to a version in the /// dependency graph, cx.resolve is returned. 
fn activate_deps_loop(mut cx: Context, registry: &mut Registry, top: Rc, top_method: &Method) -> CargoResult { let mut backtrack_stack = Vec::new(); let mut remaining_deps = Vec::new(); remaining_deps.extend(try!(activate(&mut cx, registry, top, &top_method))); // Main resolution loop, this is the workhorse of the resolution algorithm. // // You'll note that a few stacks are maintained on the side, which might // seem odd when this algorithm looks like it could be implemented // recursively. While correct, this is implemented iteratively to avoid // blowing the stack (the recusion depth is proportional to the size of the // input). // // The general sketch of this loop is to run until there are no dependencies // left to activate, and for each dependency to attempt to activate all of // its own dependencies in turn. The `backtrack_stack` is a side table of // backtracking states where if we hit an error we can return to in order to // attempt to continue resolving. while let Some(mut deps_frame) = remaining_deps.pop() { let frame = match deps_frame.remaining_siblings.next() { Some(sibling) => { let parent = deps_frame.parent.clone(); remaining_deps.push(deps_frame); (parent, sibling) } None => { cx.visited.remove(&deps_frame.id); continue } }; let (mut parent, (mut cur, (mut dep, candidates, features))) = frame; assert!(!remaining_deps.is_empty()); let method = Method::Required { dev_deps: false, features: &features, uses_default_features: dep.uses_default_features(), }; let my_candidates = { let prev_active = cx.prev_active(&dep); trace!("{}[{}]>{} {} candidates", parent.name(), cur, dep.name(), candidates.len()); trace!("{}[{}]>{} {} prev activations", parent.name(), cur, dep.name(), prev_active.len()); // Filter the set of candidates based on the previously activated // versions for this dependency. We can actually use a version if it // precisely matches an activated version or if it is otherwise // incompatible with all other activated versions. 
Note that we // define "compatible" here in terms of the semver sense where if // the left-most nonzero digit is the same they're considered // compatible. candidates.iter().filter(|&b| { prev_active.iter().any(|a| a == b) || prev_active.iter().all(|a| { !compatible(a.version(), b.version()) }) }).cloned().collect() }; // Alright, for each candidate that's gotten this far, it meets the // following requirements: // // 1. The version matches the dependency requirement listed for this // package // 2. There are no activated versions for this package which are // semver-compatible, or there's an activated version which is // precisely equal to `candidate`. // // This means that we're going to attempt to activate each candidate in // turn. We could possibly fail to activate each candidate, so we try // each one in turn. let mut remaining_candidates = RcVecIter::new(my_candidates); let candidate = match remaining_candidates.next() { Some((_, candidate)) => { // We have a candidate. Add an entry to the `backtrack_stack` so // we can try the next one if this one fails. backtrack_stack.push(BacktrackFrame { context_backup: cx.clone(), deps_backup: remaining_deps.clone(), remaining_candidates: remaining_candidates, parent: parent.clone(), dep: dep.clone(), }); candidate } None => { // This dependency has no valid candidate. Backtrack until we // find a dependency that does have a candidate to try, and try // to activate that one. This resets the `remaining_deps` to // their state at the found level of the `backtrack_stack`. 
trace!("{}[{}]>{} -- no candidates", parent.name(), cur, dep.name()); match find_candidate(&mut backtrack_stack, &mut cx, &mut remaining_deps, &mut parent, &mut cur, &mut dep) { None => return Err(activation_error(&cx, registry, &parent, &dep, &cx.prev_active(&dep), &candidates)), Some(candidate) => candidate, } } }; trace!("{}[{}]>{} trying {}", parent.name(), cur, dep.name(), candidate.version()); cx.resolve.graph.link(parent.package_id().clone(), candidate.package_id().clone()); // If we hit an intransitive dependency then clear out the visitation // list as we can't induce a cycle through transitive dependencies. if !dep.is_transitive() { cx.visited.clear(); } remaining_deps.extend(try!(activate(&mut cx, registry, candidate, &method))); } trace!("resolved: {:?}", cx.resolve); Ok(cx.resolve) } // Searches up `backtrack_stack` until it finds a dependency with remaining // candidates. Resets `cx` and `remaining_deps` to that level and returns the // next candidate. If all candidates have been exhausted, returns None. 
fn find_candidate(backtrack_stack: &mut Vec, cx: &mut Context, remaining_deps: &mut Vec, parent: &mut Rc, cur: &mut usize, dep: &mut Dependency) -> Option> { while let Some(mut frame) = backtrack_stack.pop() { if let Some((_, candidate)) = frame.remaining_candidates.next() { *cx = frame.context_backup.clone(); *remaining_deps = frame.deps_backup.clone(); *parent = frame.parent.clone(); *cur = remaining_deps.last().unwrap().remaining_siblings.cur_index(); *dep = frame.dep.clone(); backtrack_stack.push(frame); return Some(candidate) } } return None } #[allow(deprecated)] // connect => join in 1.3 fn activation_error(cx: &Context, registry: &mut Registry, parent: &Summary, dep: &Dependency, prev_active: &[Rc], candidates: &[Rc]) -> Box { if candidates.len() > 0 { let mut msg = format!("failed to select a version for `{}` \ (required by `{}`):\n\ all possible versions conflict with \ previously selected versions of `{}`", dep.name(), parent.name(), dep.name()); 'outer: for v in prev_active.iter() { for node in cx.resolve.graph.iter() { let edges = match cx.resolve.graph.edges(node) { Some(edges) => edges, None => continue, }; for edge in edges { if edge != v.package_id() { continue } msg.push_str(&format!("\n version {} in use by {}", v.version(), edge)); continue 'outer; } } msg.push_str(&format!("\n version {} in use by ??", v.version())); } msg.push_str(&format!("\n possible versions to select: {}", candidates.iter() .map(|v| v.version()) .map(|v| v.to_string()) .collect::>() .connect(", "))); return human(msg) } // Once we're all the way down here, we're definitely lost in the // weeds! We didn't actually use any candidates above, so we need to // give an error message that nothing was found. // // Note that we re-query the registry with a new dependency that // allows any version so we can give some nicer error reporting // which indicates a few versions that were actually found. 
let msg = format!("no matching package named `{}` found \ (required by `{}`)\n\ location searched: {}\n\ version required: {}", dep.name(), parent.name(), dep.source_id(), dep.version_req()); let mut msg = msg; let all_req = semver::VersionReq::parse("*").unwrap(); let new_dep = dep.clone_inner().set_version_req(all_req).into_dependency(); let mut candidates = match registry.query(&new_dep) { Ok(candidates) => candidates, Err(e) => return e, }; candidates.sort_by(|a, b| { b.version().cmp(a.version()) }); if candidates.len() > 0 { msg.push_str("\nversions found: "); for (i, c) in candidates.iter().take(3).enumerate() { if i != 0 { msg.push_str(", "); } msg.push_str(&c.version().to_string()); } if candidates.len() > 3 { msg.push_str(", ..."); } } // If we have a path dependency with a locked version, then this may // indicate that we updated a sub-package and forgot to run `cargo // update`. In this case try to print a helpful error! if dep.source_id().is_path() && dep.version_req().to_string().starts_with("=") && candidates.len() > 0 { msg.push_str("\nconsider running `cargo update` to update \ a path dependency's locked version"); } human(msg) } // Returns if `a` and `b` are compatible in the semver sense. This is a // commutative operation. // // Versions `a` and `b` are compatible if their left-most nonzero digit is the // same. fn compatible(a: &semver::Version, b: &semver::Version) -> bool { if a.major != b.major { return false } if a.major != 0 { return true } if a.minor != b.minor { return false } if a.minor != 0 { return true } a.patch == b.patch } // Returns a pair of (feature dependencies, all used features) // // The feature dependencies map is a mapping of package name to list of features // enabled. Each package should be enabled, and each package should have the // specified set of features enabled. 
// // The all used features set is the set of features which this local package had // enabled, which is later used when compiling to instruct the code what // features were enabled. fn build_features(s: &Summary, method: &Method) -> CargoResult<(HashMap>, HashSet)> { let mut deps = HashMap::new(); let mut used = HashSet::new(); let mut visited = HashSet::new(); match *method { Method::Everything => { for key in s.features().keys() { try!(add_feature(s, key, &mut deps, &mut used, &mut visited)); } for dep in s.dependencies().iter().filter(|d| d.is_optional()) { try!(add_feature(s, dep.name(), &mut deps, &mut used, &mut visited)); } } Method::Required { features: requested_features, .. } => { for feat in requested_features.iter() { try!(add_feature(s, feat, &mut deps, &mut used, &mut visited)); } } } match *method { Method::Everything | Method::Required { uses_default_features: true, .. } => { if s.features().get("default").is_some() { try!(add_feature(s, "default", &mut deps, &mut used, &mut visited)); } } Method::Required { uses_default_features: false, .. } => {} } return Ok((deps, used)); fn add_feature(s: &Summary, feat: &str, deps: &mut HashMap>, used: &mut HashSet, visited: &mut HashSet) -> CargoResult<()> { if feat.is_empty() { return Ok(()) } // If this feature is of the form `foo/bar`, then we just lookup package // `foo` and enable its feature `bar`. Otherwise this feature is of the // form `foo` and we need to recurse to enable the feature `foo` for our // own package, which may end up enabling more features or just enabling // a dependency. 
let mut parts = feat.splitn(2, '/'); let feat_or_package = parts.next().unwrap(); match parts.next() { Some(feat) => { let package = feat_or_package; used.insert(package.to_string()); deps.entry(package.to_string()) .or_insert(Vec::new()) .push(feat.to_string()); } None => { let feat = feat_or_package; if !visited.insert(feat.to_string()) { bail!("Cyclic feature dependency: feature `{}` depends \ on itself", feat) } used.insert(feat.to_string()); match s.features().get(feat) { Some(recursive) => { for f in recursive { try!(add_feature(s, f, deps, used, visited)); } } None => { deps.entry(feat.to_string()).or_insert(Vec::new()); } } visited.remove(&feat.to_string()); } } Ok(()) } } impl Context { // Activate this summary by inserting it into our list of known activations. // // Returns if this summary with the given method is already activated. fn flag_activated(&mut self, summary: &Rc, method: &Method) -> bool { let id = summary.package_id(); let key = (id.name().to_string(), id.source_id().clone()); let prev = self.activations.entry(key).or_insert(Vec::new()); if !prev.iter().any(|c| c == summary) { self.resolve.graph.add(id.clone(), &[]); prev.push(summary.clone()); return false } debug!("checking if {} is already activated", summary.package_id()); let (features, use_default) = match *method { Method::Required { features, uses_default_features, .. } => { (features, uses_default_features) } Method::Everything => return false, }; let has_default_feature = summary.features().contains_key("default"); match self.resolve.features(id) { Some(prev) => { features.iter().all(|f| prev.contains(f)) && (!use_default || prev.contains("default") || !has_default_feature) } None => features.len() == 0 && (!use_default || !has_default_feature) } } fn build_deps(&mut self, registry: &mut Registry, parent: &Summary, method: &Method) -> CargoResult> { // First, figure out our set of dependencies based on the requsted set // of features. 
This also calculates what features we're going to enable // for our own dependencies. let deps = try!(self.resolve_features(parent, method)); // Next, transform all dependencies into a list of possible candidates // which can satisfy that dependency. let mut deps = try!(deps.into_iter().map(|(dep, features)| { let mut candidates = try!(registry.query(&dep)); // When we attempt versions for a package, we'll want to start at // the maximum version and work our way down. candidates.sort_by(|a, b| { b.version().cmp(a.version()) }); let candidates = candidates.into_iter().map(Rc::new).collect(); Ok((dep, candidates, features)) }).collect::>>()); // Attempt to resolve dependencies with fewer candidates before trying // dependencies with more candidates. This way if the dependency with // only one candidate can't be resolved we don't have to do a bunch of // work before we figure that out. deps.sort_by(|&(_, ref a, _), &(_, ref b, _)| { a.len().cmp(&b.len()) }); Ok(deps) } fn prev_active(&self, dep: &Dependency) -> &[Rc] { let key = (dep.name().to_string(), dep.source_id().clone()); self.activations.get(&key).map(|v| &v[..]).unwrap_or(&[]) } #[allow(deprecated)] // connect => join in 1.3 fn resolve_features(&mut self, parent: &Summary, method: &Method) -> CargoResult)>> { let dev_deps = match *method { Method::Everything => true, Method::Required { dev_deps, .. 
} => dev_deps, }; // First, filter by dev-dependencies let deps = parent.dependencies(); let deps = deps.iter().filter(|d| d.is_transitive() || dev_deps); let (mut feature_deps, used_features) = try!(build_features(parent, method)); let mut ret = Vec::new(); // Next, sanitize all requested features by whitelisting all the // requested features that correspond to optional dependencies for dep in deps { // weed out optional dependencies, but not those required if dep.is_optional() && !feature_deps.contains_key(dep.name()) { continue } let mut base = feature_deps.remove(dep.name()).unwrap_or(vec![]); for feature in dep.features().iter() { base.push(feature.clone()); if feature.contains("/") { bail!("features in dependencies cannot enable features in \ other dependencies: `{}`", feature) } } ret.push((dep.clone(), base)); } // All features can only point to optional dependencies, in which case // they should have all been weeded out by the above iteration. Any // remaining features are bugs in that the package does not actually // have those features. if feature_deps.len() > 0 { let unknown = feature_deps.keys().map(|s| &s[..]) .collect::>(); if unknown.len() > 0 { let features = unknown.connect(", "); bail!("Package `{}` does not have these features: `{}`", parent.package_id(), features) } } // Record what list of features is active for this package. 
if used_features.len() > 0 { let pkgid = parent.package_id(); self.resolve.features.entry(pkgid.clone()) .or_insert(HashSet::new()) .extend(used_features); } Ok(ret) } } cargo-0.8.0/src/cargo/core/shell.rs000066400000000000000000000156771264656333200171330ustar00rootroot00000000000000use std::fmt; use std::io::prelude::*; use std::io; use term::Attr; use term::color::{Color, BLACK, RED, GREEN, YELLOW}; use term::{Terminal, TerminfoTerminal, color}; use self::AdequateTerminal::{NoColor, Colored}; use self::Verbosity::{Verbose, Normal, Quiet}; use self::ColorConfig::{Auto, Always, Never}; use util::errors::CargoResult; #[derive(Clone, Copy, PartialEq)] pub enum Verbosity { Verbose, Normal, Quiet } #[derive(Clone, Copy, PartialEq)] pub enum ColorConfig { Auto, Always, Never } #[derive(Clone, Copy)] pub struct ShellConfig { pub color_config: ColorConfig, pub tty: bool } enum AdequateTerminal { NoColor(Box), Colored(Box> + Send>) } pub struct Shell { terminal: AdequateTerminal, config: ShellConfig, } pub struct MultiShell { out: Shell, err: Shell, verbosity: Verbosity } impl MultiShell { pub fn new(out: Shell, err: Shell, verbosity: Verbosity) -> MultiShell { MultiShell { out: out, err: err, verbosity: verbosity } } pub fn out(&mut self) -> &mut Shell { &mut self.out } pub fn err(&mut self) -> &mut Shell { &mut self.err } pub fn say(&mut self, message: T, color: Color) -> io::Result<()> { match self.verbosity { Quiet => Ok(()), _ => self.out().say(message, color) } } pub fn status(&mut self, status: T, message: U) -> io::Result<()> where T: fmt::Display, U: fmt::Display { match self.verbosity { Quiet => Ok(()), _ => self.out().say_status(status, message, GREEN) } } pub fn verbose(&mut self, mut callback: F) -> io::Result<()> where F: FnMut(&mut MultiShell) -> io::Result<()> { match self.verbosity { Verbose => return callback(self), _ => Ok(()) } } pub fn concise(&mut self, mut callback: F) -> io::Result<()> where F: FnMut(&mut MultiShell) -> io::Result<()> { match 
self.verbosity { Verbose => Ok(()), _ => return callback(self) } } pub fn error(&mut self, message: T) -> io::Result<()> { self.err().say(message, RED) } pub fn warn(&mut self, message: T) -> io::Result<()> { self.err().say(message, YELLOW) } pub fn set_verbosity(&mut self, verbose: bool, quiet: bool) -> CargoResult<()> { self.verbosity = match (verbose, quiet) { (true, true) => bail!("cannot set both --verbose and --quiet"), (true, false) => Verbose, (false, true) => Quiet, (false, false) => Normal }; Ok(()) } /// shortcut for commands that don't have both --verbose and --quiet pub fn set_verbose(&mut self, verbose: bool) { if verbose { self.verbosity = Verbose; } else { self.verbosity = Normal; } } pub fn set_color_config(&mut self, color: Option<&str>) -> CargoResult<()> { self.out.set_color_config(match color { Some("auto") => Auto, Some("always") => Always, Some("never") => Never, None => Auto, Some(arg) => bail!("argument for --color must be auto, always, or \ never, but found `{}`", arg), }); Ok(()) } pub fn get_verbose(&self) -> Verbosity { self.verbosity } } impl Shell { pub fn create(out: Box, config: ShellConfig) -> Shell { // Match from_env() to determine if creation of a TerminfoTerminal is possible regardless // of the tty status. --color options are parsed after Shell creation so always try to // create a terminal that supports color output. Fall back to a no-color terminal or write // output to stderr if a tty is present and color output is not possible. match ::term::terminfo::TermInfo::from_env() { Ok(ti) => { // Color output is possible. Shell { terminal: Colored(Box::new(TerminfoTerminal::new_with_terminfo(out, ti))), config: config } } _ if config.tty => { // Color output is expected but not available, fall back to stderr. Shell { terminal: NoColor(Box::new(io::stderr())), config: config } } _ => { // No color output. 
Shell { terminal: NoColor(out), config: config } } } } pub fn set_color_config(&mut self, color_config: ColorConfig) { self.config.color_config = color_config; } pub fn say(&mut self, message: T, color: Color) -> io::Result<()> { try!(self.reset()); if color != BLACK { try!(self.fg(color)); } try!(write!(self, "{}\n", message.to_string())); try!(self.reset()); try!(self.flush()); Ok(()) } pub fn say_status(&mut self, status: T, message: U, color: Color) -> io::Result<()> where T: fmt::Display, U: fmt::Display { try!(self.reset()); if color != BLACK { try!(self.fg(color)); } if self.supports_attr(Attr::Bold) { try!(self.attr(Attr::Bold)); } try!(write!(self, "{:>12}", status.to_string())); try!(self.reset()); try!(write!(self, " {}\n", message)); try!(self.flush()); Ok(()) } fn fg(&mut self, color: color::Color) -> io::Result { let colored = self.colored(); match self.terminal { Colored(ref mut c) if colored => c.fg(color), _ => Ok(false) } } fn attr(&mut self, attr: Attr) -> io::Result { let colored = self.colored(); match self.terminal { Colored(ref mut c) if colored => c.attr(attr), _ => Ok(false) } } fn supports_attr(&self, attr: Attr) -> bool { let colored = self.colored(); match self.terminal { Colored(ref c) if colored => c.supports_attr(attr), _ => false } } fn reset(&mut self) -> io::Result<()> { let colored = self.colored(); match self.terminal { Colored(ref mut c) if colored => c.reset().map(|_| ()), _ => Ok(()) } } fn colored(&self) -> bool { self.config.tty && Auto == self.config.color_config || Always == self.config.color_config } } impl Write for Shell { fn write(&mut self, buf: &[u8]) -> io::Result { match self.terminal { Colored(ref mut c) => c.write(buf), NoColor(ref mut n) => n.write(buf) } } fn flush(&mut self) -> io::Result<()> { match self.terminal { Colored(ref mut c) => c.flush(), NoColor(ref mut n) => n.flush() } } } cargo-0.8.0/src/cargo/core/source.rs000066400000000000000000000366001264656333200173110ustar00rootroot00000000000000use 
std::cmp::{self, Ordering}; use std::collections::hash_map::{HashMap, Values, IterMut}; use std::fmt::{self, Formatter}; use std::hash; use std::mem; use std::path::Path; use std::sync::Arc; use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; use url::Url; use core::{Summary, Package, PackageId, Registry, Dependency}; use sources::{PathSource, GitSource, RegistrySource}; use sources::git; use util::{human, Config, CargoResult, ToUrl}; /// A Source finds and downloads remote packages based on names and /// versions. pub trait Source: Registry { /// The update method performs any network operations required to /// get the entire list of all names, versions and dependencies of /// packages managed by the Source. fn update(&mut self) -> CargoResult<()>; /// The download method fetches the full package for each name and /// version specified. fn download(&mut self, packages: &[PackageId]) -> CargoResult<()>; /// The get method returns the Path of each specified package on the /// local file system. It assumes that `download` was already called, /// and that the packages are already locally available on the file /// system. fn get(&self, packages: &[PackageId]) -> CargoResult>; /// Generates a unique string which represents the fingerprint of the /// current state of the source. /// /// This fingerprint is used to determine the "fresheness" of the source /// later on. It must be guaranteed that the fingerprint of a source is /// constant if and only if the output product will remain constant. /// /// The `pkg` argument is the package which this fingerprint should only be /// interested in for when this source may contain multiple packages. 
fn fingerprint(&self, pkg: &Package) -> CargoResult; } #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] enum Kind { /// Kind::Git() represents a git repository Git(GitReference), /// represents a local path Path, /// represents the central registry Registry, } #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum GitReference { Tag(String), Branch(String), Rev(String), } /// Unique identifier for a source of packages. #[derive(Clone, Eq, Debug)] pub struct SourceId { inner: Arc, } #[derive(Eq, Clone, Debug)] struct SourceIdInner { url: Url, canonical_url: Url, kind: Kind, // e.g. the exact git revision of the specified branch for a Git Source precise: Option } impl SourceId { fn new(kind: Kind, url: Url) -> SourceId { SourceId { inner: Arc::new(SourceIdInner { kind: kind, canonical_url: git::canonicalize_url(&url), url: url, precise: None, }), } } /// Parses a source URL and returns the corresponding ID. /// /// ## Example /// /// ``` /// use cargo::core::SourceId; /// SourceId::from_url("git+https://github.com/alexcrichton/\ /// libssh2-static-sys#80e71a3021618eb05\ /// 656c58fb7c5ef5f12bc747f".to_string()); /// ``` pub fn from_url(string: String) -> SourceId { let mut parts = string.splitn(2, '+'); let kind = parts.next().unwrap(); let url = parts.next().unwrap(); match kind { "git" => { let mut url = url.to_url().unwrap(); let mut reference = GitReference::Branch("master".to_string()); let pairs = url.query_pairs().unwrap_or(Vec::new()); for &(ref k, ref v) in pairs.iter() { match &k[..] 
{ // map older 'ref' to branch "branch" | "ref" => reference = GitReference::Branch(v.clone()), "rev" => reference = GitReference::Rev(v.clone()), "tag" => reference = GitReference::Tag(v.clone()), _ => {} } } url.query = None; let precise = mem::replace(&mut url.fragment, None); SourceId::for_git(&url, reference) .with_precise(precise) }, "registry" => { let url = url.to_url().unwrap(); SourceId::new(Kind::Registry, url) .with_precise(Some("locked".to_string())) } "path" => { let url = url.to_url().unwrap(); SourceId::new(Kind::Path, url) } _ => panic!("Unsupported serialized SourceId") } } pub fn to_url(&self) -> String { match *self.inner { SourceIdInner { kind: Kind::Path, ref url, .. } => { format!("path+{}", url) } SourceIdInner { kind: Kind::Git(ref reference), ref url, ref precise, .. } => { let ref_str = url_ref(reference); let precise_str = if precise.is_some() { format!("#{}", precise.as_ref().unwrap()) } else { "".to_string() }; format!("git+{}{}{}", url, ref_str, precise_str) }, SourceIdInner { kind: Kind::Registry, ref url, .. } => { format!("registry+{}", url) } } } // Pass absolute path pub fn for_path(path: &Path) -> CargoResult { let url = try!(path.to_url().map_err(human)); Ok(SourceId::new(Kind::Path, url)) } pub fn for_git(url: &Url, reference: GitReference) -> SourceId { SourceId::new(Kind::Git(reference), url.clone()) } pub fn for_registry(url: &Url) -> SourceId { SourceId::new(Kind::Registry, url.clone()) } /// Returns the `SourceId` corresponding to the main repository. /// /// This is the main cargo registry by default, but it can be overridden in /// a `.cargo/config`. 
pub fn for_central(config: &Config) -> CargoResult { Ok(SourceId::for_registry(&try!(RegistrySource::url(config)))) } pub fn url(&self) -> &Url { &self.inner.url } pub fn is_path(&self) -> bool { self.inner.kind == Kind::Path } pub fn is_registry(&self) -> bool { self.inner.kind == Kind::Registry } pub fn is_git(&self) -> bool { match self.inner.kind { Kind::Git(_) => true, _ => false } } /// Creates an implementation of `Source` corresponding to this ID. pub fn load<'a>(&self, config: &'a Config) -> Box { trace!("loading SourceId; {}", self); match self.inner.kind { Kind::Git(..) => Box::new(GitSource::new(self, config)), Kind::Path => { let path = match self.inner.url.to_file_path() { Ok(p) => p, Err(()) => panic!("path sources cannot be remote"), }; Box::new(PathSource::new(&path, self, config)) } Kind::Registry => Box::new(RegistrySource::new(self, config)), } } pub fn precise(&self) -> Option<&str> { self.inner.precise.as_ref().map(|s| &s[..]) } pub fn git_reference(&self) -> Option<&GitReference> { match self.inner.kind { Kind::Git(ref s) => Some(s), _ => None, } } pub fn with_precise(&self, v: Option) -> SourceId { SourceId { inner: Arc::new(SourceIdInner { precise: v, .. 
(*self.inner).clone() }), } } pub fn is_default_registry(&self) -> bool { match self.inner.kind { Kind::Registry => {} _ => return false, } self.inner.url.to_string() == RegistrySource::default_url() } } impl PartialEq for SourceId { fn eq(&self, other: &SourceId) -> bool { (*self.inner).eq(&*other.inner) } } impl PartialOrd for SourceId { fn partial_cmp(&self, other: &SourceId) -> Option { Some(self.cmp(other)) } } impl Ord for SourceId { fn cmp(&self, other: &SourceId) -> Ordering { self.inner.cmp(&other.inner) } } impl Encodable for SourceId { fn encode(&self, s: &mut S) -> Result<(), S::Error> { if self.is_path() { s.emit_option_none() } else { self.to_url().encode(s) } } } impl Decodable for SourceId { fn decode(d: &mut D) -> Result { let string: String = Decodable::decode(d).ok().expect("Invalid encoded SourceId"); Ok(SourceId::from_url(string)) } } impl fmt::Display for SourceId { fn fmt(&self, f: &mut Formatter) -> fmt::Result { match *self.inner { SourceIdInner { kind: Kind::Path, ref url, .. } => { fmt::Display::fmt(url, f) } SourceIdInner { kind: Kind::Git(ref reference), ref url, ref precise, .. } => { try!(write!(f, "{}{}", url, url_ref(reference))); if let Some(ref s) = *precise { let len = cmp::min(s.len(), 8); try!(write!(f, "#{}", &s[..len])); } Ok(()) } SourceIdInner { kind: Kind::Registry, ref url, .. } => { write!(f, "registry {}", url) } } } } // This custom implementation handles situations such as when two git sources // point at *almost* the same URL, but not quite, even when they actually point // to the same repository. 
impl PartialEq for SourceIdInner { fn eq(&self, other: &SourceIdInner) -> bool { if self.kind != other.kind { return false } if self.url == other.url { return true } match (&self.kind, &other.kind) { (&Kind::Git(ref ref1), &Kind::Git(ref ref2)) => { ref1 == ref2 && self.canonical_url == other.canonical_url } _ => false, } } } impl PartialOrd for SourceIdInner { fn partial_cmp(&self, other: &SourceIdInner) -> Option { Some(self.cmp(other)) } } impl Ord for SourceIdInner { fn cmp(&self, other: &SourceIdInner) -> Ordering { match self.kind.cmp(&other.kind) { Ordering::Equal => {} ord => return ord, } match self.url.cmp(&other.url) { Ordering::Equal => {} ord => return ord, } match (&self.kind, &other.kind) { (&Kind::Git(ref ref1), &Kind::Git(ref ref2)) => { (ref1, &self.canonical_url).cmp(&(ref2, &other.canonical_url)) } _ => self.kind.cmp(&other.kind) } } } impl hash::Hash for SourceId { fn hash(&self, into: &mut S) { self.inner.kind.hash(into); match *self.inner { SourceIdInner { kind: Kind::Git(..), ref canonical_url, .. 
} => { canonical_url.hash(into) } _ => self.inner.url.hash(into), } } } fn url_ref(r: &GitReference) -> String { match r.to_ref_string() { None => "".to_string(), Some(s) => format!("?{}", s), } } impl GitReference { pub fn to_ref_string(&self) -> Option { match *self { GitReference::Branch(ref s) => { if *s == "master" { None } else { Some(format!("branch={}", s)) } } GitReference::Tag(ref s) => Some(format!("tag={}", s)), GitReference::Rev(ref s) => Some(format!("rev={}", s)), } } } pub struct SourceMap<'src> { map: HashMap> } pub type Sources<'a, 'src> = Values<'a, SourceId, Box>; pub struct SourcesMut<'a, 'src: 'a> { inner: IterMut<'a, SourceId, Box>, } impl<'src> SourceMap<'src> { pub fn new() -> SourceMap<'src> { SourceMap { map: HashMap::new() } } pub fn contains(&self, id: &SourceId) -> bool { self.map.contains_key(id) } pub fn get(&self, id: &SourceId) -> Option<&(Source+'src)> { let source = self.map.get(id); source.map(|s| { let s: &(Source+'src) = &**s; s }) } pub fn get_mut(&mut self, id: &SourceId) -> Option<&mut (Source+'src)> { self.map.get_mut(id).map(|s| { let s: &mut (Source+'src) = &mut **s; s }) } pub fn get_by_package_id(&self, pkg_id: &PackageId) -> Option<&(Source+'src)> { self.get(pkg_id.source_id()) } pub fn insert(&mut self, id: &SourceId, source: Box) { self.map.insert(id.clone(), source); } pub fn len(&self) -> usize { self.map.len() } pub fn sources<'a>(&'a self) -> Sources<'a, 'src> { self.map.values() } pub fn sources_mut<'a>(&'a mut self) -> SourcesMut<'a, 'src> { SourcesMut { inner: self.map.iter_mut() } } } impl<'a, 'src> Iterator for SourcesMut<'a, 'src> { type Item = (&'a SourceId, &'a mut (Source + 'src)); fn next(&mut self) -> Option<(&'a SourceId, &'a mut (Source + 'src))> { self.inner.next().map(|(a, b)| (a, &mut **b)) } } /// List of `Source` implementors. `SourceSet` itself implements `Source`. 
pub struct SourceSet<'src> { sources: Vec> } impl<'src> SourceSet<'src> { pub fn new(sources: Vec>) -> SourceSet<'src> { SourceSet { sources: sources } } } impl<'src> Registry for SourceSet<'src> { fn query(&mut self, name: &Dependency) -> CargoResult> { let mut ret = Vec::new(); for source in self.sources.iter_mut() { ret.extend(try!(source.query(name)).into_iter()); } Ok(ret) } } impl<'src> Source for SourceSet<'src> { fn update(&mut self) -> CargoResult<()> { for source in self.sources.iter_mut() { try!(source.update()); } Ok(()) } fn download(&mut self, packages: &[PackageId]) -> CargoResult<()> { for source in self.sources.iter_mut() { try!(source.download(packages)); } Ok(()) } fn get(&self, packages: &[PackageId]) -> CargoResult> { let mut ret = Vec::new(); for source in self.sources.iter() { ret.extend(try!(source.get(packages)).into_iter()); } Ok(ret) } fn fingerprint(&self, id: &Package) -> CargoResult { let mut ret = String::new(); for source in self.sources.iter() { ret.push_str(&try!(source.fingerprint(id))[..]); } Ok(ret) } } #[cfg(test)] mod tests { use super::{SourceId, Kind, GitReference}; use util::ToUrl; #[test] fn github_sources_equal() { let loc = "https://github.com/foo/bar".to_url().unwrap(); let master = Kind::Git(GitReference::Branch("master".to_string())); let s1 = SourceId::new(master.clone(), loc); let loc = "git://github.com/foo/bar".to_url().unwrap(); let s2 = SourceId::new(master, loc.clone()); assert_eq!(s1, s2); let foo = Kind::Git(GitReference::Branch("foo".to_string())); let s3 = SourceId::new(foo, loc); assert!(s1 != s3); } } cargo-0.8.0/src/cargo/core/summary.rs000066400000000000000000000070511264656333200175040ustar00rootroot00000000000000use std::collections::HashMap; use std::mem; use semver::Version; use core::{Dependency, PackageId, SourceId}; use util::CargoResult; /// Subset of a `Manifest`. Contains only the most important informations about /// a package. 
/// /// Summaries are cloned, and should not be mutated after creation #[derive(Debug,Clone)] pub struct Summary { package_id: PackageId, dependencies: Vec, features: HashMap>, } impl Summary { pub fn new(pkg_id: PackageId, dependencies: Vec, features: HashMap>) -> CargoResult { for dep in dependencies.iter() { if features.get(dep.name()).is_some() { bail!("Features and dependencies cannot have the \ same name: `{}`", dep.name()) } if dep.is_optional() && !dep.is_transitive() { bail!("Dev-dependencies are not allowed to be optional: `{}`", dep.name()) } } for (feature, list) in features.iter() { for dep in list.iter() { let mut parts = dep.splitn(2, '/'); let dep = parts.next().unwrap(); let is_reexport = parts.next().is_some(); if !is_reexport && features.get(dep).is_some() { continue } match dependencies.iter().find(|d| d.name() == dep) { Some(d) => { if d.is_optional() || is_reexport { continue } bail!("Feature `{}` depends on `{}` which is not an \ optional dependency.\nConsider adding \ `optional = true` to the dependency", feature, dep) } None if is_reexport => { bail!("Feature `{}` requires `{}` which is not an \ optional dependency", feature, dep) } None => { bail!("Feature `{}` includes `{}` which is neither \ a dependency nor another feature", feature, dep) } } } } Ok(Summary { package_id: pkg_id, dependencies: dependencies, features: features, }) } pub fn package_id(&self) -> &PackageId { &self.package_id } pub fn name(&self) -> &str { self.package_id().name() } pub fn version(&self) -> &Version { self.package_id().version() } pub fn source_id(&self) -> &SourceId { self.package_id.source_id() } pub fn dependencies(&self) -> &[Dependency] { &self.dependencies } pub fn features(&self) -> &HashMap> { &self.features } pub fn override_id(mut self, id: PackageId) -> Summary { self.package_id = id; self } pub fn map_dependencies(mut self, f: F) -> Summary where F: FnMut(Dependency) -> Dependency { let deps = mem::replace(&mut self.dependencies, Vec::new()); 
self.dependencies = deps.into_iter().map(f).collect(); self } } impl PartialEq for Summary { fn eq(&self, other: &Summary) -> bool { self.package_id == other.package_id } } pub trait SummaryVec { fn names(&self) -> Vec; } impl SummaryVec for Vec { // TODO: Move to Registry fn names(&self) -> Vec { self.iter().map(|summary| summary.name().to_string()).collect() } } cargo-0.8.0/src/cargo/lib.rs000066400000000000000000000204271264656333200156270ustar00rootroot00000000000000#![deny(unused)] #![cfg_attr(test, deny(warnings))] #[cfg(test)] extern crate hamcrest; #[macro_use] extern crate log; extern crate crates_io as registry; extern crate crossbeam; extern crate curl; extern crate docopt; extern crate filetime; extern crate flate2; extern crate git2; extern crate glob; extern crate libc; extern crate libgit2_sys; extern crate num_cpus; extern crate regex; extern crate rustc_serialize; extern crate semver; extern crate tar; extern crate term; extern crate time; extern crate toml; extern crate url; use std::env; use std::error::Error; use std::io::prelude::*; use std::io; use rustc_serialize::{Decodable, Encodable}; use rustc_serialize::json::{self, Json}; use docopt::Docopt; use core::{Shell, MultiShell, ShellConfig, Verbosity, ColorConfig}; use core::shell::Verbosity::{Verbose}; use core::shell::ColorConfig::{Auto}; use term::color::{BLACK, RED}; pub use util::{CargoError, CliError, CliResult, human, Config, ChainError}; macro_rules! 
bail { ($($fmt:tt)*) => ( return Err(::util::human(&format_args!($($fmt)*))) ) } pub mod core; pub mod ops; pub mod sources; pub mod util; pub fn execute_main( exec: fn(T, U, &Config) -> CliResult>, options_first: bool, usage: &str) where V: Encodable, T: Decodable, U: Decodable { process::(|rest, shell| { call_main(exec, shell, usage, rest, options_first) }); } pub fn call_main( exec: fn(T, U, &Config) -> CliResult>, shell: &Config, usage: &str, args: &[String], options_first: bool) -> CliResult> where V: Encodable, T: Decodable, U: Decodable { let flags = try!(flags_from_args::(usage, args, options_first)); let json = try!(json_from_stdin::()); exec(flags, json, shell) } pub fn execute_main_without_stdin( exec: fn(T, &Config) -> CliResult>, options_first: bool, usage: &str) where V: Encodable, T: Decodable { process::(|rest, shell| { call_main_without_stdin(exec, shell, usage, rest, options_first) }); } pub fn call_main_without_stdin( exec: fn(T, &Config) -> CliResult>, shell: &Config, usage: &str, args: &[String], options_first: bool) -> CliResult> where V: Encodable, T: Decodable { let flags = try!(flags_from_args::(usage, args, options_first)); exec(flags, shell) } fn process(mut callback: F) where F: FnMut(&[String], &Config) -> CliResult>, V: Encodable { let mut config = None; let result = (|| { config = Some(try!(Config::default())); let args: Vec<_> = try!(env::args_os().map(|s| { s.into_string().map_err(|s| { human(format!("invalid unicode in argument: {:?}", s)) }) }).collect()); callback(&args, config.as_ref().unwrap()) })(); let mut verbose_shell = shell(Verbose, Auto); let mut shell = config.as_ref().map(|s| s.shell()); let shell = shell.as_mut().map(|s| &mut **s).unwrap_or(&mut verbose_shell); process_executed(result, shell) } pub fn process_executed(result: CliResult>, shell: &mut MultiShell) where T: Encodable { match result { Err(e) => handle_error(e, shell), Ok(Some(encodable)) => { let encoded = json::encode(&encodable).unwrap(); println!("{}", 
encoded); } Ok(None) => {} } } pub fn shell(verbosity: Verbosity, color_config: ColorConfig) -> MultiShell { enum Output { Stdout, Stderr, } let tty = isatty(Output::Stderr); let stderr = Box::new(io::stderr()); let config = ShellConfig { color_config: color_config, tty: tty }; let err = Shell::create(stderr, config); let tty = isatty(Output::Stdout); let stdout = Box::new(io::stdout()); let config = ShellConfig { color_config: color_config, tty: tty }; let out = Shell::create(stdout, config); return MultiShell::new(out, err, verbosity); #[cfg(unix)] fn isatty(output: Output) -> bool { let fd = match output { Output::Stdout => libc::STDOUT_FILENO, Output::Stderr => libc::STDERR_FILENO, }; unsafe { libc::isatty(fd) != 0 } } #[cfg(windows)] fn isatty(output: Output) -> bool { extern crate kernel32; extern crate winapi; let handle = match output { Output::Stdout => winapi::winbase::STD_OUTPUT_HANDLE, Output::Stderr => winapi::winbase::STD_ERROR_HANDLE, }; unsafe { let handle = kernel32::GetStdHandle(handle); let mut out = 0; kernel32::GetConsoleMode(handle, &mut out) != 0 } } } // `output` print variant error strings to either stderr or stdout. // For fatal errors, print to stderr; // and for others, e.g. docopt version info, print to stdout. 
fn output(err: String, shell: &mut MultiShell, fatal: bool) { let std_shell = if fatal {shell.err()} else {shell.out()}; let color = if fatal {RED} else {BLACK}; let _ = std_shell.say(err, color); } pub fn handle_error(err: CliError, shell: &mut MultiShell) { debug!("handle_error; err={:?}", err); let CliError { error, exit_code, unknown } = err; let fatal = exit_code != 0; // exit_code == 0 is non-fatal error let hide = unknown && shell.get_verbose() != Verbose; if hide { let _ = shell.err().say("An unknown error occurred", RED); } else { output(error.to_string(), shell, fatal); } if !handle_cause(&error, shell) || hide { let _ = shell.err().say("\nTo learn more, run the command again \ with --verbose.".to_string(), BLACK); } std::process::exit(exit_code); } fn handle_cause(mut cargo_err: &CargoError, shell: &mut MultiShell) -> bool { let verbose = shell.get_verbose(); let mut err; loop { cargo_err = match cargo_err.cargo_cause() { Some(cause) => cause, None => { err = cargo_err.cause(); break } }; if verbose != Verbose && !cargo_err.is_human() { return false } print(cargo_err.to_string(), shell); } loop { let cause = match err { Some(err) => err, None => return true }; if verbose != Verbose { return false } print(cause.to_string(), shell); err = cause.cause(); } fn print(error: String, shell: &mut MultiShell) { let _ = shell.err().say("\nCaused by:", BLACK); let _ = shell.err().say(format!(" {}", error), BLACK); } } pub fn version() -> String { format!("cargo {}", match option_env!("CFG_VERSION") { Some(s) => s.to_string(), None => format!("{}.{}.{}{}", env!("CARGO_PKG_VERSION_MAJOR"), env!("CARGO_PKG_VERSION_MINOR"), env!("CARGO_PKG_VERSION_PATCH"), option_env!("CARGO_PKG_VERSION_PRE").unwrap_or("")) }) } fn flags_from_args<'a, T>(usage: &str, args: &[String], options_first: bool) -> CliResult where T: Decodable { let docopt = Docopt::new(usage).unwrap() .options_first(options_first) .argv(args.iter().map(|s| &s[..])) .help(true) .version(Some(version())); 
docopt.decode().map_err(|e| { let code = if e.fatal() {1} else {0}; CliError::from_error(human(e.to_string()), code) }) } fn json_from_stdin() -> CliResult { let mut reader = io::stdin(); let mut input = String::new(); try!(reader.read_to_string(&mut input).map_err(|_| { CliError::new("Standard in did not exist or was not UTF-8", 1) })); let json = try!(Json::from_str(&input).map_err(|_| { CliError::new("Could not parse standard in as JSON", 1) })); let mut decoder = json::Decoder::new(json); Decodable::decode(&mut decoder).map_err(|_| { CliError::new("Could not process standard in as input", 1) }) } cargo-0.8.0/src/cargo/ops/000077500000000000000000000000001264656333200153075ustar00rootroot00000000000000cargo-0.8.0/src/cargo/ops/cargo_clean.rs000066400000000000000000000073441264656333200201220ustar00rootroot00000000000000use std::default::Default; use std::fs; use std::path::Path; use core::{Package, PackageSet, Profiles}; use core::source::{Source, SourceMap}; use core::registry::PackageRegistry; use util::{CargoResult, human, ChainError, Config}; use ops::{self, Layout, Context, BuildConfig, Kind, Unit}; pub struct CleanOptions<'a> { pub spec: &'a [String], pub target: Option<&'a str>, pub config: &'a Config, pub release: bool, } /// Cleans the project from build artifacts. pub fn clean(manifest_path: &Path, opts: &CleanOptions) -> CargoResult<()> { let root = try!(Package::for_path(manifest_path, opts.config)); let target_dir = opts.config.target_dir(&root); // If we have a spec, then we need to delete some packages, otherwise, just // remove the whole target directory and be done with it! 
if opts.spec.len() == 0 { return rm_rf(&target_dir); } // Load the lockfile (if one's available) let lockfile = root.root().join("Cargo.lock"); let source_id = root.package_id().source_id(); let resolve = match try!(ops::load_lockfile(&lockfile, source_id)) { Some(resolve) => resolve, None => bail!("a Cargo.lock must exist before cleaning") }; // Create a compilation context to have access to information like target // filenames and such let srcs = SourceMap::new(); let pkgs = PackageSet::new(&[]); let dest = if opts.release {"release"} else {"debug"}; let host_layout = Layout::new(opts.config, &root, None, dest); let target_layout = opts.target.map(|target| { Layout::new(opts.config, &root, Some(target), dest) }); let cx = try!(Context::new(&resolve, &srcs, &pkgs, opts.config, host_layout, target_layout, BuildConfig::default(), root.manifest().profiles())); let mut registry = PackageRegistry::new(opts.config); // resolve package specs and remove the corresponding packages for spec in opts.spec { let pkgid = try!(resolve.query(spec)); // Translate the PackageId to a Package let pkg = { try!(registry.add_sources(&[pkgid.source_id().clone()])); (try!(registry.get(&[pkgid.clone()]))).into_iter().next().unwrap() }; // And finally, clean everything out! 
for target in pkg.targets() { for kind in [Kind::Host, Kind::Target].iter() { let layout = cx.layout(&pkg, *kind); try!(rm_rf(&layout.proxy().fingerprint(&pkg))); try!(rm_rf(&layout.build(&pkg))); let Profiles { ref release, ref dev, ref test, ref bench, ref doc, ref custom_build, } = *root.manifest().profiles(); for profile in [release, dev, test, bench, doc, custom_build].iter() { let unit = Unit { pkg: &pkg, target: target, profile: profile, kind: *kind, }; let root = cx.out_dir(&unit); for filename in try!(cx.target_filenames(&unit)).iter() { try!(rm_rf(&root.join(&filename))); } } } } } Ok(()) } fn rm_rf(path: &Path) -> CargoResult<()> { let m = fs::metadata(path); if m.as_ref().map(|s| s.is_dir()).unwrap_or(false) { try!(fs::remove_dir_all(path).chain_error(|| { human("could not remove build directory") })); } else if m.is_ok() { try!(fs::remove_file(path).chain_error(|| { human("failed to remove build artifact") })); } Ok(()) } cargo-0.8.0/src/cargo/ops/cargo_compile.rs000066400000000000000000000465631264656333200204760ustar00rootroot00000000000000//! //! Cargo compile currently does the following steps: //! //! All configurations are already injected as environment variables via the //! main cargo command //! //! 1. Read the manifest //! 2. Shell out to `cargo-resolve` with a list of dependencies and sources as //! stdin //! //! a. Shell out to `--do update` and `--do list` for each source //! b. Resolve dependencies and return a list of name/version/source //! //! 3. Shell out to `--do download` for each source //! 4. Shell out to `--do get` for each source, and build up the list of paths //! to pass to rustc -L //! 5. Call `cargo-rustc` with the results of the resolver zipped together with //! the results of the `get` //! //! a. Topologically sort the dependencies //! b. Compile each dependency in order, passing in the -L's pointing at each //! previously compiled dependency //! 
use std::collections::HashMap; use std::default::Default; use std::path::{Path, PathBuf}; use std::sync::Arc; use core::registry::PackageRegistry; use core::{Source, SourceId, SourceMap, PackageSet, Package, Target}; use core::{Profile, TargetKind, Profiles}; use core::resolver::{Method, Resolve}; use ops::{self, BuildOutput, ExecEngine}; use util::config::{ConfigValue, Config}; use util::{CargoResult, internal, ChainError, profile}; /// Contains information about how a package should be compiled. pub struct CompileOptions<'a> { pub config: &'a Config, /// Number of concurrent jobs to use. pub jobs: Option, /// The target platform to compile for (example: `i686-unknown-linux-gnu`). pub target: Option<&'a str>, /// Extra features to build for the root package pub features: &'a [String], /// Flag if the default feature should be built for the root package pub no_default_features: bool, /// Root package to build (if None it's the current one) pub spec: &'a [String], /// Filter to apply to the root package to select which targets will be /// built. pub filter: CompileFilter<'a>, /// Engine which drives compilation pub exec_engine: Option>>, /// Whether this is a release build or not pub release: bool, /// Mode for this compile. 
pub mode: CompileMode, /// Extra arguments to be passed to rustdoc (for main crate and dependencies) pub target_rustdoc_args: Option<&'a [String]>, /// The specified target will be compiled with all the available arguments, /// note that this only accounts for the *final* invocation of rustc pub target_rustc_args: Option<&'a [String]>, } #[derive(Clone, Copy, PartialEq)] pub enum CompileMode { Test, Build, Bench, Doc { deps: bool }, } pub enum CompileFilter<'a> { Everything, Only { lib: bool, bins: &'a [String], examples: &'a [String], tests: &'a [String], benches: &'a [String], } } pub fn compile<'a>(manifest_path: &Path, options: &CompileOptions<'a>) -> CargoResult> { debug!("compile; manifest-path={}", manifest_path.display()); let package = try!(Package::for_path(manifest_path, options.config)); debug!("loaded package; package={}", package); for key in package.manifest().warnings().iter() { try!(options.config.shell().warn(key)) } compile_pkg(&package, None, options) } pub fn resolve_dependencies<'a>(root_package: &Package, config: &'a Config, source: Option>, features: Vec, no_default_features: bool) -> CargoResult<(Vec, Resolve, SourceMap<'a>)> { let override_ids = try!(source_ids_from_config(config, root_package.root())); let mut registry = PackageRegistry::new(config); if let Some(source) = source { registry.add_preloaded(root_package.package_id().source_id(), source); } // First, resolve the root_package's *listed* dependencies, as well as // downloading and updating all remotes and such. let resolve = try!(ops::resolve_pkg(&mut registry, root_package)); // Second, resolve with precisely what we're doing. Filter out // transitive dependencies if necessary, specify features, handle // overrides, etc. let _p = profile::start("resolving w/ overrides..."); try!(registry.add_overrides(override_ids)); let method = Method::Required{ dev_deps: true, // TODO: remove this option? 
features: &features, uses_default_features: !no_default_features, }; let resolved_with_overrides = try!(ops::resolve_with_previous(&mut registry, root_package, method, Some(&resolve), None)); let packages = try!(ops::get_resolved_packages(&resolved_with_overrides, &mut registry)); Ok((packages, resolved_with_overrides, registry.move_sources())) } #[allow(deprecated)] // connect => join in 1.3 pub fn compile_pkg<'a>(root_package: &Package, source: Option>, options: &CompileOptions<'a>) -> CargoResult> { let CompileOptions { config, jobs, target, spec, features, no_default_features, release, mode, ref filter, ref exec_engine, ref target_rustdoc_args, ref target_rustc_args } = *options; let target = target.map(|s| s.to_string()); let features = features.iter().flat_map(|s| { s.split(' ') }).map(|s| s.to_string()).collect::>(); if jobs == Some(0) { bail!("jobs must be at least 1") } let (packages, resolve_with_overrides, sources) = { try!(resolve_dependencies(root_package, config, source, features, no_default_features)) }; let mut invalid_spec = vec![]; let pkgids = if spec.len() > 0 { spec.iter().filter_map(|p| { match resolve_with_overrides.query(&p) { Ok(p) => Some(p), Err(..) 
=> { invalid_spec.push(p.to_string()); None } } }).collect::>() } else { vec![root_package.package_id()] }; if spec.len() > 0 && invalid_spec.len() > 0 { bail!("could not find package matching spec `{}`", invalid_spec.connect(", ")) } let to_builds = packages.iter().filter(|p| pkgids.contains(&p.package_id())) .collect::>(); let mut general_targets = Vec::new(); let mut package_targets = Vec::new(); let profiles = root_package.manifest().profiles(); match (*target_rustc_args, *target_rustdoc_args) { (Some(..), _) | (_, Some(..)) if to_builds.len() != 1 => { panic!("`rustc` and `rustdoc` should not accept multiple `-p` flags") } (Some(args), _) => { let targets = try!(generate_targets(to_builds[0], profiles, mode, filter, release)); if targets.len() == 1 { let (target, profile) = targets[0]; let mut profile = profile.clone(); profile.rustc_args = Some(args.to_vec()); general_targets.push((target, profile)); } else { bail!("extra arguments to `rustc` can only be passed to one \ target, consider filtering\nthe package by passing \ e.g. `--lib` or `--bin NAME` to specify a single target") } } (None, Some(args)) => { let targets = try!(generate_targets(to_builds[0], profiles, mode, filter, release)); if targets.len() == 1 { let (target, profile) = targets[0]; let mut profile = profile.clone(); profile.rustdoc_args = Some(args.to_vec()); general_targets.push((target, profile)); } else { bail!("extra arguments to `rustdoc` can only be passed to one \ target, consider filtering\nthe package by passing e.g. 
\ `--lib` or `--bin NAME` to specify a single target") } } (None, None) => { for &to_build in to_builds.iter() { let targets = try!(generate_targets(to_build, profiles, mode, filter, release)); package_targets.push((to_build, targets)); } } }; for &(target, ref profile) in &general_targets { for &to_build in to_builds.iter() { package_targets.push((to_build, vec![(target, profile)])); } } let mut ret = { let _p = profile::start("compiling"); let mut build_config = try!(scrape_build_config(config, jobs, target)); build_config.exec_engine = exec_engine.clone(); build_config.release = release; if let CompileMode::Doc { deps } = mode { build_config.doc_all = deps; } try!(ops::compile_targets(&package_targets, &PackageSet::new(&packages), &resolve_with_overrides, &sources, config, build_config, root_package.manifest().profiles(), )) }; ret.to_doc_test = to_builds.iter().map(|&p| p.clone()).collect(); return Ok(ret); } impl<'a> CompileFilter<'a> { pub fn new(lib_only: bool, bins: &'a [String], tests: &'a [String], examples: &'a [String], benches: &'a [String]) -> CompileFilter<'a> { if lib_only || !bins.is_empty() || !tests.is_empty() || !examples.is_empty() || !benches.is_empty() { CompileFilter::Only { lib: lib_only, bins: bins, examples: examples, benches: benches, tests: tests, } } else { CompileFilter::Everything } } pub fn matches(&self, target: &Target) -> bool { match *self { CompileFilter::Everything => true, CompileFilter::Only { lib, bins, examples, tests, benches } => { let list = match *target.kind() { TargetKind::Bin => bins, TargetKind::Test => tests, TargetKind::Bench => benches, TargetKind::Example => examples, TargetKind::Lib(..) => return lib, TargetKind::CustomBuild => return false, }; list.iter().any(|x| *x == target.name()) } } } } /// Given the configuration for a build, this function will generate all /// target/profile combinations needed to be built. 
fn generate_targets<'a>(pkg: &'a Package, profiles: &'a Profiles, mode: CompileMode, filter: &CompileFilter, release: bool) -> CargoResult> { let build = if release {&profiles.release} else {&profiles.dev}; let test = if release {&profiles.bench} else {&profiles.test}; let profile = match mode { CompileMode::Test => test, CompileMode::Bench => &profiles.bench, CompileMode::Build => build, CompileMode::Doc { .. } => &profiles.doc, }; return match *filter { CompileFilter::Everything => { match mode { CompileMode::Bench => { Ok(pkg.targets().iter().filter(|t| t.benched()).map(|t| { (t, profile) }).collect::>()) } CompileMode::Test => { let mut base = pkg.targets().iter().filter(|t| { t.tested() }).map(|t| { (t, if t.is_example() {build} else {profile}) }).collect::>(); // Always compile the library if we're testing everything as // it'll be needed for doctests if let Some(t) = pkg.targets().iter().find(|t| t.is_lib()) { if t.doctested() { base.push((t, build)); } } Ok(base) } CompileMode::Build => { Ok(pkg.targets().iter().filter(|t| { t.is_bin() || t.is_lib() }).map(|t| (t, profile)).collect()) } CompileMode::Doc { .. 
} => { Ok(pkg.targets().iter().filter(|t| t.documented()) .map(|t| (t, profile)).collect()) } } } CompileFilter::Only { lib, bins, examples, tests, benches } => { let mut targets = Vec::new(); if lib { if let Some(t) = pkg.targets().iter().find(|t| t.is_lib()) { targets.push((t, profile)); } else { bail!("no library targets found") } } { let mut find = |names: &[String], desc, kind, profile| { for name in names { let target = pkg.targets().iter().find(|t| { t.name() == *name && *t.kind() == kind }); let t = match target { Some(t) => t, None => bail!("no {} target named `{}`", desc, name), }; debug!("found {} `{}`", desc, name); targets.push((t, profile)); } Ok(()) }; try!(find(bins, "bin", TargetKind::Bin, profile)); try!(find(examples, "example", TargetKind::Example, build)); try!(find(tests, "test", TargetKind::Test, test)); try!(find(benches, "bench", TargetKind::Bench, &profiles.bench)); } Ok(targets) } }; } /// Read the `paths` configuration variable to discover all path overrides that /// have been configured. fn source_ids_from_config(config: &Config, cur_path: &Path) -> CargoResult> { let configs = try!(config.values()); debug!("loaded config; configs={:?}", configs); let config_paths = match configs.get("paths") { Some(cfg) => cfg, None => return Ok(Vec::new()) }; let paths = try!(config_paths.list().chain_error(|| { internal("invalid configuration for the key `paths`") })); paths.iter().map(|&(ref s, ref p)| { // The path listed next to the string is the config file in which the // key was located, so we want to pop off the `.cargo/config` component // to get the directory containing the `.cargo` folder. p.parent().unwrap().parent().unwrap().join(s) }).filter(|p| { // Make sure we don't override the local package, even if it's in the // list of override paths. cur_path != &**p }).map(|p| SourceId::for_path(&p)).collect() } /// Parse all config files to learn about build configuration. 
Currently /// configured options are: /// /// * build.jobs /// * target.$target.ar /// * target.$target.linker /// * target.$target.libfoo.metadata fn scrape_build_config(config: &Config, jobs: Option, target: Option) -> CargoResult { let cfg_jobs = match try!(config.get_i64("build.jobs")) { Some((n, p)) => { if n <= 0 { bail!("build.jobs must be positive, but found {} in {:?}", n, p) } else if n >= u32::max_value() as i64 { bail!("build.jobs is too large: found {} in {:?}", n, p) } else { Some(n as u32) } } None => None, }; let jobs = jobs.or(cfg_jobs).unwrap_or(::num_cpus::get() as u32); let mut base = ops::BuildConfig { jobs: jobs, requested_target: target.clone(), ..Default::default() }; base.host = try!(scrape_target_config(config, &config.rustc_info().host)); base.target = match target.as_ref() { Some(triple) => try!(scrape_target_config(config, &triple)), None => base.host.clone(), }; Ok(base) } fn scrape_target_config(config: &Config, triple: &str) -> CargoResult { let key = format!("target.{}", triple); let mut ret = ops::TargetConfig { ar: try!(config.get_path(&format!("{}.ar", key))), linker: try!(config.get_path(&format!("{}.linker", key))), overrides: HashMap::new(), }; let table = match try!(config.get_table(&key)) { Some((table, _)) => table, None => return Ok(ret), }; for (lib_name, _) in table.into_iter() { if lib_name == "ar" || lib_name == "linker" { continue } let mut output = BuildOutput { library_paths: Vec::new(), library_links: Vec::new(), cfgs: Vec::new(), metadata: Vec::new(), rerun_if_changed: Vec::new(), }; let key = format!("{}.{}", key, lib_name); let table = try!(config.get_table(&key)).unwrap().0; for (k, _) in table.into_iter() { let key = format!("{}.{}", key, k); match try!(config.get(&key)).unwrap() { ConfigValue::String(v, path) => { if k == "rustc-flags" { let whence = format!("in `{}` (in {})", key, path.display()); let (paths, links) = try!( BuildOutput::parse_rustc_flags(&v, &whence) ); 
output.library_paths.extend(paths.into_iter()); output.library_links.extend(links.into_iter()); } else { output.metadata.push((k, v)); } }, ConfigValue::List(a, p) => { if k == "rustc-link-lib" { output.library_links.extend(a.into_iter().map(|v| v.0)); } else if k == "rustc-link-search" { output.library_paths.extend(a.into_iter().map(|v| { PathBuf::from(&v.0) })); } else if k == "rustc-cfg" { output.cfgs.extend(a.into_iter().map(|v| v.0)); } else { try!(config.expected("string", &k, ConfigValue::List(a, p))); } }, // technically could be a list too, but that's the exception to // the rule... cv => { try!(config.expected("string", &k, cv)); } } } ret.overrides.insert(lib_name, output); } Ok(ret) } cargo-0.8.0/src/cargo/ops/cargo_doc.rs000066400000000000000000000055271264656333200176060ustar00rootroot00000000000000use std::collections::HashSet; use std::fs; use std::path::Path; use std::process::Command; use core::{Package, PackageIdSpec}; use ops; use util::CargoResult; pub struct DocOptions<'a> { pub open_result: bool, pub compile_opts: ops::CompileOptions<'a>, } pub fn doc(manifest_path: &Path, options: &DocOptions) -> CargoResult<()> { let package = try!(Package::for_path(manifest_path, options.compile_opts.config)); let mut lib_names = HashSet::new(); let mut bin_names = HashSet::new(); if options.compile_opts.spec.len() == 0 { for target in package.targets().iter().filter(|t| t.documented()) { if target.is_lib() { assert!(lib_names.insert(target.crate_name())); } else { assert!(bin_names.insert(target.crate_name())); } } for bin in bin_names.iter() { if lib_names.contains(bin) { bail!("cannot document a package where a library and a binary \ have the same name. 
Consider renaming one or marking \ the target as `doc = false`") } } } try!(ops::compile(manifest_path, &options.compile_opts)); if options.open_result { let name = if options.compile_opts.spec.len() > 1 { bail!("Passing multiple packages and `open` is not supported") } else if options.compile_opts.spec.len() == 1 { try!(PackageIdSpec::parse(&options.compile_opts.spec[0])) .name().replace("-", "_").to_string() } else { match lib_names.iter().chain(bin_names.iter()).nth(0) { Some(s) => s.to_string(), None => return Ok(()) } }; let target_dir = options.compile_opts.config.target_dir(&package); let path = target_dir.join("doc").join(&name).join("index.html"); if fs::metadata(&path).is_ok() { open_docs(&path); } } Ok(()) } #[cfg(not(any(target_os = "windows", target_os = "macos")))] fn open_docs(path: &Path) { // trying xdg-open match Command::new("xdg-open").arg(path).status() { Ok(_) => return, Err(_) => () }; // trying gnome-open match Command::new("gnome-open").arg(path).status() { Ok(_) => return, Err(_) => () }; // trying kde-open match Command::new("kde-open").arg(path).status() { Ok(_) => return, Err(_) => () }; } #[cfg(target_os = "windows")] fn open_docs(path: &Path) { match Command::new("cmd").arg("/C").arg("start").arg("").arg(path).status() { Ok(_) => return, Err(_) => () }; } #[cfg(target_os = "macos")] fn open_docs(path: &Path) { match Command::new("open").arg(path).status() { Ok(_) => return, Err(_) => () }; } cargo-0.8.0/src/cargo/ops/cargo_fetch.rs000066400000000000000000000015031264656333200201200ustar00rootroot00000000000000use std::path::Path; use core::registry::PackageRegistry; use core::{Package, PackageId, Resolve}; use ops; use util::{CargoResult, Config, human, ChainError}; /// Executes `cargo fetch`. 
pub fn fetch(manifest_path: &Path, config: &Config) -> CargoResult<()> { let package = try!(Package::for_path(manifest_path, config)); let mut registry = PackageRegistry::new(config); let resolve = try!(ops::resolve_pkg(&mut registry, &package)); let _ = try!(get_resolved_packages(&resolve, &mut registry)); Ok(()) } pub fn get_resolved_packages(resolve: &Resolve, registry: &mut PackageRegistry) -> CargoResult> { let ids: Vec = resolve.iter().cloned().collect(); registry.get(&ids).chain_error(|| { human("unable to get packages from source") }) } cargo-0.8.0/src/cargo/ops/cargo_generate_lockfile.rs000066400000000000000000000161151264656333200224760ustar00rootroot00000000000000use std::collections::{BTreeMap, HashSet}; use std::path::Path; use core::PackageId; use core::registry::PackageRegistry; use core::{Resolve, SourceId, Package}; use core::resolver::Method; use ops; use util::config::Config; use util::CargoResult; pub struct UpdateOptions<'a> { pub config: &'a Config, pub to_update: &'a [String], pub precise: Option<&'a str>, pub aggressive: bool, } pub fn generate_lockfile(manifest_path: &Path, config: &Config) -> CargoResult<()> { let package = try!(Package::for_path(manifest_path, config)); let mut registry = PackageRegistry::new(config); let resolve = try!(ops::resolve_with_previous(&mut registry, &package, Method::Everything, None, None)); try!(ops::write_pkg_lockfile(&package, &resolve)); Ok(()) } pub fn update_lockfile(manifest_path: &Path, opts: &UpdateOptions) -> CargoResult<()> { let package = try!(Package::for_path(manifest_path, opts.config)); let previous_resolve = match try!(ops::load_pkg_lockfile(&package)) { Some(resolve) => resolve, None => bail!("a Cargo.lock must exist before it is updated") }; if opts.aggressive && opts.precise.is_some() { bail!("cannot specify both aggressive and precise simultaneously") } let mut registry = PackageRegistry::new(opts.config); let mut to_avoid = HashSet::new(); if opts.to_update.len() == 0 { 
to_avoid.extend(previous_resolve.iter()); } else { let mut sources = Vec::new(); for name in opts.to_update { let dep = try!(previous_resolve.query(name)); if opts.aggressive { fill_with_deps(&previous_resolve, dep, &mut to_avoid, &mut HashSet::new()); } else { to_avoid.insert(dep); sources.push(match opts.precise { Some(precise) => { // TODO: see comment in `resolve.rs` as well, but this // seems like a pretty hokey reason to single out // the registry as well. let precise = if dep.source_id().is_registry() { format!("{}={}", dep.name(), precise) } else { precise.to_string() }; dep.source_id().clone().with_precise(Some(precise)) } None => { dep.source_id().clone().with_precise(None) } }); } } try!(registry.add_sources(&sources)); } let resolve = try!(ops::resolve_with_previous(&mut registry, &package, Method::Everything, Some(&previous_resolve), Some(&to_avoid))); // Summarize what is changing for the user. let print_change = |status: &str, msg: String| { opts.config.shell().status(status, msg) }; for (removed, added) in compare_dependency_graphs(&previous_resolve, &resolve) { if removed.len() == 1 && added.len() == 1 { let msg = if removed[0].source_id().is_git() { format!("{} -> #{}", removed[0], &added[0].source_id().precise().unwrap()[..8]) } else { format!("{} -> v{}", removed[0], added[0].version()) }; try!(print_change("Updating", msg)); } else { for package in removed.iter() { try!(print_change("Removing", format!("{}", package))); } for package in added.iter() { try!(print_change("Adding", format!("{}", package))); } } } try!(ops::write_pkg_lockfile(&package, &resolve)); return Ok(()); fn fill_with_deps<'a>(resolve: &'a Resolve, dep: &'a PackageId, set: &mut HashSet<&'a PackageId>, visited: &mut HashSet<&'a PackageId>) { if !visited.insert(dep) { return } set.insert(dep); if let Some(deps) = resolve.deps(dep) { for dep in deps { fill_with_deps(resolve, dep, set, visited); } } } fn compare_dependency_graphs<'a>(previous_resolve: &'a Resolve, resolve: &'a 
Resolve) -> Vec<(Vec<&'a PackageId>, Vec<&'a PackageId>)> { fn key(dep: &PackageId) -> (&str, &SourceId) { (dep.name(), dep.source_id()) } // Removes all package ids in `b` from `a`. Note that this is somewhat // more complicated because the equality for source ids does not take // precise versions into account (e.g. git shas), but we want to take // that into account here. fn vec_subtract<'a>(a: &[&'a PackageId], b: &[&'a PackageId]) -> Vec<&'a PackageId> { a.iter().filter(|a| { // If this package id is not found in `b`, then it's definitely // in the subtracted set let i = match b.binary_search(a) { Ok(i) => i, Err(..) => return true, }; // If we've found `a` in `b`, then we iterate over all instances // (we know `b` is sorted) and see if they all have different // precise versions. If so, then `a` isn't actually in `b` so // we'll let it through. // // Note that we only check this for non-registry sources, // however, as registries countain enough version information in // the package id to disambiguate if a.source_id().is_registry() { return false } b[i..].iter().take_while(|b| a == b).all(|b| { a.source_id().precise() != b.source_id().precise() }) }).cloned().collect() } // Map (package name, package source) to (removed versions, added versions). 
let mut changes = BTreeMap::new(); let empty = (Vec::new(), Vec::new()); for dep in previous_resolve.iter() { changes.entry(key(dep)).or_insert(empty.clone()).0.push(dep); } for dep in resolve.iter() { changes.entry(key(dep)).or_insert(empty.clone()).1.push(dep); } for (_, v) in changes.iter_mut() { let (ref mut old, ref mut new) = *v; old.sort(); new.sort(); let removed = vec_subtract(old, new); let added = vec_subtract(new, old); *old = removed; *new = added; } debug!("{:#?}", changes); changes.into_iter().map(|(_, v)| v).collect() } } cargo-0.8.0/src/cargo/ops/cargo_install.rs000066400000000000000000000277361264656333200205150ustar00rootroot00000000000000use std::collections::btree_map::Entry; use std::collections::{BTreeMap, BTreeSet}; use std::env; use std::ffi::OsString; use std::fs::{self, File}; use std::io::prelude::*; use std::io; use std::path::{Path, PathBuf}; use toml; use core::{SourceId, Source, Package, Registry, Dependency, PackageIdSpec}; use core::PackageId; use ops::{self, CompileFilter}; use sources::{GitSource, PathSource, RegistrySource}; use util::{CargoResult, ChainError, Config, human, internal}; #[derive(RustcDecodable, RustcEncodable)] enum CrateListing { V1(CrateListingV1), } #[derive(RustcDecodable, RustcEncodable)] struct CrateListingV1 { v1: BTreeMap>, } struct Transaction { bins: Vec, } impl Drop for Transaction { fn drop(&mut self) { for bin in self.bins.iter() { let _ = fs::remove_file(bin); } } } pub fn install(root: Option<&str>, krate: Option<&str>, source_id: &SourceId, vers: Option<&str>, opts: &ops::CompileOptions) -> CargoResult<()> { let config = opts.config; let root = try!(resolve_root(root, config)); let (pkg, source) = if source_id.is_git() { try!(select_pkg(GitSource::new(source_id, config), source_id, krate, vers, &mut |git| git.read_packages())) } else if source_id.is_path() { let path = source_id.url().to_file_path().ok() .expect("path sources must have a valid path"); try!(select_pkg(PathSource::new(&path, 
source_id, config), source_id, krate, vers, &mut |path| path.read_packages())) } else { try!(select_pkg(RegistrySource::new(source_id, config), source_id, krate, vers, &mut |_| Err(human("must specify a crate to install from \ crates.io, or use --path or --git to \ specify alternate source")))) }; let mut list = try!(read_crate_list(&root)); let dst = root.join("bin"); try!(check_overwrites(&dst, &pkg, &opts.filter, &list)); let target_dir = config.cwd().join("target-install"); config.set_target_dir(&target_dir); let compile = try!(ops::compile_pkg(&pkg, Some(source), opts).chain_error(|| { human(format!("failed to compile `{}`, intermediate artifacts can be \ found at `{}`", pkg, target_dir.display())) })); let mut t = Transaction { bins: Vec::new() }; try!(fs::create_dir_all(&dst)); for bin in compile.binaries.iter() { let dst = dst.join(bin.file_name().unwrap()); try!(config.shell().status("Installing", dst.display())); try!(fs::copy(&bin, &dst).chain_error(|| { human(format!("failed to copy `{}` to `{}`", bin.display(), dst.display())) })); t.bins.push(dst); } try!(fs::remove_dir_all(&target_dir)); list.v1.entry(pkg.package_id().clone()).or_insert_with(|| { BTreeSet::new() }).extend(t.bins.iter().map(|t| { t.file_name().unwrap().to_string_lossy().into_owned() })); try!(write_crate_list(&root, list)); t.bins.truncate(0); // Print a warning that if this directory isn't in PATH that they won't be // able to run these commands. 
let path = env::var_os("PATH").unwrap_or(OsString::new()); for path in env::split_paths(&path) { if path == dst { return Ok(()) } } try!(config.shell().warn(&format!("be sure to add `{}` to your PATH to be \ able to run the installed binaries", dst.display()))); Ok(()) } fn select_pkg<'a, T>(mut source: T, source_id: &SourceId, name: Option<&str>, vers: Option<&str>, list_all: &mut FnMut(&mut T) -> CargoResult>) -> CargoResult<(Package, Box)> where T: Source + 'a { try!(source.update()); match name { Some(name) => { let dep = try!(Dependency::parse(name, vers, source_id)); let deps = try!(source.query(&dep)); match deps.iter().map(|p| p.package_id()).max() { Some(pkgid) => { try!(source.download(&[pkgid.clone()])); Ok((try!(source.get(&[pkgid.clone()])).remove(0), Box::new(source))) } None => { let vers_info = vers.map(|v| format!(" with version `{}`", v)) .unwrap_or(String::new()); Err(human(format!("could not find `{}` in `{}`{}", name, source_id, vers_info))) } } } None => { let candidates = try!(list_all(&mut source)); let binaries = candidates.iter().filter(|cand| { cand.targets().iter().filter(|t| t.is_bin()).count() > 0 }); let examples = candidates.iter().filter(|cand| { cand.targets().iter().filter(|t| t.is_example()).count() > 0 }); let pkg = match try!(one(binaries, |v| multi_err("binaries", v))) { Some(p) => p, None => { match try!(one(examples, |v| multi_err("examples", v))) { Some(p) => p, None => bail!("no packages found with binaries or \ examples"), } } }; return Ok((pkg.clone(), Box::new(source))); #[allow(deprecated)] // connect => join in 1.3 fn multi_err(kind: &str, mut pkgs: Vec<&Package>) -> String { pkgs.sort_by(|a, b| a.name().cmp(b.name())); format!("multiple packages with {} found: {}", kind, pkgs.iter().map(|p| p.name()).collect::>() .connect(", ")) } } } } fn one(mut i: I, f: F) -> CargoResult> where I: Iterator, F: FnOnce(Vec) -> String { match (i.next(), i.next()) { (Some(i1), Some(i2)) => { let mut v = vec![i1, i2]; v.extend(i); 
Err(human(f(v))) } (Some(i), None) => Ok(Some(i)), (None, _) => Ok(None) } } fn check_overwrites(dst: &Path, pkg: &Package, filter: &ops::CompileFilter, prev: &CrateListingV1) -> CargoResult<()> { let check = |name| { let name = format!("{}{}", name, env::consts::EXE_SUFFIX); if fs::metadata(dst.join(&name)).is_err() { return Ok(()) } let mut msg = format!("binary `{}` already exists in destination", name); if let Some((p, _)) = prev.v1.iter().find(|&(_, v)| v.contains(&name)) { msg.push_str(&format!(" as part of `{}`", p)); } Err(human(msg)) }; match *filter { CompileFilter::Everything => { // If explicit --bin or --example flags were passed then those'll // get checked during cargo_compile, we only care about the "build // everything" case here if pkg.targets().iter().filter(|t| t.is_bin()).next().is_none() { bail!("specified package has no binaries") } for target in pkg.targets().iter().filter(|t| t.is_bin()) { try!(check(target.name())); } } CompileFilter::Only { bins, examples, .. } => { for bin in bins.iter().chain(examples) { try!(check(bin)); } } } Ok(()) } fn read_crate_list(path: &Path) -> CargoResult { let metadata = path.join(".crates.toml"); let mut f = match File::open(&metadata) { Ok(f) => f, Err(e) => { if e.kind() == io::ErrorKind::NotFound { return Ok(CrateListingV1 { v1: BTreeMap::new() }); } return Err(e).chain_error(|| { human(format!("failed to open crate metadata at `{}`", metadata.display())) }); } }; (|| -> CargoResult<_> { let mut contents = String::new(); try!(f.read_to_string(&mut contents)); let listing = try!(toml::decode_str(&contents).chain_error(|| { internal("invalid TOML found for metadata") })); match listing { CrateListing::V1(v1) => Ok(v1), } }).chain_error(|| { human(format!("failed to parse crate metadata at `{}`", metadata.display())) }) } fn write_crate_list(path: &Path, listing: CrateListingV1) -> CargoResult<()> { let metadata = path.join(".crates.toml"); (|| -> CargoResult<_> { let mut f = try!(File::create(&metadata)); 
let data = toml::encode_str::(&CrateListing::V1(listing)); try!(f.write_all(data.as_bytes())); Ok(()) }).chain_error(|| { human(format!("failed to write crate metadata at `{}`", metadata.display())) }) } pub fn install_list(dst: Option<&str>, config: &Config) -> CargoResult<()> { let dst = try!(resolve_root(dst, config)); let list = try!(read_crate_list(&dst)); let mut shell = config.shell(); let out = shell.out(); for (k, v) in list.v1.iter() { try!(writeln!(out, "{}:", k)); for bin in v { try!(writeln!(out, " {}", bin)); } } Ok(()) } pub fn uninstall(root: Option<&str>, spec: &str, bins: &[String], config: &Config) -> CargoResult<()> { let root = try!(resolve_root(root, config)); let mut metadata = try!(read_crate_list(&root)); let mut to_remove = Vec::new(); { let result = try!(PackageIdSpec::query_str(spec, metadata.v1.keys())) .clone(); let mut installed = match metadata.v1.entry(result.clone()) { Entry::Occupied(e) => e, Entry::Vacant(..) => panic!("entry not found: {}", result), }; let dst = root.join("bin"); for bin in installed.get() { let bin = dst.join(bin); if fs::metadata(&bin).is_err() { bail!("corrupt metadata, `{}` does not exist when it should", bin.display()) } } let bins = bins.iter().map(|s| { if s.ends_with(env::consts::EXE_SUFFIX) { s.to_string() } else { format!("{}{}", s, env::consts::EXE_SUFFIX) } }).collect::>(); for bin in bins.iter() { if !installed.get().contains(bin) { bail!("binary `{}` not installed as part of `{}`", bin, result) } } if bins.len() == 0 { to_remove.extend(installed.get().iter().map(|b| dst.join(b))); installed.get_mut().clear(); } else { for bin in bins.iter() { to_remove.push(dst.join(bin)); installed.get_mut().remove(bin); } } if installed.get().len() == 0 { installed.remove(); } } try!(write_crate_list(&root, metadata)); for bin in to_remove { try!(config.shell().status("Removing", bin.display())); try!(fs::remove_file(bin)); } Ok(()) } fn resolve_root(flag: Option<&str>, config: &Config) -> CargoResult { let 
config_root = try!(config.get_string("install.root")); Ok(flag.map(PathBuf::from).or_else(|| { env::var_os("CARGO_INSTALL_ROOT").map(PathBuf::from) }).or_else(|| { config_root.clone().map(|(v, _)| PathBuf::from(v)) }).unwrap_or_else(|| { config.home().to_owned() })) } cargo-0.8.0/src/cargo/ops/cargo_new.rs000066400000000000000000000164711264656333200176320ustar00rootroot00000000000000use std::env; use std::fs; use std::io::prelude::*; use std::path::Path; use rustc_serialize::{Decodable, Decoder}; use git2::Config as GitConfig; use term::color::BLACK; use util::{GitRepo, HgRepo, CargoResult, human, ChainError, internal}; use util::{Config, paths}; use toml; #[derive(Clone, Copy, Debug, PartialEq)] pub enum VersionControl { Git, Hg, NoVcs } pub struct NewOptions<'a> { pub version_control: Option, pub bin: bool, pub path: &'a str, pub name: Option<&'a str>, } impl Decodable for VersionControl { fn decode(d: &mut D) -> Result { Ok(match &try!(d.read_str())[..] { "git" => VersionControl::Git, "hg" => VersionControl::Hg, "none" => VersionControl::NoVcs, n => { let err = format!("could not decode '{}' as version control", n); return Err(d.error(&err)); } }) } } struct CargoNewConfig { name: Option, email: Option, version_control: Option, } pub fn new(opts: NewOptions, config: &Config) -> CargoResult<()> { let path = config.cwd().join(opts.path); if fs::metadata(&path).is_ok() { bail!("destination `{}` already exists", path.display()) } let name = match opts.name { Some(name) => name, None => { let dir_name = try!(path.file_name().and_then(|s| s.to_str()).chain_error(|| { human(&format!("cannot create a project with a non-unicode name: {:?}", path.file_name().unwrap())) })); if opts.bin { dir_name } else { let new_name = strip_rust_affixes(dir_name); if new_name != dir_name { let message = format!( "note: package will be named `{}`; use --name to override", new_name); try!(config.shell().say(&message, BLACK)); } new_name } } }; for c in name.chars() { if 
c.is_alphanumeric() { continue } if c == '_' || c == '-' { continue } bail!("Invalid character `{}` in crate name: `{}`", c, name) } mk(config, &path, name, &opts).chain_error(|| { human(format!("Failed to create project `{}` at `{}`", name, path.display())) }) } fn strip_rust_affixes(name: &str) -> &str { for &prefix in &["rust-", "rust_", "rs-", "rs_"] { if name.starts_with(prefix) { return &name[prefix.len()..]; } } for &suffix in &["-rust", "_rust", "-rs", "_rs"] { if name.ends_with(suffix) { return &name[..name.len()-suffix.len()]; } } name } fn existing_vcs_repo(path: &Path, cwd: &Path) -> bool { GitRepo::discover(path, cwd).is_ok() || HgRepo::discover(path, cwd).is_ok() } fn mk(config: &Config, path: &Path, name: &str, opts: &NewOptions) -> CargoResult<()> { let cfg = try!(global_config(config)); let mut ignore = "target\n".to_string(); let in_existing_vcs_repo = existing_vcs_repo(path.parent().unwrap(), config.cwd()); if !opts.bin { ignore.push_str("Cargo.lock\n"); } let vcs = match (opts.version_control, cfg.version_control, in_existing_vcs_repo) { (None, None, false) => VersionControl::Git, (None, Some(option), false) => option, (Some(option), _, _) => option, (_, _, true) => VersionControl::NoVcs, }; match vcs { VersionControl::Git => { try!(GitRepo::init(path, config.cwd())); try!(paths::write(&path.join(".gitignore"), ignore.as_bytes())); }, VersionControl::Hg => { try!(HgRepo::init(path, config.cwd())); try!(paths::write(&path.join(".hgignore"), ignore.as_bytes())); }, VersionControl::NoVcs => { try!(fs::create_dir(path)); }, }; let (author_name, email) = try!(discover_author()); // Hoo boy, sure glad we've got exhaustivenes checking behind us. 
let author = match (cfg.name, cfg.email, author_name, email) { (Some(name), Some(email), _, _) | (Some(name), None, _, Some(email)) | (None, Some(email), name, _) | (None, None, name, Some(email)) => format!("{} <{}>", name, email), (Some(name), None, _, None) | (None, None, name, None) => name, }; try!(paths::write(&path.join("Cargo.toml"), format!( r#"[package] name = "{}" version = "0.1.0" authors = [{}] [dependencies] "#, name, toml::Value::String(author)).as_bytes())); try!(fs::create_dir(&path.join("src"))); if opts.bin { try!(paths::write(&path.join("src/main.rs"), b"\ fn main() { println!(\"Hello, world!\"); } ")); } else { try!(paths::write(&path.join("src/lib.rs"), b"\ #[cfg(test)] mod test { #[test] fn it_works() { } } ")); } Ok(()) } fn discover_author() -> CargoResult<(String, Option)> { let git_config = GitConfig::open_default().ok(); let git_config = git_config.as_ref(); let name = git_config.and_then(|g| g.get_string("user.name").ok()) .map(|s| s.to_string()) .or_else(|| env::var("USER").ok()) // unix .or_else(|| env::var("USERNAME").ok()); // windows let name = match name { Some(name) => name, None => { let username_var = if cfg!(windows) {"USERNAME"} else {"USER"}; bail!("could not determine the current user, please set ${}", username_var) } }; let email = git_config.and_then(|g| g.get_string("user.email").ok()) .or_else(|| env::var("EMAIL").ok()); let name = name.trim().to_string(); let email = email.map(|s| s.trim().to_string()); Ok((name, email)) } fn global_config(config: &Config) -> CargoResult { let name = try!(config.get_string("cargo-new.name")).map(|s| s.0); let email = try!(config.get_string("cargo-new.email")).map(|s| s.0); let vcs = try!(config.get_string("cargo-new.vcs")); let vcs = match vcs.as_ref().map(|p| (&p.0[..], &p.1)) { Some(("git", _)) => Some(VersionControl::Git), Some(("hg", _)) => Some(VersionControl::Hg), Some(("none", _)) => Some(VersionControl::NoVcs), Some((s, p)) => { return Err(internal(format!("invalid 
configuration for key \ `cargo-new.vcs`, unknown vcs `{}` \ (found in {:?})", s, p))) } None => None }; Ok(CargoNewConfig { name: name, email: email, version_control: vcs, }) } #[cfg(test)] mod tests { use super::strip_rust_affixes; #[test] fn affixes_stripped() { assert_eq!(strip_rust_affixes("rust-foo"), "foo"); assert_eq!(strip_rust_affixes("foo-rs"), "foo"); assert_eq!(strip_rust_affixes("rs_foo"), "foo"); // Only one affix is stripped assert_eq!(strip_rust_affixes("rs-foo-rs"), "foo-rs"); assert_eq!(strip_rust_affixes("foo-rs-rs"), "foo-rs"); // It shouldn't touch the middle assert_eq!(strip_rust_affixes("some-rust-crate"), "some-rust-crate"); } } cargo-0.8.0/src/cargo/ops/cargo_package.rs000066400000000000000000000222761264656333200204340ustar00rootroot00000000000000use std::io::prelude::*; use std::fs::{self, File}; use std::path::{self, Path, PathBuf}; use semver::VersionReq; use tar::Archive; use flate2::{GzBuilder, Compression}; use flate2::read::GzDecoder; use core::{SourceId, Package, PackageId}; use core::dependency::Kind; use sources::PathSource; use util::{self, CargoResult, human, internal, ChainError, Config}; use ops; pub fn package(manifest_path: &Path, config: &Config, verify: bool, list: bool, metadata: bool) -> CargoResult> { let mut src = try!(PathSource::for_path(manifest_path.parent().unwrap(), config)); let pkg = try!(src.root_package()); if metadata { try!(check_metadata(&pkg, config)); } try!(check_dependencies(&pkg, config)); if list { let root = pkg.root(); let mut list: Vec<_> = try!(src.list_files(&pkg)).iter().map(|file| { util::without_prefix(&file, &root).unwrap().to_path_buf() }).collect(); list.sort(); for file in list.iter() { println!("{}", file.display()); } return Ok(None) } let filename = format!("{}-{}.crate", pkg.name(), pkg.version()); let dir = config.target_dir(&pkg).join("package"); let dst = dir.join(&filename); if fs::metadata(&dst).is_ok() { return Ok(Some(dst)) } // Package up and test a temporary tarball and only 
move it to the final // location if it actually passes all our tests. Any previously existing // tarball can be assumed as corrupt or invalid, so we just blow it away if // it exists. try!(config.shell().status("Packaging", pkg.package_id().to_string())); let tmp_dst = dir.join(format!(".{}", filename)); let _ = fs::remove_file(&tmp_dst); try!(tar(&pkg, &src, config, &tmp_dst, &filename).chain_error(|| { human("failed to prepare local package for uploading") })); if verify { try!(run_verify(config, &pkg, &tmp_dst).chain_error(|| { human("failed to verify package tarball") })) } try!(fs::rename(&tmp_dst, &dst).chain_error(|| { human("failed to move temporary tarball into final location") })); Ok(Some(dst)) } // check that the package has some piece of metadata that a human can // use to tell what the package is about. #[allow(deprecated)] // connect => join in 1.3 fn check_metadata(pkg: &Package, config: &Config) -> CargoResult<()> { let md = pkg.manifest().metadata(); let mut missing = vec![]; macro_rules! lacking { ($( $($field: ident)||* ),*) => {{ $( if $(md.$field.as_ref().map_or(true, |s| s.is_empty()))&&* { $(missing.push(stringify!($field).replace("_", "-"));)* } )* }} } lacking!(description, license || license_file, documentation || homepage || repository); if !missing.is_empty() { let mut things = missing[..missing.len() - 1].connect(", "); // things will be empty if and only if length == 1 (i.e. the only case // to have no `or`). if !things.is_empty() { things.push_str(" or "); } things.push_str(&missing.last().unwrap()); try!(config.shell().warn( &format!("warning: manifest has no {things}. 
\ See http://doc.crates.io/manifest.html#package-metadata for more info.", things = things))) } Ok(()) } // Warn about wildcard deps which will soon be prohibited on crates.io #[allow(deprecated)] // connect => join in 1.3 fn check_dependencies(pkg: &Package, config: &Config) -> CargoResult<()> { let wildcard = VersionReq::parse("*").unwrap(); let mut wildcard_deps = vec![]; for dep in pkg.dependencies() { if dep.kind() != Kind::Development && dep.version_req() == &wildcard { wildcard_deps.push(dep.name()); } } if !wildcard_deps.is_empty() { let deps = wildcard_deps.connect(", "); try!(config.shell().warn( "warning: some dependencies have wildcard (\"*\") version constraints. \ On January 22nd, 2016, crates.io will begin rejecting packages with \ wildcard dependency constraints. See \ http://doc.crates.io/crates-io.html#using-crates.io-based-crates \ for information on version constraints.")); try!(config.shell().warn( &format!("dependencies for these crates have wildcard constraints: {}", deps))); } Ok(()) } fn tar(pkg: &Package, src: &PathSource, config: &Config, dst: &Path, filename: &str) -> CargoResult<()> { if fs::metadata(&dst).is_ok() { bail!("destination already exists: {}", dst.display()) } try!(fs::create_dir_all(dst.parent().unwrap())); let tmpfile = try!(File::create(dst)); // Prepare the encoder and its header let filename = Path::new(filename); let encoder = GzBuilder::new().filename(try!(util::path2bytes(filename))) .write(tmpfile, Compression::Best); // Put all package files into a compressed archive let ar = Archive::new(encoder); let root = pkg.root(); for file in try!(src.list_files(pkg)).iter() { if &**file == dst { continue } let relative = util::without_prefix(&file, &root).unwrap(); try!(check_filename(relative)); let relative = try!(relative.to_str().chain_error(|| { human(format!("non-utf8 path in source directory: {}", relative.display())) })); let mut file = try!(File::open(file)); try!(config.shell().verbose(|shell| { 
shell.status("Archiving", &relative) })); let path = format!("{}-{}{}{}", pkg.name(), pkg.version(), path::MAIN_SEPARATOR, relative); try!(ar.append_file(&path, &mut file).chain_error(|| { internal(format!("could not archive source file `{}`", relative)) })); } try!(ar.finish()); Ok(()) } fn run_verify(config: &Config, pkg: &Package, tar: &Path) -> CargoResult<()> { try!(config.shell().status("Verifying", pkg)); let f = try!(GzDecoder::new(try!(File::open(tar)))); let dst = pkg.root().join(&format!("target/package/{}-{}", pkg.name(), pkg.version())); if fs::metadata(&dst).is_ok() { try!(fs::remove_dir_all(&dst)); } let mut archive = Archive::new(f); try!(archive.unpack(dst.parent().unwrap())); let manifest_path = dst.join("Cargo.toml"); // When packages are uploaded to the registry, all path dependencies are // implicitly converted to registry-based dependencies, so we rewrite those // dependencies here. // // We also make sure to point all paths at `dst` instead of the previous // location that the package was originally read from. In locking the // `SourceId` we're telling it that the corresponding `PathSource` will be // considered updated and we won't actually read any packages. let registry = try!(SourceId::for_central(config)); let precise = Some("locked".to_string()); let new_src = try!(SourceId::for_path(&dst)).with_precise(precise); let new_pkgid = try!(PackageId::new(pkg.name(), pkg.version(), &new_src)); let new_summary = pkg.summary().clone().map_dependencies(|d| { if !d.source_id().is_path() { return d } d.clone_inner().set_source_id(registry.clone()).into_dependency() }); let mut new_manifest = pkg.manifest().clone(); new_manifest.set_summary(new_summary.override_id(new_pkgid)); let new_pkg = Package::new(new_manifest, &manifest_path); // Now that we've rewritten all our path dependencies, compile it! 
try!(ops::compile_pkg(&new_pkg, None, &ops::CompileOptions { config: config, jobs: None, target: None, features: &[], no_default_features: false, spec: &[], filter: ops::CompileFilter::Everything, exec_engine: None, release: false, mode: ops::CompileMode::Build, target_rustdoc_args: None, target_rustc_args: None, })); Ok(()) } // It can often be the case that files of a particular name on one platform // can't actually be created on another platform. For example files with colons // in the name are allowed on Unix but not on Windows. // // To help out in situations like this, issue about weird filenames when // packaging as a "heads up" that something may not work on other platforms. fn check_filename(file: &Path) -> CargoResult<()> { let name = match file.file_name() { Some(name) => name, None => return Ok(()), }; let name = match name.to_str() { Some(name) => name, None => { bail!("path does not have a unicode filename which may not unpack \ on all platforms: {}", file.display()) } }; let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*']; for c in bad_chars.iter().filter(|c| name.contains(**c)) { bail!("cannot package a filename with a special character `{}`: {}", c, file.display()) } Ok(()) } cargo-0.8.0/src/cargo/ops/cargo_pkgid.rs000066400000000000000000000014221264656333200201250ustar00rootroot00000000000000use std::path::Path; use ops; use core::{PackageIdSpec, Package}; use util::{CargoResult, Config}; pub fn pkgid(manifest_path: &Path, spec: Option<&str>, config: &Config) -> CargoResult { let package = try!(Package::for_path(manifest_path, config)); let lockfile = package.root().join("Cargo.lock"); let source_id = package.package_id().source_id(); let resolve = match try!(ops::load_lockfile(&lockfile, source_id)) { Some(resolve) => resolve, None => bail!("a Cargo.lock must exist for this command"), }; let pkgid = match spec { Some(spec) => try!(PackageIdSpec::query_str(spec, resolve.iter())), None => package.package_id(), }; 
Ok(PackageIdSpec::from_package_id(pkgid)) } cargo-0.8.0/src/cargo/ops/cargo_read_manifest.rs000066400000000000000000000116501264656333200216340ustar00rootroot00000000000000use std::collections::{HashMap, HashSet}; use std::fs::{self, File}; use std::io::prelude::*; use std::io; use std::path::{Path, PathBuf}; use core::{Package, Manifest, SourceId, PackageId}; use util::{self, CargoResult, human, Config, ChainError}; use util::important_paths::find_project_manifest_exact; use util::toml::{Layout, project_layout}; pub fn read_manifest(contents: &[u8], layout: Layout, source_id: &SourceId, config: &Config) -> CargoResult<(Manifest, Vec)> { let root = layout.root.clone(); util::toml::to_manifest(contents, source_id, layout, config).chain_error(|| { human(format!("failed to parse manifest at `{}`", root.join("Cargo.toml").display())) }) } pub fn read_package(path: &Path, source_id: &SourceId, config: &Config) -> CargoResult<(Package, Vec)> { trace!("read_package; path={}; source-id={}", path.display(), source_id); let mut file = try!(File::open(path)); let mut data = Vec::new(); try!(file.read_to_end(&mut data)); let layout = project_layout(path.parent().unwrap()); let (manifest, nested) = try!(read_manifest(&data, layout, source_id, config)); Ok((Package::new(manifest, path), nested)) } pub fn read_packages(path: &Path, source_id: &SourceId, config: &Config) -> CargoResult> { let mut all_packages = HashMap::new(); let mut visited = HashSet::::new(); trace!("looking for root package: {}, source_id={}", path.display(), source_id); try!(walk(path, &mut |dir| { trace!("looking for child package: {}", dir.display()); // Don't recurse into hidden/dot directories unless we're at the toplevel if dir != path { let name = dir.file_name().and_then(|s| s.to_str()); if name.map(|s| s.starts_with(".")) == Some(true) { return Ok(false) } // Don't automatically discover packages across git submodules if fs::metadata(&dir.join(".git")).is_ok() { return Ok(false) } } // Don't ever look 
at target directories if dir.file_name().and_then(|s| s.to_str()) == Some("target") && has_manifest(dir.parent().unwrap()) { return Ok(false) } if has_manifest(dir) { try!(read_nested_packages(dir, &mut all_packages, source_id, config, &mut visited)); } Ok(true) })); if all_packages.is_empty() { Err(human(format!("Could not find Cargo.toml in `{}`", path.display()))) } else { Ok(all_packages.into_iter().map(|(_, v)| v).collect()) } } fn walk(path: &Path, callback: &mut FnMut(&Path) -> CargoResult) -> CargoResult<()> { if !try!(callback(path)) { trace!("not processing {}", path.display()); return Ok(()) } // Ignore any permission denied errors because temporary directories // can often have some weird permissions on them. let dirs = match fs::read_dir(path) { Ok(dirs) => dirs, Err(ref e) if e.kind() == io::ErrorKind::PermissionDenied => { return Ok(()) } Err(e) => return Err(From::from(e)), }; for dir in dirs { let dir = try!(dir); if try!(dir.file_type()).is_dir() { try!(walk(&dir.path(), callback)); } } Ok(()) } fn has_manifest(path: &Path) -> bool { find_project_manifest_exact(path, "Cargo.toml").is_ok() } fn read_nested_packages(path: &Path, all_packages: &mut HashMap, source_id: &SourceId, config: &Config, visited: &mut HashSet) -> CargoResult<()> { if !visited.insert(path.to_path_buf()) { return Ok(()) } let manifest = try!(find_project_manifest_exact(path, "Cargo.toml")); let (pkg, nested) = try!(read_package(&manifest, source_id, config)); let pkg_id = pkg.package_id().clone(); if !all_packages.contains_key(&pkg_id) { all_packages.insert(pkg_id, pkg); } else { info!("skipping nested package `{}` found at `{}`", pkg.name(), path.to_string_lossy()); } // Registry sources are not allowed to have `path=` dependencies because // they're all translated to actual registry dependencies. // // We normalize the path here ensure that we don't infinitely walk around // looking for crates. By normalizing we ensure that we visit this crate at // most once. 
// // TODO: filesystem/symlink implications? if !source_id.is_registry() { for p in nested.iter() { let path = util::normalize_path(&path.join(p)); try!(read_nested_packages(&path, all_packages, source_id, config, visited)); } } Ok(()) } cargo-0.8.0/src/cargo/ops/cargo_run.rs000066400000000000000000000041031264656333200176320ustar00rootroot00000000000000use std::path::Path; use ops::{self, ExecEngine, CompileFilter}; use util::{self, CargoResult, process, ProcessError}; use core::Package; pub fn run(manifest_path: &Path, options: &ops::CompileOptions, args: &[String]) -> CargoResult> { let config = options.config; let root = try!(Package::for_path(manifest_path, config)); let mut bins = root.manifest().targets().iter().filter(|a| { !a.is_lib() && !a.is_custom_build() && match options.filter { CompileFilter::Everything => a.is_bin(), CompileFilter::Only { .. } => options.filter.matches(a), } }); if bins.next().is_none() { match options.filter { CompileFilter::Everything => { bail!("a bin target must be available for `cargo run`") } CompileFilter::Only { .. } => { // this will be verified in cargo_compile } } } if bins.next().is_some() { match options.filter { CompileFilter::Everything => { bail!("`cargo run` requires that a project only have one \ executable; use the `--bin` option to specify which one \ to run") } CompileFilter::Only { .. 
} => { bail!("`cargo run` can run at most one executable, but \ multiple were specified") } } } let compile = try!(ops::compile(manifest_path, options)); let exe = &compile.binaries[0]; let exe = match util::without_prefix(&exe, config.cwd()) { Some(path) if path.file_name() == Some(path.as_os_str()) => Path::new(".").join(path).to_path_buf(), Some(path) => path.to_path_buf(), None => exe.to_path_buf(), }; let mut process = try!(compile.target_process(exe, &root)) .into_process_builder(); process.args(args).cwd(config.cwd()); try!(config.shell().status("Running", process.to_string())); Ok(process.exec().err()) } cargo-0.8.0/src/cargo/ops/cargo_rustc/000077500000000000000000000000001264656333200176225ustar00rootroot00000000000000cargo-0.8.0/src/cargo/ops/cargo_rustc/compilation.rs000066400000000000000000000112151264656333200225060ustar00rootroot00000000000000use std::collections::{HashMap, HashSet}; use std::ffi::OsStr; use std::path::PathBuf; use semver::Version; use core::{PackageId, Package, Target}; use util::{self, CargoResult, Config}; use super::{CommandType, CommandPrototype}; /// A structure returning the result of a compilation. pub struct Compilation<'cfg> { /// All libraries which were built for a package. /// /// This is currently used for passing --extern flags to rustdoc tests later /// on. pub libraries: HashMap>, /// An array of all tests created during this compilation. pub tests: Vec<(Package, String, PathBuf)>, /// An array of all binaries created. pub binaries: Vec, /// All directires for the output of native build commands. /// /// This is currently used to drive some entries which are added to the /// LD_LIBRARY_PATH as appropriate. 
// TODO: deprecated, remove pub native_dirs: HashMap, /// Root output directory (for the local package's artifacts) pub root_output: PathBuf, /// Output directory for rust dependencies pub deps_output: PathBuf, /// Extra environment variables that were passed to compilations and should /// be passed to future invocations of programs. pub extra_env: HashMap>, pub to_doc_test: Vec, /// Features enabled during this compilation. pub cfgs: HashSet, config: &'cfg Config, } impl<'cfg> Compilation<'cfg> { pub fn new(config: &'cfg Config) -> Compilation<'cfg> { Compilation { libraries: HashMap::new(), native_dirs: HashMap::new(), // TODO: deprecated, remove root_output: PathBuf::from("/"), deps_output: PathBuf::from("/"), tests: Vec::new(), binaries: Vec::new(), extra_env: HashMap::new(), to_doc_test: Vec::new(), cfgs: HashSet::new(), config: config, } } /// See `process`. pub fn rustc_process(&self, pkg: &Package) -> CargoResult { self.process(CommandType::Rustc, pkg) } /// See `process`. pub fn rustdoc_process(&self, pkg: &Package) -> CargoResult { self.process(CommandType::Rustdoc, pkg) } /// See `process`. pub fn target_process>(&self, cmd: T, pkg: &Package) -> CargoResult { self.process(CommandType::Target(cmd.as_ref().to_os_string()), pkg) } /// See `process`. pub fn host_process>(&self, cmd: T, pkg: &Package) -> CargoResult { self.process(CommandType::Host(cmd.as_ref().to_os_string()), pkg) } /// Prepares a new process with an appropriate environment to run against /// the artifacts produced by the build process. /// /// The package argument is also used to configure environment variables as /// well as the working directory of the child process. 
pub fn process(&self, cmd: CommandType, pkg: &Package) -> CargoResult { let mut search_path = util::dylib_path(); for dir in self.native_dirs.values() { search_path.push(dir.clone()); } search_path.push(self.root_output.clone()); search_path.push(self.deps_output.clone()); let search_path = try!(util::join_paths(&search_path, util::dylib_path_envvar())); let mut cmd = try!(CommandPrototype::new(cmd, self.config)); cmd.env(util::dylib_path_envvar(), &search_path); if let Some(env) = self.extra_env.get(pkg.package_id()) { for &(ref k, ref v) in env { cmd.env(k, v); } } cmd.env("CARGO_MANIFEST_DIR", pkg.root()) .env("CARGO_PKG_VERSION_MAJOR", &pkg.version().major.to_string()) .env("CARGO_PKG_VERSION_MINOR", &pkg.version().minor.to_string()) .env("CARGO_PKG_VERSION_PATCH", &pkg.version().patch.to_string()) .env("CARGO_PKG_VERSION_PRE", &pre_version_component(pkg.version())) .env("CARGO_PKG_VERSION", &pkg.version().to_string()) .cwd(pkg.root()); Ok(cmd) } } fn pre_version_component(v: &Version) -> String { if v.pre.is_empty() { return String::new(); } let mut ret = String::new(); for (i, x) in v.pre.iter().enumerate() { if i != 0 { ret.push('.') }; ret.push_str(&x.to_string()); } ret } cargo-0.8.0/src/cargo/ops/cargo_rustc/context.rs000066400000000000000000000522551264656333200216650ustar00rootroot00000000000000use std::collections::{HashSet, HashMap}; use std::path::{Path, PathBuf}; use std::str; use std::sync::Arc; use regex::Regex; use core::{SourceMap, Package, PackageId, PackageSet, Resolve, Target, Profile}; use core::{TargetKind, LibKind, Profiles, Metadata, Dependency}; use core::dependency::Kind as DepKind; use util::{self, CargoResult, ChainError, internal, Config, profile}; use super::TargetConfig; use super::custom_build::{BuildState, BuildScripts}; use super::fingerprint::Fingerprint; use super::layout::{Layout, LayoutProxy}; use super::{Kind, Compilation, BuildConfig}; use super::{ProcessEngine, ExecEngine}; #[derive(Clone, Copy, Eq, PartialEq, Hash)] pub 
struct Unit<'a> { pub pkg: &'a Package, pub target: &'a Target, pub profile: &'a Profile, pub kind: Kind, } pub struct Context<'a, 'cfg: 'a> { pub config: &'cfg Config, pub resolve: &'a Resolve, pub sources: &'a SourceMap<'cfg>, pub compilation: Compilation<'cfg>, pub build_state: Arc, pub build_explicit_deps: HashMap, (PathBuf, Vec)>, pub exec_engine: Arc>, pub fingerprints: HashMap, Arc>, pub compiled: HashSet>, pub build_config: BuildConfig, pub build_scripts: HashMap, Arc>, host: Layout, target: Option, target_triple: String, host_dylib: Option<(String, String)>, host_exe: String, package_set: &'a PackageSet, target_dylib: Option<(String, String)>, target_exe: String, profiles: &'a Profiles, } impl<'a, 'cfg> Context<'a, 'cfg> { pub fn new(resolve: &'a Resolve, sources: &'a SourceMap<'cfg>, deps: &'a PackageSet, config: &'cfg Config, host: Layout, target_layout: Option, build_config: BuildConfig, profiles: &'a Profiles) -> CargoResult> { let target = build_config.requested_target.clone(); let target = target.as_ref().map(|s| &s[..]); let (target_dylib, target_exe) = try!(Context::filename_parts(target, config)); let (host_dylib, host_exe) = if build_config.requested_target.is_none() { (target_dylib.clone(), target_exe.clone()) } else { try!(Context::filename_parts(None, config)) }; let target_triple = target.unwrap_or_else(|| { &config.rustc_info().host[..] 
}).to_string(); let engine = build_config.exec_engine.as_ref().cloned().unwrap_or({ Arc::new(Box::new(ProcessEngine)) }); Ok(Context { target_triple: target_triple, host: host, target: target_layout, resolve: resolve, sources: sources, package_set: deps, config: config, target_dylib: target_dylib, target_exe: target_exe, host_dylib: host_dylib, host_exe: host_exe, compilation: Compilation::new(config), build_state: Arc::new(BuildState::new(&build_config, deps)), build_config: build_config, exec_engine: engine, fingerprints: HashMap::new(), profiles: profiles, compiled: HashSet::new(), build_scripts: HashMap::new(), build_explicit_deps: HashMap::new(), }) } /// Run `rustc` to discover the dylib prefix/suffix for the target /// specified as well as the exe suffix fn filename_parts(target: Option<&str>, cfg: &Config) -> CargoResult<(Option<(String, String)>, String)> { let mut process = util::process(cfg.rustc()); process.arg("-") .arg("--crate-name").arg("_") .arg("--crate-type").arg("dylib") .arg("--crate-type").arg("bin") .arg("--print=file-names") .env_remove("RUST_LOG"); if let Some(s) = target { process.arg("--target").arg(s); }; let output = try!(process.exec_with_output()); let error = str::from_utf8(&output.stderr).unwrap(); let output = str::from_utf8(&output.stdout).unwrap(); let mut lines = output.lines(); let nodylib = Regex::new("unsupported crate type.*dylib").unwrap(); let nobin = Regex::new("unsupported crate type.*bin").unwrap(); let dylib = if nodylib.is_match(error) { None } else { let dylib_parts: Vec<&str> = lines.next().unwrap().trim() .split('_').collect(); assert!(dylib_parts.len() == 2, "rustc --print-file-name output has changed"); Some((dylib_parts[0].to_string(), dylib_parts[1].to_string())) }; let exe_suffix = if nobin.is_match(error) { String::new() } else { lines.next().unwrap().trim() .split('_').skip(1).next().unwrap().to_string() }; Ok((dylib, exe_suffix.to_string())) } /// Prepare this context, ensuring that all filesystem 
directories are in /// place. pub fn prepare(&mut self, root: &Package) -> CargoResult<()> { let _p = profile::start("preparing layout"); try!(self.host.prepare().chain_error(|| { internal(format!("couldn't prepare build directories")) })); match self.target { Some(ref mut target) => { try!(target.prepare().chain_error(|| { internal(format!("couldn't prepare build directories")) })); } None => {} } self.compilation.root_output = self.layout(root, Kind::Target).proxy().dest().to_path_buf(); self.compilation.deps_output = self.layout(root, Kind::Target).proxy().deps().to_path_buf(); return Ok(()); } /// Returns the appropriate directory layout for either a plugin or not. pub fn layout(&self, pkg: &Package, kind: Kind) -> LayoutProxy { let primary = pkg.package_id() == self.resolve.root(); match kind { Kind::Host => LayoutProxy::new(&self.host, primary), Kind::Target => LayoutProxy::new(self.target.as_ref() .unwrap_or(&self.host), primary), } } /// Returns the appropriate output directory for the specified package and /// target. pub fn out_dir(&self, unit: &Unit) -> PathBuf { let out_dir = self.layout(unit.pkg, unit.kind); if unit.target.is_custom_build() { out_dir.build(unit.pkg) } else if unit.target.is_example() { out_dir.examples().to_path_buf() } else { out_dir.root().to_path_buf() } } /// Return the (prefix, suffix) pair for dynamic libraries. /// /// If `plugin` is true, the pair corresponds to the host platform, /// otherwise it corresponds to the target platform. fn dylib(&self, kind: Kind) -> CargoResult<(&str, &str)> { let (triple, pair) = if kind == Kind::Host { (&self.config.rustc_info().host, &self.host_dylib) } else { (&self.target_triple, &self.target_dylib) }; match *pair { None => bail!("dylib outputs are not supported for {}", triple), Some((ref s1, ref s2)) => Ok((s1, s2)), } } /// Return the target triple which this context is targeting. 
pub fn target_triple(&self) -> &str { &self.target_triple } /// Get the metadata for a target in a specific profile pub fn target_metadata(&self, unit: &Unit) -> Option { let metadata = unit.target.metadata(); if unit.target.is_lib() && unit.profile.test { // Libs and their tests are built in parallel, so we need to make // sure that their metadata is different. metadata.map(|m| m.clone()).map(|mut m| { m.mix(&"test"); m }) } else if unit.target.is_bin() && unit.profile.test { // Make sure that the name of this test executable doesn't // conflict with a library that has the same name and is // being tested let mut metadata = unit.pkg.generate_metadata(); metadata.mix(&format!("bin-{}", unit.target.name())); Some(metadata) } else if unit.pkg.package_id() == self.resolve.root() && !unit.profile.test { // If we're not building a unit test then the root package never // needs any metadata as it's guaranteed to not conflict with any // other output filenames. This means that we'll have predictable // file names like `target/debug/libfoo.{a,so,rlib}` and such. None } else { metadata.map(|m| m.clone()) } } /// Returns the file stem for a given target/profile combo pub fn file_stem(&self, unit: &Unit) -> String { match self.target_metadata(unit) { Some(ref metadata) => format!("{}{}", unit.target.crate_name(), metadata.extra_filename), None if unit.target.allows_underscores() => { unit.target.name().to_string() } None => unit.target.crate_name().to_string(), } } /// Return the filenames that the given target for the given profile will /// generate. pub fn target_filenames(&self, unit: &Unit) -> CargoResult> { let stem = self.file_stem(unit); let suffix = if unit.target.for_host() { &self.host_exe } else { &self.target_exe }; let mut ret = Vec::new(); match *unit.target.kind() { TargetKind::Example | TargetKind::Bin | TargetKind::CustomBuild | TargetKind::Bench | TargetKind::Test => { ret.push(format!("{}{}", stem, suffix)); } TargetKind::Lib(..) 
if unit.profile.test => { ret.push(format!("{}{}", stem, suffix)); } TargetKind::Lib(ref libs) => { for lib in libs.iter() { match *lib { LibKind::Dylib => { if let Ok((prefix, suffix)) = self.dylib(unit.kind) { ret.push(format!("{}{}{}", prefix, stem, suffix)); } } LibKind::Lib | LibKind::Rlib => ret.push(format!("lib{}.rlib", stem)), LibKind::StaticLib => ret.push(format!("lib{}.a", stem)), } } } } assert!(ret.len() > 0); return Ok(ret); } /// For a package, return all targets which are registered as dependencies /// for that package. pub fn dep_targets(&self, unit: &Unit<'a>) -> Vec> { if unit.profile.run_custom_build { return self.dep_run_custom_build(unit) } else if unit.profile.doc { return self.doc_deps(unit); } let id = unit.pkg.package_id(); let deps = self.resolve.deps(id).into_iter().flat_map(|a| a); let mut ret = deps.map(|id| self.get_package(id)).filter(|dep| { unit.pkg.dependencies().iter().filter(|d| { d.name() == dep.name() }).any(|d| { // If this target is a build command, then we only want build // dependencies, otherwise we want everything *other than* build // dependencies. if unit.target.is_custom_build() != d.is_build() { return false } // If this dependency is *not* a transitive dependency, then it // only applies to test/example targets if !d.is_transitive() && !unit.target.is_test() && !unit.target.is_example() && !unit.profile.test { return false } // If this dependency is only available for certain platforms, // make sure we're only enabling it for that platform. if !self.dep_platform_activated(d, unit.kind) { return false } // If the dependency is optional, then we're only activating it // if the corresponding feature was activated if d.is_optional() { match self.resolve.features(id) { Some(f) if f.contains(d.name()) => {} _ => return false, } } // If we've gotten past all that, then this dependency is // actually used! 
true
        })
    }).filter_map(|pkg| {
        pkg.targets().iter().find(|t| t.is_lib()).map(|t| {
            Unit {
                pkg: pkg,
                target: t,
                profile: self.lib_profile(id),
                kind: unit.kind.for_target(t),
            }
        })
    }).collect::<Vec<Unit<'a>>>();

    // If this target is a build script, then what we've collected so far is
    // all we need. If this isn't a build script, then it depends on the
    // build script if there is one.
    if unit.target.is_custom_build() { return ret }
    ret.extend(self.dep_build_script(unit));

    // If this target is a binary, test, example, etc, then it depends on
    // the library of the same package. The call to `resolve.deps` above
    // didn't include `pkg` in the return values, so we need to special case
    // it here and see if we need to push `(pkg, pkg_lib_target)`.
    if unit.target.is_lib() { return ret }
    ret.extend(self.maybe_lib(unit));

    // Integration tests/benchmarks require binaries to be built
    if unit.profile.test &&
       (unit.target.is_test() || unit.target.is_bench()) {
        ret.extend(unit.pkg.targets().iter().filter(|t| t.is_bin()).map(|t| {
            Unit {
                pkg: unit.pkg,
                target: t,
                profile: self.lib_profile(id),
                kind: unit.kind.for_target(t),
            }
        }));
    }
    return ret
}

/// Returns the dependencies needed to run a build script.
///
/// The `unit` provided must represent an execution of a build script, and
/// the returned set of units must all be run before `unit` is run.
pub fn dep_run_custom_build(&self, unit: &Unit<'a>) -> Vec<Unit<'a>> {
    // If this build script's execution has been overridden then we don't
    // actually depend on anything, we've reached the end of the dependency
    // chain as we've got all the info we're gonna get.
    let key = (unit.pkg.package_id().clone(), unit.kind);
    if self.build_state.outputs.lock().unwrap().contains_key(&key) {
        return Vec::new()
    }

    // When not overridden, then the dependencies to run a build script are:
    //
    // 1. Compiling the build script itself
    // 2. For each immediate dependency of our package which has a `links`
    //    key, the execution of that build script.
    let not_custom_build = unit.pkg.targets().iter().find(|t| {
        !t.is_custom_build()
    }).unwrap();
    let tmp = Unit {
        target: not_custom_build,
        profile: &self.profiles.dev,
        ..*unit
    };
    self.dep_targets(&tmp).iter().filter_map(|unit| {
        if !unit.target.linkable() || unit.pkg.manifest().links().is_none() {
            return None
        }
        self.dep_build_script(unit)
    }).chain(Some(Unit {
        profile: self.build_script_profile(unit.pkg.package_id()),
        kind: Kind::Host, // build scripts always compiled for the host
        ..*unit
    })).collect()
}

/// Returns the dependencies necessary to document a package
fn doc_deps(&self, unit: &Unit<'a>) -> Vec<Unit<'a>> {
    let deps = self.resolve.deps(unit.pkg.package_id()).into_iter();
    let deps = deps.flat_map(|a| a).map(|id| {
        self.get_package(id)
    }).filter(|dep| {
        unit.pkg.dependencies().iter().filter(|d| {
            d.name() == dep.name()
        }).any(|dep| {
            // Documentation only follows normal, platform-activated deps.
            match dep.kind() {
                DepKind::Normal => self.dep_platform_activated(dep, unit.kind),
                _ => false,
            }
        })
    }).filter_map(|dep| {
        dep.targets().iter().find(|t| t.is_lib()).map(|t| (dep, t))
    });

    // To document a library, we depend on dependencies actually being
    // built. If we're documenting *all* libraries, then we also depend on
    // the documentation of the library being built.
    let mut ret = Vec::new();
    for (dep, lib) in deps {
        ret.push(Unit {
            pkg: dep,
            target: lib,
            profile: self.lib_profile(dep.package_id()),
            kind: unit.kind.for_target(lib),
        });
        if self.build_config.doc_all {
            ret.push(Unit {
                pkg: dep,
                target: lib,
                profile: &self.profiles.doc,
                kind: unit.kind.for_target(lib),
            });
        }
    }

    // Be sure to build/run the build script for documented libraries as well
    ret.extend(self.dep_build_script(unit));

    // If we document a binary, we need the library available
    if unit.target.is_bin() {
        ret.extend(self.maybe_lib(unit));
    }
    return ret
}

/// If a build script is scheduled to be run for the package specified by
/// `unit`, this function will return the unit to run that build script.
///
/// Overriding a build script simply means that the running of the build
/// script itself doesn't have any dependencies, so even in that case a unit
/// of work is still returned. `None` is only returned if the package has no
/// build script.
fn dep_build_script(&self, unit: &Unit<'a>) -> Option<Unit<'a>> {
    unit.pkg.targets().iter().find(|t| t.is_custom_build()).map(|t| {
        Unit {
            pkg: unit.pkg,
            target: t,
            profile: &self.profiles.custom_build,
            kind: unit.kind,
        }
    })
}

/// Returns the unit for this package's linkable (library) target, if any.
fn maybe_lib(&self, unit: &Unit<'a>) -> Option<Unit<'a>> {
    unit.pkg.targets().iter().find(|t| t.linkable()).map(|t| {
        Unit {
            pkg: unit.pkg,
            target: t,
            profile: self.lib_profile(unit.pkg.package_id()),
            kind: unit.kind.for_target(t),
        }
    })
}

/// Whether a platform-restricted dependency is active for host/target builds.
fn dep_platform_activated(&self, dep: &Dependency, kind: Kind) -> bool {
    // If this dependency is only available for certain platforms,
    // make sure we're only enabling it for that platform.
    match (dep.only_for_platform(), kind) {
        (Some(ref platform), Kind::Host) => {
            *platform == self.config.rustc_info().host
        },
        (Some(ref platform), Kind::Target) => {
            *platform == self.target_triple
        },
        (None, _) => true
    }
}

/// Gets a package for the given package id.
pub fn get_package(&self, id: &PackageId) -> &'a Package { self.package_set.iter() .find(|pkg| id == pkg.package_id()) .expect("Should have found package") } /// Get the user-specified linker for a particular host or target pub fn linker(&self, kind: Kind) -> Option<&Path> { self.target_config(kind).linker.as_ref().map(|s| s.as_ref()) } /// Get the user-specified `ar` program for a particular host or target pub fn ar(&self, kind: Kind) -> Option<&Path> { self.target_config(kind).ar.as_ref().map(|s| s.as_ref()) } /// Get the target configuration for a particular host or target fn target_config(&self, kind: Kind) -> &TargetConfig { match kind { Kind::Host => &self.build_config.host, Kind::Target => &self.build_config.target, } } /// Number of jobs specified for this build pub fn jobs(&self) -> u32 { self.build_config.jobs } /// Requested (not actual) target for the build pub fn requested_target(&self) -> Option<&str> { self.build_config.requested_target.as_ref().map(|s| &s[..]) } pub fn lib_profile(&self, _pkg: &PackageId) -> &'a Profile { if self.build_config.release { &self.profiles.release } else { &self.profiles.dev } } pub fn build_script_profile(&self, _pkg: &PackageId) -> &'a Profile { // TODO: should build scripts always be built with a dev // profile? How is this controlled at the CLI layer? &self.profiles.dev } } cargo-0.8.0/src/cargo/ops/cargo_rustc/custom_build.rs000066400000000000000000000377111264656333200226720ustar00rootroot00000000000000use std::collections::{HashMap, BTreeSet}; use std::fs; use std::io::prelude::*; use std::path::{PathBuf, Path}; use std::str; use std::sync::{Mutex, Arc}; use core::{PackageId, PackageSet}; use util::{CargoResult, human, Human}; use util::{internal, ChainError, profile, paths}; use util::Freshness; use super::job::Work; use super::{fingerprint, Kind, Context, Unit}; use super::CommandType; /// Contains the parsed output of a custom build script. 
#[derive(Clone, Debug, Hash)]
pub struct BuildOutput {
    /// Paths to pass to rustc with the `-L` flag
    pub library_paths: Vec<PathBuf>,
    /// Names and link kinds of libraries, suitable for the `-l` flag
    pub library_links: Vec<String>,
    /// Various `--cfg` flags to pass to the compiler
    pub cfgs: Vec<String>,
    /// Metadata to pass to the immediate dependencies
    pub metadata: Vec<(String, String)>,
    /// Glob paths to trigger a rerun of this build script.
    pub rerun_if_changed: Vec<String>,
}

/// Map of `(package, kind)` to the parsed output of that package's build
/// script.
pub type BuildMap = HashMap<(PackageId, Kind), BuildOutput>;

/// Shared, mutex-protected collection of all build script outputs seen so
/// far during a build.
pub struct BuildState {
    pub outputs: Mutex<BuildMap>,
}

#[derive(Default)]
pub struct BuildScripts {
    /// Build scripts whose output (e.g. `-L` flags) must be propagated when
    /// linking.
    pub to_link: BTreeSet<(PackageId, Kind)>,
    /// Packages whose build script output is needed for host-side plugins.
    pub plugins: BTreeSet<PackageId>,
}

/// Prepares a `Work` that executes the target as a custom build script.
///
/// The `req` given is the requirement which this run of the build script will
/// prepare work for. If the requirement is specified as both the target and the
/// host platforms it is assumed that the two are equal and the build script is
/// only run once (not twice).
pub fn prepare<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
                         -> CargoResult<(Work, Work, Freshness)> {
    let _p = profile::start(format!("build script prepare: {}/{}",
                                    unit.pkg, unit.target.name()));
    let key = (unit.pkg.package_id().clone(), unit.kind);
    let overridden = cx.build_state.outputs.lock().unwrap().contains_key(&key);
    let (work_dirty, work_fresh) = if overridden {
        // The output was overridden in configuration, so there is nothing to
        // actually execute for either the dirty or fresh case.
        (Work::new(|_| Ok(())), Work::new(|_| Ok(())))
    } else {
        try!(build_work(cx, unit))
    };

    // Now that we've prep'd our work, build the work needed to manage the
    // fingerprint and then start returning that upwards.
let (freshness, dirty, fresh) = try!(fingerprint::prepare_build_cmd(cx, unit)); Ok((work_dirty.then(dirty), work_fresh.then(fresh), freshness)) } fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<(Work, Work)> { let (script_output, build_output) = { (cx.layout(unit.pkg, Kind::Host).build(unit.pkg), cx.layout(unit.pkg, unit.kind).build_out(unit.pkg)) }; // Building the command to execute let to_exec = script_output.join(unit.target.name()); // Start preparing the process to execute, starting out with some // environment variables. Note that the profile-related environment // variables are not set with this the build script's profile but rather the // package's library profile. let profile = cx.lib_profile(unit.pkg.package_id()); let to_exec = to_exec.into_os_string(); let mut p = try!(super::process(CommandType::Host(to_exec), unit.pkg, cx)); p.env("OUT_DIR", &build_output) .env("CARGO_MANIFEST_DIR", unit.pkg.root()) .env("NUM_JOBS", &cx.jobs().to_string()) .env("TARGET", &match unit.kind { Kind::Host => &cx.config.rustc_info().host[..], Kind::Target => cx.target_triple(), }) .env("DEBUG", &profile.debuginfo.to_string()) .env("OPT_LEVEL", &profile.opt_level.to_string()) .env("PROFILE", if cx.build_config.release {"release"} else {"debug"}) .env("HOST", &cx.config.rustc_info().host); // Be sure to pass along all enabled features for this package, this is the // last piece of statically known information that we have. if let Some(features) = cx.resolve.features(unit.pkg.package_id()) { for feat in features.iter() { p.env(&format!("CARGO_FEATURE_{}", super::envify(feat)), "1"); } } // Gather the set of native dependencies that this package has along with // some other variables to close over. // // This information will be used at build-time later on to figure out which // sorts of variables need to be discovered at that time. 
let lib_deps = { cx.dep_run_custom_build(unit).iter().filter_map(|unit| { if unit.profile.run_custom_build { Some((unit.pkg.manifest().links().unwrap().to_string(), unit.pkg.package_id().clone())) } else { None } }).collect::>() }; let pkg_name = unit.pkg.to_string(); let build_state = cx.build_state.clone(); let id = unit.pkg.package_id().clone(); let output_file = build_output.parent().unwrap().join("output"); let all = (id.clone(), pkg_name.clone(), build_state.clone(), output_file.clone()); let build_scripts = super::load_build_deps(cx, unit); let kind = unit.kind; // Check to see if the build script as already run, and if it has keep // track of whether it has told us about some explicit dependencies let prev_output = BuildOutput::parse_file(&output_file, &pkg_name).ok(); let rerun_if_changed = match prev_output { Some(ref prev) => prev.rerun_if_changed.clone(), None => Vec::new(), }; cx.build_explicit_deps.insert(*unit, (output_file.clone(), rerun_if_changed)); try!(fs::create_dir_all(&cx.layout(unit.pkg, Kind::Host).build(unit.pkg))); try!(fs::create_dir_all(&cx.layout(unit.pkg, unit.kind).build(unit.pkg))); let exec_engine = cx.exec_engine.clone(); // Prepare the unit of "dirty work" which will actually run the custom build // command. // // Note that this has to do some extra work just before running the command // to determine extra environment variables and such. let dirty = Work::new(move |desc_tx| { // Make sure that OUT_DIR exists. // // If we have an old build directory, then just move it into place, // otherwise create it! if fs::metadata(&build_output).is_err() { try!(fs::create_dir(&build_output).chain_error(|| { internal("failed to create script output directory for \ build command") })); } // For all our native lib dependencies, pick up their metadata to pass // along to this custom build command. We're also careful to augment our // dynamic library search path in case the build script depended on any // native dynamic libraries. 
{ let build_state = build_state.outputs.lock().unwrap(); for (name, id) in lib_deps { let key = (id.clone(), kind); let state = try!(build_state.get(&key).chain_error(|| { internal(format!("failed to locate build state for env \ vars: {}/{:?}", id, kind)) })); let data = &state.metadata; for &(ref key, ref value) in data.iter() { p.env(&format!("DEP_{}_{}", super::envify(&name), super::envify(key)), value); } } if let Some(build_scripts) = build_scripts { try!(super::add_plugin_deps(&mut p, &build_state, &build_scripts)); } } // And now finally, run the build command itself! desc_tx.send(p.to_string()).ok(); let output = try!(exec_engine.exec_with_output(p).map_err(|mut e| { e.desc = format!("failed to run custom build command for `{}`\n{}", pkg_name, e.desc); Human(e) })); try!(paths::write(&output_file, &output.stdout)); // After the build command has finished running, we need to be sure to // remember all of its output so we can later discover precisely what it // was, even if we don't run the build command again (due to freshness). // // This is also the location where we provide feedback into the build // state informing what variables were discovered via our script as // well. let output = try!(str::from_utf8(&output.stdout).map_err(|_| { human("build script output was not valid utf-8") })); let parsed_output = try!(BuildOutput::parse(output, &pkg_name)); build_state.insert(id, kind, parsed_output); Ok(()) }); // Now that we've prepared our work-to-do, we need to prepare the fresh work // itself to run when we actually end up just discarding what we calculated // above. 
let fresh = Work::new(move |_tx| { let (id, pkg_name, build_state, output_file) = all; let output = match prev_output { Some(output) => output, None => try!(BuildOutput::parse_file(&output_file, &pkg_name)), }; build_state.insert(id, kind, output); Ok(()) }); Ok((dirty, fresh)) } impl BuildState { pub fn new(config: &super::BuildConfig, packages: &PackageSet) -> BuildState { let mut sources = HashMap::new(); for package in packages.iter() { match package.manifest().links() { Some(links) => { sources.insert(links.to_string(), package.package_id().clone()); } None => {} } } let mut outputs = HashMap::new(); let i1 = config.host.overrides.iter().map(|p| (p, Kind::Host)); let i2 = config.target.overrides.iter().map(|p| (p, Kind::Target)); for ((name, output), kind) in i1.chain(i2) { // If no package is using the library named `name`, then this is // just an override that we ignore. if let Some(id) = sources.get(name) { outputs.insert((id.clone(), kind), output.clone()); } } BuildState { outputs: Mutex::new(outputs) } } fn insert(&self, id: PackageId, kind: Kind, output: BuildOutput) { self.outputs.lock().unwrap().insert((id, kind), output); } } impl BuildOutput { pub fn parse_file(path: &Path, pkg_name: &str) -> CargoResult { let contents = try!(paths::read(path)); BuildOutput::parse(&contents, pkg_name) } // Parses the output of a script. // The `pkg_name` is used for error messages. 
pub fn parse(input: &str, pkg_name: &str) -> CargoResult { let mut library_paths = Vec::new(); let mut library_links = Vec::new(); let mut cfgs = Vec::new(); let mut metadata = Vec::new(); let mut rerun_if_changed = Vec::new(); let whence = format!("build script of `{}`", pkg_name); for line in input.lines() { let mut iter = line.splitn(2, ':'); if iter.next() != Some("cargo") { // skip this line since it doesn't start with "cargo:" continue; } let data = match iter.next() { Some(val) => val, None => continue }; // getting the `key=value` part of the line let mut iter = data.splitn(2, '='); let key = iter.next(); let value = iter.next(); let (key, value) = match (key, value) { (Some(a), Some(b)) => (a, b.trim_right()), // line started with `cargo:` but didn't match `key=value` _ => bail!("Wrong output in {}: `{}`", whence, line), }; match key { "rustc-flags" => { let (libs, links) = try!( BuildOutput::parse_rustc_flags(value, &whence) ); library_links.extend(links.into_iter()); library_paths.extend(libs.into_iter()); } "rustc-link-lib" => library_links.push(value.to_string()), "rustc-link-search" => library_paths.push(PathBuf::from(value)), "rustc-cfg" => cfgs.push(value.to_string()), "rerun-if-changed" => rerun_if_changed.push(value.to_string()), _ => metadata.push((key.to_string(), value.to_string())), } } Ok(BuildOutput { library_paths: library_paths, library_links: library_links, cfgs: cfgs, metadata: metadata, rerun_if_changed: rerun_if_changed, }) } pub fn parse_rustc_flags(value: &str, whence: &str) -> CargoResult<(Vec, Vec)> { let value = value.trim(); let mut flags_iter = value.split(|c: char| c.is_whitespace()) .filter(|w| w.chars().any(|c| !c.is_whitespace())); let (mut library_links, mut library_paths) = (Vec::new(), Vec::new()); loop { let flag = match flags_iter.next() { Some(f) => f, None => break }; if flag != "-l" && flag != "-L" { bail!("Only `-l` and `-L` flags are allowed in {}: `{}`", whence, value) } let value = match flags_iter.next() { 
Some(v) => v, None => bail!("Flag in rustc-flags has no value in {}: `{}`", whence, value) }; match flag { "-l" => library_links.push(value.to_string()), "-L" => library_paths.push(PathBuf::from(value)), // was already checked above _ => bail!("only -l and -L flags are allowed") }; } Ok((library_paths, library_links)) } } /// Compute the `build_scripts` map in the `Context` which tracks what build /// scripts each package depends on. /// /// The global `build_scripts` map lists for all (package, kind) tuples what set /// of packages' build script outputs must be considered. For example this lists /// all dependencies' `-L` flags which need to be propagated transitively. /// /// The given set of targets to this function is the initial set of /// targets/profiles which are being built. pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) { let mut ret = HashMap::new(); for unit in units { build(&mut ret, cx, unit); } cx.build_scripts.extend(ret.into_iter().map(|(k, v)| { (k, Arc::new(v)) })); // Recursive function to build up the map we're constructing. This function // memoizes all of its return values as it goes along. fn build<'a, 'b, 'cfg>(out: &'a mut HashMap, BuildScripts>, cx: &Context<'b, 'cfg>, unit: &Unit<'b>) -> &'a BuildScripts { // Do a quick pre-flight check to see if we've already calculated the // set of dependencies. 
if out.contains_key(unit) { return &out[unit] } let mut to_link = BTreeSet::new(); let mut plugins = BTreeSet::new(); if !unit.target.is_custom_build() && unit.pkg.has_custom_build() { to_link.insert((unit.pkg.package_id().clone(), unit.kind)); } for unit in cx.dep_targets(unit).iter() { let dep_scripts = build(out, cx, unit); if unit.target.for_host() { plugins.extend(dep_scripts.to_link.iter() .map(|p| &p.0).cloned()); } else if unit.target.linkable() { to_link.extend(dep_scripts.to_link.iter().cloned()); } } let prev = out.entry(*unit).or_insert(BuildScripts::default()); prev.to_link.extend(to_link); prev.plugins.extend(plugins); return prev } } cargo-0.8.0/src/cargo/ops/cargo_rustc/engine.rs000066400000000000000000000060201264656333200214330ustar00rootroot00000000000000use std::collections::HashMap; use std::ffi::{OsStr, OsString}; use std::fmt; use std::path::Path; use std::process::Output; use util::{CargoResult, ProcessError, ProcessBuilder, process}; use util::Config; /// Trait for objects that can execute commands. pub trait ExecEngine: Send + Sync { fn exec(&self, CommandPrototype) -> Result<(), ProcessError>; fn exec_with_output(&self, CommandPrototype) -> Result; } /// Default implementation of `ExecEngine`. #[derive(Clone, Copy)] pub struct ProcessEngine; impl ExecEngine for ProcessEngine { fn exec(&self, command: CommandPrototype) -> Result<(), ProcessError> { command.into_process_builder().exec() } fn exec_with_output(&self, command: CommandPrototype) -> Result { command.into_process_builder().exec_with_output() } } /// Prototype for a command that must be executed. 
#[derive(Clone)]
pub struct CommandPrototype {
    ty: CommandType,
    builder: ProcessBuilder,
}

impl CommandPrototype {
    /// Creates a new prototype, resolving the program to run from the command
    /// type and defaulting the working directory to the config's cwd.
    pub fn new(ty: CommandType, config: &Config)
               -> CargoResult<CommandPrototype> {
        Ok(CommandPrototype {
            builder: {
                let mut p = match ty {
                    CommandType::Rustc => process(config.rustc()),
                    CommandType::Rustdoc => process(config.rustdoc()),
                    CommandType::Target(ref s) |
                    CommandType::Host(ref s) => process(s),
                };
                p.cwd(config.cwd());
                p
            },
            ty: ty,
        })
    }

    pub fn get_type(&self) -> &CommandType { &self.ty }

    pub fn arg<T: AsRef<OsStr>>(&mut self, arg: T) -> &mut CommandPrototype {
        self.builder.arg(arg);
        self
    }

    pub fn args<T: AsRef<OsStr>>(&mut self, arguments: &[T])
                -> &mut CommandPrototype {
        self.builder.args(arguments);
        self
    }

    pub fn cwd<T: AsRef<OsStr>>(&mut self, path: T) -> &mut CommandPrototype {
        self.builder.cwd(path);
        self
    }

    pub fn env<T: AsRef<OsStr>>(&mut self, key: &str, val: T)
               -> &mut CommandPrototype {
        self.builder.env(key, val);
        self
    }

    pub fn get_args(&self) -> &[OsString] { self.builder.get_args() }

    pub fn get_cwd(&self) -> Option<&Path> { self.builder.get_cwd() }

    pub fn get_env(&self, var: &str) -> Option<OsString> {
        self.builder.get_env(var)
    }

    pub fn get_envs(&self) -> &HashMap<String, Option<OsString>> {
        self.builder.get_envs()
    }

    pub fn into_process_builder(self) -> ProcessBuilder {
        self.builder
    }
}

impl fmt::Display for CommandPrototype {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.builder.fmt(f)
    }
}

#[derive(Clone, Debug)]
pub enum CommandType {
    Rustc,
    Rustdoc,

    /// The command is to be executed for the target architecture.
    Target(OsString),

    /// The command is to be executed for the host architecture.
Host(OsString), } cargo-0.8.0/src/cargo/ops/cargo_rustc/fingerprint.rs000066400000000000000000000574661264656333200225410ustar00rootroot00000000000000use std::fs::{self, File, OpenOptions}; use std::hash::{self, Hasher}; use std::io::prelude::*; use std::io::{BufReader, SeekFrom}; use std::path::{Path, PathBuf}; use std::sync::{Arc, Mutex}; use filetime::FileTime; use rustc_serialize::{json, Encodable, Decodable, Encoder, Decoder}; use core::{Package, TargetKind}; use util; use util::{CargoResult, Fresh, Dirty, Freshness, internal, profile, ChainError}; use util::paths; use super::job::Work; use super::context::{Context, Unit}; /// A tuple result of the `prepare_foo` functions in this module. /// /// The first element of the triple is whether the target in question is /// currently fresh or not, and the second two elements are work to perform when /// the target is dirty or fresh, respectively. /// /// Both units of work are always generated because a fresh package may still be /// rebuilt if some upstream dependency changes. pub type Preparation = (Freshness, Work, Work); /// Prepare the necessary work for the fingerprint for a specific target. /// /// When dealing with fingerprints, cargo gets to choose what granularity /// "freshness" is considered at. One option is considering freshness at the /// package level. This means that if anything in a package changes, the entire /// package is rebuilt, unconditionally. This simplicity comes at a cost, /// however, in that test-only changes will cause libraries to be rebuilt, which /// is quite unfortunate! /// /// The cost was deemed high enough that fingerprints are now calculated at the /// layer of a target rather than a package. Each target can then be kept track /// of separately and only rebuilt as necessary. This requires cargo to /// understand what the inputs are to a target, so we drive rustc with the /// --dep-info flag to learn about all input files to a unit of compilation. 
/// /// This function will calculate the fingerprint for a target and prepare the /// work necessary to either write the fingerprint or copy over all fresh files /// from the old directories to their new locations. pub fn prepare_target<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult { let _p = profile::start(format!("fingerprint: {} / {}", unit.pkg.package_id(), unit.target.name())); let new = dir(cx, unit); let loc = new.join(&filename(unit)); debug!("fingerprint at: {}", loc.display()); let fingerprint = try!(calculate(cx, unit)); let compare = compare_old_fingerprint(&loc, &*fingerprint); log_compare(unit, &compare); let root = cx.out_dir(unit); let mut missing_outputs = false; if !unit.profile.doc { for filename in try!(cx.target_filenames(unit)).iter() { missing_outputs |= fs::metadata(root.join(filename)).is_err(); } } let allow_failure = unit.profile.rustc_args.is_some(); let write_fingerprint = Work::new(move |_| { match fingerprint.update_local() { Ok(()) => {} Err(..) if allow_failure => return Ok(()), Err(e) => return Err(e) } write_fingerprint(&loc, &*fingerprint) }); let fresh = compare.is_ok() && !missing_outputs; Ok((if fresh {Fresh} else {Dirty}, write_fingerprint, Work::noop())) } /// A fingerprint can be considered to be a "short string" representing the /// state of a world for a package. /// /// If a fingerprint ever changes, then the package itself needs to be /// recompiled. Inputs to the fingerprint include source code modifications, /// compiler flags, compiler version, etc. This structure is not simply a /// `String` due to the fact that some fingerprints cannot be calculated lazily. /// /// Path sources, for example, use the mtime of the corresponding dep-info file /// as a fingerprint (all source files must be modified *before* this mtime). /// This dep-info file is not generated, however, until after the crate is /// compiled. As a result, this structure can be thought of as a fingerprint /// to-be. 
The actual value can be calculated via `hash()`, but the operation /// may fail as some files may not have been generated. /// /// Note that dependencies are taken into account for fingerprints because rustc /// requires that whenever an upstream crate is recompiled that all downstream /// dependants are also recompiled. This is typically tracked through /// `DependencyQueue`, but it also needs to be retained here because Cargo can /// be interrupted while executing, losing the state of the `DependencyQueue` /// graph. pub struct Fingerprint { rustc: u64, features: String, target: u64, profile: u64, deps: Vec<(String, Arc)>, local: LocalFingerprint, memoized_hash: Mutex>, } #[derive(RustcEncodable, RustcDecodable, Hash)] enum LocalFingerprint { Precalculated(String), MtimeBased(MtimeSlot, PathBuf), } struct MtimeSlot(Mutex>); impl Fingerprint { fn update_local(&self) -> CargoResult<()> { match self.local { LocalFingerprint::MtimeBased(ref slot, ref path) => { let meta = try!(fs::metadata(path).chain_error(|| { internal(format!("failed to stat `{}`", path.display())) })); let mtime = FileTime::from_last_modification_time(&meta); *slot.0.lock().unwrap() = Some(mtime); } LocalFingerprint::Precalculated(..) 
=> return Ok(()) } *self.memoized_hash.lock().unwrap() = None; Ok(()) } fn hash(&self) -> u64 { if let Some(s) = *self.memoized_hash.lock().unwrap() { return s } let ret = util::hash_u64(self); *self.memoized_hash.lock().unwrap() = Some(ret); return ret } fn compare(&self, old: &Fingerprint) -> CargoResult<()> { if self.rustc != old.rustc { bail!("rust compiler has changed") } if self.features != old.features { bail!("features have changed: {} != {}", self.features, old.features) } if self.target != old.target { bail!("target configuration has changed") } if self.profile != old.profile { bail!("profile configuration has changed") } match (&self.local, &old.local) { (&LocalFingerprint::Precalculated(ref a), &LocalFingerprint::Precalculated(ref b)) => { if a != b { bail!("precalculated components have changed: {} != {}", a, b) } } (&LocalFingerprint::MtimeBased(ref a, ref ap), &LocalFingerprint::MtimeBased(ref b, ref bp)) => { let a = a.0.lock().unwrap(); let b = b.0.lock().unwrap(); if *a != *b { bail!("mtime based comopnents have changed: {:?} != {:?}, \ paths are {:?} and {:?}", *a, *b, ap, bp) } } _ => bail!("local fingerprint type has changed"), } if self.deps.len() != old.deps.len() { bail!("number of dependencies has changed") } for (a, b) in self.deps.iter().zip(old.deps.iter()) { if a.1.hash() != b.1.hash() { bail!("new ({}) != old ({})", a.0, b.0) } } Ok(()) } } impl hash::Hash for Fingerprint { fn hash(&self, h: &mut H) { let Fingerprint { rustc, ref features, target, profile, ref deps, ref local, memoized_hash: _, } = *self; (rustc, features, target, profile, deps, local).hash(h) } } impl Encodable for Fingerprint { fn encode(&self, e: &mut E) -> Result<(), E::Error> { e.emit_struct("Fingerprint", 6, |e| { try!(e.emit_struct_field("rustc", 0, |e| self.rustc.encode(e))); try!(e.emit_struct_field("target", 1, |e| self.target.encode(e))); try!(e.emit_struct_field("profile", 2, |e| self.profile.encode(e))); try!(e.emit_struct_field("local", 3, |e| 
self.local.encode(e))); try!(e.emit_struct_field("features", 4, |e| { self.features.encode(e) })); try!(e.emit_struct_field("deps", 5, |e| { self.deps.iter().map(|&(ref a, ref b)| { (a, b.hash()) }).collect::>().encode(e) })); Ok(()) }) } } impl Decodable for Fingerprint { fn decode(d: &mut D) -> Result { fn decode(d: &mut D) -> Result { Decodable::decode(d) } d.read_struct("Fingerprint", 6, |d| { Ok(Fingerprint { rustc: try!(d.read_struct_field("rustc", 0, decode)), target: try!(d.read_struct_field("target", 1, decode)), profile: try!(d.read_struct_field("profile", 2, decode)), local: try!(d.read_struct_field("local", 3, decode)), features: try!(d.read_struct_field("features", 4, decode)), memoized_hash: Mutex::new(None), deps: { let decode = decode::, D>; let v = try!(d.read_struct_field("deps", 5, decode)); v.into_iter().map(|(name, hash)| { (name, Arc::new(Fingerprint { rustc: 0, target: 0, profile: 0, local: LocalFingerprint::Precalculated(String::new()), features: String::new(), deps: Vec::new(), memoized_hash: Mutex::new(Some(hash)), })) }).collect() } }) }) } } impl hash::Hash for MtimeSlot { fn hash(&self, h: &mut H) { self.0.lock().unwrap().hash(h) } } impl Encodable for MtimeSlot { fn encode(&self, e: &mut E) -> Result<(), E::Error> { self.0.lock().unwrap().map(|ft| { (ft.seconds_relative_to_1970(), ft.nanoseconds()) }).encode(e) } } impl Decodable for MtimeSlot { fn decode(e: &mut D) -> Result { let kind: Option<(u64, u32)> = try!(Decodable::decode(e)); Ok(MtimeSlot(Mutex::new(kind.map(|(s, n)| { FileTime::from_seconds_since_1970(s, n) })))) } } /// Calculates the fingerprint for a package/target pair. /// /// This fingerprint is used by Cargo to learn about when information such as: /// /// * A non-path package changes (changes version, changes revision, etc). /// * Any dependency changes /// * The compiler changes /// * The set of features a package is built with changes /// * The profile a target is compiled with changes (e.g. 
opt-level changes) /// /// Information like file modification time is only calculated for path /// dependencies and is calculated in `calculate_target_fresh`. fn calculate<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult> { if let Some(s) = cx.fingerprints.get(unit) { return Ok(s.clone()) } // First, calculate all statically known "salt data" such as the profile // information (compiler flags), the compiler version, activated features, // and target configuration. let features = cx.resolve.features(unit.pkg.package_id()); let features = features.map(|s| { let mut v = s.iter().collect::>(); v.sort(); v }); // Next, recursively calculate the fingerprint for all of our dependencies. // // Skip the fingerprints of build scripts as they may not always be // available and the dirtiness propagation for modification is tracked // elsewhere. Also skip fingerprints of binaries because they don't actually // induce a recompile, they're just dependencies in the sense that they need // to be built. 
let deps = try!(cx.dep_targets(unit).iter().filter(|u| { !u.target.is_custom_build() && !u.target.is_bin() }).map(|unit| { calculate(cx, unit).map(|fingerprint| { (unit.pkg.package_id().to_string(), fingerprint) }) }).collect::>>()); // And finally, calculate what our own local fingerprint is let local = if use_dep_info(unit) { let dep_info = dep_info_loc(cx, unit); let mtime = try!(dep_info_mtime_if_fresh(&dep_info)); LocalFingerprint::MtimeBased(MtimeSlot(Mutex::new(mtime)), dep_info) } else { let fingerprint = try!(pkg_fingerprint(cx, unit.pkg)); LocalFingerprint::Precalculated(fingerprint) }; let mut deps = deps; deps.sort_by(|&(ref a, _), &(ref b, _)| a.cmp(b)); let fingerprint = Arc::new(Fingerprint { rustc: util::hash_u64(&cx.config.rustc_info().verbose_version), target: util::hash_u64(&unit.target), profile: util::hash_u64(&unit.profile), features: format!("{:?}", features), deps: deps, local: local, memoized_hash: Mutex::new(None), }); cx.fingerprints.insert(*unit, fingerprint.clone()); Ok(fingerprint) } // We want to use the mtime for files if we're a path source, but if we're a // git/registry source, then the mtime of files may fluctuate, but they won't // change so long as the source itself remains constant (which is the // responsibility of the source) fn use_dep_info(unit: &Unit) -> bool { let path = unit.pkg.summary().source_id().is_path(); !unit.profile.doc && path } /// Prepare the necessary work for the fingerprint of a build command. /// /// Build commands are located on packages, not on targets. Additionally, we /// don't have --dep-info to drive calculation of the fingerprint of a build /// command. This brings up an interesting predicament which gives us a few /// options to figure out whether a build command is dirty or not: /// /// 1. A build command is dirty if *any* file in a package changes. In theory /// all files are candidate for being used by the build command. /// 2. 
A build command is dirty if any file in a *specific directory* changes. /// This may lose information as it may require files outside of the specific /// directory. /// 3. A build command must itself provide a dep-info-like file stating how it /// should be considered dirty or not. /// /// The currently implemented solution is option (1), although it is planned to /// migrate to option (2) in the near future. pub fn prepare_build_cmd<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult { let _p = profile::start(format!("fingerprint build cmd: {}", unit.pkg.package_id())); let new = dir(cx, unit); let loc = new.join("build"); debug!("fingerprint at: {}", loc.display()); // If this build script execution has been overridden, then the fingerprint // is just a hash of what it was overridden with. Otherwise the fingerprint // is that of the entire package itself as we just consider everything as // input to the build script. let (local, output_path) = { let state = cx.build_state.outputs.lock().unwrap(); match state.get(&(unit.pkg.package_id().clone(), unit.kind)) { Some(output) => { let s = format!("overridden build state with hash: {}", util::hash_u64(output)); (LocalFingerprint::Precalculated(s), None) } None => { let &(ref output, ref deps) = &cx.build_explicit_deps[unit]; let local = if deps.len() == 0 { let s = try!(pkg_fingerprint(cx, unit.pkg)); LocalFingerprint::Precalculated(s) } else { let deps = deps.iter().map(|p| unit.pkg.root().join(p)); let mtime = mtime_if_fresh(output, deps); let mtime = MtimeSlot(Mutex::new(mtime)); LocalFingerprint::MtimeBased(mtime, output.clone()) }; (local, Some(output.clone())) } } }; let mut fingerprint = Fingerprint { rustc: 0, target: 0, profile: 0, features: String::new(), deps: Vec::new(), local: local, memoized_hash: Mutex::new(None), }; let compare = compare_old_fingerprint(&loc, &fingerprint); log_compare(unit, &compare); // When we write out the fingerprint, we may want to actually change the // kind of 
fingerprint being recorded. If we started out, then the previous // run of the build script (or if it had never run before) may indicate to // use the `Precalculated` variant with the `pkg_fingerprint`. If the build // script then prints `rerun-if-changed`, however, we need to record what's // necessary for that fingerprint. // // Hence, if there were some `rerun-if-changed` directives forcibly change // the kind of fingerprint over to the `MtimeBased` variant where the // relevant mtime is the output path of the build script. let state = cx.build_state.clone(); let key = (unit.pkg.package_id().clone(), unit.kind); let write_fingerprint = Work::new(move |_| { if let Some(output_path) = output_path { let outputs = state.outputs.lock().unwrap(); if outputs[&key].rerun_if_changed.len() > 0 { let slot = MtimeSlot(Mutex::new(None)); fingerprint.local = LocalFingerprint::MtimeBased(slot, output_path); try!(fingerprint.update_local()); } } write_fingerprint(&loc, &fingerprint) }); Ok((if compare.is_ok() {Fresh} else {Dirty}, write_fingerprint, Work::noop())) } fn write_fingerprint(loc: &Path, fingerprint: &Fingerprint) -> CargoResult<()> { let hash = fingerprint.hash(); debug!("write fingerprint: {}", loc.display()); try!(paths::write(&loc, util::to_hex(hash).as_bytes())); try!(paths::write(&loc.with_extension("json"), json::encode(&fingerprint).unwrap().as_bytes())); Ok(()) } /// Prepare work for when a package starts to build pub fn prepare_init(cx: &mut Context, unit: &Unit) -> CargoResult<()> { let new1 = dir(cx, unit); let new2 = new1.clone(); if fs::metadata(&new1).is_err() { try!(fs::create_dir(&new1)); } if fs::metadata(&new2).is_err() { try!(fs::create_dir(&new2)); } Ok(()) } /// Return the (old, new) location for fingerprints for a package pub fn dir(cx: &Context, unit: &Unit) -> PathBuf { cx.layout(unit.pkg, unit.kind).proxy().fingerprint(unit.pkg) } /// Returns the (old, new) location for the dep info file of a target. 
pub fn dep_info_loc(cx: &Context, unit: &Unit) -> PathBuf { dir(cx, unit).join(&format!("dep-{}", filename(unit))) } fn compare_old_fingerprint(loc: &Path, new_fingerprint: &Fingerprint) -> CargoResult<()> { let old_fingerprint_short = try!(paths::read(loc)); let new_hash = new_fingerprint.hash(); if util::to_hex(new_hash) == old_fingerprint_short { return Ok(()) } let old_fingerprint_json = try!(paths::read(&loc.with_extension("json"))); let old_fingerprint = try!(json::decode(&old_fingerprint_json).chain_error(|| { internal(format!("failed to deserialize json")) })); new_fingerprint.compare(&old_fingerprint) } fn log_compare(unit: &Unit, compare: &CargoResult<()>) { let mut e = match *compare { Ok(..) => return, Err(ref e) => &**e, }; info!("fingerprint error for {}: {}", unit.pkg, e); while let Some(cause) = e.cargo_cause() { info!(" cause: {}", cause); e = cause; } let mut e = e.cause(); while let Some(cause) = e { info!(" cause: {}", cause); e = cause.cause(); } } fn dep_info_mtime_if_fresh(dep_info: &Path) -> CargoResult> { macro_rules! fs_try { ($e:expr) => (match $e { Ok(e) => e, Err(..) => return Ok(None) }) } let mut f = BufReader::new(fs_try!(File::open(dep_info))); // see comments in append_current_dir for where this cwd is manifested from. 
let mut cwd = Vec::new(); if fs_try!(f.read_until(0, &mut cwd)) == 0 { return Ok(None) } let cwd = try!(util::bytes2path(&cwd[..cwd.len()-1])); let line = match f.lines().next() { Some(Ok(line)) => line, _ => return Ok(None), }; let pos = try!(line.find(": ").chain_error(|| { internal(format!("dep-info not in an understood format: {}", dep_info.display())) })); let deps = &line[pos + 2..]; let mut paths = Vec::new(); let mut deps = deps.split(' ').map(|s| s.trim()).filter(|s| !s.is_empty()); loop { let mut file = match deps.next() { Some(s) => s.to_string(), None => break, }; while file.ends_with("\\") { file.pop(); file.push(' '); file.push_str(try!(deps.next().chain_error(|| { internal(format!("malformed dep-info format, trailing \\")) }))); } paths.push(cwd.join(&file)); } Ok(mtime_if_fresh(&dep_info, paths.iter())) } fn pkg_fingerprint(cx: &Context, pkg: &Package) -> CargoResult { let source_id = pkg.package_id().source_id(); let source = try!(cx.sources.get(source_id).chain_error(|| { internal("missing package source") })); source.fingerprint(pkg) } fn mtime_if_fresh(output: &Path, paths: I) -> Option where I: IntoIterator, I::Item: AsRef, { let meta = match fs::metadata(output) { Ok(meta) => meta, Err(..) => return None, }; let mtime = FileTime::from_last_modification_time(&meta); let any_stale = paths.into_iter().any(|path| { let path = path.as_ref(); let meta = match fs::metadata(path) { Ok(meta) => meta, Err(..) => { info!("stale: {} -- missing", path.display()); return true } }; let mtime2 = FileTime::from_last_modification_time(&meta); if mtime2 > mtime { info!("stale: {} -- {} vs {}", path.display(), mtime2, mtime); true } else { false } }); if any_stale { None } else { Some(mtime) } } fn filename(unit: &Unit) -> String { let kind = match *unit.target.kind() { TargetKind::Lib(..) 
=> "lib", TargetKind::Bin => "bin", TargetKind::Test => "integration-test", TargetKind::Example => "example", TargetKind::Bench => "bench", TargetKind::CustomBuild => "build-script", }; let flavor = if unit.profile.test { "test-" } else if unit.profile.doc { "doc-" } else { "" }; format!("{}{}-{}", flavor, kind, unit.target.name()) } // The dep-info files emitted by the compiler all have their listed paths // relative to whatever the current directory was at the time that the compiler // was invoked. As the current directory may change over time, we need to record // what that directory was at the beginning of the file so we can know about it // next time. pub fn append_current_dir(path: &Path, cwd: &Path) -> CargoResult<()> { debug!("appending {} <- {}", path.display(), cwd.display()); let mut f = try!(OpenOptions::new().read(true).write(true).open(path)); let mut contents = Vec::new(); try!(f.read_to_end(&mut contents)); try!(f.seek(SeekFrom::Start(0))); try!(f.write_all(try!(util::path2bytes(cwd)))); try!(f.write_all(&[0])); try!(f.write_all(&contents)); Ok(()) } cargo-0.8.0/src/cargo/ops/cargo_rustc/job.rs000066400000000000000000000031121264656333200207370ustar00rootroot00000000000000use std::sync::mpsc::Sender; use std::fmt; use util::{CargoResult, Fresh, Dirty, Freshness}; pub struct Job { dirty: Work, fresh: Work } /// Each proc should send its description before starting. /// It should send either once or close immediately. 
pub struct Work { inner: Box, CargoResult<()>> + Send>, } trait FnBox { fn call_box(self: Box, a: A) -> R; } impl R> FnBox for F { fn call_box(self: Box, a: A) -> R { (*self)(a) } } impl Work { pub fn new(f: F) -> Work where F: FnOnce(Sender) -> CargoResult<()> + Send + 'static { Work { inner: Box::new(f) } } pub fn noop() -> Work { Work::new(|_| Ok(())) } pub fn call(self, tx: Sender) -> CargoResult<()> { self.inner.call_box(tx) } pub fn then(self, next: Work) -> Work { Work::new(move |tx| { try!(self.call(tx.clone())); next.call(tx) }) } } impl Job { /// Create a new job representing a unit of work. pub fn new(dirty: Work, fresh: Work) -> Job { Job { dirty: dirty, fresh: fresh } } /// Consumes this job by running it, returning the result of the /// computation. pub fn run(self, fresh: Freshness, tx: Sender) -> CargoResult<()> { match fresh { Fresh => self.fresh.call(tx), Dirty => self.dirty.call(tx), } } } impl fmt::Debug for Job { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Job {{ ... }}") } } cargo-0.8.0/src/cargo/ops/cargo_rustc/job_queue.rs000066400000000000000000000225371264656333200221570ustar00rootroot00000000000000use std::collections::HashSet; use std::collections::hash_map::HashMap; use std::fmt; use std::sync::mpsc::{channel, Sender, Receiver}; use crossbeam::{self, Scope}; use term::color::YELLOW; use core::{PackageId, Target, Profile}; use util::{Config, DependencyQueue, Fresh, Dirty, Freshness}; use util::{CargoResult, Dependency, profile, internal}; use super::{Context, Kind, Unit}; use super::job::Job; /// A management structure of the entire dependency graph to compile. /// /// This structure is backed by the `DependencyQueue` type and manages the /// actual compilation step of each package. Packages enqueue units of work and /// then later on the entire graph is processed and compiled. 
pub struct JobQueue<'a> { jobs: usize, queue: DependencyQueue, Vec<(Job, Freshness)>>, tx: Sender>, rx: Receiver>, active: usize, pending: HashMap, PendingBuild>, compiled: HashSet<&'a PackageId>, documented: HashSet<&'a PackageId>, counts: HashMap<&'a PackageId, usize>, } /// A helper structure for metadata about the state of a building package. struct PendingBuild { /// Number of jobs currently active amt: usize, /// Current freshness state of this package. Any dirty target within a /// package will cause the entire package to become dirty. fresh: Freshness, } #[derive(Clone, Copy, Eq, PartialEq, Hash)] struct Key<'a> { pkg: &'a PackageId, target: &'a Target, profile: &'a Profile, kind: Kind, } struct Message<'a> { key: Key<'a>, result: CargoResult<()>, } impl<'a> JobQueue<'a> { pub fn new<'cfg>(cx: &Context<'a, 'cfg>) -> JobQueue<'a> { let (tx, rx) = channel(); JobQueue { jobs: cx.jobs() as usize, queue: DependencyQueue::new(), tx: tx, rx: rx, active: 0, pending: HashMap::new(), compiled: HashSet::new(), documented: HashSet::new(), counts: HashMap::new(), } } pub fn enqueue(&mut self, cx: &Context<'a, 'a>, unit: &Unit<'a>, job: Job, fresh: Freshness) { let key = Key::new(unit); self.queue.queue(cx, Fresh, key, Vec::new()).push((job, fresh)); *self.counts.entry(key.pkg).or_insert(0) += 1; } /// Execute all jobs necessary to build the dependency graph. /// /// This function will spawn off `config.jobs()` workers to build all of the /// necessary dependencies, in order. Freshness is propagated as far as /// possible along each dependency chain. pub fn execute(&mut self, config: &Config) -> CargoResult<()> { let _p = profile::start("executing the job graph"); crossbeam::scope(|scope| { self.drain_the_queue(config, scope) }) } fn drain_the_queue(&mut self, config: &Config, scope: &Scope<'a>) -> CargoResult<()> { let mut queue = Vec::new(); trace!("queue: {:#?}", self.queue); // Iteratively execute the entire dependency graph. 
Each turn of the // loop starts out by scheduling as much work as possible (up to the // maximum number of parallel jobs). A local queue is maintained // separately from the main dependency queue as one dequeue may actually // dequeue quite a bit of work (e.g. 10 binaries in one project). // // After a job has finished we update our internal state if it was // successful and otherwise wait for pending work to finish if it failed // and then immediately return. loop { while self.active < self.jobs { if queue.len() > 0 { let (key, job, fresh) = queue.remove(0); try!(self.run(key, fresh, job, config, scope)); } else if let Some((fresh, key, jobs)) = self.queue.dequeue() { let total_fresh = jobs.iter().fold(fresh, |fresh, &(_, f)| { f.combine(fresh) }); self.pending.insert(key, PendingBuild { amt: jobs.len(), fresh: total_fresh, }); queue.extend(jobs.into_iter().map(|(job, f)| { (key, job, f.combine(fresh)) })); } else { break } } if self.active == 0 { break } // Now that all possible work has been scheduled, wait for a piece // of work to finish. If any package fails to build then we stop // scheduling work as quickly as possibly. let msg = self.rx.recv().unwrap(); info!("end: {:?}", msg.key); self.active -= 1; match msg.result { Ok(()) => { let state = self.pending.get_mut(&msg.key).unwrap(); state.amt -= 1; if state.amt == 0 { self.queue.finish(&msg.key, state.fresh); } } Err(e) => { if self.active > 0 { try!(config.shell().say( "Build failed, waiting for other \ jobs to finish...", YELLOW)); for _ in self.rx.iter().take(self.active as usize) {} } return Err(e) } } } if self.queue.len() == 0 { Ok(()) } else { debug!("queue: {:#?}", self.queue); Err(internal("finished with jobs still left in the queue")) } } /// Executes a job in the `scope` given, pushing the spawned thread's /// handled onto `threads`. 
fn run(&mut self, key: Key<'a>, fresh: Freshness, job: Job, config: &Config, scope: &Scope<'a>) -> CargoResult<()> { info!("start: {:?}", key); self.active += 1; *self.counts.get_mut(key.pkg).unwrap() -= 1; let my_tx = self.tx.clone(); let (desc_tx, desc_rx) = channel(); scope.spawn(move || { my_tx.send(Message { key: key, result: job.run(fresh, desc_tx), }).unwrap(); }); // Print out some nice progress information try!(self.note_working_on(config, &key, fresh)); // only the first message of each job is processed if let Ok(msg) = desc_rx.recv() { try!(config.shell().verbose(|c| c.status("Running", &msg))); } Ok(()) } // This isn't super trivial because we don't want to print loads and // loads of information to the console, but we also want to produce a // faithful representation of what's happening. This is somewhat nuanced // as a package can start compiling *very* early on because of custom // build commands and such. // // In general, we try to print "Compiling" for the first nontrivial task // run for a package, regardless of when that is. We then don't print // out any more information for a package after we've printed it once. 
fn note_working_on(&mut self, config: &Config, key: &Key<'a>, fresh: Freshness) -> CargoResult<()> { if (self.compiled.contains(key.pkg) && !key.profile.doc) || (self.documented.contains(key.pkg) && key.profile.doc) { return Ok(()) } match fresh { // Any dirty stage which runs at least one command gets printed as // being a compiled package Dirty => { if key.profile.doc { self.documented.insert(key.pkg); try!(config.shell().status("Documenting", key.pkg)); } else { self.compiled.insert(key.pkg); try!(config.shell().status("Compiling", key.pkg)); } } Fresh if self.counts[key.pkg] == 0 => { self.compiled.insert(key.pkg); try!(config.shell().verbose(|c| c.status("Fresh", key.pkg))); } Fresh => {} } Ok(()) } } impl<'a> Dependency for Key<'a> { type Context = Context<'a, 'a>; fn dependencies(&self, cx: &Context<'a, 'a>) -> Vec> { let unit = Unit { pkg: cx.get_package(self.pkg), target: self.target, profile: self.profile, kind: self.kind, }; cx.dep_targets(&unit).iter().filter_map(|unit| { // Binaries aren't actually needed to *compile* tests, just to run // them, so we don't include this dependency edge in the job graph. if self.target.is_test() && unit.target.is_bin() { None } else { Some(Key::new(unit)) } }).collect() } } impl<'a> Key<'a> { fn new(unit: &Unit<'a>) -> Key<'a> { Key { pkg: unit.pkg.package_id(), target: unit.target, profile: unit.profile, kind: unit.kind, } } } impl<'a> fmt::Debug for Key<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{} => {}/{} => {:?}", self.pkg, self.target, self.profile, self.kind) } } cargo-0.8.0/src/cargo/ops/cargo_rustc/layout.rs000066400000000000000000000106171264656333200215120ustar00rootroot00000000000000//! Management of the directory layout of a build //! //! The directory layout is a little tricky at times, hence a separate file to //! house this logic. The current layout looks like this: //! //! ```ignore //! # This is the root directory for all output, the top-level package //! 
# places all of its output here. //! target/ //! //! # This is the root directory for all output of *dependencies* //! deps/ //! //! # Root directory for all compiled examples //! examples/ //! //! # This is the location at which the output of all custom build //! # commands are rooted //! build/ //! //! # Each package gets its own directory where its build script and //! # script output are placed //! $pkg1/ //! $pkg2/ //! $pkg3/ //! //! # Each directory package has a `out` directory where output //! # is placed. //! out/ //! //! # This is the location at which the output of all old custom build //! # commands are rooted //! native/ //! //! # Each package gets its own directory for where its output is //! # placed. We can't track exactly what's getting put in here, so //! # we just assume that all relevant output is in these //! # directories. //! $pkg1/ //! $pkg2/ //! $pkg3/ //! //! # Hidden directory that holds all of the fingerprint files for all //! # packages //! .fingerprint/ //! ``` use std::fs; use std::io; use std::path::{PathBuf, Path}; use core::Package; use util::Config; use util::hex::short_hash; pub struct Layout { root: PathBuf, deps: PathBuf, native: PathBuf, build: PathBuf, fingerprint: PathBuf, examples: PathBuf, } pub struct LayoutProxy<'a> { root: &'a Layout, primary: bool, } impl Layout { pub fn new(config: &Config, pkg: &Package, triple: Option<&str>, dest: &str) -> Layout { let mut path = config.target_dir(pkg); // Flexible target specifications often point at filenames, so interpret // the target triple as a Path and then just use the file stem as the // component for the directory name. 
if let Some(triple) = triple { path.push(Path::new(triple).file_stem().unwrap()); } path.push(dest); Layout::at(path) } pub fn at(root: PathBuf) -> Layout { Layout { deps: root.join("deps"), native: root.join("native"), build: root.join("build"), fingerprint: root.join(".fingerprint"), examples: root.join("examples"), root: root, } } pub fn prepare(&mut self) -> io::Result<()> { if fs::metadata(&self.root).is_err() { try!(fs::create_dir_all(&self.root)); } try!(mkdir(&self.deps)); try!(mkdir(&self.native)); try!(mkdir(&self.fingerprint)); try!(mkdir(&self.examples)); try!(mkdir(&self.build)); return Ok(()); fn mkdir(dir: &Path) -> io::Result<()> { if fs::metadata(&dir).is_err() { try!(fs::create_dir(dir)); } Ok(()) } } pub fn dest<'a>(&'a self) -> &'a Path { &self.root } pub fn deps<'a>(&'a self) -> &'a Path { &self.deps } pub fn examples<'a>(&'a self) -> &'a Path { &self.examples } pub fn fingerprint(&self, package: &Package) -> PathBuf { self.fingerprint.join(&self.pkg_dir(package)) } pub fn build(&self, package: &Package) -> PathBuf { self.build.join(&self.pkg_dir(package)) } pub fn build_out(&self, package: &Package) -> PathBuf { self.build(package).join("out") } fn pkg_dir(&self, pkg: &Package) -> String { format!("{}-{}", pkg.name(), short_hash(pkg)) } } impl<'a> LayoutProxy<'a> { pub fn new(root: &'a Layout, primary: bool) -> LayoutProxy<'a> { LayoutProxy { root: root, primary: primary, } } pub fn root(&self) -> &'a Path { if self.primary {self.root.dest()} else {self.root.deps()} } pub fn deps(&self) -> &'a Path { self.root.deps() } pub fn examples(&self) -> &'a Path { self.root.examples() } pub fn build(&self, pkg: &Package) -> PathBuf { self.root.build(pkg) } pub fn build_out(&self, pkg: &Package) -> PathBuf { self.root.build_out(pkg) } pub fn proxy(&self) -> &'a Layout { self.root } } cargo-0.8.0/src/cargo/ops/cargo_rustc/links.rs000066400000000000000000000030451264656333200213120ustar00rootroot00000000000000use std::collections::HashMap; use 
core::{PackageId, PackageSet}; use util::CargoResult; // Validate that there are no duplicated native libraries among packages and // that all packages with `links` also have a build script. pub fn validate(deps: &PackageSet) -> CargoResult<()> { let mut map: HashMap<_, &PackageId> = HashMap::new(); for dep in deps.iter() { let lib = match dep.manifest().links() { Some(lib) => lib, None => continue, }; if let Some(prev) = map.get(&lib) { let dep = dep.package_id(); if prev.name() == dep.name() && prev.source_id() == dep.source_id() { bail!("native library `{}` is being linked to by more \ than one version of the same package, but it can \ only be linked once; try updating or pinning your \ dependencies to ensure that this package only shows \ up once\n\n {}\n {}", lib, prev, dep) } else { bail!("native library `{}` is being linked to by more than \ one package, and can only be linked to by one \ package\n\n {}\n {}", lib, prev, dep) } } if !dep.manifest().targets().iter().any(|t| t.is_custom_build()) { bail!("package `{}` specifies that it links to `{}` but does not \ have a custom build script", dep.package_id(), lib) } map.insert(lib, dep.package_id()); } Ok(()) } cargo-0.8.0/src/cargo/ops/cargo_rustc/mod.rs000066400000000000000000000537151264656333200207620ustar00rootroot00000000000000use std::collections::HashMap; use std::env; use std::ffi::{OsStr, OsString}; use std::fs; use std::io::prelude::*; use std::path::{self, PathBuf}; use std::sync::Arc; use core::{SourceMap, Package, PackageId, PackageSet, Target, Resolve}; use core::{Profile, Profiles}; use util::{self, CargoResult, human}; use util::{Config, internal, ChainError, profile, join_paths}; use self::job::{Job, Work}; use self::job_queue::JobQueue; pub use self::compilation::Compilation; pub use self::context::{Context, Unit}; pub use self::engine::{CommandPrototype, CommandType, ExecEngine, ProcessEngine}; pub use self::layout::{Layout, LayoutProxy}; pub use self::custom_build::{BuildOutput, BuildMap, 
BuildScripts}; mod context; mod compilation; mod custom_build; mod engine; mod fingerprint; mod job; mod job_queue; mod layout; mod links; #[derive(PartialEq, Eq, Hash, Debug, Clone, Copy, PartialOrd, Ord)] pub enum Kind { Host, Target } #[derive(Default, Clone)] pub struct BuildConfig { pub host: TargetConfig, pub target: TargetConfig, pub jobs: u32, pub requested_target: Option, pub exec_engine: Option>>, pub release: bool, pub doc_all: bool, } #[derive(Clone, Default)] pub struct TargetConfig { pub ar: Option, pub linker: Option, pub overrides: HashMap, } pub type PackagesToBuild<'a> = [(&'a Package,Vec<(&'a Target,&'a Profile)>)]; // Returns a mapping of the root package plus its immediate dependencies to // where the compiled libraries are all located. pub fn compile_targets<'a, 'cfg: 'a>(pkg_targets: &'a PackagesToBuild<'a>, deps: &'a PackageSet, resolve: &'a Resolve, sources: &'a SourceMap<'cfg>, config: &'cfg Config, build_config: BuildConfig, profiles: &'a Profiles) -> CargoResult> { let units = pkg_targets.iter().flat_map(|&(pkg, ref targets)| { let default_kind = if build_config.requested_target.is_some() { Kind::Target } else { Kind::Host }; targets.iter().map(move |&(target, profile)| { Unit { pkg: pkg, target: target, profile: profile, kind: if target.for_host() {Kind::Host} else {default_kind}, } }) }).collect::>(); try!(links::validate(deps)); let dest = if build_config.release {"release"} else {"debug"}; let root = deps.iter().find(|p| p.package_id() == resolve.root()).unwrap(); let host_layout = Layout::new(config, root, None, &dest); let target_layout = build_config.requested_target.as_ref().map(|target| { layout::Layout::new(config, root, Some(&target), &dest) }); let mut cx = try!(Context::new(resolve, sources, deps, config, host_layout, target_layout, build_config, profiles)); let mut queue = JobQueue::new(&cx); try!(cx.prepare(root)); custom_build::build_map(&mut cx, &units); for unit in units.iter() { // Build up a list of pending jobs, each 
of which represent // compiling a particular package. No actual work is executed as // part of this, that's all done next as part of the `execute` // function which will run everything in order with proper // parallelism. try!(compile(&mut cx, &mut queue, unit)); } // Now that we've figured out everything that we're going to do, do it! try!(queue.execute(cx.config)); for unit in units.iter() { let out_dir = cx.layout(unit.pkg, unit.kind).build_out(unit.pkg) .display().to_string(); cx.compilation.extra_env.entry(unit.pkg.package_id().clone()) .or_insert(Vec::new()) .push(("OUT_DIR".to_string(), out_dir)); for filename in try!(cx.target_filenames(unit)).iter() { let dst = cx.out_dir(unit).join(filename); if unit.profile.test { cx.compilation.tests.push((unit.pkg.clone(), unit.target.name().to_string(), dst)); } else if unit.target.is_bin() || unit.target.is_example() { cx.compilation.binaries.push(dst); } else if unit.target.is_lib() { let pkgid = unit.pkg.package_id().clone(); cx.compilation.libraries.entry(pkgid).or_insert(Vec::new()) .push((unit.target.clone(), dst)); } if !unit.target.is_lib() { continue } // Include immediate lib deps as well for unit in cx.dep_targets(unit).iter() { let pkgid = unit.pkg.package_id(); if !unit.target.is_lib() { continue } if unit.profile.doc { continue } if cx.compilation.libraries.contains_key(&pkgid) { continue } let v = try!(cx.target_filenames(unit)); let v = v.into_iter().map(|f| { (unit.target.clone(), cx.out_dir(unit).join(f)) }).collect::>(); cx.compilation.libraries.insert(pkgid.clone(), v); } } } let root_pkg = root.package_id(); if let Some(feats) = cx.resolve.features(root_pkg) { cx.compilation.cfgs.extend(feats.iter().map(|feat| { format!("feature=\"{}\"", feat) })); } for (&(ref pkg, _), output) in cx.build_state.outputs.lock().unwrap().iter() { if pkg == root_pkg { cx.compilation.cfgs.extend(output.cfgs.iter().cloned()); } for dir in output.library_paths.iter() { cx.compilation.native_dirs.insert(pkg.clone(), 
dir.clone()); } } Ok(cx.compilation) } fn compile<'a, 'cfg: 'a>(cx: &mut Context<'a, 'cfg>, jobs: &mut JobQueue<'a>, unit: &Unit<'a>) -> CargoResult<()> { if !cx.compiled.insert(*unit) { return Ok(()) } // Build up the work to be done to compile this unit, enqueuing it once // we've got everything constructed. let p = profile::start(format!("preparing: {}/{}", unit.pkg, unit.target.name())); try!(fingerprint::prepare_init(cx, unit)); let (dirty, fresh, freshness) = if unit.profile.run_custom_build { try!(custom_build::prepare(cx, unit)) } else { let (freshness, dirty, fresh) = try!(fingerprint::prepare_target(cx, unit)); let work = if unit.profile.doc { try!(rustdoc(cx, unit)) } else { try!(rustc(cx, unit)) }; let dirty = work.then(dirty); (dirty, fresh, freshness) }; jobs.enqueue(cx, unit, Job::new(dirty, fresh), freshness); drop(p); // Be sure to compile all dependencies of this target as well. for unit in cx.dep_targets(unit).iter() { try!(compile(cx, jobs, unit)); } Ok(()) } fn rustc(cx: &mut Context, unit: &Unit) -> CargoResult { let crate_types = unit.target.rustc_crate_types(); let mut rustc = try!(prepare_rustc(cx, crate_types, unit)); let name = unit.pkg.name().to_string(); let is_path_source = unit.pkg.package_id().source_id().is_path(); let allow_warnings = unit.pkg.package_id() == cx.resolve.root() || is_path_source; if !allow_warnings { if cx.config.rustc_info().cap_lints { rustc.arg("--cap-lints").arg("allow"); } else { rustc.arg("-Awarnings"); } } let has_custom_args = unit.profile.rustc_args.is_some(); let exec_engine = cx.exec_engine.clone(); let filenames = try!(cx.target_filenames(unit)); let root = cx.out_dir(unit); // Prepare the native lib state (extra -L and -l flags) let build_state = cx.build_state.clone(); let current_id = unit.pkg.package_id().clone(); let build_deps = load_build_deps(cx, unit); // If we are a binary and the package also contains a library, then we // don't pass the `-l` flags. 
let pass_l_flag = unit.target.is_lib() || !unit.pkg.targets().iter().any(|t| t.is_lib()); let do_rename = unit.target.allows_underscores() && !unit.profile.test; let real_name = unit.target.name().to_string(); let crate_name = unit.target.crate_name(); let rustc_dep_info_loc = if do_rename { root.join(&crate_name) } else { root.join(&cx.file_stem(unit)) }.with_extension("d"); let dep_info_loc = fingerprint::dep_info_loc(cx, unit); let cwd = cx.config.cwd().to_path_buf(); return Ok(Work::new(move |desc_tx| { debug!("about to run: {}", rustc); // Only at runtime have we discovered what the extra -L and -l // arguments are for native libraries, so we process those here. We // also need to be sure to add any -L paths for our plugins to the // dynamic library load path as a plugin's dynamic library may be // located somewhere in there. if let Some(build_deps) = build_deps { let build_state = build_state.outputs.lock().unwrap(); try!(add_native_deps(&mut rustc, &build_state, &build_deps, pass_l_flag, ¤t_id)); try!(add_plugin_deps(&mut rustc, &build_state, &build_deps)); } // FIXME(rust-lang/rust#18913): we probably shouldn't have to do // this manually for filename in filenames.iter() { let dst = root.join(filename); if fs::metadata(&dst).is_ok() { try!(fs::remove_file(&dst)); } } desc_tx.send(rustc.to_string()).ok(); try!(exec_engine.exec(rustc).chain_error(|| { human(format!("Could not compile `{}`.", name)) })); if do_rename && real_name != crate_name { let dst = root.join(&filenames[0]); let src = dst.with_file_name(dst.file_name().unwrap() .to_str().unwrap() .replace(&real_name, &crate_name)); if !has_custom_args || fs::metadata(&src).is_ok() { try!(fs::rename(&src, &dst).chain_error(|| { internal(format!("could not rename crate {:?}", src)) })); } } if !has_custom_args || fs::metadata(&rustc_dep_info_loc).is_ok() { try!(fs::rename(&rustc_dep_info_loc, &dep_info_loc).chain_error(|| { internal(format!("could not rename dep info: {:?}", rustc_dep_info_loc)) })); 
try!(fingerprint::append_current_dir(&dep_info_loc, &cwd)); } Ok(()) })); // Add all relevant -L and -l flags from dependencies (now calculated and // present in `state`) to the command provided fn add_native_deps(rustc: &mut CommandPrototype, build_state: &BuildMap, build_scripts: &BuildScripts, pass_l_flag: bool, current_id: &PackageId) -> CargoResult<()> { for key in build_scripts.to_link.iter() { let output = try!(build_state.get(key).chain_error(|| { internal(format!("couldn't find build state for {}/{:?}", key.0, key.1)) })); for path in output.library_paths.iter() { rustc.arg("-L").arg(path); } if key.0 == *current_id { for cfg in &output.cfgs { rustc.arg("--cfg").arg(cfg); } if pass_l_flag { for name in output.library_links.iter() { rustc.arg("-l").arg(name); } } } } Ok(()) } } fn load_build_deps(cx: &Context, unit: &Unit) -> Option> { cx.build_scripts.get(unit).cloned() } // For all plugin dependencies, add their -L paths (now calculated and // present in `state`) to the dynamic library load path for the command to // execute. 
fn add_plugin_deps(rustc: &mut CommandPrototype, build_state: &BuildMap, build_scripts: &BuildScripts) -> CargoResult<()> { let var = util::dylib_path_envvar(); let search_path = rustc.get_env(var).unwrap_or(OsString::new()); let mut search_path = env::split_paths(&search_path).collect::>(); for id in build_scripts.plugins.iter() { let key = (id.clone(), Kind::Host); let output = try!(build_state.get(&key).chain_error(|| { internal(format!("couldn't find libs for plugin dep {}", id)) })); for path in output.library_paths.iter() { search_path.push(path.clone()); } } let search_path = try!(join_paths(&search_path, var)); rustc.env(var, &search_path); Ok(()) } fn prepare_rustc(cx: &Context, crate_types: Vec<&str>, unit: &Unit) -> CargoResult { let mut base = try!(process(CommandType::Rustc, unit.pkg, cx)); build_base_args(cx, &mut base, unit, &crate_types); build_plugin_args(&mut base, cx, unit); try!(build_deps_args(&mut base, cx, unit)); Ok(base) } fn rustdoc(cx: &mut Context, unit: &Unit) -> CargoResult { let mut rustdoc = try!(process(CommandType::Rustdoc, unit.pkg, cx)); rustdoc.arg(&root_path(cx, unit)) .cwd(cx.config.cwd()) .arg("--crate-name").arg(&unit.target.crate_name()); let mut doc_dir = cx.config.target_dir(cx.get_package(cx.resolve.root())); if let Some(target) = cx.requested_target() { rustdoc.arg("--target").arg(target); doc_dir.push(target); } doc_dir.push("doc"); // Create the documentation directory ahead of time as rustdoc currently has // a bug where concurrent invocations will race to create this directory if // it doesn't already exist. 
try!(fs::create_dir_all(&doc_dir)); rustdoc.arg("-o").arg(doc_dir); if let Some(features) = cx.resolve.features(unit.pkg.package_id()) { for feat in features { rustdoc.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); } } if let Some(ref args) = unit.profile.rustdoc_args { rustdoc.args(args); } try!(build_deps_args(&mut rustdoc, cx, unit)); if unit.pkg.has_custom_build() { rustdoc.env("OUT_DIR", &cx.layout(unit.pkg, unit.kind) .build_out(unit.pkg)); } let name = unit.pkg.name().to_string(); let build_state = cx.build_state.clone(); let key = (unit.pkg.package_id().clone(), unit.kind); let exec_engine = cx.exec_engine.clone(); Ok(Work::new(move |desc_tx| { if let Some(output) = build_state.outputs.lock().unwrap().get(&key) { for cfg in output.cfgs.iter() { rustdoc.arg("--cfg").arg(cfg); } } desc_tx.send(rustdoc.to_string()).unwrap(); exec_engine.exec(rustdoc).chain_error(|| { human(format!("Could not document `{}`.", name)) }) })) } // The path that we pass to rustc is actually fairly important because it will // show up in error messages and the like. For this reason we take a few moments // to ensure that something shows up pretty reasonably. // // The heuristic here is fairly simple, but the key idea is that the path is // always "relative" to the current directory in order to be found easily. The // path is only actually relative if the current directory is an ancestor if it. // This means that non-path dependencies (git/registry) will likely be shown as // absolute paths instead of relative paths. 
fn root_path(cx: &Context, unit: &Unit) -> PathBuf { let absolute = unit.pkg.root().join(unit.target.src_path()); let cwd = cx.config.cwd(); if absolute.starts_with(cwd) { util::without_prefix(&absolute, cwd).map(|s| { s.to_path_buf() }).unwrap_or(absolute) } else { absolute } } fn build_base_args(cx: &Context, cmd: &mut CommandPrototype, unit: &Unit, crate_types: &[&str]) { let Profile { opt_level, lto, codegen_units, ref rustc_args, debuginfo, debug_assertions, rpath, test, doc: _doc, run_custom_build, rustdoc_args: _, } = *unit.profile; assert!(!run_custom_build); // Move to cwd so the root_path() passed below is actually correct cmd.cwd(cx.config.cwd()); cmd.arg(&root_path(cx, unit)); cmd.arg("--crate-name").arg(&unit.target.crate_name()); for crate_type in crate_types.iter() { cmd.arg("--crate-type").arg(crate_type); } let prefer_dynamic = (unit.target.for_host() && !unit.target.is_custom_build()) || (crate_types.contains(&"dylib") && unit.pkg.package_id() != cx.resolve.root()); if prefer_dynamic { cmd.arg("-C").arg("prefer-dynamic"); } if opt_level != 0 { cmd.arg("-C").arg(&format!("opt-level={}", opt_level)); } // Disable LTO for host builds as prefer_dynamic and it are mutually // exclusive. if unit.target.can_lto() && lto && !unit.target.for_host() { cmd.args(&["-C", "lto"]); } else { // There are some restrictions with LTO and codegen-units, so we // only add codegen units when LTO is not used. 
if let Some(n) = codegen_units { cmd.arg("-C").arg(&format!("codegen-units={}", n)); } } if debuginfo { cmd.arg("-g"); } if let Some(ref args) = *rustc_args { cmd.args(args); } if debug_assertions && opt_level > 0 { cmd.args(&["-C", "debug-assertions=on"]); } else if !debug_assertions && opt_level == 0 { cmd.args(&["-C", "debug-assertions=off"]); } if test && unit.target.harness() { cmd.arg("--test"); } if let Some(features) = cx.resolve.features(unit.pkg.package_id()) { for feat in features.iter() { cmd.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); } } if let Some(m) = cx.target_metadata(unit) { cmd.arg("-C").arg(&format!("metadata={}", m.metadata)); cmd.arg("-C").arg(&format!("extra-filename={}", m.extra_filename)); } if rpath { cmd.arg("-C").arg("rpath"); } } fn build_plugin_args(cmd: &mut CommandPrototype, cx: &Context, unit: &Unit) { fn opt(cmd: &mut CommandPrototype, key: &str, prefix: &str, val: Option<&OsStr>) { if let Some(val) = val { let mut joined = OsString::from(prefix); joined.push(val); cmd.arg(key).arg(joined); } } cmd.arg("--out-dir").arg(&cx.out_dir(unit)); cmd.arg("--emit=dep-info,link"); if unit.kind == Kind::Target { opt(cmd, "--target", "", cx.requested_target().map(|s| s.as_ref())); } opt(cmd, "-C", "ar=", cx.ar(unit.kind).map(|s| s.as_ref())); opt(cmd, "-C", "linker=", cx.linker(unit.kind).map(|s| s.as_ref())); } fn build_deps_args(cmd: &mut CommandPrototype, cx: &Context, unit: &Unit) -> CargoResult<()> { let layout = cx.layout(unit.pkg, unit.kind); cmd.arg("-L").arg(&{ let mut root = OsString::from("dependency="); root.push(layout.root()); root }); cmd.arg("-L").arg(&{ let mut deps = OsString::from("dependency="); deps.push(layout.deps()); deps }); if unit.pkg.has_custom_build() { cmd.env("OUT_DIR", &layout.build_out(unit.pkg)); } for unit in cx.dep_targets(unit).iter() { if unit.target.linkable() { try!(link_to(cmd, cx, unit)); } } return Ok(()); fn link_to(cmd: &mut CommandPrototype, cx: &Context, unit: &Unit) -> CargoResult<()> { 
let layout = cx.layout(unit.pkg, unit.kind); for filename in try!(cx.target_filenames(unit)) { if filename.ends_with(".a") { continue } let mut v = OsString::new(); v.push(&unit.target.crate_name()); v.push("="); v.push(layout.root()); v.push(&path::MAIN_SEPARATOR.to_string()); v.push(&filename); cmd.arg("--extern").arg(&v); } Ok(()) } } pub fn process(cmd: CommandType, pkg: &Package, cx: &Context) -> CargoResult { // When invoking a tool, we need the *host* deps directory in the dynamic // library search path for plugins and such which have dynamic dependencies. let layout = cx.layout(pkg, Kind::Host); let mut search_path = util::dylib_path(); search_path.push(layout.deps().to_path_buf()); // We want to use the same environment and such as normal processes, but we // want to override the dylib search path with the one we just calculated. let search_path = try!(join_paths(&search_path, util::dylib_path_envvar())); let mut cmd = try!(cx.compilation.process(cmd, pkg)); cmd.env(util::dylib_path_envvar(), &search_path); Ok(cmd) } fn envify(s: &str) -> String { s.chars() .flat_map(|c| c.to_uppercase()) .map(|c| if c == '-' {'_'} else {c}) .collect() } impl Kind { fn for_target(&self, target: &Target) -> Kind { // Once we start compiling for the `Host` kind we continue doing so, but // if we are a `Target` kind and then we start compiling for a target // that needs to be on the host we lift ourselves up to `Host` match *self { Kind::Host => Kind::Host, Kind::Target if target.for_host() => Kind::Host, Kind::Target => Kind::Target, } } } cargo-0.8.0/src/cargo/ops/cargo_test.rs000066400000000000000000000144211264656333200200110ustar00rootroot00000000000000use std::ffi::{OsString, OsStr}; use std::path::Path; use ops::{self, ExecEngine, ProcessEngine, Compilation}; use util::{self, CargoResult, CargoTestError, ProcessError}; pub struct TestOptions<'a> { pub compile_opts: ops::CompileOptions<'a>, pub no_run: bool, pub no_fail_fast: bool, } #[allow(deprecated)] // connect => 
join in 1.3 pub fn run_tests(manifest_path: &Path, options: &TestOptions, test_args: &[String]) -> CargoResult> { let compilation = try!(compile_tests(manifest_path, options)); if options.no_run { return Ok(None) } let mut errors = try!(run_unit_tests(options, test_args, &compilation)); // If we have an error and want to fail fast, return if errors.len() > 0 && !options.no_fail_fast { return Ok(Some(CargoTestError::new(errors))) } // If a specific test was requested or we're not running any tests at all, // don't run any doc tests. if let ops::CompileFilter::Only { .. } = options.compile_opts.filter { match errors.len() { 0 => return Ok(None), _ => return Ok(Some(CargoTestError::new(errors))) } } errors.extend(try!(run_doc_tests(options, test_args, &compilation))); if errors.len() == 0 { Ok(None) } else { Ok(Some(CargoTestError::new(errors))) } } pub fn run_benches(manifest_path: &Path, options: &TestOptions, args: &[String]) -> CargoResult> { let mut args = args.to_vec(); args.push("--bench".to_string()); let compilation = try!(compile_tests(manifest_path, options)); let errors = try!(run_unit_tests(options, &args, &compilation)); match errors.len() { 0 => Ok(None), _ => Ok(Some(CargoTestError::new(errors))), } } fn compile_tests<'a>(manifest_path: &Path, options: &TestOptions<'a>) -> CargoResult> { let mut compilation = try!(ops::compile(manifest_path, &options.compile_opts)); compilation.tests.sort_by(|a, b| { (a.0.package_id(), &a.1).cmp(&(b.0.package_id(), &b.1)) }); Ok(compilation) } /// Run the unit and integration tests of a project. 
fn run_unit_tests(options: &TestOptions, test_args: &[String], compilation: &Compilation) -> CargoResult> { let config = options.compile_opts.config; let cwd = options.compile_opts.config.cwd(); let mut errors = Vec::new(); for &(ref pkg, _, ref exe) in &compilation.tests { let to_display = match util::without_prefix(exe, &cwd) { Some(path) => path, None => &**exe, }; let mut cmd = try!(compilation.target_process(exe, pkg)); cmd.args(test_args); try!(config.shell().concise(|shell| { shell.status("Running", to_display.display().to_string()) })); try!(config.shell().verbose(|shell| { shell.status("Running", cmd.to_string()) })); if let Err(e) = ExecEngine::exec(&mut ProcessEngine, cmd) { errors.push(e); if !options.no_fail_fast { break } } } Ok(errors) } #[allow(deprecated)] // connect => join in 1.3 fn run_doc_tests(options: &TestOptions, test_args: &[String], compilation: &Compilation) -> CargoResult> { let mut errors = Vec::new(); let config = options.compile_opts.config; let libs = compilation.to_doc_test.iter().map(|package| { (package, package.targets().iter().filter(|t| t.doctested()) .map(|t| (t.src_path(), t.name(), t.crate_name()))) }); for (package, tests) in libs { for (lib, name, crate_name) in tests { try!(config.shell().status("Doc-tests", name)); let mut p = try!(compilation.rustdoc_process(package)); p.arg("--test").arg(lib) .arg("--crate-name").arg(&crate_name); for &rust_dep in &[&compilation.deps_output, &compilation.root_output] { let mut arg = OsString::from("dependency="); arg.push(rust_dep); p.arg("-L").arg(arg); } for native_dep in compilation.native_dirs.values() { p.arg("-L").arg(native_dep); } if test_args.len() > 0 { p.arg("--test-args").arg(&test_args.connect(" ")); } for cfg in compilation.cfgs.iter() { p.arg("--cfg").arg(cfg); } for (_, libs) in compilation.libraries.iter() { for &(ref target, ref lib) in libs.iter() { // Note that we can *only* doctest rlib outputs here. 
A // staticlib output cannot be linked by the compiler (it just // doesn't do that). A dylib output, however, can be linked by // the compiler, but will always fail. Currently all dylibs are // built as "static dylibs" where the standard library is // statically linked into the dylib. The doc tests fail, // however, for now as they try to link the standard library // dynamically as well, causing problems. As a result we only // pass `--extern` for rlib deps and skip out on all other // artifacts. if lib.extension() != Some(OsStr::new("rlib")) && !target.for_host() { continue } let mut arg = OsString::from(target.crate_name()); arg.push("="); arg.push(lib); p.arg("--extern").arg(&arg); } } try!(config.shell().verbose(|shell| { shell.status("Running", p.to_string()) })); if let Err(e) = ExecEngine::exec(&mut ProcessEngine, p) { errors.push(e); if !options.no_fail_fast { return Ok(errors); } } } } Ok(errors) } cargo-0.8.0/src/cargo/ops/lockfile.rs000066400000000000000000000071141264656333200174500ustar00rootroot00000000000000use std::fs::File; use std::io::prelude::*; use std::path::Path; use rustc_serialize::{Encodable, Decodable}; use toml::{self, Encoder, Value}; use core::{Resolve, resolver, Package, SourceId}; use util::{CargoResult, ChainError, human, paths}; use util::toml as cargo_toml; pub fn load_pkg_lockfile(pkg: &Package) -> CargoResult> { let lockfile = pkg.root().join("Cargo.lock"); let source_id = pkg.package_id().source_id(); load_lockfile(&lockfile, source_id).chain_error(|| { human(format!("failed to parse lock file at: {}", lockfile.display())) }) } pub fn load_lockfile(path: &Path, sid: &SourceId) -> CargoResult> { // If there is no lockfile, return none. 
let mut f = match File::open(path) { Ok(f) => f, Err(_) => return Ok(None) }; let mut s = String::new(); try!(f.read_to_string(&mut s)); let table = toml::Value::Table(try!(cargo_toml::parse(&s, path))); let mut d = toml::Decoder::new(table); let v: resolver::EncodableResolve = try!(Decodable::decode(&mut d)); Ok(Some(try!(v.to_resolve(sid)))) } pub fn write_pkg_lockfile(pkg: &Package, resolve: &Resolve) -> CargoResult<()> { let loc = pkg.root().join("Cargo.lock"); write_lockfile(&loc, resolve) } pub fn write_lockfile(dst: &Path, resolve: &Resolve) -> CargoResult<()> { let mut e = Encoder::new(); resolve.encode(&mut e).unwrap(); let mut out = String::new(); // Note that we do not use e.toml.to_string() as we want to control the // exact format the toml is in to ensure pretty diffs between updates to the // lockfile. let root = e.toml.get(&"root".to_string()).unwrap(); out.push_str("[root]\n"); emit_package(root.as_table().unwrap(), &mut out); let deps = e.toml.get(&"package".to_string()).unwrap().as_slice().unwrap(); for dep in deps.iter() { let dep = dep.as_table().unwrap(); out.push_str("[[package]]\n"); emit_package(dep, &mut out); } match e.toml.get(&"metadata".to_string()) { Some(metadata) => { out.push_str("[metadata]\n"); out.push_str(&metadata.to_string()); } None => {} } // Load the original lockfile if it exists. if let Ok(orig) = paths::read(dst) { if has_crlf_line_endings(&orig) { out = out.replace("\n", "\r\n"); } if out == orig { // The lockfile contents haven't changed so don't rewrite it. // This is helpful on read-only filesystems. return Ok(()) } } try!(paths::write(dst, out.as_bytes())); Ok(()) } fn has_crlf_line_endings(s: &str) -> bool { // Only check the first line. 
if let Some(lf) = s.find('\n') { s[..lf].ends_with('\r') } else { false } } fn emit_package(dep: &toml::Table, out: &mut String) { out.push_str(&format!("name = {}\n", lookup(dep, "name"))); out.push_str(&format!("version = {}\n", lookup(dep, "version"))); if dep.contains_key("source") { out.push_str(&format!("source = {}\n", lookup(dep, "source"))); } if let Some(ref s) = dep.get("dependencies") { let slice = Value::as_slice(*s).unwrap(); if !slice.is_empty() { out.push_str("dependencies = [\n"); for child in slice.iter() { out.push_str(&format!(" {},\n", child)); } out.push_str("]\n"); } out.push_str("\n"); } } fn lookup<'a>(table: &'a toml::Table, key: &str) -> &'a toml::Value { table.get(key).expect(&format!("didn't find {}", key)) } cargo-0.8.0/src/cargo/ops/mod.rs000066400000000000000000000034301264656333200164340ustar00rootroot00000000000000pub use self::cargo_clean::{clean, CleanOptions}; pub use self::cargo_compile::{compile, compile_pkg, resolve_dependencies, CompileOptions}; pub use self::cargo_compile::{CompileFilter, CompileMode}; pub use self::cargo_read_manifest::{read_manifest,read_package,read_packages}; pub use self::cargo_rustc::{compile_targets, Compilation, Layout, Kind, Unit}; pub use self::cargo_rustc::{Context, LayoutProxy}; pub use self::cargo_rustc::{BuildOutput, BuildConfig, TargetConfig}; pub use self::cargo_rustc::{CommandType, CommandPrototype, ExecEngine, ProcessEngine}; pub use self::cargo_run::run; pub use self::cargo_install::{install, install_list, uninstall}; pub use self::cargo_new::{new, NewOptions, VersionControl}; pub use self::cargo_doc::{doc, DocOptions}; pub use self::cargo_generate_lockfile::{generate_lockfile}; pub use self::cargo_generate_lockfile::{update_lockfile}; pub use self::cargo_generate_lockfile::UpdateOptions; pub use self::lockfile::{load_lockfile, load_pkg_lockfile}; pub use self::lockfile::{write_lockfile, write_pkg_lockfile}; pub use self::cargo_test::{run_tests, run_benches, TestOptions}; pub use 
self::cargo_package::package; pub use self::registry::{publish, registry_configuration, RegistryConfig}; pub use self::registry::{registry_login, search, http_proxy_exists, http_handle}; pub use self::registry::{modify_owners, yank, OwnersOptions}; pub use self::cargo_fetch::{fetch, get_resolved_packages}; pub use self::cargo_pkgid::pkgid; pub use self::resolve::{resolve_pkg, resolve_with_previous}; mod cargo_clean; mod cargo_compile; mod cargo_doc; mod cargo_fetch; mod cargo_generate_lockfile; mod cargo_install; mod cargo_new; mod cargo_package; mod cargo_pkgid; mod cargo_read_manifest; mod cargo_run; mod cargo_rustc; mod cargo_test; mod lockfile; mod registry; mod resolve; cargo-0.8.0/src/cargo/ops/registry.rs000066400000000000000000000316111264656333200175270ustar00rootroot00000000000000use std::collections::HashMap; use std::env; use std::fs; use std::io::prelude::*; use std::iter::repeat; use std::path::{Path, PathBuf}; use curl::http; use git2; use registry::{Registry, NewCrate, NewCrateDependency}; use term::color::BLACK; use core::source::Source; use core::{Package, SourceId}; use core::dependency::Kind; use core::manifest::ManifestMetadata; use ops; use sources::{RegistrySource}; use util::config; use util::paths; use util::{CargoResult, human, ChainError, ToUrl}; use util::config::{Config, ConfigValue, Location}; use util::important_paths::find_root_manifest_for_wd; pub struct RegistryConfig { pub index: Option, pub token: Option, } pub fn publish(manifest_path: &Path, config: &Config, token: Option, index: Option, verify: bool) -> CargoResult<()> { let pkg = try!(Package::for_path(&manifest_path, config)); let (mut registry, reg_id) = try!(registry(config, token, index)); try!(verify_dependencies(&pkg, ®_id)); // Prepare a tarball, with a non-surpressable warning if metadata // is missing since this is being put online. 
let tarball = try!(ops::package(manifest_path, config, verify, false, true)).unwrap(); // Upload said tarball to the specified destination try!(config.shell().status("Uploading", pkg.package_id().to_string())); try!(transmit(&pkg, &tarball, &mut registry)); Ok(()) } fn verify_dependencies(pkg: &Package, registry_src: &SourceId) -> CargoResult<()> { for dep in pkg.dependencies().iter() { if dep.source_id().is_path() { if dep.specified_req().is_none() { bail!("all path dependencies must have a version specified \ when publishing.\ndependency `{}` does not specify \ a version", dep.name()) } } else if dep.source_id() != registry_src { bail!("all dependencies must come from the same source.\n\ dependency `{}` comes from {} instead", dep.name(), dep.source_id()) } } Ok(()) } fn transmit(pkg: &Package, tarball: &Path, registry: &mut Registry) -> CargoResult<()> { let deps = pkg.dependencies().iter().map(|dep| { NewCrateDependency { optional: dep.is_optional(), default_features: dep.uses_default_features(), name: dep.name().to_string(), features: dep.features().to_vec(), version_req: dep.version_req().to_string(), target: dep.only_for_platform().map(|s| s.to_string()), kind: match dep.kind() { Kind::Normal => "normal", Kind::Build => "build", Kind::Development => "dev", }.to_string(), } }).collect::>(); let manifest = pkg.manifest(); let ManifestMetadata { ref authors, ref description, ref homepage, ref documentation, ref keywords, ref readme, ref repository, ref license, ref license_file, } = *manifest.metadata(); let readme = match *readme { Some(ref readme) => Some(try!(paths::read(&pkg.root().join(readme)))), None => None, }; match *license_file { Some(ref file) => { if fs::metadata(&pkg.root().join(file)).is_err() { bail!("the license file `{}` does not exist", file) } } None => {} } registry.publish(&NewCrate { name: pkg.name().to_string(), vers: pkg.version().to_string(), deps: deps, features: pkg.summary().features().clone(), authors: authors.clone(), description: 
description.clone(), homepage: homepage.clone(), documentation: documentation.clone(), keywords: keywords.clone(), readme: readme, repository: repository.clone(), license: license.clone(), license_file: license_file.clone(), }, tarball).map_err(|e| { human(e.to_string()) }) } pub fn registry_configuration(config: &Config) -> CargoResult { let index = try!(config.get_string("registry.index")).map(|p| p.0); let token = try!(config.get_string("registry.token")).map(|p| p.0); Ok(RegistryConfig { index: index, token: token }) } pub fn registry(config: &Config, token: Option, index: Option) -> CargoResult<(Registry, SourceId)> { // Parse all configuration options let RegistryConfig { token: token_config, index: index_config, } = try!(registry_configuration(config)); let token = token.or(token_config); let index = index.or(index_config).unwrap_or(RegistrySource::default_url()); let index = try!(index.to_url().map_err(human)); let sid = SourceId::for_registry(&index); let api_host = { let mut src = RegistrySource::new(&sid, config); try!(src.update().chain_error(|| { human(format!("failed to update registry {}", index)) })); (try!(src.config())).api }; let handle = try!(http_handle(config)); Ok((Registry::new_handle(api_host, token, handle), sid)) } /// Create a new HTTP handle with appropriate global configuration for cargo. pub fn http_handle(config: &Config) -> CargoResult { // The timeout option for libcurl by default times out the entire transfer, // but we probably don't want this. Instead we only set timeouts for the // connect phase as well as a "low speed" timeout so if we don't receive // many bytes in a large-ish period of time then we time out. 
let handle = http::handle().timeout(0) .connect_timeout(30_000 /* milliseconds */) .low_speed_limit(10 /* bytes per second */) .low_speed_timeout(30 /* seconds */); let handle = match try!(http_proxy(config)) { Some(proxy) => handle.proxy(proxy), None => handle, }; let handle = match try!(http_timeout(config)) { Some(timeout) => handle.connect_timeout(timeout as usize) .low_speed_timeout((timeout as usize) / 1000), None => handle, }; Ok(handle) } /// Find an explicit HTTP proxy if one is available. /// /// Favor cargo's `http.proxy`, then git's `http.proxy`. Proxies specified /// via environment variables are picked up by libcurl. fn http_proxy(config: &Config) -> CargoResult> { match try!(config.get_string("http.proxy")) { Some((s, _)) => return Ok(Some(s)), None => {} } match git2::Config::open_default() { Ok(cfg) => { match cfg.get_str("http.proxy") { Ok(s) => return Ok(Some(s.to_string())), Err(..) => {} } } Err(..) => {} } Ok(None) } /// Determine if an http proxy exists. /// /// Checks the following for existence, in order: /// /// * cargo's `http.proxy` /// * git's `http.proxy` /// * http_proxy env var /// * HTTP_PROXY env var /// * https_proxy env var /// * HTTPS_PROXY env var pub fn http_proxy_exists(config: &Config) -> CargoResult { if try!(http_proxy(config)).is_some() { Ok(true) } else { Ok(["http_proxy", "HTTP_PROXY", "https_proxy", "HTTPS_PROXY"].iter().any(|v| env::var(v).is_ok())) } } pub fn http_timeout(config: &Config) -> CargoResult> { match try!(config.get_i64("http.timeout")) { Some((s, _)) => return Ok(Some(s)), None => {} } Ok(env::var("HTTP_TIMEOUT").ok().and_then(|s| s.parse().ok())) } pub fn registry_login(config: &Config, token: String) -> CargoResult<()> { let RegistryConfig { index, token: _ } = try!(registry_configuration(config)); let mut map = HashMap::new(); let p = config.cwd().to_path_buf(); match index { Some(index) => { map.insert("index".to_string(), ConfigValue::String(index, p.clone())); } None => {} } 
map.insert("token".to_string(), ConfigValue::String(token, p)); config::set_config(config, Location::Global, "registry", ConfigValue::Table(map, PathBuf::from("."))) } pub struct OwnersOptions { pub krate: Option, pub token: Option, pub index: Option, pub to_add: Option>, pub to_remove: Option>, pub list: bool, } pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> { let name = match opts.krate { Some(ref name) => name.clone(), None => { let manifest_path = try!(find_root_manifest_for_wd(None, config.cwd())); let pkg = try!(Package::for_path(&manifest_path, config)); pkg.name().to_string() } }; let (mut registry, _) = try!(registry(config, opts.token.clone(), opts.index.clone())); match opts.to_add { Some(ref v) => { let v = v.iter().map(|s| &s[..]).collect::>(); try!(config.shell().status("Owner", format!("adding {:?} to crate {}", v, name))); try!(registry.add_owners(&name, &v).map_err(|e| { human(format!("failed to add owners to crate {}: {}", name, e)) })); } None => {} } match opts.to_remove { Some(ref v) => { let v = v.iter().map(|s| &s[..]).collect::>(); try!(config.shell().status("Owner", format!("removing {:?} from crate {}", v, name))); try!(registry.remove_owners(&name, &v).map_err(|e| { human(format!("failed to remove owners from crate {}: {}", name, e)) })); } None => {} } if opts.list { let owners = try!(registry.list_owners(&name).map_err(|e| { human(format!("failed to list owners of crate {}: {}", name, e)) })); for owner in owners.iter() { print!("{}", owner.login); match (owner.name.as_ref(), owner.email.as_ref()) { (Some(name), Some(email)) => println!(" ({} <{}>)", name, email), (Some(s), None) | (None, Some(s)) => println!(" ({})", s), (None, None) => println!(""), } } } Ok(()) } pub fn yank(config: &Config, krate: Option, version: Option, token: Option, index: Option, undo: bool) -> CargoResult<()> { let name = match krate { Some(name) => name, None => { let manifest_path = try!(find_root_manifest_for_wd(None, 
config.cwd())); let pkg = try!(Package::for_path(&manifest_path, config)); pkg.name().to_string() } }; let version = match version { Some(v) => v, None => bail!("a version must be specified to yank") }; let (mut registry, _) = try!(registry(config, token, index)); if undo { try!(config.shell().status("Unyank", format!("{}:{}", name, version))); try!(registry.unyank(&name, &version).map_err(|e| { human(format!("failed to undo a yank: {}", e)) })); } else { try!(config.shell().status("Yank", format!("{}:{}", name, version))); try!(registry.yank(&name, &version).map_err(|e| { human(format!("failed to yank: {}", e)) })); } Ok(()) } pub fn search(query: &str, config: &Config, index: Option) -> CargoResult<()> { fn truncate_with_ellipsis(s: &str, max_length: usize) -> String { if s.len() < max_length { s.to_string() } else { format!("{}…", &s[..max_length - 1]) } } let (mut registry, _) = try!(registry(config, None, index)); let crates = try!(registry.search(query).map_err(|e| { human(format!("failed to retrieve search results from the registry: {}", e)) })); let list_items = crates.iter() .map(|krate| ( format!("{} ({})", krate.name, krate.max_version), krate.description.as_ref().map(|desc| truncate_with_ellipsis(&desc.replace("\n", " "), 128)) )) .collect::>(); let description_margin = list_items.iter() .map(|&(ref left, _)| left.len() + 4) .max() .unwrap_or(0); for (name, description) in list_items.into_iter() { let line = match description { Some(desc) => { let space = repeat(' ').take(description_margin - name.len()) .collect::(); name.to_string() + &space + &desc } None => name }; try!(config.shell().say(line, BLACK)); } Ok(()) } cargo-0.8.0/src/cargo/ops/resolve.rs000066400000000000000000000125301264656333200173350ustar00rootroot00000000000000use std::collections::{HashMap, HashSet}; use core::{Package, PackageId, SourceId}; use core::registry::PackageRegistry; use core::resolver::{self, Resolve, Method}; use ops; use util::CargoResult; /// Resolve all 
dependencies for the specified `package` using the previous /// lockfile as a guide if present. /// /// This function will also write the result of resolution as a new /// lockfile. pub fn resolve_pkg(registry: &mut PackageRegistry, package: &Package) -> CargoResult { let prev = try!(ops::load_pkg_lockfile(package)); let resolve = try!(resolve_with_previous(registry, package, Method::Everything, prev.as_ref(), None)); if package.package_id().source_id().is_path() { try!(ops::write_pkg_lockfile(package, &resolve)); } Ok(resolve) } /// Resolve all dependencies for a package using an optional previous instance /// of resolve to guide the resolution process. /// /// This also takes an optional hash set, `to_avoid`, which is a list of package /// ids that should be avoided when consulting the previous instance of resolve /// (often used in pairings with updates). /// /// The previous resolve normally comes from a lockfile. This function does not /// read or write lockfiles from the filesystem. pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry, package: &Package, method: Method, previous: Option<&'a Resolve>, to_avoid: Option<&HashSet<&'a PackageId>>) -> CargoResult { try!(registry.add_sources(&[package.package_id().source_id() .clone()])); // Here we place an artificial limitation that all non-registry sources // cannot be locked at more than one revision. This means that if a git // repository provides more than one package, they must all be updated in // step when any of them are updated. 
// // TODO: This seems like a hokey reason to single out the registry as being // different let mut to_avoid_sources = HashSet::new(); match to_avoid { Some(set) => { for package_id in set.iter() { let source = package_id.source_id(); if !source.is_registry() { to_avoid_sources.insert(source); } } } None => {} } let summary = package.summary().clone(); let summary = match previous { Some(r) => { // In the case where a previous instance of resolve is available, we // want to lock as many packages as possible to the previous version // without disturbing the graph structure. To this end we perform // two actions here: // // 1. We inform the package registry of all locked packages. This // involves informing it of both the locked package's id as well // as the versions of all locked dependencies. The registry will // then takes this information into account when it is queried. // // 2. The specified package's summary will have its dependencies // modified to their precise variants. This will instruct the // first step of the resolution process to not query for ranges // but rather for precise dependency versions. // // This process must handle altered dependencies, however, as // it's possible for a manifest to change over time to have // dependencies added, removed, or modified to different version // ranges. To deal with this, we only actually lock a dependency // to the previously resolved version if the dependency listed // still matches the locked version. 
for node in r.iter().filter(|p| keep(p, to_avoid, &to_avoid_sources)) { let deps = r.deps(node).into_iter().flat_map(|i| i) .filter(|p| keep(p, to_avoid, &to_avoid_sources)) .map(|p| p.clone()).collect(); registry.register_lock(node.clone(), deps); } let map = r.deps(r.root()).into_iter().flat_map(|i| i).filter(|p| { keep(p, to_avoid, &to_avoid_sources) }).map(|d| { (d.name(), d) }).collect::>(); summary.map_dependencies(|d| { match map.get(d.name()) { Some(&lock) if d.matches_id(lock) => d.lock_to(lock), _ => d, } }) } None => summary, }; let mut resolved = try!(resolver::resolve(&summary, &method, registry)); match previous { Some(r) => resolved.copy_metadata(r), None => {} } return Ok(resolved); fn keep<'a>(p: &&'a PackageId, to_avoid_packages: Option<&HashSet<&'a PackageId>>, to_avoid_sources: &HashSet<&'a SourceId>) -> bool { !to_avoid_sources.contains(&p.source_id()) && match to_avoid_packages { Some(set) => !set.contains(p), None => true, } } } cargo-0.8.0/src/cargo/sources/000077500000000000000000000000001264656333200161715ustar00rootroot00000000000000cargo-0.8.0/src/cargo/sources/git/000077500000000000000000000000001264656333200167545ustar00rootroot00000000000000cargo-0.8.0/src/cargo/sources/git/mod.rs000066400000000000000000000002341264656333200201000ustar00rootroot00000000000000pub use self::utils::{GitRemote, GitDatabase, GitCheckout, GitRevision, fetch}; pub use self::source::{GitSource, canonicalize_url}; mod utils; mod source; cargo-0.8.0/src/cargo/sources/git/source.rs000066400000000000000000000210361264656333200206240ustar00rootroot00000000000000use std::fmt::{self, Debug, Formatter}; use std::hash::{Hash, Hasher, SipHasher}; use std::mem; use std::path::PathBuf; use url::{self, Url}; use core::source::{Source, SourceId}; use core::GitReference; use core::{Package, PackageId, Summary, Registry, Dependency}; use util::{CargoResult, Config, to_hex}; use sources::PathSource; use sources::git::utils::{GitRemote, GitRevision}; /* TODO: Refactor 
GitSource to delegate to a PathSource */ pub struct GitSource<'cfg> { remote: GitRemote, reference: GitReference, db_path: PathBuf, checkout_path: PathBuf, source_id: SourceId, path_source: Option>, rev: Option, config: &'cfg Config, } impl<'cfg> GitSource<'cfg> { pub fn new(source_id: &SourceId, config: &'cfg Config) -> GitSource<'cfg> { assert!(source_id.is_git(), "id is not git, id={}", source_id); let reference = match source_id.git_reference() { Some(reference) => reference, None => panic!("Not a git source; id={}", source_id), }; let remote = GitRemote::new(source_id.url()); let ident = ident(source_id.url()); let db_path = config.git_db_path().join(&ident); let reference_path = match *reference { GitReference::Branch(ref s) | GitReference::Tag(ref s) | GitReference::Rev(ref s) => s.to_string(), }; let checkout_path = config.git_checkout_path() .join(&ident) .join(&reference_path); let reference = match source_id.precise() { Some(s) => GitReference::Rev(s.to_string()), None => source_id.git_reference().unwrap().clone(), }; GitSource { remote: remote, reference: reference, db_path: db_path, checkout_path: checkout_path, source_id: source_id.clone(), path_source: None, rev: None, config: config, } } pub fn url(&self) -> &Url { self.remote.url() } pub fn read_packages(&mut self) -> CargoResult> { if self.path_source.is_none() { try!(self.update()); } self.path_source.as_mut().unwrap().read_packages() } } fn ident(url: &Url) -> String { let mut hasher = SipHasher::new_with_keys(0,0); // FIXME: this really should be able to not use to_str() everywhere, but the // compiler seems to currently ask for static lifetimes spuriously. 
// Perhaps related to rust-lang/rust#15144 let url = canonicalize_url(url); let ident = url.path().unwrap_or(&[]) .last().map(|a| a.clone()).unwrap_or(String::new()); let ident = if ident == "" { "_empty".to_string() } else { ident }; url.hash(&mut hasher); format!("{}-{}", ident, to_hex(hasher.finish())) } // Some hacks and heuristics for making equivalent URLs hash the same pub fn canonicalize_url(url: &Url) -> Url { let mut url = url.clone(); // Strip a trailing slash match url.scheme_data { url::SchemeData::Relative(ref mut rel) => { if rel.path.last().map(|s| s.is_empty()).unwrap_or(false) { rel.path.pop(); } } _ => {} } // HACKHACK: For github URL's specifically just lowercase // everything. GitHub treats both the same, but they hash // differently, and we're gonna be hashing them. This wants a more // general solution, and also we're almost certainly not using the // same case conversion rules that GitHub does. (#84) if url.domain() == Some("github.com") { url.scheme = "https".to_string(); match url.scheme_data { url::SchemeData::Relative(ref mut rel) => { rel.port = Some(443); rel.default_port = Some(443); let path = mem::replace(&mut rel.path, Vec::new()); rel.path = path.into_iter().map(|s| { s.chars().flat_map(|c| c.to_lowercase()).collect() }).collect(); } _ => {} } } // Repos generally can be accessed with or w/o '.git' match url.scheme_data { url::SchemeData::Relative(ref mut rel) => { let needs_chopping = { let last = rel.path.last().map(|s| &s[..]).unwrap_or(""); last.ends_with(".git") }; if needs_chopping { let last = rel.path.pop().unwrap(); rel.path.push(last[..last.len() - 4].to_string()) } } _ => {} } return url; } impl<'cfg> Debug for GitSource<'cfg> { fn fmt(&self, f: &mut Formatter) -> fmt::Result { try!(write!(f, "git repo at {}", self.remote.url())); match self.reference.to_ref_string() { Some(s) => write!(f, " ({})", s), None => Ok(()) } } } impl<'cfg> Registry for GitSource<'cfg> { fn query(&mut self, dep: &Dependency) -> CargoResult> { 
let src = self.path_source.as_mut() .expect("BUG: update() must be called before query()"); src.query(dep) } } impl<'cfg> Source for GitSource<'cfg> { fn update(&mut self) -> CargoResult<()> { let actual_rev = self.remote.rev_for(&self.db_path, &self.reference); let should_update = actual_rev.is_err() || self.source_id.precise().is_none(); let (repo, actual_rev) = if should_update { try!(self.config.shell().status("Updating", format!("git repository `{}`", self.remote.url()))); trace!("updating git source `{:?}`", self.remote); let repo = try!(self.remote.checkout(&self.db_path)); let rev = try!(repo.rev_for(&self.reference)); (repo, rev) } else { (try!(self.remote.db_at(&self.db_path)), actual_rev.unwrap()) }; try!(repo.copy_to(actual_rev.clone(), &self.checkout_path)); let source_id = self.source_id.with_precise(Some(actual_rev.to_string())); let path_source = PathSource::new(&self.checkout_path, &source_id, self.config); self.path_source = Some(path_source); self.rev = Some(actual_rev); self.path_source.as_mut().unwrap().update() } fn download(&mut self, _: &[PackageId]) -> CargoResult<()> { // TODO: assert! 
that the PackageId is contained by the source Ok(()) } fn get(&self, ids: &[PackageId]) -> CargoResult> { trace!("getting packages for package ids `{:?}` from `{:?}`", ids, self.remote); self.path_source.as_ref().expect("BUG: update() must be called \ before get()").get(ids) } fn fingerprint(&self, _pkg: &Package) -> CargoResult { Ok(self.rev.as_ref().unwrap().to_string()) } } #[cfg(test)] mod test { use url::Url; use super::ident; use util::ToUrl; #[test] pub fn test_url_to_path_ident_with_path() { let ident = ident(&url("https://github.com/carlhuda/cargo")); assert!(ident.starts_with("cargo-")); } #[test] pub fn test_url_to_path_ident_without_path() { let ident = ident(&url("https://github.com")); assert!(ident.starts_with("_empty-")); } #[test] fn test_canonicalize_idents_by_stripping_trailing_url_slash() { let ident1 = ident(&url("https://github.com/PistonDevelopers/piston/")); let ident2 = ident(&url("https://github.com/PistonDevelopers/piston")); assert_eq!(ident1, ident2); } #[test] fn test_canonicalize_idents_by_lowercasing_github_urls() { let ident1 = ident(&url("https://github.com/PistonDevelopers/piston")); let ident2 = ident(&url("https://github.com/pistondevelopers/piston")); assert_eq!(ident1, ident2); } #[test] fn test_canonicalize_idents_by_stripping_dot_git() { let ident1 = ident(&url("https://github.com/PistonDevelopers/piston")); let ident2 = ident(&url("https://github.com/PistonDevelopers/piston.git")); assert_eq!(ident1, ident2); } #[test] fn test_canonicalize_idents_different_protocls() { let ident1 = ident(&url("https://github.com/PistonDevelopers/piston")); let ident2 = ident(&url("git://github.com/PistonDevelopers/piston")); assert_eq!(ident1, ident2); } fn url(s: &str) -> Url { s.to_url().unwrap() } } cargo-0.8.0/src/cargo/sources/git/utils.rs000066400000000000000000000357701264656333200204760ustar00rootroot00000000000000use std::fmt; use std::path::{Path, PathBuf}; use std::fs::{self, File}; use rustc_serialize::{Encodable, Encoder}; use 
url::Url; use git2::{self, ObjectType}; use core::GitReference; use util::{CargoResult, ChainError, human, ToUrl, internal}; #[derive(PartialEq, Clone, Debug)] pub struct GitRevision(git2::Oid); impl fmt::Display for GitRevision { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(&self.0, f) } } /// GitRemote represents a remote repository. It gets cloned into a local /// GitDatabase. #[derive(PartialEq,Clone,Debug)] pub struct GitRemote { url: Url, } #[derive(PartialEq,Clone,RustcEncodable)] struct EncodableGitRemote { url: String, } impl Encodable for GitRemote { fn encode(&self, s: &mut S) -> Result<(), S::Error> { EncodableGitRemote { url: self.url.to_string() }.encode(s) } } /// GitDatabase is a local clone of a remote repository's database. Multiple /// GitCheckouts can be cloned from this GitDatabase. pub struct GitDatabase { remote: GitRemote, path: PathBuf, repo: git2::Repository, } #[derive(RustcEncodable)] pub struct EncodableGitDatabase { remote: GitRemote, path: String, } impl Encodable for GitDatabase { fn encode(&self, s: &mut S) -> Result<(), S::Error> { EncodableGitDatabase { remote: self.remote.clone(), path: self.path.display().to_string() }.encode(s) } } /// GitCheckout is a local checkout of a particular revision. Calling /// `clone_into` with a reference will resolve the reference into a revision, /// and return a CargoError if no revision for that reference was found. 
pub struct GitCheckout<'a> { database: &'a GitDatabase, location: PathBuf, revision: GitRevision, repo: git2::Repository, } #[derive(RustcEncodable)] pub struct EncodableGitCheckout { database: EncodableGitDatabase, location: String, revision: String, } impl<'a> Encodable for GitCheckout<'a> { fn encode(&self, s: &mut S) -> Result<(), S::Error> { EncodableGitCheckout { location: self.location.display().to_string(), revision: self.revision.to_string(), database: EncodableGitDatabase { remote: self.database.remote.clone(), path: self.database.path.display().to_string(), }, }.encode(s) } } // Implementations impl GitRemote { pub fn new(url: &Url) -> GitRemote { GitRemote { url: url.clone() } } pub fn url(&self) -> &Url { &self.url } pub fn rev_for(&self, path: &Path, reference: &GitReference) -> CargoResult { let db = try!(self.db_at(path)); db.rev_for(reference) } pub fn checkout(&self, into: &Path) -> CargoResult { let repo = match git2::Repository::open(into) { Ok(repo) => { try!(self.fetch_into(&repo).chain_error(|| { human(format!("failed to fetch into {}", into.display())) })); repo } Err(..) 
=> { try!(self.clone_into(into).chain_error(|| { human(format!("failed to clone into: {}", into.display())) })) } }; Ok(GitDatabase { remote: self.clone(), path: into.to_path_buf(), repo: repo, }) } pub fn db_at(&self, db_path: &Path) -> CargoResult { let repo = try!(git2::Repository::open(db_path)); Ok(GitDatabase { remote: self.clone(), path: db_path.to_path_buf(), repo: repo, }) } fn fetch_into(&self, dst: &git2::Repository) -> CargoResult<()> { // Create a local anonymous remote in the repository to fetch the url let url = self.url.to_string(); let refspec = "refs/heads/*:refs/heads/*"; fetch(dst, &url, refspec) } fn clone_into(&self, dst: &Path) -> CargoResult { let url = self.url.to_string(); if fs::metadata(&dst).is_ok() { try!(fs::remove_dir_all(dst)); } try!(fs::create_dir_all(dst)); let repo = try!(git2::Repository::init_bare(dst)); try!(fetch(&repo, &url, "refs/heads/*:refs/heads/*")); Ok(repo) } } impl GitDatabase { fn path<'a>(&'a self) -> &'a Path { &self.path } pub fn copy_to(&self, rev: GitRevision, dest: &Path) -> CargoResult { let checkout = match git2::Repository::open(dest) { Ok(repo) => { let checkout = GitCheckout::new(dest, self, rev, repo); if !checkout.is_fresh() { try!(checkout.fetch()); try!(checkout.reset()); assert!(checkout.is_fresh()); } checkout } Err(..) 
=> try!(GitCheckout::clone_into(dest, self, rev)), }; try!(checkout.update_submodules().chain_error(|| { internal("failed to update submodules") })); Ok(checkout) } pub fn rev_for(&self, reference: &GitReference) -> CargoResult { let id = match *reference { GitReference::Tag(ref s) => { try!((|| { let refname = format!("refs/tags/{}", s); let id = try!(self.repo.refname_to_id(&refname)); let obj = try!(self.repo.find_object(id, None)); let obj = try!(obj.peel(ObjectType::Commit)); Ok(obj.id()) }).chain_error(|| { human(format!("failed to find tag `{}`", s)) })) } GitReference::Branch(ref s) => { try!((|| { let b = try!(self.repo.find_branch(s, git2::BranchType::Local)); b.get().target().chain_error(|| { human(format!("branch `{}` did not have a target", s)) }) }).chain_error(|| { human(format!("failed to find branch `{}`", s)) })) } GitReference::Rev(ref s) => { let obj = try!(self.repo.revparse_single(s)); obj.id() } }; Ok(GitRevision(id)) } pub fn has_ref(&self, reference: &str) -> CargoResult<()> { try!(self.repo.revparse_single(reference)); Ok(()) } } impl<'a> GitCheckout<'a> { fn new(path: &Path, database: &'a GitDatabase, revision: GitRevision, repo: git2::Repository) -> GitCheckout<'a> { GitCheckout { location: path.to_path_buf(), database: database, revision: revision, repo: repo, } } fn clone_into(into: &Path, database: &'a GitDatabase, revision: GitRevision) -> CargoResult> { let repo = try!(GitCheckout::clone_repo(database.path(), into)); let checkout = GitCheckout::new(into, database, revision, repo); try!(checkout.reset()); Ok(checkout) } fn clone_repo(source: &Path, into: &Path) -> CargoResult { let dirname = into.parent().unwrap(); try!(fs::create_dir_all(&dirname).chain_error(|| { human(format!("Couldn't mkdir {}", dirname.display())) })); if fs::metadata(&into).is_ok() { try!(fs::remove_dir_all(into).chain_error(|| { human(format!("Couldn't rmdir {}", into.display())) })); } let url = try!(source.to_url().map_err(human)); let url = url.to_string(); 
let repo = try!(git2::Repository::clone(&url, into).chain_error(|| { internal(format!("failed to clone {} into {}", source.display(), into.display())) })); Ok(repo) } fn is_fresh(&self) -> bool { match self.repo.revparse_single("HEAD") { Ok(ref head) if head.id() == self.revision.0 => { // See comments in reset() for why we check this fs::metadata(self.location.join(".cargo-ok")).is_ok() } _ => false, } } fn fetch(&self) -> CargoResult<()> { info!("fetch {}", self.repo.path().display()); let url = try!(self.database.path.to_url().map_err(human)); let url = url.to_string(); let refspec = "refs/heads/*:refs/heads/*"; try!(fetch(&self.repo, &url, refspec)); Ok(()) } fn reset(&self) -> CargoResult<()> { // If we're interrupted while performing this reset (e.g. we die because // of a signal) Cargo needs to be sure to try to check out this repo // again on the next go-round. // // To enable this we have a dummy file in our checkout, .cargo-ok, which // if present means that the repo has been successfully reset and is // ready to go. Hence if we start to do a reset, we make sure this file // *doesn't* exist, and then once we're done we create the file. let ok_file = self.location.join(".cargo-ok"); let _ = fs::remove_file(&ok_file); info!("reset {} to {}", self.repo.path().display(), self.revision); let object = try!(self.repo.find_object(self.revision.0, None)); try!(self.repo.reset(&object, git2::ResetType::Hard, None)); try!(File::create(ok_file)); Ok(()) } fn update_submodules(&self) -> CargoResult<()> { return update_submodules(&self.repo); fn update_submodules(repo: &git2::Repository) -> CargoResult<()> { info!("update submodules for: {:?}", repo.workdir().unwrap()); for mut child in try!(repo.submodules()).into_iter() { try!(child.init(false)); let url = try!(child.url().chain_error(|| { internal("non-utf8 url for submodule") })); // A submodule which is listed in .gitmodules but not actually // checked out will not have a head id, so we should ignore it. 
let head = match child.head_id() { Some(head) => head, None => continue, }; // If the submodule hasn't been checked out yet, we need to // clone it. If it has been checked out and the head is the same // as the submodule's head, then we can bail out and go to the // next submodule. let head_and_repo = child.open().and_then(|repo| { let target = try!(repo.head()).target(); Ok((target, repo)) }); let repo = match head_and_repo { Ok((head, repo)) => { if child.head_id() == head { continue } repo } Err(..) => { let path = repo.workdir().unwrap().join(child.path()); try!(git2::Repository::clone(url, &path)) } }; // Fetch data from origin and reset to the head commit let refspec = "refs/heads/*:refs/heads/*"; try!(fetch(&repo, url, refspec).chain_error(|| { internal(format!("failed to fetch submodule `{}` from {}", child.name().unwrap_or(""), url)) })); let obj = try!(repo.find_object(head, None)); try!(repo.reset(&obj, git2::ResetType::Hard, None)); try!(update_submodules(&repo)); } Ok(()) } } } fn with_authentication(url: &str, cfg: &git2::Config, mut f: F) -> CargoResult where F: FnMut(&mut git2::Credentials) -> CargoResult { // Prepare the authentication callbacks. // // We check the `allowed` types of credentials, and we try to do as much as // possible based on that: // // * Prioritize SSH keys from the local ssh agent as they're likely the most // reliable. The username here is prioritized from the credential // callback, then from whatever is configured in git itself, and finally // we fall back to the generic user of `git`. // // * If a username/password is allowed, then we fallback to git2-rs's // implementation of the credential helper. This is what is configured // with `credential.helper` in git, and is the interface for the OSX // keychain, for example. // // * After the above two have failed, we just kinda grapple attempting to // return *something*. 
// // Note that we keep track of the number of times we've called this callback // because libgit2 will repeatedly give us credentials until we give it a // reason to not do so. If we've been called once and our credentials failed // then we'll be called again, and in this case we assume that the reason // was because the credentials were wrong. let mut cred_helper = git2::CredentialHelper::new(url); cred_helper.config(cfg); let mut called = 0; let res = f(&mut |url, username, allowed| { called += 1; if called >= 2 { return Err(git2::Error::from_str("no authentication available")) } if allowed.contains(git2::SSH_KEY) || allowed.contains(git2::USERNAME) { let user = username.map(|s| s.to_string()) .or_else(|| cred_helper.username.clone()) .unwrap_or("git".to_string()); if allowed.contains(git2::USERNAME) { git2::Cred::username(&user) } else { git2::Cred::ssh_key_from_agent(&user) } } else if allowed.contains(git2::USER_PASS_PLAINTEXT) { git2::Cred::credential_helper(cfg, url, username) } else if allowed.contains(git2::DEFAULT) { git2::Cred::default() } else { Err(git2::Error::from_str("no authentication available")) } }); if called > 0 { res.chain_error(|| { human("failed to authenticate when downloading repository") }) } else { res } } pub fn fetch(repo: &git2::Repository, url: &str, refspec: &str) -> CargoResult<()> { // Create a local anonymous remote in the repository to fetch the url with_authentication(url, &try!(repo.config()), |f| { let mut cb = git2::RemoteCallbacks::new(); cb.credentials(f); let mut remote = try!(repo.remote_anonymous(&url)); let mut opts = git2::FetchOptions::new(); opts.remote_callbacks(cb) .download_tags(git2::AutotagOption::All); try!(remote.fetch(&[refspec], Some(&mut opts), None)); Ok(()) }) } cargo-0.8.0/src/cargo/sources/mod.rs000066400000000000000000000002241264656333200173140ustar00rootroot00000000000000pub use self::path::PathSource; pub use self::git::GitSource; pub use self::registry::RegistrySource; pub mod path; pub mod git; 
pub mod registry; cargo-0.8.0/src/cargo/sources/path.rs000066400000000000000000000322151264656333200174760ustar00rootroot00000000000000use std::fmt::{self, Debug, Formatter}; use std::fs; use std::io::prelude::*; use std::path::{Path, PathBuf}; use filetime::FileTime; use git2; use glob::Pattern; use core::{Package, PackageId, Summary, SourceId, Source, Dependency, Registry}; use ops; use util::{self, CargoResult, internal, internal_error, human, ChainError}; use util::Config; pub struct PathSource<'cfg> { id: SourceId, path: PathBuf, updated: bool, packages: Vec, config: &'cfg Config, } // TODO: Figure out if packages should be discovered in new or self should be // mut and packages are discovered in update impl<'cfg> PathSource<'cfg> { pub fn for_path(path: &Path, config: &'cfg Config) -> CargoResult> { trace!("PathSource::for_path; path={}", path.display()); Ok(PathSource::new(path, &try!(SourceId::for_path(path)), config)) } /// Invoked with an absolute path to a directory that contains a Cargo.toml. /// The source will read the manifest and find any other packages contained /// in the directory structure reachable by the root manifest. 
pub fn new(path: &Path, id: &SourceId, config: &'cfg Config) -> PathSource<'cfg> { trace!("new; id={}", id); PathSource { id: id.clone(), path: path.to_path_buf(), updated: false, packages: Vec::new(), config: config, } } pub fn root_package(&mut self) -> CargoResult { trace!("root_package; source={:?}", self); try!(self.update()); match self.packages.iter().find(|p| p.root() == &*self.path) { Some(pkg) => Ok(pkg.clone()), None => Err(internal("no package found in source")) } } pub fn read_packages(&self) -> CargoResult> { if self.updated { Ok(self.packages.clone()) } else if (self.id.is_path() && self.id.precise().is_some()) || self.id.is_registry() { // If our source id is a path and it's listed with a precise // version, then it means that we're not allowed to have nested // dependencies (they've been rewritten to crates.io dependencies). // // If our source id is a registry dependency then crates are // published one at a time so we don't recurse as well. Note that // cargo by default doesn't package up nested dependencies but it // may do so for custom-crafted tarballs. // // In these cases we specifically read just one package, not a list // of packages. let path = self.path.join("Cargo.toml"); let (pkg, _) = try!(ops::read_package(&path, &self.id, self.config)); Ok(vec![pkg]) } else { ops::read_packages(&self.path, &self.id, self.config) } } /// List all files relevant to building this package inside this source. /// /// This function will use the appropriate methods to determine the /// set of files underneath this source's directory which are relevant for /// building `pkg`. /// /// The basic assumption of this method is that all files in the directory /// are relevant for building this package, but it also contains logic to /// use other methods like .gitignore to filter the list of files. 
pub fn list_files(&self, pkg: &Package) -> CargoResult> { let root = pkg.root(); let parse = |p: &String| { Pattern::new(p).map_err(|e| { human(format!("could not parse pattern `{}`: {}", p, e)) }) }; let exclude = try!(pkg.manifest().exclude().iter() .map(|p| parse(p)).collect::, _>>()); let include = try!(pkg.manifest().include().iter() .map(|p| parse(p)).collect::, _>>()); let mut filter = |p: &Path| { let relative_path = util::without_prefix(p, &root).unwrap(); include.iter().any(|p| p.matches_path(&relative_path)) || { include.len() == 0 && !exclude.iter().any(|p| p.matches_path(&relative_path)) } }; // If this package is a git repository, then we really do want to query // the git repository as it takes into account items such as .gitignore. // We're not quite sure where the git repository is, however, so we do a // bit of a probe. // // We check all packages in this source that are ancestors of the // specified package (including the same package) to see if they're at // the root of the git repository. This isn't always true, but it'll get // us there most of the time! let repo = self.packages.iter() .map(|pkg| pkg.root()) .filter(|path| root.starts_with(path)) .filter_map(|path| git2::Repository::open(&path).ok()) .next(); match repo { Some(repo) => self.list_files_git(pkg, repo, &mut filter), None => self.list_files_walk(pkg, &mut filter), } } fn list_files_git(&self, pkg: &Package, repo: git2::Repository, filter: &mut FnMut(&Path) -> bool) -> CargoResult> { warn!("list_files_git {}", pkg.package_id()); let index = try!(repo.index()); let root = try!(repo.workdir().chain_error(|| { internal_error("Can't list files on a bare repository.", "") })); let pkg_path = pkg.root(); let mut ret = Vec::new(); // We use information from the git repository to guide us in traversing // its tree. The primary purpose of this is to take advantage of the // .gitignore and auto-ignore files that don't matter. 
// // Here we're also careful to look at both tracked and untracked files as // the untracked files are often part of a build and may become relevant // as part of a future commit. let index_files = index.iter().map(|entry| { use libgit2_sys::GIT_FILEMODE_COMMIT; let is_dir = entry.mode == GIT_FILEMODE_COMMIT as u32; (join(&root, &entry.path), Some(is_dir)) }); let mut opts = git2::StatusOptions::new(); opts.include_untracked(true); if let Some(suffix) = util::without_prefix(pkg_path, &root) { opts.pathspec(suffix); } let statuses = try!(repo.statuses(Some(&mut opts))); let untracked = statuses.iter().filter_map(|entry| { match entry.status() { git2::STATUS_WT_NEW => Some((join(&root, entry.path_bytes()), None)), _ => None } }); 'outer: for (file_path, is_dir) in index_files.chain(untracked) { let file_path = try!(file_path); // Filter out files outside this package. if !file_path.starts_with(pkg_path) { continue } // Filter out Cargo.lock and target always { let fname = file_path.file_name().and_then(|s| s.to_str()); if fname == Some("Cargo.lock") { continue } if fname == Some("target") { continue } } // Filter out sub-packages of this package for other_pkg in self.packages.iter().filter(|p| *p != pkg) { let other_path = other_pkg.root(); if other_path.starts_with(pkg_path) && file_path.starts_with(other_path) { continue 'outer; } } let is_dir = is_dir.or_else(|| { fs::metadata(&file_path).ok().map(|m| m.is_dir()) }).unwrap_or(false); if is_dir { warn!(" found submodule {}", file_path.display()); let rel = util::without_prefix(&file_path, &root).unwrap(); let rel = try!(rel.to_str().chain_error(|| { human(format!("invalid utf-8 filename: {}", rel.display())) })); // Git submodules are currently only named through `/` path // separators, explicitly not `\` which windows uses. Who knew? 
let rel = rel.replace(r"\", "/"); match repo.find_submodule(&rel).and_then(|s| s.open()) { Ok(repo) => { let files = try!(self.list_files_git(pkg, repo, filter)); ret.extend(files.into_iter()); } Err(..) => { try!(PathSource::walk(&file_path, &mut ret, false, filter)); } } } else if (*filter)(&file_path) { // We found a file! warn!(" found {}", file_path.display()); ret.push(file_path); } } return Ok(ret); #[cfg(unix)] fn join(path: &Path, data: &[u8]) -> CargoResult { use std::os::unix::prelude::*; use std::ffi::OsStr; Ok(path.join(::from_bytes(data))) } #[cfg(windows)] fn join(path: &Path, data: &[u8]) -> CargoResult { use std::str; match str::from_utf8(data) { Ok(s) => Ok(path.join(s)), Err(..) => Err(internal("cannot process path in git with a non \ unicode filename")), } } } fn list_files_walk(&self, pkg: &Package, filter: &mut FnMut(&Path) -> bool) -> CargoResult> { let mut ret = Vec::new(); for pkg in self.packages.iter().filter(|p| *p == pkg) { let loc = pkg.root(); try!(PathSource::walk(loc, &mut ret, true, filter)); } return Ok(ret); } fn walk(path: &Path, ret: &mut Vec, is_root: bool, filter: &mut FnMut(&Path) -> bool) -> CargoResult<()> { if !fs::metadata(&path).map(|m| m.is_dir()).unwrap_or(false) { if (*filter)(path) { ret.push(path.to_path_buf()); } return Ok(()) } // Don't recurse into any sub-packages that we have if !is_root && fs::metadata(&path.join("Cargo.toml")).is_ok() { return Ok(()) } for dir in try!(fs::read_dir(path)) { let dir = try!(dir).path(); let name = dir.file_name().and_then(|s| s.to_str()); // Skip dotfile directories if name.map(|s| s.starts_with(".")) == Some(true) { continue } else if is_root { // Skip cargo artifacts match name { Some("target") | Some("Cargo.lock") => continue, _ => {} } } try!(PathSource::walk(&dir, ret, false, filter)); } return Ok(()) } } impl<'cfg> Debug for PathSource<'cfg> { fn fmt(&self, f: &mut Formatter) -> fmt::Result { write!(f, "the paths source") } } impl<'cfg> Registry for PathSource<'cfg> { fn 
query(&mut self, dep: &Dependency) -> CargoResult> { self.packages.query(dep) } } impl<'cfg> Source for PathSource<'cfg> { fn update(&mut self) -> CargoResult<()> { if !self.updated { let packages = try!(self.read_packages()); self.packages.extend(packages.into_iter()); self.updated = true; } Ok(()) } fn download(&mut self, _: &[PackageId]) -> CargoResult<()>{ // TODO: assert! that the PackageId is contained by the source Ok(()) } fn get(&self, ids: &[PackageId]) -> CargoResult> { trace!("getting packages; ids={:?}", ids); Ok(self.packages.iter() .filter(|pkg| ids.iter().any(|id| pkg.package_id() == id)) .map(|pkg| pkg.clone()) .collect()) } fn fingerprint(&self, pkg: &Package) -> CargoResult { if !self.updated { return Err(internal_error("BUG: source was not updated", "")); } let mut max = FileTime::zero(); let mut max_path = PathBuf::from(""); for file in try!(self.list_files(pkg)) { // An fs::stat error here is either because path is a // broken symlink, a permissions error, or a race // condition where this path was rm'ed - either way, // we can ignore the error and treat the path's mtime // as 0. let mtime = fs::metadata(&file).map(|meta| { FileTime::from_last_modification_time(&meta) }).unwrap_or(FileTime::zero()); warn!("{} {}", mtime, file.display()); if mtime > max { max = mtime; max_path = file; } } trace!("fingerprint {}: {}", self.path.display(), max); Ok(format!("{} ({})", max, max_path.display())) } } cargo-0.8.0/src/cargo/sources/registry.rs000066400000000000000000000553571264656333200204260ustar00rootroot00000000000000//! A `Source` for registry-based packages. //! //! # What's a Registry? //! //! Registries are central locations where packages can be uploaded to, //! discovered, and searched for. The purpose of a registry is to have a //! location that serves as permanent storage for versions of a crate over time. //! //! Compared to git sources, a registry provides many packages as well as many //! versions simultaneously. 
Git sources can also have commits deleted through //! rebasings where registries cannot have their versions deleted. //! //! # The Index of a Registry //! //! One of the major difficulties with a registry is that hosting so many //! packages may quickly run into performance problems when dealing with //! dependency graphs. It's infeasible for cargo to download the entire contents //! of the registry just to resolve one package's dependencies, for example. As //! a result, cargo needs some efficient method of querying what packages are //! available on a registry, what versions are available, and what the //! dependencies for each version is. //! //! One method of doing so would be having the registry expose an HTTP endpoint //! which can be queried with a list of packages and a response of their //! dependencies and versions is returned. This is somewhat inefficient however //! as we may have to hit the endpoint many times and we may have already //! queried for much of the data locally already (for other packages, for //! example). This also involves inventing a transport format between the //! registry and Cargo itself, so this route was not taken. //! //! Instead, Cargo communicates with registries through a git repository //! referred to as the Index. The Index of a registry is essentially an easily //! query-able version of the registry's database for a list of versions of a //! package as well as a list of dependencies for each version. //! //! Using git to host this index provides a number of benefits: //! //! * The entire index can be stored efficiently locally on disk. This means //! that all queries of a registry can happen locally and don't need to touch //! the network. //! //! * Updates of the index are quite efficient. Using git buys incremental //! updates, compressed transmission, etc for free. The index must be updated //! each time we need fresh information from a registry, but this is one //! 
update of a git repository that probably hasn't changed a whole lot so //! it shouldn't be too expensive. //! //! Additionally, each modification to the index is just appending a line at //! the end of a file (the exact format is described later). This means that //! the commits for an index are quite small and easily applied/compressable. //! //! ## The format of the Index //! //! The index is a store for the list of versions for all packages known, so its //! format on disk is optimized slightly to ensure that `ls registry` doesn't //! produce a list of all packages ever known. The index also wants to ensure //! that there's not a million files which may actually end up hitting //! filesystem limits at some point. To this end, a few decisions were made //! about the format of the registry: //! //! 1. Each crate will have one file corresponding to it. Each version for a //! crate will just be a line in this file. //! 2. There will be two tiers of directories for crate names, under which //! crates corresponding to those tiers will be located. //! //! As an example, this is an example hierarchy of an index: //! //! ```notrust //! . //! ├── 3 //! │   └── u //! │   └── url //! ├── bz //! │   └── ip //! │   └── bzip2 //! ├── config.json //! ├── en //! │   └── co //! │   └── encoding //! └── li //!    ├── bg //!    │   └── libgit2 //!    └── nk //!    └── link-config //! ``` //! //! The root of the index contains a `config.json` file with a few entries //! corresponding to the registry (see `RegistryConfig` below). //! //! Otherwise, there are three numbered directories (1, 2, 3) for crates with //! names 1, 2, and 3 characters in length. The 1/2 directories simply have the //! crate files underneath them, while the 3 directory is sharded by the first //! letter of the crate name. //! //! Otherwise the top-level directory contains many two-letter directory names, //! each of which has many sub-folders with two letters. At the end of all these //! 
are the actual crate files themselves. //! //! The purpose of this layout is to hopefully cut down on `ls` sizes as well as //! efficient lookup based on the crate name itself. //! //! ## Crate files //! //! Each file in the index is the history of one crate over time. Each line in //! the file corresponds to one version of a crate, stored in JSON format (see //! the `RegistryPackage` structure below). //! //! As new versions are published, new lines are appended to this file. The only //! modifications to this file that should happen over time are yanks of a //! particular version. //! //! # Downloading Packages //! //! The purpose of the Index was to provide an efficient method to resolve the //! dependency graph for a package. So far we only required one network //! interaction to update the registry's repository (yay!). After resolution has //! been performed, however we need to download the contents of packages so we //! can read the full manifest and build the source code. //! //! To accomplish this, this source's `download` method will make an HTTP //! request per-package requested to download tarballs into a local cache. These //! tarballs will then be unpacked into a destination folder. //! //! Note that because versions uploaded to the registry are frozen forever that //! the HTTP download and unpacking can all be skipped if the version has //! already been downloaded and unpacked. This caching allows us to only //! download a package when absolutely necessary. //! //! # Filesystem Hierarchy //! //! Overall, the `$HOME/.cargo` looks like this when talking about the registry: //! //! ```notrust //! # A folder under which all registry metadata is hosted (similar to //! # $HOME/.cargo/git) //! $HOME/.cargo/registry/ //! //! # For each registry that cargo knows about (keyed by hostname + hash) //! # there is a folder which is the checked out version of the index for //! # the registry in this location. Note that this is done so cargo can //! 
# support multiple registries simultaneously //! index/ //! registry1-/ //! registry2-/ //! ... //! //! # This folder is a cache for all downloaded tarballs from a registry. //! # Once downloaded and verified, a tarball never changes. //! cache/ //! registry1-/-.crate //! ... //! //! # Location in which all tarballs are unpacked. Each tarball is known to //! # be frozen after downloading, so transitively this folder is also //! # frozen once its unpacked (it's never unpacked again) //! src/ //! registry1-/-/... //! ... //! ``` use std::collections::HashMap; use std::fs::{self, File}; use std::io::prelude::*; use std::path::PathBuf; use curl::http; use flate2::read::GzDecoder; use git2; use rustc_serialize::hex::ToHex; use rustc_serialize::json; use tar::Archive; use url::Url; use core::{Source, SourceId, PackageId, Package, Summary, Registry}; use core::dependency::{Dependency, DependencyInner, Kind}; use sources::{PathSource, git}; use util::{CargoResult, Config, internal, ChainError, ToUrl, human}; use util::{hex, Sha256, paths}; use ops; static DEFAULT: &'static str = "https://github.com/rust-lang/crates.io-index"; pub struct RegistrySource<'cfg> { source_id: SourceId, checkout_path: PathBuf, cache_path: PathBuf, src_path: PathBuf, config: &'cfg Config, handle: Option, sources: HashMap>, hashes: HashMap<(String, String), String>, // (name, vers) => cksum cache: HashMap>, updated: bool, } #[derive(RustcDecodable)] pub struct RegistryConfig { /// Download endpoint for all crates. This will be appended with /// `///download` and then will be hit with an HTTP GET /// request to download the tarball for a crate. pub dl: String, /// API endpoint for the registry. This is what's actually hit to perform /// operations like yanks, owner modifications, publish new crates, etc. 
pub api: String, } #[derive(RustcDecodable)] struct RegistryPackage { name: String, vers: String, deps: Vec, features: HashMap>, cksum: String, yanked: Option, } #[derive(RustcDecodable)] struct RegistryDependency { name: String, req: String, features: Vec, optional: bool, default_features: bool, target: Option, kind: Option, } impl<'cfg> RegistrySource<'cfg> { pub fn new(source_id: &SourceId, config: &'cfg Config) -> RegistrySource<'cfg> { let hash = hex::short_hash(source_id); let ident = source_id.url().host().unwrap().to_string(); let part = format!("{}-{}", ident, hash); RegistrySource { checkout_path: config.registry_index_path().join(&part), cache_path: config.registry_cache_path().join(&part), src_path: config.registry_source_path().join(&part), config: config, source_id: source_id.clone(), handle: None, sources: HashMap::new(), hashes: HashMap::new(), cache: HashMap::new(), updated: false, } } /// Get the configured default registry URL. /// /// This is the main cargo registry by default, but it can be overridden in /// a .cargo/config pub fn url(config: &Config) -> CargoResult { let config = try!(ops::registry_configuration(config)); let url = config.index.unwrap_or(DEFAULT.to_string()); url.to_url().map_err(human) } /// Get the default url for the registry pub fn default_url() -> String { DEFAULT.to_string() } /// Decode the configuration stored within the registry. /// /// This requires that the index has been at least checked out. pub fn config(&self) -> CargoResult { let contents = try!(paths::read(&self.checkout_path.join("config.json"))); let config = try!(json::decode(&contents)); Ok(config) } /// Open the git repository for the index of the registry. /// /// This will attempt to open an existing checkout, and failing that it will /// initialize a fresh new directory and git checkout. No remotes will be /// configured by default. fn open(&self) -> CargoResult { match git2::Repository::open(&self.checkout_path) { Ok(repo) => return Ok(repo), Err(..) 
=> {} } try!(fs::create_dir_all(&self.checkout_path)); let _ = fs::remove_dir_all(&self.checkout_path); let repo = try!(git2::Repository::init(&self.checkout_path)); Ok(repo) } /// Download the given package from the given url into the local cache. /// /// This will perform the HTTP request to fetch the package. This function /// will only succeed if the HTTP download was successful and the file is /// then ready for inspection. /// /// No action is taken if the package is already downloaded. fn download_package(&mut self, pkg: &PackageId, url: &Url) -> CargoResult { // TODO: should discover filename from the S3 redirect let filename = format!("{}-{}.crate", pkg.name(), pkg.version()); let dst = self.cache_path.join(&filename); if fs::metadata(&dst).is_ok() { return Ok(dst) } try!(self.config.shell().status("Downloading", pkg)); try!(fs::create_dir_all(dst.parent().unwrap())); let expected_hash = try!(self.hash(pkg)); let handle = match self.handle { Some(ref mut handle) => handle, None => { self.handle = Some(try!(ops::http_handle(self.config))); self.handle.as_mut().unwrap() } }; // TODO: don't download into memory (curl-rust doesn't expose it) let resp = try!(handle.get(url.to_string()).follow_redirects(true).exec()); if resp.get_code() != 200 && resp.get_code() != 0 { return Err(internal(format!("failed to get 200 response from {}\n{}", url, resp))) } // Verify what we just downloaded let actual = { let mut state = Sha256::new(); state.update(resp.get_body()); state.finish() }; if actual.to_hex() != expected_hash { bail!("failed to verify the checksum of `{}`", pkg) } try!(paths::write(&dst, resp.get_body())); Ok(dst) } /// Return the hash listed for a specified PackageId. fn hash(&mut self, pkg: &PackageId) -> CargoResult { let key = (pkg.name().to_string(), pkg.version().to_string()); if let Some(s) = self.hashes.get(&key) { return Ok(s.clone()) } // Ok, we're missing the key, so parse the index file to load it. 
try!(self.summaries(pkg.name())); self.hashes.get(&key).chain_error(|| { internal(format!("no hash listed for {}", pkg)) }).map(|s| s.clone()) } /// Unpacks a downloaded package into a location where it's ready to be /// compiled. /// /// No action is taken if the source looks like it's already unpacked. fn unpack_package(&self, pkg: &PackageId, tarball: PathBuf) -> CargoResult { let dst = self.src_path.join(&format!("{}-{}", pkg.name(), pkg.version())); if fs::metadata(&dst.join(".cargo-ok")).is_ok() { return Ok(dst) } try!(fs::create_dir_all(dst.parent().unwrap())); let f = try!(File::open(&tarball)); let gz = try!(GzDecoder::new(f)); let mut tar = Archive::new(gz); try!(tar.unpack(dst.parent().unwrap())); try!(File::create(&dst.join(".cargo-ok"))); Ok(dst) } /// Parse the on-disk metadata for the package provided pub fn summaries(&mut self, name: &str) -> CargoResult<&Vec<(Summary, bool)>> { if self.cache.contains_key(name) { return Ok(self.cache.get(name).unwrap()); } // see module comment for why this is structured the way it is let path = self.checkout_path.clone(); let fs_name = name.chars().flat_map(|c| c.to_lowercase()).collect::(); let path = match fs_name.len() { 1 => path.join("1").join(&fs_name), 2 => path.join("2").join(&fs_name), 3 => path.join("3").join(&fs_name[..1]).join(&fs_name), _ => path.join(&fs_name[0..2]) .join(&fs_name[2..4]) .join(&fs_name), }; let summaries = match File::open(&path) { Ok(mut f) => { let mut contents = String::new(); try!(f.read_to_string(&mut contents)); let ret: CargoResult>; ret = contents.lines().filter(|l| l.trim().len() > 0) .map(|l| self.parse_registry_package(l)) .collect(); try!(ret.chain_error(|| { internal(format!("failed to parse registry's information \ for: {}", name)) })) } Err(..) 
=> Vec::new(), }; let summaries = summaries.into_iter().filter(|summary| { summary.0.package_id().name() == name }).collect(); self.cache.insert(name.to_string(), summaries); Ok(self.cache.get(name).unwrap()) } /// Parse a line from the registry's index file into a Summary for a /// package. /// /// The returned boolean is whether or not the summary has been yanked. fn parse_registry_package(&mut self, line: &str) -> CargoResult<(Summary, bool)> { let RegistryPackage { name, vers, cksum, deps, features, yanked } = try!(json::decode::(line)); let pkgid = try!(PackageId::new(&name, &vers, &self.source_id)); let deps: CargoResult> = deps.into_iter().map(|dep| { self.parse_registry_dependency(dep) }).collect(); let deps = try!(deps); self.hashes.insert((name, vers), cksum); Ok((try!(Summary::new(pkgid, deps, features)), yanked.unwrap_or(false))) } /// Converts an encoded dependency in the registry to a cargo dependency fn parse_registry_dependency(&self, dep: RegistryDependency) -> CargoResult { let RegistryDependency { name, req, features, optional, default_features, target, kind } = dep; let dep = try!(DependencyInner::parse(&name, Some(&req), &self.source_id)); let kind = match kind.as_ref().map(|s| &s[..]).unwrap_or("") { "dev" => Kind::Development, "build" => Kind::Build, _ => Kind::Normal, }; // Unfortunately older versions of cargo and/or the registry ended up // publishing lots of entries where the features array contained the // empty feature, "", inside. This confuses the resolution process much // later on and these features aren't actually valid, so filter them all // out here. 
let features = features.into_iter().filter(|s| !s.is_empty()).collect(); Ok(dep.set_optional(optional) .set_default_features(default_features) .set_features(features) .set_only_for_platform(target) .set_kind(kind) .into_dependency()) } /// Actually perform network operations to update the registry fn do_update(&mut self) -> CargoResult<()> { if self.updated { return Ok(()) } try!(self.config.shell().status("Updating", format!("registry `{}`", self.source_id.url()))); let repo = try!(self.open()); // git fetch origin let url = self.source_id.url().to_string(); let refspec = "refs/heads/*:refs/remotes/origin/*"; try!(git::fetch(&repo, &url, refspec).chain_error(|| { internal(format!("failed to fetch `{}`", url)) })); // git reset --hard origin/master let reference = "refs/remotes/origin/master"; let oid = try!(repo.refname_to_id(reference)); trace!("[{}] updating to rev {}", self.source_id, oid); let object = try!(repo.find_object(oid, None)); try!(repo.reset(&object, git2::ResetType::Hard, None)); self.updated = true; self.cache.clear(); Ok(()) } } impl<'cfg> Registry for RegistrySource<'cfg> { fn query(&mut self, dep: &Dependency) -> CargoResult> { // If this is a precise dependency, then it came from a lockfile and in // theory the registry is known to contain this version. If, however, we // come back with no summaries, then our registry may need to be // updated, so we fall back to performing a lazy update. if dep.source_id().precise().is_some() { let mut summaries = try!(self.summaries(dep.name())).iter().map(|s| { s.0.clone() }).collect::>(); if try!(summaries.query(dep)).len() == 0 { try!(self.do_update()); } } let mut summaries = { let summaries = try!(self.summaries(dep.name())); summaries.iter().filter(|&&(_, yanked)| { dep.source_id().precise().is_some() || !yanked }).map(|s| s.0.clone()).collect::>() }; // Handle `cargo update --precise` here. 
If specified, our own source // will have a precise version listed of the form `=` where // `` is the name of a crate on this source and `` is the // version requested (agument to `--precise`). summaries.retain(|s| { match self.source_id.precise() { Some(p) if p.starts_with(dep.name()) => { let vers = &p[dep.name().len() + 1..]; s.version().to_string() == vers } _ => true, } }); summaries.query(dep) } } impl<'cfg> Source for RegistrySource<'cfg> { fn update(&mut self) -> CargoResult<()> { // If we have an imprecise version then we don't know what we're going // to look for, so we always attempt to perform an update here. // // If we have a precise version, then we'll update lazily during the // querying phase. Note that precise in this case is only // `Some("locked")` as other `Some` values indicate a `cargo update // --precise` request if self.source_id.precise() != Some("locked") { try!(self.do_update()); } Ok(()) } fn download(&mut self, packages: &[PackageId]) -> CargoResult<()> { let config = try!(self.config()); let url = try!(config.dl.to_url().map_err(internal)); for package in packages.iter() { if self.source_id != *package.source_id() { continue } if self.sources.contains_key(package) { continue } let mut url = url.clone(); url.path_mut().unwrap().push(package.name().to_string()); url.path_mut().unwrap().push(package.version().to_string()); url.path_mut().unwrap().push("download".to_string()); let path = try!(self.download_package(package, &url).chain_error(|| { internal(format!("failed to download package `{}` from {}", package, url)) })); let path = try!(self.unpack_package(package, path).chain_error(|| { internal(format!("failed to unpack package `{}`", package)) })); let mut src = PathSource::new(&path, &self.source_id, self.config); try!(src.update()); self.sources.insert(package.clone(), src); } Ok(()) } fn get(&self, packages: &[PackageId]) -> CargoResult> { let mut ret = Vec::new(); for src in self.sources.values() { 
ret.extend(try!(src.get(packages)).into_iter()); } return Ok(ret); } fn fingerprint(&self, pkg: &Package) -> CargoResult { Ok(pkg.package_id().version().to_string()) } } cargo-0.8.0/src/cargo/util/000077500000000000000000000000001264656333200154635ustar00rootroot00000000000000cargo-0.8.0/src/cargo/util/config.rs000066400000000000000000000443601264656333200173050ustar00rootroot00000000000000use std::cell::{RefCell, RefMut, Ref, Cell}; use std::collections::hash_map::Entry::{Occupied, Vacant}; use std::collections::hash_map::{HashMap}; use std::env; use std::ffi::OsString; use std::fmt; use std::fs::{self, File}; use std::io::prelude::*; use std::mem; use std::path::{Path, PathBuf}; use rustc_serialize::{Encodable,Encoder}; use toml; use core::shell::{Verbosity, ColorConfig}; use core::{MultiShell, Package}; use util::{CargoResult, ChainError, Rustc, internal, human, paths}; use util::toml as cargo_toml; use self::ConfigValue as CV; pub struct Config { home_path: PathBuf, shell: RefCell, rustc_info: Rustc, values: RefCell>, values_loaded: Cell, cwd: PathBuf, rustc: PathBuf, rustdoc: PathBuf, target_dir: RefCell>, } impl Config { pub fn new(shell: MultiShell, cwd: PathBuf, homedir: PathBuf) -> CargoResult { let mut cfg = Config { home_path: homedir, shell: RefCell::new(shell), rustc_info: Rustc::blank(), cwd: cwd, values: RefCell::new(HashMap::new()), values_loaded: Cell::new(false), rustc: PathBuf::from("rustc"), rustdoc: PathBuf::from("rustdoc"), target_dir: RefCell::new(None), }; try!(cfg.scrape_tool_config()); try!(cfg.scrape_rustc_version()); try!(cfg.scrape_target_dir_config()); Ok(cfg) } pub fn default() -> CargoResult { let shell = ::shell(Verbosity::Verbose, ColorConfig::Auto); let cwd = try!(env::current_dir().chain_error(|| { human("couldn't get the current directory of the process") })); let homedir = try!(homedir(&cwd).chain_error(|| { human("Cargo couldn't find your home directory. 
\ This probably means that $HOME was not set.") })); Config::new(shell, cwd, homedir) } pub fn home(&self) -> &Path { &self.home_path } pub fn git_db_path(&self) -> PathBuf { self.home_path.join("git").join("db") } pub fn git_checkout_path(&self) -> PathBuf { self.home_path.join("git").join("checkouts") } pub fn registry_index_path(&self) -> PathBuf { self.home_path.join("registry").join("index") } pub fn registry_cache_path(&self) -> PathBuf { self.home_path.join("registry").join("cache") } pub fn registry_source_path(&self) -> PathBuf { self.home_path.join("registry").join("src") } pub fn shell(&self) -> RefMut { self.shell.borrow_mut() } pub fn rustc(&self) -> &Path { &self.rustc } pub fn rustdoc(&self) -> &Path { &self.rustdoc } pub fn rustc_info(&self) -> &Rustc { &self.rustc_info } pub fn values(&self) -> CargoResult>> { if !self.values_loaded.get() { try!(self.load_values()); self.values_loaded.set(true); } Ok(self.values.borrow()) } pub fn cwd(&self) -> &Path { &self.cwd } pub fn target_dir(&self, pkg: &Package) -> PathBuf { self.target_dir.borrow().clone().unwrap_or_else(|| { pkg.root().join("target") }) } pub fn set_target_dir(&self, path: &Path) { *self.target_dir.borrow_mut() = Some(path.to_owned()); } pub fn get(&self, key: &str) -> CargoResult> { let vals = try!(self.values()); let mut parts = key.split('.').enumerate(); let mut val = match vals.get(parts.next().unwrap().1) { Some(val) => val, None => return Ok(None), }; for (i, part) in parts { match *val { CV::Table(ref map, _) => { val = match map.get(part) { Some(val) => val, None => return Ok(None), } } CV::Integer(_, ref path) | CV::String(_, ref path) | CV::List(_, ref path) | CV::Boolean(_, ref path) => { let idx = key.split('.').take(i) .fold(0, |n, s| n + s.len()) + i - 1; let key_so_far = &key[..idx]; bail!("expected table for configuration key `{}`, \ but found {} in {}", key_so_far, val.desc(), path.display()) } } } Ok(Some(val.clone())) } pub fn get_string(&self, key: &str) -> 
CargoResult> { match try!(self.get(key)) { Some(CV::String(i, path)) => Ok(Some((i, path))), Some(val) => self.expected("string", key, val), None => Ok(None), } } pub fn get_path(&self, key: &str) -> CargoResult> { if let Some((specified_path, path_to_config)) = try!(self.get_string(&key)) { if specified_path.contains("/") || (cfg!(windows) && specified_path.contains("\\")) { // An absolute or a relative path let prefix_path = path_to_config.parent().unwrap().parent().unwrap(); // Joining an absolute path to any path results in the given absolute path Ok(Some(prefix_path.join(specified_path))) } else { // A pathless name Ok(Some(PathBuf::from(specified_path))) } } else { Ok(None) } } pub fn get_list(&self, key: &str) -> CargoResult, PathBuf)>> { match try!(self.get(key)) { Some(CV::List(i, path)) => Ok(Some((i, path))), Some(val) => self.expected("list", key, val), None => Ok(None), } } pub fn get_table(&self, key: &str) -> CargoResult, PathBuf)>> { match try!(self.get(key)) { Some(CV::Table(i, path)) => Ok(Some((i, path))), Some(val) => self.expected("table", key, val), None => Ok(None), } } pub fn get_i64(&self, key: &str) -> CargoResult> { match try!(self.get(key)) { Some(CV::Integer(i, path)) => Ok(Some((i, path))), Some(val) => self.expected("integer", key, val), None => Ok(None), } } pub fn expected(&self, ty: &str, key: &str, val: CV) -> CargoResult { val.expected(ty).map_err(|e| { human(format!("invalid configuration for key `{}`\n{}", key, e)) }) } fn load_values(&self) -> CargoResult<()> { let mut cfg = CV::Table(HashMap::new(), PathBuf::from(".")); try!(walk_tree(&self.cwd, |mut file, path| { let mut contents = String::new(); try!(file.read_to_string(&mut contents)); let table = try!(cargo_toml::parse(&contents, &path).chain_error(|| { human(format!("could not parse TOML configuration in `{}`", path.display())) })); let toml = toml::Value::Table(table); let value = try!(CV::from_toml(&path, toml).chain_error(|| { human(format!("failed to load TOML 
configuration from `{}`", path.display())) })); try!(cfg.merge(value)); Ok(()) }).chain_error(|| human("Couldn't load Cargo configuration"))); *self.values.borrow_mut() = match cfg { CV::Table(map, _) => map, _ => unreachable!(), }; Ok(()) } fn scrape_tool_config(&mut self) -> CargoResult<()> { self.rustc = try!(self.get_tool("rustc")); self.rustdoc = try!(self.get_tool("rustdoc")); Ok(()) } fn scrape_rustc_version(&mut self) -> CargoResult<()> { self.rustc_info = try!(Rustc::new(&self.rustc)); Ok(()) } fn scrape_target_dir_config(&mut self) -> CargoResult<()> { if let Some((dir, dir2)) = try!(self.get_string("build.target-dir")) { let mut path = PathBuf::from(dir2); path.pop(); path.pop(); path.push(dir); *self.target_dir.borrow_mut() = Some(path); } else if let Some(dir) = env::var_os("CARGO_TARGET_DIR") { *self.target_dir.borrow_mut() = Some(self.cwd.join(dir)); } Ok(()) } fn get_tool(&self, tool: &str) -> CargoResult { let var = format!("build.{}", tool); if let Some(tool_path) = try!(self.get_path(&var)) { return Ok(tool_path); } let var = tool.chars().flat_map(|c| c.to_uppercase()).collect::(); let tool = env::var_os(&var).unwrap_or_else(|| OsString::from(tool)); Ok(PathBuf::from(tool)) } } #[derive(Eq, PartialEq, Clone, RustcEncodable, RustcDecodable, Copy)] pub enum Location { Project, Global } #[derive(Eq,PartialEq,Clone,RustcDecodable)] pub enum ConfigValue { Integer(i64, PathBuf), String(String, PathBuf), List(Vec<(String, PathBuf)>, PathBuf), Table(HashMap, PathBuf), Boolean(bool, PathBuf), } impl fmt::Debug for ConfigValue { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { CV::Integer(i, ref path) => write!(f, "{} (from {})", i, path.display()), CV::Boolean(b, ref path) => write!(f, "{} (from {})", b, path.display()), CV::String(ref s, ref path) => write!(f, "{} (from {})", s, path.display()), CV::List(ref list, ref path) => { try!(write!(f, "[")); for (i, &(ref s, ref path)) in list.iter().enumerate() { if i > 0 { try!(write!(f, ", 
")); } try!(write!(f, "{} (from {})", s, path.display())); } write!(f, "] (from {})", path.display()) } CV::Table(ref table, _) => write!(f, "{:?}", table), } } } impl Encodable for ConfigValue { fn encode(&self, s: &mut S) -> Result<(), S::Error> { match *self { CV::String(ref string, _) => string.encode(s), CV::List(ref list, _) => { let list: Vec<&String> = list.iter().map(|s| &s.0).collect(); list.encode(s) } CV::Table(ref table, _) => table.encode(s), CV::Boolean(b, _) => b.encode(s), CV::Integer(i, _) => i.encode(s), } } } impl ConfigValue { fn from_toml(path: &Path, toml: toml::Value) -> CargoResult { match toml { toml::Value::String(val) => Ok(CV::String(val, path.to_path_buf())), toml::Value::Boolean(b) => Ok(CV::Boolean(b, path.to_path_buf())), toml::Value::Integer(i) => Ok(CV::Integer(i, path.to_path_buf())), toml::Value::Array(val) => { Ok(CV::List(try!(val.into_iter().map(|toml| { match toml { toml::Value::String(val) => Ok((val, path.to_path_buf())), v => Err(human(format!("expected string but found {} \ in list", v.type_str()))), } }).collect::>()), path.to_path_buf())) } toml::Value::Table(val) => { Ok(CV::Table(try!(val.into_iter().map(|(key, value)| { let value = try!(CV::from_toml(path, value).chain_error(|| { human(format!("failed to parse key `{}`", key)) })); Ok((key, value)) }).collect::>()), path.to_path_buf())) } v => bail!("found TOML configuration value of unknown type `{}`", v.type_str()), } } fn merge(&mut self, from: ConfigValue) -> CargoResult<()> { match (self, from) { (&mut CV::String(..), CV::String(..)) | (&mut CV::Integer(..), CV::Integer(..)) | (&mut CV::Boolean(..), CV::Boolean(..)) => {} (&mut CV::List(ref mut old, _), CV::List(ref mut new, _)) => { let new = mem::replace(new, Vec::new()); old.extend(new.into_iter()); } (&mut CV::Table(ref mut old, _), CV::Table(ref mut new, _)) => { let new = mem::replace(new, HashMap::new()); for (key, value) in new.into_iter() { match old.entry(key.clone()) { Occupied(mut entry) => { let 
path = value.definition_path().to_path_buf(); let entry = entry.get_mut(); try!(entry.merge(value).chain_error(|| { human(format!("failed to merge key `{}` between \ files:\n \ file 1: {}\n \ file 2: {}", key, entry.definition_path().display(), path.display())) })); } Vacant(entry) => { entry.insert(value); } }; } } (expected, found) => { return Err(internal(format!("expected {}, but found {}", expected.desc(), found.desc()))) } } Ok(()) } pub fn i64(&self) -> CargoResult<(i64, &Path)> { match *self { CV::Integer(i, ref p) => Ok((i, p)), _ => self.expected("integer"), } } pub fn string(&self) -> CargoResult<(&str, &Path)> { match *self { CV::String(ref s, ref p) => Ok((s, p)), _ => self.expected("string"), } } pub fn table(&self) -> CargoResult<(&HashMap, &Path)> { match *self { CV::Table(ref table, ref p) => Ok((table, p)), _ => self.expected("table"), } } pub fn list(&self) -> CargoResult<&[(String, PathBuf)]> { match *self { CV::List(ref list, _) => Ok(list), _ => self.expected("list"), } } pub fn boolean(&self) -> CargoResult<(bool, &Path)> { match *self { CV::Boolean(b, ref p) => Ok((b, p)), _ => self.expected("bool"), } } pub fn desc(&self) -> &'static str { match *self { CV::Table(..) => "table", CV::List(..) => "array", CV::String(..) => "string", CV::Boolean(..) => "boolean", CV::Integer(..) 
=> "integer", } } pub fn definition_path(&self) -> &Path { match *self { CV::Boolean(_, ref p) | CV::Integer(_, ref p) | CV::String(_, ref p) | CV::List(_, ref p) | CV::Table(_, ref p) => p } } fn expected(&self, wanted: &str) -> CargoResult { Err(internal(format!("expected a {}, but found a {} in {}", wanted, self.desc(), self.definition_path().display()))) } fn into_toml(self) -> toml::Value { match self { CV::Boolean(s, _) => toml::Value::Boolean(s), CV::String(s, _) => toml::Value::String(s), CV::Integer(i, _) => toml::Value::Integer(i), CV::List(l, _) => toml::Value::Array(l .into_iter() .map(|(s, _)| toml::Value::String(s)) .collect()), CV::Table(l, _) => toml::Value::Table(l.into_iter() .map(|(k, v)| (k, v.into_toml())) .collect()), } } } fn homedir(cwd: &Path) -> Option { let cargo_home = env::var_os("CARGO_HOME").map(|home| { cwd.join(home) }); let user_home = env::home_dir().map(|p| p.join(".cargo")); return cargo_home.or(user_home); } fn walk_tree(pwd: &Path, mut walk: F) -> CargoResult<()> where F: FnMut(File, &Path) -> CargoResult<()> { let mut current = pwd; loop { let possible = current.join(".cargo").join("config"); if fs::metadata(&possible).is_ok() { let file = try!(File::open(&possible)); try!(walk(file, &possible)); } match current.parent() { Some(p) => current = p, None => break, } } // Once we're done, also be sure to walk the home directory even if it's not // in our history to be sure we pick up that standard location for // information. let home = try!(homedir(pwd).chain_error(|| { human("Cargo couldn't find your home directory. \ This probably means that $HOME was not set.") })); if !pwd.starts_with(&home) { let config = home.join("config"); if fs::metadata(&config).is_ok() { let file = try!(File::open(&config)); try!(walk(file, &config)); } } Ok(()) } pub fn set_config(cfg: &Config, loc: Location, key: &str, value: ConfigValue) -> CargoResult<()> { // TODO: There are a number of drawbacks here // // 1. Project is unimplemented // 2. 
This blows away all comments in a file // 3. This blows away the previous ordering of a file. let file = match loc { Location::Global => cfg.home_path.join("config"), Location::Project => unimplemented!(), }; try!(fs::create_dir_all(file.parent().unwrap())); let contents = paths::read(&file).unwrap_or(String::new()); let mut toml = try!(cargo_toml::parse(&contents, &file)); toml.insert(key.to_string(), value.into_toml()); let contents = toml::Value::Table(toml).to_string(); try!(paths::write(&file, contents.as_bytes())); Ok(()) } cargo-0.8.0/src/cargo/util/dependency_queue.rs000066400000000000000000000112031264656333200213500ustar00rootroot00000000000000//! A graph-like structure used to represent a set of dependencies and in what //! order they should be built. //! //! This structure is used to store the dependency graph and dynamically update //! it to figure out when a dependency should be built. use std::collections::hash_map::Entry::{Occupied, Vacant}; use std::collections::{HashMap, HashSet}; use std::hash::Hash; pub use self::Freshness::{Fresh, Dirty}; #[derive(Debug)] pub struct DependencyQueue { /// A list of all known keys to build. /// /// The value of the hash map is list of dependencies which still need to be /// built before the package can be built. Note that the set is dynamically /// updated as more dependencies are built. dep_map: HashMap, V)>, /// A reverse mapping of a package to all packages that depend on that /// package. /// /// This map is statically known and does not get updated throughout the /// lifecycle of the DependencyQueue. reverse_dep_map: HashMap>, /// A set of dirty packages. /// /// Packages may become dirty over time if their dependencies are rebuilt. dirty: HashSet, /// The packages which are currently being built, waiting for a call to /// `finish`. pending: HashSet, } /// Indication of the freshness of a package. 
/// /// A fresh package does not necessarily need to be rebuilt (unless a dependency /// was also rebuilt), and a dirty package must always be rebuilt. #[derive(PartialEq, Eq, Debug, Clone, Copy)] pub enum Freshness { Fresh, Dirty, } /// A trait for discovering the dependencies of a piece of data. pub trait Dependency: Hash + Eq + Clone { type Context; fn dependencies(&self, cx: &Self::Context) -> Vec; } impl Freshness { pub fn combine(&self, other: Freshness) -> Freshness { match *self { Fresh => other, Dirty => Dirty } } } impl DependencyQueue { /// Creates a new dependency queue with 0 packages. pub fn new() -> DependencyQueue { DependencyQueue { dep_map: HashMap::new(), reverse_dep_map: HashMap::new(), dirty: HashSet::new(), pending: HashSet::new(), } } /// Adds a new package to this dependency queue. /// /// It is assumed that any dependencies of this package will eventually also /// be added to the dependency queue. pub fn queue(&mut self, cx: &K::Context, fresh: Freshness, key: K, value: V) -> &mut V { let slot = match self.dep_map.entry(key.clone()) { Occupied(v) => return &mut v.into_mut().1, Vacant(v) => v, }; if fresh == Dirty { self.dirty.insert(key.clone()); } let mut my_dependencies = HashSet::new(); for dep in key.dependencies(cx).into_iter() { assert!(my_dependencies.insert(dep.clone())); let rev = self.reverse_dep_map.entry(dep).or_insert(HashSet::new()); assert!(rev.insert(key.clone())); } &mut slot.insert((my_dependencies, value)).1 } /// Dequeues a package that is ready to be built. /// /// A package is ready to be built when it has 0 un-built dependencies. If /// `None` is returned then no packages are ready to be built. 
pub fn dequeue(&mut self) -> Option<(Freshness, K, V)> { let key = match self.dep_map.iter() .find(|&(_, &(ref deps, _))| deps.len() == 0) .map(|(key, _)| key.clone()) { Some(key) => key, None => return None }; let (_, data) = self.dep_map.remove(&key).unwrap(); let fresh = if self.dirty.contains(&key) {Dirty} else {Fresh}; self.pending.insert(key.clone()); Some((fresh, key, data)) } /// Returns the number of remaining packages to be built. pub fn len(&self) -> usize { self.dep_map.len() + self.pending.len() } /// Indicate that a package has been built. /// /// This function will update the dependency queue with this information, /// possibly allowing the next invocation of `dequeue` to return a package. pub fn finish(&mut self, key: &K, fresh: Freshness) { assert!(self.pending.remove(key)); let reverse_deps = match self.reverse_dep_map.get(key) { Some(deps) => deps, None => return, }; for dep in reverse_deps.iter() { if fresh == Dirty { self.dirty.insert(dep.clone()); } assert!(self.dep_map.get_mut(dep).unwrap().0.remove(key)); } } } cargo-0.8.0/src/cargo/util/errors.rs000066400000000000000000000251551264656333200173550ustar00rootroot00000000000000use std::error::Error; use std::ffi; use std::fmt; use std::io; use std::process::{Output, ExitStatus}; use std::str; use semver; use rustc_serialize::json; use curl; use git2; use toml; use url; pub type CargoResult = Result>; // ============================================================================= // CargoError trait pub trait CargoError: Error + Send + 'static { fn is_human(&self) -> bool { false } fn cargo_cause(&self) -> Option<&CargoError>{ None } } impl Error for Box { fn description(&self) -> &str { (**self).description() } fn cause(&self) -> Option<&Error> { (**self).cause() } } impl CargoError for Box { fn is_human(&self) -> bool { (**self).is_human() } fn cargo_cause(&self) -> Option<&CargoError> { (**self).cargo_cause() } } // 
============================================================================= // Chaining errors pub trait ChainError { fn chain_error(self, callback: F) -> CargoResult where E: CargoError, F: FnOnce() -> E; } #[derive(Debug)] struct ChainedError { error: E, cause: Box, } impl<'a, T, F> ChainError for F where F: FnOnce() -> CargoResult { fn chain_error(self, callback: C) -> CargoResult where E: CargoError, C: FnOnce() -> E { self().chain_error(callback) } } impl ChainError for Result { fn chain_error(self, callback: C) -> CargoResult where E2: CargoError, C: FnOnce() -> E2 { self.map_err(move |err| { Box::new(ChainedError { error: callback(), cause: Box::new(err), }) as Box }) } } impl ChainError for Box { fn chain_error(self, callback: C) -> CargoResult where E2: CargoError, C: FnOnce() -> E2 { Err(Box::new(ChainedError { error: callback(), cause: self, })) } } impl ChainError for Option { fn chain_error(self, callback: C) -> CargoResult where E: CargoError, C: FnOnce() -> E { match self { Some(t) => Ok(t), None => Err(Box::new(callback())), } } } impl Error for ChainedError { fn description(&self) -> &str { self.error.description() } } impl fmt::Display for ChainedError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(&self.error, f) } } impl CargoError for ChainedError { fn is_human(&self) -> bool { self.error.is_human() } fn cargo_cause(&self) -> Option<&CargoError> { Some(&*self.cause) } } // ============================================================================= // Process errors pub struct ProcessError { pub desc: String, pub exit: Option, pub output: Option, cause: Option, } impl Error for ProcessError { fn description(&self) -> &str { &self.desc } fn cause(&self) -> Option<&Error> { self.cause.as_ref().map(|s| s as &Error) } } impl fmt::Display for ProcessError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(&self.desc, f) } } impl fmt::Debug for ProcessError { fn fmt(&self, f: &mut fmt::Formatter) 
-> fmt::Result { fmt::Display::fmt(self, f) } } // ============================================================================= // Cargo test errors. /// Error when testcases fail pub struct CargoTestError { pub desc: String, pub exit: Option, pub causes: Vec, } impl CargoTestError { #[allow(deprecated)] // connect => join in 1.3 pub fn new(errors: Vec) -> Self { if errors.len() == 0 { panic!("Cannot create CargoTestError from empty Vec") } let desc = errors.iter().map(|error| error.desc.clone()) .collect::>() .connect("\n"); CargoTestError { desc: desc, exit: errors[0].exit, causes: errors, } } } impl fmt::Display for CargoTestError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(&self.desc, f) } } impl fmt::Debug for CargoTestError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(self, f) } } impl Error for CargoTestError { fn description(&self) -> &str { &self.desc } fn cause(&self) -> Option<&Error> { self.causes.get(0).map(|s| s as &Error) } } // ============================================================================= // Concrete errors struct ConcreteCargoError { description: String, detail: Option, cause: Option>, is_human: bool, } impl fmt::Display for ConcreteCargoError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { try!(write!(f, "{}", self.description)); if let Some(ref s) = self.detail { try!(write!(f, " ({})", s)); } Ok(()) } } impl fmt::Debug for ConcreteCargoError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(self, f) } } impl Error for ConcreteCargoError { fn description(&self) -> &str { &self.description } fn cause(&self) -> Option<&Error> { self.cause.as_ref().map(|c| { let e: &Error = &**c; e }) } } impl CargoError for ConcreteCargoError { fn is_human(&self) -> bool { self.is_human } } // ============================================================================= // Human errors #[derive(Debug)] pub struct Human(pub E); impl Error for Human { fn 
description(&self) -> &str { self.0.description() } fn cause(&self) -> Option<&Error> { self.0.cause() } } impl fmt::Display for Human { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(&self.0, f) } } impl CargoError for Human { fn is_human(&self) -> bool { true } fn cargo_cause(&self) -> Option<&CargoError> { self.0.cargo_cause() } } // ============================================================================= // CLI errors pub type CliResult = Result; #[derive(Debug)] pub struct CliError { pub error: Box, pub unknown: bool, pub exit_code: i32 } impl Error for CliError { fn description(&self) -> &str { self.error.description() } fn cause(&self) -> Option<&Error> { self.error.cause() } } impl fmt::Display for CliError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(&self.error, f) } } impl CliError { pub fn new(error: &str, code: i32) -> CliError { let error = human(error.to_string()); CliError::from_boxed(error, code) } pub fn from_error(error: E, code: i32) -> CliError { let error = Box::new(error); CliError::from_boxed(error, code) } pub fn from_boxed(error: Box, code: i32) -> CliError { let human = error.is_human(); CliError { error: error, exit_code: code, unknown: !human } } } impl From> for CliError { fn from(err: Box) -> CliError { CliError::from_boxed(err, 101) } } // ============================================================================= // various impls macro_rules! from_error { ($($p:ty,)*) => ( $(impl From<$p> for Box { fn from(t: $p) -> Box { Box::new(t) } })* ) } from_error! 
{ semver::ReqParseError, io::Error, ProcessError, git2::Error, json::DecoderError, json::EncoderError, curl::ErrCode, CliError, toml::Error, url::ParseError, toml::DecodeError, ffi::NulError, } impl From> for Box { fn from(t: Human) -> Box { Box::new(t) } } impl CargoError for semver::ReqParseError {} impl CargoError for io::Error {} impl CargoError for git2::Error {} impl CargoError for json::DecoderError {} impl CargoError for json::EncoderError {} impl CargoError for curl::ErrCode {} impl CargoError for ProcessError {} impl CargoError for CargoTestError {} impl CargoError for CliError {} impl CargoError for toml::Error {} impl CargoError for toml::DecodeError {} impl CargoError for url::ParseError {} impl CargoError for ffi::NulError {} // ============================================================================= // Construction helpers pub fn process_error(msg: &str, cause: Option, status: Option<&ExitStatus>, output: Option<&Output>) -> ProcessError { let exit = match status { Some(s) => s.to_string(), None => "never executed".to_string(), }; let mut desc = format!("{} ({})", &msg, exit); if let Some(out) = output { match str::from_utf8(&out.stdout) { Ok(s) if s.trim().len() > 0 => { desc.push_str("\n--- stdout\n"); desc.push_str(s); } Ok(..) | Err(..) => {} } match str::from_utf8(&out.stderr) { Ok(s) if s.trim().len() > 0 => { desc.push_str("\n--- stderr\n"); desc.push_str(s); } Ok(..) | Err(..) 
=> {} } } ProcessError { desc: desc, exit: status.map(|a| a.clone()), output: output.map(|a| a.clone()), cause: cause, } } pub fn internal_error(error: &str, detail: &str) -> Box { Box::new(ConcreteCargoError { description: error.to_string(), detail: Some(detail.to_string()), cause: None, is_human: false }) } pub fn internal(error: S) -> Box { Box::new(ConcreteCargoError { description: error.to_string(), detail: None, cause: None, is_human: false }) } pub fn human(error: S) -> Box { Box::new(ConcreteCargoError { description: error.to_string(), detail: None, cause: None, is_human: true }) } pub fn caused_human(error: S, cause: E) -> Box where S: fmt::Display, E: Error + Send + 'static { Box::new(ConcreteCargoError { description: error.to_string(), detail: None, cause: Some(Box::new(cause)), is_human: true }) } cargo-0.8.0/src/cargo/util/graph.rs000066400000000000000000000044331264656333200171360ustar00rootroot00000000000000use std::fmt; use std::hash::Hash; use std::collections::hash_set::{HashSet, Iter}; use std::collections::hash_map::{HashMap, Keys}; pub struct Graph { nodes: HashMap> } enum Mark { InProgress, Done } pub type Nodes<'a, N> = Keys<'a, N, HashSet>; pub type Edges<'a, N> = Iter<'a, N>; impl Graph { pub fn new() -> Graph { Graph { nodes: HashMap::new() } } pub fn add(&mut self, node: N, children: &[N]) { self.nodes.insert(node, children.iter().map(|n| n.clone()).collect()); } pub fn link(&mut self, node: N, child: N) { self.nodes.entry(node).or_insert_with(|| HashSet::new()).insert(child); } pub fn get_nodes(&self) -> &HashMap> { &self.nodes } pub fn edges(&self, node: &N) -> Option> { self.nodes.get(node).map(|set| set.iter()) } pub fn sort(&self) -> Option> { let mut ret = Vec::new(); let mut marks = HashMap::new(); for node in self.nodes.keys() { self.visit(node, &mut ret, &mut marks); } Some(ret) } fn visit(&self, node: &N, dst: &mut Vec, marks: &mut HashMap) { if marks.contains_key(node) { return; } marks.insert(node.clone(), Mark::InProgress); 
for child in self.nodes[node].iter() { self.visit(child, dst, marks); } dst.push(node.clone()); marks.insert(node.clone(), Mark::Done); } pub fn iter(&self) -> Nodes { self.nodes.keys() } } impl fmt::Debug for Graph { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { try!(writeln!(fmt, "Graph {{")); for (n, e) in self.nodes.iter() { try!(writeln!(fmt, " - {}", n)); for n in e.iter() { try!(writeln!(fmt, " - {}", n)); } } try!(write!(fmt, "}}")); Ok(()) } } impl PartialEq for Graph { fn eq(&self, other: &Graph) -> bool { self.nodes.eq(&other.nodes) } } impl Eq for Graph {} impl Clone for Graph { fn clone(&self) -> Graph { Graph { nodes: self.nodes.clone() } } } cargo-0.8.0/src/cargo/util/hex.rs000066400000000000000000000011231264656333200166120ustar00rootroot00000000000000use std::hash::{Hasher, Hash, SipHasher}; use rustc_serialize::hex::ToHex; pub fn to_hex(num: u64) -> String { [ (num >> 0) as u8, (num >> 8) as u8, (num >> 16) as u8, (num >> 24) as u8, (num >> 32) as u8, (num >> 40) as u8, (num >> 48) as u8, (num >> 56) as u8, ].to_hex() } pub fn hash_u64(hashable: &H) -> u64 { let mut hasher = SipHasher::new_with_keys(0, 0); hashable.hash(&mut hasher); hasher.finish() } pub fn short_hash(hashable: &H) -> String { to_hex(hash_u64(hashable)) } cargo-0.8.0/src/cargo/util/important_paths.rs000066400000000000000000000036651264656333200212570ustar00rootroot00000000000000use std::fs; use std::path::{Path, PathBuf}; use util::{CargoResult, human}; /// Iteratively search for `file` in `pwd` and its parents, returning /// the path of the directory. pub fn find_project(pwd: &Path, file: &str) -> CargoResult { find_project_manifest(pwd, file).map(|mut p| { // remove the file, leaving just the directory p.pop(); p }) } /// Iteratively search for `file` in `pwd` and its parents, returning /// the path to the file. 
pub fn find_project_manifest(pwd: &Path, file: &str) -> CargoResult { let mut current = pwd; loop { let manifest = current.join(file); if fs::metadata(&manifest).is_ok() { return Ok(manifest) } match current.parent() { Some(p) => current = p, None => break, } } bail!("could not find `{}` in `{}` or any parent directory", file, pwd.display()) } /// Find the root Cargo.toml pub fn find_root_manifest_for_wd(manifest_path: Option, cwd: &Path) -> CargoResult { match manifest_path { Some(path) => { let absolute_path = cwd.join(&path); if !absolute_path.ends_with("Cargo.toml") { bail!("the manifest-path must be a path to a Cargo.toml file") } if !fs::metadata(&absolute_path).is_ok() { bail!("manifest path `{}` does not exist", path) } Ok(absolute_path) }, None => find_project_manifest(&cwd, "Cargo.toml"), } } /// Return the path to the `file` in `pwd`, if it exists. pub fn find_project_manifest_exact(pwd: &Path, file: &str) -> CargoResult { let manifest = pwd.join(file); if fs::metadata(&manifest).is_ok() { Ok(manifest) } else { Err(human(format!("Could not find `{}` in `{}`", file, pwd.display()))) } } cargo-0.8.0/src/cargo/util/lev_distance.rs000066400000000000000000000035751264656333200205030ustar00rootroot00000000000000// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 or the MIT license // , at your // option. This file may not be copied, modified, or distributed // except according to those terms. 
use std::cmp; pub fn lev_distance(me: &str, t: &str) -> usize { if me.is_empty() { return t.chars().count(); } if t.is_empty() { return me.chars().count(); } let mut dcol = (0..t.len() + 1).collect::>(); let mut t_last = 0; for (i, sc) in me.chars().enumerate() { let mut current = i; dcol[0] = current + 1; for (j, tc) in t.chars().enumerate() { let next = dcol[j + 1]; if sc == tc { dcol[j + 1] = current; } else { dcol[j + 1] = cmp::min(current, next); dcol[j + 1] = cmp::min(dcol[j + 1], dcol[j]) + 1; } current = next; t_last = j; } } dcol[t_last + 1] } #[test] fn test_lev_distance() { use std::char::{ from_u32, MAX }; // Test bytelength agnosticity for c in (0u32..MAX as u32) .filter_map(|i| from_u32(i)) .map(|i| i.to_string()) { assert_eq!(lev_distance(&c, &c), 0); } let a = "\nMäry häd ä little lämb\n\nLittle lämb\n"; let b = "\nMary häd ä little lämb\n\nLittle lämb\n"; let c = "Mary häd ä little lämb\n\nLittle lämb\n"; assert_eq!(lev_distance(a, b), 1); assert_eq!(lev_distance(b, a), 1); assert_eq!(lev_distance(a, c), 2); assert_eq!(lev_distance(c, a), 2); assert_eq!(lev_distance(b, c), 1); assert_eq!(lev_distance(c, b), 1); } cargo-0.8.0/src/cargo/util/mod.rs000066400000000000000000000022271264656333200166130ustar00rootroot00000000000000pub use self::config::Config; pub use self::dependency_queue::Dependency; pub use self::dependency_queue::{DependencyQueue, Fresh, Dirty, Freshness}; pub use self::errors::{CargoResult, CargoError, ChainError, CliResult}; pub use self::errors::{CliError, ProcessError, CargoTestError}; pub use self::errors::{Human, caused_human}; pub use self::errors::{process_error, internal_error, internal, human}; pub use self::graph::Graph; pub use self::hex::{to_hex, short_hash, hash_u64}; pub use self::lev_distance::{lev_distance}; pub use self::paths::{join_paths, path2bytes, bytes2path, dylib_path}; pub use self::paths::{normalize_path, dylib_path_envvar, without_prefix}; pub use self::process_builder::{process, ProcessBuilder}; pub use 
self::rustc::Rustc; pub use self::sha256::Sha256; pub use self::to_semver::ToSemver; pub use self::to_url::ToUrl; pub use self::vcs::{GitRepo, HgRepo}; pub mod config; pub mod errors; pub mod graph; pub mod hex; pub mod important_paths; pub mod paths; pub mod process_builder; pub mod profile; pub mod to_semver; pub mod to_url; pub mod toml; pub mod lev_distance; mod dependency_queue; mod sha256; mod shell_escape; mod vcs; mod rustc; cargo-0.8.0/src/cargo/util/paths.rs000066400000000000000000000067451264656333200171640ustar00rootroot00000000000000use std::env; use std::ffi::{OsStr, OsString}; use std::fs::File; use std::io::prelude::*; use std::path::{Path, PathBuf, Component}; use util::{human, internal, CargoResult, ChainError}; pub fn join_paths>(paths: &[T], env: &str) -> CargoResult { env::join_paths(paths.iter()).or_else(|e| { let paths = paths.iter().map(Path::new).collect::>(); internal(format!("failed to join path array: {:?}", paths)).chain_error(|| { human(format!("failed to join search paths together: {}\n\ Does ${} have an unterminated quote character?", e, env)) }) }) } pub fn dylib_path_envvar() -> &'static str { if cfg!(windows) {"PATH"} else if cfg!(target_os = "macos") {"DYLD_LIBRARY_PATH"} else {"LD_LIBRARY_PATH"} } pub fn dylib_path() -> Vec { match env::var_os(dylib_path_envvar()) { Some(var) => env::split_paths(&var).collect(), None => Vec::new(), } } pub fn normalize_path(path: &Path) -> PathBuf { let mut components = path.components().peekable(); let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek() .cloned() { components.next(); PathBuf::from(c.as_os_str()) } else { PathBuf::new() }; for component in components { match component { Component::Prefix(..) 
=> unreachable!(), Component::RootDir => { ret.push(component.as_os_str()); } Component::CurDir => {} Component::ParentDir => { ret.pop(); } Component::Normal(c) => { ret.push(c); } } } ret } pub fn without_prefix<'a>(a: &'a Path, b: &'a Path) -> Option<&'a Path> { let mut a = a.components(); let mut b = b.components(); loop { match b.next() { Some(y) => match a.next() { Some(x) if x == y => continue, _ => return None, }, None => return Some(a.as_path()), } } } pub fn read(path: &Path) -> CargoResult { (|| -> CargoResult { let mut ret = String::new(); let mut f = try!(File::open(path)); try!(f.read_to_string(&mut ret)); Ok(ret) }).chain_error(|| { internal(format!("failed to read `{}`", path.display())) }) } pub fn write(path: &Path, contents: &[u8]) -> CargoResult<()> { (|| -> CargoResult<()> { let mut f = try!(File::create(path)); try!(f.write_all(contents)); Ok(()) }).chain_error(|| { internal(format!("failed to write `{}`", path.display())) }) } #[cfg(unix)] pub fn path2bytes(path: &Path) -> CargoResult<&[u8]> { use std::os::unix::prelude::*; Ok(path.as_os_str().as_bytes()) } #[cfg(windows)] pub fn path2bytes(path: &Path) -> CargoResult<&[u8]> { match path.as_os_str().to_str() { Some(s) => Ok(s.as_bytes()), None => Err(human(format!("invalid non-unicode path: {}", path.display()))) } } #[cfg(unix)] pub fn bytes2path(bytes: &[u8]) -> CargoResult { use std::os::unix::prelude::*; use std::ffi::OsStr; Ok(PathBuf::from(OsStr::from_bytes(bytes))) } #[cfg(windows)] pub fn bytes2path(bytes: &[u8]) -> CargoResult { use std::str; match str::from_utf8(bytes) { Ok(s) => Ok(PathBuf::from(s)), Err(..) 
=> Err(human("invalid non-unicode path")), } } cargo-0.8.0/src/cargo/util/process_builder.rs000066400000000000000000000102731264656333200212200ustar00rootroot00000000000000use std::collections::HashMap; use std::env; use std::ffi::{OsString, OsStr}; use std::fmt; use std::path::Path; use std::process::{Command, Output}; use util::{ProcessError, process_error}; use util::shell_escape::escape; #[derive(Clone, PartialEq, Debug)] pub struct ProcessBuilder { program: OsString, args: Vec, env: HashMap>, cwd: Option, } impl fmt::Display for ProcessBuilder { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { try!(write!(f, "`{}", self.program.to_string_lossy())); for arg in self.args.iter() { try!(write!(f, " {}", escape(arg.to_string_lossy()))); } write!(f, "`") } } impl ProcessBuilder { pub fn arg>(&mut self, arg: T) -> &mut ProcessBuilder { self.args.push(arg.as_ref().to_os_string()); self } pub fn args>(&mut self, arguments: &[T]) -> &mut ProcessBuilder { self.args.extend(arguments.iter().map(|t| { t.as_ref().to_os_string() })); self } pub fn cwd>(&mut self, path: T) -> &mut ProcessBuilder { self.cwd = Some(path.as_ref().to_os_string()); self } pub fn env>(&mut self, key: &str, val: T) -> &mut ProcessBuilder { self.env.insert(key.to_string(), Some(val.as_ref().to_os_string())); self } pub fn env_remove(&mut self, key: &str) -> &mut ProcessBuilder { self.env.insert(key.to_string(), None); self } pub fn get_args(&self) -> &[OsString] { &self.args } pub fn get_cwd(&self) -> Option<&Path> { self.cwd.as_ref().map(Path::new) } pub fn get_env(&self, var: &str) -> Option { self.env.get(var).cloned().or_else(|| Some(env::var_os(var))) .and_then(|s| s) } pub fn get_envs(&self) -> &HashMap> { &self.env } pub fn exec(&self) -> Result<(), ProcessError> { let mut command = self.build_command(); let exit = try!(command.status().map_err(|e| { process_error(&format!("Could not execute process `{}`", self.debug_string()), Some(e), None, None) })); if exit.success() { Ok(()) } else { 
Err(process_error(&format!("Process didn't exit successfully: `{}`", self.debug_string()), None, Some(&exit), None)) } } pub fn exec_with_output(&self) -> Result { let mut command = self.build_command(); let output = try!(command.output().map_err(|e| { process_error(&format!("Could not execute process `{}`", self.debug_string()), Some(e), None, None) })); if output.status.success() { Ok(output) } else { Err(process_error(&format!("Process didn't exit successfully: `{}`", self.debug_string()), None, Some(&output.status), Some(&output))) } } pub fn build_command(&self) -> Command { let mut command = Command::new(&self.program); if let Some(cwd) = self.get_cwd() { command.current_dir(cwd); } for arg in self.args.iter() { command.arg(arg); } for (k, v) in self.env.iter() { match *v { Some(ref v) => { command.env(k, v); } None => { command.env_remove(k); } } } command } fn debug_string(&self) -> String { let mut program = format!("{}", self.program.to_string_lossy()); for arg in self.args.iter() { program.push(' '); program.push_str(&format!("{}", arg.to_string_lossy())); } program } } pub fn process>(cmd: T) -> ProcessBuilder { ProcessBuilder { program: cmd.as_ref().to_os_string(), args: Vec::new(), cwd: None, env: HashMap::new(), } } cargo-0.8.0/src/cargo/util/profile.rs000066400000000000000000000041771264656333200175020ustar00rootroot00000000000000use std::env; use std::fmt; use std::mem; use time; use std::iter::repeat; use std::cell::RefCell; thread_local!(static PROFILE_STACK: RefCell> = RefCell::new(Vec::new())); thread_local!(static MESSAGES: RefCell> = RefCell::new(Vec::new())); type Message = (usize, u64, String); pub struct Profiler { desc: String, } fn enabled_level() -> Option { env::var("CARGO_PROFILE").ok().and_then(|s| s.parse().ok()) } pub fn start(desc: T) -> Profiler { if enabled_level().is_none() { return Profiler { desc: String::new() } } PROFILE_STACK.with(|stack| stack.borrow_mut().push(time::precise_time_ns())); Profiler { desc: desc.to_string(), 
} } impl Drop for Profiler { fn drop(&mut self) { let enabled = match enabled_level() { Some(i) => i, None => return, }; let start = PROFILE_STACK.with(|stack| stack.borrow_mut().pop().unwrap()); let end = time::precise_time_ns(); let stack_len = PROFILE_STACK.with(|stack| stack.borrow().len()); if stack_len == 0 { fn print(lvl: usize, msgs: &[Message], enabled: usize) { if lvl > enabled { return } let mut last = 0; for (i, &(l, time, ref msg)) in msgs.iter().enumerate() { if l != lvl { continue } println!("{} {:6}ms - {}", repeat(" ").take(lvl + 1).collect::(), time / 1000000, msg); print(lvl + 1, &msgs[last..i], enabled); last = i; } } MESSAGES.with(|msgs_rc| { let mut msgs = msgs_rc.borrow_mut(); msgs.push((0, end - start, mem::replace(&mut self.desc, String::new()))); print(0, &msgs, enabled); }); } else { MESSAGES.with(|msgs| { let msg = mem::replace(&mut self.desc, String::new()); msgs.borrow_mut().push((stack_len, end - start, msg)); }); } } } cargo-0.8.0/src/cargo/util/rustc.rs000066400000000000000000000030151264656333200171700ustar00rootroot00000000000000use std::path::Path; use util::{self, CargoResult, internal, ChainError}; pub struct Rustc { pub verbose_version: String, pub host: String, pub cap_lints: bool, } impl Rustc { /// Run the compiler at `path` to learn varioues pieces of information about /// it. /// /// If successful this function returns a description of the compiler along /// with a list of its capabilities. pub fn new>(path: P) -> CargoResult { let mut cmd = util::process(path.as_ref()); cmd.arg("-vV"); let mut ret = Rustc::blank(); let mut first = cmd.clone(); first.arg("--cap-lints").arg("allow"); let output = match first.exec_with_output() { Ok(output) => { ret.cap_lints = true; output } Err(..) 
=> try!(cmd.exec_with_output()), }; ret.verbose_version = try!(String::from_utf8(output.stdout).map_err(|_| { internal("rustc -v didn't return utf8 output") })); ret.host = { let triple = ret.verbose_version.lines().filter(|l| { l.starts_with("host: ") }).map(|l| &l[6..]).next(); let triple = try!(triple.chain_error(|| { internal("rustc -v didn't have a line for `host:`") })); triple.to_string() }; Ok(ret) } pub fn blank() -> Rustc { Rustc { verbose_version: String::new(), host: String::new(), cap_lints: false, } } } cargo-0.8.0/src/cargo/util/sha256.rs000066400000000000000000000105141264656333200170420ustar00rootroot00000000000000pub use self::imp::Sha256; // Someone upstream will link to OpenSSL, so we don't need to explicitly // link to it ourselves. Hence we pick up Sha256 digests from OpenSSL #[cfg(not(windows))] // allow improper ctypes because size_t falls under that in old compilers #[allow(bad_style, improper_ctypes)] mod imp { use libc; enum EVP_MD_CTX {} enum EVP_MD {} enum ENGINE {} extern { fn EVP_DigestInit_ex(ctx: *mut EVP_MD_CTX, kind: *const EVP_MD, imp: *mut ENGINE) -> libc::c_int; fn EVP_DigestUpdate(ctx: *mut EVP_MD_CTX, d: *const libc::c_void, cnt: libc::size_t) -> libc::c_int; fn EVP_DigestFinal_ex(ctx: *mut EVP_MD_CTX, md: *mut libc::c_uchar, s: *mut libc::c_uint) -> libc::c_int; fn EVP_MD_CTX_create() -> *mut EVP_MD_CTX; fn EVP_MD_CTX_destroy(ctx: *mut EVP_MD_CTX); fn EVP_sha256() -> *const EVP_MD; } pub struct Sha256 { ctx: *mut EVP_MD_CTX } impl Sha256 { pub fn new() -> Sha256 { unsafe { let ctx = EVP_MD_CTX_create(); assert!(!ctx.is_null()); let ret = Sha256 { ctx: ctx }; let n = EVP_DigestInit_ex(ret.ctx, EVP_sha256(), 0 as *mut _); assert_eq!(n, 1); return ret; } } pub fn update(&mut self, bytes: &[u8]) { unsafe { let n = EVP_DigestUpdate(self.ctx, bytes.as_ptr() as *const _, bytes.len() as libc::size_t); assert_eq!(n, 1); } } pub fn finish(&mut self) -> [u8; 32] { unsafe { let mut ret = [0u8; 32]; let mut out = 0; let n = 
EVP_DigestFinal_ex(self.ctx, ret.as_mut_ptr(), &mut out); assert_eq!(n, 1); assert_eq!(out, 32); return ret; } } } impl Drop for Sha256 { fn drop(&mut self) { unsafe { EVP_MD_CTX_destroy(self.ctx) } } } } // Leverage the crypto APIs that windows has built in. #[cfg(windows)] mod imp { extern crate winapi; extern crate advapi32; use std::io; use std::ptr; use self::winapi::{DWORD, HCRYPTPROV, HCRYPTHASH}; use self::winapi::{PROV_RSA_AES, CRYPT_SILENT, CRYPT_VERIFYCONTEXT, CALG_SHA_256, HP_HASHVAL}; use self::advapi32::{CryptAcquireContextW, CryptCreateHash, CryptDestroyHash}; use self::advapi32::{CryptGetHashParam, CryptHashData, CryptReleaseContext}; macro_rules! call{ ($e:expr) => ({ if $e == 0 { panic!("failed {}: {}", stringify!($e), io::Error::last_os_error()) } }) } pub struct Sha256 { hcryptprov: HCRYPTPROV, hcrypthash: HCRYPTHASH, } impl Sha256 { pub fn new() -> Sha256 { let mut hcp = 0; call!(unsafe { CryptAcquireContextW(&mut hcp, ptr::null(), ptr::null(), PROV_RSA_AES, CRYPT_VERIFYCONTEXT | CRYPT_SILENT) }); let mut ret = Sha256 { hcryptprov: hcp, hcrypthash: 0 }; call!(unsafe { CryptCreateHash(ret.hcryptprov, CALG_SHA_256, 0, 0, &mut ret.hcrypthash) }); return ret; } pub fn update(&mut self, bytes: &[u8]) { call!(unsafe { CryptHashData(self.hcrypthash, bytes.as_ptr() as *mut _, bytes.len() as DWORD, 0) }) } pub fn finish(&mut self) -> [u8; 32] { let mut ret = [0u8; 32]; let mut len = ret.len() as DWORD; call!(unsafe { CryptGetHashParam(self.hcrypthash, HP_HASHVAL, ret.as_mut_ptr(), &mut len, 0) }); assert_eq!(len as usize, ret.len()); return ret; } } impl Drop for Sha256 { fn drop(&mut self) { if self.hcrypthash != 0 { call!(unsafe { CryptDestroyHash(self.hcrypthash) }); } call!(unsafe { CryptReleaseContext(self.hcryptprov, 0) }); } } } cargo-0.8.0/src/cargo/util/shell_escape.rs000066400000000000000000000072651264656333200204720ustar00rootroot00000000000000// Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 or the MIT license // , at your // option. This file may not be copied, modified, or distributed // except according to those terms. use std::borrow::Cow; use std::env; pub fn escape(s: Cow) -> Cow { if cfg!(unix) { unix::escape(s) } else if env::var("MSYSTEM").is_ok() { unix::escape(s) } else { windows::escape(s) } } pub mod windows { use std::borrow::Cow; use std::iter::repeat; /// Escape for the windows cmd.exe shell, for more info see this url: /// /// http://blogs.msdn.com/b/twistylittlepassagesallalike/archive/2011/04/23 /// /everyone-quotes-arguments-the-wrong-way.aspx pub fn escape(s: Cow) -> Cow { let mut needs_escape = false; for ch in s.chars() { match ch { '"' | '\t' | '\n' | ' ' => needs_escape = true, _ => {} } } if !needs_escape { return s } let mut es = String::with_capacity(s.len()); es.push('"'); let mut chars = s.chars().peekable(); loop { let mut nslashes = 0; while let Some(&'\\') = chars.peek() { chars.next(); nslashes += 1; } match chars.next() { Some('"') => { es.extend(repeat('\\').take(nslashes * 2 + 1)); es.push('"'); } Some(c) => { es.extend(repeat('\\').take(nslashes)); es.push(c); } None => { es.extend(repeat('\\').take(nslashes * 2)); break; } } } es.push('"'); es.into() } #[test] fn test_escape() { assert_eq!(escape("--aaa=bbb-ccc".into()), "--aaa=bbb-ccc"); assert_eq!(escape("linker=gcc -L/foo -Wl,bar".into()), r#""linker=gcc -L/foo -Wl,bar""#); assert_eq!(escape(r#"--features="default""#.into()), r#""--features=\"default\"""#); assert_eq!(escape(r#"\path\to\my documents\"#.into()), r#""\path\to\my documents\\""#); } } pub mod unix { use std::borrow::Cow; const SHELL_SPECIAL: &'static str = r#" \$'"`!"#; /// Escape characters that may have special meaning in a shell, /// including spaces. 
pub fn escape(s: Cow) -> Cow { let escape_char = '\\'; // check if string needs to be escaped let clean = SHELL_SPECIAL.chars().all(|sp_char| !s.contains(sp_char)); if clean { return s } let mut es = String::with_capacity(s.len()); for ch in s.chars() { if SHELL_SPECIAL.contains(ch) { es.push(escape_char); } es.push(ch) } es.into() } #[test] fn test_escape() { assert_eq!(escape("--aaa=bbb-ccc".into()), "--aaa=bbb-ccc"); assert_eq!(escape("linker=gcc -L/foo -Wl,bar".into()), r#"linker=gcc\ -L/foo\ -Wl,bar"#); assert_eq!(escape(r#"--features="default""#.into()), r#"--features=\"default\""#); assert_eq!(escape(r#"'!\$`\\\n "#.into()), r#"\'\!\\\$\`\\\\\\n\ "#); } } cargo-0.8.0/src/cargo/util/to_semver.rs000066400000000000000000000012511264656333200200330ustar00rootroot00000000000000use semver::Version; pub trait ToSemver { fn to_semver(self) -> Result; } impl ToSemver for Version { fn to_semver(self) -> Result { Ok(self) } } impl<'a> ToSemver for &'a str { fn to_semver(self) -> Result { match Version::parse(self) { Ok(v) => Ok(v), Err(..) 
=> Err(format!("cannot parse '{}' as a semver", self)), } } } impl<'a> ToSemver for &'a String { fn to_semver(self) -> Result { (**self).to_semver() } } impl<'a> ToSemver for &'a Version { fn to_semver(self) -> Result { Ok(self.clone()) } } cargo-0.8.0/src/cargo/util/to_url.rs000066400000000000000000000016761264656333200173470ustar00rootroot00000000000000use url::{self, Url, UrlParser}; use std::path::Path; pub trait ToUrl { fn to_url(self) -> Result; } impl ToUrl for Url { fn to_url(self) -> Result { Ok(self) } } impl<'a> ToUrl for &'a Url { fn to_url(self) -> Result { Ok(self.clone()) } } impl<'a> ToUrl for &'a str { fn to_url(self) -> Result { UrlParser::new().scheme_type_mapper(mapper).parse(self).map_err(|s| { format!("invalid url `{}`: {}", self, s) }) } } impl<'a> ToUrl for &'a Path { fn to_url(self) -> Result { Url::from_file_path(self).map_err(|()| { format!("invalid path url `{}`", self.display()) }) } } fn mapper(s: &str) -> url::SchemeType { match s { "git" => url::SchemeType::Relative(9418), "ssh" => url::SchemeType::Relative(22), s => url::whatwg_scheme_type_mapper(s), } } cargo-0.8.0/src/cargo/util/toml.rs000066400000000000000000001022421264656333200170050ustar00rootroot00000000000000use std::collections::{HashMap, HashSet}; use std::default::Default; use std::fmt; use std::fs; use std::path::{Path, PathBuf}; use std::str; use toml; use semver; use rustc_serialize::{Decodable, Decoder}; use core::{SourceId, Profiles}; use core::{Summary, Manifest, Target, Dependency, DependencyInner, PackageId, GitReference}; use core::dependency::Kind; use core::manifest::{LibKind, Profile, ManifestMetadata}; use core::package_id::Metadata; use util::{self, CargoResult, human, ToUrl, ToSemver, ChainError, Config}; /// Representation of the projects file layout. /// /// This structure is used to hold references to all project files that are relevant to cargo. 
#[derive(Clone)] pub struct Layout { pub root: PathBuf, lib: Option, bins: Vec, examples: Vec, tests: Vec, benches: Vec, } impl Layout { fn main(&self) -> Option<&PathBuf> { self.bins.iter().find(|p| { match p.file_name().and_then(|s| s.to_str()) { Some(s) => s == "main.rs", None => false } }) } } fn try_add_file(files: &mut Vec, file: PathBuf) { if fs::metadata(&file).is_ok() { files.push(file); } } fn try_add_files(files: &mut Vec, root: PathBuf) { match fs::read_dir(&root) { Ok(new) => { files.extend(new.filter_map(|dir| { dir.map(|d| d.path()).ok() }).filter(|f| { f.extension().and_then(|s| s.to_str()) == Some("rs") }).filter(|f| { // Some unix editors may create "dotfiles" next to original // source files while they're being edited, but these files are // rarely actually valid Rust source files and sometimes aren't // even valid UTF-8. Here we just ignore all of them and require // that they are explicitly specified in Cargo.toml if desired. f.file_name().and_then(|s| s.to_str()).map(|s| { !s.starts_with(".") }).unwrap_or(true) })) } Err(_) => {/* just don't add anything if the directory doesn't exist, etc. */} } } /// Returns a new `Layout` for a given root path. /// The `root_path` represents the directory that contains the `Cargo.toml` file. 
pub fn project_layout(root_path: &Path) -> Layout { let mut lib = None; let mut bins = vec![]; let mut examples = vec![]; let mut tests = vec![]; let mut benches = vec![]; let lib_canidate = root_path.join("src").join("lib.rs"); if fs::metadata(&lib_canidate).is_ok() { lib = Some(lib_canidate); } try_add_file(&mut bins, root_path.join("src").join("main.rs")); try_add_files(&mut bins, root_path.join("src").join("bin")); try_add_files(&mut examples, root_path.join("examples")); try_add_files(&mut tests, root_path.join("tests")); try_add_files(&mut benches, root_path.join("benches")); Layout { root: root_path.to_path_buf(), lib: lib, bins: bins, examples: examples, tests: tests, benches: benches, } } pub fn to_manifest(contents: &[u8], source_id: &SourceId, layout: Layout, config: &Config) -> CargoResult<(Manifest, Vec)> { let manifest = layout.root.join("Cargo.toml"); let manifest = match util::without_prefix(&manifest, config.cwd()) { Some(path) => path.to_path_buf(), None => manifest.clone(), }; let contents = try!(str::from_utf8(contents).map_err(|_| { human(format!("{} is not valid UTF-8", manifest.display())) })); let root = try!(parse(contents, &manifest)); let mut d = toml::Decoder::new(toml::Value::Table(root)); let manifest: TomlManifest = try!(Decodable::decode(&mut d).map_err(|e| { human(e.to_string()) })); let pair = try!(manifest.to_manifest(source_id, &layout, config)); let (mut manifest, paths) = pair; match d.toml { Some(ref toml) => add_unused_keys(&mut manifest, toml, "".to_string()), None => {} } if !manifest.targets().iter().any(|t| !t.is_custom_build()) { bail!("no targets specified in the manifest\n \ either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] \ section must be present") } return Ok((manifest, paths)); fn add_unused_keys(m: &mut Manifest, toml: &toml::Value, key: String) { match *toml { toml::Value::Table(ref table) => { for (k, v) in table.iter() { add_unused_keys(m, v, if key.len() == 0 { k.clone() } else { key.clone() + "." 
+ k }) } } toml::Value::Array(ref arr) => { for v in arr.iter() { add_unused_keys(m, v, key.clone()); } } _ => m.add_warning(format!("unused manifest key: {}", key)), } } } pub fn parse(toml: &str, file: &Path) -> CargoResult { let mut parser = toml::Parser::new(&toml); match parser.parse() { Some(toml) => return Ok(toml), None => {} } let mut error_str = format!("could not parse input as TOML\n"); for error in parser.errors.iter() { let (loline, locol) = parser.to_linecol(error.lo); let (hiline, hicol) = parser.to_linecol(error.hi); error_str.push_str(&format!("{}:{}:{}{} {}\n", file.display(), loline + 1, locol + 1, if loline != hiline || locol != hicol { format!("-{}:{}", hiline + 1, hicol + 1) } else { "".to_string() }, error.desc)); } Err(human(error_str)) } type TomlLibTarget = TomlTarget; type TomlBinTarget = TomlTarget; type TomlExampleTarget = TomlTarget; type TomlTestTarget = TomlTarget; type TomlBenchTarget = TomlTarget; /* * TODO: Make all struct fields private */ #[derive(RustcDecodable)] pub enum TomlDependency { Simple(String), Detailed(DetailedTomlDependency) } #[derive(RustcDecodable, Clone, Default)] pub struct DetailedTomlDependency { version: Option, path: Option, git: Option, branch: Option, tag: Option, rev: Option, features: Option>, optional: Option, default_features: Option, } #[derive(RustcDecodable)] pub struct TomlManifest { package: Option>, project: Option>, profile: Option, lib: Option, bin: Option>, example: Option>, test: Option>, bench: Option>, dependencies: Option>, dev_dependencies: Option>, build_dependencies: Option>, features: Option>>, target: Option>, } #[derive(RustcDecodable, Clone, Default)] pub struct TomlProfiles { test: Option, doc: Option, bench: Option, dev: Option, release: Option, } #[derive(RustcDecodable, Clone, Default)] pub struct TomlProfile { opt_level: Option, lto: Option, codegen_units: Option, debug: Option, debug_assertions: Option, rpath: Option, } #[derive(RustcDecodable)] pub struct TomlProject { 
name: String, version: TomlVersion, authors: Vec, build: Option, links: Option, exclude: Option>, include: Option>, // package metadata description: Option, homepage: Option, documentation: Option, readme: Option, keywords: Option>, license: Option, license_file: Option, repository: Option, } pub struct TomlVersion { version: semver::Version, } impl Decodable for TomlVersion { fn decode(d: &mut D) -> Result { let s = try!(d.read_str()); match s.to_semver() { Ok(s) => Ok(TomlVersion { version: s }), Err(e) => Err(d.error(&e)), } } } impl TomlProject { pub fn to_package_id(&self, source_id: &SourceId) -> CargoResult { PackageId::new(&self.name, self.version.version.clone(), source_id) } } struct Context<'a, 'b> { deps: &'a mut Vec, source_id: &'a SourceId, nested_paths: &'a mut Vec, config: &'b Config, } // These functions produce the equivalent of specific manifest entries. One // wrinkle is that certain paths cannot be represented in the manifest due // to Toml's UTF-8 requirement. This could, in theory, mean that certain // otherwise acceptable executable names are not used when inside of // `src/bin/*`, but it seems ok to not build executables with non-UTF8 // paths. fn inferred_lib_target(name: &str, layout: &Layout) -> Option { layout.lib.as_ref().map(|lib| { TomlTarget { name: Some(name.to_string()), path: Some(PathValue::Path(lib.clone())), .. TomlTarget::new() } }) } fn inferred_bin_targets(name: &str, layout: &Layout) -> Vec { layout.bins.iter().filter_map(|bin| { let name = if &**bin == Path::new("src/main.rs") || *bin == layout.root.join("src").join("main.rs") { Some(name.to_string()) } else { bin.file_stem().and_then(|s| s.to_str()).map(|f| f.to_string()) }; name.map(|name| { TomlTarget { name: Some(name), path: Some(PathValue::Path(bin.clone())), .. 
TomlTarget::new() } }) }).collect() } fn inferred_example_targets(layout: &Layout) -> Vec { layout.examples.iter().filter_map(|ex| { ex.file_stem().and_then(|s| s.to_str()).map(|name| { TomlTarget { name: Some(name.to_string()), path: Some(PathValue::Path(ex.clone())), .. TomlTarget::new() } }) }).collect() } fn inferred_test_targets(layout: &Layout) -> Vec { layout.tests.iter().filter_map(|ex| { ex.file_stem().and_then(|s| s.to_str()).map(|name| { TomlTarget { name: Some(name.to_string()), path: Some(PathValue::Path(ex.clone())), .. TomlTarget::new() } }) }).collect() } fn inferred_bench_targets(layout: &Layout) -> Vec { layout.benches.iter().filter_map(|ex| { ex.file_stem().and_then(|s| s.to_str()).map(|name| { TomlTarget { name: Some(name.to_string()), path: Some(PathValue::Path(ex.clone())), .. TomlTarget::new() } }) }).collect() } impl TomlManifest { pub fn to_manifest(&self, source_id: &SourceId, layout: &Layout, config: &Config) -> CargoResult<(Manifest, Vec)> { let mut nested_paths = vec![]; let mut warnings = vec![]; let project = self.project.as_ref().or_else(|| self.package.as_ref()); let project = try!(project.chain_error(|| { human("no `package` or `project` section found.") })); if project.name.trim().is_empty() { bail!("package name cannot be an empty string.") } let pkgid = try!(project.to_package_id(source_id)); let metadata = pkgid.generate_metadata(&layout.root); // If we have no lib at all, use the inferred lib if available // If we have a lib with a path, we're done // If we have a lib with no path, use the inferred lib or_else package name let lib = match self.lib { Some(ref lib) => { try!(validate_library_name(lib)); Some( TomlTarget { name: lib.name.clone().or(Some(project.name.clone())), path: lib.path.clone().or( layout.lib.as_ref().map(|p| PathValue::Path(p.clone())) ), ..lib.clone() } ) } None => inferred_lib_target(&project.name, layout), }; let bins = match self.bin { Some(ref bins) => { let bin = layout.main(); for target in bins { 
try!(validate_binary_name(target)); } bins.iter().map(|t| { if bin.is_some() && t.path.is_none() { TomlTarget { path: bin.as_ref().map(|&p| PathValue::Path(p.clone())), .. t.clone() } } else { t.clone() } }).collect() } None => inferred_bin_targets(&project.name, layout) }; let blacklist = vec!["build", "deps", "examples", "native"]; for bin in bins.iter() { if blacklist.iter().find(|&x| *x == bin.name()) != None { bail!("the binary target name `{}` is forbidden", bin.name()) } } let examples = match self.example { Some(ref examples) => { for target in examples { try!(validate_example_name(target)); } examples.clone() } None => inferred_example_targets(layout) }; let tests = match self.test { Some(ref tests) => { for target in tests { try!(validate_test_name(target)); } tests.clone() } None => inferred_test_targets(layout) }; let benches = match self.bench { Some(ref benches) => { for target in benches { try!(validate_bench_name(target)); } benches.clone() } None => inferred_bench_targets(layout) }; if let Err(e) = unique_names_in_targets(&bins) { bail!("found duplicate binary name {}, but all binary targets \ must have a unique name", e); } if let Err(e) = unique_names_in_targets(&examples) { bail!("found duplicate example name {}, but all binary targets \ must have a unique name", e); } if let Err(e) = unique_names_in_targets(&benches) { bail!("found duplicate bench name {}, but all binary targets must \ have a unique name", e); } if let Err(e) = unique_names_in_targets(&tests) { bail!("found duplicate test name {}, but all binary targets must \ have a unique name", e) } // processing the custom build script let new_build = project.build.as_ref().map(PathBuf::from); // Get targets let targets = normalize(&lib, &bins, new_build, &examples, &tests, &benches, &metadata, &mut warnings); if targets.is_empty() { debug!("manifest has no build targets"); } let mut deps = Vec::new(); { let mut cx = Context { deps: &mut deps, source_id: source_id, nested_paths: &mut 
nested_paths, config: config, }; // Collect the deps try!(process_dependencies(&mut cx, self.dependencies.as_ref(), |dep| dep)); try!(process_dependencies(&mut cx, self.dev_dependencies.as_ref(), |dep| dep.set_kind(Kind::Development))); try!(process_dependencies(&mut cx, self.build_dependencies.as_ref(), |dep| dep.set_kind(Kind::Build))); if let Some(targets) = self.target.as_ref() { for (name, platform) in targets.iter() { try!(process_dependencies(&mut cx, platform.dependencies.as_ref(), |dep| { dep.set_only_for_platform(Some(name.clone())) })); try!(process_dependencies(&mut cx, platform.build_dependencies.as_ref(), |dep| { dep.set_only_for_platform(Some(name.clone())) .set_kind(Kind::Build) })); try!(process_dependencies(&mut cx, platform.dev_dependencies.as_ref(), |dep| { dep.set_only_for_platform(Some(name.clone())) .set_kind(Kind::Development) })); } } } let exclude = project.exclude.clone().unwrap_or(Vec::new()); let include = project.include.clone().unwrap_or(Vec::new()); let summary = try!(Summary::new(pkgid, deps, self.features.clone() .unwrap_or(HashMap::new()))); let metadata = ManifestMetadata { description: project.description.clone(), homepage: project.homepage.clone(), documentation: project.documentation.clone(), readme: project.readme.clone(), authors: project.authors.clone(), license: project.license.clone(), license_file: project.license_file.clone(), repository: project.repository.clone(), keywords: project.keywords.clone().unwrap_or(Vec::new()), }; let profiles = build_profiles(&self.profile); let mut manifest = Manifest::new(summary, targets, exclude, include, project.links.clone(), metadata, profiles); if project.license_file.is_some() && project.license.is_some() { manifest.add_warning(format!("warning: only one of `license` or \ `license-file` is necessary")); } for warning in warnings { manifest.add_warning(warning.clone()); } Ok((manifest, nested_paths)) } } /// Will check a list of toml targets, and make sure the target names are 
unique within a vector. /// If not, the name of the offending binary target is returned. fn unique_names_in_targets(targets: &[TomlTarget]) -> Result<(), String> { let values = targets.iter().map(|e| e.name()).collect::>(); let mut seen = HashSet::new(); for v in values { if !seen.insert(v.clone()) { return Err(v); } } Ok(()) } fn validate_library_name(target: &TomlTarget) -> CargoResult<()> { match target.name { Some(ref name) => { if name.trim().is_empty() { Err(human(format!("library target names cannot be empty."))) } else if name.contains("-") { Err(human(format!("library target names cannot contain hyphens: {}", name))) } else { Ok(()) } }, None => Ok(()) } } fn validate_binary_name(target: &TomlTarget) -> CargoResult<()> { match target.name { Some(ref name) => { if name.trim().is_empty() { Err(human(format!("binary target names cannot be empty."))) } else { Ok(()) } }, None => Err(human(format!("binary target bin.name is required"))) } } fn validate_example_name(target: &TomlTarget) -> CargoResult<()> { match target.name { Some(ref name) => { if name.trim().is_empty() { Err(human(format!("example target names cannot be empty"))) } else { Ok(()) } }, None => Err(human(format!("example target example.name is required"))) } } fn validate_test_name(target: &TomlTarget) -> CargoResult<()> { match target.name { Some(ref name) => { if name.trim().is_empty() { Err(human(format!("test target names cannot be empty"))) } else { Ok(()) } }, None => Err(human(format!("test target test.name is required"))) } } fn validate_bench_name(target: &TomlTarget) -> CargoResult<()> { match target.name { Some(ref name) => { if name.trim().is_empty() { Err(human(format!("bench target names cannot be empty"))) } else { Ok(()) } }, None => Err(human(format!("bench target bench.name is required"))) } } fn process_dependencies(cx: &mut Context, new_deps: Option<&HashMap>, mut f: F) -> CargoResult<()> where F: FnMut(DependencyInner) -> DependencyInner { let dependencies = match new_deps { 
Some(ref dependencies) => dependencies, None => return Ok(()) }; for (n, v) in dependencies.iter() { let details = match *v { TomlDependency::Simple(ref version) => { let mut d: DetailedTomlDependency = Default::default(); d.version = Some(version.clone()); d } TomlDependency::Detailed(ref details) => details.clone(), }; let reference = details.branch.clone().map(GitReference::Branch) .or_else(|| details.tag.clone().map(GitReference::Tag)) .or_else(|| details.rev.clone().map(GitReference::Rev)) .unwrap_or_else(|| GitReference::Branch("master".to_string())); let new_source_id = match details.git { Some(ref git) => { let loc = try!(git.to_url().map_err(|e| { human(e) })); Some(SourceId::for_git(&loc, reference)) } None => { details.path.as_ref().map(|path| { cx.nested_paths.push(PathBuf::from(path)); cx.source_id.clone() }) } }.unwrap_or(try!(SourceId::for_central(cx.config))); let dep = try!(DependencyInner::parse(&n, details.version.as_ref() .map(|v| &v[..]), &new_source_id)); let dep = f(dep) .set_features(details.features.unwrap_or(Vec::new())) .set_default_features(details.default_features.unwrap_or(true)) .set_optional(details.optional.unwrap_or(false)) .into_dependency(); cx.deps.push(dep); } Ok(()) } #[derive(RustcDecodable, Debug, Clone)] struct TomlTarget { name: Option, crate_type: Option>, path: Option, test: Option, doctest: Option, bench: Option, doc: Option, plugin: Option, harness: Option, } #[derive(RustcDecodable, Clone)] enum PathValue { String(String), Path(PathBuf), } /// Corresponds to a `target` entry, but `TomlTarget` is already used. 
#[derive(RustcDecodable)] struct TomlPlatform { dependencies: Option>, build_dependencies: Option>, dev_dependencies: Option>, } impl TomlTarget { fn new() -> TomlTarget { TomlTarget { name: None, crate_type: None, path: None, test: None, doctest: None, bench: None, doc: None, plugin: None, harness: None, } } fn name(&self) -> String { match self.name { Some(ref name) => name.clone(), None => panic!("target name is required") } } } impl PathValue { fn to_path(&self) -> PathBuf { match *self { PathValue::String(ref s) => PathBuf::from(s), PathValue::Path(ref p) => p.clone(), } } } impl fmt::Debug for PathValue { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { PathValue::String(ref s) => s.fmt(f), PathValue::Path(ref p) => p.display().fmt(f), } } } fn normalize(lib: &Option, bins: &[TomlBinTarget], custom_build: Option, examples: &[TomlExampleTarget], tests: &[TomlTestTarget], benches: &[TomlBenchTarget], metadata: &Metadata, warnings: &mut Vec) -> Vec { fn configure(toml: &TomlTarget, target: &mut Target) { let t2 = target.clone(); target.set_tested(toml.test.unwrap_or(t2.tested())) .set_doc(toml.doc.unwrap_or(t2.documented())) .set_doctest(toml.doctest.unwrap_or(t2.doctested())) .set_benched(toml.bench.unwrap_or(t2.benched())) .set_harness(toml.harness.unwrap_or(t2.harness())) .set_for_host(toml.plugin.unwrap_or(t2.for_host())); } fn lib_target(dst: &mut Vec, l: &TomlLibTarget, metadata: &Metadata, warnings: &mut Vec) { let path = l.path.clone().unwrap_or( PathValue::Path(Path::new("src").join(&format!("{}.rs", l.name()))) ); let crate_types = match l.crate_type.clone() { Some(kinds) => { // For now, merely warn about invalid crate types. // In the future, it might be nice to make them errors. 
kinds.iter().filter_map(|s| { let kind = LibKind::from_str(s); if let Err(ref error) = kind { warnings.push(format!("warning: {}", error)) } kind.ok() }).collect() } None => { vec![ if l.plugin == Some(true) {LibKind::Dylib} else {LibKind::Lib} ] } }; let mut target = Target::lib_target(&l.name(), crate_types, &path.to_path(), metadata.clone()); configure(l, &mut target); dst.push(target); } fn bin_targets(dst: &mut Vec, bins: &[TomlBinTarget], default: &mut FnMut(&TomlBinTarget) -> PathBuf) { for bin in bins.iter() { let path = bin.path.clone().unwrap_or_else(|| { PathValue::Path(default(bin)) }); let mut target = Target::bin_target(&bin.name(), &path.to_path(), None); configure(bin, &mut target); dst.push(target); } } fn custom_build_target(dst: &mut Vec, cmd: &Path) { let name = format!("build-script-{}", cmd.file_stem().and_then(|s| s.to_str()).unwrap_or("")); dst.push(Target::custom_build_target(&name, cmd, None)); } fn example_targets(dst: &mut Vec, examples: &[TomlExampleTarget], default: &mut FnMut(&TomlExampleTarget) -> PathBuf) { for ex in examples.iter() { let path = ex.path.clone().unwrap_or_else(|| { PathValue::Path(default(ex)) }); let mut target = Target::example_target(&ex.name(), &path.to_path()); configure(ex, &mut target); dst.push(target); } } fn test_targets(dst: &mut Vec, tests: &[TomlTestTarget], metadata: &Metadata, default: &mut FnMut(&TomlTestTarget) -> PathBuf) { for test in tests.iter() { let path = test.path.clone().unwrap_or_else(|| { PathValue::Path(default(test)) }); // make sure this metadata is different from any same-named libs. 
let mut metadata = metadata.clone(); metadata.mix(&format!("test-{}", test.name())); let mut target = Target::test_target(&test.name(), &path.to_path(), metadata); configure(test, &mut target); dst.push(target); } } fn bench_targets(dst: &mut Vec, benches: &[TomlBenchTarget], metadata: &Metadata, default: &mut FnMut(&TomlBenchTarget) -> PathBuf) { for bench in benches.iter() { let path = bench.path.clone().unwrap_or_else(|| { PathValue::Path(default(bench)) }); // make sure this metadata is different from any same-named libs. let mut metadata = metadata.clone(); metadata.mix(&format!("bench-{}", bench.name())); let mut target = Target::bench_target(&bench.name(), &path.to_path(), metadata); configure(bench, &mut target); dst.push(target); } } let mut ret = Vec::new(); if let Some(ref lib) = *lib { lib_target(&mut ret, lib, metadata, warnings); bin_targets(&mut ret, bins, &mut |bin| Path::new("src").join("bin") .join(&format!("{}.rs", bin.name()))); } else if bins.len() > 0 { bin_targets(&mut ret, bins, &mut |bin| Path::new("src") .join(&format!("{}.rs", bin.name()))); } if let Some(custom_build) = custom_build { custom_build_target(&mut ret, &custom_build); } example_targets(&mut ret, examples, &mut |ex| Path::new("examples") .join(&format!("{}.rs", ex.name()))); test_targets(&mut ret, tests, metadata, &mut |test| { if test.name() == "test" { Path::new("src").join("test.rs") } else { Path::new("tests").join(&format!("{}.rs", test.name())) } }); bench_targets(&mut ret, benches, metadata, &mut |bench| { if bench.name() == "bench" { Path::new("src").join("bench.rs") } else { Path::new("benches").join(&format!("{}.rs", bench.name())) } }); ret } fn build_profiles(profiles: &Option) -> Profiles { let profiles = profiles.as_ref(); return Profiles { release: merge(Profile::default_release(), profiles.and_then(|p| p.release.as_ref())), dev: merge(Profile::default_dev(), profiles.and_then(|p| p.dev.as_ref())), test: merge(Profile::default_test(), profiles.and_then(|p| 
p.test.as_ref())), bench: merge(Profile::default_bench(), profiles.and_then(|p| p.bench.as_ref())), doc: merge(Profile::default_doc(), profiles.and_then(|p| p.doc.as_ref())), custom_build: Profile::default_custom_build(), }; fn merge(profile: Profile, toml: Option<&TomlProfile>) -> Profile { let &TomlProfile { opt_level, lto, codegen_units, debug, debug_assertions, rpath } = match toml { Some(toml) => toml, None => return profile, }; Profile { opt_level: opt_level.unwrap_or(profile.opt_level), lto: lto.unwrap_or(profile.lto), codegen_units: codegen_units, rustc_args: None, rustdoc_args: None, debuginfo: debug.unwrap_or(profile.debuginfo), debug_assertions: debug_assertions.unwrap_or(profile.debug_assertions), rpath: rpath.unwrap_or(profile.rpath), test: profile.test, doc: profile.doc, run_custom_build: profile.run_custom_build, } } } cargo-0.8.0/src/cargo/util/vcs.rs000066400000000000000000000013731264656333200166300ustar00rootroot00000000000000use std::path::Path; use git2; use util::{CargoResult, process}; pub struct HgRepo; pub struct GitRepo; impl GitRepo { pub fn init(path: &Path, _: &Path) -> CargoResult { try!(git2::Repository::init(path)); return Ok(GitRepo) } pub fn discover(path: &Path, _: &Path) -> Result { git2::Repository::discover(path) } } impl HgRepo { pub fn init(path: &Path, cwd: &Path) -> CargoResult { try!(process("hg").cwd(cwd).arg("init").arg(path).exec()); return Ok(HgRepo) } pub fn discover(path: &Path, cwd: &Path) -> CargoResult { try!(process("hg").cwd(cwd).arg("root").cwd(path).exec_with_output()); return Ok(HgRepo) } } cargo-0.8.0/src/crates-io/000077500000000000000000000000001264656333200153015ustar00rootroot00000000000000cargo-0.8.0/src/crates-io/Cargo.toml000066400000000000000000000005141264656333200172310ustar00rootroot00000000000000[package] name = "crates-io" version = "0.1.0" authors = ["Alex Crichton "] license = "MIT/Apache-2.0" repository = "https://github.com/rust-lang/cargo" description = """ Helpers for interacting with 
crates.io """ [lib] name = "crates_io" path = "lib.rs" [dependencies] curl = "0.2" rustc-serialize = "0.3" cargo-0.8.0/src/crates-io/lib.rs000066400000000000000000000212421264656333200164160ustar00rootroot00000000000000extern crate curl; extern crate rustc_serialize; use std::collections::HashMap; use std::fmt; use std::fs::{self, File}; use std::io::prelude::*; use std::io::{self, Cursor}; use std::path::Path; use std::result; use curl::http; use curl::http::handle::Method::{Put, Get, Delete}; use curl::http::handle::{Method, Request}; use rustc_serialize::json; pub struct Registry { host: String, token: Option, handle: http::Handle, } pub type Result = result::Result; #[derive(PartialEq, Clone, Copy)] pub enum Auth { Authorized, Unauthorized } pub enum Error { Curl(curl::ErrCode), NotOkResponse(http::Response), NonUtf8Body, Api(Vec), Unauthorized, TokenMissing, Io(io::Error), NotFound, } #[derive(RustcDecodable)] pub struct Crate { pub name: String, pub description: Option, pub max_version: String } #[derive(RustcEncodable)] pub struct NewCrate { pub name: String, pub vers: String, pub deps: Vec, pub features: HashMap>, pub authors: Vec, pub description: Option, pub documentation: Option, pub homepage: Option, pub readme: Option, pub keywords: Vec, pub license: Option, pub license_file: Option, pub repository: Option, } #[derive(RustcEncodable)] pub struct NewCrateDependency { pub optional: bool, pub default_features: bool, pub name: String, pub features: Vec, pub version_req: String, pub target: Option, pub kind: String, } #[derive(RustcDecodable)] pub struct User { pub id: u32, pub login: String, pub avatar: Option, pub email: Option, pub name: Option, } #[derive(RustcDecodable)] struct R { ok: bool } #[derive(RustcDecodable)] struct ApiErrorList { errors: Vec } #[derive(RustcDecodable)] struct ApiError { detail: String } #[derive(RustcEncodable)] struct OwnersReq<'a> { users: &'a [&'a str] } #[derive(RustcDecodable)] struct Users { users: Vec } 
#[derive(RustcDecodable)] struct Crates { crates: Vec } impl Registry { pub fn new(host: String, token: Option) -> Registry { Registry::new_handle(host, token, http::Handle::new()) } pub fn new_handle(host: String, token: Option, handle: http::Handle) -> Registry { Registry { host: host, token: token, handle: handle, } } pub fn add_owners(&mut self, krate: &str, owners: &[&str]) -> Result<()> { let body = json::encode(&OwnersReq { users: owners }).unwrap(); let body = try!(self.put(format!("/crates/{}/owners", krate), body.as_bytes())); assert!(json::decode::(&body).unwrap().ok); Ok(()) } pub fn remove_owners(&mut self, krate: &str, owners: &[&str]) -> Result<()> { let body = json::encode(&OwnersReq { users: owners }).unwrap(); let body = try!(self.delete(format!("/crates/{}/owners", krate), Some(body.as_bytes()))); assert!(json::decode::(&body).unwrap().ok); Ok(()) } pub fn list_owners(&mut self, krate: &str) -> Result> { let body = try!(self.get(format!("/crates/{}/owners", krate))); Ok(json::decode::(&body).unwrap().users) } pub fn publish(&mut self, krate: &NewCrate, tarball: &Path) -> Result<()> { let json = json::encode(krate).unwrap(); // Prepare the body. 
The format of the upload request is: // // // (metadata for the package) // // let stat = try!(fs::metadata(tarball).map_err(Error::Io)); let header = { let mut w = Vec::new(); w.extend([ (json.len() >> 0) as u8, (json.len() >> 8) as u8, (json.len() >> 16) as u8, (json.len() >> 24) as u8, ].iter().map(|x| *x)); w.extend(json.as_bytes().iter().map(|x| *x)); w.extend([ (stat.len() >> 0) as u8, (stat.len() >> 8) as u8, (stat.len() >> 16) as u8, (stat.len() >> 24) as u8, ].iter().map(|x| *x)); w }; let tarball = try!(File::open(tarball).map_err(Error::Io)); let size = stat.len() as usize + header.len(); let mut body = Cursor::new(header).chain(tarball); let url = format!("{}/api/v1/crates/new", self.host); let token = match self.token.as_ref() { Some(s) => s, None => return Err(Error::TokenMissing), }; let request = self.handle.put(url, &mut body) .content_length(size) .header("Accept", "application/json") .header("Authorization", &token); let response = handle(request.exec()); let _body = try!(response); Ok(()) } pub fn search(&mut self, query: &str) -> Result> { let body = try!(self.req(format!("/crates?q={}", query), None, Get, Auth::Unauthorized)); Ok(json::decode::(&body).unwrap().crates) } pub fn yank(&mut self, krate: &str, version: &str) -> Result<()> { let body = try!(self.delete(format!("/crates/{}/{}/yank", krate, version), None)); assert!(json::decode::(&body).unwrap().ok); Ok(()) } pub fn unyank(&mut self, krate: &str, version: &str) -> Result<()> { let body = try!(self.put(format!("/crates/{}/{}/unyank", krate, version), &[])); assert!(json::decode::(&body).unwrap().ok); Ok(()) } fn put(&mut self, path: String, b: &[u8]) -> Result { self.req(path, Some(b), Put, Auth::Authorized) } fn get(&mut self, path: String) -> Result { self.req(path, None, Get, Auth::Authorized) } fn delete(&mut self, path: String, b: Option<&[u8]>) -> Result { self.req(path, b, Delete, Auth::Authorized) } fn req(&mut self, path: String, body: Option<&[u8]>, method: Method, 
authorized: Auth) -> Result { let mut req = Request::new(&mut self.handle, method) .uri(format!("{}/api/v1{}", self.host, path)) .header("Accept", "application/json") .content_type("application/json"); if authorized == Auth::Authorized { let token = match self.token.as_ref() { Some(s) => s, None => return Err(Error::TokenMissing), }; req = req.header("Authorization", &token); } match body { Some(b) => req = req.body(b), None => {} } handle(req.exec()) } } fn handle(response: result::Result) -> Result { let response = try!(response.map_err(Error::Curl)); match response.get_code() { 0 => {} // file upload url sometimes 200 => {} 403 => return Err(Error::Unauthorized), 404 => return Err(Error::NotFound), _ => return Err(Error::NotOkResponse(response)) } let body = match String::from_utf8(response.move_body()) { Ok(body) => body, Err(..) => return Err(Error::NonUtf8Body), }; match json::decode::(&body) { Ok(errors) => { return Err(Error::Api(errors.errors.into_iter().map(|s| s.detail) .collect())) } Err(..) 
=> {} } Ok(body) } impl fmt::Display for Error { #[allow(deprecated)] // connect => join in 1.3 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { Error::NonUtf8Body => write!(f, "response body was not utf-8"), Error::Curl(ref err) => write!(f, "http error: {}", err), Error::NotOkResponse(ref resp) => { write!(f, "failed to get a 200 OK response: {}", resp) } Error::Api(ref errs) => { write!(f, "api errors: {}", errs.connect(", ")) } Error::Unauthorized => write!(f, "unauthorized API access"), Error::TokenMissing => write!(f, "no upload token found, please run `cargo login`"), Error::Io(ref e) => write!(f, "io error: {}", e), Error::NotFound => write!(f, "cannot find crate"), } } } cargo-0.8.0/src/doc/000077500000000000000000000000001264656333200141605ustar00rootroot00000000000000cargo-0.8.0/src/doc/CNAME000066400000000000000000000000161264656333200147230ustar00rootroot00000000000000doc.crates.io cargo-0.8.0/src/doc/build-script.md000066400000000000000000000441441264656333200171120ustar00rootroot00000000000000% Build Script Support - Cargo Documentation Some packages need to compile third-party non-Rust code, for example C libraries. Other packages need to link to C libraries which can either be located on the system or possibly need to be built from source. Others still need facilities for functionality such as code generation before building (think parser generators). Cargo does not aim to replace other tools that are well-optimized for these tasks, but it does integrate with them with the `build` configuration option. ```toml [package] # ... build = "build.rs" ``` The Rust file designated by the `build` command (relative to the package root) will be compiled and invoked before anything else is compiled in the package, allowing your Rust code to depend on the built or generated artifacts. Note that there is no default value for `build`, it must be explicitly specified if required. 
Some example use cases of the build command are: * Building a bundled C library. * Finding a C library on the host system. * Generating a Rust module from a specification. * Performing any platform-specific configuration needed for the crate. Each of these use cases will be detailed in full below to give examples of how the build command works. ## Inputs to the Build Script When the build script is run, there are a number of inputs to the build script, all passed in the form of [environment variables][env]. In addition to environment variables, the build script’s current directory is the source directory of the build script’s package. [env]: environment-variables.html ## Outputs of the Build Script All the lines printed to stdout by a build script that start with `cargo:` are interpreted by Cargo and must be of the form `key=value`. Example output: ```notrust cargo:rustc-link-lib=static=foo cargo:rustc-link-search=native=/path/to/foo cargo:rustc-cfg=foo cargo:root=/path/to/foo cargo:libdir=/path/to/foo/lib cargo:include=/path/to/foo/include ``` There are a few special keys that Cargo recognizes, affecting how the crate this build script is for is built: * `rustc-link-lib` indicates that the specified value should be passed to the compiler as a `-l` flag. * `rustc-link-search` indicates the specified value should be passed to the compiler as a `-L` flag. * `rustc-cfg` indicates that the specified directive will be passed as a `--cfg` flag to the compiler. This is often useful for performing compile-time detection of various features. * `rerun-if-changed` is a path to a file or directory which indicates that the build script should be re-run if it changes (detected by a more-recent last-modified timestamp on the file). Normally build scripts are re-run if any file inside the crate root changes, but this can be used to scope changes to just a small set of files. If this path points to a directory the entire directory will be traversed for changes. 
Any other element is a user-defined metadata that will be passed to dependencies. More information about this can be found in the [`links`][links] section. [links]: #the-links-manifest-key ## Build Dependencies Build scripts are also allowed to have dependencies on other Cargo-based crates. Dependencies are declared through the `build-dependencies` section of the manifest. ```toml [build-dependencies] foo = { git = "https://github.com/your-packages/foo" } ``` The build script **does not** have access to the dependencies listed in the `dependencies` or `dev-dependencies` section (they’re not built yet!). All build dependencies will also not be available to the package itself unless explicitly stated as so. ## The `links` Manifest Key In addition to the manifest key `build`, Cargo also supports a `links` manifest key to declare the name of a native library that is being linked to: ```toml [package] # ... links = "foo" build = "build.rs" ``` This manifest states that the package links to the `libfoo` native library, and it also has a build script for locating and/or building the library. Cargo requires that a `build` command is specified if a `links` entry is also specified. The purpose of this manifest key is to give Cargo an understanding about the set of native dependencies that a package has, as well as providing a principled system of passing metadata between package build scripts. Primarily, Cargo requires that there is at most one package per `links` value. In other words, it’s forbidden to have two packages link to the same native library. Note, however, that there are [conventions in place][star-sys] to alleviate this. [star-sys]: #-sys-packages As mentioned above in the output format, each build script can generate an arbitrary set of metadata in the form of key-value pairs. This metadata is passed to the build scripts of **dependent** packages. 
For example, if `libbar` depends on `libfoo`, then if `libfoo` generates `key=value` as part of its metadata, then the build script of `libbar` will have the environment variables `DEP_FOO_KEY=value`. Note that metadata is only passed to immediate dependents, not transitive dependents. The motivation for this metadata passing is outlined in the linking to system libraries case study below. ## Overriding Build Scripts If a manifest contains a `links` key, then Cargo supports overriding the build script specified with a custom library. The purpose of this functionality is to prevent running the build script in question altogether and instead supply the metadata ahead of time. To override a build script, place the following configuration in any acceptable Cargo [configuration location](config.html). ```toml [target.x86_64-unknown-linux-gnu.foo] rustc-link-search = ["/path/to/foo"] rustc-link-lib = ["foo"] root = "/path/to/foo" key = "value" ``` This section states that for the target `x86_64-unknown-linux-gnu` the library named `foo` has the metadata specified. This metadata is the same as the metadata generated as if the build script had run, providing a number of key/value pairs where the `rustc-flags`, `rustc-link-search`, and `rustc-link-lib` keys are slightly special. With this configuration, if a package declares that it links to `foo` then the build script will **not** be compiled or run, and the metadata specified will instead be used. # Case study: Code generation Some Cargo packages need to have code generated just before they are compiled for various reasons. Here we’ll walk through a simple example which generates a library call as part of the build script. First, let’s take a look at the directory structure of this package: ```notrust . ├── Cargo.toml ├── build.rs └── src └── main.rs 1 directory, 3 files ``` Here we can see that we have a `build.rs` build script and our binary in `main.rs`. 
Next, let’s take a look at the manifest: ```toml # Cargo.toml [package] name = "hello-from-generated-code" version = "0.1.0" authors = ["you@example.com"] build = "build.rs" ``` Here we can see we’ve got a build script specified which we’ll use to generate some code. Let’s see what’s inside the build script: ```rust,no_run // build.rs use std::env; use std::fs::File; use std::io::Write; use std::path::Path; fn main() { let out_dir = env::var("OUT_DIR").unwrap(); let dest_path = Path::new(&out_dir).join("hello.rs"); let mut f = File::create(&dest_path).unwrap(); f.write_all(b" pub fn message() -> &'static str { \"Hello, World!\" } ").unwrap(); } ``` There’s a couple of points of note here: * The script uses the `OUT_DIR` environment variable to discover where the output files should be located. It can use the process’ current working directory to find where the input files should be located, but in this case we don’t have any input files. * This script is relatively simple as it just writes out a small generated file. One could imagine that other more fanciful operations could take place such as generating a Rust module from a C header file or another language definition, for example. Next, let’s peek at the library itself: ```rust,ignore // src/main.rs include!(concat!(env!("OUT_DIR"), "/hello.rs")); fn main() { println!("{}", message()); } ``` This is where the real magic happens. The library is using the rustc-defined `include!` macro in combination with the `concat!` and `env!` macros to include the generated file (`mod.rs`) into the crate’s compilation. Using the structure shown here, crates can include any number of generated files from the build script itself. We’ve also seen a brief example of how a build script can use a crate as a dependency purely for the build process and not for the crate itself at runtime. # Case study: Building some native code Sometimes it’s necessary to build some native C or C++ code as part of a package. 
This is another excellent use case of leveraging the build script to build a native library before the Rust crate itself. As an example, we’ll create a Rust library which calls into C to print “Hello, World!â€. Like above, let’s first take a look at the project layout: ```notrust . ├── Cargo.toml ├── build.rs └── src ├── hello.c └── main.rs 1 directory, 4 files ``` Pretty similar to before! Next, the manifest: ```toml # Cargo.toml [package] name = "hello-world-from-c" version = "0.1.0" authors = ["you@example.com"] build = "build.rs" ``` For now we’re not going to use any build dependencies, so let’s take a look at the build script now: ```rust,no_run // build.rs use std::process::Command; use std::env; use std::path::Path; fn main() { let out_dir = env::var("OUT_DIR").unwrap(); // note that there are a number of downsides to this approach, the comments // below detail how to improve the portability of these commands. Command::new("gcc").args(&["src/hello.c", "-c", "-fPIC", "-o"]) .arg(&format!("{}/hello.o", out_dir)) .status().unwrap(); Command::new("ar").args(&["crus", "libhello.a", "hello.o"]) .current_dir(&Path::new(&out_dir)) .status().unwrap(); println!("cargo:rustc-link-search=native={}", out_dir); println!("cargo:rustc-link-lib=static=hello"); } ``` This build script starts out by compiling our C file into an object file (by invoking `gcc`) and then converting this object file into a static library (by invoking `ar`). The final step is feedback to Cargo itself to say that our output was in `out_dir` and the compiler should link the crate to `libhello.a` statically via the `-l static=hello` flag. Note that there are a number of drawbacks to this hardcoded approach: * The `gcc` command itself is not portable across platforms. For example it’s unlikely that Windows platforms have `gcc`, and not even all Unix platforms may have `gcc`. The `ar` command is also in a similar situation. * These commands do not take cross-compilation into account. 
If we’re cross compiling for a platform such as Android it’s unlikely that `gcc` will produce an ARM executable. Not to fear, though, this is where a `build-dependencies` entry would help! The Cargo ecosystem has a number of packages to make this sort of task much easier, portable, and standardized. For example, the build script could be written as: ```rust,ignore // build.rs // Bring in a dependency on an externally maintained `gcc` package which manages // invoking the C compiler. extern crate gcc; fn main() { gcc::compile_library("libhello.a", &["src/hello.c"]); } ``` Add a build time dependency on the `gcc` crate with the following addition to your `Cargo.toml`: ```toml [build-dependencies] gcc = "0.3" ``` The [`gcc` crate](https://crates.io/crates/gcc) abstracts a range of build script requirements for C code: * It invokes the appropriate compiler (MSVC for windows, `gcc` for MinGW, `cc` for Unix platforms, etc.). * It takes the `TARGET` variable into account by passing appropriate flags to the compiler being used. * Other environment variables, such as `OPT_LEVEL`, `DEBUG`, etc., are all handled automatically. * The stdout output and `OUT_DIR` locations are also handled by the `gcc` library. Here we can start to see some of the major benefits of farming as much functionality as possible out to common build dependencies rather than duplicating logic across all build scripts! Back to the case study though, let’s take a quick look at the contents of the `src` directory: ```c // src/hello.c #include void hello() { printf("Hello, World!\n"); } ``` ```rust,ignore // src/main.rs // Note the lack of the `#[link]` attribute. We’re delegating the responsibility // of selecting what to link to over to the build script rather than hardcoding // it in the source file. extern { fn hello(); } fn main() { unsafe { hello(); } } ``` And there we go! This should complete our example of building some C code from a Cargo package using the build script itself. 
This also shows why using a build dependency can be crucial in many situations and even much more concise! # Case study: Linking to system libraries The final case study here will be investigating how a Cargo library links to a system library and how the build script is leveraged to support this use case. Quite frequently a Rust crate wants to link to a native library often provided on the system to bind its functionality or just use it as part of an implementation detail. This is quite a nuanced problem when it comes to performing this in a platform-agnostic fashion, and the purpose of a build script is again to farm out as much of this as possible to make this as easy as possible for consumers. As an example to follow, let’s take a look at one of [Cargo’s own dependencies][git2-rs], [libgit2][libgit2]. This library has a number of constraints: [git2-rs]: https://github.com/alexcrichton/git2-rs/tree/master/libgit2-sys [libgit2]: https://github.com/libgit2/libgit2 * It has an optional dependency on OpenSSL on Unix to implement the https transport. * It has an optional dependency on libssh2 on all platforms to implement the ssh transport. * It is often not installed on all systems by default. * It can be built from source using `cmake`. To visualize what’s going on here, let’s take a look at the manifest for the relevant Cargo package. ```toml [package] name = "libgit2-sys" version = "0.1.0" authors = ["..."] links = "git2" build = "build.rs" [dependencies] libssh2-sys = { git = "https://github.com/alexcrichton/ssh2-rs" } [target.x86_64-unknown-linux-gnu.dependencies] openssl-sys = { git = "https://github.com/alexcrichton/openssl-sys" } # ... ``` As the above manifests show, we’ve got a `build` script specified, but it’s worth noting that this example has a `links` entry which indicates that the crate (`libgit2-sys`) links to the `git2` native library. 
Here we also see the unconditional dependency on `libssh2` via the `libssh2-sys` crate, as well as a platform-specific dependency on `openssl-sys` for unix (other variants elided for now). It may seem a little counterintuitive to express *C dependencies* in the *Cargo manifest*, but this is actually using one of Cargo’s conventions in this space. ## `*-sys` Packages To alleviate linking to system libraries, Cargo has a *convention* of package naming and functionality. Any package named `foo-sys` will provide two major pieces of functionality: * The library crate will link to the native library `libfoo`. This will often probe the current system for `libfoo` before resorting to building from source. * The library crate will provide **declarations** for functions in `libfoo`, but it does **not** provide bindings or higher-level abstractions. The set of `*-sys` packages provides a common set of dependencies for linking to native libraries. There are a number of benefits earned from having this convention of native-library-related packages: * Common dependencies on `foo-sys` alleviates the above rule about one package per value of `links`. * A common dependency allows centralizing logic on discovering `libfoo` itself (or building it from source). * These dependencies are easily overridable. ## Building libgit2 Now that we’ve got libgit2’s dependencies sorted out, we need to actually write the build script. We’re not going to look at specific snippets of code here and instead only take a look at the high-level details of the build script of `libgit2-sys`. This is not recommending all packages follow this strategy, but rather just outlining one specific strategy. The first step of the build script should do is to query whether libgit2 is already installed on the host system. To do this we’ll leverage the preexisting tool `pkg-config` (when its available). 
We’ll also use a `build-dependencies` section to refactor out all the `pkg-config` related code (or someone’s already done that!). If `pkg-config` failed to find libgit2, or if `pkg-config` just wasn’t installed, the next step is to build libgit2 from bundled source code (distributed as part of `libgit2-sys` itself). There are a few nuances when doing so that we need to take into account, however: * The build system of libgit2, `cmake`, needs to be able to find libgit2’s optional dependency of libssh2. We’re sure we’ve already built it (it’s a Cargo dependency), we just need to communicate this information. To do this we leverage the metadata format to communicate information between build scripts. In this example the libssh2 package printed out `cargo:root=...` to tell us where libssh2 is installed at, and we can then pass this along to cmake with the `CMAKE_PREFIX_PATH` environment variable. * We’ll need to handle some `CFLAGS` values when compiling C code (and tell `cmake` about this). Some flags we may want to pass are `-m64` for 64-bit code, `-m32` for 32-bit code, or `-fPIC` for 64-bit code as well. * Finally, we’ll invoke `cmake` to place all output into the `OUT_DIR` environment variable, and then we’ll print the necessary metadata to instruct rustc how to link to libgit2. Most of the functionality of this build script is easily refactorable into common dependencies, so our build script isn’t quite as intimidating as this descriptions! In reality it’s expected that build scripts are quite succinct by farming logic such as above to build dependencies. cargo-0.8.0/src/doc/config.md000066400000000000000000000067751264656333200157660ustar00rootroot00000000000000% Configuration - Cargo Documentation This document will explain how Cargo’s configuration system works, as well as available keys or configuration. For configuration of a project through its manifest, see the [manifest format](manifest.html). 
# Hierarchical structure Cargo allows to have local configuration for a particular project or global configuration (like git). Cargo also extends this ability to a hierarchical strategy. If, for example, Cargo were invoked in `/home/foo/bar/baz`, then the following configuration files would be probed for: * `/home/foo/bar/baz/.cargo/config` * `/home/foo/bar/.cargo/config` * `/home/foo/.cargo/config` * `/home/.cargo/config` * `/.cargo/config` With this structure you can specify local configuration per-project, and even possibly check it into version control. You can also specify personal default with a configuration file in your home directory. # Configuration Format All configuration is currently in the [TOML format][toml] (like the manifest), with simple key-value pairs inside of sections (tables) which all get merged together. [toml]: https://github.com/toml-lang/toml # Configuration keys All of the following keys are optional, and their defaults are listed as their value unless otherwise noted. Key values that specify a tool may be given as an absolute path, a relative path or as a pathless tool name. Absolute paths and pathless tool names are used as given. Relative paths are resolved relative to the parent directory of the `.cargo` directory of the config file that the value resides within. ```toml # An array of paths to local repositories which are to be used as overrides for # dependencies. For more information see the Cargo Guide. paths = ["/path/to/override"] [cargo-new] # This is your name/email to place in the `authors` section of a new Cargo.toml # that is generated. If not present, then `git` will be probed, and if that is # not present then `$USER` and `$EMAIL` will be used. name = "..." email = "..." # By default `cargo new` will initialize a new git repository. This key can be # set to `none` to disable this behavior. 
vcs = "none" # For the following sections, $triple refers to any valid target triple, not the # literal string "$triple", and it will apply whenever that target triple is # being compiled to. [target] # For Cargo builds which do not mention --target, these are the ar/linker tools # which are passed to rustc to use (via `-C ar=` and `-C linker=`). By default # these flags are not passed to the compiler. ar = ".." linker = ".." [target.$triple] # Similar to the above ar/linker tool configuration, but this only applies to # when the `$triple` is being compiled for. ar = ".." linker = ".." # Configuration keys related to the registry [registry] index = "..." # URL of the registry index (defaults to the central repository) token = "..." # Access token (found on the central repo’s website) [http] proxy = "..." # HTTP proxy to use for HTTP requests (defaults to none) timeout = 60000 # Timeout for each HTTP request, in milliseconds [build] jobs = 1 # number of jobs to run by default (default to # cpus) rustc = "rustc" # the rust compiler tool rustdoc = "rustdoc" # the doc generator tool target-dir = "target" # path of where to place all generated artifacts ``` # Environment Variables Cargo recognizes a few global [environment variables][env] to configure itself. Settings specified via config files take precedence over those specified via environment variables. [env]: environment-variables.html cargo-0.8.0/src/doc/crates-io.md000066400000000000000000000261761264656333200164040ustar00rootroot00000000000000% Cargo and crates.io In addition to using dependencies from git repositories (as mentioned in [the guide](guide.html)) Cargo can also publish to and download from the [crates.io][crates-io] central repository. This site serves as a location to discover and download packages, and `cargo` is configured to use it by default to find requested packages. The guide will explain how crates can use crates.io through the `cargo` command line tool. 
[crates-io]: https://crates.io/ # Using crates.io-based crates The method of specifying a dependency on a crate from crates.io is slightly different than the method of specifying a dependency on a git repository. The syntax for doing so is: ```toml [dependencies] glob = "0.0.3" ``` With this format, adding new dependencies should just add a new line, you don’t need to add `[dependencies]` for each dependency listed, for example: ```toml [dependencies] glob = "0.0.3" num = "0.0.4" ``` The string value for each key in this table is a [semver][semver] version requirement. [semver]: https://github.com/steveklabnik/semver#requirements **Caret requirements** allow SemVer compatible updates to a specified version. `^1.2.3` is an example of a caret requirement. When considering “compatible” versions, `0.1` and `0.2` are not considered compatible, but `1.0` and `1.1` are for example. If no operator is specified, this is the default requirement (e.g. `1.3` is the same as `^1.3`). `0.0.x` is not considered compatible with any other version. Missing minor and patch versions are desugared to `0` but allow flexibility for that value. ```notrust ^1.2.3 := >=1.2.3 <2.0.0 ^0.2.3 := >=0.2.3 <0.3.0 ^0.0.3 := >=0.0.3 <0.0.4 ^0.0 := >=0.0.0 <0.1.0 ^0 := >=0.0.0 <1.0.0 ``` **Tilde requirements** specify a minimal version with some ability to update. `~1.2.3` is an example of a tilde requirement. ```notrust ~1.2.3 := >=1.2.3 <1.3.0 ~1.2 := >=1.2.0 <1.3.0 ~1 := >=1.0.0 <2.0.0 ``` **Wildcard requirements** allow for any version where the wildcard is positioned. `*`, `1.*` and `1.2.*` are examples of wildcard requirements. ```notrust * := >=0.0.0 1.* := >=1.0.0 <2.0.0 1.2.* := >=1.2.0 <1.3.0 ``` **Inequality requirements** allow manually specifying a version range or an exact version to depend on. Here are some examples of inequality requirements: ```notrust >= 1.2.0 > 1 < 2 = 1.2.3 ``` Multiple version requirements can also be separated with a comma, e.g. `>= 1.2, < 1.5`. 
# Pre-1.0 versions While SemVer says that there is no compatibility before 1.0.0, many programmers treat a `0.x.y` release in the same way as a `1.x.y` release: that is, `y` is incremented for bugfixes, and `x` is incremented for new features. As such, Cargo considers a `0.x.y` and `0.x.z` version, where `z > y`, to be compatible. # Publishing crates Ok, now that we’ve got a crate which is using dependencies from crates.io, let’s publish it! Publishing a crate is when a specific version is uploaded to crates.io. Take care when publishing a crate, because a publish is **permanent**. The version can never be overwritten, and the code cannot be deleted. There is no limit to the number of versions which can be published, however. ## Acquiring an API token First thing’s first, you’ll need an account on [crates.io][crates-io] to acquire an API token. To do so, [visit the home page][crates-io] and log in via a GitHub account (required for now). After this, visit your [Account Settings](https://crates.io/me) page and run the `cargo login` command specified. ```notrust $ cargo login abcdefghijklmnopqrstuvwxyz012345 ``` This command will inform Cargo of your API token and store it locally in your `~/.cargo/config`. Note that this token is a **secret** and should not be shared with anyone else. If it leaks for any reason, you should regenerate it immediately. ## Packaging a crate The next step is to package up your crate into a format that can be uploaded to crates.io. For this we’ll use the `cargo package` subcommand. This will take our entire crate and package it all up into a `*.crate` file in the `target/package` directory. ```notrust $ cargo package ``` As an added bonus, the `*.crate` will be verified independently of the current source tree. After the `*.crate` is created, it’s unpacked into `target/package` and then built from scratch to ensure that all necessary files are there for the build to succeed. This behavior can be disabled with the `--no-verify` flag. 
Now’s a good time to take a look at the `*.crate` file to make sure you didn’t accidentally package up that 2GB video asset. Cargo will automatically ignore files ignored by your version control system when packaging, but if you want to specify an extra set of files to ignore you can use the `exclude` key in the manifest: ```toml [package] # ... exclude = [ "public/assets/*", "videos/*", ] ``` The syntax of each element in this array is what [rust-lang/glob](https://github.com/rust-lang/glob) accepts. If you’d rather roll with a whitelist instead of a blacklist, Cargo also supports an `include` key: ```toml [package] # ... include = [ "**/*.rs", "Cargo.toml", ] ``` ## Uploading the crate Now that we’ve got a `*.crate` file ready to go, it can be uploaded to crates.io with the `cargo publish` command. And that’s it, you’ve now published your first crate! ```notrust $ cargo publish ``` If you’d like to skip the `cargo package` step, the `cargo publish` subcommand will automatically package up the local crate if a copy isn’t found already. Be sure to check out the [metadata you can specify](manifest.html#package-metadata) to ensure your crate can be discovered more easily! ## Restrictions There are a few restrictions when publishing a crate in the registry: * Once a version is uploaded, it can never be overwritten. To upload a new copy of a crate you must upload a new version. * Crate names are allocated on a first-come-first-serve basis. Once a crate name is taken it cannot be used for another crate. * There is currently a 10MB upload size limit on `*.crate` files. # Managing a crates.io-based crate Management of crates is primarily done through the command line `cargo` tool rather than the crates.io web interface. For this, there are a few subcommands to manage a crate. ## `cargo yank` Occasions may arise where you publish a version of a crate that actually ends up being broken for one reason or another (syntax error, forgot to include a file, etc.). 
For situations such as this, Cargo supports a “yank” of a version of a crate. ```notrust $ cargo yank --vers 1.0.1 $ cargo yank --vers 1.0.1 --undo ``` A yank **does not** delete any code. This feature is not intended for deleting accidentally uploaded secrets, for example. If that happens, you must reset those secrets immediately. The semantics of a yanked version are that no new dependencies can be created against that version, but all existing dependencies continue to work. One of the major goals of crates.io is to act as a permanent archive of crates that does not change over time, and allowing deletion of a version would go against this goal. Essentially a yank means that all projects with a `Cargo.lock` will not break, while any future `Cargo.lock` files generated will not list the yanked version. ## `cargo owner` A crate is often developed by more than one person, or the primary maintainer may change over time! The owner of a crate is the only person allowed to publish new versions of the crate, but an owner may designate additional owners. ```notrust $ cargo owner --add my-buddy $ cargo owner --remove my-buddy $ cargo owner --add github:rust-lang:owners $ cargo owner --remove github:rust-lang:owners ``` The owner IDs given to these commands must be GitHub user names or GitHub teams. If a user name is given to `--add`, that user becomes a “named” owner, with full rights to the crate. In addition to being able to publish or yank versions of the crate, they have the ability to add or remove owners, *including* the owner that made *them* an owner. Needless to say, you shouldn’t make people you don’t fully trust into a named owner. In order to become a named owner, a user must have logged into crates.io previously. If a team name is given to `--add`, that team becomes a “team” owner, with restricted rights to the crate. While they have permission to publish or yank versions of the crate, they *do not* have the ability to add or remove owners. 
In addition to being more convenient for managing groups of owners, teams are just a bit more secure against owners becoming malicious. The syntax for teams is currently `github:org:team` (see examples above). In order to add a team as an owner one must be a member of that team. No such restriction applies to removing a team as an owner. ## GitHub Permissions Team membership is not something GitHub provides simple public access to, and it is likely for you to encounter the following message when working with them: > It looks like you don’t have permission to query a necessary property from GitHub to complete this request. You may need to re-authenticate on crates.io to grant permission to read GitHub org memberships. Just go to https://crates.io/login This is basically a catch-all for “you tried to query a team, and one of the five levels of membership access control denied this”. That is not an exaggeration. GitHub’s support for team access control is Enterprise Grade. The most likely cause of this is simply that you last logged in before this feature was added. We originally requested *no* permissions from GitHub when authenticating users, because we didn’t actually ever use the user’s token for anything other than logging them in. However to query team membership on your behalf, we now require [the `read:org` scope](https://developer.github.com/v3/oauth/#scopes). You are free to deny us this scope, and everything that worked before teams were introduced will keep working. However you will never be able to add a team as an owner, or publish a crate as a team owner. If you ever attempt to do this, you will get the error above. You may also see this error if you ever try to publish a crate that you don’t own at all, but otherwise happens to have a team. 
If you ever change your mind, or just aren’t sure if crates.io has sufficient permission, you can always go to https://crates.io/login, which will prompt you for permission if crates.io doesn’t have all the scopes it would like to. An additional barrier to querying GitHub is that the organization may be actively denying third party access. To check this, you can go to: https://github.com/organizations/:org/settings/oauth_application_policy where `:org` is the name of the organization (e.g. rust-lang). You may see something like: ![Organization Access Control](images/org-level-acl.png) Where you may choose to explicitly remove crates.io from your organization’s blacklist, or simply press the “Remove Restrictions” button to allow all third party applications to access this data. Alternatively, when crates.io requested the `read:org` scope, you could have explicitly whitelisted crates.io querying the org in question by pressing the “Grant Access” button next to its name: ![Authentication Access Control](images/auth-level-acl.png) cargo-0.8.0/src/doc/environment-variables.md000066400000000000000000000064431264656333200210230ustar00rootroot00000000000000% Environment Variables Cargo sets a number of environment variables which your code can detect. To get the value of any of these variables in a Rust program, do this: ``` let version = env!("CARGO_PKG_VERSION") ``` `version` will now contain the value of `CARGO_PKG_VERSION`. Here is a list of the variables Cargo sets, organized by when it sets them: # Environment variables Cargo reads * `CARGO_HOME` - Cargo maintains a local cache of the registry index and of git checkouts of crates. By default these are stored under `$HOME/.cargo`, but this variable overrides the location of this directory. * `CARGO_PROFILE` - If this is set to a positive integer *N*, Cargo will record timing data as it runs. When it exits, it will print this data as a profile *N* levels deep. 
* `CARGO_TARGET_DIR` - Location of where to place all generated artifacts, relative to the current working directory. * `RUSTC` - Instead of running `rustc`, Cargo will execute this specified compiler instead. * `RUSTDOC` - Instead of running `rustdoc`, Cargo will execute this specified `rustdoc` instance instead. # Environment variables Cargo sets for build scripts * `CARGO_MANIFEST_DIR` - The directory containing the manifest for the package being built (the package containing the build script). Also note that this is the value of the current working directory of the build script when it starts. * `CARGO_FEATURE_<name>` - For each activated feature of the package being built, this environment variable will be present where `<name>` is the name of the feature uppercased and having `-` translated to `_`. * `OUT_DIR` - the folder in which all output should be placed. This folder is inside the build directory for the package being built, and it is unique for the package in question. * `TARGET` - the target triple that is being compiled for. Native code should be compiled for this triple. Some more information about target triples can be found in [clang’s own documentation][clang]. * `HOST` - the host triple of the rust compiler. * `NUM_JOBS` - the parallelism specified as the top-level parallelism. This can be useful to pass a `-j` parameter to a system like `make`. * `OPT_LEVEL`, `DEBUG` - values of the corresponding variables for the profile currently being built. * `PROFILE` - name of the profile currently being built (see [profiles][profile]). * `DEP_<name>_<key>` - For more information about this set of environment variables, see build script documentation about [`links`][links]. [links]: build-script.html#the-links-manifest-key [profile]: manifest.html#the-profile-sections [clang]:http://clang.llvm.org/docs/CrossCompilation.html#target-triple # Environment variables Cargo sets for crates * `CARGO_PKG_VERSION` - The full version of your package. 
* `CARGO_PKG_VERSION_MAJOR` - The major version of your package. * `CARGO_PKG_VERSION_MINOR` - The minor version of your package. * `CARGO_PKG_VERSION_PATCH` - The patch version of your package. * `CARGO_PKG_VERSION_PRE` - The pre-release version of your package. cargo-0.8.0/src/doc/faq.md000066400000000000000000000155161264656333200152610ustar00rootroot00000000000000% Frequently Asked Questions - Cargo Documentation # Is the plan to use GitHub as a package repository? No. The plan for Cargo is to use crates.io, like npm or Rubygems do with npmjs.org and rubygems.org. We plan to support git repositories as a source of packages forever, because they can be used for early development and temporary patches, even when people use the registry as the primary source of packages. # Why build crates.io rather than use GitHub as a registry? We think that it’s very important to support multiple ways to download packages, including downloading from GitHub and copying packages into your project itself. That said, we think that crates.io offers a number of important benefits, and will likely become the primary way that people download packages in Cargo. For precedent, both Node.js’s [npm][1] and Ruby’s [bundler][2] support both a central registry model as well as a Git-based model, and most packages are downloaded through the registry in those ecosystems, with an important minority of packages making use of git-based packages. [1]: https://www.npmjs.org [2]: https://bundler.io Some of the advantages that make a central registry popular in other languages include: * **Discoverability**. A central registry provides an easy place to look for existing packages. Combined with tagging, this also makes it possible for a registry to provide ecosystem-wide information, such as a list of the most popular or most-depended-on packages. * **Speed**. 
A central registry makes it possible to easily fetch just the metadata for packages quickly and efficiently, and then to efficiently download just the published package, and not other bloat that happens to exist in the repository. This adds up to a significant improvement in the speed of dependency resolution and fetching. As dependency graphs scale up, downloading all of the git repositories bogs down fast. Also remember that not everybody has a high-speed, low-latency Internet connection. # Will Cargo work with C code (or other languages)? Yes! Cargo handles compiling Rust code, but we know that many Rust projects link against C code. We also know that there are decades of tooling built up around compiling languages other than Rust. Our solution: Cargo allows a package to [specify a script](build-script.html) (written in Rust) to run before invoking `rustc`. Rust is leveraged to implement platform-specific configuration and refactor out common build functionality among packages. # Can Cargo be used inside of `make` (or `ninja`, or ...) Indeed. While we intend Cargo to be useful as a standalone way to compile Rust projects at the top-level, we know that some people will want to invoke Cargo from other build tools. We have designed Cargo to work well in those contexts, paying attention to things like error codes and machine-readable output modes. We still have some work to do on those fronts, but using Cargo in the context of conventional scripts is something we designed for from the beginning and will continue to prioritize. # Does Cargo handle multi-platform projects or cross-compilation? Rust itself provides facilities for configuring sections of code based on the platform. Cargo also supports [platform-specific dependencies][target-deps], and we plan to support more per-platform configuration in `Cargo.toml` in the future. 
[target-deps]: manifest.html#the-dependencies-section In the longer-term, we’re looking at ways to conveniently cross-compile projects using Cargo. # Does Cargo support environments, like `production` or `test`? We support environments through the use of [profiles][profile] to support: [profile]: manifest.html#the-profile-sections * environment-specific flags (like `-g --opt-level=0` for development and `--opt-level=3` for production). * environment-specific dependencies (like `hamcrest` for test assertions). * environment-specific `#[cfg]` * a `cargo test` command # Does Cargo work on Windows? Yes! All commits to Cargo are required to pass the local test suite on Windows. If, however, you find a Windows issue, we consider it a bug, so [please file an issue][3]. [3]: https://github.com/rust-lang/cargo/issues # Why do binaries have `Cargo.lock` in version control, but not libraries? The purpose of a `Cargo.lock` is to describe the state of the world at the time of a successful build. It is then used to provide deterministic builds across whatever machine is building the project by ensuring that the exact same dependencies are being compiled. This property is most desirable from applications and projects which are at the very end of the dependency chain (binaries). As a result, it is recommended that all binaries check in their `Cargo.lock`. For libraries the situation is somewhat different. A library is not only used by the library developers, but also any downstream consumers of the library. Users dependent on the library will not inspect the library’s `Cargo.lock` (even if it exists). This is precisely because a library should **not** be deterministically recompiled for all users of the library. If a library ends up being used transitively by several dependencies, it’s likely that just a single copy of the library is desired (based on semver compatibility). 
If all libraries were to check in their `Cargo.lock`, then multiple copies of the library would be used, and perhaps even a version conflict. In other words, libraries specify semver requirements for their dependencies but cannot see the full picture. Only end products like binaries have a full picture to decide what versions of dependencies should be used. # Can libraries use `*` as a version for their dependencies? **Starting January 22nd, 2016, crates.io will begin rejecting packages with wildcard dependency constraints.** While they _can_, strictly speaking, they should not. A version requirement of `*` says “This will work with every version ever,” which is never going to be true. Libraries should always specify the range that they do work with, even if it’s something as general as “every 1.x.y version.” # Why `Cargo.toml`? As one of the most frequent interactions with Cargo, the question of why the configuration file is named `Cargo.toml` arises from time to time. The leading capital-`C` was chosen to ensure that the manifest was grouped with other similar configuration files in directory listings. Sorting files often puts capital letters before lowercase letters, ensuring files like `Makefile` and `Cargo.toml` are placed together. The trailing `.toml` was chosen to emphasize the fact that the file is in the [TOML configuration format](https://github.com/toml-lang/toml). Cargo does not allow other names such as `cargo.toml` or `Cargofile` to emphasize the ease of how a Cargo repository can be identified. An option of many possible names has historically led to confusion where one case was handled but others were accidentally forgotten. cargo-0.8.0/src/doc/footer.html000066400000000000000000000005011264656333200163400ustar00rootroot00000000000000 cargo-0.8.0/src/doc/guide.md000066400000000000000000000364141264656333200156060ustar00rootroot00000000000000% Cargo Guide Welcome to the Cargo guide. 
This guide will give you all that you need to know about how to use Cargo to develop Rust projects. # Why Cargo exists Cargo is a tool that allows Rust projects to declare their various dependencies, and ensure that you’ll always get a repeatable build. To accomplish this goal, Cargo does four things: * Introduces two metadata files with various bits of project information. * Fetches and builds your project’s dependencies. * Invokes `rustc` or another build tool with the correct parameters to build your project. * Introduces conventions, making working with Rust projects easier. # Converting to Cargo You can convert an existing Rust project to use Cargo. You’ll have to create a `Cargo.toml` file with all of your dependencies, and move your source files and test files into the places where Cargo expects them to be. See the [manifest description](manifest.html) and the [Project Layout](#project-layout) section below for more details. # Creating A New Project To start a new project with Cargo, use `cargo new`: ```shell $ cargo new hello_world --bin ``` We’re passing `--bin` because we’re making a binary program: if we were making a library, we’d leave it off. If you’d like to not initialize a new git repository as well (the default), you can also pass `--vcs none`. Let’s check out what Cargo has generated for us: ```shell $ cd hello_world $ tree . . ├── Cargo.toml └── src └── main.rs 1 directory, 2 files ``` If we had just used `cargo new hello_world` without the `--bin` flag, then we would have a `lib.rs` instead of a `main.rs`. For now, however, this is all we need to get started. First, let’s check out `Cargo.toml`: ```toml [package] name = "hello_world" version = "0.1.0" authors = ["Your Name "] ``` This is called a **manifest**, and it contains all of the metadata that Cargo needs to compile your project. Here’s what’s in `src/main.rs`: ``` fn main() { println!("Hello, world!"); } ``` Cargo generated a “hello world” for us. Let’s compile it:
$ cargo build
   Compiling hello_world v0.1.0 (file:///path/to/project/hello_world)
And then run it: ```shell $ ./target/debug/hello_world Hello, world! ``` We can also use `cargo run` to compile and then run it, all in one step:
$ cargo run
     Fresh hello_world v0.1.0 (file:///path/to/project/hello_world)
   Running `target/debug/hello_world`
Hello, world!
To pass some arguments to your program, use `cargo run first_arg second_arg`. If flags are being passed, use a “--” separator to tell Cargo which flags go where, like `cargo run -- --foo -b bar`. You’ll now notice a new file, `Cargo.lock`. It contains information about our dependencies. Since we don’t have any yet, it’s not very interesting. Once you’re ready for release, you can use `cargo build --release` to compile your files with optimizations turned on:
$ cargo build --release
   Compiling hello_world v0.1.0 (file:///path/to/project/hello_world)
# Working on an existing Cargo project If you download an existing project that uses Cargo, it’s really easy to get going. First, get the project from somewhere. In this example, we’ll use `color-rs`: ```sh $ git clone https://github.com/bjz/color-rs.git $ cd color-rs ``` To build, just use `cargo build`:
$ cargo build
   Compiling color v0.1.0 (file:///path/to/project/color-rs)
This will fetch all of the dependencies and then build them, along with the project. # Adding Dependencies To depend on a library, add it to your `Cargo.toml`. ## Adding a dependency It’s quite simple to add a dependency. Simply add it to your `Cargo.toml` file: ```toml [dependencies] time = "0.1.12" ``` Re-run `cargo build` to download the dependencies and build your source with the new dependencies. ```toml [package] name = "hello_world" version = "0.1.0" authors = ["Your Name "] [dependencies] regex = "0.1.41" ``` You added the `regex` library, which provides support for regular expressions. Now, you can pull in that library using `extern crate` in `main.rs`. ``` extern crate regex; use regex::Regex; fn main() { let re = Regex::new(r"^\d{4}-\d{2}-\d{2}$").unwrap(); println!("Did our date match? {}", re.is_match("2014-01-01")); } ``` The next time we build, Cargo will fetch this new dependency, all of its dependencies, compile them all, and update the `Cargo.lock`:
$ cargo build
    Updating registry `https://github.com/rust-lang/crates.io-index`
 Downloading memchr v0.1.5
 Downloading libc v0.1.10
 Downloading regex-syntax v0.2.1
 Downloading memchr v0.1.5
 Downloading aho-corasick v0.3.0
 Downloading regex v0.1.41
   Compiling memchr v0.1.5
   Compiling libc v0.1.10
   Compiling regex-syntax v0.2.1
   Compiling memchr v0.1.5
   Compiling aho-corasick v0.3.0
   Compiling regex v0.1.41
   Compiling foo v0.1.0 (file:///path/to/project/hello_world)
Run it:
$ cargo run
     Running `target/hello_world`
Did our date match? true
Our `Cargo.lock` contains the exact information about which revision of all of these dependencies we used. Now, if `regex` gets updated, we will still build with the same revision, until we choose to `cargo update`. # Project Layout Cargo uses conventions for file placement to make it easy to dive into a new Cargo project: * `Cargo.toml` and `Cargo.lock` are stored in the root of your project. * Source code goes in the `src` directory. * The default library file is `src/lib.rs`. * The default executable file is `src/main.rs`. * Other executables can be placed in `src/bin/*.rs`. * External tests go in the `tests` directory. * Example executable files go in the `examples` directory. * Benchmarks go in the `benches` directory. These are explained in more detail in the [manifest description](manifest.html#the-project-layout). # Cargo.toml vs Cargo.lock `Cargo.toml` and `Cargo.lock` serve two different purposes. Before we talk about them, here’s a summary: * `Cargo.toml` is about describing your dependencies in a broad sense, and is written by you. * `Cargo.lock` contains exact information about your dependencies, and is maintained by Cargo. * If you’re building a library, put `Cargo.lock` in your `.gitignore`. * If you’re building an executable, check `Cargo.lock` into `git`. Let’s dig in a little bit more. `Cargo.toml` is a **manifest** file. In the manifest, we can specify a bunch of different metadata about our project. For example, we can say that we depend on another project: ```toml [package] name = "hello_world" version = "0.1.0" authors = ["Your Name "] [dependencies] color = { git = "https://github.com/bjz/color-rs.git" } ``` This project has a single dependency, on the `color` library. We’ve stated in this case that we’re relying on a particular Git repository that lives on GitHub. Since we haven’t specified any other information, Cargo assumes that we intend to use the latest commit on the `master` branch to build our project. Sound good? 
Well, there’s one problem: If you build this project today, and then you send a copy to me, and I build this project tomorrow, something bad could happen. `bjz` could update `color-rs` in the meantime, and my build would include this commit, while yours would not. Therefore, we would get different builds. This would be bad, because we want reproducible builds. We could fix this problem by putting a `rev` line in our `Cargo.toml`: ```toml [dependencies] color = { git = "https://github.com/bjz/color-rs.git", rev = "bf739419" } ``` Now, our builds will be the same. But, there’s a big drawback: now we have to manually think about SHA-1s every time we want to update our library. This is both tedious and error prone. Enter the `Cargo.lock`. Because of its existence, we don’t need to manually keep track of the exact revisions: Cargo will do it for us. When we have a manifest like this: ```toml [package] name = "hello_world" version = "0.1.0" authors = ["Your Name "] [dependencies] color = { git = "https://github.com/bjz/color-rs.git" } ``` Cargo will take the latest commit, and write that information out into our `Cargo.lock` when we build for the first time. That file will look like this: ```toml [root] name = "hello_world" version = "0.1.0" dependencies = [ "color 0.1.0 (git+https://github.com/bjz/color-rs.git#bf739419e2d31050615c1ba1a395b474269a4b98)", ] [[package]] name = "color" version = "0.1.0" source = "git+https://github.com/bjz/color-rs.git#bf739419e2d31050615c1ba1a395b474269a4b98" ``` You can see that there’s a lot more information here, including the exact revision we used to build. Now, when you give your project to someone else, they’ll use the exact same SHA, even though we didn’t specify it in our `Cargo.toml`. 
When we’re ready to opt in to a new version of the library, Cargo can re-calculate the dependencies, and update things for us: ```shell $ cargo update # updates all dependencies $ cargo update -p color # updates just “color” ``` This will write out a new `Cargo.lock` with the new version information. Note that the argument to `cargo update` is actually a [Package ID Specification](pkgid-spec.html) and `color` is just a short specification. # Overriding Dependencies Sometimes, you may want to override one of Cargo’s dependencies. For example, let’s say you’re working on a project, `conduit-static`, which depends on the package `conduit`. You find a bug in `conduit`, and you want to write a patch. Here’s what `conduit-static`’s `Cargo.toml` looks like: ```toml [package] name = "conduit-static" version = "0.1.0" authors = ["Yehuda Katz "] [dependencies] conduit = "0.7" ``` You check out a local copy of `conduit`, let’s say in your `~/src` directory: ```shell $ cd ~/src $ git clone https://github.com/conduit-rust/conduit.git ``` You’d like to have `conduit-static` use your local version of `conduit`, rather than the one on GitHub, while you fix the bug. Cargo solves this problem by allowing you to have a local configuration that specifies an **override**. If Cargo finds this configuration when building your package, it will use the override on your local machine instead of the source specified in your `Cargo.toml`. Cargo looks for a directory named `.cargo` up the directory hierarchy of your project. If your project is in `/path/to/project/conduit-static`, it will search for a `.cargo` in: * `/path/to/project/conduit-static` * `/path/to/project` * `/path/to` * `/path` * `/` This allows you to specify your overrides in a parent directory that includes commonly used packages that you work on locally, and share them with all projects. 
To specify overrides, create a `.cargo/config` file in some ancestor of your project’s directory (common places to put it is in the root of your code directory or in your home directory). Inside that file, put this: ```toml paths = ["/path/to/project/conduit"] ``` This array should be filled with directories that contain a `Cargo.toml`. In this instance, we’re just adding `conduit`, so it will be the only one that’s overridden. This path must be an absolute path. Note: using a local configuration to override paths will only work for crates that have been published to crates.io. You cannot use this feature to tell Cargo how to find local unpublished crates. More information about local configuration can be found in the [configuration documentation](config.html). # Tests Cargo can run your tests with the `cargo test` command. Cargo runs tests in two places: in each of your `src` files, and any tests in `tests/`. Tests in your `src` files should be unit tests, and tests in `tests/` should be integration-style tests. As such, you’ll need to import your crates into the files in `tests`. To run your tests, just run `cargo test`:
$ cargo test
   Compiling color v0.1.0 (https://github.com/bjz/color-rs.git#bf739419)
   Compiling hello_world v0.1.0 (file:///path/to/project/hello_world)
     Running target/test/hello_world-9c2b65bbb79eabce

running 0 tests

test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
Of course, if your project has tests, you’ll see more output, with the correct number of tests. You can also run a specific test by passing a filter:
$ cargo test foo
This will run any test with `foo` in its name. `cargo test` runs additional tests as well. For example, it will compile any examples you’ve included, and will also test the examples in your documentation. Please see the [testing guide][testing] in the Rust documentation for more details. [testing]: https://doc.rust-lang.org/book/testing.html # Path Dependencies Over time our `hello_world` project has grown significantly in size! It’s gotten to the point that we probably want to split out a separate crate for others to use. To do this Cargo supports **path dependencies** which are typically sub-crates that live within one repository. Let’s start off by making a new crate inside of our `hello_world` project: ```shell # inside of hello_world/ $ cargo new hello_utils ``` This will create a new folder `hello_utils` inside of which a `Cargo.toml` and `src` folder are ready to be configured. In order to tell Cargo about this, open up `hello_world/Cargo.toml` and add these lines: ```toml [dependencies] hello_utils = { path = "hello_utils" } ``` This tells Cargo that we depend on a crate called `hello_utils` which is found in the `hello_utils` folder (relative to the `Cargo.toml` it’s written in). And that’s it! The next `cargo build` will automatically build `hello_utils` and all of its own dependencies, and others can also start using the crate as well. ## Travis-CI To test your project on Travis-CI, here is a sample `.travis.yml` file: ``` language: rust ``` cargo-0.8.0/src/doc/header.html000066400000000000000000000031551264656333200163020ustar00rootroot00000000000000
cargo-0.8.0/src/doc/images/000077500000000000000000000000001264656333200154255ustar00rootroot00000000000000cargo-0.8.0/src/doc/images/Cargo-Logo-Small.png000066400000000000000000001614701264656333200211430ustar00rootroot00000000000000‰PNG  IHDR2µ6ŠVâÿIDATxìÁ  û§¶Ç €š³c°$AÆ¿ϬζmÛ¶mÛFlŸâœmÛ¶mûùÓU·:áë_ò-f;WjføÃg¼ç€ÃîÚû¡ÏîÜ÷¬{YiäÁ£³vü_¥RÿÿŸ¦RœXzà¨õî>ðÂ;÷¸}¼}'¸}gÿüŽÝãÀ]û=:pïaŽ<|ü íø¯¥Rê¼ü’ÄÌæ-Zßî(kì"k.еÀ ¢E¬AyžƒÊ˜ÊÃ*U®Yõü¬T{ˆ$É¿ ÑGO&Iþ­‹É‰ =Ö÷°¢±®Çæ$x ‹íÌȃ¡ÖoBÀ!'d%\%WVyJyõ¢W¯>'Iþ¾´‘%ÿœDl.é±±«¬¾½[sN+šE a9Ì{ïîàô„’0s¢õ>çyŽB§¦Ò¡zC^®ž¯¬òðßÙÒ’DíçüMIÚ¾¦“5¶”5÷ðØX9’Åx7HàîíÀqB™S´“ “PÇpp „ µC9!/»«üœ‡êEÊ«WŸ’$@ƒG’üI9Ö\…ØØÝccK+êÓÿ0¼‚ á¸;Et$A ÷þà¢C¤và€;à B˜;?pDò¦ò¸²ÊM!¯^@(=´H’Ÿ ¡‡~;È’d°´òœS4_Þ®Úü`×V«±´M2E„͈.w ÌÛ'd?mdÑAÖv掷‚~8‹9Äþó´, ½m-›„|²Yž{ý‹y^|éýæ½_×ãÀ8Id§žvI²ÞÊ ÕjñÃõ&©¿²Ghlè±5¹x›AûB÷‡Ž»Ó‚ºáЊü4¸¬FnŽy`ætôß·ˆžOFiòÙÚÍNV›–R^⛆óþ@ý“ß½ú‚Ën¾xš +ÑZk­ÍÄ–l»ÎÜ‹ì±ñÜ;;úÎNÞüj> À‰±@8îÖ²Ld ¸ ö×+s'BÑœH€ÿt;iîýa%JyÀÝ1ÿé‚,€›aQV%Ôfè°¬6®M°BVêžiy•ºOZ¼òîØc7ßõÄ…=úô ÀJ¢e–]މ'©ÅϧÚzy7Ûiƒù÷(ë»5-6ÊÂ)гîÖ»­“(̺ïß³sVQr$ÉšþÌÜ2³J*5J¥æKCËÌÌÌÌÌÌÌð¶/Ë8<—™™™‡©™„U‰á°uíøñSZæ­îI›ö‡ôÌ E ¾ùóßB2 ¢B SXÅjzgï“’ÁËÞs=2‚½(TDÀÀE"¡¸î ¯ÅUðs$RØbËÉ:IqmÇlvÀlqH;¿Ý¼|;ÞüÆoþöwü‡·½ë¯§ìë¢üµûZöõ‰Q?áÓzýÁ=ÿã×/Óoýi?òÇü*âp¼Ý.™Æ#B¯Rr_¦²0•}ÄöÈrÌ%äZöcf {YöU±ª–´\[ ”Á®¥~Nsx ðÚ]±½v°½ƒJ"ÇÀn ŒSDÔÑw óYƒkZpž˜›ÕšÍËÏ2Þ~úŸríΟø òÓ~á·½÷ÆÛŸxä¡we_¯ë’oçïæõ]ûzâúÑq˜¶¿:‡á·>pÉýøaØÊz½1õ%d ã*pÎ÷©°bJH LRm¦Áì\³¾@°ÚÆ3*˜ª“(3똓YC?ÐÔ—ô"Ú‘âˆÄ䉜#»!0Œ ´ÞÑu Þ7ˆë߃´ìvkÖ·ŸguëiÒp‹Y+ÌfýÙšÑ÷3¦ì7Sô_†ëþ½ºþ+ ¯ÇÚb_9²}M!¶)î~¦cúíÄÝ/ŽÓpÄÉ,£–¾U)SA"R@„U}Ÿ €s!@ µyÙÒl€«{h)%ÈU¥™á×ù1ÚBJfHq lw1ÙµèÛ†öl‰zÐñx}ò*Ë3xëWhedÞ7:§JÆ Š ¨+a[×¢®ýpvý»ÐîÀ‡xÝÔ¾äæ7ü^WµŸwü”v¿!…ÝoJqx“ä Ré{‘q%Þ€@®=,!fê©dJP„˾ˆ¢ 5:‘²ý\N`”ÓI[1Dœd\Ûãfã~È:ö ÎA H¦¡XG¡ñ®@ɃkQ?#‹²[ÝåäÖ³lî<aÉÁÌq0ïLñå×HÙŠ-6XHH™ hp¾Ûfí¾íÿ®ûR`Ík¼ö Û+²×¼â¥v¿@ÓðÛcØýœ†Y JÄ+@K ­Š­”îQd¶çJ˜5Æ; v¶îý*è (¯œ \B"‹ÃuWLy5×?7hå¸CòD )¼ˆ)›ul[s´Á73SQÓ°cyçNo>CØÜ¢k"—göù’5«6ø|Å¢™Ælp=„0ªkPß’¥û˜øþÝh÷vàý¼Fk²} ö5[9…Ãî7Çp6ï‡ÇrÉé|d"×—SÈ;5j¤Âì¤í×¾TUƒA0e|Q>Þ )—†)ºlöÑ>ßΑÙUÜⳑ"bðJa°Aò#Ó 1™ÕlgJÉySJˆŸÙu7§7¹{ãi†Ó—hdÇÁ¼±&Fª’)¯ƒÔ=Pƒr îÚ~ÎeEÔ~mD»]ÒökAþÝì±_øÅÀŠ}½fJVý<öõÚ©æòSžþà¿ü•aXÿ¶t6ï˜ãè…€’È) rQH“òJFÕJEª 
£îØ2pbSØ>"%¸J-•„Sèî§ý¡ÐêÁÃÖŒ7p…’¬qoÖq ‰”M}Ñ4Þ–úçf¦à¶Û§7Ÿcsç9˜î2ï„Ù¬)ð*é‘{& p©ß5%/¢pTÄö2ÔëÔà b*Íù–Œ¦½ïMïiú1o~€}]ø’íó_þ.~e=±}öËÿò´|îWå°y€láP ­æ”«ÒQ©½¢˜lÏ †Pí%ŒêÝùQ!*ôêP¤©-È”Ïæb3Ð\BÇ´‡×qí!`{rLÆ” ÀÔÔ—7ûèMyu ašXÞ}…åͧׯ2oó¾1È•ª*J”Ú S1%YTV½G hÜûP@TÈÉöËÏIµÚØ) TíúâtjŸøÚæñïÛÌîsi·dºóAöuÁK\ûÒ·ÿ“4n—¿£qéJãAs †ÃIrµ€¹üƒô^k¿¨hCP„˜ªÍR)JM(¶3×p1(D£HÒŽvñ0í¥G‘î~W¬ãëeƒÃí~o¹/¼okl"ea³¼ÃòösŒ§Ï£iͬu¹ÆîYÊ­ ¤Æ`êHTNÜû°ýªÊœ³û/½±²_@í½üR2(—ß“bŸBRSg®éí°Ašƒì®ýÔŸÜd_®dóñ/`_»ÜÁ#×WzÏ{·Ãxy·¾Ë´=ÁibÖwxçÈi$—'R!b. ªš¢œ(B ’ªzlG+Ø„”¢ÙBŸÝg³Žº¸Šºžœ&âd‰{T ^ìÆH´Þ—0ï[‹M 6JD’†qر¾û"ëÛ϶·˜µÉÔ—÷¾ª«”j¯îÞ¡ô ¬¤ Ÿ³¬Tp‰P`–Êw¬ûéT!-õ}{‡oZ»gmç$i²h#)³ÌÃÉצ°;v¾ùܬÝ>h{J^ýš?¾.v]zÓï<^þà¿xo=òí‚q‚ÝvÍvuˬ\ßõÌç3„Df9 ”€šûªMòb¥jÿô’5$kÈH³ ;<.‰û#Ì–Å-’F$Û‰#»q"†"ô§kÄ5¨ëÀu¦úÖ'7YÁkX¾„g˼ó¹¶qH}2•\ªõ<Ôl©Hµ’õð¡öÊ´°XO["µx~®³‚Ð{-ª´äãÔƒ¶h3Ëâz2N„Ä4nmÆS™ì;fQ"Íí¾²íæoÕ¦ÿr`ÉÿÏÚ?êz¿¸è5òWßù®üÞ0펼oÁÏiç÷‘¤³ÃêäÓî¯0›ÍqÞ#9@!GÌf)5ß ¢eÏàEN Hvrèg ZæK´5Å'i@ä°Ä}‰Mô­7ëè¼iìÔq »§·žg}ëYÂp‡y§,f­Á«ØÃj¡ª«šOK€+[œkÖSy<ÖþXé R•VLÙVQbl¡XLç<ªž,×ÌÈÚ“µÍ9'‘4‘–§rX ]ÁŸ”,ήa‡Ú=¶Ÿöï¾—}ýPd_û§¸Øµ¯ÃOýµÇ·¿ãï¼7 §G dœ;Q45Y@§u =ÆÚ×*'™çN*ÏYK©}¶òÁâ®Á5]ŽÒ£¾Q‰a"ÇÁ–d˜Yëä„+1©@4HjQsbŠ.¨ï¾ íÞ&®ûüýS8þߕܨ?!öÂ×Á'ÿªãÛßþwÞ;mïy‰„”hšŽ˜ß-H´´³ËÐ"Í!DÖËÛŒ›”È|>§ëZƒVš›×ˆkæØ¨ÐâQ\ŸA€´ƒ¸35’bÀNK^¬m¼-Q:³aä¬XãþÖ3lN^@âŠEç `%qO(ÖW@Õ*J @ è u?çªÈjO«(¸ÚØ/Í|ÛÓ:ëy^âPuû¤}V?£iMÍJŽ;0€ìs°ß§l`LuX^Tˆª"õê}G«C7k¶7ÄõŸ+gPÃõßþoÖ>Ùÿ¹Øµ¯ùS¿ìøö·ýí3Ý=ò TÞ{ëeyß´5¦Ñf6‡¸þ2hÇv³f·¾Í´[Ñw-óÅA«9| ·¸fŸÉacJÓ†G¦ib'R„¦Q,2Q¬£ÁÏ5LÖÓÛ/p÷Õ§IÃm]¶q¡¦$î«…­ƒæœüuµ€€Y‘ºW§ js_ÄbŠ»7/eïü©d6€:u`£Wt–}¿0›Œ¨„iGwHqS`9EûýL)A¹¿ÂWÊÖ½ë®ßQÊ硞¨"dÎÙô@×}_’îâúÏže_ŸˆŠl_‹Oú•Ç·Î,Æd`™¦`§h¹«õ ¦–ümpÝ%\{É>³YÞ¢»ú“høátÃç5q¸K—ÄqðÛ0MB¤õb0p¾Á·fÁ¬ñ¾>¹aðÚ-_¢Óƒyc÷ ¢5*Ñ8½ç{Ê”ÜÄL…š(Ró_˜²É5­ïœVkx^‰¯œ)Uâ&(%¡øþ(KsÉ¢!$LƒõúˆµïEJ¯bÁŸ›Ûj ųÁçM!V«õ8¸äù1û‰ú~©¾ûRÜì­Ëæ _lÙ×ÿ‘’§¿õ_p±k_=úfÙ´;™˜"3 3 DiOZ¿ˆN§Hék¥\ç5¤60ך‚fl/$êÈT«ÎŒ¨ý:‹¹¤2«šPMvò›âîSúéæ_ëü¹Ô}ݺ}Ã[×î±/î°¯ÿé’÷~ëçr±k_O<òÀñÍoý›gŠìäÈËT‚žŽ÷~|IJð†Çæ,f cˆˆ(Kè×úhžî±_Lwé1¸‚ŸÚµg½vA†õ ߆ 7iudÑCçA$A±ƒ)f2ç+ãTAJ,T;VǤÎÛ@©ŸåZ"KZO ïé±ÙKªU#kK×/²[gwð¸èüªdaX3lî§Ž/ ( òYäœ#as“áô9òöU4ä,E1I͵^ëˆV•iPOJˈ NU*Øš}«=5ªÕÆ{‡¨#£ö׿4¶O|ö¶yü­ùÊý. 
²¯ÿ¡’|àû¹Øµ¯k§Ç7¾åo¾7w”` »uyúå-dxâZǵûgÖSuö¾«}ªŒÁ-'ƒQ{ýçÓ^#loã4“|wíî£Y<ÄË7îòʾ˜Ë݈æ‰YßÒõ=j‰azÎ.¦³´úŸ5Ãëóý…ṵ́úZ@êX”+*¤a³)9ë×Ñ´³L3Ï178ߨ¾wéI˧¥ñ„4œ§ã°a·Ý„H¦oÕ†ÕÚ ÍUoy±¸~™iõÓæ6BÂ9o*­ÚáÞ˽Ãç@J|û ç\­kˆ%Ó¦àŠE>?:¥fU-à¢Ì¿ãÛÞwóíÿìßøÙÀKìë¿Yîã?ðå|Ù¾g¿.ðúu¿öWnžûš?®WIÄh½- 3L.4(@ ‹B¦FlÃ]zÂz_i¼c§•ãn…¦‘wˆ@œú|ƒºf'Ÿã”X­NÇц¦›á´1He2Ô§OhÈ.U§€:ÿX¬$”Û-ýzYTœ€x»×¦[dí.e×åäæ’šÃFÎ`ÌæÆpòÂw³ºó<)&\33 8É´ºFH91ØU°æ¾JDÒHNˆ³ào{ùq{nšsÁ-L[(ê…:ãY õ„µfÕðå{œ?uU5à•ïE-U±epK©äÖ"9gÍÓé#ŸrÕÿâßúKÞü»ßøÔo¹³×_ñ-O?ÿÊ­M<[Ü»öË c¼à7¹_ô÷ÿ–3}í‰a× ‘œ3]ëØn‹™ç¡#o ©³’ö¿:²c{[ãŸD›9qX"y N»ú8UÇf³f÷ê÷Ѱ2³ƒḚ̈ŸYo6lÖ+ƒUÓö–Å-ª%ƒ€« øÒ@©ö*\mim¨çúàCÄYZÞ7=Ò$=ƒ—¶—ÉxIqа=‘4ž i¤'‰FØ¾ÌæöGYÞz–qÀÍpÝ!â[¼:f3«hÏ:Û†q‚œP ¸ùÊ_ÿC?ó¥?ô~ìí³Å~ÙÂÿgÿöu¡«eõðcæg,Kñ@LÓdÄc¦hY}ìÙChÀÜm{A·ן¡»/‘¸µ{ˆ*8qˆÔìJOe[e(ªj…rÿUGJÞžÉ)‚ýï ‡’'²¾{šþ¯‹ïÿ’úî_ßðÎîãë yn..€â$‘K’âêZ˜õ•ú$åoŠg8îM¶¤F?h@Ä#¾jO-Ž|) A 0uÚ“&eÒ#Sr¤îõ½¹hœŸ?@/žpØoØoo8 ËåÙúÞ‰o%©wSE‡Ól7då/ªe ’AIÉMT”B¶Í€AQLÉÎkç¤ô±À³˜æöú3¶Ÿ}Ê[Ñ­>`•KOßF´‡80åMOûaFÚ0Ò4ÞïÁ‚ô~¤ŸNÜ¿d\Šî_(;cô;b:°BÖ¤ìN/…:TPˆ) ™âBHÍÿŠw ÒDŒÇÇøû]â›ßBÿ_]Óÿ$ü0ðŒoh‰þÿˆûHšpf' @ÝG©åw%g, ÏñÜ}òOx`ùþ¯ »üi ãKúÙ§˜Œ w´€T‡M*h“#é8“CÆ;’´„îŒÙì‚ùâËÎ$›4&t:JŠf̈CˆÎcŸÓ€óRÝ]•’ueºC~N,ªÙ"ÄêVagÁ€¼!w’BÆ㥭PÝ3lÿ7×7?Jê1»øˆåÅSúÙ ŸBî§Ù•qb˜Žx7Òo †x\hm¸Ð,Þ72î´}n*Ƽ(ˆ/öâŠàOî= \åÜuâ xWn§ú<Õé6ƒ*¨Fœ›ð2‰Nã/ˆiøÆu—³{ï×üYàçoïÿÌŸù3„óŸü5¾î/¤Ymžý£ßÇC/$’5õ™—õÆk$—(U§Xß{\c½«Í–=ì_‹Ùß íã` Säðâ?ÿJ*O*w¢ÞK&ÜúòÿÕ`mg%ª}BÕÍC§-ăHÚ‹è4ƒ¡Yè [×ã\C*†ˆ H0ˆ ;jYV³drvN9/ªƒÒoB…”ÿèkO}+„´f\ÆæêS¶Û êzBNḛ̀ÒÄömð–õóiCS,F–iߨ@¥9ûØ~&uL†G\½Ï“{¯’¦ZŠ¢°C Õþ?®fšùqÔ’[\æáu(žÃqj7›ÍÇ»/^þìú;aùaü±‹oÌõݾÞqûÿ¨ÛÁ¡83äŸuQêÎÇBßð€ø€â¬4ôû/)²ûär˜f«ÈÃ_TU@õ S’@Ž’‘3F ÐÌ.…Ÿ£XùiÀ&jàKÊ€(n@Ä{æÿxogx…›¨hšVÓÊFñ"Ã:‘%ÕÂùÊX¨+ÕA¦ «SAÁ²4æ!«Žßc÷ÙX‡ [¶2?—Ùü’† ß²ÒadYoŽ8?¿¯Ów$‚Y*õÝÃ#iÿ‚ñîS7â|qP§šÅH¦œejª *”6CT2á6@ÞŒTÌ…ííš):šÙ‹ÕåE;[ýªî›VfTùúÇ}(À)p¡(”Õfù÷ì–Z#÷Î È%Àˆ$<ˆ2n¿Â¥È4>gÚ­iŸc¡öÛì‚Âc‹@B‚föמ‘•4BÚÛçI‘˜ïÏ9Å!6Z9—Ò@ÆÞG|‹¦9£KxÚÙ §š&’]©k܉˜œ‚-DMhƒ7Ε~¢J*Ïä½P=Ç*h¤O Ð6‰o®_ssõ¿Yϟذ¤_=¢iVZ6íÁÀø0dnÚa"4B×4˜‚ kCÝü=fËè† iÿ9Óæ ¦Ã].©}1¢´ûÄ~ŃOdË¢Lí0"­•·1*›íŽýáˆøž~ñˆË³sˆG$­‰ûäšß°pª‰¯÷uUð¢0S\—÷@5¤N¾jä×N4_`ÂeÉô…õ·i}ÌÂçÓÿšHq\Iwñ“iÎ~2ÝüŽH<Þ17Vb’".÷ª!åìB€Ë"4k~ÒIf$(åu*† 
œX飜dõ3PGýâ`/ª\#‚ñ¾óGøö+_â`ý¯1µ'ŠË`Q-yêÎLÕ“ HeZ'(Æ53<à3UœÇe帜B¢ïgˆ÷¯8k|ÚÛNO$ŽÒ¸’ÍéVº²ûx›a•N&†œ”–å÷"ÒvùL¤¾_à´´Ì御Y<¦?Â5KÍBò=¢ ‡fÂiYÂQ@ÌAuTuFÚ,<®ÜÇÛ2wJaŒ¼¼;Ù„>ås/‘Æ ø¥5Êýê#“ áœs4óGÆÙ:Þ=Çï>3{šŽ$Må%{ç+`@2؉ "åüMNV¢š;z7ô¢ô¤Ð4 :!¥ ûÛÿ΋—ß2`Z>ø˜Ùù;6ÀÐx Ä#]Æh`;î'Bp´}"¸Hpíbû>Ãüçqûù¥iþ=?ÂI‚iô~ÆéF5‰$˜FÔ¹,U;]Ág!÷@vo5WM¼P” F”Þ€³2¨’¦<¯Oä/##&”îW¶ð·›?0Ð4˜ÜF4¢)1f¢¥wR}Ãê~I!É¥¤FìžF;°÷“*Ó&Úà Lr[†ÞGI40{‚m{š?4X³Z5!ε!ù9îì§2ô3I‡+†Ûϸ3ŽiÊC†ù·áÝùÇœ=ú˜~y‰G‰Cb³Ùlׯf_~v1£m…»W7 wß#8EÇ=IÓ‰ëTŸ´Ï_îÍ|ó£§sT3Iyk)Ù=ÝÇ©üȲ)¤ ”DÊ`£•¥ÕÂÀËB·¤_=%ôøÐdFúÎþ>i‚Â4w(T¢¦*XÌ)(µ„uR¢ðŸ4S@ ŸMIsÆ€•QŒ“Mç$ôtËéVâgHêHñ`tDcĤAc^pÒ–³ŽÐt6=•¦7€¹þâ;ܽü>ãÝg,û‰Å²¦Ø¢Ñ˜÷x‰HJԹܲg ´xí+ TþêÛO]W@]S–y"”²Z Ng] mÚÞƒÙ¯¿ÍóWßfðèÞùeFËèBâ Ï¬è¸Æ wLcÇ´M<Ü ÁƒFpAOL,Ãâ0&îvwÛ‰Gç­-KN“ßRÜÙ}ì÷û *dpª“ÈBT=탑2Ó?»¹×k¶zBÛ/³ÿþÁ–ì¦ñŒÄ µßV3²¶q¥„-ìó:•D3H½QJæÞ“e]^häl2’Âì‚~ù¹²æ¤8Ú~MtÄé„9i'xï˜÷õ $o9ßñóú‹o±½ú>nxŲ‹œ÷!´è”p¾ÉdÓ†”Úl)4 n$ˆÅ"çÔU£P8—9iä¬gŒŠP]`5©](ÕÅÂUo°õzø`´ ‡½-AîBGš^¸¼˜á§k+ŸÓfÃ4íý„Äš¦<°àÈY¹ìõíÀqHˆÀõzdµè²äé¾Gvo1~ßïû}üùßó“y±½ÂB,ÐæJÉD~~Æ|ñ˜Ùê±e_¤‘iØj…¼Ì"ÇРग¤JÌà}É6†˜2‰”²ö,¥âH‘¹c2¸Æ8&ÛTÞ.žà"ý@ T9Þ L$3C9ŽȚh±èŠ!®³ì-ªãöæš›WÿÃ<Á:Ùópæi H—hD5§#ˆÑ8pðm‡Î\"0Z‚DœS„"ç*¥¦s5ãI(B¥d Zˆ¹I+‰ eM§™Rv¦w4àÚ­o³já‚‹'F9lïhÂ?¦²HÇkD¤¨ˆ.ò3E€ °” {]ÏA¹]´AxúhÆÓ‡©´î3²ûx‹q{{ R8!²¢Š`688?cvöó»ÚÙNÔl{tÚ!$ÄJ£LÍ@‚Q$ÊDβ+jÏHiÊÓDW¨µ^½ÀðÎvžmy¢¹D´³¸Å¸ù»–MY©7í+«õtLb9ïq^ÁÀO|Ëaàú«°»~†¯XÍ„åE‹s ’j%Æý$å¦a ©§iLʃ:OðˬV0±7) €}¤’j£V‘¶H¡¬Ø•3S»¦Λ†e`SL&: Ñ$Ï.>`¾XØ$XÇ׌Û;ÆÃY~D¤1›ñ4npDP ùYDNJHDªg"v¿¿» k…&øb¤€ _—¸²{­eeò[öÃHJjžú«³'6}t¾±2ÍXÝ’’³†Dõ³½Š‚WÔÌ «¢êv.Yïg憹„Ràt¡-8ƨ8Þkzlòxö!¡€ F=`¸…42L#ÃK×4eÛä ªÃµsЛ›×\}õ}¦Ýs–Í‘'Ëïûâê1M±Læ/%û° dÉÁ¿jg™˜H ¨uˆ›Y¦§q(j%(ˆšœ]Ž1‚ó‚Ïœ!O_í¼µ¾ˆãp8²»½A¥a¶|ăKÚF÷·Œw¯leé€ÑEÆÉ~4F(ƒì"—ÿÞSÊ]*ï¹°½·}™(%³¬’{ {Ûqb)÷›œX㞨 Ò0¿xÊlõŽ5ñˆÓxÜãB¡Y‰Üh‡’EÕD¤R òû“TªhÉÆ² CÕt&Mh‚àÁa{5ÍNÚYïë©Ig„É §haÀ3Œ‘˜Ô²†Ù¬+âñÐÌP×ÚÆ¦ë/¾ÍöêÞr1ƒþA‹ó‹ ¾à䄳–Ÿ·º\ Y‘²Ex¡O$R:2Gt8Ð +9SêÁH2PcšF’FȧóuúŠlëÞ5ö\›Í# wKÎ?µÁ„KG¦ãW ›iQ- MBBS„¢ P°êvoW™<`¢XcM¤þž IÆ;¡®ãS½²ûx;!(‚Î'Úù%ËË÷ ý%mÓ‚NÆ›R#)•Í×EK˜Œ6 àÀIfáK±¾)N ]SMs)›}ö¥”¤1h~' bÝ¿xjv7¾¿DÁhñp•{_Ñú^ã˜ge‹¶±¦¼ä àØxñê·/¿î¿bÑyוּ쫬S˨üÅÄ®ì[üÉRÉF C¥ˆÁU;Eu"ŽÑˆ¿Î\è,TZë5*GœÚ´“8EQuß4ß“°Û8·Vw‹wXÍWˆãí5’– :&Lzå 
¸ª'YáPp!.ØßPÎt§(žÕŽ‹:ªðW 0ÏõǨ´M•­É=ýâ-Ç}]iŽË‹§fBhà•Ž0­mº'N'à³ÅMùò{êÎDr_«YÔ/@DAÊô.3Ü•”¨Ž¬š'ê‘þ1íêӊ묹ž†;H#)m‚q²ìËJžÅ¢£ 6y4#ÇD`½Þpóê[ï>¥çއ½Ð\6(3Tëf!ñW*ÚD½G ›$’<‚1Åÿ¼eiÓq@ÜÞÄùŽ®_ º´LWõ@h>ØžvÛ-1‰ñðοoÀ‘†5zxn%¤c`:ls=€ÉX)÷”ñàâZT&$8D.Ì-»¿®›–R.ªKv}6U[Øì½7mgRû[ù;úM2å>¾æ¡$3-lB¡G‰"# '«LÜ ÅT<À¼ƨ4ž“ýŠcLP—ÆR ¨_þ)—´0§½xß²/Ú3 è¦=78‰r‡a²æ½â˜u˾šñâ{ËÎ^~õ‚íëg¤ÃW,Û‘‹EÀû.8A€„ž#æaBñæ§4âe)¨2°ƒÆÌó T–¿R¶œOÁÚ7ÉÖßµËwwëëýÝÝíèWïÒÏæxF¦ã ÓÝÆ¶L9Fâp: à=š8_¼Å€S2« –™JœáTðm‹а`»¹%&pumû›ZÜ¢ö—eWHu©…«ðM Ç}|í#¥ÄêáO`õèc†I¹½]³ÙPßáÛø“zÀá#x‡BeÛ‹TYP>I«¿}p(Ö/Ìð't)™g(¶uhöÞ/eùño¢}øsPדŽwèpÄ­MGonwÜnޤ«ÅŒËó…Il|6çFÒüä{ÿ›Oÿ×?äøü_rî¾àɹØ{ChJÆ8ÅT”µ_‡WÖt–g©f„3µQ‹<¯c+«äò¤• vBR!¿1÷Úv¶2“0#'9ߟdzGñðñ»ô!ýc¼û>qó:¼²lÔ&’iÍ,ÿà Û‘_‹Ý‡`³ µ]ú3ëïGô+6¯Ÿ±þö¿úˆx Œ‘"¡*l}¤žçÌÔ3:¾¡W@à>¾þÇ5.\\œ1­ÎÙo׬×WÆÁšÏzÚn‰#a"oqâ‡E*ŽŠËå')äloà•9eÙÚ¦]–ïÛåÚ3Ô†D7ˆNŒÃÄþ8)Ò|ÖÒv B@3ïkœ"·¯^°~õ}³{^´O–-ÞÏ(N(UTsY¤pmâ‹Pšð1ÕéluÒ°×Õp2¿ß{HJö s8ç­ïe¥›‘\‡ ÁäQrxEJ‚sŠK/i`Ú½&7¤é€“h|4µL(â]À•òœÚ«+Ëz±©ç®™“¤':á°yÁá³L¤¡s{ú x¹+CT(!Xh¾Pò‘Ô²S³@ä¾Gvo3„4îw/’¹‚®—,V?‘ãñÀîî5›«ÚÆGÉ·=iIiÄKʹÄXÄÒ¹dÔœ%Pü÷½SÚÐ ýSdþÍâÄ„äGÌÝ4LÆûŠXé¨ÐwÁV¬‰dâi3#i`»¾ãæÕwÙß|ÂÌm¹œyú‡-"-)Uz(>—*Æä—ï³”»«/Yç‰e· )`'dŽ\"gnBÛdÜb×ã°=•â3¯¬?G}ƒÁßqmÛŸ†ý^&û¾1T°MQ‡«ºMÝîÛ²äá °*XsÊu}]N:†I8|÷ï·6èÝÆ@1¸–) ˆ¦J·¨l´€¶@E2H€+’5{fÿ}¼ÕHi¼CâÅ×àç´ý9ý£§Lé)ÇýšõíkHGf³9m·@$™…²Ó çÁZ@q"‰ ³3Üü=˾$,„¤= 4MŒÓdMî1©ɬoiŒ6Ñ ¡·k&®¿|Îí‹ïÃñ%çóÄã-"sSHYDµ¢.Yõã‡*sŠ#Ep’ùjT·‡ÒwÒš¥! 
¿~{Vq/Δ*âg¸vžeYq÷‰{¦ãÑÁèjàèÁ9°>áÁ€]ˆ™kæát¿èi;«üþfˆ?ûû´ã̼ XÐBJ·àͨÿŠ1fJ ›Ô]÷Ìþ·÷.±#X3?·ao-C“fÉrþ€Ùâ'1Û»Wìnnhš`|M73Âg2.WÄ‘h$‚ï0«åÕGfc-â틚¦»L LFHF›XõÁúY.t–}Å主»æê«o1m>gîw¼3ó4«6÷ä²*ÀKæ¡eaµË¥¡«ÛŽT¨çS*¼±fâªÏV-Ѩ” žœí9Ä@×.Á5DÛà´aÜÝ@Ü“Æ56Á™1 ( TD²¿sº%üË«ƒ¹V<8 XÔÌ‹\âæg/ä—‹ôœ˜7†GÏ›Q½€ÿßᜠɯOKY¸ç‘ÝÇÛ‰ÒßJ ’ ]IqÄÄÄÃñ·èñ†äzsu½xðâßgswÍvsÅf³f1ŸÓus3iV„åG6ytí¤aCšÖV’FÛ°m‚m‚ñË]×â|c´ \g‚í×ÏŸ±½~F˜^ÛrŽùe ÒðØýfñ´P}hÙžæ©®ºs§~_$”ìÎZ=ÿuÔÂm“ZbÖž™sàÅ¡ ô4ÍŒ“&Lñ€ · ;+嘆-Ž„¦ä1š´ª–i§`bQ© ¹ßöòæÈõzàby†¯¯ }jϬþ·Zï_õÍ,N‘7bO÷y–È ÷=²ûx»QJŠ~]‡IéhÙx†qºŽn~Áb¶âìâ§pØíÙ®_pÜ{.~Êï°E¶ûxC<\cWq0Þ×á8c²¾ÒjÑÒ¶-x#­2FáúúŠõëÿaëÌ:ÙñdÑX©–’0%ÊnFÐÂïrÎe¿|È_úìQ–Ï€˜È„Õ¢/Ù1fa¶3Éjö «ÖÙ(Ù$Q<¦È८%¦É¦¬L™&a¼±…4"Þ3é,W‹hu¼™•é!ù6û‰Ã!2¥Ä«›§œF¾¿ê…– d¨9j¬ü|34Q"k>ëà¦U±sNÀOï3²ûx»QÌSf iöÄŠ)s¨ª£‘:}šÀœJwLÚÐÍÏaÉÃGOaþœýd®n^!:1k¡usF&Žã€¤À|–·‰Ã….g_nžÿ€ýMl÷JwÖ ô™ÛUJ—ì㯄œéiÌ'@ •b‘³®et¨Ü7êqÁyGR£šÆ”f $Il1éøƵÙ95Ð'D°BU$—eWÒ¨úÓð&Ð8Wãå]®ZÚÆU— ê{…S"+P4°§f¼±NËÄ)‚÷༠¼ªè€{cÅûx»QR¢€]x)%ž}yàÃ'3{-EÎLjõz¢2±GyMjf8 œ=ÝÓ­Ç#¶Nìê8Ò†–óÕc:?¡QÄ÷·7/¹{õ?˜¶_0ó‡l—ÓfÖ?e_%Ôr± XD-<¶ÌÌ—7ßï°ó²ù;8©ý±šKÌœuÅBL âð>І&÷ ç¨D’M™6ƶOÓ’íryîâ*V•)£P”œ‚–]U©š?g6H,fwõœ-:œ·Ä T0ÊÌúSöJÍ`SªbqxÄ’¾ ŠJŽ2‰®ÏPI¸÷SË·÷¡T‘vœ(×ÛÈW×—«†Ë³pòeó./IÑX÷)©‘ViΘ6ÏIÓŽÖ{.;¦EÇ~€ëm"„>íØÝ|Âîêøñ‹.2»ð8™¡äŒiªî°P—\™ N ð Ôx2çÙsÓº’b¾w/Ôr1;‰¸ÿÃÞU7²kËÖŒ)´˜Íò^þÌÌÌÌÌÌÌÌÌÌL33ÃåÇo²w“lb;†™‘ôí®éªS*† Ϫ:5FÑf£vŸ£Ö‘V¡â*wƒÏRÝ™blyÎ:/ÒÈ~m`Ž÷b¿&Ð÷˜+ Ìín,še4ºo—7F¬Úê¼ÆYî:¾‡à™*fb¦®WQv/`¼}qp{Úc¬\ÀÊÞxëƒ¯ÂæÛ_€¥øÐ¤n/öî[EÖXBòšePP MÄqQË*Xo; ŽK:1D°-…¨pšØÊûOËÓê&ãc@æ§ÙêPŸµö5ä ‡€¼WõPõ.NìÕöÕ`ÕxƒØ‰vˆ%% †õ*öðc+45Û¹ ]Óÿaš0ùòC`dzÆ«êjpSŸuÏ«y>ëÕŸÙwjR‘(ø1gÙ£`sãfitÏ–:9Óç\ïOô¤jÒ‡i¢ Ðj6ƒ§¼Â}f¦  õ/`´}~¼GÇìÇmïú>h®ÅVw„ÍŒË8M È ÒѵP…L™`™TZ%L¾³<ã5€Ü vrAqïÕZ…ÐqR7Sðjñ”qŸ­ ¶W)É;{¹8ᇓñïœCÕ»€j°Î쪾è3æê,®ZÍÓUÚ*©Þ!ÐÒ¤¯[ R Å{ãªéŒPB\_´™ÍÞ+W¸ ”ÞI¨Ï6VÖ!®z£§KØH²™<]Òn‡‡}¿øÜµœ—`¢¿>0í4ŽlaT,/äTÉ/´3)f®0åó·•.˜g2¿© Qϼ[.oð„kÇXììÇÊÞ[01ì_GÿúÚÍiþ°´[ ”*TŘ.ÝH4#Mº&×½÷ÁäËàjPöÊi6­gð+LaãšKh6Ñó™®ÃU»“k W"CÀcÉš’üŠ‘’=*+]FÐ’â]GÙ™p¸‰…ñ¹Á¢˜Ü;½9íïp»Så½XkÂ#)¶eÜÆŒ5ªW!PZ×S€d‹3«²(@4dÁ²µ¹…ÇZɦÿ¨G¶Í Hþðkõ¸Ä•ÇV[Xl;TÞ 5ÅÈ’üþ¦ž ðÆ@É£ö EïŠÓð½3ÈCV×pðè­h,ÂN„­ ¨ Çöh2œ*fʲQ•¶1‚ŠÔüd޾h9™skSÀTh»Í©Pwbh,4Ãð*BÿÊÞ21?¾Îñ’}ÁLÎ 
‚²‚X}•™ä|nbU°4n¦>}0n¦ÀRíärÞ«•Þ \h,ÊîaëX ‹ ?M%éÑà´°{˜rÎkù‘ñ˜³ì?ȹé8% 4n €Ã¨Çk—-ø4¿¿–â¥2¬ƒŸBà±iˆeŸú2?¸Š¢{ãí·Ã®Pµ¾zä&ì]» UlÐzýuZ­Î òÖ"5h1*>C`ãxèþ(sEÆð?‚Ë'ÖD6e^K«h­…ëd‚AêÜv/ÀOÆà—Æ›ð£.Ýãè+MV¯Uìï)i)Ò ?ØŠMm›àµŸ’f^²íµ·RcPÉ,TôÜÏöûÛÒëŠY7Y¥f}<­Y5˜fßÃcɲGþ ç ›³§¦×`&’]e3Jt—ËØ:›)]\%ÿ=øá&ªÞ¥‰¡5cû®âà±[ >Ý~­í.³`äÍe4Ú+Ô …¨IWÇÀrújŒ˜×àv¡fg™WFöµépeï,ªþfÌÅöÄ¦ì« û‚ÙÏ837W´R ©ebbS˜YÌÄ—ë§Ì ظ”N{WLQ§ÎTŸÄÛl1®ŸYˆP]2.Õ­Y  \Õ_DfCªŸç#›—‡£8H[dÙA ÇW›¬ÀiÅ0>™Ë³‰Qä —1”É _ƒ@D£Q6Z¿ ÏχPvÏ£˜°´8ZÇâBLó¥ÝˆqhN­‡þîÁµÐ^؃f{‘ìª L'ÍEƒÆÂA‚Wsq•¯Qî êGÙ=Ë•G?Úà!µðÃúLÇh´g³uQi½bújçhqvŠ 7ܨ dPQ•f%Ñ.. z€V• €Š=9p‰ iÑsk°Ìϼë¬Níá,s ›—H«™˜â1u@½I¹CL'°©ž@Àä‹í½ÈꄌŽû&êÌ­-À5Œ^MO3¥ TÊ]øáªÞeÛÓXÚY4cû¡ã·19`wwŒÍë]ŒJÇ­ÎÊ&\g6‰¯1›G®Ã×ìkâÊ’}UÅð½a¤FŒú±¨´>¦@'÷1êû;K6zÛg® <˜¡€ÔدVmŒS%KÀH‡ëYl3¨³ä0‰ˆmÚ#»xñpcÙÈæÅlM b\:PDÀ&“›i&Ÿak¡Bwý~T»×¦{²¢| ( îIœX ,5ë¦oÞSh[PêFŠ¥]àA`,­µc7cùÀ)”±Þ(ƒ[<‚|ñòFŽj¸‰²?e_ç&×Ëð£MÄ¢ÇD‘•2]ÐÄn,c› øöYL@`6ðYIƒ4\fµ/ƒöŽJ« eaå•"óY& ?NYxmöV±A[GÓ´5‚ÆzÇ:ö¯Œ³ÒþPß[›§º~˜Ëœ“™KD@LÀ€U¨ÁÎÙw‚q-]eï"F瞊ÝK/F9¼ŽþÎ5î2n,¬òT¦¬¹XÏls~f0`Y×Z½@ÀÃ9¦,mp Õî%Œ'€VtÏ A–v‡¦D_Âﮃ¢ÕþE„á5„b‡I#J‚´tZš¸²ŒyÅjI&±,OîÓÔÏzO}ÛkD:áë䓺¯?KôjRùÇôÙ°`Õñ=¾o‚ñ¦­Šê ƒ€CEðg¦O3.WoÍcdw™ï£ Ž NFô*Ť‘Vèhꇧü¸‡þé§¢¸ô< ×_kÖÛàÊ!ׇÿÆ:@„dò PIÇà}Ê$JÆÒ¦,BÛñT°:ÚDmLêÖ¹pàËÓçÄX©1‹Ùp!¹ÕxE'ÕãäE¤Eï²D#µ;iÏì4aF=ŸÕ·9³‹› ¿õ0½·àg$Q&›Æ³û£4ð –öð–9ÍK๪AÊùÔÍÓ7w;¡™o{¦ºá3¹H€fo ¶sÛoúgtßrFÝÏÓ|l<Æœp.3qMrQ$°F_Q¨êÇ]ƒ-”ƒZ21$ûÊR<ƒpÎ&“m‚㈬HŠØŒn`úÒ¹—Œ‘Õ aVÆË~vªØØ`„KY–Ù+VgǤß)Û©Fñ3í–HÒ\€O7·[å¡Û8²‡³ÌËÊòBUz’“• £ ?ëÐôŒFG&W(J49µ²‡fÑ?û ,”ç™?˜õ½˜žÝƒ8ûé˜ À¦´)Æ%|9†gà>e:6 m؃”îH7L»4Q¡²W¨>§IY (IéOë4³1ی˾/æ+ f•Á࢖/pÜ6]·ÚòP]’ÍÕÀk\W=£!J[&S¿ÆÝ}7‹Ïl^x·Ýòí_þ‘ŸRøæUfMãe‘F±1+»¾ÌdMæ_…¨x PçîÊü.{ƒÄ¤õ5-Lë¹·gÖ‡—¡¿P÷y­ªÈ—# q |㲋­ZÀ„)ŠKi мÀC ÚD̪§ùš‘?Y›ÝÏY§ùVë”™1¾gYºÆšwdÿe^î¾ëÛ–î¾ýë>ûžùŠÇ•ƒk÷~Ô{üë¥ÅÎM!F³¥F DCF¢¡v‰xVªz~Näªiæ²ohabnÖ´g«WâúNÉ3+Áý”S t1å ÇÄé;´nœ]¥Ô¸Å4fË,’mDfóyo°Ý¯ØÏÕ­‚ý„úgš~R†—‚¦QÐGMìÊÆÎÔBE©¼“ÅB¹øfõRçRÚ˜›q5 8Û]J³¾t¯±9éHuWï?ÖlžÆçd¯»ýßãžÛ¿ú—âhã>?Ú|b9Øú¼j´³4ìo¾hsRTÜšðb— 8éY =#Õ€\Ö³198›©µ;£‹odLɃ+›cN¦íSd“‘ ¤`bu)“±:*˜xœ%ªŽev’AUiÃéõúópáÚ¯¶8FoR÷¶‹SÖÈU?F3t^ŽmŒžÎËç=bÐðëÒV Ó6ìÝ h ìTÌÊ&ß•;,$Ó¸a·OY½_ ZÀ•ë;Oã3·ÿ»÷ñßzàÞ;¿î«_÷O_ö\?Úxm1ØüÑr´ss¬v™’ÙÅŠ›ºCðbTæÐ‹D'fî} ‰¡ 4TøªMmY 
=œ°ý³?ëÚ*ìÁ•dAƒq0¬M(®Š©¸Ùà¤ýІX“L­bdÃ"`«W‘^ÞãÍÀÚžÜ÷v=Á,„ˆ7_b£[b88}eˆ¢ ¼Û~8*íFm=7À)ûÍœ»©383éÊ´kÒfé‡r“%Ú/™V+ë6PQ»D'–©ß©-³}ƒéЕJhîZÎËÿ¼LÁë;ò{îüÆ»û_¾òªáƃãÝ¿)‡[ïª~;#ž§è«Š‰ u²”ùAßа f¡À$¼—©ŽýÁ™ >ìé@biKð<Ö× ààž–;Žìo¡ª,±«D7 ¬P ¸2«°9Äè%Cc¹¶]â-€±n Vt# JÏgo»4ÄÆvÉþJŸ¬Œ*hgN1‡bWV»¥|eÖ¥ÓØ¸5€¤+ fÅQiŠÍ®e°%mÏþR­˜êm!àýÏËÈæv×7¿çö¯ù® x½²l¼Ô¶¾Õ•Ý#Y ‹"…¥¾NcLe70XîƒÄ©öø4ë¶ñà3}NbQKãe‰+';3ý˜ý4+Šõ3åÚÇþ¥ í&Ç[ÿûîÎð?ŠÞ»ø¢ª"T%™OSYRã}î8)@ Ë`Wï¯IóaE“îGŠ~Á¬ÉSeÞõ^})†³éX¤A«Ø§&Ø^Äáý¹9ãѸ±†=(ð ± À¶1±@S˱èw¦ö~ã¹ÞzqˆÊƒ€ã‘6ÞãÆ%œZkãÐÞ&Žh©Kº½>Dö}ü`'µ Xßs¡ Ýtög¦ÛÚ±Ì:¦M hÜ¡I‰,È C4š/“Ý"S“¥Öm´ñGÕë'ª~¾j9ÃæöÔ§>5{Þ~÷^ö/ßùÛk;w>¸²ûŠÉFç?u4,å˜{3`<ƒþP›u6O%<ÿòX§Õ0϶6ÀOc{/V”9ôFÓzÿE!,ë’„zW@¨mFfå‘LPÆ{1«wR,LSŒÉ× 4R `vÒÚãØhb©1œ¦F7ü¶‹pjOë¹èpëñ4×{Õ$°?"è½ùÂÝG–ƒì,ËÈ€ÙßÊB®ôÜ î hi™ÇæÞ‡!ýŠia–ë)A«9½6?™Õ´ú}Ò,8éw¨½œ° *~6Äb^Röõ¤¿?üâ;~ò›÷îxñÁþ³_Ù¼å{F£þ‰©ÛHæ+d5(”Ù ”AAµ’^+‚)_¹ØX€¶UÀßêÀìV#±1¹`zÆb\=˜•Õv‰huÒû •I¶-ЛÃÆÜT'—SÙkëû”ehz%ªÕBõÞª.o#D™Øá-uE@Zß*ðàÙ8³‹+clu+. LV+¹jPx,/döÖâÀÙ«cþ€uyåÎ ŒT§qêÞ¸ Q°d©\Ô˜S¶fV‰¤ÖµO´“ÿeÊà„ñF‹6²yùë¿ü³æ‹Ÿô»÷ªÛ¿ý¯×ºOx`i÷Õv¯|øh<ÎK4à‘×l¥ æ«Ùë­Lê¶v‚V×8±"Cä0~ߪYS« ªKV)m¶ ÕkO¤G/&f€I^5©LߎŸ\216¹3†‰¤nU&T†…ÇéËC\Ù,¸Bylµ“«-ë:wu4‰íR/6¯ˆX»åàiFƒQ¨Õþž¬lÿJC¹ÖøÙº–iLõÖEvÉft»Àb!+6€m®FÜêàmv ȇŠX¬(òš–Ìô-Ë4^µs {‡-¯ú§¯¿ùÏò“ø}žÿÚåíg=½·~M9ê­z_!sžì‹jö ?déHp Ñü@à¥íAÓ¶Ž&Ź€#Ë7KƒÍvk øŒbg ?øŒWX4?/ å‚ ølŽ,":`é2kiáÂj2KkfÇ“¬lDLŽ`Ön‰±Fì[nàØÁ6Ú.ñÆód_8uÌÕKº ¹Ä¶Ú†åôE@Õÿ•­bŠ£é˜¹°41¯¸-&@chs¢IÜ+“_ Z}^©éá¼6W5ålêö3r²ñ¹~Žêf»¼Nm6¯_öµj9/¯¿ã[–ï¾ãë>÷µÿð¥ã­û>ä]:¿R ·ÞËWrW!†²­zÊt m0^± ýÑ{²´gôRp`Ð:hŸ û@`(ª€vhuD2soŸKÔ$y€9AIïØ´?oŒL¦An& p ploÝ]šbx23Fµs§>[v–0ŨéI€ººE&†<#è3îõÆóC\Û)Py»7rÚÈÍᔑï,v² ¶óü};2µ7MÀ°(Ö€K BªGe[#x@âzªXìà…]IŒÌ2)û®)N¶O­rãyUë${mdïåu·ý{½ú¾ü—ËÁƽÕ`ãñ±Øþ\çûKÃAѼ&FwŒ€ÓÈ2Å•1›iit_y–4]ŽÃ¤JY'R‰…z›ØQ°£ö6,À ó¥i²‡Ô7i¬Q¯…ǰk9g_w~óÁ×ÿó×|Íkþþ‹Ÿ?°×„ñÖ¸ª{3ªCcnz&²\ù£"‚¤y&Va2~R¯D°‹`=´ÌNƒ#C#pÕg)62LkÙ7Ú«X8ü~X<õñàAµÑ‹ ‘£‰SÅÔD…À{¹h©|CwiŸmc»±2 ¹ÄêE߀#$Ý0÷  hŸlfXÐÌØT(ãµ pô6b·G½×Í;¶@Ð ±^ñ…£HöäZ»ÞMàP!ïTù|•ú2X ±ã±áª4OšÕÜÉ¥…3mìÂÁ쬭é)J–Õ‘}yÞ •c”¾,âÔ²YKâc—‰ÍìîÛ¿>Ÿ°¯í?|ùŸTƒkúÑÆ_ÇñöǺªßòʼn«ƒõÎü¡y1¯\€ðÀ•ŽI`'zÊëäm ÕÈxE¬¨ðÙ"šûß{oüX,ø4VNÑ ÕXΙ&†äR¦.›´ešx=1.Ê'‚a 
:ð•€`W‘¬†Ï¬¾-Ù¨áùó`Kz4š€ÄŠc)Ä-KË>³m²*‚RoÀMáÌV÷G»ÉAql—6ÆŒ¯å¹Ã¸Ì”¡Y꺴`Ä¥,›©!] 6E÷Éö&šmc]튧¨ûÖ>ÁêÙ8~ N#ÎWý›£ÙÓú(s {Û}/øP1«ˆF:1;™1{4<æ™X••Vس!gÄãäÆˆKÙÉ£qi(¬KSI[wØ-¼áÜ€Áyæh!KÞßTxËÅá$ðßǹõP,îz¯Ä¥k#ƒ)}ªzVÛ ë*–I)O›-ÚÀn‹€Íx )ðÒ,b§˜¢ó²XĪ=€ríÓÐ9õè¬a˜¸(lJïX+ü ;³_rÁGØJs {–³gÞ¾ç/ÿ‡Ï¸ç ß÷ÏÛ¯ýµÊk¯ùíèwß¿u3žŸèKA«jöÔ zùÞ‘ìŠ •gdRõJ“uÉëÙ^ cÍê‚ ª §æZ{±¸öÞX>5¯µ:«å(®UåxÝÞ;ýéÄ b]!›%–fAK–ž~¤Ï’'¨ÞÊ6Ä ä6ÖÛxŒIdÚËMOu’x6 „çJþ—Nrå3쓇Ú8´¯É€ýÚ¾„£*—7 \¼6fzžÌ¬ ko³[ÕY0vGÚþtx 7éˆ]¦ú1ØÓtÑ)(ý{Ù=ÌʧŠÚ]ôÈPÂ7 X~O”Kï½ÇÞÖnÄâò BÙC4 }+yÑ¿7QlXгÙgÝÈ ìÁ×dë¯øÕì¾õIw•½s_]cWÞB…F o\YæFõv!O ²z0NˆR¢U³2©­E.‚ï‹ÉøPq‘ DÞì ½ÿ´~8Ž}$²•›ÀS¶«¸‰…j»ƒvzCìŽ*´š ìÛ³„V«©}}5+’84V}P½Mz { ù„R>ë=³ORÙ0AÉnJ÷rW‘fiHd7wb‡3÷(rëо•&Z ‡c«-~ðß­uÖY:*çäriîò:.µfo:?Äpì¡1TZ¤IJ{åÎ+ʘ-ôû„г Ó¦˜N÷*ì{èbˆ}ÈöŸü ì=ò.@±‰j¸0îòËOýRG¦ñX†›ökwQÄ`Ó2=ÚÊÈŠ{?5øò»û#ÄnÄ?謹€˜/òHÿè2IÎutMh‚”Ò™ƒ* êj QÉœôl‚ŸX]¸@àb…fž£µ¼†Îá÷GëèG£¹úÞÈ[{?€«¶Šý]lï ÐíyØëòb{–Ñ^XFÖÞd-ûG(Á©fô «ì7. Çn‚KæÌJ›ëÌ%L”@.µú¬Eàj_²ãÂñ=ˆñª½Ú¨P‘?I”Èg·‘ñ¯và  ²ãƒüïô¯ì½…“äØÒ=vî•TÜ<ÝÃ=<³ôàãï3c ÙfÆÃ 2ƒÌöù1¿·»oq¨g§±PÒ½×U¿Š5ÓtŒ&rUR©U*mëtÂÉ“·ºø`»‡å^FYùc²È•RñtDœD5Ö;ÛÇêX„t¤lXÈa& ¿Mu’DßÄ.. Ò3zíoÃÊ£Ýni²Tž@¼°z„§@ª9‡Ó1O¥Xgäº9ñ*›”É äYZùy»Õº˜.“yn\NK±´ó¸´²ºÚé/_Å,89a<™!¹¾Y™H‘ ¨Á%ˆ§!LwØ~¸¡ý’P2«‡S¯Fˆ5¼ÈZIÜ·®ÿÍh]ýK·á}Ð1”§(§C\\,¼¯©älÚíËK]ôú}´º+È{@ÖÃùñFç‡ðÞ[ 2Êô˜Ñ6‘¶H`û#­_R°´|®I)Ði?Êm£$W0´Þ-Ä ÇÉôQ:ji5†e¼:œAš¤„Y™ðèfG4Æžòòo]iã[÷ûÂ[]ʱ¡ô ÛÓh¹Yýn†Ç·{èw¼zU Ñ”8bGÌð`3èW¶í a;ÕYƒì!EŸ,¢ÈP‘¨'HÕ)”báÅÞÐCäÜŽ¦ïë¹¥bŽèr¼>Ëñôd-V·fWn>zøüÝß¹?7\&óܸ”&Ñf;2š?‹§XY]Æúµ{Ⱥ¸×8=Bò*ynˆÉJ©Ð‡ U 1½„Ÿ v¢ ‡(DYŸåðý[hmý1º7ÿV´7>@¶ð« ÄÙ1êéF£ .†SL¦µ€ÓÒ ƒ¥~ÝîZý+ÈÚ«˜L§Ø}úk|ýË?…“g¨Îáî™\˜•»öÞΤL²‹jêÕÙHÊ_3çÃPª‡~¾ñLØQ û­D3 ž|2hE÷ovÑmgò+9Vú¹¼OÏ— ÒëdòyoÎB€üœ‘…Ö0[? N7– •Q,ž2ÒÛ’¢±iûܰ)4gè çYõ ±#ÕS©TÆ…EÛ„ƒ’¬®EhŽSÀË{Óq†Ïv³ô‹¥ò`vsÜ[¿;]Y])ZYµÙ.æ†Ëdž—Ób(QOOP ÷09y†ÙéWˆãWè¶€+×¶1X¿Úuqv1†xi¾¢³Ÿw‘!éC&žš—œ¹Y."s.% µŠ|ý[èÞúÛлþ‡(›26Lß šœb6]侦¸—"ÝN KK]ôKh÷Öu×Q#Ç›½çxöñ_Äîoÿ,âÙ§ØèÍpu£"Ï4±Ne «Óo­Mÿ$-zI ÙÖ¤dôöÏ'Æ‘iÉþK4z‚ôÃm͸6õZ`9Ze%€ õnl ¬,UøÍ³1žíMägz/$×(÷@¼é±\€—^’„•eéQÉ}:©¼f‹¤<ÀÎLi"¢•Ö! 
yG,JAh6)T^P8CÀ>6—T‡G2ì{üüE+|q²9Ýû“û÷¦÷îmt¶–&ýv½çãx)LÝÜp™,_üç/ò@Ê/G=‘‡¾®Fâ%wˆ¼½ —÷±²²Žjiål†ñè“ÉH¼VÑGá£b+xABP”<[Ö¿Öòmd5À±š LOàQ#Ä€ª (+IÜ÷»Š¢€Ï;@Ö–ux~Œó£ç˜¿FމP Ú«¼k±¢çÔ£`&Ã=ñÕlsxÔ'&冑v](…mXêܧ 盳Ɉ*H¬ÍÓŠæyˆünBîô¶Ñ$âËWcíßt U™H7•Úãa²¬±2hÉÉ»m+ËÅ|Àú Ãúüuæ(Ê"% '¯o´Pä±ÙÆêî1D»$¨°¤žCÂS¨¬«¦¬:~G½Ç²ÍòèÁ ù ^X¶^¶I I*)þ7Î'¯O.ª~•µV굫+X_iý¢ÎR}ê²j‚zzŽèÔ ”—mÉ헺̀Ah(kÉ/ Œ‘P U'H®ƒvwíõ«â…‡§˜Ž†(<Ðé´ÐêtPφÉ£5¸‰Îò=d½k€Ë‘¡Á©î~UÕ(çCDž/À«5_/À«ø˜Ž‡8ó9FÇ/àë3´‹„A<몀j euT1ÖëS-@¥Tœ`g Ê@GŒÜÖª*0ð!\œ+óFPÒ‚˜3‰.µÖ"I4ôülsb €Éjkëm‘í©êˆ™öB^L‚ ¡ö—™QIÃöÕ6nm9I!,¦)§QÎåEŒ±‹kmùÿ$ÅæãY8á·n¼Œ‰€¥!¼<`ýäÐÑ{fâÒž^ÿÁgº?\ Î/,C‚‚¹÷¨’Çî™Ãáy§XªúKkñέ¾´Ñiû© ³7pa*© ¸ˆPM‘åm¤ºº”ºdó—Ù´eH&ŠʈµjŠjvŽ0>B9|òü)¦'_³#ôû]¬mm£la4.F¥TW?úç±òÞ?ŠÖ¼ò]1Œ8‘ó§%ÎçÏÊEžce¥åå%´z«píuÔèàôx;Ÿ}¯?ý3˜üKÅ96!Ï +?#Ü. 1'—{}À£>¬h²æ\ ÚV ›ÛÖìÜÊÎËÔŸ³¤§åx,eoL¥Ô; "ÂJfds»Q6Å+Um¥Qü—_ … 8GÞ- /’<2÷s´r'³NËcÐõz_!!eƒÆ˜iÿ² שÁÀ§J…ÝÐñ· =>ÎÅTTö9µà³¶¤6²¢#ž{ÖêIž5Ë<†S/rüìE¯~5º>Y½úhöþ£íìñ­v÷Jû¼—;.ŽvPöPÊ!B=•. !½cö¿•‹Ø‘ í@•jÉ£…ÙÕøa|€êâ%fÇ_¡¾xŽØÏ6‰±†2„%‰‚PiiÂûšÕ2R$®÷N*QÀÐú1¢‚_·µJìàÃ;=ܿޅwಖϓýí ¸­2´ hf{6½(îýæIç‰M£0 ÇãxRë=š&H—Ãeí¹uàŠžTÓóÎ*ŠÞ:¦uŽ×{ÇøõËVüxu6+îNîÝ?|¸Ù¹¹^wûØÏÝdaü qr€Xža§âžvÚÐ;Ÿ·ráCNƒÎBMœî¢´…„j‚zvj|$‚ÙÙS”§_ ·§À+Ý©¾Àññ1vßLp>m£;¸‚µ- –7à;pÅ:BÊqrø;¿ýËóÄýŸA}ò Öº‹Ä}k+=dY&ÀU×ìÓlå €kÜò\¸lê%,œl À5ª–Ž9/;=É€  ‹¼?ШË÷œ%»êîyÍ6.Vƒ yÆù›Íûí ký\?— !÷ N¼¯‡7º2)éþ–{¹qÈòü&eÄ—;cQ·7‰¾ÿÚ ƒN©"‡Œ×c-ø¬ª-ø}šKÖóš²=¾€Ë»ðÅÚý äý+8{üòן¥ïý¥?Uï¾~>]¾ú¤|ÿÉÝìá­Nw½sÞÊËWâ}…ñâô±!H×H`˜î47÷vëø¼2Û[(¬tyHŽÎ"ΆpgÆP(©’_Œzv†j|ˆr¸ƒz¸W`©cs9a¥D™àè"â´ìcŠUñ¾vŸÿ Ï?þ³8ßùºé[k-¬¯ö%Ù?«ª:¨¨e}€€£]È 64#q/C=`ªºa[Š€fëÏiBÒ¸Ï@´°㼇-Š3¢‰½fq ob2—GúJVôÐî_A*Vñboˆïÿðñç?ýQ9ÌîÝZ‹<¼Öº¹:½xgÓ—HãWH³C¤òL(1Hî‹7(×,:xg¹m¸„æ¹q)91z.Â/CÂù8¨Ñë€GuÄ©žJ¢¿žœ íczöBÂ΢>ÄF¯ŽS«ÓÃ_|/~¶˜üö5¶Öº¸~} ƒ¥e ‚’HÛ…ðÓìŒBíÅô~a”r^hÌÑØ¡¼F+Œkä0MØ’+!pQ©Âhú»fÌ9GägÁ{лs€½^Vé`èÌM™! 
ÒÜ}xZ*áXÎ*ÕÈ­µoÎ*<Ý•v#¤HÌ´7ö|\Ëà‘ƒ³ 1Å9JE0-IúÙÍ…|<l.çýpï¬GÏ“‹VmùyT£U!‚<Ãx–ðñ§_â¯ý•¿žñëÙr'V<¼š=Þ^é¬w†­VùÊÇá „Éê‰ Õ3 “6¡çª@‰ÄïùNÆçí^LÊIÙºS¸EÈ!IäȦ9#Õ“z1@H‹³ Äé” ^ÚésÌΞ“],µf"Öw÷Þ]¬nÞB•ÚÒRS§E{€¼Õƒsb b¦2ågUPï„ù1½JÅ0TCLà´$ ¾o”a›`ŽdŽ 6ìDäç®Áø—5ýH Èú ¼ò‚XFö»ó–¯½­ú»w½+¤UUÔ¤ÿ¼]IxY¹ƒj™o/rk3@²ÐçÏ´p}£ïõå\ÔЗς÷–•¯ `?Õ]v8=Ž-XÊ&£õ)mžà±‘ã³Ýyîëçß+O_ÿrv{«ß°Ùº³éÛ}e˜¼D=|…zrˆ0;j),‘Bc*¶z:Ú6Ø6°w“ÆßB;??ÕÖJ1o.;ô;…8tƒáU$Q4Q1ëZfUNO¦G¨F’O«†»B¶õi†î` ­þ*×ÇÅ$a\1ë¢h/I‘ 9‘[µ¥<4·¥2@œ<èd#…ïÀ„üÆ„>M½(ÐÕ5`+¼|Ž:åF±ÌfÆÑQŸ?Y/L¶Ù¼®‰|vÑ«$lˆ%õ¢nn¶9-€äÎ:ÒZ„Ü{Ç2Yi¡|!÷˯DÆGe±vK@Cª–M™zºäv™ü•ñÂØQÀQÉ@–Q]VUn¹-šuÚ¢È0­3<Sàׯ{õËáÕYÿʃúɽ¹÷5hoöÇEQî¸4YTw¦ÇÒ‚PPO²Ëx]Ñ~'6ÁsI—ðŸO¸ÜÿþüŸÿ èõú¤ )8«}•že¼;²Ñ¹6*(ªi+Äj†º °ÅòLÀ¬<{ŠúükÉat;ú+›Ò5­[N#B*P´úb>AÁpòŒÚfZPÒh£x!Æ…½—œ¨“x,š’>”ëahD¾Oò+ÏÑÌùˆ1ô$ 2®i<` *[œrã©Üɇy6ê‹ôõ¹rʾؙÊH·9¨Io¦†Ã"oöðþ>–z™Iâ; rPÙÜmðŽ™ 6Úû’)}&ÁãðÂã“×­ô›ÝÕjèïÌnn?JïÝßjÝZ­޲Eå1ŽwÆ ïë I+Þ[±GÞÃ}8Ð…œ=æËfž—ÓR²òÓ‘ãû9úŒÀ²”«I6Cón§ ! ¨¥¤Iªâìõx³³˜ž|…êâŠ4]©ÎÒB¶ŒáÌcZ{Àw¡§xiÎ{½.ñ  ÕloòÄlë4¹NPŒrå@Š¶áœ¯&ÿ5æ{ºp’?‡×!×%­\vjˆ ¯\ x@Øë ’°=‘¦Æxñbj5¿(§$}–ƒN&´ŒåAŽõ¥EÆA pž!™3” {º€ou1•åL½³q•áéÇÇ»ƒðüìjÙZ¹_=¼¿íÝê¶7ºgE>ÛqiôR*aò±ŠÇ/•Çda½G³$jZ7ä}ИÑyùÌsãršN‚w²†‚òLæ @‹cuŠ+ƒÒ¤çĤmD+`â¤%„: †Z¤©c5”Äl5|éé3Ìæ 'ûÂâï-­£è_Á4v1.="Zh­NÎ0ÐÐ6óª@ ¸ô\å®;2Nó}ZµõmÔ)WYõ.ôþØä}‘Ó1Ç÷Å¢ÉÙÉ¢kd$X1¤k,&INÇSû\êi)ºsµ#ÿO:-É%…%U±Xžl÷pçZGÇäÑKñÆö¨6d|Œ¬6‹J‘ij€ymr.Ãñhá}åé“ýµúÌm—WoÝOï=Ø*n¯‡ÖÀdN*¯§â} ï+ÅZ%™\ÃÓã$,†´`›—ý>`(FªÜò¸Ëgž—ÓÀ0ÊxgK@+î4œTâ© «v!Á ëZz$ß‚ÏÛH.—|ÛJ@j "Á¤×íLsi;˜>EyöY8G§ÛF{°‰¯b\·0©s‘ÊÛ}ä­à2È|DN ·”Tо€RLäZzWcõý“ÊÉuס\",PÙÕ±O³ÁS`4±IüóÉÒ}M¯ÆâžkVÖ¨!±‡7»Bk_^ \]“ñnÒ;9Ï› Øm,èµ39޲Új ryÝoA”÷šÀeœ4½×ÍÆ÷YÈðò8ǯv:áéùõªïëöÂûjm´ÏòbÁû/¼¯}Tã7’Ž™ž ’ºÍê)ô=Ëm³’á™g‘A‹'`ÚÀÁgm\B»ü-J@CAçZ…ƒ£º©L6— õ²ö2²Îа®]ч/zÂÄF¶Ëçæ ïzº4C,ÕK-ȶÏ1=þ i¼‹ÂÍÐë¯ ë¬£rŒ«BZš|ÑGw°*}rämÉy5¼Õ´³ñd“t>tB˜Ü¼ólÿῊë¿û¯ jÝB=‹)›&öNRן ?(%ó¥¿aUimÈÞ }Ù¯I‰jE.߃n†'·{XéBn½£+dw® çLüݶ'¯MŒ‰ü˜Xo%8æ¶êšéÞ`ˆH"Zyt8dølo‘ûZ®‡Ù¹÷õpžûÚÌ·7b«—¼›.÷¯¦‡¢öÊ1RïËÈü0Ìn*ÊR¼Ñ‚¨~7ݰÐ"¿ SÆßµ(‘±®a “,oL¥› —Jþ¥€“ç?ÄÙ‹¿&`–÷¶àÛës @ë"‰÷y^;Z×ECÊéõdÑAðJ¼´êüÒì :í­Þ¤?3[ÁÇO‡Ã—{§Ñ{öþAALCKxÇk%¢ªòÙqvŠNo€¢ÕƵÇ;Û;º·ÿNLËÄŒôgKÞYƒ+Æ| {'Á÷Á‰T`å Þô£Ûqj:¾­%­F03Ïœ([¬ 
$‡aÅ{6¢s²—møÆ;sàû§()À{±lneíñò8›{_½øâb«*VÔ÷ïÝñs mmöÎséyïkW„4ãlˆPÎ Úüž×$kÛi!!3sŸt˜sèR¤‡î~ž÷rü;BìÛi PCŒ ÕðÒXõ#;]Í¡*§ïüEŒ÷~ŒÑëŸââàsLÏ_£Ý¿Š¢·×^†ó9†ƒqÅ@@Q‹@ @œI@ȶã}”ç/Pž~pñ…£Ýéá—Ÿ|Ý??;øó&©«ŒvnR+AÁO†šT“7˜žïŠfÛúͰ±ýûè^ÿcdËPûUñx½Jâïà$k“ŸñÎQ¹Ö Wäm¥’©ÛüªÓÚê(x³á–k|_ó ®l!€#=$˽“ZºA^!}v¸NÓvµuóA||okî}…bà} x!NÊ eÝKø¯ÈÈëmV>ѸîÌ<Ôx<ÏÅDžì2Ò×Tãuïxdo«±g+”ô¾(B(Þšæ$øk¥S¤T2Œö~„Ùî_ÆøÕ÷0>}†ãW¿D˜ % "T©‚Š­°“·­w–kÕ5ŽXž£ža¶Pã8{òb·62·¾Òݳ”õ€•\™ÏBääoÓåÑg›ŠìP5»@9~ƒÙ…pàpãþïáæïý³(ó-5AÈ`ˆ³úGÀ4™;ع™Ž^ ¤W@’õÒ§÷zü qb¤É鱿hdÞN ÷r^ÍYyj>ø²r€ì7Á jf¨cŽN©XMÅÒÍúÞ½;n>ͼØì^ä-éy|…4ÝG˜#UC@X÷B›IÎÐPÏ^cf+í$éòšà,?MÖdÞ)ê»lQ’5ÍgîRú+þ²;dä<Ù֘ʧÉ11ÂzNDƨ{&‰Ú‹/ÿŒŸÿÏ8ÿò¿Aœ©^´Ô¨G˜ÀóêõÀYq=¤€XWÓ‘ʹõÛëkX¿ó7 Ûú›0*c—å:D{Œàh\GYT(²š!Vcàjzr²ÈÝ\õX@•¸&·Õ&ÀéÛj¼þHqFCÞ…¶yþ¬iîËýHK}C%7Ë «f½Lf#Éå3­mêT¿Ë‘½½‹Õ¹tLM=wºM@d!!ËrŒ¿޾‡,+ÐÌ'Y`Ø7šÖ(« ÙžIEC*T•z|8ÃÚr›ÛßB\û#ûpRo¡Š="½~]y-ƒšèUÅz‚º£®Æpˆˆ=JÏd Co¶%1Ïceqœ™ô©Ö,Ì 4sGöç¹`÷“Ba÷›6.òÇĉÞ}†“a§;~ù<ÇI¼‰å­ØÜ¼§¨FG"•SÕûªG¢±Ÿ´çQ=Ȇ {1mE²¹0Ÿ7œDQh‰1áðtFºE“ëÆÜšk #+¯¼ï—Î<7.©„ÆœY4ðXö(ò=’D1z)¨ô=jˆi¢v˜KìÕQ-Úñ ÇôÚb4Sœk`²‡V¹ƒÍöÜ¿–ãþƒGè^û\toâ]Lb_÷9“,Ï2™úŽ*z¼<¬ñËg5¾<죽öOÞÿOîmb¥¸€›¼F˜‰»ý´0Qœ`j‚f¯ï›ÀYVvÚº•¨Ù–Ïö¦2ÂîŘô™~ùj"*·Î5ï¥åÙv3Ï‘uï’ýo£­-õ¨“efÝfE‘¿®Å4U‘•ÃGš‰ï˜@àÒ×!2ÙA©$‰ãá$âtT#ääu¥:e”à µÏÉ!¤ŸsúË~·W'xxg 7¿jåpä?Äqµ!ÍêÒPŽhZ„°©±™—må„©䵪Árœ.>$s‰c^;àà‘6j!R¨1Dó@r„¸¤o’³Éyslã$“Érœ>~^âWÏ3œ„ë¸qï;øöGïãÞ.º8@>C=|)÷]ô¾(F €9¨oò¬ì9ž*¼ÔõŽõwaX }öbŒ³QÀÑy…WoJ”U”áÂ'µ´¹}õj"û¼s†W¨çÓ96´Ž:-ûêê¥3ÏËiÜÛ´¬T׫âéºAø4^T£îã@[’_ù[н!¨é>QgpÎÉ/ðt&çVДµž“Us¨šm=A¤\&{Xp”úñ®õñèf··ï£¸ò8mý>ëmLÒ@šæ8Iˆ½y Y^.‡÷9༙¦Ï˜a8‰‰ýÐ:rÁÌ}mèØë†÷$sò<¤ 0´$HÁzxìðìl™4m#ÀãåQŸ]áóƒ%ä+ñÞ‡ßÁn`³?…›¾ñõpWÚ†dXMœ!†Ðð®(ÝÍÅŒæ~² øvŽJ|òl$ª¸Hå!(ëý ÒGšñÊŽ/jy¿¬“&ÿÙ"%kÞ3Kt:«ðwüþÃKgž—Ó~ð›çÂËÒ‘ÿß@½àkUÂ0Œ~J'«!’0ÊðT‰Ø%`CZ*jè˜~\_ϰÜb¥ü•U/Æ^èbACµ$:iÒ1€z|ˆ8zVµƒU¿‡{WJÜßÞÀêÜK+—ÿûñ Φ¨Öú,"o&šð=уwyYÖ‚ËЄ4Jù ‚±J:³ (¡Xa?@ð6ÄØÄ<ŸþFîFAK«“ ¯Š¹2 f¬>´W©FY&*$Ÿ¼,ñ³¯=Žª¸zç[sï뉨Ëvã!‚x_;"wg'B‹‘›MŠ<±l4CEgޱ O~CÀƒÓJ߇“€I)ÿïñävWÆÙeè¶œðæ<;/ rßÎáëÝ^L œ*CÄûhÃ^ÿÙŸúÙ¥3ÏKiŽ'¦OÑ4bØ ‹›€ÆðS^'Û pˆ¼ÇpU ‘ªRqÌpc=C¯¥RÑü,H>Ê Äƾxm’{+ë!dØÄx~ö ýð ·–NñðF ·¶ ßüCúïâ`²iåÑ\ÒmÖ^BÞZBÖê ¸yß’Ï!ðŒê¨¼³” zaMi VÙÔKè„Í‘}óbå¬y>Dîeæ( ™õø“/J|º¿¿ôO>ø6披ØêO‘Í^ŠH朔¬9° 
¡§¸$yKVnùɼ>VQIÂeú@spìÙ$ DÀX~7ŸíNñÅΓ™ÜoBÜ[4Ë÷ðíû}|t¯‡y›“x:È"ÞÙáY‰váMç…ýCA~HHzÙ,_ü—ŸÙÏ0ņiªv² µèáB”}žç'a4$d™| X¡Ô¾D¯‡ÿõ¤?tlÜâýŒ¡®^§·W'çòV¡z|¨$©! Ð4D m\i °¶ÙÇxíއÀÇ¿üúË븲±*Ó¢rÌÄëþÑ…‡æ=sWž•Tfyï¨P«Oµ¬|Ó‰"¹˜Ç 4TIÚå§Éƒß7©†Æ{™‹H0¬x=¾ŠSÿǸ¾}Ž­% Â…Eîë‹0}¬(ZÃɃá¼fçÀŒ ‡´h^Þ(ø*÷-6Á8‚àh%¥"”š²ÞË¥ðstVâþõžÌã„÷È}†ÕAŽ•žÃÞIÀ‹ƒ‰¶my©j~ùjŠ[›-tÛ@0sxìâË8Î%\jÓgÈ + GuT‚®)Tȇš27$…jHI`PŒ&œ…†¸ôì$\4ÝÃJQî°¹z•ŒnLƒ¸C”žÊj:BPI!?{%¼Æíå3ÜYŸ¢U`çù—øòÙ!. ¤ÎU´—¶Pt–xW@€T[›ôÞxª*¨ÇɇT®ùAØNz©ÉT!MÎŒùþ±€`ǰѻˆ.Ãîx??þ?¿øL–ÿ&l?üÞ»×—'Èg;È“}É1¦z¨Ô‰¨`ê$ŸxrQ±“ |7Ы‚lË>†Ú–ŒÊk”cäÜ’FMƒ(Ø~½;E@rxp£‹î³6eÆÃõ+m$9“pRŽ+¼ÌêÜm6ÐTdÚ­ŒäWÓw)Äaòådÿ%4n\NÀ\V„¥$ÒøÐ¡©g/!Ø’#%ËÒ‹#XñµƒXC Q©´#2,MnÌ ä®–©¼vR¶“ë…„}±ÖéçgÒ×ég{èÅ=\ïŸâÞúëícœì‰Ï>ÿÏw‡˜Æ%Qõ(²ïõA"¨8Àè‹YÑF†6ò6Ûb(m¸d¦ ¤[zbfijƒ)u"Ç$tñÙé-üðèwñEùGXºö;øî‡wñ{ï­àΠ‡¨/^Jè(Ã:”ô›b´Ê:½êéë!ÎGr*H ‹¥€8j“Ñ{·× ÇðÏ9•6O(ç–yàÑÍ.n\iÉuÜÞjÏ·;rüÙE¯_OðÛ >ßKÅ 2ï³È¤ÑÞË4)­Œ3 6¿#ÍÄ¥3ÏËi o§ù«5ßìÁŒM"-†£Že³&úåx­l*0‘S&§‰`” Šèä yÞl«²×‘‰7œ /Õ¬LùR‡«¥¯å¢â9=@Qîb£8/íFﳃ_à³_þ¼)×q¶1 @&ñÜš›³]Hä®)÷Ž;°:çLXÓøŽ-Cb1 ÀP_Bª˜2¼<(ñW>ËñÓ‹¿罿}ˆ?úhÞvØhÂO÷PKâþ Q$s¦@ X*Tb¦ùÌa8©1ž¼>š@fÕ›kææô¿V—LŽuÆÛä÷I8:­Ä‹J¢‰g·µZ ©ôN’(ÝþæÙ¿y> ï“‹€ýãJ(Ó2©æq÷j[Àöõ›ŸÏßÓÂF³1Þ(k¼V|­3AóK4:=0‚÷'ãÁ…`[8XÍŒ‘ç”m ÛbC^1ÛFE H€w–ÁÎ0Ô<`z~†s z‰á bºœ š ½ÀM÷ÑÇ!® Np«û w¶:èn}Ç­?ÀNùgÕBôð.B9fFæÇ 4œáwiz0j¶ÿ‘SÇy ´÷UF¸Iåñ||ò¢Æ¾òØ›ÝÄÚµ'øî·Þü¿Žíõ¸0|‰ÙpµÎzM¸8T¥¡ÂÁ‚âÑÙ p>”¹§ëaŠ_Œ B°d Á•ðÆÅ$à·/'Ø?)å³n_íˆ'V§ðéë ~õÕpî}Mqr^!%è\Sˆ§“Z¦m,¢šcBYE)…£ˆ¤4'iõÌ€w½–o¡1,úÉbA‰DÐUäÚ™_£n;ܨښhrÂëô†‡Õ`ÍÁÑó3* °²èÉseÚ=Pa„0=A °ÑÚǃõs<¹ÙÆÕ[P®ü>^ÅpPÞDÛjˉ-7òá–Êàׂ‡]š‰ûÌ£ÈsÄäñêMÄ¿Œ˜k~!ôáÑ{âÛO¶pw3`£7…/P^ÏíåäD=°„¦CKFe¨§÷OkŒ'5znÐiyuͰæóH¶Újа¥Â˜©'¸¾œãÚZ!ÓÓ?y>÷¾žñò°\€)MB°\Ïh*š [©#ä¼K½LÞãp¦ ´íQ}K—wzdäxyzCsR7D!ˆäWIä«R,˜xgxEpä¶•ÎŽ‰\1N7¬¦’‡Æ°RÖ@³¶¬ Å~P½n–øY`±ÃˆzPºH§CÅòB&Cùr½ø·úGxr­Æýí-t6¿ƒ}÷;Ø­â¬\FЦu‚+C@#ácº þ×{,óÌÏ-Ãp¥çñû_x¼_Ãúõ÷ñïãñ­«Ù!Òð9êÑdšUyŽXŽt`Ga’r^A¢¶¿“Ä;щ½ŠïÝ]ÆÊ G·•A§Ì5=wæÁØt Ä”8ò®œ–ˆ:¢Í;Ìl,öæ¼Ö™œ7È2 ç MèÐb™C°µÒÂr?“óg2‘þÓgcœjx3JiÐ ¼oõòÈèý3”„n[z€ÇD= ä¢Éq¦pêh‘¯Æ|˜)ø8øC©Žü4õÞLnÆ~.9YV?CX˜IR³Á¯¡ß‰ÅùÙª”\Z˜³C´ë]lä{x°qŽ'Û]lÝxŒ‹îïâu˜{i³«˜Å€zi®k8ÊÉãFÉ‚E„<ÏÔûªÅûúÙÎfíG¸ÿp`O¶pkeŒNýi´#•G”':*mÄJÀD€i‚¤¸#±J †–í–Sï ¬JX¢kdXªÞ!++±’÷’„ý‹ý´˜$Ìü‹q€'Á 
•Û[[íy³íùš¢’Ê1ëf2Lå½ínl´¡Å'Ùßkg¼äDu`‚ø; {;¿S2 {®Y5 Ù;ðLdkÒÞ„\—zYâ…Ûz¬&héÕ$+P(¯%ŸdC~ƒûë8ގ΃^Ÿ#äZŒF=ÃeÅA4Ò.fÇð³},a·—ðÁÍî\‡[ûöð»x5»‡³r ÃX2ÏÐÚ™Ä>y}[¼ aÝO1On/¼¯_Ÿ]ÃêÜûúöûwñÞíkùòÉB°ð5¤çQ*/[ ò¸<858‚P£±Û|ÈIÇIFH‚Uc8´!¦lšAÿLr…RaœƒLƒ¸ë ûo\‘!¸­³È©ä8äsÛE&À&òÚbÖøü=b|3©˜ûÞÆåGÖ õêZ úè¢f¾ÊÌ´<©hTRù—ÒzY Z!¤Z€Vüî,¿Á·n%lß¾·úmì„ïàùhÕ@,èäí Ã)ðÉË€ï}žáÙÅu,m=Á·ß»‹÷o¸Ò>BQî“]$i:‡‹à¯JCsq™ÈŒÃÈŠ6|Þ˺b>ï΋‡ÈŠsOfÑê׿ZGú™ŠPc‘Ù,l8‹iñë¯G8:«•ü aéo­¶L¤ë9.iWúõÓ±4WuRuYàxX‰*t8ÌŤÖaÄd˜V¬b$La=PàÙÛiÑÁ„3C+0'Kî$P$¶¡Q @‚ÀE?l8“QlhæÍ˜èwM)““ Tßàû ¿ zÜçPå„À–~ÌùÙ ­ÂŒåXB=W¡›öpµ½÷¯ñd»å«pRü.^ÎîcX ðæ"àO#~úb Ãìî?|Œï>^ÇÝ ºéµ(y(ëõlñYm,QKÎ2ÉÊ/À+ï#k dÒUÊz¨“\Á*"«;yŠ}µÍÅÒMìÂ;ì@oXG:àä·VúÖ–ŠäåÁ ¯Ä³Àœã0裳 úû%²?úGýN6Á»hÎpÜbâùÁöýâm4(œèÃ>Ç—i¾Kö[ æ·`=!YÓoJj@äü¦ìL@T.ìLJc±¹¤Fq!Æ&7ÎüŒ ³ma0ª®b¬:òši“|›zi©‰—VÔXËöð`íÜN¸}c è\ÇþxK›ðÑ{ÛøèN­î´êWÀt­û ¸8RÐ?4Žy/×4¨v¿ÏÚ2/Á·–àÛ«~®³…‹ýßàôÓÿiü.+Œn½×y ÔÎ ë,[˜G"±H&Öàîq?ÿ@Ö!Iñ`e •ãöV¹2òå{>Î7@b¨ÄpX¼»9ùUd|ÎÇAÁ‰I¬·(Þ¥}Þz•ëwô ÎXŒÌc9d oš|‰§FO ú×#(Ã?°8Áœ¾~¶!ÀBÉžŠ¨ô 忈bsŸn8©â‘ÍÏIhbÁ––éƒeÇ÷ó¿TÕOB: u…X‰—_¡»È¥u°}%àÑ­¶×¦X¾(tÄÉ¡Œ¦KÕX8_`HO  K­ƒÄPYú#[ƒ¹­À÷®a2âüù¼;áé_Î?A'£Ž\FRUXôÐmgFÔ‘?g牆v(,š¼KŸWjŸ>¡ŽÀ­+mùl˜^Y9^ø`w¯weØp»åMÈï<Û˜ñ²!ºv‹*(ÉÒ¦ R|doý¢pÆ-‚“a¥3d´ï;Ç$8_Cÿ"‹OfÔSÀðÅ‚’),Ü bвŸ¡Ñ¾0×&fs‘×Ìϱ90&.$ãà÷v‹h-v MèÇð8ÔN©"ÍNêYýi²'£Òby.œ¯ký®t’!+sªzä.À·PàèË?ƒã?D¹ÿCtâ:ÓߢåàrD‚˜!·rLØíoÜŠ›ØžFó BL’„¯C«êˆÇ·ºèuIµçAú,¿} !èµÅÄôå¼!ÄžNVMo^‘c_#`‡¼÷¶š”KF¾iá¡çÅ~L&þ9@ƒÞN3_æ0Ý*ûhìÐFlngº G°âñ@0•Ej¯ÇòâB°l8VzÏ_£Ó‚ ?×ÒVx<`C¥º:@=µz hîËYá?Ûƒ»í™¸&pÒÖ8ûêO!~ ÷æûhÅS #rÄÄÐ*%R# èM0‚÷Ìr!à93ÜE¯Q@‘CiŠÜÉaçÔpm­^Ûs‰IÌCxb™w’ô¯ƒä¾`¡Ü0-PaÐÍøŠÜƒªº\§ Æü0€Khž—ÓlØEBÌSÐ˰ìt]L®Œ€G`ÐÄAy*~N£hÔSí° °"êô„Ïïz„`Øóqµ…Óq‰çªNB:­ !P&¡õ³ Ü׋¼²ÞAý¼H÷pÖ)ñûÇIŒ[¯¢9 ƒU]ŽÑ™~* ·yÞœ7äfA}ðÕ`†ïÚá½0dXÃ3Œ~Þ_[Ý„3ÞϬŒ†¯'÷ÛžçÍy…ŸðõÞI“øÂãïœf—û9ô§„ª1šF[­$“ŸdXÓw¡å¥à’É¢¹)&´“¡ ¨5Æ·ñ80føW /-09pÚ9“Á¦€ kRÊ U£)¤ÈÂbN?ô"廦Ql8YLß)EEa÷M…£J¦7-¶wçëaÔŠÖ[ì…†uàµ`øL÷ñ}&Òùàš¿V?ßãø¼„›¿yq†~Á…ŸcUg öZϲÙ&eº'ô«gèåú4ìô²íÈß‚î—ûþü`ï<¶Ay@ý°½c‘½F}ÝÎBP±€ó~±ͲïöÑieˆÉÉy:-oæê5è=·…ï¼¾Æ[gï<2˜_D±šê¬öÞÃ;Ã÷Ò×LØ[m~°ŠÅª#Ï£€ÄÜ=!›ë²CR˜Ÿ"pšJ™ÄEà´'>€ò=¿xUá|0ž%¼9€Ãó  úâ¨Âëã…¬L­›N}‹¦/Ô¸­`C·°üƒ$= †””ø9½¨1-ò9c¹îæ" €üŒæ\Mzéa1?Ä?.T³•× 
Û\Nÿ—çå‹1ÇFo´È(Tèyä{9í»¼˜° -D½~H˜yïzïo÷$|¬bšƒýTh)[k>º?Êy6î sЬR¿Ë‘½…Ƈ[Ì€CzS04 ˜(ác ¤W4hìé#š2·Äc­ôó£MÒ{€” ~ª\Ùb³ƒZÂŽ*$œÈ—”ÑŸª*!y™ˆÏvfx¾WɵäL¢˜‹ãÕØÃÌwq¼Ï°ª$ÃÓ)œêÉOfy3“s³ó)]ÓÕÏm2 îbUg”FˆôÂLß­íµó$ "n®’/v!²Ôüêë‘Hùð3AØ; u(4úm/ze 2Qƒ-5qsÃe0Ï·ßþõÿ{mïýŠsnin ¨K6¬>šó±ÔSéäÀ ‚ä`(æ½í_ôçÍ€„&i—û¸­ÞŽu Ÿß¨"záÍyPb^O@ ‡¯®eR xyXacà塪¤¡~ZFæêÒ¢C<&Ð%†#lÃ5‰æ½C«È”> "„™&©½IX‹±*kMèÜöb཯F>“@˜Ï•yž_+ðü6Í OžÞöÄßêÉ:1'+Þ×ê ýkÝ…&£ÞÖ— xHþ–…Íó eÀˆ„¥gó×ݶ—µ×qô" ÙÐ †˜uÜÜððáCܸqÃÍ o»ùËð%NOO[ggg+Þû¥¹Y–an.'}µ\«[n5ÃÔ³âbrf‚Qò']¶aÐÖØ*E°S/΄¿¼6˜qø€ƒzˆ¬¤"™!ºkÔ‘Mäû'Ÿî”øjw‘ü* Š ¸½™ãÆF†û× Ü¿–¡ß‘‡Xšœ?yQÊÚ;PB¬¼êÌD^+ûTЃd^Ð@/"Adñ7©¦Þ¦™|n« ïµ"•Ø1R´i¼)\s¡× $¢:"‰¨ÌUéw8Õ8Ö†*³¶”Ϭ‡I±{\B”Xô$zZ¥Þ@ÅŸíOõ‰ä·¾'F“z× $ð«ÝUU¹v»Á`à¦ÓéÂð¶š›/þã?Î?ýôÓe¯67,,Ë2Ÿç¹÷s“Ç^z«Ùo‰Õ4r¥ ‰Ö*ÁªGe5¼È§ŠÍÞÌĆn>ÈbM‚m4Þœ ˜œ iœåvI’á9°>ð€6k‡g_¼*ñõ~%ü²AÇáÚš—óôÛnzXê³:á|”0šQ)Dï§$q:er>šP´IBÕ·ÀêÚµ®èn @Y¢Yi4,wJâðX‡lå—×ÀŸgEšèŰ çMtCí<õŽmÁAÁYBÇE8¨i ™]ùÛãÈI¸XSIVCO`4“ž¾ž"Èu^YÉñѽ>n]é0÷Õü^†sÇ1uæ»dsƒŸ/Î9_×µ[^^v½^ÏÍ‚…ám3ÿ6^ôW_}•Í­ŸeÙ"„,¼Ü¼6-–gY‰ (˜"¬´u"øð—Þ /Ú_ÙfµÄ±!7Er¼ŒÒ%±™?cRÎü ÏÍ&vp®¢˜^·™h­ç—’\ƽ« ¥‡nÛþ¥¢DñÙN‰Ï…_Fí³vápÿj!ë„$€IɇÂÝ%nó>ÆØP+54ï- ­-µ¤‘Xím<œàb‡À²U“e»'ó3ñ¿é…yÏ”÷Ž=h“ì’›Ë-Ü»ÑSv¿ìO¬ÓòÒŽä3óqû|1IÂ#ûâÕ¿þj$¼2 ƒå^.us¡sô»,2ÐZ3Œ–frXo-¦ä£|Yœ.(ŠÂݹs @›Þ&óoÓÅù¹u ¼Ìö‡S[¬zbbEQdyQä1ù<ÆÈYÄÛ1™s‚ 1Td"°‚MS8%wÌ_e†^ìQdÒßz{b©IÌ4ç'𓳦ç]í9t HÅlçM-ãî,y\]Í@õZÎé<F|ñºšZ%ÕÊ:&t[ÀÝ­E©b~òb&y7}@ Wî…]óZÞW6ƒÊcCCÃ= ÑôÌb‚òY¥ãì_h~ËT#9X˜ G¹¹Ð«ÜwÆÂª Á?ž…`x]1A$|´Â) ýá$`2‹xº;ÅožŽðé‹…e I¥Óê®®sk -#È0~§&×΂¹åÎA‹Èþ“ÿaǧ”²c6_‹gct‹psss“'xK·`yñâ…Ÿ[@@Ë9Y’‚˜SoLþºÌmdÙb)ò¼ 9•\uè¬m¦åÂa#¦Iú³6bû*ªª’ñlÎEتÕ3cˆÇ@Ê%ƒ,ÿŒ!-Œ‘ [ C_þ§U Yއ€xY(2r§HaùþlÐ>{Yb8BÏØZõr½Ó*J5Ôä^æï¸­¹¤H¹n£Ñކ.h ¡œxÑ^2œ§v¾ì7ž Cp*À²hÀÁÌ]ÚªŒó¬/XưrÙZ2o•©Îýœ‘ßÃÖZu”Ê¥Tûr,̪DY‘³žg"c÷ïô±2Ș—l $ªw0ì´¡<42©ê˜;ç–yºen¾[Úúú:Þ‚0óí-çàåæÖÐ ÛB¼£ÉõȲ\?ßDB&(I¨†”ÊÄ3ÙÐT~YÖÁÍßùgQº5$ß‘ Bä}у“µ˜êù¨tˆm“ý”_U&¨‘ wjôe‡ úûò:÷™S†®¯æxïVŽ•´ÆÏnÀú`q<Ìd¨³QÄ×{•T3¯¯g’CC¢'© h+»ªÛU×â¡Òkcåõ ü/ï¸S5¶l¥˜ à†„Jê…ø¦j«Ý$‚€eA³ ­£É‡#`Êà9;mA'Ãxd Ü•åB<æªV s@  Xä`«óuQxš‰Æ!ð)+—Mî\2÷XHÇ9ò…G¶°†- ÀÂ)€>c殺ÿWMÿ´˜! 
Z“Å{Ÿé’/^‹9Ÿ§”ÚípÄ;s`4«É¯[úK3ÃòÚ&üñ?‹;ô¯¢Øü#Ìf¥(;Ä>Je" Øñ‚&HaÈ À ô&(äø—_€òÖ• yõ>μ8±Ìw6³9Ð-½rÞ¢wI{k䤇§¿Ý©d¦¢vA0‹`ï!=TÓKJ€h¢ šŠŸ™†‹‘š_ò>©ÄX†3çeùXêj1µ†\CPž“|,Cp&7Žçâ:5¦{×1 CÿýÛ=tÛ^rd××[ˆú/žW‘{¯Ì{\¿wçZ[&‘;Cò%‘7˪¿¦@3¼¥Üi (æ&@BX„˜z¼Ó¥ÓéàÙ³gnnøÿ»ùÿ?]̼éæVè*€Y¦(0Í‹A’¯ºít '«üGŸø“¯ñg]ÖŠyf&5¸\ÌGi¯!ì`ÛÙè½þ ò¢…k÷~ËÛ Öý½¨²uˆìs]!%†§ M‚Ù$Å5·Â†k5ÀrËøG$zF`O8H8:P<9rœ²äÄ$‰/ù³^ÛáÁµ§)W0“c¤p|±6¯ uˆâ­Mf”8b‘Â÷Õ‚UníÃnÃÍ%Á˜ ÑÕ†çSÚüš3âˆÌ[‘÷ÐxT1Z• MyUAŒ¢kæî¸¶¶„A¿EáDÊçÉížä½º-y-> nØ­ÍÎ|»+¤CM H&ÛSA­¶™¾JÍÓ™C³ÙŸúñÙüƒ›y—AAK.[—¤'(ËRžµµµ·Âüÿ_.d2™äÚ –`—&‘YMçèù:Äcš\ßè]ÛXYÿÖov·ñ«ÃoãùÙ-”±‹"‡Lø!óÞ6có“M˜'ZZ³ñLÎ÷D(pkûÛX½þ6üðËá—‘çq ºMRfèìs2·£H!Rãã°6mëm3«s2SR% 93†¿&Ç#Ç|µWãåQ…I™°¾ä±ÜŸœäYZäË´õ |ÑÈy³5†Ü…‘p¬ºÿ¬¨Ád a ò\I¬¶ ‡L)5ŽûÍ(7+‡²¦+JÕV=ƪTˆñx} ù}?S.š·”‘f5°€§æÿö¾2Èq-éò\éJ2»ìâfzô1/33333ïïeæý˜ùfžyLÍÜ]Ìàr™QÒÕJW‘qÃÑû¦czà"2$˲å’åS 'O<¤˜)»¨$<×"Bl!c“'vz6ñÆ(¹Ïž÷d½ƒ©/ü½ƒ‡ …êšh$®¬ðÒÝ*n,y…S•âo˜¯x³~¨ú!åìX¥,ýM‘Úolù|þmaÖ×ûìîîÚ±y€ar1÷1xE¦²…›/=ý'~ã3ßÿû7¿ÿw}ßÙÿüÔÉÊo%k©ù^Ú|·öO æéÇc[ÎiŽ—Ù˜ Z4 Œ{GŒ{ ;»4·0=³€§Õ_;ómüqZåÔÊf¥1}ŽÉ¢æ(£ Áðz&/Ï £ó³QÌYÉcJ ƒµýÂ@vê #Àê~€¡Ÿz[<hDd¡âö§‘Ïá¯1|͸O“îQðc^Ì/þ‘” ã±Ù$Nˆsd†Š+{׿{ð¹ôSÍ”pö¢Ù«3Rÿ *Êü¹BÊS Ò£ó¬MÔ…”³MèÒÑ¥dŠ…Q0çÎè}¤´pØ–øìuz5¿ ÇŽ÷gþèŸþìü¾ozé/þ–§þãÙ¹BÑTÈž­Á‚—ÑÛ¬¯×‰—––¬Ø\ `â-¼¯‰}ÜÈ&k^4ö§£`ôÛšÞÌ8PäžrÓɨùâ vû—ðâú7á­“h ô¥Û·!¥‰U˜! 
Ùˆ‚1‚ñ€Ív0´ck"w¡üÒò>“cMí~Íÿ¡íÔ[P&h².ZÄ7d^ƒ_&­ÄXPqu/ ¾IzKÁ Í’3DÕ ïk0Ney8w7&~gé¹ýFˆ­Zþòø…Cð öRM€ÀÞ "& šâ”0yy|myW.ÍpÑh0ßÌä§Ï›ª¼òS‹‘û2iàÔ‚)¼h„™ô‡ü†"hm¢<Nöº¢ˆ=±´‰ÜshÜÖ%>uÅÃ'.çqОA¥XÆìTÅœ˶H.©ÓÌ9jøû»ƒ±òiÒu]H)q||ü¶0ëk}Âååe+6 @>>W…Œ6y¡J©P…!zC¿~Pïôï®ìâöòâmúÂóžK ±[êTµáy¼ºýÍxsëjýr hFõ(bgI·#ˆb ‚Â` ú}6â™™ý›ì˜ur½bOʬ˜F0“ÆÌO2ùfbB·;ŒÐè) FǽK»ŽZZqA6W&!ŒEœ3ŵ|ÃG)eü€òi¸° QÉ Ú_Ì @˜Mïü¸]IÏà™ Ö¹ ,¶Þgð¾ˆ/þÕN4ŠÃP„}ÔbxXz›¯» J&‚Mï| `9$½éDl 󵀋Ô÷žÚ6n p£gÁ!4À¨pËÆƒ]‰O\ÉÆ^X‡í2NÎVbËaº”E.ãÒuÚ?jáÎÊ6nì£Ùé2Ù$“öž¥ŽmÛQâ$t:·…Y_«Å &b£j Wµá¹*¯Ô¢0$R„n-í`ç°‘¼Ž¾ÔrÁÅâLsÕ)tÔY\Ùÿ¼´~{Ý*„p%·æPòßô¦ho|BÐðŒ`Œ06fâG`b«©²ªŒ sÆÌ0Ó–34)$*äÞNÝ{HÀ4[Ûß Œ&>ÃɪEUËÅŠ¥¯)‹BΕmâ‘e\ÏŽ;!+{ÏÅaçSX=žFŽŒ¸ `IXdvÚ8œÝä#ãÇMÛ|zýý+²ñq.ÎÌáfçñàøi|iíÜ9\„p+È«ð²%H¯ÇËCØYXj"X©V‡?ØØCçÄØØƒ3Ȳ‘ˆŒä>­¾G³˜@Íå… pzÚQ'èóÑœÅæB½£ˆüšqY’Æ÷ihI*¸Hý—9WйÊ9Aá¨{›Â ¿ 6öMVˆ83oNŠ7ÛœXcŸÁ‚±ßl¬×^óølc¢§áá¯ÜföŠt,‚1(v¡&å‡ÀÀ%ì#v3æá eQ˜Z€—«ÂM@ÍÍÃr²l“µÏá$˜_ÅÕ6ý`tºÿŒxÅS™ ,Úkx`¬D–·ût NT-œÔ€ÆD]ÎE9¶)KÓÑg¢„ÿa3D¨; ºCEf±$ªA]` qÐbò¾Ò¼Ï„С1(B²Í¦Ž9bÂ2æ€ùñq¬ÕO hNâ|•9ÍH°NóËÀ v£ø“®™r¡AÎà”=z1{& w ŽccûØŽ«ï9ƒµÃ2f*UœšÍc®’C!ëÒç:nuñ`c«Û5´»L— øöç.â;ž»„S ³Ô1j‚7óè,ä3^úƒyh )=‘ÈŠÞ•zdñE±=2ÿÖô syœc)Êd0£N~@Y¡R¡e[Èf2Ò†ÐÄË´ÍÅs:F>ªRleº)â*î¯ïS¥ó°Ñ¦/<“ZÙ‹A/‡aë/>ÿ:Þÿñ;¸ºÁwO"_^„—›"MX’õìÁlz#÷ ÅìMpXdäÊôb&œ™’Á=ÄU0[¡\$’˜ÑV-Äfl7ѳc@³‘u´ÒþKD*­Œ¡7$®×^C¡Ó'ð"!ÀÍZ@Þ&ŒåÉ\` ~ÂÔQÓÀ`Â0š8"~djçO&ôx`ÈÀ93žØÄ2;¼ðñˆ¸ Ìì&0sgú±©#–’q™<¹És=Ê+%n™v Z6>úº‹½žÃqoggpa±LÈR>8¨·ñ`};‡-Š(lÀ.à[c/lq¶Bÿ´Sð²è~ É¢Ha<áàèXĆV«)bU™„Ó™0ú£Øðv3ë+|›õøIüÇ®è-ÂLK‡š–öÐlÝü§›OÀL_ ¯ƒ0)Y(…¬cÅ@U P+f\tû#òZ›GUÌOÏ&Fÿ³®CÇ' µ´yˆå­CâƒE*À ÓÀáþ)Ï•ËfàH A¯E×@:[)…0L,Œ- ‹"•V*Ie÷ †+·â¨Õ-ÏVK¥Ø@ËÛÈž|ù€ß[¿§¹¦ä>xm(Å@s}ó õ¢+íÈ–61ÓWv8lõaII€%¥¯]ضÅr8 ! 
ygdôŸl”TF °XÉà».ÍÇÒų(fäÆ#@»»²‡£ä½…HH‰Ô¤{z6êÔ,ö†ñÊÖs¸¼}ÇÃ"HDÏæÜ‰EÞƒ‘x$WôtyßL¢«G4nGœ“C)'p~ÞÂL‘ã›DŸe?$1:"2[€òi»õ[GaZéK¹´Éó¦öG‘ÖÞ‡Ù¼.8!nJ!i¡Äð=”˜ž¬Xb‚ÅoÐQ¹‡¼öÆ,Úo ¨RÊô¯h™ðy˜0ƒ$Ì3³˜À#Ú¢4OÞþÚ…¾jÅUÈ,¶eT§Ê8=_@¥”A¹à¤¯[ä}m4Ñׂýö1ŽvÐï4àYŠx4ì ûq0"-ŒÚÓî<Â=™ GÇMÜ]ÞĽå-´º}8’ˆÜcþ.ÞY¡¥ø:"³0kðíõ¥m÷G?yå¯~ñêÚ߈›Æß¶ˆB•´Ùô±qØÁqk€´q{”¸ÝA ÁŒxTHL™Ïd2d PUs6fK¦‹Yê ŽÇXÝ:$êÆ~½… ýSE—¨Ó•*Zê^í•ó¨ ¦èFsl}>aªK D+ÐC|4e…”nŸ;~ÒôûJ ŒºEìüåýtþ]EZ!)¢nL—6ôÇJ_`*oÅ`në©K&¨òp,\ óAMê™#CòÆP¯`p0Îe "2­‚rŠ•C hH§~›ªÐÔŒÉ×ÒÂDXÅŸ;`ĉ·¥`ãî–_zIâodÐT‰U~±„Ù©ò°W‹lýÛ‡Mʇ…ã!ºÇ5´Žö1è¶ ¥”°l‹"©¼‡éRŽŠN¡ éz;’h?”#ó2®¯ \W7ŠÖqýî2î®l¡Ùî¥(>ÿxë°ý“û[±€ÂÛx±¾Þö<º€C½/  ³Sküs—WþÒ‹76þA£3¸îI›\ñã6Úè }×øHÕfi[&ånºȤ$‡ÝN›{5¬í·Ð% uùŒÓÜ]DúEµ”EÁ“T…èŸq£Ýï»UÜYÞD³K\Çî-oýðòÚ÷þÇŸyñÏÝ\Þ»rùþ¶Ã‘ÌÛ{‘Aøz.Ñ—QµùXÚÎhÏl¤ÇÈ ¥¢huçøó{ÍWÏ-Nýúo:?÷gcWýéÄvê}4{cÌvåÈÈè|5ûè FTÈdrhvWì}üÆòþçÛ½Qžm[Y¥’…Í˱wvíÄLñ¹§OWûbµð½µFÛ«˜*åko£@nàJìÄÀÏán½‚û‡Ç8[©ãÂLŽ@n$ç%ž¢V‘ÕksT>‡œì¹… ÉIÄ›Ê šµw Øi§!7GDžµbüôÉH}–žG @ŠÉ—à…Î0ØñæÂ2>&aÖ 1õ6¯ )#˜¸9œP}1)«pì'fX ìH€5ñ‡–iÁA‡ßlKе©µîl ,mۀȣZÎ`nÚA.ëRˆÐéßÖ¤Þ åÑï¶È{²Ò<$€I-4À&ÄNIÔX´/ŸI*—eäs¹ÀÖpÜꀺRAu¯?ÜýÅWooýßÛ«Wô;{úÒ†ŽÌ;È)-m¬Ÿ|¢Ÿßÿ±rv<$\¹”À|ðþ2Eª³²süáýæKNT~Û³ggÿX)ï]èôÇèôG˜)ç‘õ\¶fgPØà&Û[7–7>rcåðùþpÜà !²º³@ X‰aë u7kïÅù‹Ïœù-qo篌oä\ÒW)æ1?]B1Ÿ¡~ºL PðlÔ;6º3Øjw0Ÿ;ÀùÙ>l”Âp¤™}¦ôƒÒB®Ä ð b ¹À^JÑÐ^:j0¨5AÀÖ즹"}·; ¯c¶¡ ©ëŒcÑûé>OƒeÅ£êÇ"‹“mD¼Ÿ=;³bÉÊ&Rr˜jòÅh‘= KážøÍãCDƒ–KǃE éZXÆ89nýâ¦rV¼Må…ö›w76¨ÈÃéù¥2®CUJ¼¡z³G@–l‚qâu¨ËÄÒ}¡aHžlK›B˜ú{Ô¡ÂÊÂúóYÈŠÈæ‹°l‰Z½|6ƒáØo­v~.þgû¿nל?5#s»b|ßÀR1EIÛï4ì±íëï©Y`°³Äè1ô¾œñùu-UïáVýg×öŸ¹trúwÆ€ó'‹9ïd­Õ§Ö8$ °¨·«Wì|èÎZí…ø>hÈ !r†ÀAD+•þÜ, %4ÕÃFïû+¥ìÇŸ9=ý›Î,”m³Ó+µº¢uÌO ØÜ¬×µ“pÍ®Äþ ˆµ.Ît¤9¥_q è¼7gSN 6"S”Њ =wƒU®û =%ì E °BÈݵ1¡;ä©Y‹Bª@6çyšê²@yøÊ¤G¦C]§hÂïf9l›mt<€i)ˆ"Í#†ð(]ø23ÖÚº~c3„åkÁÕ×tv'weÐ6õ¨|‰•½DJ:ÄʶÏÉb¾šA.+)yïRþUxŽº$)M8¨Bô;-øÃ‚`l¨þ¦üÁ@p\ÎÿÑSZsOçni;íQ%ZM±„|)ñ8]Ì´1õëK{?÷‰Wîÿðü|åv|œõÿÒoM:S¢áÈ^¿¾jƒ=1 €^ÞMÌ~¦j°,a ÷¸ÝßÝûÿÔë+ÿòÞFý—#¿ÞéõqmŸ”7ŽÛ€¬— pÍ`a:‹j¹‚Æ`µnǽnïTÐK â…IìdzºRÚÅlu´«¤S‚*yÄëÄKæ§Ñ4æË\-º˜äÔ¨:f›rB™ÃPÑ€¹˜b†f6'à#²GÒí‘PÌ`˜ýÕ°~ò‹ß ×Å :@Þ`Èò¦Â+„1ÓÒMܾ’¸³!på¾ÀÞQGM‹Ó•Ü DŸ(f3t|R­¾·º—äÁôð݃vÍ£= ºM’H—ÒNçP¦ƒ¦uk‘•YÉôBÏs?©€í8(–+X8u¥Ê 
éÀs,„AxøÊÍÿò_þ¥_ùƒ~ãŸgr™åÛwrßñÜIùý?û¼úO?ügu«–ÜC®î›:‘ïì­€M°=QÖÿãåÏL µù1-!{k´íèç$€L T½[+?¹¾×üÜ|5ÿtÞùaOçÛòÊ!jàñbîÒ`ï,*™Ánu‡õË÷v~1®}î[/Îÿ¶o½0÷Û½!:›CrmÌL¨9=Ÿqè?| »A¨°Q¯`Ù÷0_8ÆéÒ!fò}ÖûÒí;AÈ |ËfñCÁùv¤íLa®è$¼+¬–ø™¯ø>bûGzaÄ1â½I£Ñ …pÏ(=žP¹=g(EXiAÀíX쥱ô5"…‘(Ã/œíx(NŸƒûˆÂ!Âa‘Ì@Iúª ´…@æØR©ét6 èÜ,pcCbm?ƒöЦÊó©9–’mYTÜ9<Æq»‡ÑȧT€n³‰a¿ h>—”RW¡•()åGèö"îS](¶”(NU‘É`ÛR· ôãƒ×ïnýį¬þÄ^½óPßóÅ¥õÀðÇÞ÷Š—¦Xjõ΀ä˜y¿Ûl2Yh‚Ú“—ðyô1VzO‚×¾­"€±~lk@Ë€¿Ø@'Aí8O¶Û~/o‰L@3ö+½Ÿæe@¯75Öv¯ü–ï>÷³¤ü™Lñ¡»;ÆîQ 3¥Ƶç9©ª,5uíYì´ª¨dê8W©a±”:š "…‡ˆAÌâœ,@‡¡à‰AfžW· q˜Êúò)p‰TúG'ûuZ7À›ákªHkJòÐóšÂÌOp.`îXˆÉ¾Ë4¤…‹òÙ_WôëPA ©¡‘)C^›þÌBŽ:Oå® ˜fÏÂÝ-‰Ûë|• ËÍ`¦,)tÓ$RŒ|öÍv7 ç¨r¹0[ÁéÅ9ܸ~ýn›Ž…°™‡¦7Rã>_‘V' ÐÒm "PœÿŠ­D4 †p;ÉåîÆUÇþÒÕµÓ[:}RÔ÷uOßû޾÷%8 î²b{ÈÞ ÔÔ#Ô2Ĥb˜çâ$¦'2ûÆç`¯Ì‚éÁ9§FÛ¡ÞvéØG÷¤ Ó#àâÿ° h–~œ¬œø颕‹ ƒQ„³Ôš²]kã°ÙCµ”ÇÜt‘*©Ž´á¹ ÙPÁ÷óñq\ÙF¥ÑÄ…Ê!NLõ!0‚bt0x p0½(aTY[h±‡eH7GJsÇÀC@èIs AH²Bf ™ù*ò!$<çhòJÓqJÕAzˆ‚> (OFÃÐì€>WÚËR(i6ÃK›„pÔ±HFçî¦ e¨u¨˜wé\RsYâd9­Z>½ÖÂâ|BvÁt¥DäTúšÓÙ¦i2,:¿–f§2 ™CÇyGÇu‘ÉÉ“R{`ÐÛ/ß\ÿá/\]û¹Z£»–z`" bxñýûè å»5Göø Æ€âk ïíÉxk›QZVdüx¬ŸwÁ_z[ÓÕÇõãØ#kðß@ÏMœÿš™Jc@£\µJɤ$‰;žš-áÌ\•‚G7t¬¹NÝË›5tûDAÖuIŸjºìaaº_œÄÕýgñüòyl·§aYIXÊÜ*>±ÒÓŸÌFq¥R㛂Åa ¢%룙ê€Üs©5ïYУÎR«¥=ÅÉiáüœÀü|&ξDc@ù@HkÀ‚ÒŸÆû#¢þG@Pû2‰ƒ–O]sã6¢<–÷¦âk:‹ 'ÊÔB”õ(ÄG¯?DBt^Û:$i˲0W-ã›/Á3çNØaÒÃÛív0Ù|žòéB}]© N7OR'¬í T™Euî$òÅ)â4º2¡ ×ãN•ñï~òù_ý Ÿ¿õïb;Ó($ø·pJ%[dÚ;k‘_û*£l+W˜€öôý9¼4AÓT Ýï x;¥mdÁUG¯CþÜôçËÿ¬ì©qxc)Ey*ž eY‚ä·³3Å$ÿfw˜Ìé¤Q7­ZÄT¼v¤ ¢¬ëX(´{YÜ8˜Ær½‰³S8[íBb¤›Ï¡¬¡Ô ˜Lk„˜Vh PLƒç©ä¦Ž1©Î}MŠšRØæ·eô>Љ¡· !å¥H 1Aûùuæ-B`c³&pcÍÁVÍE>›§ñ€.ñæ$yZ¡ I5¸Öè Û”ü—8µ8‹“óÓ(d3i›j#VR¤n³ùÝÈñü n&KÀåx@P’H®Îpõ»›ß·Ú½?¦ÿlpuÄ0Ô÷˜üMë!§XÈžxå{@6¹¨G€šEö•d' ÎlVäp5 “5`Oª²’Mª` ôZËâ9æ"X"(í2€ a““¯=Àv!$õÉ"6în ÜXuqÔJ<°l>&0’¿!KZˆjU }?$`ôG#œ>1‡ó§i¬šboT«‚(j!K¿¿Q4B¤ø{tÔk/›C®X¦D>U€¡(‰ßèï¼pmõb*Å/¶û£G F®¶NÜ #'Š^´¼d__OmLf€™õ•Lil°g&ÀlgŸo:Æ?—W==^Ók„Þ¶ãsR2W+h´•Ø…C’Ûs$ålŠR>Z ˜ Æhuk4€uav*µ"õÖ…ŠTPÌÉø+"¶Ú=,ä’óåÌØ˜© ›é!AÕHCã˲ˆ >B 6f5Ñtçx ·%…º÷t(‹4j¬¹˜ÞœIhU€ ä@$À¥/½%³ðr%²ŒÛ»®\–h²˜*Äv*I:a’Öý¡ÝFƒ<]ªv"‚?¡Õn“سN¡˜Ï"ˆ ›†Ù¤×#D†\JAœÍPÛ²mdryä %x™yß"Ѝi¯Ö¼öÒÍxíÎÖ‡¨í ðT\…”:t´Á€š©¾zïÙ7*¨ñ—éÞ‘z*~^7›³fõS€¨C¤ƒ\‡7õÚè3:ÿßÏH}ç%%„$]L7>€äÒ±gäöy.ys•nݹ£½]äd€Ó39â`96åµHT°ÞêÑ 
È´ˆ0 qxX£$¡V p$qÇ臔’<.A9FîÃÔ±¸nk¢÷§N}NÚI@–ÊIÇÆöa믯ý¯XìóC‡ÍnR±Îå<7 ^WFc?ˆ.žšÝ_?@·?v¾ùâB0ùCD’á# ‰@ô]Ïž¢5Þ勤 ÁËÛÔ®Þß6A F~Œ×Ö[PD{œ‡àç]½îësY´Í•Ì¡^Gz¿ ^Œ ­*Ò5›Êcºœ£›?¥%Ø®«ûì,¤d‘–1Ž,EIfÏË'¡p*Z1]D½=ÄÞq—;âü%­ãü…œåbR+çúC‰æ¸ˆW6g1­áÒts¥!\'€¢n↥Þ­y$šÑh 0(é”™Yz䇌fšßңǫqkyCn®Œ ´pweW¯ÞE}oGád ò®Ì“¦TDÈ£f‡h,)ñ´Ýj¢Õj“†r¹l›éƒáa@ˬ4hïUÁJÅál€´ôuQb®—…©÷¶Þy>–|ú߯ÜZÿlÖs{¿ù{Ÿ’×—vË'fJý̓fxv¡bÿÆï¹4Ð@Ƕíïxú„¿¶{¬¹öó‹Óx*¢§NÍ€¯Ø{‹üÕßvo÷%þØSµóÈø¿»Wo»q膠ßòdºî68æ)®Þç€ 8—¡+M †ÙhIÖ±uØJÊì(x‚ªBšú¤”“ü<Ï%¨…´×U2ÏŠÀFИºD´Þ`ç¨Cï›Hp·zä™ÅÔ j²4Q=£àç*ñ1ÙÐf0WháB•ÚŸà Ôá\  ò˜³0$,¥Œð!ÃŽŒ‘™ß":ªq$7oÃì20^Zˆ&lÔú6vp÷æ4öÖ‘•!.ÌçA˜´(¯ÿíÄÇ‹=HM­û8>®£VoBJ›Ìq¨ nù¢YtURøÉŸ‹ÕY•%`EŽâsllïÁÉ–Q-9ôÏF“£¤¾ô¹7—þ×Íåý/&mo1 ÚθÒM8…±ÚJ'!Úž[¬XÅ\&еŸ Ÿõ¢oj18w¢%໼u„ï~öädéï-2Qš|.ÁùMßèg\GÆ7sõÁ¶ƒZ‚‚ê-BN³¶ùyI6I,îês8ÀÚ0»úÌü(ŠÆŽms!Ñ;ëô# Ð4iÇ‚„¢´÷`é„rDÉfä) qó8”ѾRÆFétýQ2`·‡ýz7‘!Š­‡B. ¢nLå©xà9É92ûN|| —wgPtê¸ÚÉJŠ’óZTJ±ÐÛ‚‡|°Ð"‡”œìa¶-(In¶:fn-‚­šÀµ[õ\÷c(ºNÏäéšP!öZÄ¿‹·éÚ‘:݇A¯`<‚DDÞp`(«ñhL×;¹Æ”¼×`²"$¡íºº¢~ÜÄÎA¨1¶-!]"Ñ€Šûv¿ðüµÕÿÇÞ5Ù±uÝ}Ú·¯Ç3IFqžm¿ß¶íÂoÛ¶m›Ÿž­ØÎ “IFwtÙú÷Ù}ªkW¿J꙽«NõM§Á¬»±öZ¿óÒ‘ég1ëZ+¸Žóe÷ìÒ0Ã’ªþéó‹!¾á·_3Òî«ÀÉ©y}r¶&¿¯ðó¸6øÈóǨu˜w­èáÛ¶ÂÅ¥5xuda|þb|4;•§)Óö'öœ±^<(­4i¬¬´YïnOŽezŽ×§qJõ{»Æú?¿œ·7b&Iû{«ë-²¢“f®m€F˜‚~àbp è$ËÉzœ (ÅP RHpsÞ£åæs3ó$³ŒodTvæ‹ÖŸ+ÏÕ1“³à•™ œÀg¶t/À`q ?¯G+5q¥qIRê1N‚,Ó K©À*Ã[™‹¼Ú—ž# „8{IÀî“:IùðôUL(äð¸DN•º÷¤¾Z[m¨i"áxÍuZâ>Ÿ*-‰ÚBž ^@%çz½IÕ¢]PîôeeqF,Tsߤƒù…EìG.QYh:Ý7u!§“ÁÄlí£¼|ú0{A°‰Ï&* TÆ7t¯#0鯯›¶P)8p†º£ÝG§ÍçöŸƒ¯üÔ›:·\5 ÿôÈ~8tz®Ý2˜e`¯!ÄÏ}Û§Á‡1†Q¯ù:Ö“{ÏØSk6¦ûæøjt?=Õä}/VrR¤&–6ȰÙ.h`™º¹yC×'oîþ"\uô}i«³–]ͦæ»$jæóùxi¹Ý&… ä%F dÐJ|(¸f—–›žüý„hya~»×Îa?í×6Ë^¬qESNÚé´-ˆâ’—>n¥îÑt/§-cÉ9Ê«`! 
Éï1f’I°0dfB™Šçû”•¾O»„&º݃[aËæmà5.‚¡%þ¡òš”s”Í)4Ó4å=j›$Á=1¯Ãî!ÌÖ,¨–ŠÒ¥Š2/׎y`Vû_”u&Ùžïwhy»U_ÅŸ²EKÊ[Á¥{"©æÚ¤Wä%ebŸ÷éÁðÐeh2VV×azvj`žOZù4HÀg=ëü¿Ç÷œù#TQyÆÂDmKC03Ì|2í¶«†=Ç2CËÒ5|óŠp(ãëš &¾Ü]“Ô×`Yròa üsGnÎjú];Û7nß(°§f>³ÿ¬sbrÎf{˜<Òcn-UzÒO5Û ðz©)'ßdÈcfØ@YŸ•j˜9~ÚŽáÞÏp£Gþ,­·LZP-8PtôdOPÓôØÊ¯*ãÁôìÌüÔêíÄø$ ûD3G&æž819¿³–utIïÛ¶©çŽñÁò®cUç–V‰Š€Hëˆ'D2yG‡¶ï±Å.8³´œl ãøvо´Æuɸ5›:.J^ZyA®75Ôk‹3°åºYj} -’ ·Mƒ¾ªˆÐriyþNÈkÁkC³¯fc B5ý•A´‰Xq‚@ŸU äHä²  Ñö áGq£_5õkË«XB.Â"^ÃâýX—8{íSçÿûã/ü‹ã“ Ïáçl€ÅÞ¸­ˆa°'ó2 s0ºL‰#Dðܺ¡úÉjŸŽ%SEfb¤æjêØ?s ‚ &hQ¹”3$` ÐÜ ‚^£ó–”^Ö…ùå©#çæ=5½°³)lùÅú1>6”«[6vß¼uc×=Çì¬Zv‘ºQ¯‘PŠ ­ŽàB‡%Va¸²€;K˜5ÐÆßšA†šFÌ ²ÔŸ2L›2²m[1#kbF&húª=(3SYJ®T?: ¸)0³t¨—‡nÚA5MÀv ÀÖð)“ÜÀë@«¹íF=.ä»–J‰?áª%YÛ}LT(hhoRÜsç­ôgº8¿yÀlê¡EMìiý;–rbjþf`‚õOÛ|·øúm´[vmòžºÛ†{}$2ký]…Èu¬·€–…øî/¿.Y”±~{l÷)û9Üs.³àÎ'Ó¦>Æb`lƒ€zilq=À/=²s´ï3Æ+ŸšsÌ‚‚Zlî¯P)£LìôÙ%¹VÓ‚ ’ì“2šÙÅ•³NÎ>ŠÍä}~¶@õëDJJ"îÑGžm™Eü:×oî½·Z´7ÅÝ!e¼¤îX´=Ÿ€³Ñ0ƒk‚WÉ0EZ5ï“D8€¡JM¸i:Ù6ؾ}øõ‹ ‰¤´¤LÇÐ5¢Q¬4î@›Ò@lr*,Ê*‹øuÄý’”’z#¦9rj¨¾RÙ¾&ßé ¯­î'™,‹ÈLˆ\¡†å$%iΑ%g¸vàÔ…ÿ~bïÙ?ÿßWÔœ#§©Nžq½Ȇ œN†÷ݸ9ÄFôìsä¨õ%Ÿ|ý[‰e!~õ;> ²xmáX†þÂÁIõ Ü‰Ù¥\Zý‚ƒ7¾XÎ]lm#d=53]¶bOeãΑÞϪ<„ß‚KËß”‘Ù¤HêA¼–äÒ#àÄÓ³œ9¿t0Ã[™âƒ½ÖÂX!Ñ7M#‡2BW£ÃÔ}=hœ¡ÌªÈÊn §Lœ4]רGÖìx(>fƒ±¤ôPqÆ»— ’‹²ADÐ %ÌȶïØ„ô½Z–AYØÒº{OprÆ$!C©V)Ú`Õ¤H¥BAÍüبC@§Ùå#•’qU‰kþ3ÓŽ(yÍ@+4$€‚€«P*Ë5¢ÿ,ò£ ×óõoOï;÷{h^sDÞN ø®0׺ ¶lìÞ²5,䬳c,döÖ…xà¦-Åë/ÿ”48ØWËÏ.¬åfj+ÐhÒ‰t)èêßÛvì@ ›CN[/÷Ñ‘ÈÃp]èŠE ©Ìlµ}Åu¤PÔ©T%ÃŽ(‚Vs&^'aæ'e"­,ºŸ)ž­‘r«ºG=4ZÊ)SÏE¦¸ûXC;µ¿Fâß¡µßQ¾½ÁÔ„Cuø*4xû—¹³Å[ÿñË_'^>2e?¾çtW§ ž¨4‰´—™Z2Sï€ =ÚB@Tr!”Å^GñÕsV¤^»ghºä¤ÐÔ?eßð™7þ@¹^\mPï+Œ,“ìûÉxXö"ˆˆÚ~HÔFczóËpçµØ<> /ì›4Bpr`S_‘JVù9t!Hâ{nq…àCˆˆJøtZMÊÀðµ"' `%D f£¦i¬Ô@àQMAzÎÉåc-|;éÑ@3°¹}'/üíã{Îþ îDžµÉñ™^ÛÙwtº3s±¦3•+eh£Óõƒ$)Y?ðÕXOî9S89=_\«·]‹eLQú5½€õÌä OM0ÀãJ¶E6<€×dé2”ƒš³à[>çæïÙÔ[%×§¦-/æ»x4hEì¢EuMêÝ­5<štr‚ËóÙº9“8`2Ö-@ú>×&‘IC× ×Vj”‘ò+“¿&0DÉ’vìëI™O“×J?ŸÀ,çJ+…‰¶‘ig÷ºø×˜]ÿ ®3NKx(ñÝ}Óqzr¾=uaÑâºu\ê`eñÛßóyæ“{Oç?òܱ2R ©>VÄJHƒ/²«ç&ëb±2Ôf¯5uLÖs¯ ÈØ3ìð¾ñ3oø^d¤zª/å‡!‚GrÜ2 ‹ ¢>Ö –›»ÆðZ¦´ Á‰>]Û÷‰87ÜØBA2:Må–îC©X@òi/™Ÿ<óÜóI¶F1q6qˆJ"í¼*+‰?æÕÁ‹´ÀL‹^ -Ëßûyt$úë§öûKÌÀÎsp7Mêj/ð÷/åëJWÈ@,²]°…ò2žˬ4(,»Jû䘴q˜n´Á~áŠ@våÁ€ÿ­Ÿsó÷ŽoèCKÖŸ4¡QoLfi¸rEDR? 
f>QDvŒ ÀÈ@ØñÝ$}ƒ\ˆæ—×É‘("Í5é9Cüèíª@ÎÍÁ"®ÿ<òØ ‚d™;œ–L&ö¼rò¸y/@Hå˜|áðÔ_>hêŸqÚ+ÌKõÀ£P˜ì~Ä{_ï¾.~:¼+‘³ïZÁ³ Ä=–Ii—‘J»DiŒ|Ë]שôL¹®kt^;ñkxË® wâ ¡!„Ƥaó®M%cÎ1U_ $PëöÌü ‹\šž]‚¥µfAÐpÁø¦¨ аÂz½“SS ˜”Ã’jÒó ×õçô AVí·X]3hçÒ1 I9óÜÉ_úûGüèžãþM^:)Cg‡íÑš¬Œ¼‚KwYF–…PLÿŠ’:.0 4ýÀÉ0R‚’|MÊaÚl!/ISSÍ+{dß7Ò_óü ™øDÉŽeà+,^qâG{€²0º¿ ³3]±éËE†úºho2‚d’(íê¸jËËðÂK»å³ÌÄ’i%£Q€ê™¦A^iákšA¿Gà $iø.éÿ faÿŒëXÙOiÝû\x”qÄ¢×™e‘Y¬™_e%(oøSÁ–Ò•›.7DQ÷rÜ}ú2„^¸ VÇš­6—²þOÀ†J;*5I:›!š-¨¡ÐaëÆnrOG;5°„œÊ¹!ÖÄ_Z\‚=òXbÈ¢@+ÉÀÒ }M7¼ P(UhiH®äj¤I1Ãã`ö\lè±Êˆ¤–RV€„šìÍ!xk§Yd@–ý[Ù*SëÂSetÎe2-K]m¾ ʲ3#ŠWÎÈ>û¦ïCîÚîU*¡£@(!œÆ$Xå$D{š~\êÝvÃèë©Xyž—,k+U\&šh`FVƒ?òxº÷•&³Òš’j⫬MyBÀÖ?{`âŸ?4ùÜm\`™íƒoa0óÓö€ˆešýY¼EÁŒO.ªc+@++p#ÐJk£1wtCeiuumðeu–¡]I@Q 5ü[­ض AÐQ®Ùz¶'`Q=«BÞ…îJIQ%€±ñ)H,ø-h4šXÜ’\-ËŽ'vŽ$wðkH£3W«ïl÷é?B_ÈÿÄw•ÜÒc{¯Í!9m˜XÖìÏâ­–9¬«AÁœ´ØÄR°×a*ãÐØkž¥ðoÞ>xWµ«zAHÖ¸$$·ó$c¢~­ %Z‚›{ˆžQ*͉Hí–dv”ûæäÔtòy5åøa €JU(UºÉ‘ˆHs$_-`~¹¾û#/œüázôÀOá>ãÏ ö÷° ¯ÁlüZ¬¹¼ã,ü,²Ò2‹W3ø™¡kž~<¹TÙ©±©A@.Õ?²Øç÷¿ðþ?€òB#åb²±€“M鞆W²‚Šd![*ÔÞ{뵉3[™ƒ9+é™Å¥%øÄ£O$žÒ‰Ûv àºÅ„½Ÿ³múØéKµ—BñØ{¼Ùö|ç›øYd¥e‘:¼¯ÕÁÓRÚ4ô¨RÔe>éì@&+7ë¼—VoÑäÜ”êT\ƒÖyLåö„‡¬ìSË¢¬Š©MÐkAÙZ‚¿|’® }"¢+í?bŸ21¡éÊ‘(ž˜NÏÕžAëïï>>óöí Ø›)…Dz1>|Y³ÈJË Ø8§ÌW(3xæÀù|€™‘šÚ ÎeÛ¾©û×±*’ÍÚlD6ul4‰™°ç‘Ì½Æ p")-iE(ÂdP€ÏÓ‰é±êjÇ÷`fvZ@©Š£‚–A".p?þŸOùá~üð/¡Åq]žÂÆm¢¦¡kº†¿–,޾²Œ šý'²--ÄçDwÁüuèX:açö ¥-dçý{Ä5#È⃇&—Óe'— ñäGmXÑ:\uW«e`Ý{Ýð·ŽuÝ„d*Bšdº¤\!ŸlËL@Jc²9ÊÁîºi—ä“%>|ÒI²=^k«0;_£ýKÃDû#ÀýÇÇŸ90ù»ØÄ¿N³˜3s¢\ɵêm?{KN8Ü›œ«éa$—êNٵĵ£ÿ?^œ6ÝÂ-Å@ú uå>@Xâî]½Å3ž=:Ï7ø¡†?c«ÕuO#ä6Õ³$Ш뚅<²[QÒç3¤ë“üÄjøÂÔ‚™ å‚D$†è8¹n#âÝwÛuh¢f˜¦EÞ˜Ž µe¸pi‰4ø•Â9ù~àŸ›]þÈSûÏýÁ¾ö†Q´lš6Ü—· ŽJƒ•zG„h2É,„sË-ß—ÞFäìÊÛ†JþÑ©ÿgï`$É0Ž—íjklÛ¶µ¶m#8Û¶m#Ö™±­»àÌèÝW“ÚË:ÖvMò+gþýªù íèü•¯àm°&Èy*nàÏUàÁbPÝ0+`-¬‚åˆÑÖÒœð“3]e?¬«#«GjÉŠÁj²ÖŒT“ýKšÉ‰•-äšM½ä¦#äjÌoÞ9L¾|æ8ùîÅÓäÛçO’ï1ÿú¹ãä½;·’‡ŽÍâ¸!rû¾qrûÞ1rÿá)òÀÑÙ÷-m¯2?:ŒÑš7r”]@'¹°&ój{iH«sÅ Í[ÖZŠ]ÊB}#ôTFø‰Æ;×–f€¨ŽÑ@]¹Yö Ί™‡Þ'€ ÒY7† èñ£¶ÆÚœÀ±»šgg{Ê~\=TEV U/mÙ@%Y?VK/o%×néGÈzõ釳£ä³'‘÷ïÚF=¹ð¼î94EîCÀî=8õ÷î…ÖWÊa?¬¬,°‰¸-›QK’+3¦Š 9¶Ü]–êòlE9»©ÀâZlEÆä5v®5ÅŒ7Ä邘桮l,ü¦gÎ|,g‚vVÈDü¹ ¨XæGm…$ò;ª c/,ôVü¼v´–¬<´þJ²n¤ŠZÖBnÛ3J¾xú(ùøÞä±SKÉ­»GÉû&°iòà±YDlúÏí3MÏ¥ÜA𦽀Ú!]173!Å1^Û’Ž+çùÖbWé« ó mi¶«<ÂçET¾§"Ìv–…¹ò´ÉÚšÀT¤ 
¨@vðÙÌs"ègÝ0 ëoKAµµ X&‹üþÚ¢Ø[Kú*~] Ú`Õb̖ž…VŒÀ–;ö“;÷O»LâôqƉý¹{¡íé”ۊ€yá s,Îu4A˜¢‘ +biÊ” –Ržãò%)KÎ «’¥‰Rm¾Ã׿»lM’ª(iÐ@²Ë™…@àì‘{ÑÇ‚:˜,Ø’ÀÙ<ÇH†*: Cb{'Œ¨2¨©,ù>‚öçºÅ a„†ÓÍ›w’{1{øøFcã¿n›iz(?átõ0«¨cÈŠÀ³N2l*…©Š6pŒ6ÖV,VäGÅš¢¸´¤·œ[3ZÇí]ÚA§£ÛV•¡ d¯sV‹ÜûŸ=kYð)ŠÄ+¹qKâ9ÖQeAÁÜ ‘ã+ƒ:]74—'ßGxþÝ2ÝDî?:Kî9<õëöÙ–ûóv-ŽQiz1ŒnÈRÌ¢´«-¨5E°ñêMr[eF‰Øš‚„öª¾¡4)4”$EÜÀÀLw•z(Ä*Ý.¹#¸Èõh’ÝbØ‚¤#¸¦¢„LEŽ9š! œƒu]àX²üÓш¥Ião¬­»?5½€É4M«8ÞMEL³»6W25É©/NÈÓ¥Ò`cXU5†›‹x/v}õùbÆN´—p@adÆô7Ð@žËî .rÀ¬GyÑsuÑ1dA‘xu²‘w2Çxt`ýeÁ'2 ±tÙ’DÞ± YÅ6;îêH¢ÀIɰ!DmèŽÁÇçpzÉ…W^§:[ @{“ÿÚ»k„ˆ  Y·Üÿj´Ô€—_|w{oe´O^âàðhÿb’i¥g û ë½µûÞÇ1e§);N8$/>ø¬=B«pQÓÎeOAÓ`mê•ÝÛŒÎÖ‰‰ojSaZádø4÷{š%\Ô®ç:åc½j{•0OTý¾ ê[FiùuãT¿yʺá[T^~?¯Që¬óg° ¼.•Â,øÇjöIEND®B`‚cargo-0.8.0/src/doc/images/auth-level-acl.png000066400000000000000000002602741264656333200207510ustar00rootroot00000000000000‰PNG  IHDR\Â’¼ââ`ƒIDATxìÜ·nÃFÀá¼Ó½‚^€ÀY³F½yóäMKúä%½÷Þ{'-9œ!  ‘ ãô„®€“ô}³'ý}<ñ‡;=`Ljbˆb Š€(¢ˆb Š€(¢ˆb Š€(¢ˆb Š€(€(¢ˆb Š€(¢ˆb Š€(¢ˆb Š€(¢ˆbˆb Š€(¢ˆb Š€(¢ˆbÿ‹Ó£IY”Ê¢œ}·öùCÓ¢ø§X«ö£Á¾Ü›&¸3˧W”_w[ºè<Åh?Ÿ…ëíw‰Ûúè¨ CŠÃwkÕ¾[†AÅ1Á½X>åqKç!¢ÏMGáåMº5Ql†”ëŽb´ïŽÃ ò]/k€åcÑ݉ç!ÀÅÅEŒq±XÌçóº®+6\?Ä~”ý@cŒýp°QÌ÷擹ú×um7 ß(†—@ÅìDFäŸÃš¦Ùî&õ#–Æ@Û0?OÃmŒNÓö«ŸÛ¿úæ£(&ŠAþD1;‘ÑÙY­V'''¿u“³³³år¹MéDîìÚõ·âÙºwD± Ñá–fŸ·iÛ½{X†!¥(&ŠAþD1;‘Ñù9??ÿ-—ôÝd»[˜:Öø·ôÙ=¢Xþº¯·6š>ççÀD1Q ò!ŠÙ‰ŒÈÿŒXÝÛ±J¢ö6༠нqP„;¿ÛzÅD1¸Q Q ptè·[“ŠØ®u±ßîQæ}0źÏ'án¦Ç?¤l‰bÄwËõ¿‚(–Í¢³e=€¦i~»5™v ¿Ý£lš&e ÅNŸÛƒÊéÁ^GMÚ8¢˜Î[¼Óý(–ÿ¢³ ß1±úR†Ç…0}@‹³q49þ!~t®ðø×mÊ•(F×ëÒ€(–Á¢³å9€£cb‹ÅSŽQ¬~rÏ5×ݬ,ÞHÅü$Ÿ([‹Å¢o"Ëå2í$úÑWUµX, ŠmÐwî0~¨M¿zy¿Ã&Ÿw)¢X¦Q Q `>Ÿ÷MÄí¹]¾AYUÕ|>OùD±¦aØþsuºÔ~> WþÍiÊ€(–'@C¨ëºo"i‡QUU]×)7€(Ö¼q†?jÿ gûá å,¦ ˆbYD1D1€êRÚa«Õª]¶)3þQ¬{|¦O¦¿y÷¨ ÃFOÖ]ZQ,g€(†( ˆüü {çáæ8u5îÿ#Ù홥¥‰P–¢“Ñ›èàôDtÓ•NUªÒLõ—bŠÓœ‚¿â„5E)&üLqÀ,hùhÙÕÁjAáÓïØgQfwƾcYZß™9ï£GÏ•-ËWÒõÞÑ»çžûÿ°xôáGN=ù”£Å£î¼ýŽð#ìCQ ‚¤Xâ8 IX˜âŽ>¾Yú ›Aó=×u{;Žãz¾?oê{X‹9q±"|à».Tϲ,pý•$.ºwÆ™{g–îTrþŽ'E×Å?þš¹;\f¨^e.+¶ ¥û_F› Oí–O)FýEX»XzU‚ HˆlÙ²å'¥Ÿ|ï;ß}â‰'(-È.ÐþdSS°¬™žù¯ÿüÏwÆWlÛ¾ûλ¿ÿ=ý™$²¢Õš¦Ï»M¾'bÝ.û v·ÎþGÕn74EN W]æ TTÿê1`<£inØcõšÕ™éÕ™,„€A,Øã<a¡çz¸ñÐûöÜkz*‘bP˜žùò­_ž¿'±sÏ–a8æL&;3•ù»KýÐð㵈ARÌk•„>T:^0». 
}P*cC"Üm…¸£H1Ï2TYˆŒ”/ÅÕÍí” 9aTÄBÙp‡ÿî¢Ìº˜ˆg–U™-›XOõ†Ë~ÐâHŠùVUW„òŶãcÇ5±Å‚Zj,º¥áoœÑÆœV5' £+TÂè,þ*–¬‹ð£cb5«Š4ºÊ×Z6ßí6ùžˆqkxë/ØÝ:óUßni91–^• "ó­Zªo}㛫ÓYþ‚ez*2ëˆC{êÉ'{1­>ìóÖ[omq¶œ";tXoOôb|ßš{,[8Äò£—æ`‡™tW±árú)§â×ÍwvÔ‚ )6:~M…‘ô>Ïn9'ôAj8$Å¢DŠujšùfÄëuÐE!>Ä|Ýô‚!pK2ûbúf÷]Š5]vSáDŠÙͲ×7ÖÍ`|´±Å‹¬[#º'©è -å…x*¨`¸¬XBR,LdvE•ãl)…ŠíóÞn“ê‰Ø·†Ïþ"ºsZe1ù^• –7$Dþßãÿðû?(ÿìç¯n|6Aos@áuÜ‘ÇL§¶{.ôb°ù¡c{öÙgaŸ­[·ÂúÛßúön3³ÙT÷IOâþiX_tþo¼þÆ›o¾ »½¶eËU—_™™LÃwQнw½žzê©y£È}¬ÃøÃôïÿá÷¿‡2µ6AcŽs QÊí VMú+µƒ!!)fÄ)b”HÁ;UU@«ub”b¾YÃZ®()†B$F”R3^]“…„È•ÜÑÜ“†ó­W+íá¿bJ1·¥ŠBä ‡ëvˇã¿¿`H1·UJ¾W%ˆe ‘ Lñ°Ã§SipX_pá_ÿü”PŸ ±®±ûì´]YT]ïy|âIÏ<ÓÍ‘ßüë_Úmv˜³ ð‚=÷Úm÷Ûo»˜zãMðuð"‘Áá2“Î|î&ÊB‡^ßkÍð©=f×€£604A3«ýž·¥ºE¥ bÁ Ø ¢mhB¼ˆš …Å2˜¼‚aK1·¥ÀæÊ’b^­ Ûå^¬™ð…•‹Íèî)WnÅd+@ø¯WRÌ6’l.JÓå®Ýr Åxì/"tëÅ–8u)ù^• –=$Dî»÷^R³Ù™étwPä¡SLâ[à¤0¶ëòÏ(Ù^š°ôDwÉl·]éé©,N<áC7^wýQ‡ž™è¾ˆïâžP@Mo}ðýÂÕW\uáyÌf§áu0\øn(ÅÀÊ-庮ß#®ƒa•½ÝRPIØíÒK.¥60AcL%™+{Áüj^úPl¹”Sl±R¬ +¢;ùª9„“‰+†sð]¼„ËKŠ•ÚÞ #¦&xoÔšì*C’§búÝ“ ŠBRä+á¿bœH1·© #i6Oí–‡œbM—Çþ"B·^j6u)™^u…A$Dnùâ—º:lj*›NCÎ{ÐO»­ž½B¹üÅ_„w1¶ëƒ†×aŒÿÂ2*-Ti˜e,³Ã»¸„/â Kœ•2…/†R ?ÞmÏÝv¯ýááÀÉïëß_»ï~°'dâÇJÂ>Ç} ¼Em`‚ )æw*BÔºýi!_ó— óbm!ê»왹ôrÕhµ-Ûvz3Ï»®c¶Œ²–BF×sJ¹ÅÖ¦P,7šmÓêVÆqËìõª^`;=Öh\¶oÔ©ä…aÐŒˆR,ÂãTBÏ~ M^L#)–kÍvÛˆãØV§Ý¨U yögÙ4Çkå6²¢–*õð\ðt ™µšjYÏËâ"Z}#Š{bW¬P®5ÚhúV jÕl ÑøuÃæ¿b¼H1¿³ÈQ“rOß´ðH»Ó2*Å‚´(-fðÙní‰Ø·†ÿþ‚—^uùC$D`Èdz2®*Ý ï‚Qé‰Tvb긣}à´Ûÿ¸áºë»!Z½“é9 ¼‚k0b`Ó@Z…¯,°Ûd×gáD“(ÂvÚ3´c'Ÿ(Áοýõo»4•»ª÷éUò={ì¹eËjC@I±†& ‘gg÷ÛŠ€°ç±â^Š!¾c[€ýo,Û±û%”‘ 5Û=æÑ=„;ÊŸïy½ÚqÞϪ,µÜXF‰ŠÖhÛ[‚Ûª/êY©Ôv£H1¹ìº†Ì¬§$+jA/–*U Ö²½hžÛ»£buE…5°ÚùASµÚ ñ(®–ÚØÌúáZõRA` vü i Mbä¯4,—]×j²â*ó?N÷¤k¦ë³9'²Í‰ár_1¤Æ ³uV¥a{,¡U+2ÕX¹íqÙnì‰Ø·†ëþ‚ƒ^uA$D 5&[0?ý¶mÛŽ8ì0̆Q`Ï…‹ðÞ÷¼öÐXÙÉT˜b?\ð\ã²à>s^LõÛ_ w[»Ï¾ûï³/¾‰ÆÂ}“O>É&^»$ôCÒí€I1Æ ýlì†u¬‡WfÔH1ì¡m‰Y׆HËÂb ­j¹AoR¬5("7xþ;C—<à ¬àÎÖ_îxãøwcå"à–F.¹QÝS®dCÓ©Ø©£8®RÌ)ʃÿløÃKYtÓìÆÏq»å@ŠñÒ_pЫ.‚„È‹^¼èü Úÿ€_’{ôÑGƒyüý±¿g&Rè›2½e ²žKÁ: ›ø.¬Ó½}Â=ÃMÜaû&*6|ÇEâ[xœpÏ^!ÜÜ)†»õR…›ážXeßøÚ׃y¼ðü 0¯%̤yÌ‘G=òðÃÔ‚ )ƪ,« 2F¦,’bRÿ¿Ý#º-ÙpB3‰è­†. 
B5cbR¥…õãSŠ1?– rý V¾Ñ_ùõ€£Vý £á”(ç‚X51¹H1…~:upL·ã@Š1riI…È!fAdgã·Ýr Å8ê/8èU—ABÄ÷ýÂ-…^²°,«§gN=éäx'”Dî-ß“é¾ÛR°Ž¾ üšJÃáWî«@žþN§s{ñ¶÷[Û} ª‘Ju÷Ä/ÂÂŽ›ƒÞ‚u˜°?•úøG?ÌáÉ'žT>óÙ÷ì¾'¼ g ëwÂÆ© ARlÐ_ç’nÄâzÄB=X,$ŤºDÃk—‡O€âU”A•©Y#HVRüBÃMŠå’À«óͪ8œ†.'Òe÷ŸB@j8IéAMêóÔNbÚÜ|¥3šS¢g bO€×™ÏŠWŠ¡‡F =Žæt &ßí–)ÆgÁ[¯JK "®ë~<÷QbÝÜöÝ«…¥'&?úØ{~Vvgƒ¹á<ù\ æÊÂ4\s7Yoá`°$.½ø’M›^ æðüúõ𘪠FXЋ¥×î³ß£<êmõëÖ]xÞùãΖw¢eöÚ} 6P ‚¤XÐ,Ê£NHÇÎ3’k-îH$Åäbs$ï }`£!\½%—ÜèRLÆG$~¥˜ÛR¥"jDžðNÒ‰Í6‹Ïä à6å¾ãÅÜãWÉtOìQllL•=§§cHŠ5+`ÃŽýa«þÚ-RŒÓþ‚³^• –$D¶nÝzÕW‚«Z΄ÙÁzéêS°´ö€½ÖìŽ9¿0u×(ËLWEMíý~f«\ _h¬›IgW§³#~ÖGVΦ³GrXfbÏÖ½4dÝšÀnì·ŸõÒKðÕ˜qŶ‚ HŠuò£þ‡<㿾•ý(CRlônQÎt´Ë {¸Êhx²Ð±jùѤ˜Ž¹¸ø“bŒh,$Wb~¡Ù7ZFfŸ:¿–ÙOž1‚Ž/Ói—rñK1¼‰¦Q5›ÛŠMŠ1Â…DµžpM—ßvË㬿à W]I±üUWOO¦Wg¦C©”™À|aaâ°¹IÁ¢/«ÓÓðEW^vù믿ÌÔ+¯¼rÊI§ÀW€Ë„_„…¹›ýßÂu6,¤ ¼c—TÕØAk×¾ Rl…·‚ HŠ9†Îˆ†Î€‘rÑ Ä>#àã`ÔÍšØxÔÐ Æ$–Ë•½ àXŠùµA)½ÕŽ=bBÖ„ÇMãÓ~Ìx |¸3½$Dn"©ÊF‘bZÃNú0«=·“ÖŸðDNCìûïJ‡ƒvË…ã¶¿à¾W%b‰ ‘·Þzë ŸûW_yÕ}÷Þñ_¨B+·mÛ6(lܸñâ .„{Iô»Ÿíú/ˆ›žÁ¼cL~ß˲åÜÅ—¼üòˈg²aÆßÿî÷·ÞrËñÇ 5Á£anþÞ&æc¤äÏÂÒÛóŒÓNûú×¾vÙg”SN:yaoˆAƒkÕ«O |ßÇ.ý(N”¹¥A$Å0F` ˆ5‹¤XŸƒ[»ììâI*ºãM,ͨ‡RÌïF:c¸ããé“"Öxâö¢iJzAÉ1N>Y)&ÇßøÝ&cD$Ÿ‡ÖŸðésÐnù—b<÷\õªAð/žö寮Îd×L¯†1ƒ3½œ\éí1Y˜¨ Ó~±3Ö‡ ¼0ó=&ü:ñÃ)þèG5›`¯æ¦-››¨ /¿ôÒ¹ò¹ðuP<àÈPÀœúPèfâÿä'A¢…õÇX­ù ¿ È«rßý öЩÁg»µ‚3ª¦gü`x)PbÚ2¸\³Ù™‹.¸pË+°ì¤A$Å\Cƃ\l‘ããìÌ~^TÒAìxͤBäOŠa"$FÐMä¡ÍcHòøNÛ¨ yv(‡Þ‹$Å<£\ˆÍ ŠZN]Š%ú„ïã:sP1>¤X3yóÂQ»åYŠqß_cC$ÅPT½öÚkçÉçÎô'fS½dó«ÂÄó½r¸9/›>n¢Dƒ ëÖÏþã§›6m  +éÌÌŽ=ãŦ'Q´u~Óõ7n}c+î6Ô9þ[½ñƃ|ðTé8fѶpþ‰>)ùQMe °ßÞû<Ôh`ÌÚr“bAó;ea¼(UŸ¤X¤ƒP¤X¡as$ÅüŽ*ô'_ñ¢þDóËUŠymdá'É9U+Vê†ét/p»”‹3RL){ü ŸL¾büK±V<·[Š#)F$Å¢§»áºë§{©ñwȦŸÂÂüÍ9 ¹ÞÉÐ?µjòúk¯³,k®Š¼¶ÁÜð‘ã?„GÆÌý0%f%‹vعt]÷î;ïzÿ^ïÍl¯:Âsé›w?,caº;”2sú©§a²˜ Ž *6á üo¬üµ–w ÜD¸•pCá¶â„ V¢k$aÌH ‡¤ØØÏÎÖ¤¥—SLoºÜH1·”ú"Ì 2xúËOŠ™qhW z©\«­vÇv\ßg·yÞ†Oz­ã:ó^1†Oæ«Ñ §í–k)Æ}ARŒ AC¡ë;︤æ˜׸¤w*„©¸0WýD ÊGr(äíŠ7µÌêø«_þꋟûü·´o>ÑjÅžÉþ…^€ù.¡þéÉ&ÑO¹¬îMˆyÕW€1Ù¼y3[„‘[Ê‚ÌqRcÄÊ“b^KÆcVu’b4ûä"¤˜Ã‰k É# ·(ºV~òŒmÆ[Q.+F«ç¢·y¾fŸìTò ÷Ä]Å8H´Ÿ¼ä¿ÝÒì“$ÅbDhø$ÎØhúñ<úi<Ó0ÅnÎ-c–ý=fwûñ w¼É2vabÈD/±¼¥û‘¦ÅŠkhBŠ-7Á¿AÙW€¤šöøÖX0k}-’f8KKй­’Ðµj&h+$ÝYš6¾ B¯›¸RL¬Y~+†& Œ†ÇcÅÆ!Åœ¢$ôAi{ÔnÇ 
Åøï/HŠI1dãÆ?ôðº×ýÅøËã?þôÓO¯ž[¿Á4¯»&ß‹Jg§z#%'æ$ÕÂó`‚ð­I(¤Áˆvð!p´er­ÿ½èü ÑôuSŒÁSŒ-”S vƒ·Rïž>|"ŒÁ Vè0«Å‹+BŠùVUú¡šñ_[ú"ªµdÿe̼IR,h—öƒGLÔûf²“j–¿”¤˜ÓÀª%š^Ç1tâeÏ]…GË_ÖŠ/Ñ>R¨[Aœ˜*3`–ÿŠ%/ņ&íób½X,íH±X¬µlÚ-oRŒÿþ‚¤A 6ÚO8ö8 zÂD`™‰,ÓSÙÝffÓ]Ô{k¡ßB lžvò)Ï>ó,;í=÷à) õ¸ñúëg§WwcÁÒ<å® –§&R“ï^ $; ˆ•äÅp%Ë_Š5u™1$!ù¤þìG©ÿÅ9­Hб¿¥˜gÙ0b¾í\I±è)·s%76]û|GË[VÿòäJ푇š ñJ1œ7.üNe@nEÛŠAŠ1&+åN\’¶’ë{…9h·I1þû ’bAR ¥$ϧ3›žÍ€øÉN§³3X ™FÕ5‰háDûYXwie@¨]ráÅ8ëb˜Yx1¼JßÖ¾µ×n{@¼œi¶Oêý©‰‰Éw­ºàÜó‚ã(Ç b9K1ÀšbÕôƒðÚ%¡?ùª9Ô#´¬ÁhàŸõ$Ř³ì‡ò%Á¸'$>~ó/Åܲ"ôGmûA|tú¹­a1á»Vk!,wä3ag#BrMwôƒG—b fïb¡Xiñ[±1I1¯S„¤ÓŠ™*cÚ-GRŒÿþ‚¤A î»÷^0bÙÞ(HXOÏ[Â$ú;,ø"ì˜úô'> Ã0á˜0m%¬yfØ«„ޝø£ƒËN¦fÒ™ì¼Ë±u©w¯‚W:N@¬¼übV #–³s0Ã×®žØËÂÈš¡$YµBBõ¬«I±98E™­,GÆ-åfôÿRÌÐsšr=æ|O~-/ö©lÉ‹-€KZØ‘kÆ.Je(—ÜÄg©‹ ŵfñ`i";¯"ÿK^бÝcVµDÓ”Ú.í–#)ÆARŒ HŠ!Žã°ßÚ™TåNzNú|XÏß Ë°Àþ`ÄÀ]ö™ÏnÙâKß÷ÁÜÁm×b¼ÊŽÛ‹·ï9»;œòtïrÍM´?µjÂÄ.ºð€XÁÁb®ë±L¥˜WQüs6B<Rîx =/GŽ`üG7I1¤ƒÍ"‘4sìhA|ÊåXб§mEtÃÙ•ÓÞUâ‰ë´5i—ä®òÚ ãG—HPId)†^ÐF†ñ-ùŽÏuÅ’—bŒ®*‰`a ]RáN´[~¤ÿýI1‚ )†h_ÿFSȇy²0‹|ŸÍ°ŒFL>ûÈËïlÞ éɜ͟ 8 \ƒ ÛI ½éÅôïé³Ó3Ý`±t7‡Z˜trÕÄÄ»Výüg?ˆÉÖ­[á'¹iÓ¦€ –§³ë’ÐÙHTcv¤ar‘¸M½ïd#Ø€f1'Fâ`¢¨°ƒ p, ÿRÌëT„d¯Ò!3JÅa8u9ùŒæ¬D~ÅVR©âG“b“•hã—´Ç£CœXoåµFN@Ú-ORŒÿþ‚¤ACÝc¿b¿o¯÷„)äÓ;¦ï· e\#ÙÄÎ:ýLù¬sø]Î<ûܳÎ9÷ìsr—\š¿úšï~û;µxå•W‹€²ŒéÅ`·[¿t dƒenÒýÉww¥Ø Ï?¬Th%ü$1§A,C)Ö.åNÞ!y ¢´¼<š4jB%¶$)†ÔUQH,Ù\»¬0¬ø—b®‘8‰ª$…¡I‰MDè•Æ`êä¥þã3ÍÍvO8Ï]‚¿Òñ¸­Øx¥¦¿dO Cïß+Ú-wRŒÿþ‚¤A ùüÍjòå£åYô‚Éæ!^l¦—•Ÿï%óïu ’‚¥>øAúðGòW]SýMuÛ¶msÓ‡õãí0Ûà§?ù)8q8ëð:À¤“0|òÍ7ß ˆü«´,+ ˆå(Ŭ‚˜èÈ/f5Ï|`ä[yô“&%Å$­±t¥˜oV„Aä¶D¢S-0‚q/Å,M”ÏÆeYŠp)šã­Kñ',·º€$$ÅPÙ»‘Ç'–V®èr\±ñI1Æ¿cH®ØŒ:4¯, ŒInÛ-»'JZŠqØ_#’ba ÔsÎî³k0|vÖXX`3,O§pÝ•b«ÓY˜¹²»—LŸ2c3ñ=»ól¦§g¦ÒÝ„h“Ýõözïé'Ÿz÷wA¢1 ùìÅ` S œô‘³=†mê]«RïZ,pg`Ö^{íµ-CÀ ‚Û7+Ù_%ÿ$Řéc'sÁNZÌ.SJ­`(¼NA„EpÆ€%(Å¿šg\›ZǺ&eUH¡n¼K1¯šˆ üT’øý*€è +§Ub«8q‡¬Ç UÁÖ•¨CòZœ×® –ðšáð[±qJ1v—ä+­áÿg¤>à˜b¡Îm»M 'bßÞû ’bAR¬ë¹úÊ«²)H!¿Cf}(ÏÝøV¸™…õöͰŒo16aÍÚ3ÜÄBø{3›êY0T{=—7c© †zí¶zöäOú¯ÿü/´„ÌY;Ÿ~º½ÿÞûdßIºab°,Æ?º®»…XRÀ-Û¾r¤ARŒ="iÆxÒü3Æ\0ÒóçËMü·Së(A)†”ZÎ’”bˆ]… 뚇xf9ÏÐ8Fk)Æh„6Úı·&§×†¹"~«ª ì1Ó¨¥Õj{ˆpZºÂl­aâ§¥"•›CÈÇN]gOw8®R hê²Àjù‹W/f£ˆ†‘ªŒóv½'Š~køï/HŠB9Å(Üuç]]Å“Î.>Ñ~¿ÍL¿¬d)Ææ¼·X›©°ÌÞœ[ÎLõ˜^ 
•îu&Ý-O½wϽnºáFfpÆ‹ýò¿XÎÂpøä*¶##¶t½ØJ•bI1×™éc’Çi |× Æ¬ùóÉi†éúF³YRsc<¯æ Åj­n4›­VûZmÓáÿÏwF2—)_i´]ÀƒŸY/…UY”W)ÆÈö¨UÓì´ã¥ÕêXîÐvåv¹n:#H¨Qa=•+m/HÆÅdµÔaÔß·ÍfYS„E#æ«~î s…zkp«õÍVM•ÙͧÜå´bÜH±Àoç&¹R­å ' Iâù‘b>#>+)Ä…#7mÆÅD’­T®ÍvÇìbYð“0àQ`ÝÚðÚ&f dÿœÕb¹Úè6,¨<þ ›õjY/ä¥H÷#_¬uµ@³QÒ‹MÛgk6R¾ Wë^aUÛhÔŠšÊhý,ÿÂIÅ8’baú36¢¢j•ž»§¨QctŒ~§v›@Oľ5œ÷$Å‚¤$•ɲ}ø‘Ò]wßxý òYg¸ïþ{¬Æ„b½à)X¯°¥wî°Æ%kñð#žë<ÇJé8· û§º‰öß Tm!–,pûVœ#HŠaª`Æd[ ÁŠÂžVÒ1t!v$ÝvÛ¹¨à2ž‘‰«ùÿóÝ-+‰» µŠ­Žk)æ6‹Â.„}›|S“þêb3H -+˜-‹ížÄœ"%w€ãCŠ!V]Kú7×°yo· ôDì[ÃgARŒ HСܙÅÃŽ˜ÍLwSk…iõa ÓoMî˜ákbÞfÿ·ÂÍìœÍìo16Ù{öË)6h?ÅØ3›Âë…4^™c<ÚÜ`2½Xý¿ÿ‚Ë ¡x±` K=³>åÝ_qRŒ )æu“míJ¼vI@®ì12‘Åôüã5‡Œc$hc q&Ÿ5Aû¢ÕÌá\еx’bÉ{1Q­zAÒXš˜DÍk# u‡–þÅrÙq 9‰›«Õý€ãHŠ%ïÅÄJÛå¿Ý&бo ‡ýI1‚ )†9°ž_¿þÄ>4ÝK¡Š!,à(Âô®çdøš˜»Éx+ÜÌÌÙÌìðcÖ¬=ûä›`o2÷ÌâèeC/vΙgy[·2mãÊå Ŧ&&‚l!–8+HŠ$ÅØÁYJÅ v1ÖÀô!RÝ^à#º,Ä„XÆç·-R ñ;•å+Åß()BüHåEg€&)Ö»œ—’p"^°K°êñÖ>§7|¼­ñI1üÁzj¼UÍ—?@*Æ£CœV9õ’«›Þ’h· ôDì[ÃmARŒ HŠ=ÑzâðÞX?b°ÎÌY0Á–N1Vб¹å/}þ  ½|ßúóÈCO­š„,+)F#HŠåØÃ“‡õÇeHnáY·,üÐhÈÕp’x)ÅÅÜ2–bˆÝªä„Øs: I6$ÅXàô‘1‘«4í]»Z“cjSņɶÿ‘¤Øöýܶ–‹éñžu‘9¨×R Ř& q!*EÓ[2í6žˆ}k¸í/HŠå{ë­·nºñ¦l*=›Éöâ¤pJ ,¯ð/K7¤n*³°÷ºuëP,H·Ÿ‚H±UË+RŒ )F4ÍœìjØsJªfŸG’F)]4¨%bÈ(Ã'C:õ’õQ¤©ËIþùãÁF™ýÐÆ@R*ÞU¤˜Û”ùÎ)&-2\Ñi³gVe ià ÆoU #U^Ñ*; ».Æ)â·ëØ#’׫–°á¿bìŸÏèŸb€gG5.冹´Úm=ûÖðÞ_ðÙ«&AC§Óétº#(SS«ÓY!HË‚Ëto4%x1XòcŸ€™ÿõ¯ ñ↠{®ÙŠÁBR !)FË\ŠIZ#NQŽ8óºg·ËÚPiT$U¯´mo~Šb%žDãnÛ¨—ŠššWdYEq‘é­Z!¹Lçí²÷ÁÝŽQ-äå¡çÂÓJFÇø¤X©åŽ c:êÂw)·óì“&ØÛ1P¨™CœcÖ+º" WSQΗj-ׯ‹k5KeèšW «OÕV%l2Œ–3„{B¼v£¢*òU•”b¥1´uâ»bìŸO\Ÿbà›­º>´sª^o™þÒl· ôDì[ÃÁ¯JDߨuþôÇ?¾¯÷d'zÆ'ÌC?/5~˜x>Ü w “ñãzþfvUÿDûý÷Äã°÷ 7a™ ßê¿9ÎÙsf*3Jï·÷>7 /àN|­ð•ìÎ>9’s§·Þì8›âZð°xähP­HŠËLŠžÝiÕ*%MÍçÞysyT¢$çòª^ª6Û–±â»f»Y- s®|ˆØ½ôŠZÐËÕzÛ´w¥r!<Çjµ’®å¸3(Év¾7ù‚^©¦íòÖ¨,ü9ò¹ÞýÕ–d~ÎÅr½Ù¶]/`âÛ²žë]€œªÊ½ƒÈŠZ¬ÔMÇìžB|Ïi7•’>ß>`û/hÅj£i:Ñ/2/ãl<íf­RÖ *6|q‡V½VªÔºÝ¿LÚ-õÿŸ½³pŽÛêúðÿÑzuIZc˜™ás˜ËfÎ[æ6 eràÅÐ eÆ0¸L†P¹Mâ8Ær{¤ã\k¶±Æ»öôóNÏœÙ9w-]]ɃÏ\ý)fµÎƒ÷?íÆ=mŒäpýÄD-#Uܸ™®7ž§Œÿ=&¹J$mb½Í³½ÑCn¢L)hŸ+¹Hþ „p•ÖB®\¾‚ž[B²{®n»!’bM’b,eΞ­®¬ü±²ò‡&×ßє婃UAŠH1©»', hYRŒ2ã)Y¬¢¢bòÕסü gÌoÁ/æEßûúL¨p蘫4)3¿ —ßóaM̹çOZƒ_ú\%Ë~ÉK9 ¾”‹ï”ä M;åÚɵ5µôôžçæM›ULБ± 2œ ›¸S¬Œ,ÏŸ}ò’â%Å;šRÅE[y¶9ödaUbR ¤þR̆‹•î?Ðøò«>Z^ 
úš¬UvÞ‚¹ó×®Z½âî{ÆŒUwUÐøg±5Ê•\~ïv¶d+|Í–lÙ5d7ÎWËÓ¦Z^¾6|h~Ñ'E qûµµµC áK™bM Ú/+;E»–Èò¼öêè]o^±ëÍËšP—¿ñú„ƒ——Ÿ- (O¬ R @Š€Ò^ŠÙÍb555Ô,^¸ˆ…ŽKŸ7Ê8bÈÀÁ»víªªªæã?¶üî{r¼L{¤'Õ‚9óþ¶eË–M›¸6ìØ¶mñÂ…Ù^ÜXqc}f{n꺃Evfæ—^víUW'U´÷íòK.íÞ¥+-’IÀ†¦¹ ˈ\L¸w})¦=:wÝ»{•b¿<ûôÓþU„b)FŸMÛ)vº¢â{Ú÷D–gßÞYûöÎhBÍܳûÚÂ÷4mOV) Å@Š¥? ÅXèØT¬êªªÑ#FºŽä7(­ëÔ¾ã}ìñü ýöâüÙsõ9oåJ5ÎÜßÏÿÂsϵÉÍ ¬™¶‘ù&ŽÏŠ:5Ö£k·#¥¥'OžüöÛoO6šS§N}óõ7ôñ3O==kÚ áhGÐR]‡æç™C¿*9 ÷®P4ý4ÁË/¾dCÙø±\ھȚiM¨é{v_]xø&Úý”ª~ª ŤH±ô¤˜Õ[DIqÉÚÕkÆŒé)ÃùTæ\3mò¶`öxöh…‡·kÝÆ¾GÙ6¯õ|ðsç”QóäO´ÊÍ fSáX.eKD i ®Ôtnï=kª«mgËËŸzâ©Î:j!øýPp9=´}p/t¿­Z¿ô‹ü4øÉ8p UN®–ÊH_еoÓnÏž=b\b@Ф´”b•••Ûþµ•^KìÒ¡“«4iô6?kÚuV„YHÑéýûö3¾ðÒ|ð×]owNññO<ñDëœ<¶TãöuÖ»{OJR§‹²qKаþ{íÕ×Ú¶jSw›©­'¸_åˆn»ìÞ½›ï—oyöŒYºî*#Tþà¡ö?))@úK1b@úK±Ï>ûté¢Å=»vˉgjGÔåˆIeèýa]/–ß³þ„¶¡™úŸùƒ‡@ŠqAŠþR ¤þRŒ’¶&Ž`|¿#Xñ§ë(j\j8?‹5™#'ŽŸ`£Ø.QPWëì\W(ÿ)©¡èýüío|@½ 2Åè¯)ís¨½>Y^Ö$)f—½}ëV›ýŸlоœ’©'õì3 à;]³juøQ¸Žìܾã[……b\b¤¹)¤¿+)*QŽãñû’"ÀA…6C©éW¶—YðØãáx›ØøÑcüSøÄàHšsíªÕ¼M,,Å8SŒ¦µöõ½ˆòN±æ”b;·ï )Æ“ÛË5fhõAÌ™RôÍã>ÆwJéiÔÌ™5ËžÅÓ¹ ƒ~|RŒ R €ô—b Å€tß)vòÔèá#9«Îûˆò³ê²ÀDN<ëÞu¾ùæ›@?”¿UøÖø±ãè¬p)³ã™Oüï ÖOÍ™)ÖL¯Oò¹K/ G§%W1a¤¢¢¼°£GŽò´|§wÜz»v÷Ç3 »àB#5¤¤é/Å@ŠéŸ)öþ»ïõïÓO9ÒZÛ|}§¾”sžÈ­VÙ¹£‡ï×«Ž *#•l㢆†®dÊjjjš?S,Ú/+;M®¼~N†‰öòK/·kÝ–7s%¹ [Šjúõöù‡êÔ®ƒ ìJÍ;Ũ Ÿ ÅHs)R H)ÆR4âÿò#=¥9ð‹³Âå *n‚Ãøî¥¢O*ÿI¾,çâñ?üàC+‰ìN±p¦X²åJéIy}zöâ S£ººúøñãôÂc«¬lãˆd—ao<® -æ¢ñOŸ.û% übfÁã];v ½‹*dF RŒ R €ô—b Å€ô—b,qŽ=zñÄ‹l˜½Î†#´2‚ŠÅ_fØ0~ÿȸ4žÔ¼‡«S»öW]qåö­Û8Z‹IØ)ÆRŒg£O“¶×4mFÃÃsYø]:tzñÅ÷íÙ·w÷^úlTíÝ¿g÷žW_~eë?ÿuó7÷êÖÃs2({_áË50ägÂO‰zÒ^ûöî³?:ÉÔï;tˆ^ÏЧ]Bø;Å.„ã‚K¤H±ô¤˜u:Ÿ}þùäk®å |cc¿b~Ùžþªƒ^Kéi×Hm¥ѯwŸ% ¿üÒK ¡]Ìïƒöí´:| 1d)F0RºZ»Ú¸Ê/n†¶ì7žÒ4›+µ+ýǵkƒðµ¾÷çÁnο:ÿæÀ¿wìL0bûå'¼zåª!ËvŠqAŠý b)vyA%Öò)f%Î7_=mÊTWj*•¸OÄ™9|ÄŠ{–¿óÖÛ<ÕÓ,Aû6 ŸµZ åi“i<ÒaoçlLÙãÝ@®eyñ-›·4dÄ~ÿ(Þ*,\·f-¤¤ØŸ)b:ƒ…µ<@Š1µµµ´½‹v{µ ’¿<©lо• *'žuÅ¥—;q⤤X‹@Фíc¥E’‹^„ìÙµ›Ž ãÔí+´4~²¾Ð«–¯àÍen(*h?£>hßöÚ‰ÒgcŽä!7öO6ô'þùHבÓ&Oyÿý÷yX*ÿH1H±–€H1+wþ÷ßÿŽ1B ©…0RÙøùÄâ¬z¡Ö¯]K6mWCDíÛÙtxr=lô‘v(l߈aâ²Î;=pßý§OŸ¶ R «‚Kc Å;ÅŽ=:gæìVAÚÿ>cB°}BñP>üàCt.¥eEÌ´Ÿ.åiC ^0wžÍãR «‚KK ÅRŒ¶zm*ØØ­Sgí*Oi›CO:¿iãFk»"ˆÚO—â@±vy­/\Xq¶âWR «‚KS ÅRìÕW^Ér=ã[*aÃï]'1lÞö¶q¥Ü²eS*;ÅlÐ~C©ù±èáÿÛ‘Úô9qìø/¿üR «‚Kc ÅRìÁ0RyÚÔ 
γ?о¦ÃHŠmNZŠq¦ï³³%Ì9LòHn¸òY i¸ÔjFäûн¤V)–Ž@ФØáC‡³Ü¸r•‘Š£¾Œ°ibQ™b[6mJöõÉ'þð×'Uã†ÑGj!yÙÜ5ººª:ÙÿÈçŸÞ¤‰žÃ‡nLо¡Š>{wïyêÔIÊø'‹÷CãàE–Ÿ)¿ñº²]Ï“šÖ þ7‘C>%xS¹Rš_UUÅ“GsÏwÉ 3 ÅZVAŠH1€ãt°â¢¢Iã'h·¾MRž2®Tûx÷wy‹Ù´¯…"Uï=ËËÎ4þåM>’÷ÁÕTW/˜7_s¾XøÉU°©:·ïðÆëoFg«ñ#ºõæ[T†)Ö‚ R @Ф;÷ß{x~¾+”_޲áú*CÖ÷1é ­Ù*'÷…çžçs›1h?bh„¤UGÑN±ò3ÉI1» ~8ùC†Ò<¥ƒ9“ Úç•pT¿'ÍúµëìÌ Q[[;uòÑb‚öQbR ðÓwÕeuTTW÷Ó¯àO'Åx«×¡C‡)b?ÈÑ×Z„‚ö+S¾NR>ÎR,• ýð;‰¶DÔwŠéTvŠ%®äá‡¢Ü´ßØ{'U²2ïÿï­…º÷öê¾÷î¾ïí®Üq§]™åFðN£bͳPné³ÀHË!CÌ ¶i›.i(4ˆæÛ“­© Ð ACD*Ìr’¼<•Ô‡:uºŠºåùFZAÂÉ“ÏÙÒ{>ýþ ¨áøíÛVqyŠÙ\õ?üü;Ÿ·mgî?š%V_m“Ëßøº7G°…1¤`á7￟¸‰‰Dû¼‡€bBŠ 53»6¿ÙM5×qÿaÒs·­h¥á°N3 ]9œv¥T½5bª_ÌC¡3©Züêš÷Dk\ÏŽZün[îxŸÏüýxF ¡1ö {à[<ã©OuY¯•tm-òÛDŸ/mïg1÷ÛßñÛßþ–pŸÉdò_T‘|^¤Ø¿}ýë>ÉœS íopA1pmó¼ïiYùòIcYíÃç©§Ø3žò”¾õ-R¡ÝF'³IÙ£>úü;ï:ÿGO‰ö×~(&$ ˜PONHD{RJ¿6´Üž #B™õ@±´÷n[ñZÿbÕ\ä€OX,U(Vëšn´Z†¡kõ²šKÅøI¹ª)f>Ç<:j$}ö’÷Æù X#y·-orÈcå{üŒ: “MØÃ¯nV ùQMxB (†Ôn·ï¼ý×9‹ˆÙÞa¶ç$Ò"œŸí„ˆtË3ŸUùÑoÕÿý÷âM7[—on;nn·øíoÞ~ #/£$?ýÿ°cÕnq¬>kö±E>?ÿŽ»¾ó+Ý>Vý¡úk_ùjÒä?:·±f(&Å„ÕgžB¡úèŠ (f©ƒ†I©ªÞ™Pè‘QÏH»W@{9TZÌ|Žy($ ˜G@1!ÅNVÃÊlYx+hù (öóæÏÉ»&/n¹\çí"È΂¿1=]œhŸüµŠm’HÆ}ø#ñ‰‘]_þÒ—"_úùKø Ç—¾ð…ؿļùèu;,« «H´ŸÉO”S’ÆëÂ4|ò¶çÞòŸ=^O±û¿qÿSŸôdÒFx¥ÜÎþL9…¤û.¨ml>醛ÞûÇïΤÿÿŸùLŽ-Ho¿0×>$ã·’îon­<ìbVmärgL¢3=í”f{f]Ú¹ðâ¼ð¥/zñË^ø’—½è%/{ñô àô%Ö‡—¼àEý—…Ù™}úˆþÈ·ÞNОVŒ!Ñ>§íš¾£ÀuýÅK7\ºîº ÉgûK»ÕÄML$Úç=PLH@1sÐiuz“Õeë$öÃ!#ÍãÕ-â,V?U PLH@1³ß6ˆÚÝñ5 î…èc*ìPLH蘡p±¯üëW^óÊW ãšº‰m#V5ŸSÌþu Î ÛdàBŸ]rhÉulîŠr&Ø.¿ãðzM|óÚW½újäQrŠÙpÐæ†öGKáïÆUªHÜÄÎ O18PL@±ã–€bu›ôÇtsdt,õF ªy©¶&ëo’€bBŠa (&$$ ˜÷ö`}ꓟ|ÞíwÞxéºí ?m܃“âÛ,É*0ý‡ótÙOöåGI´¿~Ú À÷®w^×0œVìÃ÷|Ðn 9˜ŒA]±Eo/ù¼±þðIq(&$ ˜©ÍÐÀ^Ó<Ë PlÒR!j²ÜÃ?÷ªrê`Zý”jl5«åjµ\®ˆ³Y16ûY1ÅÌPìš“€bBBBfcÅ|5“VPHH@1Fù|þ}ïz÷E×ÎUß«sSÌñ÷üâSü>uퟒÊÑO”Óég;“×ι«¶áÓÍÍ©åïº|üÂpZ±/J_°^/@<ãìjΖÒOÙK"& Še (fZUµˆH¡Ð®­Ðž”ÈÈu½·^Œz â'£YU¹P(¨(”Ù3Êj!‹ìÍî"÷ˆ%r²ªuFØ¢ŽUÿTzd­ÕëY?Ì¥h¥£×åLBڃƄ¤XJ)7“•Tf ×‹¹¹Ö¾Ø22KÉÅzgÄ>IÔ˜L£°·IåÈ(t&?j%.…}^¯g*¯/ ų­½(ädÜmSµ‡ð3Uã~×.o²ô“"Í ¬ø­×„|aH{Š9Ñgü(bL¬â³4nþbç(Sw:b0ÍhB…9æïÌg†b|‹gu©Ó°É U–sioF¥lôkzÏhÊ9è®ÙãÈèÍ÷’õ?óðË`6‰»ôI<ZåÚÖŸþxÅ,¿º*úæ’0.£$§¥`€,2[>_0š„U¶XÓØ¯éÝ¡V¶Â LªoÔòÉhÐç³Íóz}Áp4¯Öúãã ŸDÆÅö¨»¸Æm­’KŸ×3kXÚÅ,˜]ʃ¯%9Â}ðS‹ÓTl ÌF½V’óÙt:-WÚ¨˜Ýq 2Üp/¯×²¶f 7»9 Ír»I»Âi 
eLÑJÑ*J2JšiÕ 3-«TÚxVq_‹íá_žøÿÝìEO5› |ûƒ”ÒyUï’¬2íémISȲ°•mݶ%¸/W‹8dvuUNG­ùÉKɬܠ 'ÝTX;Ýc¬}Ý­a>sH@1ÂÀ¯êo{»íµŸVìœãï†ý¢Ÿðé¶ãtÿ„Oñ-¨%·gù¿ˆ§%|ò;ßVH®ýi· ¨vÊ^’ÄNžoŸ„C@1!¡³ ÅÌV!ºBQ(V6F®‘g¢ ›¶ O9szðì¦ê · å#…¹@·Q0 ºRÄ%i %Ú•þ”¢M–tD]&ɬhJÈÍ _¢}?t ë(ìJª6x< Øœ·+YòÅRyŠÖŸ7¯æ™ý” ¦;ú²››€-‡:åÔÞHeR1çHûg¿Ìòˆ³XDm±OÝQÆïœftáÂìóŠgæs@1ŽE 2 ³ëRVç@ÍH”JöRED×Ðö¯²Álîξ8ô{9‡I÷lK%J¹nI‚¥àMÓf¯ž Ì † ôúÔtØ{7u•ÉþB;ã3;£\€V˜ ŠÛÉ =‚åŽ+VÛÍcK´ïŽVŽÅL=êì×°2žÿ½¯)¥ßÝÑ|ãH€ÀÌΜvÒšIú¯–zž¢ >S±%D㮚 »7VæŠé¤š~~ù„tâ³Xu³”/7É›oq )Ȭd%bER¶Íä¿–nÇò™0W£µ!éX9 Ôä‹*CÆ7NR&,vŽqÝËLc¾J¼aUC ënóY@1v™¦IþþéǼqF®÷pB1¬G»ÞúìçZ‰öIÇèá)‡€bBBgŠMZåæF»»~´)©†“äf—e4“lã%¼w…]÷ ¾·êpMoÿÍÕhå`o•SøVDïSMÄ5”6h‘‰â„FPF®ˆ¬qB±´64äƒ{7þg÷܈Øsy’«ÿeÝUg¥ÝJ{ÌÚK{}nl4Ù9…zËË2Wæ%C²^y"(™§î¨ÅV§úÇ…9æûÌ皇‹wiA3‰ºþlIò2e¨ lÈA;ü ½×”س­iiØ»JíåÅT'^𦇔eå›Íò°:Ç› vlt³ ž@BJa:ë7²`å1àMÖúZ–ŠQ¾gku·pâ†xi|pþDÓ.OX6ÙŸ¢Éý§h·õ0?E¹MÅ–Ä&a(Éxâµ9ƒkZv¾ÐcyRõv¹=DKÆÈ¬žlÃ|н ™Ø` à&çµt{ø–'ž =8Ä<d‡ ­¶ŠÕúÜ-bW¿bwë²hHâbSé•„e¿þu·ÎùÌ.Å ´°T*=÷YÏqÍ2y-L´O9Å?m )s­‚bÐÌ׿æµ6òÛÙ`N´WžõDûŠ (&$ Ø¤cQ/QÖZæBù 55·Á-è#L MÍö‚Yaa±X®hØ»í\Èéß“©ë 0G-­œpØ+h`\Q¶¥(…„vÃY±¿.Èu¨É¾S3ç4CÊ(zg4¹z£V¡ï%ÔeÉéWÔZ£YG˜£^³˜s61QíñÀ–Q˜Œ:ºš‰ÌóйƒÑ°ã¿èÆKZÛCp™.ǃ󛟮#½U¾ÏãíÍV3……lg›èα)£@Òz&äœÎþ…Fäj}9§.'ã˜Wì3Ÿ{²/RÔ|ÀWPJ.k½4¶SWR~gcËl^§1ôeÕFw¶+ù¨Ûi¡Ú>|@­’µ•ϧ%7tz:›·¿Ng+m“³Eìê'½N·³xEÛ7bl†*9 øÒè.TS‰Ì¾.GçcZò×À¹îÖ<ŸÙ%Â'‰Ú­ök_ùj‹ñ¸P1ø”=Þ%7ÅH èî'?µ³µMJÚøÓJIá)‡€bBBgŠuœ1]1e1jéÊ®#ô°‡·ÁþýŸ3jó@¶£žc—›(˘‚,ùÁþ}²Ÿn\›PÞÓÚUôÑÂ&…rûü¢ƒ6D”Ñb5@S`$Œ`…I§º‡é$ûvΩ¸j,.ªå=Ph.¹o7êvÄæPä(VÚÌoZ„i] ×5|P…) ±“s³t7g_Æ2u¹ ǼbŸùÜów‘BóAR¡·8äEÙïòPa´`A{åÖâ‹‚LP ÏLß’¸È±ž?è”±dBœ…`o¥íÛv†Ç„ó%ÛÞ’íÜN Š’ó9PB+tæJævø1¸× ÅÚ%çnÖ®t½§äÉk m§…’p#槨Ý1>Õ0Ùž¢ü¦‚%`„»Êµ¾I{ì{Á±Žîþã–tRIã•“ ñew0?\ÜS c¹¼×Òíá[ž¸cAtwq¤oÞ|NÙ4ÞO´#ú¸ZĨ®*Áå’¢/[#Ù€ÕTúÚqG»ü5ð­»uÏgÅD ɲ??qïÇ/nn_ØvílÄöËíŸÃ§ÔŸœùò'Ú§Ÿ¶¤kãêé»Ü‹¡¼’RyàÛ7^ºÞeyЉDûâPLH@±#©¥ÆÎGú!JB&Øû÷³9ùc‹^Mʬ)¢NhTì‡ÁKh²—à 0:‘üëh[>j†f_Qò™º|É#裀ät( åF|PLR ZéZráïÕóAp)€]+øæ¡mÕ¯"¬Dy¡›˜?V¦¾¢b'A{¬Š1L]N(Æ;¯f>÷<ä\¤Ší¦:”$\KC‹æçF5ܯ¶(†t  ô ©ÈÀáŠË¡‘‹Šƒ ¬glr€žM\À˜bmÇæ6L4À9tÍžblÐÉvä܎(Ý XÚ{8²äQŒ1ûS”ßT°ÄÁ+ÝáF•Á¬¹4¬ì±ÞW3euL¡ ¼ØÝÉԲХjwÌ{-ÅžåIéXO:ŒÚ·^¶×GÇËq?p¶ˆAcuÿq'Sç}ɃF„ÁTT2­™ü5ð¯»5ÍgÅðèë_ÿúoºéÂÖ¶kÓJ(6˲Žå§ô’8~¢Ÿ¦$5Ñ>âb£ÑðÎÛn'-ݶÝâ6 
B¨ŸÂg†’"Ñ>Š1 (ÖK9âœLZI9…}C$mqËÐw®àP¦>`‡b­˜3K:U“–{ïbg2_ÿžfÒ:-2sÀ–Q@Áe²aÝÇÿ£©$yð±„x1Šû Q%îE4aµFÍÌŒøi¸ÉΘJ< ~;ikˆ>IºüPŒ^?ã_¤¨Ksµ’^BÃÄ-…ŒrTCv(†‰­§D㞬š÷â%à¨(ˆÖP;šŠ6æˆOeu󘡤Cb°°÷®Š™*äÕÇô;b`?V¤±s€!ã!Xˆ÷äLOQ~Sž 4ÌÕçõ1ý.yPÇ4Ù Œ):ôÿ:Ò•pþxžk‘=çòD›lP—EWøÇ¼³~(ÆÝ"¶ØI,Ï!ý FAåÌ¦Žµʼ5p¯»µÍgň؃>xç­·ü‘­‹Û;®™Ëù»y ‰ö5§=Ñ>FQwÝvûÎÔYÌöŒûCH´ÿõ}6ñ9Ó4S@1~ (FßCþ&Š ÍõT´ †xdGƒbÍ\û[QñKʇ8ŸÊÝŸ)ê”Jøaë( è3ð~b‡b8G2õŸ¶ð:Ìð¿,!hˆñF@r³”òUDa(®y¦"áØIK†Á‰áñÔ=.(Æ1¯NŠñ/Rh>TËÜW,axU(\¦žÁÞOyŒÒ„9akíËjË+ÉC%P¥Â¡GWÁK]ŠÁò¤D@ã$A¾5A1SG+"¯$Sî^KzWÀz4ÐLqd`!Üex”§(¿©ÈÊLƒb@V“·¼2&¦+}LÁ[Rž)Úêör_K±‡y⎅úÙW(?ão§(€Ð^vS!uÿTcþø×Ý:ç³€b oœ$nbwÝ~ÇÅ­í‹Û.×y;i&H•e Ÿ²”„S†[0Ô¹оQøA„‹ÙM¶ÿ>nÇiô{Ÿ Œî9ÃÇÔ÷@cPŒ®õì·ÁÌ6èuZºVW DÈ ÅÐej¹ɹ ›¬€ü¡”¬j­ž99žg8¶ÇÂXþ¿:Å5 DKð+ƒzå]ðFO+œÄ _…c'¨A"<æ©ËÅøçÕê™oöZZ³©-ù¥ÙMèóg‘âæGãH}5)'vÁ0¶%ÌÅìB‚|äÍ‘J]( ™eð*€—Êá%ÃÎé« ŠÁ— ~"àt²P,©Iö÷©Ý²ˆá!(ÆðØ1I(Œ‰µøMexžC1HF!/Èí‘î‘“>Öz~~ÝÞhV†—ubñ_K±‡y⎠æõõC1Žñ àS¿Û6´FENÃsÚËn* üën=ó™AÂSŒüm·Úoyà ÚÙÜÞ9¿E8‘k–'ž|˜~ÞÚ>?—ö~ë OI1@i.ûƒ§;äïì>£Ÿð)ù{¨’;›ÓSÅ.¯€b ø»¿ß>¿±¦n8ê¯g>Ìe,ìa‡ç!ÿ"…æCB1ö¾‚% ÓÀ”Šá¤NÉÚ}¹!qÊŽÜVÜø{Ç* зÊxÉ@UÇÅX±N¿?a(ŒÇI ͵ 'JsFÇÖeIçgL´§(¿©ìP Ð-yá‡bàÉkåñãé|©¦u‡x\y¯Åöp.O¼Ò!*ö䡨1´ˆEf_¯•²Éx8ðz=@Á°H{¹Lñ×À¿îÖ9ŸcÐüã|ðÃwÜrÛ¯#@g>iý%Ç‘ío[‘‰Žƒ8d9N]è'Êé!Kºf§ÄS ]äµ›÷þŸ¿°Zj7vcj61nm׉n‡"×’Sè ׯ)L´ÿïïòô ä!!>gè ôª÷ØccÒWßýÎÛøûJ@1!šË1lbWílWnbz!¶w…ªÝÐ.cØ„ÐR§©FB~šRªÞ19¡Ã(ðÆÓ±ÿ—m)iSp¦¼%誎P`ùCĘLTß4Wôá÷NÚŠ;ô©{ŒPŒ^чšI§WôyÈ·Hùû }9:ÀÒRÔ—±ø…ç#dӇװâyû.Xì›Þ“ôs’nvr

(Æ/Âî~Ýë!ë%Õ=áAöË+_x×ó_þÒ—¾ìÅ/Y×ñÒ½øÓá0ö£«þÐCþ¿¸÷µ¯z5Ç­­V¿ê¯|Ò7[xŽPÂMŸPL@±3%!ÅLEºº ÞËiÇÅÕ]ç6{/Qls²ªBvO1 PãÓhÐѪÅLìjd}ÄÅ8FÁlò†O&оgeòV†øEuxé8“ÍÌü»&;)è÷ÝLon`´˜cHü±2üb¨ ø^§xÌPlÃ…ùçÕñ'ÚçY¤/@ÜÏ)–;dN±Âžžkœ7ưFàgô ¸à)¹ø„f>0K¨—l0ìQ¹Â'ûI6(æe}´i PÌÕL“ð`AqÙp­ƒ;œÜS”ßÔkŠ¡zúíFI‰‡¯.PVò]K±‡{yž(v -bx -‘Û')5½oŽW¶—ËT üën-óY@±ÇU_»ï>+]ׯ6$³?8Ý™zŠ]ÚÞùüg>û‡<?úážö”§8J;»Ùú¡ôœ-­¡¯‰öý±âäP›øN³Ù´2wëÉ¡·ÁõÔ¾Wj…wU/³Ë ÅÒ‡ók2(ç"ж—?" ÙGXä—t“y…„QT‰#(†ƒ³Œ#¬nÁ¬-yʳ ÅÙçÂkz£,ÏXü2}aådÔR{ðe¦Šè êäŸWÇÅø)?Ãk!r¨µÐ)Æ®ðõŒöÌù… +^”)Á2sº=Œ:£,âwX3ø¬AÖ¼«"Co80žáe'Å<á<´ÄVW:°X¼K¿¿Žó)J3õìC1¤q_MÚ€ü鸮¥ØÃ½óK@±ÉdòžËïrÙù¶Î9‡;xJÊL‹mÜ|ý÷ã~;ó¿çEâI…¢Éébü:r{qUÄ`goaƒ!T“Ÿ¾à®çO}Ķ\VAN1Å;[PL/Hûler˜Â45X¹ F!Q{¹æê$ëüoŸ$Ú+àß±LCH‘X,I©¤|«ªäˆ2¹êª|aº,9’‘qÀöQ€¼ò@OFGL¨ÐÆlÉÈ!š §Û‡\9Î(§4ûÛÆì<Ôc%r˜6þ¹ D¡ªëÍz¹\Ö:#6(†S¿W{t›'-ea²-þyuüPŒ}‘úI[Y8™_Ïôkq'Ôå‘JP‡U:c‡q¾g-`\eÝ (Dú@"3šÆFþn(F¶ÄŒïžë‚'÷öɆ‰ªJ`¹°‚² ²‘n$¤# ‚áp0•‡'ûå7õZbF)Ÿ$Š'K´I°ÄÀ}‰çZŠ=üËó´A1þRýäì"_²¶Ò`/~"±›ÚUaéDûü5p®»uÏg~ (ö`µJ^Ié²sÏ£ðItj§ÛºõÙÏ}¨^·)’ Â0¨"ùË}ôQÓ¼62ÁáôdølØg³­·¿åmvÂ~è á)Pì¬HH@1´O”{+ËCpÓnª¾zÌþNº‰!_á€b#½4¤ˆZƒU†Ð»=Ù$lÿ¡ß|gjØvÁ< èú‘³ÌF+ýÕ·j¤IIM†ÓíÛH• ¯jêrÍÍ^ꦙ´r{~”òJRuk°êˆ™íJ}4ÑfÀÑŸª ŠAþ,2êàâ:ùçÕqC1þEÊÅàë:·½‚¶*ŸøKrôŒÙÀnÓüŸŠäA™ËxòfÇÿÓ¬tiˆJR»,o‚s‚žaÖ_ÖV‚'&(FvŠj~.J‹N ŠAC0`%çË ÷ºÝx\°€l²¼„ÿ)Êoê5Åúq7­¯ÐBx®¥ØÃ½æ}×åËŸøø'Ôï«àou õàƒF#‘¿ÿ»¿ýó?û~ò“Ÿüæ7¿! 
3¾?õz·¬8Ó-G®}H´/ ˜€bgKBŠAÎl;x¯µÊOŒ€ðšÁ;[Î4HÉÄÅ@„¥—ÂBÛïˆJš>Q#~8Ÿ¬NïÍï)Æ4 8<Ñn¹Çnƒã°Ð¶Áhÿjô&+µl`öHíq$¶Rájw ZZQ‘ D²RÕZ¦s¯\Ws™L&—SԪѳ~ÑåȬW̉ö1ê’ê¶Ù°o†êä˜Wk€b<‹”ŠAwÙò˺IˆÐá|=3V‚îY@ñŸÀþSÇÔRÖM‹m1÷Ž;l¬N8å›K•…QW O{:!t,PŒh,Ýø%ô<>k‚bÈ` J\Ø– YaZl#„ÊBô÷‰?EyM=ýP f˜>^ÑÌš×áÃq-ÅÞåyê ‹V‹5 RB{™MvÞ²ü5p®»5Îg~ (F(¡Z7\¼Îµi#žÕžb.»ŒËý¶wüÕ§v¿öÕä÷¾û½æO› ä ß‘÷ ’Ð["§H@±³$!Ű·…?BÙö =ä±:$ƒ]4+Ô> „öš&% ÞâGáK“bl÷Ê|m†×ú• _´Ô\«™ ƒËpl#† £êU3W@‘â„ï}䞨JÛÍ‚?ÕÄÔ²P ýS›K­b‘/L°8š)d|v&t(C댌ښ¬t¢ÄuòÏ+úÌç‡bü‹”Š&†âtøS´EU ì,ø¯v«Ÿ vIÈ$ˆê7TrÄîÑáï°–„2î`Þ¤Ý=~7(8—%ǃP‡lŒi®ÌP ¸ÆêÜXZÌX3éy‡I¬‰ÂŽlIŠAƒ¡aè7_Í\ÃS”ÛÔkŠA–I"·ÓÝËÃÐ̆Éu-Åþå¹V(†ç*‹˜_ÝHÇô€nØ­ŒÁÔ>$ŇÄg|5p®»5ÏgÅøEÞÆèš¦Ò'ç2ë/<µKNOIù ›zÞíw—1ò*FŒìcû —¶v®s] )ù/n¹®Û¹H¾¼ý–Ûþãá‡!Jq½3z°öì§?ƒ˜wÃ…KÄÚK® ·]Ûç7vÎmílZÇÅí—¼à…úQï‡ïùàu.’oHaÒv×yè4ÅhPLH@±3ðÊ+"S蘘 u öûñÀ¯ŠeÜÌìιü`™ÜEÑ—ûÑø—¾öq¢KÎlSж¨LG‰…öM’uT?ÑžªuïÇŠ)'–ZJîˆBt(Æ6 pk¨½Jºœй­?^I^ôüײaGÉhûù†0SàWk®É{J³µ:H™ÌJP¨Ø2™  íÝ©þXs0Á󩞃ÁÃuòÏ+êÌç‡bü‹”ŠÍËz…b¹ºÞ˜ædb:z±ð3“"!?Åg“aà ‚4aXýJ|¾ [n—{W9ÊÓ sÈhŠ“ÇyKýåïzs‡k}|¯q% ·a€bàÁòÅÕá¢VTÒ€ŸÖŰ̬£ÝAçx5gHùÆ¢Ç[;¶oƒ²%2C1ާ(¯©§ŠÏ È'7ìn ®… Â{-ÅþåyrP ÷¡½Ž«ELªÅ=ûS1][dE;†2°<5fSÇí4x§‚—_ œën½ó™_Šý¦ýâÕe§ØwÙÙôíÃþ úi{æ$åÚÚ²>o~´}iç‚u¸¬ã– ÊLÝ"DéŸÿñŸìÜü§¡ÿûß“¿ ϯ-+¥ÚÕvmn^ØvY­Ø¹Hš³=õ ›æ\³Ê_·]a‰öiPLH@±3Zl>#“?–Sšz«7tZzQ†!‡=Û6â.a«VµN¯×ë´´f¹Š8x[¡8ÏB9 e\# r.—ˆ%ŠÆè@bÔù°ªˆ\®·:½ÁhÐÃÍ åÈUäßK(Õ¦ÕäRC«ª…È|_@’(ÅØ<Åè£Ðëõ¢lß$kŽ·•¹¥¤dy¤´ÜÐn¿ßm%´ƒ:wÇ”JÁ¥$¡¼à<hJh~T2JYïổ³÷ÌŽÑ”3óØf7:é(§<…0 XN%Ã2Yó©YVöÇ%”R2®“^Ñg>?ã_¤¼P © \Œ¢Ý\o¤ÍÊIïu€-B]t|æ¡&Îó°ØNækš½ÊtXe l£¿È Ìí,Nʤ‚ápØï55¿_ƒ7š˜ ˜=¯‚sú’rIoûmCSóqŸãæq9X;Ã9w°'ö]õ³jÅ í²ºMS²óÃâMöúç³>¾§(‡©§Ša·˜hR¾T³zŠ´Ro”äô|_ù*}þk±=üËs]P f;˜šÎ&“RXRô!W‹XÕUç'dX.5ÚÝ.™ðššŽ*­$ó“·qSÛhÚ£Œø5p¬»5Îg (aŒ_‰ÿ+qã²]¢ì\û] Ǧʼn¶!HES’ÏBh]²í²?g+ëÆ’.¹vHµ³bÖÿ½~ ÅÖë)öá{>´³a5yêùµAz€à0b6±™¸Œ71ò ù•| Mƒ™ïá)F‘€bBŠ M:9ÛŒ*¿Tè‰,tÊ)JµÀ:S‚ð@ì·."á[4CÓ^¦5¡{ý`a$Zøä` ŒØÍ4ÙGR±»k¢\ÍìªÑ»W˧&ÃǦ¼ŠGfGIì- "¡ýÊIa×oxÍë $1ž–©V'ÆZ\Œ8ÐÙ ¿0ýiËî„Y«áïäžbHŠ (vÆ41ªòò]h¨P6ho–\µ èÅÈÒÊý‰Bu43£)'výÉ¥¨¦œ‹Ù?‚R o=0ä„´ïíÊúdÙ~½ÕÌDB6Ë{øbìÔ#Éæ’¸°D¹s¤QðG2*ЬëÙÛ/³‰¿ÕË^ôbíçÚiH+6™LÈߨêmÏy®kûi7ä)O|RäË‘R©ôP­öEùSïŸvMS§M;ÂSŒ"Å„;“uŒz¹(Ë…BÎRAVë:$*çÕ`V¹Us¡ ¨å¦Þ±Ö>1{¢^¯7 
2'TŽ¢ÕUE.L[“Éåd¥¨@hšŒzZµ¬®š*“+Ëu½Õ3O~¨ůñ°Û¨¨ùl:™L¦Óél^©hÆpÌT‡ÿZÏjæ±wΠ£5«EU!ãbŽ¢ëšÞó­ÍÞ¬ZÓÉ^ŠZ× <(|óŠcæ¯y‘®A#-7sÌôþçTkØÖ+ª’ͦíe–—ÕÓ*3ûZ­$Û«4I©\ièèr™m½¡ÊyÛ¾l–Äôh]óÚß–™}½Q±úmÚ°x2Ižo c¿‰ ÏçìÝ[çž{ﺻó|Þ÷=ÿõ‚bÏ?÷œ¨:CÛ ¯¤¹Óö;|øÁ£» ÜwßÞatУzýê¾1 R¬¤ Å@ç1éÔ3€&âU¿LÄâñX,‘¬+TK±þ @ë©‘bÐþRÌ„ÌÊ̇fž·Wƒ_T×tÇpá­;}>•–(Q`j[1xà¾û¥Ð¾„’Ir¢t{øÁ‡Ú¡¬˜²r·xo¶QlC=õ䓺ܘ úˆ(™;{ÎpŠ¥Ý.Ï.mÌBû–Íb¤R  ¶Ð’^¤  AúÓ ,ýÍØ¯¬î|óóËÆë€@Šý_ H%ÛÅ‹ï±Ën!%J¨¶Ð¾luúä4ÛÔùÑè˜!`Ÿö¹Ýȯ”ºõPvÕW¶Cú¤ ÕÍÎ;ëœá"úÆ‚²Ðä?³ÿÓ}L"ÝdE{Mâ¤.´oùSRì× Å)åŒ^ÈÉÕ]¨4@)Þ5Rc-ûK]•Ñ„xŸî €@ŠÕ›]øè#ˆÆ²EÁÆ“b’;)[©>öþ{ï¶|ùrU–Kp5ëô™k×®m‡*ûßô~sÔáGH^¤LLÔÞÎ;ì(Ndt7ý8¡B‘b²*å8 q’>9R )P.ëz±ÝÞ‘Ú°‰Js¬YúôèÅ.3c/š •þÕm~ýEöbH1ÉuõUW ;#»Ý.2¨ÃhºÄ~µ®–\[$ÎëÙà3cJ±7#o•ømi›O9`Ÿý>ëé-õß*+¦'ù¿ÿc‡m¶SÂNÚ´)öt:=æ{xàÞûå=lQ³ëWQÝ7"ÅbR )`¬Üït¹Ý.gç®D¾Ò4¥ÄÆËlv=ÿÖRYÑ ›Ídûú²é¥ñ·6^žõºab€@Šýþ@ª|>?ó´Ó‡M–;¡ŽX‡±ÕûÕCÚYgžzZö‡*#ëxÜ RwÌ>eŠmʰAµyã }#¡²iÑõΞxü ±Z«Õf±[£,Z ™Læ¤ãO”Âj6]SÌxv½/5Å:b¤R b¿ÂËT&€þøÓÚ{™ss¯«òoh–]vØé¶[níîî–x´_ÝzB™a2àèÑŠÅbjyêé§ž:æÈ#mÉ=7yRÙ¹ì÷[o¾¹pAüÙ`ðØ£Ž¶¦úŒ¨@ã¡Ôªšò–¦lÖ±9RL@Š Å6¥þ5é/—%â±·60?žXúu¦oŒI¤XsÅî¸Ý7ÕfßrÚt‰Ÿr¨8©)ÆK¶ÓìS7I©ð±jR¡î&úLRɉ‰±Ré–/&—ŒžÎ?Ï >ý´¬P™_³æWJK*ý"T5™`â¿é&HõCuI»°¯¾úJÒ6¯¸ì²¶ÞVT¤LŠÝ3&oÕs“ùHD˜œqþK弜QÓV}ÔV®n9}†lÕ #&^ )& ÅbìH±àSO«à)Õ¶š6ãàý˜}ú,ÏÕ×<õÄSwúîØiûUÀ”TÜ·WK΋$šnṵM•­üJ4¶†5“}£¿\šnsHm{Çæ2ø”­gly–sn׃Ê–_¥¾úùçŸÇ³u¿Ô ^-’ŹrÅÊÄ’%σW¸/Û{=äFr;™€ÜZŒ˜ìÛÔ|Œ&"LN³Ú¥ƒÌ_uS—ìÕ«Òda \ç¿ðÜ ìvöÜ3>ì™û¬>)MDϧŸ\ûÓORÒ-/Mo²É ›ôkê·2¥æ'³nÝêþþõ½_¿d¼«yH±‰bÚÿ1÷ã­7Ý,åó]ç¹|·ß~%œ\ö…ª¦¸áºëmF•}]ƒ_מׇ¶±.©3Uw_Òg8¦Jd™ø5•­yÈï:ÿ®;î =ÿÂûï½÷ùgŸýðý÷¢ÉÌW«”X°¾U}’¹dÑ¢×^}õᮇ®ºâÊã=Nâ¹ddUMºÃ1ÍæC[µê¿®—¯æ&Mêp0Ù—žú)d9s䡇å×®­TYñíŠçž}öfïMí)¶dñÜ¥Ÿ^?0Pîïÿ—4¹‰&ƒ”d‚…¦åôóσ2  ÛL#64TIóÆ¢…³ˆ¤R¬%¬Y³ZJæWF!‡R#_Uà‘GBõSÅ5ÔTN¥UÕ“ø,‰ÀR¹Šr¨"¹¶Ûrë÷Ùï”O:÷¬s¤<ÿ×^ç»õ¶{î¼ëÞ»ï¹çλýwÝ}÷wÝxÝõW_qåç¹f6óˆCÛy»í¥R˜ü\›*&Ê sy‘:ÿ±±æ°Úåç³gÎ’,IÒ”÷ qjú‹´O¤Ø‰³?þÈ-£õö¾,Mvn½_¿¸âÛw'"Rl±f—~ýÕóßô†›šRïKéôŸæŸ%‹›H)€]ÆKì*ԥψ ºï¿$$ŠoÒAaj«wFöM•S‘Yª†×4»cºcªT4³—T™QæßæfJS;rÒnJ7é¬j‰ª“&#¨ÒfÖêàzžj§æÐì’lå.’>yòñ'êõÔ«øî»ï2™LÛH±s /vŽ Õt›½0ÞÙýñâŒÖE²8åň-xÿøÅ‹æ4=±YU#v.R bH±‰g¼µ Å‘ùn¹M$”,U/LmõŽ>¬¹dzØQ­goÈ2)=6ìȬvQor£aÕe3J}IÙ/»Cÿ’“rIväÐÓ…óÕv"›ÜB&vôáGJ]EÓŽ‘bJ¹šnç'–œµôÓ&"Rl­$rJŒ˜±?¸`"æ6O=&R 
bH±M‡$QÞtƒW2ÅI‰É²nf”Û¯V¬Ÿj±É%U}_š\RæKíwh3ë©ÎTeYÕšIS—T7ýÃÚCóKÒô8úŒÇ±¨Û©žªîþá²æÇ¶–bºmJg4É)&R )644tÍ•WÛ%’ËîPɉÆÖHu´ÚäPšì‹BRWG íëVshriìC‹Þ¯ëм§½:£¿ÕxãYŒ§ÐO$W÷ÙcϾ¾R̤Àä”bH±õC믺üŠáŠøV›ÎUtˆ¯Ôa9sî‡|¨ ³’3¶‰nVÙ¶ é©î²ýŽÌsm»åÖê¤<—ªz&Ï+Û=vÝmUß*Uv )VR `òJ1¤ØÀÀÀ…ó\¢Š¤¼—x"½Î£û¢K>ýä“ÁÁA± WŠ5S•ï먻¯÷ëïi•VÏa}=e’jªí`&óX¿•+Vúïºg×w»gµX”õ“íö[o“Íf‘b& Å&§@Š >ü°*„/^iûm¶½áÚë{{{¥¿îðö[o‹cÒïuM}}ÒØ±èâú²SIþFÁ,j«v,vÃp髈­û÷è%8Wÿ¸úÉÇŸØoï}ä'*nά?ÿ¬ R¬¤Àd–bH1 óß}Ï©'ü×ûïÿ1—«í°üËå†{Ò%ÀªF¬£Z¢Köu›btëÐ=¥›Ù¡ì¨AÔ%óC{µÀ™jú’ž‰4%é¤[×_¬Ô048øúk¯Í™5ûÂó/øöÛo+R¬¤R  åR¬íŰ͖[UU”ÑRÊ:z«c¸ZQSL÷·o(–¯Œ˜ì¨ûªCÝ,Ÿÿ·f¾R )0ù¥RL×ÕªER)8äP¦$§N±Ù:,rfß=÷>ôÀƒ·š6CgSšd;ŽJx¬©fQ[ð¥Oj¦®Z›O™ns¸ïþ‡t°šƒLFÝ]‡­I‚dú›oLV@Š R )@¤˜à:ožá¤¬J<©ªüÓlö3ç8SËS"˜>¶Õô*hktÄ–v[¹$>K*—©A6nÚ‘I7ݬ#—ªW-†:ïìsûûòùµ·Ütóv[m#w/fdVªIZvÚn‡þþþÆ¿R )€@ŠÝ}ç]b$üjxk„eí²ÃŽÜw_©T’«¿üòK±8xØ«z^1SÕ`êŒU¶Ã­Ãj—Ø.٪ȯÍT7#¶Kš¥ÚMúWåÒÈ8iÖí·ÝîƒÄ:ºíÍHä°ƒ6jœY¦Yí2ŽLòØ#–Y5þEbH1¤RìcJ£v˜E,ÕñÇû^,¦´ÔþóµTå×^o‚m(´/;V‹Šü²Iè֙ι·ßvûÉ'ž$Ë\Ê8ö)rµšYÍ—A¶ï^{ßäõ^sÅUÛl±¥Œ#Õ8vÕÁhÇ}ì¿ÿýo¹¯Ü]väÖétú’ /’ɰ#^Ì}ñ¥M}¤R )€Ëçó§ŸršÃfÛy‡®õüåûï¿—“JHé*]É/¾züºv˜*ì5Íæóç¹òkÖœ=ôàC”Ó™•vU lŠu»­·ýÛüù•)žK×ÑWM|àÞûTxšlõ4DÌ=þXàÀýö·[­²ýèÑbH1¤ ÅbÍ’ëë{õ•p|Á­¢jëôŸtü •Ai “Òc²/1b}«úäê`qP¶/¾ÚzÆ–âË6è°jgÉ|¼Â}¹¨®õë׋á’@°ûï½o†}ª‹9 /&Mzî¸í?üðƒºF:«Ï?û<üò+ŸöY³_)†@Š Å´„Ò)“cvxúɧ¦ÛjqÉiF1þÓO9U–ìÛ%Î묹g†Ëª ç+;&‰“+W¬Pƒ«­,»ý–[•/S^LÙ™Î3dœ1' n¡‘bH1¤ ÅbÍ¢j‡ &}ÖæóÛn¹µ8¬é¶©â¹Ž9âÈï2mÄ´«ZþårYrXŠeõExmá˜öÔOêžZ{ ÿßÕé#!cÒIJ½ûö;2ˆP©Aý\f(["ÅbH1@Š Å6’ó¨JòsÄQ+W®¬êR‡÷ùï›fËewX‡«†Í<õ´õCC’’YÛS4Ç•—]n³Îž9KÍZþEbH±M Å éò o ]ªü¤˜H«Ä’%ï¾óN6›­5b:žkhp踣1*‹Y¥ÄØ_,«ÍyÔgúûû¥–ÙßçÏ_³zµN.)†CŠR Š=îN…»§X¤ØóÂ^+¾ýöú¿\ë¾ø’ Ô?Ȥ“bH1¤ Å ”ôT¥X)€›ÔFL/aYZ¿¾‰1‘bH1¤ Åb€ƒ6—bH1¤ Å)«V­2ùób$ÎUƧéæJ,9SœÑO?­[·./MæÙX[·nuÿúÞ¯_Z¼hö‡œßüÜôcþK1@Š@)U•bžd©“ž\.'~KÕù±.Rì\%ŒD÷4ÝÎ\¼hæ§Ÿ\;0ðKÿ¿¤É$kbƆ*éo^_ïL,9«ù¹}8[?,R¬%R,ŸËäŠåºú— yÕL5Q*d»cá.Ÿ×ív)<_ éIçL†/ Ïaø_=èïï\.æyåËO7“ìŽE#ápø=¹FlL>“ˆ…~Ÿ§úHn·Çëï Eb©\±¦{]ïDè Ê;ÉËÕŒÐVßEÿFŸ—3ÙD$äózªãöúºÂÑD&¯zhJÆ鸻s‘T6Ÿô½`²víZùó{HŒ ‘bç‹ë™ˆöÙöö¾ÜÛûŠle†5ùm:ýú²Ïïþ¤û꥟^ßÜ”®íYz}÷Ç—}8‡H±H1Wç|±l݋᪜ðDÒŒlØ/ׯÇå¥ cŽtv*=µêì\Šx:átÉP/ùDÈ/}5ó=•º1"ËΖ›ÁíOdĬµ`„¶ù.awõ|RæYˆ½ãã Ä‹#Cö¸Lºê{Àdd``@þü–|¶±.RlÞ’Ås 
ÑÓ/’GšŸÆšdMJŒXï×//xÿ„Å‹æÈÜšh³%Fì“îk …‰˜UYÞÕ¢…³ä]!ÅZ ÅâZ¹B¿-ò‰jog|TŒQ9›pwÖ…/’2mÁ¤ÉÌ;Cú|ª$>¥V½Ì{:Y©ŸB§f§ËåtÖJ¥`¾Rƒ~'uŒàe*-¡Ý¾K8•‰zõ0BÍT¼‘↟&[ŠÉ½`2"‰“« ÆÌ "ÅDôHΣžu?5Š“”IcVs>üà™Xí|Éš”±‰š•¨:ã]ÍCе @Š•ÒáÎ*Q‰z2%©Z w¸¨ÍK.á-:<]‰T¶T®&5æÒ± océ‘j…|ÑçÃÉTPOÈÀíñùýþâ™ú•˜V9‚Ë”4Ã’~ÞR1›Jtêà ë'2':™1 lä‡Âéb Fh¯ïât9õ ‹$Rùj¾n©˜ë‰ô5¡+‘S(¢Ñp—¶±]áHTGzä³À$Eþzß4Áb€Óúi2Î )H13òZH¸=uöôÇsº¦Ö‚?šªŒE9×í©18­’/#óqcÉÊNå]#‚f\•VŒxõüÙŠ¦ÞwRLG]#3 ä[0B}7œSÎf¢z¾Z¹ŽP)´Ÿ*Wà$X¬¯¯Oþ/‹@Š5*z˜R bfd¢:bÈ—ß8”3ÑN-&ª¦)ó 5JWL¸ä["_”W1þÍéOÉõF('tJ©/V6›Kwõ‰\‰¼îXÿ;Ñ=u)´æGhËï¢qr&qˆažGò×5÷“}IO&;¥Ri•^ )†CŠR¬5{´¸tçÇëÕT½D7%jcÇ$%N<†9¹QX‘L©•I^ÑvR¹ô-Цö,£ÍS°§ÐÈ;)§½µ9˜MŒÐæß%”2}Ÿù¸Î‘ŒÓ@Š€è0‘b*’úbH1¤R bNY•Û7u(N®ªb*ÔYsÒÓ§ƒ°Z$_ºùJãäƒî±ÕL … kd2½“îêq‡RÍÐÖßÅ,Ô1ÚAbÄ‹©‰~BŠR¬ý(&ÝãDÏ'ºÆª8V Õç;LìF ä‹®yß…tØ/35ÃåvÕH±†Þ‰¦™Úþ»Èy¤R ý„¤X;’¨æH:}ñrESŠxtûܘò%˜l3ùÒÅt´6(Ìéòøº¡H4Ñ“ÌdóÅRY¿}ÓÆÞ‰f“€¤³BŠRL—Û×9tŠB·«SáM•6êõV …RõË—æÒô ¡VJ1ý¼§§+žÌJå:$N£ïDÓÌíÿ]bH1ôR bí_nß˪S™¨·ºìdb¼‚îN¿D–ÕE1êÜ€7]ªU!Å:J_µVŠõÜUñŒùë ºFnÚà;)æ’Éd*•L¦såæGhëï‚@Š¡Ÿb€kÿrû®`¡"´÷ We‰&öŽ˜”úìKw :œ+T¬I÷ tç+¦”³ÑÎÖJ±B¨:AO(ù›ÑUîQ“iìä~­à ÍÐæß)€C?!Å)Öþåö£+–‹u*œ¼© éJèëuŹƒ=ú–O½ GJ T+¤Xc«%–3‘Ñ“iì$º\ª³+ÐÓüíþ]bH1ôR bí_nßêIF¼Õe'ÓMm ~Áé‡fN2¤{»ù±”Š7Zª˜ñIŸÖK±zru-}ÓFÞI>!BKGc5?B›~—ö—b€CŠ!Å)VJG:55®¤&°,¤;9}fê$ßìÔ«[j²1Ÿ#–-›…Dµ^ŠiÕen‚z´0Òae¿ûÂz ]É«©Úÿ»4/Å<ÉRbH1¤ ÅZA^˜Á)™­uØ©qú¹±º¦ãÎÜÝ…ñ|tú“…ZÿRî éÛ´¶Ð~282o89Öç"~Õg_4ýûÞI9®®©—5hñíÿ]j;×fõ:#ébbH1¤ ÅZ Òt™×Y/g7–hN(šLgó…B>—é‰G|úªA$U+ÚÈ9º?“ŠÅb!ŸM&¢##¸Ñ ·…RL'$jÜþXw*—ÏçsÙT2øF‰¨p|c%ä¥~óä²éxd㋞pæiÁmý]Ì;ëªgúA‘P¨ËßÏ ÈbH1¤ ÅZSn¿® ˹ŽZ2ÃKÍÍš)^IË„=-•bB.¨ãQ9q5ÉàFç‚ÉßûNœÞ°ŒSK FhÿïR ß¹»vº ÅbH1@ŠMåxµÜ¾««»ÎŸdº#^׸êÆŒIŸ)ùXÀ;ÞÏý¡x¾¬«¿«4ºÒØ«FN„()¤ã¾ñŸ¥+Ü]¬>u2ÒårêtËÔïy'îp"S©¡‰Úú»„;í±ó9kÑ!¿º¬ L¨¤R )H±¬ÖZOÔIQRóñH$…Báp4–”½b¹R/¥B:Ù‹FÂ!!‰ÆzRýóML¡ú,Æ£ÔüW Q¬ßÿ+¶Ÿ¾þê‘—^¼àµW¯õ¥Ï5¯¼|é;oßÛOý±áax+Q Qì4¶»cvð±oóÀ¦ØÖ÷ß=ýö[7¿÷î½±»Tã¹÷wnÿø£‡êîdy+Q Qì8»?>vþTçía &Q,ÌŠÌèèžÁ°&o%Š!Šíîæ‰ò×·X|ˆúk€ÚD±Iå©Ú;YÞJC+?¹!Xç†âk:¯ý•@›Å`£Øq|öÇ\¢Øÿ¢¢ØÖ·k+¯o‰] c¬=ûIÊõ Š!Š!Š€(†(†(+ÅF£Ñ°±ˆñ‰bˆbš(V–å°±ˆñ‰bˆbš(¶½½=l,b|¢¢œK)ŠbØ@ÄàÎñ Š€(vpp ‹5±ˆÅàD1D1¨Dâ"^Y–>wÿ.c:å­IQ Q ºÝ® "ŒÆ1È ŠÐ)¥h"{{{™VŠÑÇH)eÅh^¯M¤ªªL+ÅèãÄ1È 
ŠÐEQDé÷û™VŠÑLjcAk·çºcnP¶ˆé#ŠÀ`0°,Öæ5±8¹…Ű.´¾¾q¤,ËÜĸcè1zkb-%ŠÀÖÖVwLkOëŽÅèsË!Š ’LîQ®öêÅÀÉ­I t Q ûb“{”‘K¢›TUµJuL ‹ÆX'é3mG Q €éd0Œ» «)†#–;OQ i¬(Š^¯—RZ@&„Å(c 1V9¬6Q D1Å@Q D1Å@Q D1Å@Q D1D1Å@Q D1Å@Q D1Å@Q D1Å@@Q D1Å@Q D1Å@Q D1žû×J@€Å@Q D1ÅÅ@Q D1Å@Q D1Å@Q D1Å@Q Q D1Å@Q D1Å@Q D1Å@Q D1ÅÅ@Q D1Å@Q D1Å@Q D1Å@Q Q D1Å@Q D1Åh,Q 8wZØ> t¸:&Š,·°½%»ÿ{ì-Y®c¢ØéÌsØÎÎNJé‹/¾xÿý÷ßmâGwüO)ÅóyÅNWÄb¹ ªªÏ>ûì݆ ~˜WU?Ø›ÞÅD1þfïΙ$U(ÿ |ÌVl¤=ÑBCB ‰½bvg˜ƒ9°g†ÀtsÐ3Ø ÝíÝãCUde†§“IU¯Šýÿ)ýÞ‹ÜI³¨¨;üèElYVp-bù—ùÿøËQ?)€å­{y_ÞÆ¯]lyc_ïbD1±ç(¶vŦӥÓî¿v̆@;vÇøˆb@;¢X „.mgõI›úâ.A'Õb(NBã7R½˜J½$¸õ¬V£ØÞ,ú“¥ {iÍÙ-ÛÍM"4~+¿ê·¸É¤ÔÞ©WÔ§iF±ýôj¹Ù¤ÚÄ÷ñ’fç!Ql:»âN§bØÅA§vMù;D1QŒ(f…ù¬ä|kì‹$¯ØÑ¨> [è,7N/e™Ç#tN\­>Œ×7 ŠŠðU¯’j=Ší€ùLœð2©®¦Ä†°SOê³ûêŽíûŠOït΋<aˆªqÿÔîÊpVùFsƒS| l±ÎñÂ8Ž<ãeËÏäF“šbýrN˜vã,å<öMòj.v<í9d#ŠõyhN(J²"¿Ä+4Zë\b¶'IzÂ4ó®)S€( ŠÅv°/7»X•'[ï#ɨ^ôE´ÖäÅ·´3žJue¼t*GõNÛ9sÁ—×Hu vžÐhÛŠiÍE XŸs“íìfìsGè¼LîžšvE¥UÓõ][Wõ(Í(æ¼üZÇÒ1»à˃SâYF$݈b¯.—vêFè§+§}‡¼3€>´^±^½«³ÐùiûVs£B¾}`TM{§ @¢ØqÅœ¢—æüZZÒÈT(Q9~¬.zбÂ~}Ç.¯žÕ;Qlï] 4çfÖc™¹­˜ùh§×ÌŸ}“½´Uº!Ç÷Nmý×j%õ¤4ÓÙ1'ø¢<½·”i®cóÕí•b'ÎÊç/|œ‡¾ëºöI7í9ä˜ûÄÙù ^©bטÔj zuk>»Æ³ûÇ@¢ØQ±R,Ê;}ÿ/ŸãèE|~]ì¨Òâ—¾IÙTÅúÚ¨‹TïD±ýX'Ûü>Í&ÑÆ¥m+f¶©…SNÛ·býö€(D±ïƒoŸ,¥’ó)å‚7„ÆÃ‰QlÿÌõYn=4þÍÕõµó¾‹ìWÁeÇMöešb3ŠÕ_+йÕv†s>3ŠUŸÅS›û¶ØäÆÕ¾CÞ@—â–w¹ã7åT»£ØþñÅ€(ö}Å*µb:»B¹j}¡–ûDZƒ¤VW²õõ‹gEbiO,æJmD±Ý¸ºú£“ýæÂ1áG'G»P2ï¸ÉVЩ;VŠ•ãþ©­ÇïÅ4cßäix®%Þtn潇¬ I¼o¼Rlïøˆb@;¢˜ê/0;‹ù5…v<«ÏP„¶x[TQì‹0–'ñÌæãœmê鯾ð³n×M¶.Ãæžbv>ÈýS;rÓÈ¡«³óɱV¾Ö`ï!÷í)æÄæ¸d›Š[öiPû£ØÞñÅ€(v$D1Õ_,½ì¤í| ›ÛWÉ.ý ?ü k¦›ÎuãË7¢Ø—@?\s®§Ç¦b]Œ{or\)ÝPœÌæ¸jÇ‹b²ÏÝ3øè5Cd¿êQåŽCÞ‹bm"¶nfq²…1©ÝQlïøˆb@;¢˜h´e\åë”6£z&ÇZßOßÊu£ Ä:+̕ڌb_>™yb[Ïê£Ökœó´÷&/¢¢y½»8Bç¥óþ©/Š™[ÎÛá«$•ê·ÀOÛ‡¼33B +¸­£]~º ïÕD±½ã ŠQìØˆbq=©«!·Åk^%ir ÜÔ¥uM\M›Qì« `ÈCarŸ›—¼ø–0xI»û&_Ù^ahŒpU;ïíqŸ”­'t–Ÿ•u×÷]SžGè’vÞqÈûóËÈL¯~'çØw-ñZØ+µ?Ší›2Q ˆbGC3w‚÷n¶4©/î┣1„æ,Vø­TÛQlï4Sé¬4±úõ~jëÒËÝ7Ùd.Û?µcF±í‰èErÇ!Ûó’—À÷IšI}aÛ9~¢Åކ(¦ÿ¨å_¤zQ§Ør®µ¢3´ÃBi6¢Ø— `N\ã'›I=.–ÐÙѰë&os¢^ivLm_+¿r3O+ó“#¶ùõîC¶ç5]ÂÍÚI­¥Ó*vÌ[±ñ;Æð=£€(F‹+õ†æì 
×Ìzei‹“爖eݤÞRE®¹ÉýV3w.ß?€îWkSN¶0.½û&{ÕЧ¡k2ˆs}Œû¦f^Ñ݈bæ”Í(¿Åœxºï´}•®-ÖÙá9Õb×!›Ø>¡åŸÒθW]˜·bûvŽà§ÅD1Èy욺,‹ʪiûY>þL›ë¶ä<¶uueÝö“<þÔö3'Ò·MuIYÖuÓ“ÜÈrÚO7³¬ê¶æcL€(‡GöF1àÑ¢€(öþí+¿þé«(ö§ÿü{1o¢€(ö D12Q @{¤ö»þåO~&nýìÿéw¿‡HcD1Qì@9ì·¿ýí/ùË_üâõWÕþðÃÿòÿàü§«åŸüÿþCû{`ù¨¹|à\>v.>?ÅD±Ãz^¶ü~þóŸ·ß°|ø\>‚>¯Sx¼( ŠÿOÙåÿ¦[>”¨oX>‚.Dõ¿àñ£€(v ebã8¶m«¾! mÛqY,öQ SWçy^|”çå Å~ËÄ–mˆbøöQlù Êb1¢Ø䬑êp¦Ø7ìrR@;ô2±k»î¬¯¾!àºïþs[(ÅÖLU$nÙÑx¼1žqé&D±£/“R¶©oh?’R²XŒ(ö¾©ŽÅ-'žŽ7ÆXb¬@#ŠD1¢ØWbö£XâÙ–ýIJƒfV@;zûÍo~󽢈bËÇQ¢Qì(+Å€(FˆbD±yìë"O“ä¼H’ìR4Ý ×¶ÁŸ^Ìê‰ìê2ûxl^J#Ǿ­>9IÒì’eÕöãë8ŸìËHܲÂn~zI¦¡+óËuÈç$ÍËz˜¤Ò™‡Ôe‘¥Éùé ¬(ën˜Õ½Ì›ðæ%å4Ôež>ï’×m?+ˆ(vü?_¿oQì€ ŠÉ¡Š|G¬sâK-Õ‹*Ò~2i¥ìrÏϬS©žÈæ;–x“åž‹N]M•-ÞçÖ³z6µyðÆ©Ý0éfe›‹¯¢%,»y×Fûv9­ÝRÏëìSZÎ ÀC ŠÿW¢ŽÅ ˆbC‹MÞyTOêØÕÖqÅ‘#4NT]ËQæ[â~ÖªÅ\¹b¹Ÿ}›#®F½ˆÓ\•4Ó®oŸüü[êœZ©Åˆ(<~#ŠÍM"îc‡…ÅnY+Q¬Òl»vÍë+ÅÌYmê‹»|Y²æ‰;yÍüEQlªÏâNv4(D±£!ŠňbÃÉ2U]Šâ’œÌD•´³ÅtÚã“] 4–UÝ8K9O}qö….ú¸¬«+/Ù%½×G&—K–ei–÷R©±°…ÎrãôR–y8BçÄÕúæýÂI«nžå….*zõb<{–9BD±Ã Šňbsê =0eRiºÔ3k‘Å,/™”Nö—äFVÍÚ«]h½ÅÌ€eÇÓËkýÚ^#•N¦zuŠÊÑ<çÂñã²éç§Ãç¾[´WÝ0ïbSâšÏœê¦jca¢ØÅ€ŽbD±éìlÕ™yl_t£ÔöÓvÛ Ç¡o›ºÈ/ç(°ÅbW›JGhüøG/–s„0CÞ¬Sc»Þ)N?2¥ÙÅÌ[znÌ[#/ú÷„y¯Å‚(öø@#ŠÉÖfÙÙbD1Ë¿È7KX›Å'ϱ…iW›ªXÜOû*Ì9óÅ&7Lºù ¢Ø\{úùªY™ê³kd»c@#Š=>ňbSå|(æÄµÒ™Û{}µ(VïŽb‹)}±Í­ÆÝQ¬ñ„&î¸Ç‰bˆbD±Ç¢Ql®Ýí(¶³éÈþb “åx~xŠ/Eæ~­•bî{Ç’ZŸõØTy| Ûoq“ùëD1·šŽ¾R QŒ(ÅØSlT¯ôG뉰Üót_«­½Îe'Õ³ùììÚSl,ý¥Yí'§¡.²(pvv_iO±¸š6¿ßàT ê(ňbD1¢Øœ¸ß>Ù&Z¿±Oå+ÅŠÐ~w/ùádßÅœóÛß>ieT:Ùå¡ád͇£ó“+žêž«g}4E$4vùá Y_Ò4Í®–Ê›a+Š™·ô¢^ ›oŸ<.€(F{| ŠÅÔPœ„.ÈÚ—8Õ^¡ ‹á¾(6ÅŽ0£˜~Ý»£˜ðÊ~VJI©嫜f‡Í¨žÉ±,­JåƒZ¼þÂÇK§ßˆ\?©ßʉôÿjÊ­(¦Æ2º mÔ³¹[†¯±Â^¢Qì±€(FÓ ‘-^±Üàœ$ÚC…Z*ºk¥X9BcGYÙõÃзyŠ×´ÕRS >€kÀÒya”¤Éɳæ/KÞt^”Õm×÷]y9;B禳V¾´™¾ÅãÊ-uüøœÄ'߯—^ ¢Qìñ€(FS²Ë,q—¨ïßhÈCq's_­1·Å*§œÔ¢I}qgò“¹öĽÎõ¤vG1ý–nð’I¢Qìñ€(FÓMMf‹ áÍc•U옩È0%®¸Ÿ•·Ç¦žÐí©N±å\ ZþësGlóÓÆxt;Š•“>ù6Û¼–¤£ú.ňbQŒ(v5wiä[b…Äõ Õ.Ó‚”WjÝx‰<±Â>eu_ŸõË$“~l¶eF±ÛðTœªêvœ¿ü”c×ÔOYVuÓ £TŸAÊyš¦q§iš¥TùrþWhÚ~–j‹‡¾©¯½«Ÿ¶ÛG}[W×+•uÓÓ¬ŽQŒ(ňb¢Q ŠÅD1¢@#ŠˆbD1€(FňbQŒ( ŠÅ¢Q @#ŠíôïìÝ kÛXÞÇñ¼"¿?\ŸK ·Ò½Zö¦ìÍ…Áy¼Ó°­“¥8»4õÎxÆx/PQš,nCÚu&k“’e©˜Æ4qÓêiÄ$ŽAéŠV2#ƒž¿mùX>‘k›zœÌs|tbØý"½~ýú³Ï>»ÿ~*•ºÞ@ÿAÿ¤7éG Š!Š 6@Có‡þPþþ÷¿ðÁ‘.èG4€†Yƒ ÅÅQ Q̇'Ožüö·¿ô€†Ñ` ÅÅ`ð 
Š!ŠùP*•FGG#GЛÝÞ§X€(†(ˆb^ÅØîî.wËä‡~¸ººº³³óuýý“Þän¥¤Z€(†(ˆbÅv[}ÚJ?ÒF—––hñÖô&ýˆ°ÁôAl½€(†(ˆbÅÔ‚³ˆíííYžh€³‹ÑÇ­gêJIÊÍ ™L&•Êd„Ù±¤ê¦uš™†Þb øÀÐñE¢¢ Š!Š Za,‹´Ð…`VhX¤…>N“XJÍ¥¢!wÃáTQ3OeSæÚ¿T ¡½ë¿Ãdü®ŒRf¨ç/Êdè;·ÅÅQ Qì­U*º¤ëÏþ35¬k×®%“ɇÒŸþ¹s1vפ7æÜ_Œ&¡©þö·¿}üñÇ49‚D‡£ƒZß"M +‘Të´Ñ¥”£í¥ôwz¬¢ãXÁ”æs  {ÿ¢ôTpˆIH(€(†(ˆbˆbo¡V«åóyç ½é¼LŒ¶Ò·zFƒ‹u›ŸM øVºQh¨W™ÓVt)Á]|Ôߦãc ÀUEï(–FD1D1@C;¡"öÉ'ŸDzC˜´zFƒ#½¡ô½‹³#CœàHl6WX)äR1ö3f¤dZ§ˆ©sss¹º¹\¡hö=ŠùXCÿ!Š Š!Š¢¢]¨iùÍo~óé§Ÿnmmmllܽ{—îsŒ8ŒŽŽÒ_‰Õ3L‰8Є4-MN‡ ÑáØhýÝÅIà{$5)ÌU1¡Ävub¬&]-I’(UÃbtU)Ò›ônIÑÍ.dLC-ËÅ"—ŠÅ’,+ªî˜Ëý¸ì}B3ÞÍ¥©r©~‰R’•²jpkªUMGm.µ·v§•åúì_[Q5÷ALû©oJ*Êe­ËºõrY)•ì/TfËöæóŒø_?Œ&­/¶·(Ö8Œ–rF1±Û—m”åÆŸ$ÿ·áë{ÿá5&—ËÎÁô~ë Šf˜>N Š!Š¢¢Xÿ÷cw5Òn_´í—åðêÕ«7nœT£©hBËGe÷Qöq1sndÈ)V([G•sÁ!§°lÒ»º£J„І!¦Úõ,.jõÙU16à¢[¢ fÚõv¾˜Û3pÃтŠó¸#¢,ÅÙ!Ø»©ƒßäÞqõ–!ÑÞ†Rû·—fÃÁ€ëº¢û@ÅTÈcª£kp0¥¹DÐeîP¦ w»Š*”‹¹ø‘Œ´¿%bÈ™hÈuÙ1{Ù|œ? c 1åGÑi+żOûs"úJÆåÁ¡Øl™E*§˜è®°’ ¹Á)Q¥ÉØûÌH¢@ëòqjúÅÅQ QŒ¶ºg׈±"ætpp@?zûÛ'i’—/_ZGÐAÙü´«?ŒbG ÄUË]!td£óþµ@(Ô1 !if¹à|‹°+†¤ÛW¬E‡¼æè „×S(StÙä>hG±PÏQLž³¬èœáÅ‚ éÈM xLŒå >Šy)6?`ÊÑã—íÁÇñ±0›‘‹½G{G±Ð±QÌ”ãA¯_¾ÄÓó)æÿð|NIñqjúÅÅQ QŒi MZ]ܹsçí7Ú§I¬.èБZLߢXÈY!âb÷§S&¸æåÝââ‹D݃E±B4Ðy!ÒH, ùºdùhQÅtߦÊÇ•bj.À]ÌÅ¡£¿‚G‹å”n[e‰¬ÜtJ|óžShpIè¨ÁP8‹rWÔ¥ŠºåÎÇñ»0R.Ä<¾Å•bÁxã¤ëBèø¯•“¯SüQŒ]Miù85ýˆbˆb€(†(Æž,ùøñc« Q#-~ø!­Ùê £Á‘šÄê‚ÍžPiõ»µÐãÞIF[ :G®¨î ŠÅãÑhêÑg©ŽÒݬPÌ\rƒ 8ƒ”ÈúÇ\8À‡Œ#Ç %rªnèj1Þñþ°¨uÝ»]UJÅ–’,âG ά\¶;:!1äYço‘)éüžbì²»naލWÒ Ç›;±éŠØÙ£B’îÅB¹RÙ4RŽVÏ—Í¢ãN¿`l…¨KZîüœ #jg- b=d™t«f”ýÀÏžbìÒÆ¯'6W4ÌFKë¼"L(>O1ÿ‡7›U4]• #G.ð“Êš¡+ì7bGôqjúÅÅQìˆb´/~ÄaiiÉê ‹8Ð$Å¢ÕÏ5e|# ^t‹@L±Ú”¹è‘(¦Í%bÑXC4N}ƒYq†®`J;ÅB‚iÙ¸«~’æ¤ø/!ä›F¡9§Vœ‹F£ö²2¢Å¨+ÃÜ<ž>é¾vqww!ëe|ñ¡Iø;X[ Ñ 7çäÁøìJs'{S-Hý1˜s…b·ÓíçŒø\˜)ÏvîaǦæ{™Ï§O²±†â ÁßÌ‚”¯S̎Ȧf¿§,ŒtÜUZfk‘øI|œš~D1D1@ãáöIïÛÙŽø{{{–'Àöïï}~ZÌ·Å¢æ=ÒëJ±áŒáž‹ø[]梘ƒ©«%IÌÍ ‰8»Y«$ì¸üÕm¬Üùˆb¬}8b‡îö¨JI,ä„L‚Ý[ÇÈõXïKÎ+†â+ãvÝ?I ^¶ÚJ™ãì(É Gb‰{6¨?gÄ×Ât)Å=±ÁIž ÿ»QLãÞgGFö(Ktöתù;Ålfþ¯Úèø¢ŠÅh)n‘>NM¿¢¢ ŠyÃFûº®_»vp]Œ.s½’Þ¤±"ÆÐ$4ÕÀl´/õ¼§XÜ«°£X :/[áîÄ ¸D1­b!çÅR’Î…QÌ(…ùC$ÊÜ/^ʱ Ÿ˜“ˆb”½ÚÓŽ|óÅLˆë/.—¡yÌo*‰á!¡xN·<ø8#>æ9˜èbü­¢{v„7vt?§˜;"»|ÎãÔ¸,ÒÇ©é 
@CD1D±J¥ÂÖÇÌu±7oÞ¤R©HÃÿ6Dˆc1ÚJŸ1ùuýýÓ¹÷)šŠ&䊔…6ZŒÕfi„Ûœ¾Ë¸\¸£ÌrOŸä·7„Ž+„ŠüFé|Óg]KF0àŸâ;Š•ls.Ü…!À·ÅØä^ÇÕ¥\& v,æŠåÎßñ^˜¿(&%Þ>Š…†zHhþN1wDÝã×ôX¤Sˆbˆb€(†(Öù|>BZ׋}úé§[[[¥Riqq‘]#6::úùçŸonn²‚æ4Ú9‚ÓGèƒì§4a.—{Ü@wM²kÄ-ÃêV»ø=˜<ÛPTñŒSì1î>5îA–,Š9ï>#‘„XR4Ý`ê{QLg³•¬¨]² ‹(s¢¢j†ÉbbÿoŸôÅC/ËÒ\&Î" ^xþΈ¿(&¥ø22lò“»}2š“ ]kÓuÃÁç)v?¢¿(æãÔô Š!Š¢¢X­Vûä“O"žÖ××­Y–ÇÇÇ#= a4Øj G<ÑhV™ÊìP§Ø\©óþÁ9g7`wJzÿ~%íhmÉlF€òJˆú$¿)•³M‰ñà;ˆbf!ÆGˆ9Å´xzªãv9çqQÌð±Ñ~Èk£ý‚ê/Š5o! úg´dZLi.ÊïîïÂßñWëʹÿ”OFesûŽbE½ý~h¨km,¢#ᆑølÉç)>‰(æÿÔ¢¢ Š!Šõ©‹Ñ…ZÝ®£›"-‡¯¾ú*›Í:n¥äÑh ³h’nW“Ñ¡Yë÷Åbœ`(–HeR ·;¼¢J÷ÅïVæ7ßñQŒa3é¥ÙÀ;¸}R™ qb9Y.I*Ê*+&¬ë±£Ì²oÌ#ŠQLš])J¢XRù58³ 3/ªôSSV¢Ó‡è+òÅ:÷t % Fë\K™^.ÅòwFü,Œ?G$‘+¦eêŠe‹öÅH(¾R,Š+¢jÚ—"2áÌJóÒ¯rqn¤ãY«eŸ§ø$¢˜ÿSpºÐÿ–c{PøõòåKD1¢ Š!Šõ1Úêž‹ÅèÆÆD"1??¯išåæË/¿üÇ?þñ§?ýéw¿ûÝÕúú'½I?²ÜÐT4!MK“Ó!è@t8:¨õm1e¯ Õù+ª Å?IðxÃ"ÅH`$ {"QŒÝ·è-(ê|¾ D£anOv¶½·%–s¿°nÑDL¿’A¶ˆÏö$ƃüÒÃ|á Ds¦åÊßñŜъç#бºÄŸ5ö#OXÙ"¾Nñ‰D1ÿ§N1úŸ@ñx\×u˧•••_ÿú×OŸ>EëD1@C³w¸¿sçÎÄÄDdÀÐ’ha´¼¾ÝE™ 'TP îîB(FŠBÔe–XÊqIÐH©>¥Ê*†—`Båë?бû»c—°i,ryŠ‹*Û?=tt¢„Ô=šhÜIœáx¡Û=˜šw{ryŒ€ÇS8þΈυsåøÈOÅãÏ#;kö»¯ìIÄÇ)öøÃÓ¥®QŒÿˆÿS§ ŠX¤ajjŠ»RÞÛ?ÿùOvá|?º¢¢ Š!ŠQrº~ýzd€Ñòt]·úÄ,„‘ [K Ç2Õ´øç' %Ãre¨Å•\&U—™ÍV q÷CNvn.à{—{ &Åì+›Jì>µîØÂäÂÑ:ˆfæhµŽY¥ö ä»ÐˆE1~ ÜמK…G“Ј°¢X†<;ÄŒÌ:í.ók…LÌ5¿Ð~Zt<ø:#þFÌâÜ‘44+¬ÃìXá9Óò¦Rá€k#¦šK¸”±P4STÙ}b3 r]ìÔÜMCiÏÉJœÿS§Š“L&éR[=(‹lg‰>u1D1D1@C£K±"iõ—¡•å¢$®Ô‰RQ)k¦åŸ&ECÃᆑ”ʇ&\2‡VåR}o¯’¬¨úà´C-Ëe•”2 *žLÓ0tbFßž®©Š}YÕŒlZ}õöÌt6{]P?ΈA¿s±±2šÝúwÙßµNßµÙõÜ5~Í8¹SÜÿS0À¦§§#GP&3MÓò´½½}t/WzGQD±wQ ÅÅØ]“ƒŒiBfy®ó6À¹r#©hradÈ!|ê÷Nº¶‹µ-§Û·oÓÿƳº òõÁ¸Ö4úß„ˆbï¢ Š!ŠE|Œ9•Çí?ÛÎÞck§³ ]ì¯ý«åfŸž8ä]ÄÅNEû¿3ÅÅÅN”Qò|¨e #©œõ.öàÁnäË—/c±Xÿ‹¢¢ Š!Š!ŠõQÌe¡á!‡àp(žÉ)ºß‘.V(œO:ºqãÆ@1D1Ü> ˆbˆbˆbïŽI ¶:|ǺØgŸ}F?ýòË/ÿûßû/bˆbˆb€(†(†(p »Øèè¨(ŠøÃü1D1D1@CC8Í׋ù/bˆbˆb€(†(†(pFºXÿ‹¢¢ Š!Š!ŠÑŸuí+ëÍúZÅë^ôMÒ÷Iß*xw±+bˆbˆb€(æ¢Xÿ?{B_wÍúæ_'†ð¢o•¾[ðîbý/bˆbˆb€(†(†(Fßõ»-bèbô €WëCCD1D1D±ÚWï¶ áEß0xt±þ1D1D1@CCëÇ>bØ_¬;t±(bˆbˆb€(†(611qómE±ˆZ¤uBúQ…ðêEˆ"†(†(ˆbˆbwîÜü(F‹DCD1D1@C;1¯_¿¾~ýú G1Zž®ëˆbˆb€(†(ˆbˆb'ÜÅèR,ºEqТ-‰F˳NŠ¢ Š!Š¢¢˜¯PåÁ{NoÇ®áG±ª¼ûìÑŽÛëùÖ£Šºûm¤¥ýƒ´qw§Ï‡6µ§;[â΋§æŽb€(†(ˆbˆbˆbo¤[ÿó_ÿáñúÞ• ñEŸãÔÎ_Î7ýQ¾ÏQ¬˜¾Pÿ­Ïßzu–£ 
Š!Š¢¢¢ØFÒ£ˆ±×â‹Ý~F±»—ëýYz¹ÏQlkú½úï{9ùæ»ÅQ Q ÅÅÅÆІ&¿Rۯݭ\âr«‹]™6¾;Qìâw#Š¢¢ Š!Š ÚFûýb·¥—úêÕf;?±ÛÇ(vQ ÅÅQ Q¬Åþ"Énä”}å%Is|êÅòí›á?G?úÞù ï_»%>“ù™ŸåoßýÙÅsß;_ýè⥫7’Ò»Ü°Š”M\{ï{õ©Î}ïâ¥ÉôLE—ï]ñÅh=õcýˆŽUïwç~veôîâ²3l=˜œ¼6z{õ±±¿ü:\kåã7ÓÏ5÷(¶³˜œ¼1A¯Õ#·Ž¾Ú˜™¼16ùÑt¥zL«îmÎ éøøXÓøx<9=¿ypÈ Ó);¤Qtêi ÊKJTÓ¤|6Is5ÑTÓ3yEãGU+ÛóB{ª©tvcOççY˜INÕÕ:š1g¢¢ Š!Š!й]´u±Å.²½ç©M¸î;–X|Ì>¸Ûe •¯ÏØ%iûbú½.Ã|D±ç÷ÇÜg¸8±£;‹n¯ð–îÅÞHö7ó£4·ßÿþbÔ…EÍ+ŠUÖ…ˆ»±å½*¶½Œ¸šZÐÙ }›Ä,l·Gíu9¢°^i539q7³qàcÌ€(†(ˆbˆb¸}rc×¥7=lµ­¨`ØãÓ,o%„IÊÝK²Àô yQ•šÿYëñô´(.‹KÂäåsvcj¥®Ê"‹Y4L¤å¥û·~æ3нo±ñïßH®ŠùUç$—Óo›”±çi.®æ‹«ÂÕóö;WgE·Û'Ÿ¤ìc»ÎƒjËï7‘›9¯Û'õÍxÄ–Îæ76·7¤åéøX¤a<»m5h36l&¿±½½±6og1¹9jf,bOç×ë£æ§Û£äÃfËZ`ÑMXX£æ³íºµ\©’³­Oŧ—¥õlº½Ì½šÕ˘³Q Q ÅÅ&&&"žhÀÛD1¿Çê{£¢të®n¾R7Ç7'®^l$¤zº°ø¬™ÌäÛv]º$îï;&™¶“Óµ¬Iÿ”&í«ÏDÙq¬§©‹lÏ~gr:wok§=L'Ï÷Åä¿\´§VŸ´ß¯>JÐûvìÛqD1š3ç–·æ]÷Û™ ³Iا–ìPxïÙ®G«nÛµ+»©[mÚÌx3då-¢gÇ팵 ë–ãÃSöÛ3Z=¯e#MÉ|Õj“çíQÂF}T«¥Å7´Z{&9ßÊYë5«º·£™fµmÛóŒmÒì=Œ9;ÅÅQ QìÎ;O4àm¢˜ßcõ?Šy¼noÙõÇ|1ó=ûQ•O¸yŠéKÁáç:Å&ñö͉ɛé]çýQ¢Y».&éNLãu4÷ë­ ZROQŒÖ#ØÝÈò?bó§óŽ(6ú¼ê¶Kï;nå¢»äæŸgyµ?ràº*¦ªÌ ´°P±÷„±HÝø¼nYµÊZ¤a\ذ:Éy!>5•žY«Zµµ´ýI·:Tåé)’^Sª4UsPzíÀê´m_ù•TkëÓcv^›_?¨ÖZ«:ØÜ KÙ*;æ @CD1D±×¯__¿~=ÒýH×õ·ŠbþÕÿ(öþ•K?»Ì^›sMVšƒ%6øÂx4|Õñú‘ýþ%QuÜÛøBgî¦'ǯ\rÌ–|ã½—YõѸ#ŠíÜí¶ÝØÉãÑ™²½Øe:E1ÇÝ”ìÅÞ§1îOŸÜ_ºv®cß1ÍÎdWï?êñé“zEÙX_[ÈΤ§âf|¾Z¿ l>ÒÝö8ãÕù8ëh]±©h\:Ùa<ÒßЭƒõéˆÃX|JÈ.HÛ{U‹ñs¶ Š!Š¢¢kUt‰wo#ý“Þä+•ÿ(æ÷X±§˜ö(E͈¥«Þ.+K‰4rëþÄ÷ÜðQ,±*YÕ®óé“Åô…£“|ï£eË9‰ÈMÂ/úovh·1Ý£˜ó©Õ§Ž}Ð.Iªz\«ÉËöea ÅäyÇýÝTŽbl*/39•uû–MÎt~»fÙ<Çœˆbˆb€(†(æŸï(æÓ€<}ÒÜšv½2ë{×n=¸/Ü›f/úçâÙ³‚ôÅîîñöÕg×&ï=ÌJ’x >J]t½Rl‡?nU¼ê8è«gùÕ¥œ¸Ú~Ñ?wÔý΢çv¥»5ÒWã‡=±ïúŒÎPìkÖºÿ¹6c~}L«¬MGZè6ÈüÚúæ¶¢é{3ã.Q,»]=>ŠÅé#Ç_)6–Î.//çèŸkkkËùå̓Cìö¶×—ç§ÓSÎl—^«XŒÇ˜³Q Q ÅÅxˆb–noEÿ³éUú§ñLh]v$Bi‹b~ëM²sÛÞä>,íïsx±ädl¤íj&ˆüTûÙïõ¶§[üÕÙ#“¨¹µ¶3ß*бíöß“žåÞïö]±»çq<ÒÜÔjVÛáBÜåöÉäòžÕI^˜j–°Í*Ý>Ùåɇ²=(»}¸·Üº¬jqª•íÍMyO§ÿÔö¶éq’rÅ1¦VÝ\ì 6½^ëiÌ™€(†(ˆbˆbD1îz«¨`Ôÿùx²c—1öÚµw²ßmïj?éLK´I?U*¶Û8ŒbSQë˜j鯅7ÚwNÂÝϸzÓždrñ±õVQŒõ5öÛõXwyWdÆÙ¨h;üñˆã²¯CzÊdSZ©YÂXûé“ì)“éÎv¦I‚}Œ U™Ž4ŒÏXNµå¤ýéJm/i˜–¬úLópS Õ^ÆXg¢¢ 
Š!ŠqÅølôªñÎs²Q³mMˆÏžÕý7êãÅtØ‘±äÖ•bçRWß軆&o-¥䨶·ê(Pô:^Úx\Ÿjÿѽ›—¸Ýô½_l=Ôň^黓ŒîTÕ·ŒbÔé¯k¯jÚsU6½u¥ØØÌš\=<<¬ê²´ÐxÏNW•šE6¶¾°¹§Õj5½²¥í¥4&“Ø…ü¦vX«êÛk©š’ò¡E”…)6ÕÆÞªê•µlÒ~/»Í®S#ÉìÚž¦××¥UÖ[cÆ{sF Š!Š¢˜ˆbÜùÞhðéb·/s•|j¿ãòº$i´ÄB•ç^û4Õ_ܧòÅh’»WºÍpîÞÆ¿ÜžPÉ’‹b¯º|K4çÒû^ëáîô0Þ¼Xloz,ân¬}Íמc‡2Ž ±QÝhÙE¼yWå¡’t—j=9#ÅÅQÌ?D1zLd¤g4xà£Ø¹Ïv][×s?|ÄZÒêô÷pÉ÷o¤Ÿ«¬í®¦Ã\ŸJÜÏ?_½Õ¸.Œ^·Þ´¦ZJv޼ºŸ½=W?â}vDï׎(Lü¨s=?º2A[þ}%w Xó.ËïÝÈš,б{E/¶ÝþåôïÅ´JÙ$W¹„åMe#ÛøOze«­í—{$“œYÓj–ÓÁf>9Îeµäš¬YtižlSÓ ŠÞžKWÖÓq6„Í4³}ào ¢Øé‡(ˆbþ!н~ýúúõë‘Ð0]×/нýk÷ÕOv_<Þ}ñä•î6 ºûj_~£É•/d“5¸ý§ôÎºËÆœ9ˆbˆb€(†(滋Ñ%`÷QÒh+b§7Šá%ÚÛö‡·ô㢢 D1D1ðY‚ð2¶„ɛɻ…í[2?Êÿ)@C€ÓQ QŒƒ(†×é–cŸ²÷¶4Q Qì´AD1D1øFó•„ð2^d¯^¾ô³Ë—®Þ¸UÜïaŸ2ú†Q Q  ¢¢Ô¾z· /ú†Q Q  ¢¢Ðô7ÿzW=¯ÿgï|µ+õ½>|QÜ7À ÑH$‡CáP ƒÁ`0&""ïDDD~ëåÓº‡æ4m†éôû~7Àì“ìºVž•„¹ý¼?€ƒSÛ2 m]WeYVUÝvÓ¤ÞÿV”|›mÛä×w]ß›¨vð…5ض”hùŒÒ;€ƒû¾üß>Í‹Áˆýg¤¤Øg x“ÇÞcü¬Õþ÷!ËÀ{?Ȫ~Û?­µR\ÂT<5Ñ/¶ýK,ó=*Q¬)ŸÒ9Þ Ÿö¯á‹ ))(ã·]~¿í|1è°ÿ½åósþXR RLó!òÞ#,øþ—!«ÐûÉ,ÿˆž r¾Ÿ˜Ë§Ê„¥Ü¿ §û ÊÍR¬ŸÓ5E÷?Ó×ôj.½'òžCŠ}H1 Å ÅäyDR43Rý`csŸÇþO÷Zý—J±8¯»öuýÜ÷Nÿ‰–„£|$ÅH±/V?yQ­ìÅú7avᯒb{ Šŀû†RLµ±wvLí¬Ï½ƒj‘¥+gµ?b*£—äLòOHŠ RìŸyN–¶ëß…Ù…¿JŠ…bî@Š)öý¤o½ƒzQûcTyoÍ6å&Öu™çyYWÆ…Ò»ÖtQo|§™Kº¤ÕÆ–§ Ûîÿ_?³›h%ØÓïÍËÊ„úMR¬˜¶ý1,½"Ũ×GW&ä©ÕO½ÜÊc“Þ°éÛ%«³Ät@I~OäÂøfæÚ)ç§z©m™§£ '¿ÄÖ{‹Å¦~cwNÃK ¾ÌÏÝ£y~ÚZ¬‹8ÔÅþBQ–ÄKš£D9uAK~ ùKRL½4ƒ[‹¥6±.TVÇ÷ˆºµMÇJ±|¸]‚sRìÛ Å ÅtŸÞ~ÚY÷U>¬tGLMx~Z j?Pst_b¦ôÚ½ìÅÌǧ0ŠÕiä‰Ë®/ãG{»¶®Lý?×ËóÌ<ôHu]–bªŽçÑë¨y»ìÏüûÈ—ok©¢{³a7`mJg]]¬‹ý…Ò¼=E ±10õÎx#do=LúU~PŠEå0wÅë´FùbØ–. éÉÔü-˘? jM R̤ط@ŠAŠÉ—‰f5K§Õ5bȽ·‰ªùì#ü¬:ÍióiÛåœx΋³ôšÖ3ó‹RŒ5ƾGŒnÇ­¥Ž½7 z®éȪ3~>’O¡O~,¦µ¤Þ#7ª÷ 9w%¬¶ßÁ^’bVòA¼>h?,¥¥X¢÷ƒ·è·†™ß2}©.öjçE`{)'¹¿;ÞnA(.aû+ññaàçb'X—Yž-¾xÌÏeøðÖ(!ŠŠŤ¤ØŒz‰½;AR B°e,“à¸Z¼%;ü0ÎòbäÿUÓ ìFþ¡ˆýGRL·1]MÊvaœ³¹-誟ö‡‚«s8SL‹:ñ=:gÝ*_Ì[”^/Êêi½5zêªèégîyß4][6¨Û¶iº‰ST¹Iy4ï#1m¨†ræçM¿r!øòs&óQ8åܬr”7·Á¦âÖ,"H«‰‰M°¾L^® Ûõnu1¤XXu#‚¯CÒ5*÷1H¥­XªŽÎ‹¶È—…•¼ZË ¥V –íÀnØØ•EØÞoº§%bQÝO\ˆuî)-^<«J±0­¦…‰ É¯êØ£­Yós]Ûq¹uxhŸzÖqýÑ1¯XßvEt\‰ó¶k›Æ®ˆ!ŠŀûÖRl Éa9 ×æ˜#7útg+‚cY̓yrØ.›1%qjS›ø¯¤˜Ziþ\=—hzßßeÎ6öý0 }? 
ýÑ3Å¢´¨ˆ²(ò,½û9ß^ã£vg)6á3yJRô0?:£¥Í–6;Å÷œ¿«´ÂRšþ‚{=‚³#¿sYŠ¢{u|ØRÝío½¨Ëu1úb æ.‹IM6)f4Ãý­°¶­œíÍVK}Ø·qÄT„§£ýícR R R @ŠAй³•ÁãÓÁßÞÒø´¦L½úp¤„Q’æuSúRŒ,À×K±7\Ô˜‡4Ͷôˆö6š·Ô:uE–½”¾Dq’—u•úv)fþ5ð߆NashÛ%)öñ1)æ¤ßUŠAŠÑ JÆ‘ù&¼K_SÇ”õ†Ÿô+gOxÛ¤mŸ¤¯Ñ`™O3vzø3·O’/0ÍË3QmíÑ 5Wö9¹ÚÄ2vYtëÉ8')æÓDž•Ñü¾mšfX„[ÎÿñRŒêòéRŒNµ¶‹Î<8ïR]ŒnâïFRõBRÕôR}LŠé> îÕnå²sØ>é0æ!Åà»J1H1úìÝA>ˆý1,¥/®´°åÁ^ä÷iµUŠÑÉâtÝ4%^ªèÂAû—¤¹† ÜÎ= Š‘£ï R§´’0 ƒÐÜ:7V1ª–SKìûXb>f.ß~fŽ¥ŠÍ-ç_/Åœêr]ŠY‹µ&÷—]›«í®ÕÅH”‘ ?íwOWÎr·va©Œ3ÑQFaQÖ© RÌù }å0æ!ÅàK1H1ÚðuPŽ&ÃUì{wâEí)6Ó®”YæÉ'ç׋¤r>B˜Ð¬õª³½âCá=CQsßÝh»Yèß!ÅHóÑó{bxê)uVNűœ®¥vœšÊñjÝlm²CÌÇÿ„cÇO¦’4¥cο^йÔ庳‹"1Uêr]ŒD‚lØ‰Ó •´l·vAóÖ?=nû9‹ùK}MŠÑ¡þ‡Ð:ÿ§u¨Yï0æ) ~R R €ï(Šňm ="ˆóv˜çœ-CS†‘õü¾éòeBëÇ͸pΗ©/âÀ#‚j\•~QƒðB]¬–™ŠÕÞzÏ–¾9½P~R¯›²tá\ô oúç8mIWËY^Þ>I¦›>ï:ÌŒ³©¯cŸÚÛ ·1¯Ù!õü¨¨ê¦›¤˜ H1 Å Åö]ó>òÞ!­gÓGØÉ§Í6OÖkx6ü”išôÛŽJaL¿mª‹Ž.²=y^SC:@ ™gƒ:»6‰å± ëõiçáçãnàÓ‚#K•Ûußsnœdod/(7‹ÒºÁ­.´]1]¤Øf/ÖãÕ—)Û‰ku±‹§­ = ´ÓÞµf¾%ãnb–f³>·4¸…ó˜×Kt¾3JH1 b@ŠAŠb¬+Ó‡$ˆó‘©ý ëË×ûQ501W§Ó¯ÕÑad&r¨óЧ~˜´ ŸÊˆg²¯²Àœ×ç™/I1¿YÕnN§ãä t›ú‡£ëöƒ¥+ü×Y*>%Æ‘ÿl¬c3­~X´³>5›•ItÄÞžKÍpBˣ͌S ÿÔ{ôÿìÝ‹këþ¨ñ?*À¹…;Ꭾ ÂM \䔋@!ˆÁcA ¸ ”QˆA!±œ¡4nWª¤â¦º¦5Õ3Ò©¶“2'iúm'o'ãtXíZ³Î󡀫f2×½×ÞïûQŒ(ňb—DY0>#Ci–ôÎU/Yyµ¢ŒnˆbD1€(Fó±[™ÓÕÊ´ã1-®£7æSêŠ`‰8FGïöŒŠL=,6{@#ŠD1¢˜BétÊ/_³½ÄåíéR׈bD1€(F àvêó™ÔYéb­íx@òf‹\³ëzQŒ(ňb¿ZÚ_Ó´f³Ñlµ Ëõ’pmSï´G:†íz ¢Q ŠÅD1¢@#Šˆb Dû§ Šy—ƒ( ŠņÃáÁÁÁúúºa£ô.pxxhÆúúúÁÁÁp8$Šý Q @#Šmnn†±³³ã] `ggÇ0ŒÍÍÍ¿'ŠÅD1¢˜ëºƒÁÀ0 Ó4Gÿè]0`8š¦iÆ`0p]÷ï‰bD1QŒ(6ºú“.¶³³sqó(À¬ÉIÛØØØßß'Š]vňbþ”»»»ã.v€qÛÝÝõÏ$Š%2ŠˆbI,vpp°··7 677Çëî_`}}}sss0ìíí0Lìï‰b¢QLéb£Û°wĹÀÞ‘ýý}¥ˆÅ’ÅD±þ[ÖßÅFöÅÞ_Ø#¾"Æ01¢ØßˆbD1qÀ=2<"ElÌQ ’(–À.¦¤1á‘¡8ÍaÂQ ’(–ü4\0rQ @û‡¦1àByÿnD1QŒLx Šˆbÿ&ˆb¢ÅD1 Šˆb@Å€(ÉF¢€(D1Q ˆb¢Å Ùˆb@Å€( ŠQ @¢$Q ˆb¢ÅD1 Šˆb@€d#ŠQ @¢€(D1Q ˆbPD1 Šˆb@Å€( Šý“ йûöÏÑë…rÆŸÿ8ûò‡a¼Ɇ |C×ÞÞ“ xé.oï£É£’‡öO9Ú ½ûQ Ŷ·®þïæÿhKÛÞL?¿|쿞hGhÞ×÷¼óX©ÝðÐÝwÓ‚ÏbûîÍ£+sóƒå] wxèì»®§¸ø½‹µ—Ë£ɾþñ~ö¯^ÀѪw_½5Q @b0Rl÷þ­IíjÞÐfV±­å•ã rçãÿ}¾4ùóÃóF±3ÚÆÇãÄðvÇKˆÕçËSgñ³ÿÛäÊÜúhyçàñ­É^:[ž¸¼½Ë‰Ë3`{ÿxûýkp´ê3¬Þ€(É™>¹öêÍqì¸g8^ ÷Ù½ÓpfjLþüô³ë‡C%Š]­õ“Å®ªWï",ܙ܂¥ÿüôÄ¥í]I%1ŠÝŠr´ñŸaõÖD1HÌBûƒ+ÇS#—–¶ƒçNþ&øÏø‡ñ–jòo¨D±ß’Å|kW 
=q±QlEÊË%í]ØýíµUãª:*-ùQ,þ3~k¢\r‹\š·—·ÃæN>øäxcã$0<ôT‡Ö¨›|þ¾j Ö6·ûj5S6:*{ß›ñaùüÐÝúúcÕŸ…98øE£mƒílmVÕìï…œ ½½»~ôɵÍfב/”/ß{|çdé·CÿÅÙ»3úÎÏGßùù‡\%ÅщŸÃ`g͘\¨Àʶµü^nVÈM¿,ÃÑZ“;2}Ò ¹­qŸáˆ·&à zQ þ!QL&…©³ðÔ¹“wßîΪBkËe4ÙéÏ•;+‹_s’üYùY^Ùï ;/ž¼ùïA;ZúêúNgRñ–ž¾Ý¸}gQùðõ'Ÿúézr|äwÞ?}ùþŠòå·Þ<ÓwC£ØÎÝ›A#’†;ÏU»ÿº¯\á¾þéú­³—bñöË-ÇwxÊϵçßCön}8ñ+÷V7Îåfg­ÿýþ=åó‹÷µç³®­?ïv#Þôð»ðÙÿ±ÃÅßoÐëžOÿ]çxá¹ñ~'Ÿ¯¨wêÎrvÅ¢ÝÖØÏpØ­öæ—³7âÆó/¶‡?¢$ ŠÉ;(e]¤à¹“Ë«ûUHV% þÑû‡AQl)ðò÷@;%µŸÊjè³n¾7‡ÓõäfØçï.o‡/´¯®´µ?¸=ûÛ®ü¾áªÅ'øçÚËï²jØ™/yòmÖÞÍå÷!ßùðÝŽz.A?j‡’·O^«}q¢ßô ë¯7üíyßÿÞ¾ÙôH‡¯$–=Ûtƒ;3xÁ8ˆq[ã=Ãá·fdë݇™ßùÀ°½äQ ’Å„ûìÁÉ ÊÏgëíû“”\…ö¿_;‰8 ÆÊæöV{eyíÚMi [îo/jïÉgt^,ÿùìõ—­¡S¤Ü=Ö¶Ìþn¿¿ýŸåWO Îê^`ë¹ýrc}üáÁ«ç+§¿—2%CŠäçÎÊ+ýG°»¦oÜ>mp‹¯³HÐÚUr1e¼Õë/“½¿¨_u×®Ëo®>ú¸ôy{´_óó·Ç4ÐôÁî,¾þôJûpú…¯þ|1ú¾¸w÷ë'ßx¨•«ƒþ`gíݧ7gŸ‹\Õ…åoæ`wksëî“«±æëS®½?Þ0úMÖ߸"Çl©¿œô¯7kC_,;î˜-Pv’AŸ¾ío v×/rÀãmW÷Ï[ã=Ãá·FNsœÿÆß¹ÓïÿX|uÚ+¯½xH>¢$6Љ¾Ä¯ÿ~ï“Ë볪 %{o{>Û_$ t¬  GÜMC }žížmz>ÖjG¾6 Š=žüRXúÚéðŸÁ™zroÍòüöN²Ý— ÓÏRÚ‹¯‡Á/ú|ô§;>}ùäƒOîô~ÞRÆ>½'ãõ†!ËÉ»/NzÜå\d€•r.Ò’¦‰þ¸¡4&Eä›.”³S^ì ëÖÉÏÓM×ó_LyƒûõOùþ?¦ìàÙ£éÙ—Ñokìg8ìÖ¾ã¹1¿œÏŸNNa}è!ùˆbð(&AfPªs'åÿÀΔôøíÀõN­¯~yõvcñÝÀ=³aŒ·O®/¼þàÍõGk[jΛÅ®üþÅ;cqAó•8ÿ‰/ŒÝ|¹ê_ï,B[}¹<³ôýì_¿¹xõÖâõç[îø<~ôftReJ£Ø¹K¾3ä‡ÊÞ%îÈ¢ïÁC´dªéÔ¹Üx­|Z&-ÊàŠvÓƒÉLLY¥nìxü×ÍÅÉ^—;±.ñég÷耗äâxŠ¡Œöº³fŸ÷¶Æ{†Cn;:­UL=!ÉG€äF1é 2¶åöÛeîäUY&)¨ ìÞŸZ¹iñúïž-o­÷¥ˆøQLåZýíU}ëÕëµ»&•DbÊ„J!;U£˜ÌUôÎ:x,É#ZseˆÐ¢ ‹Äù¹gn~_z»±P{ÿ›\ÏóF1gó“ºl™r.2Dk:Š-¾èß,ù¤*ü¦‡š øšª–òº€ë¯¶&¾ä…® ÂZ1åÚʯO_®-œü<_­¦ï[]î¼·5þ3¬Þà&ýQ[xµöب/û÷šêªjH>¢$0Š kõƒo¥s™±(ÓÙºÀ #xEsíîË/Ö_ÅúÆÆÝKꛃ¢˜ÌÎ y«€”©'þ‚ÇõÈÛã˜Ô‚ôkÃWÏßÿvkQ=xQì³!Í(ø\VjK² ½œ‹T¤èQ,úMW©MMj×àx–â¦k¯÷fuè¿5[þ’ò#£ÛÎ}[#žNÄ(fë#§<öH>¢$;ŠÉºïòïeqÜ„»®o<\øã·³eáÎÇþ_ÅÖ^ý1õŽ¿›ÚµÜn,ü¾8#Ši‹ƒCOå;¯{k¶§”—+rØ Fô(&×0Üþ÷S×jñêå¿wž¾þxåR£ØRœ(妋«ú¢ïÙïÞËš\§ËÌ-lºòxÈ4ýǷNîþ⬟Éúhç½­O'r;]}桎ÖÚ— ¶H>¢$4Љ%Yoëîêžuœ*š×^~ŸA¦p¶w×V7îÞ[ôá:ˆÅÔE¬š7^~éÿ<ô„ûyí|Ó'õÓ'e8U„(öôž!…»úvô’ÍOKŸw%QM²ã‹›»®'díöËš>7ŠE¸é³8¿nhƒÿÔ–d-°Ó2{ýõ@s¬gw(ëÁç<†ˆq[c?ÃÁ·¦¿q•±` Š@‚¢˜c¦«¿¯-<Ò‚œR»€;غqgé·[K“¥ÐýF±C²Ú ~“OÎZ;ßz×9O¤,´ÿh# ü=ÑNWt’z"g<×R’G´…öŸûÏZý¶+Çìƒ%]&(ŸíÞWÞ>é+/ÿùKÚßÅÎqÓCÈ:bWî­Ü¸#íU&íÊï原Y¡ÿöòާÚ}|oéêíÚ†sÎÛó¹5û²êÿÍ÷[žª¿úñê­¥«·–_lº’(ÉŽbbûö™e­¬ÐµÆmýƒ¨?]õ%ƒoŽÇ½ÛU7 
´LùäÕ…-Ïïçé ÄÚ¶r„ã}M¿ØÑùlH’Ø'õä(B}˜þöƒgOúšëEŠb¾õ¤–^v¡NÙ“8;Š­žN]1Ï, ¿:ôÄŒeþer¨åùí<¼Ó”É•ãÌ?ŠE¿éÁÔÞ¤Lc”7NÊô ÷sûÊí{óŸmϯ/mô¿O)úmÿ ÞõµמSâÝC™úL^Å€ä#Š@Ò£˜Nßñ,¶¡/àóºi^}ðaQÿ¾þu{ýsÿYMÊNàõòB9µüð¥ñlù»ã©ä“ŸN©öçÚ×­¯ƒÅ×Ùõ¤hëÛ®oGòù'ŸV7··¾þX|õ~¼;y™€«t¥ã#yóìÝ÷­þκþÅ?{îÙ¦ëE‰b²ô•l¨=Ô¾™ý]ó󷇓‘w²Ø™tÙéê`´Ó5ýËÚ•÷…ÕÎp:²üþáé«O«WÝ»\R_Ðüct.f{å­qýt¬Eipñ£Xô›‘y¯Ç Ší˽–I “ø5ù½8|ñèôâÜýeò$¼x.û•ƒç»­ñŸáà[£<±Wtõ£û»úçɳ!ÓE‘|D1H|S“„Ì ¨fJX-í èçêÂwƆÞppmêÚon bo!ô̓¾i‰R.B~n®¬íO¶ºöùÛÚÀÓW²”å‰Á·k¡ß¦Ì ÿ¹¯ïÉÌAßù>ù6kïæÛNÈ·=\Ýõ•¯%uÞ_Ä(ù¦‡ú!£ü&oCù½ ûš²ÿãîÍ÷9~ó/Bý¶Æ†ƒoÍØºö>ä™K/ù@K>D1uA+NåxªþqsY|ñÕõ„¹j\¿u¶=-=Ô¾»á¾ûøÛ­EÙD]õ\©!-)»¸öûšùswáÞédOÛ?œçí·OÆVŽmÎ@¼÷qeõÓ5%£ÜzóLß ;}Ù\y·ÿãé‚o×2të…±ëù˜o?^U/ÚòÂ»í¾±¦®X¿¿ýøÑò¸ýÉ‚h!{·7Gã¡4µì<è,}=PÖ:K‰bÁw$úM·ô$p²Ã“I ÷ƒç`ýq%àºýðN ·'íì·'_Öô_ßÖøÏpà­–±qã΢úÐ.¬­ÿô’D±„@»ÎϽ­Ñ¼³Íí­þ®ýÓõ.€»?ÙÅs´‹¡'Ü­Í£ýZû‡¾ #9ixÐï«›ˆ EÁ¶wͯãoëoKBŠmèNv=úÙÌú¶Ck°cŽ>óujÎöÎÑ1ï¹^LîO¹V_wFW&™7=D”k+ÔÛz©§ãüíúèP¿îØû@p!ˆbP~ £D±ˆ€(D1¢Å’QŒ(&+²~îÅ’ ŠÅÅ ¯”÷6†‘ÙåÍ•ñQ,ù\«£ÕK…¹l&“NM¤3™ì\q¾ÑÒm×ûÿƒÝÑšMíH³ÙÒ­â!š¦ï{®¢FÜ}×þy0úq†^$ÃÉç]/ Š%Ÿ­U ©_H«-çßŸÄæ§Î¹Ôòþ®3ÅUޱœIùdZ¶¢àòÅß ™TTÙyÓýwG±òôé¶½Kg·çS~™yËó³+Ù©clÛˆb€ËDK>»OSºh¸D±K8‘-ÛÊH±é(ÆH1¢àRÅ’Ï*gRÁÒ™\.›I§‚åkŽ÷ïdëÕôèä'Ò鹚þ÷G±ŒŪùŒâèÝñ@\¢Xòu…ÔÙbUïÙž°ºò\@9›oY.o¤X€( Š%B·˜N)ò•ŽÀÕæ³)…¯Ô¸Ž}Âqå—–ÙjÖ«•J¥Öô¿ѵ{m­Q­VFªµzK7o̱O¹^ ×êm­Y;Ú¸:Ú¸ÑÔZm£k®ToŸr¼ §×‘]6×Úºåmjû8®úëp²…Âu,£ÓnÖkrÞõfSkw Ë9³kÇí¶æ•ùª¦süWêÙ…^®žÑi6Æ{œœo³ÕéÊþb_®xÂO?”Ó5:²áxËÑaôäŒC8V·£5krôõ†¦›½_nf÷ÌVs|ÊcGWLö¶I§¥ a«cöœ8›$ ŠˆbÉŽaR¤UΦ5ÓõF\=ŸòÉÕ\ÏmÎçƒ^h5ä÷ÓršÙ­fý¿¨:žŸ«7ÊÙtj¦t®¢™žO{ºâÕ «S-¤‚”êzÈeÉ”ÚÊâ÷Ì®¿äuÊ…\j¶\±j:ÇûȤÂå:ŽºÐ¾\^?·SŸŸõU™|©Õu<ÿr…‰uú*·]+Í:‹ì\¹Ósg´Àöü\vÖvåF'p3Ûh²és¡¥7æ‚6‘G²Ø28›$ Šˆb ¥tQ5œˆ¯D”r¥Oþ";æ§_f)¯Gì•s©¨2eË·çú\:R‹ªžèÈ΢(4ÌÐ…öå÷(+ß;F-íÆ×Þiç"}søÛ'»QN½$§ÿr…‰súÓz•|ê—*mKݬá6å+ÊfZá—Û(»²dG᪺g“D1Q,‘$¬ø¥‹]/”kÌŽ,ó§œtJ%ÕF+eR‘¨K˷˹5J*OD3nK§Óç;fÿ*r);r~9R,ó«(fW"ﲤõ<ër…ˆsúÓoˆú ø§æ:z5êfEÍFm.êYË®dtdy݉³I¢€(–@…+3ß:wGË”-‰b¡Ò­OÍtJ57_mjÍêü\h3 Êwåçۦ帮cwµŠºí|Û «<™|¹Z-óAÃsœ_D1½šIg²>™L6—KÏNWžY/œ™{ضÇuœ®ÞœË¨7ÀmÒjÔÍr^=çj£Q¯×kõf× ‹bÝf1 7UëZ³¡Îa”ižñ/W˜˜§?ÑÓJgQkhZ£”Ϥ¹š#Û•Òg6+Ì7«*EÖ7.ÒÒÔ¿JçʵF«Õ,²ê·•gÒl­m™¥kÕ¬úLÚq6I@ÅHó(õ'úŒKIW3£ØÜ|­­›]Óè´;‹ÿGÍi5Ãñ„ÛmfgE1[KO Íö|œr6jËÍk®lfµ+©€ 
Å‚Ò}ü檺w¤5bÒÅ–çãÌ~Ť«O^¶âKP!Q¬[L«7µë‰ ³ž«ñ/W¸ø§o•Õ^V2Ýkžï8ÁÓ{‹ Ó_ éé¬è9Wµ¼S]m>0#ª…+_÷|zÍÂtîkÇÙ$Q @K$³>g“t!Š¥«ÛqÊÙ€ágÖæ‚£˜ÛmT}êmÇóqÍb:b+t=?G™f˜¯êçb]­4 PóD¯Ý¨TE¥ÚîºS§Ü(œ²ƒúW¢˜£WÎL-ôí€;ûr…ˆúŽQUž¥º$1aä§·<:}§–ŸÞl®îªÏX^ ONG9-]¶n-ŸžzÀZ–Ü uáÿ–Þu\yc¦9bL˜='Î& ˆb¢X9|ðÔ°n³˜Q¶‘(Þ׺ÅtX¬Qf±I" äZ½®¡w´f£"ëùG‰b™R+<ðÉ1GbvgºÝHˆ ‰®c›ÓµJA¦þ•QLÙ*Wq<•kÖÓSG\êÆ¿\!âŸ~à2…Õ5N˜]ËMÂÆ²9–qÊ´ÜÑF­ãÄ\¹Rž?U®ør®¤49È`™\¾T®k—çu¢Å. í§ × 0¯m^Ö Ú£LÕ”ˆ£pä3AȵŒz¹”ÏfRª¨QL8±£˜ _ 8”l¹¸x[«^šËgÒ)Å_ÅôÊô•;14§¾š öå ÿôÚ\œ¹Æ\ð3Æn—SQ)ÇãÔe!rŪéx"Æ& ˆb¢XÒ¨ÓÍdm¦–– |…ŸÅrm'dý2ù€ÂnS×uúGD1»O‘.n@ Ê?úHúX*«ØgR¬ãÅçÚ½ŽVŸ/äRŠtÑŒµIò¢€(–€”"]ìXžÂíµ‚>Ø ™‘§ÐŠeëŠo7V§šžL§nXTDzõJ™K‹bN}.:£2«#öš™éëÕUg£–¢F±l%R yo£ÐJu§Å⟾£W~q®®TÍoWÍ…¿}Rê˜)µd|YØsÍfq®P(Žæ u}|˜ÍÒx»ôXÀ¼ãž:á7Ó²ãl’€( Š%”kÖRAæJÕVG7MSok•¢Ä”7HFˆb2@I‘+–ëõš “ Œbê3%Šõ$¬\|s[Aëc¦7¢DWâNXêÍgÂ×óË·ºŽ|sp þÎtaÜp„Ù,L˜½€(÷ô%t¦Õþª;ž°%å ¥ÞŒ‡¡P7ç[–'ôÚ\*’ìéFN'ŸŠªÒ±ãl’€( Š%Ÿ^/¥"I—›¦'"G1áv«ÅÀÉ’s­®YÉN¦³«¹s¥•–Œ€Ë†Gù@Ü(B¶²;•TD¼„]˦-QLÙÐÇn³¿¼NUYÓ-þå ûô…]ÿåYdKºã)l½žIýB±n¨É¯Vøu¨j÷¦Ÿåf6Ò˜J=Î&‰ˆb¢Xò¹–Q›/Ì él±Ò0mO„D±œÅ®m´µz­ÑÑ[†¦w-oÄídg¾sÐj̯kVªwºJyÉU'šõÂôˆ¢¶çò%Šå*8QL¾ÍÒùLСçK^·’ d5Ë…L:Bó¯Ú.ºíz!x3Ós¥šzã_®0ñO_t[µ¹l:è$råFÇõfpÌÚü\: W(wzÁÛÙ†VÊgƒ¯Ø|}Æ“ßm”gþG&“+6u+Î&Iˆb¢Xò¹½®ÑÖšõ‘Z­^oh­¶Ñµ\OÄb6+ÅbéD¥i†®Äï_aJ8–©wZ-MPG7{rHI`õÌNûèÐ[mÝ0-Çõ"s]Ƕm˲lÛvÜó´k÷Œã‹6¾j†Ùsuúrç{Æxû±ÉÓq»ži´[ÇÛÎÞŠpò® `ú$ÅD1 Šˆb@€d#ŠQ @¢€(D1Q ˆblD1 Šˆb@Å€( ŠQ ’(D1Q ˆb¢ÅD1 Š@²Å€( ŠQ @¢€(D1H6¢ÅD1 Šˆb@Å€(ÉF¢€(D1Q ˆb¢Å Ùˆb@Å€( ŠQ @û§r,½­Õª•òX¥Zk´uÓñ’Æul˲mçòw;á¸^r¹¶mYQN§â¸ê#u)°›üK‘|êŸÄz†ùO’ÿÛðÿhð/Aʼn@KǬÿ{×úä´’Ýÿ(þ€¼”wB•òçIâ¼öá<¨õ> ® Ty ‰ÙÜà,Á T\óÁÜŠ*ÔuA¢Ì–˜]ݹ˜‡VâÙïŒæŽ4C5é¹},·Ü#fVÃö¬ÖésNŸn©î>]:¥Ú2{ûƯªÄZ×Ã]«ƒE£ïÌV]ýU³Âý:ƒñÚL§ä?*özVRjÍÚÃV;?äÞÊá]ñ^O²ÆpvÿóµKHdï¶Ù»F¾‚0´JHHRLBBBBB’bû‹ 1 ÞµæFûƒGèÛ¢6Â]«£ÁÞôý-€Õè×\ß·¤XÏj¼µ =»oÍðóû‘UÜѺP·ö €íF¿6kïH1D ñ®xÿÍ1K g÷?_»„ÄÞw[þQ›—‡ ?´¾¤hÓ6 IнÌaF¬\×l/èE¡ç˜2\„÷¼ü/®¡(¶¢]«£Ù÷JaxM×.Çß¶œhÿNl²“bjÝÊuTHRŒ à|’bH’ËÃýÏ×.!±÷Ý–{Ôæê!È­ûœ5îzŸûŸû_ø¦õÛ_ÿä§¿r•~èô¿ôKzIrd’Û§€w8†‚‘–@Ì7ëP¢ÞÙ7ILÈî:Ñ|ÕàÃMR æ?¹ IŠY9&ÅÀjIŠeálþ×.!±·Ýµ¹%Å`•ØÏ¤ ýï­g¿UýäÇ¿d>” £Å$5&!!!!I±}ý"K¡hã·Gzí -ÁýþI¿Cßµ-Ûï }…žãضí8^Åß“)š„ëîv]ÏúʼnÛzO‹î† “¥3½ožëíz^FœäíÞPÍ WFÆ×Q¾T&“· tðwT 
¥}¾0øÜc"½=CàÈuû ¥Í@%¯¯­Œ÷wh5Ø‚Bsû«t'0³\oLÃ%âg»°ã“çD¡c[,®z®ØÃΛ°Ç¤XÄ"s2%‘ ¤XÄ"Öuø^† Ì@¤]pEÆ¡€˜øº¢tG@yQ¿ÙÌoRL8²EÀžB)ÌH‹ð?\Ænø ¸ù9„ûQ–fÐuáZ*ƒ™ˆ Ç>/ðÅrÒmáQ‹d²?3¼`$Hä ’ó‚túúÿõ«Ç?þò…ïÒýcäÒãÆ“îË- IŠí+½¬°ÙްdXWÙþJÍÝ¢ˆìøe·éDÄÕKJÿj­öÓöWâë€RC7%~gA`i%õ¥Ò4£‘ù¡Rs|»VTFJÖÚÎÀ¯Æ×ÕFî@Õ}Âüá·k¥U Ó‹y«&¢Ø°¹I, tôJA•Y¬.xa ¹ª;v»¦Œ®9ÉY³£7 iz^7‡sac,C±Ö¶ÍÔù@ÏhTF (”™þã<\´¢á Þ å†1¬®ÓÜ‘ShE[‘>h¥œc¸®Q«º¹}/ï´RÝÆFÞá(+1› ‰Ä`-inr7s­ß•Ì`0¥,6L[¯gWÀ%ÓµêEUØËPñ†‚ƒ(Þ¥+DÀ ܯ…–‘ÒÇK5- œ_¨¶šÕÑÒjI³)ÆÅ0¨ª¥¨ªÖ4舑Vì®väà†Àqð;)&¨¥šåGxKñC.¸ºð-…7äˆç¾Xº-×5ƒL†‡`Öw¬ ‰œ‘bgéÿîÛ1Ûõ“e£öß ßýôÓÏÖ‡Ë,­nÐ/é%Z .ùK•ïÜøþò–„„„„„$Åòn¾J¡yD\‹=xTªMxîØÔ³Ë„€@¤¢Ø´á%XÊ @BhГT%¼:û‚E«ÇëpPjq¤˜§WB릟\Äê˜L§Up|åøÄ;‚º8üzal±¦ÝÛ1<µ„Úéñ;p9”Z¬s`±Ö¬273RŒS‡B3ä£br‡ãMàáj%V³µð€L*iÑ0OÍ.hnô®•D0²—ácK­Àëè u˜;#¾¡iz»o¼-*PÓ-?•#žZ«•vÇ¡BS++P½î~†V¨4-êÐïhU œh„Ê,V[–ëù¾géÍ¢20È7d£:ÕÛ;žôlXÂ&°M¬¶–aQ¯¸¶Q %;¢*yF[¯3µ”R­­S¯°i”¯D¶öiË-°«Ør¸™3“T©Ö[(éœbMÛŽϪ3›™ƒ›õYàòà;3L ¼Ãñ&¤î…cûàËøJÉ!Cd‹,”D0®—!C4P0Ia íQ{a‡‚Ôˆ­4uw»°£7*ð-ã;Àfƒn¹tÄs,½¢r?žð1¼¦´)5#®½]‡ok;Ò¢üµã7th‰¹-[ª¶†ÓÑJ fÞR|ç_º¥°fâƒù¼@?VrÐm¹®d2<³¾`%HHä†ó‚ñ±Ÿ(gÚ?Øx…š„Ób´0½…ÞøË•ï<ùô¥¸|ϹÛßwyìîþî/º‡òÑ”dóu´AȤ?ï9žž,ïàé;³Ï^½IÀÚ‰Á-'zï‚ÿ"$ÚLwò¡‹Ý­ÜAB’bû‘«Áâ/$ƒ<Èðkh¡íð4PÁ ÷·ËÊ)F ”4’º[³Ào,Ñ{Ä-ƒVéôžÕ¤ª( dbÒ¢¹$¡J§ÆýnLñW´‰`»SÔ. 
´fK–¸U6JÕµc¡M+9E/³ïíä6jÂÀÐ`þ­…‚ftZ°oÔ# ŒÚÈ|€xm&³lG jåÑÝÄm±h²·dÐîå–“ÔDwÉèÌY­¶ÁŠ!电!!Ä.Ž9B¯(bBïðILo=.˜Ì$_O¬i¹tHÁ³J"ÑËð‘>PÁÒÞ¶~(HDlcdÓ´Õ„U6ÇÂG„°Y„EŠ%[µì˰ê§Ü&È‘á¨=¸MZãmª2’‘g¦¨°–â#j<Ðuá[ k&:ÈÑÏ L±\t[®k`™ ÁìïX ù€„$Åh45X<ç$_ÀrÃ?ùçôCÿà¯Ò[â{iñÔ®çÞc<½ý½VRb£û¹\4å«ó1·uÜ^DÓh3㎢ϩk+ˆHŽ?3—Ÿ¡HdŒÀn®ñN>¬å“¤Ø~Frf[ôÞX:4 ×~r[DÃê §…b{\N‚©)6HêQ¢h˜~²¨_SÓó‚•5ж›1­Ä¤X ›¼à‰îêE¹9¢J`Àk+æð>nO¬WâÁÖ ðgÙM+ DIÄ6 ‡lÜŽ¡Ó¦Cù_`ê Z æÃûbê`'GHš=áécÜ®+€Í¢¥a÷˜n “›YÁ^®!TnÀ­²Á;oB: ST¾úýeVŠWTb“@—Í[N´{JŠÑËðÁ ™±ÛýöÂà1¥bðrŒªšèk`‚¢Cƒt’1¡b˜Úò9⿤(ªªÐ±• GZ„ÿ¡äƒ>´„?É€sø6-oÛª–Ì€`-ÅG”Ⱥð-…6äØç¦X>º-×5ƒL†‡`öw¬„|@B’bºõ,žÿî©Nê±?û—›qúGêz1zc\`zî9ŠJ8¾ÿI1IŠå¢)_MŒ™©Ù/P7Ì^œ>ëø­K·—º«¯¢ .¯Îšw.?Åãõô¹‚'­]ãˆÚRLB’b’3-);dOi%XÁ¡×ZyÔ =×6 ½Y¯²4»<)¦´¹Ì&pd†‹ì¬9rÇŸ|åÙ–©kÍ*$⌃…x/†ó† B˜ÚP˜÷<›l5eda]ÓM×ǼUGŒuOò)r˜ °¸O©6ZÍ!4š­f5™%}RD<)«Ò{Ía´jÔ0È·„Câsý¡!*nҚЪ£H1„Ãñ&:P'CÓK-¶Ù¬ØŠâÖéK«x»©¤8€½ Ü@‘™Ûƒö±P`b°«N8´Ä*è)Y-ÅGTöºð-…7äÈç¢XŽº-ß5ăL†‡`öw´„|@B’b‡N~²YÿËÆ÷¼ôø›«gc¢íÅsàÎúŸ[÷7ß )vX’bï $)–À«dÕ„… S x $í•þ¾H|½Q-¨|öiŽcÓì ¤XÐ(0ÁU#ÅD[¯– ¼*oIŠ9-æÝÎV¬z²’ï¨ óg1%Xåt-ÕjÓ Å >7± °…ÎxR¬g5P"êlª‘¤Ä$g‚C8|ü„Ø®Àˆÿ×t#¯Ï"—lõª5sk7•0¢—áCÐvøÙõÞ¶~(·ÀÆX~m,pо܀Ëaà R!)­«Š@´XRl¢Á Z<`ÖÏn)>¢²×…o)¼™ø G>/ÅrÔmù®!n¯ Áìïh 9€„$Åœ'«ñlü—¾?®Ì]ﳿú·9ú¡Œ+Coå|ÿiï“b›¯Ÿ­,¸Kóî²·üJÔÕW_Þ²<ï.-îg’Zž7탣™×í…[bÌtm'ë¼}uåì附Ç®Ï,om­vOÁ÷}õÎr‰«Â‡OœdÅ@¹“®ìTwÒ¶ÈôÅëp|Úo œ ¤Ø¯Ü­ëøõKOá¬cm‘BHA3­öÿæ>Ÿûhiì øÙ#VŒ2h[„ówÇ/{yö8[G¶±E!}Ú¸H]n—ŽÛÞfÿoîsmncàdjÔâüµ»ˆ°Ì$$)–˜Œ'Rʺ¨ÛðEQÑ}S£•,›WUà}&N;Õ.L+![mFR,}¢[æUƒŒ*ôrÓzTqš»¿}2Ó - §£Wá(F>ÓÜfç1ª*ìçz;.Ã×UÜt%·¤ÖÜÙyå¶Ó©€\fÄtIsìf’Iï®’I1|`ì=)–¹½°Cbû¤5éöIØvŠ!ÅZEÁ^õÈ6Úš¦™N€iwwûdR,rš¹ŸCä[;ç ›Á[šÃ×…m)¼™ø G>/ÅrÒmñ¤Xæ‡`öw´„@B’bÓ¸ϱ—V7§LÒX½=–C¾#Rl 6²qŸ)h”îm›/,É•¥$¥2sêʽDê(g(žqŸcvCŠMþ9ï®Ã”éÚAɳ·×F«;mOM™P†’bx¿ñ@;H±câ\õÀ¶®°6:óˆl‰±zbÐ@NÂcÑ— y@%Í™e.$¸}_FÿžMsˆI/áÂ2¤Xþçg1Ô;¡pKŒ’CD/»lbßó/ÍÜ&,~ªÔøÓ'3b!lÍPt^èù„Ù¡Yƒõ;Ü‹oç$Ú&˜<À, µÐ⸽N}°0Å'}çöp§ &°ÂÀ„&4ŠT µXÕ#ѤÈlÑ : •ŠP‹m7Ê/)†6C++ÅJ¹ÀÎ\ŸÃ÷»£¤ €ñ½ {OŠel/ìPÀÜ"øQÁ¬©°ð‡éÉfã[ÂsHp‰ö|i,ë[=D´»–h?ÊLŠ%ª+ÚççªÂ¨É'xK3“bx¯¢[ kfˆräóY,'ÝOŠef7ÀJÈ$$)çÈÿµ§^½÷xµøA?Ë>ýƒþW Š ‰³õ¿ 
RìõÌ…™Áª™ov—_ÞwŸžºçú|Lâm.aÁ¡s÷f®v—_zŸŸ?g²ºØú)Žb8xòú©‹wotÉÈ¥Ã樜յWÇ¡ëËФØõKóËT¹ÑÕ=³S7»ÝÕuÏ}|ô˜Ûåúë˜@Éãs—·%¬-Ü~ÀJÒÏÌt÷u²:¸tä³ÓÏ#¬ßxdsæÉ¹ig…^pŸ8Î+ŒµAбK›k3WL›w·0ýè2ýÆY·r«ãœ6ËXË)6;e‚( ‹Ô¤\ÚX1Ÿ=ÍbòŒ}ùÚ£KWŸ.nnaÃ2§¤Xþ'ëS(ÍŽÏóf¬±‚còR_C“gÏ+-§7üÊL¥ I1j¬xÙI1ßPZ=qеj&/€Úå¶Ç§N³I:)¿H³ùD˜œ5Â"ƒªñvÛL”r›Œ¾ë÷Û²j‚\ÎÐ@vO|=ñÛÌ€R'ä~Ng~!¢„8DgN(6:#3:kÍ#ù%Åð&ˆsKØV–ؘÌqÓ»£¤ €±½ ð™•ö¶½C¸…¡nú ¥œ–=sà'¥k%eÀ¯Ô ª *§û„ÿƒù9Ò"üµã· ¤ NIW:EÈa‡·4;)†¯ ßRH3'räóY,'ÝOŠef7ÀJ€•ƒú6ÚºìõôFB’b?ôÛtjý• ßM½ú…oZ?ý•«´ú$*„–¡3b°ÓÑI·’$ΫKìà¿#WW¶K>yX¹Xïos;IëâyœÙË×l ,†Jp1÷¯Þ‚ER(RìÖÂÆð>ÇëŠêÆ0£Øe¿îX|ùŒÁ YǬ˜;8õ”'ÅM= '÷·wæé…×§N'Æ[‡'Åúxýa¿®kó›[BKgf¸Å_â…v‡¸Œ[\Øäœö¥9orRŒ qûÌ0kè°Ì/$$)–DNù@-ÕÚ¦íùï¹½YVJ­žðe~he(×µŽeé­ˆá_ë)”’Öq|ßw,£^.«6;nD²b^e¨Þ¶ÞÖ’hµ4Ûˆ§•õ¶ãù¾çZBm¥ÜrÃhôõ½Rß–p?_ÃË7ƒZÖ¶ëYF«¤€T#˜pòš*4Vݰl×ó\ÇÒêÐŒM'á&AUšú¶±®­Õ†¾H¤ª5͈=ÓnÀ· »7ztšR¬7[šnGÀ“Ú¹fX; Ýi³Ðb óKŠ!M·€ÁJX­ (±ïwKID#z>0&'ÅRiÛ =€Ç |M³]Ï÷\£UUà[$õ„AÏ´©b®e oRÓÜh EŠAr·øÛz»ãùçtêÌN–ì ?ÒŠýµã· ¤ŸE ƽŽã×iï°%@Àá-ÍNŠáë·ÒL|cŸÈb9é¶xR,ûC0û»Ádzû6eï$$)ö ǶI±/¿#(3¼(IPŒ ¡~ñXvR ö”åù‹MFLœ\Øòbåü¹ëGÎ\?Ïöß1¬}pêŠ!•K·îî'||1åÔÍ—ÉL lEØs.=ÐLð_.ÝÓ¥šzÕÞÂo<ÞÒ™3ÓËœœå§‡ 7Ö„ÖáI±ÉNŸ|=ÍÖÐ}î*X7ÓŒÅ;4¤Æâµ¹äú>` ™Û×'$ŧO"Â2ǤXþÑsªê1ÔŠ"§ Ä­‰¥)0æšÚ°ihR,„t"(Û{(¦°ìpý b&™Ä3j±õNiGГW+§‹ƒ%BD.LÙ9€ ˆÂ…zÄœ’<ÛËmW牙ÜÌZ™s·XÚIŠ…“;c.4[±߆¾‡e»«djã{>0ðóUa íe{¡‡p‹h¬s¢¤žÂ®Wi»|ʪ´†}eE„4üH;Öÿ|íøÁ ZBtEŸÝ Ÿ)“F|]ø–Bš‰rÔóY,/ݺžËðÌþn€–¶€{ IŠýþ?noŸTÿÞÌNŠQ!´˜™#—ÏͰÕC÷>¼²05ø|´@Óºÿx‚!D/Ö½'K³7Oiw³}yÃgb‚K/<&£—–Ò9TVu0ù¨9²>kýüÍÑÕF W´0Ð(ÜNÀŒ~ãu&l¨äpUY·û¤·#Rœ€ìèÕ®.ºÚn‰nÿôºk‹ÝÕÅåµËç˜Î="™I± a™SHHR,ÿèu´zêáJ¡¬u¼qo´eÍM•f¶j%!¤íøl«,‡ñŒ†:Z_±izÝÎïËÞ×áF†áÃþA1ØÙ¥B@Æ·Q}u-Vš^/hùãü½F¹¨$%ý)¢Òö¢äSШVŠ<Óë Ï(ú64W³Z¼N«¤¦4U½mï´kàX ЮÇF5¯ã^/+)Í4ÚôžÙ(¨Û¡EB[ç4+V›.ŸÓŠešG8‡è5uáOÆÂEÅDG˜€„YKÍÎCÚÌ»5`kvYÉ´Fö2|`  €0ö¶½ÐC¬X1:íÚà,ó ¯'•Õ°:xŸu?Íò¡07žp1 Ú¶ª¢–Û6Ä~¤ë¾vôà†-L`´ 6XÜÈÄ–â#Jl]ø–B™‰rÄóY,OÝ–ïˆA&ÃC0ó»VB2ó [4ºw¤Øßþ{?Ñ~°²ž…£·Ç¨Àl¤P*â0P›kÓÝ9||&¥ Ô…c[¸³ Ew‰h0™#b€aRì¡ËȬçâW4S~’ƒËà·Tà +àD¤ÒÔÃW“Y—™C¬æ{óJ±Ù‹&,*d[S1¾]ÍJŠeË|CB’bùGx¶Õ1·Ñ±l''|-#¤·þ]Q´óŸˆÄOɪ2”~@ÂÀ÷\×õ¼ „+Qèoç‡dÍ'´VÏÛ©wø¨Èw)¼0"o/¹Ç${>ÈɆ¨/Óóý€ù|2kƒJ`í8Dq+QÐÚÞVaZáŽÏ}s „ RI>0ö¿+0C:Œø ½Àçoaà“‚…qÿcã^q+àFÚ 
nD±ßâ6%{jiöºø–›‰rÌóYì=ÁÐÁ¬ïÙ%ì>$$)öŸßzÏÆ¿3ßÍBŠ}{¾h~ëQFR VQQåØÌ¸Íë´³°héh2åü¡“׎^°?¼zï ’Ã_Ê@ŠÁƺì¤_Ýä~KÃä΄„œÂTC¦ð;#Å^tg Ŧ¯³$h‚ßàÀP)©¤øsdz%;)–=,÷’“€”(jÝâO:ç÷ŽIHHHìCð¹xH±|ABBBBB’bîb¯¿|濜,¤½=.pÿÙZfRìõÌ”9Ø|'PÇnÍ€óNŠMŠ~CTÚoi˜Ô™‚iu‘ÒO á$ÖÁ4mtYÙýéëRL´' d‘|r|¾:ó‘"Þ¶Ù à=.ÉÛ1Jiæ`š$ÅÞ/HRLΘ(Jâ¿%íGmB(!!!I18ô£GRLBBBBB’b3vÐ?SïëŸ¼ÜØä üeýÖ0/Fÿ;|•ÞBo„=˜bRLÄ,H÷#J®ßHÒ(ÝÛ6$5ÏãÌOtžórOŠAŽ|ú9½&J®=i°í‡OÅÕ¡ý–†ÉÉZóîbRÎ¥3,ßÿ¹Çdë`$äâòâ Fh Ðyv~±{þɈä3 ‹Ü-d¹{ôØ Ô¯/ô x¡°ÂDþ/<ÝÙ@:;”ªpC£mÇ‘b#Å>Z’¤Ø¾†$Å$ˆ GÈC©jVô>(!!!§àÕ0¤ñªJâ ºÿgï^¼š8÷ÿQýôÜ/Å}ßϽþ<7åÜ”s3í:Ëì›9=Ëü.¦íJo±{Óvÿh7­¢lD¢FŒJªQƒHÈ„AÇšÝ9ßdèãLƒi¸Þ¯5‹&ß<óÍ$+„ÏzffGЉÿxb-õ8þ‡´½Î­™òzsâï9ÿwÿ÷y¹!¿Ú.òµi5oLØZ*!’8àõOnþu£åõß_{/^’ˆA]ÑO2š¿>?›ž7çæ‹_~QëSº^’•¶¼v¹o²8·h¦oϾubT•ÉÃ?YzTÙᡘ÷<ڗ,dË×÷ºŸf4Ã’Ué7ç{¿5ðl;SÕKÃRŸ¹=ûzxX­ï›yboùÙYO/º>v¿<·XMuª‡«×ÂÛóÁ÷SŸž›š,ÊuÒ篺ŸÎë_=˜œ)/Í̽ü§Ÿ\v‡¿Ÿª%Ç]zgêÕSÁ–*[uÿ:ôú¹¹lusoEÕÎô¼|²^^Û“ñ·¾º×/wó¯µûÇ“iVн \Yõý¶¡¡B±fܸ_’‰`Îÿä?ÿíøÄ½e[K ¤Ì©—Id“ {3Og;}]¿¯¨b·sŸ]}¹®àµøï“uiË­Î5åÅ{é§™ËmuYCÏál*’xÕÙôWÅ Ó‡/téCfmþÑåÉǶ¢fyæg©™baÏYº+32Ój´>%óvaεoYV¹\vr1@ûr1ùH·,k»ÿŠ}+ÅxqÉß÷»wïN¶ o0y›}°!S“Åä"e2­`vvVJç¼û€6"Ýò.ãòa.éjš¡¡ßþ…v±t:½¼¼,_¶œ§…s£²íÔ¦Õ–<–þ[߉|u”7U:vr1y³ŠmÛWwgÿ;¹ØÃ‡å¿)³fÐ&Ìù—q'sŸC€PŒP 4«P(8‰˜eY>ãUæ¦V>ßúo¦¦EÈ—x'“7¡Ø6Osr1˲œhL¬ÚÄCQ‹Ã,Ër±6š&F(Ú@&“‘´¢X,>iš>pñS¯[ßzôߺNä &o3y³ms(F.V9íxËáÃÚ(#mÀ9³¾sŠ  Eä æœw›C1r1¡âÎo\ï`€o\žÔ8qX%b„b  LÖ¨˜O¼ÔJMM“õÍŒÓ|Bÿ­ëd²†Pì¹|—'; ¢¸â0a·7B1‰?MÓ4 £Ô2¬ .›°7¡˜Š6šh6[¯%Íô@ÿÍ÷ßêNÅž‹o]þ hCߺØíŽPL¹²²RÚ²¡MÞ:Š=oú§åè¿õmŠ=_ßÚ–ýb S¶-S¹˜½1ÅÜg¯Pœ•kÔ¯MÐé[šúæû§ÿ–tB(ÅÔQ“¥m§?ŽB1ÿaŠÒÚ@§!ýëµ´B1@(f›¦YÚv²Q{#Šiø‰<š  Œ¶ªùþé¿UŠB1¡Î¬¿d£¶„b;žeYÎO¡~UèG#„b¢ôœØ€P¬.¬Q74ë]УꛫчMÍ÷Oÿ­ê„PLB1B±ååeçgCî»üTn)€PÌÒh""ьӂ`¨Ùþé¿¥ŠB1»´'Ë*7X¼9WíW£A™JÄvZ(¬Ô´¢ZŠ5G—´úo¡˜Š1S¬[*Ìz—ùÂú™bÅ|A¨e¹¸CgŠDìw5r£ùbhE(öèÑ#«Æ¹ñ¨FýºeæÒÝ—ú?ùÝ»ï¾ùæ»ïþî“?Œ^ýzÉ´¾/š>õý·Vû÷ß²NÅô #3Jl]ù#ñ9téßœ¯.¯œë¼¼R2Kx9ä†a•SǯýüÜ¥}çÊø¿’Û wó¥©lûP¬¢X•ÊéȪ(/è[›ˆEjôQ—¾ZŠùhÔú†¦ÿøæ«»_j¬ûÍ9Ýcõ=è=Kÿ+ÊòÊJý´¼ÿŒ¹ýŠÐ¡¡X±X,•KK3‹’ˆ]üáÙ±Ÿœ«.?=7òOÏ\û_qcÕ(йQ~¼ró7‰‘|zìgçœÊ‹?>+·óéyìV¶y(fövïWzRÆóo(Ý»_éî5^Ä-¶ÎçŸQ´Q—JÄy¬ `[Š}?ÁÍüh÷K›ÙÝ_ô5¦¦Fi.$ZzsÏKÊ_ÆçZ3Þ9èTJ"&·î¼3Å̾ÀŽ拸ÅÖÏÓçbú2hýL±gõðáÃêO±0þ/ù³ûÿN=Ú ßÝë 
ÅÆsê鸟—P¿î(š>5ëŸc'„b€PŒP¬¼a(f¬îPLfŠmЦA(F(Ön¹‰ØóŠÕG#âÇ_ª³§ë—ÿñÌŸÎüá/»%|òØûfÜO¤¢©Ñ÷ã§}(æ¿·çÕ‹zk]'„b€PÌV'˯æ\…µ¥¸T ½–¦åì`*“EB±kGâ’—‹R#‹Ü0–oþ6Q Å~ñ4“ÛùÛó%£Z©†U§Þv*f!›I§’Éd*•ÉæÍÊúЧl‡YȤäé‚å.³ ùlº:R*Îd³¹‚iy‡‘F¯;Kœám/ËÈ×Q:›×gf!Ÿ«vŸ’RÙfÁ°lTGFÊs0£·!ÅÊgkÏ8YÝŠn#–™Ïç2g/HóËï·¦bÙ§û(§ZÚú 'ëréÚsKgrf¥þÍäb;'@(¦JôŠÉê&ƒ}0žuG$¹ÄÇ»=÷‡²×”käs¯¼ÖÃJ6ŒŒ$&³žM䲩ë‰ññ ñxâúõêßšåuÌŠâô_^¸#¹pa|<1™É=t)—åGî w(v¡Ú°tâgÞ“43™¬6##'““ª™FûMº¨K#ñxr2³¸¢eqZž¦<}U¬Ûí¹Lmàµ'¨‰¨²™;ÕqãÕÒë©l®¸Q¼¥*¥©œž[ÞÒ;™b€PLGM “l«üÍJùqu1¬rùÉÊr¡(3Åäaj¦ØHÇ™äk—V¾5Ge©‘En˜öÃÉ×'Fþéi5SìâOÏÊmÉÔVþ\­tÆ”Áå¶l¨™™b•B*êÞ¿N(ÚŸ³T•銱‚iËJö†ÕﱤáDF1µÒ£+‰;cY)5Ì:¤i;Ìl<èZ7Jh •·½r‰¾`W£-£ Õ½Vº7¸aC=)õô}‘õÛ FòÛÃÊöEØíKX>¶èÿUë 5Ø—ÁH_Ö´…ÿ®RHFë÷vWO<Ùò¾4Ïš‹ÍÏÏïD ¡XÊŸy§‰OgÖ]íq§bƒ ÅBâ µjï;Érv°[UíþK'ÇÊŒܽÇ©©ŠÀ;ƒ*Ýqµï ‰/BëÊžN×Ê‹ã* [gïi¬1¿Í(åìxè <ŸzƒTi¢xaòt`ïîõOðãñú=Ÿ|gßîFíî ʸþ+›E(¦@(ÆL±#—˜ypú^öÜ”,Îf²ƒS÷ÿx7þ/þ$ç×W3Åä¤û‰ÿ<:=4õ` #5²Èé ®ýÏñ‹?¨›)–ù,=ý§jåÚ˜g2³ñé’ñì3ÅÌt×~`ÒXÀcW0(!š÷øG+Ù¯×5”µ$ nŠåFbšQ"YÕ|n(²ÉsV3¡X·QU²±nÍF"µ‘Jv“†"CÖ¦[ôÁHõí×é®îjáã…«äãݺŽ×^š”ùìÇQ ± []]u~ õ«Ò¸¦på wØïcUèóÁ;œ8yj¬P»7å§Û»Ï“ u¾‘_]ü8ð’ÖîÀgEo(¦u0Q”†ÇömŠ ç7|î>›Y]{j'wëj÷]ȹB®Äþ‹o«¼¯‘ÀG“ªÿôg}¿§nœ1ýW Í»E³žP ŠÙê ’7Ž^þ;ý£{ÎŒ¾,Ë@õgÇ€Ä[*ÿYõ§M鸖3c?>§ T±kéêÌðßûr¢{L]¹ÒÞ*3د¥.Œ¨²/5á(éòN E£Ñ`w}⣛)ÖËU$[ëß¿™¾Œi‹J&ä}|8ÔMž ô™M„bÑ‘œ<ñ~¹_/ØïDP™þ§¡`8­ŸñÖ›67ÛâfŒ„³_µBNT·Ù 7Óãc,M(¦ÉÅHÄÚÀL1•h(ãâ¸;cÚºà?\Ó%Y»Ã³¾Ø]7* ÞWc{'é>u[îÜèîŽPº¼A¬“mÔLwƒfªÅ œMèt¼á$]R¼×GñìjUáÚÉMkßI.T{('½øöt‚ƒ{½ý¾“—RãšÏJý;AŠB1ûé$CW%ß+‡IÖ–ÎAgæ—ÊÅÔíµ»ÔÒٸ̹KÕŒþp ùêÓ+WÚ[”ê x™¡´%k­B¼Çî d­ÆÙJw0‹E"½³ír˜´×XCá.OxÔèœbÑDÁ3­I ÄÒ³:J>í^B™î|­;a¨ãûÝ-&Í­ŸS¬+V¨®öž_í¢êz+5q¯ï¯EPi׳êŽ&ÔNy’AÝ}H¸ö…ˆ9¯ZÅLö‡=9VoÊÚn2éyv]áþÚéÄ*¹d_W¡˜ÊÅHÄv€Plµ ÆíSî´(ðÅ=×…Á“áPx½w®Ìr·{¦˜²/ áS× =s¸f×â•ÂäGî-¾s-ßh¨}_\»g…k_„¼©×pµçêÆgßpWÁÁ«ZÞf:¥±Q3caO&ü,Q”Î Nÿ¥'}úh²Ð øÔ•êZ)¯/οã®í ]y¯ÖÞ v¸ŸúÉêù1WmÇàӆݱZÇp~ •M s€PŒPÌ{Éú„«þöÖÊÔ•+Ÿ1³ÒÁ Ïf•x²3×g+á~w½1ÔDk"1•q‰D,àt¦Â¯ºñEÝ4±P¦âÞB¼Û5P¼Pn¢îU¡Xbí ñ•L"ˆ¡¡D¡6ˆ‘Ï5”7,íµ -÷4±PÆv‰«Í;Xý¤³îØ@"W õ*…t\ÔЧ º- ËÈ5–—^ëŽ? 
ö¥=yY¬»>hÓ½p*Åsê£9[ñšJ(€P,?~Éå+ÕìÄ4ÍÚÏùHÇFS²Æ%Y‡Çja™cöÊ©@ ¬ „ÞvTîh(<6»~¨““U~:Ðážråd;Þ€I‘ÕŠêßT7¶ÐLáÊ>wr–gªÜ xŠó[*öN;xÍpÕΞv?ɯVu¯:¼v;oÔ&† ž>}ú qêÔࣚk}VnÚ„b¬!#“Ã'‡bÍ/ÞPÌX5ž)K½1“›‘Ë~'_i0‡«ÏÚø*–™Trd¨¿'Vø¤„bfãPÌpçDJ  ¯‰D\3ÎDOÒØèHÌî`¸§$s?³Çóhwp”T›ÖöéD\u{ 5TÆT§«;íq]êQ³Ea¦zö7Ö•4ÏS*Ùï9Úô/\e$ÒµA*ª6Äá“/€PÌ4Mo„QOÝ¥¨úº™bááYw(ö¶;1ò¤?M¹ûrdã@ͽ]ãÁíkç¿8ùNØ}ø¤Ê¡¤ÆŠí>v×|jâm×C:7Å.ΨMoЃZoÜ¿51üÕç'ßmØÌÒÅ}uù”ËìíIqóæÍÉÉ{%ÓlX¬:Ÿ¹uS¨âºà¯ópàðš#Gïö´1<³ZPó¿<:öŸüüò­û¦²tQ_©'hö¡Xû€PŒPÌJ÷Õ(:žl%<”³ëñ^±([ Å2êz‡Zê!ùD®¨+4’5Õæê¨$HQYi_ ©©^•œªŒ˜úPÌ»^QÉ”þàP5ûOÕk_8£×=±,š¨;s™z59Ñþ‹ Ó’bë8:Ž ›Š+Û½.“ÇzC±À-³Þìµ/ŽtºªèC±Îȼ©x£·¹«¾75ˆþ¹ÏL|®iF…kK—ßöh9¯Û· ‹Íz*ÝóP©oCêïž ëŠvüüæüjŸJS§q¦е ÅÅVSŸjâ}(Ö›®«6B S°®…b1uQÌ\ª/VÛY'(T43ź{õ¡˜dLþtõj†ÕH_8ؽñõ%þgŠ5Ši_8«ß]Ý›ö¥›)æ?“_çççå'¹ØóŠ5£4ájŽÜ2º{T;SL%YÊÒÄɆ ÍîFP}òå?SƒhùofþbÄJÎ¥¡+n";6<³6þ­‹o=ܱa”×yî~i«•>е'Å8|r5ØøðÉÌ@,Gª‡ †bC™†1–b¥{½3´z’™œaZꂌÏtød4kš†‹i¹Tì:f!ŸJ Å"ÁFñY%©#kª‰™ÿÃ'##YËÝ‘ééÈ®c™ùlj¨/¶îÒCŠU é¡¡uÍ %Òò 5yødý §Ž÷T)F²Ç›Ç=c"&+õwØ~„bòùãüT7ü¤bŸ{Ý;xrB=\™»tBø¤ÄUsžíªÐʱ+|jøë©™¥’Šá4¡Ø‰95Î3…bëúŸ‹zšÙ-Íܺߠ™ã2Îüp§÷ˆH÷þœøðØÁêáŽG>xìÔD]ñ—Sžý_WìXwôæüüŒËüÒS‹ËëöÿÔ‹g{Ù×0>[qï®ûw/ž;uìÈ+ *·NL(Ö& #óž“+Ø›ÞèDûÑDAŸ­˜éú )*ÉX÷æ¡XZhßsD§7ñ©$z#¡°uáP(–±ìtmZYWW—üèIº¶ie"]Þ³ãë4Å,Õgе:–°Ýòqi¨&ȬkÙ%¤4’©ØJÆsÒúPÚÒlq3VZw¢ýhý‰öõ/\n¤Ú˜êK9][ùDp‹WŸÔÇ^äb;@(¦¿\ÑFcË_ø’×ÑSî1gn~^Æt¿¨’2•dÍ{¶5w¢Ó•Èœ›yÚÃÌÙo(&ë|Ï;1×(ç:~imõà|ÄÝÌÙUÙ ™EÏ)º^9qɵïq拾SuÅû"—M¥®øÜýʼn¨{ººËgO9pèС#‡86±d^ЍÍdÛ]ð†ï?íaiâÈ®—”± §r×®]Ryüü”©HånOÖézuüR!kŠŠ© E‰d, ŸÌ\D­WÓ‘|†b"¤î43]šÃ'•`,‘N'ÉBÅèñl9’ÈÊXÒR~¤'èŽéòõ‡ã9ë»™VÉPÝ¥3ý‡b*OH¤SÉd¦àL˜RÂ} êÈFòé!÷V"ñ|Ý)º‚=qËvTR}ÞérÚ-ÚZê“Jl(mVìŠe$ûÃûëRN}(æ¾Ø‚ÒÕÕÝÝÕðÈÍf1r± ÓGË}‡$Rñêxåèñ±¨D5NjÔT(v *õ޹e[ÚPL%_BmEЉW޽téüÙóSË+ l±™sG=OùXßĢ죹¯cGÜë;$ÕjPü¹¦xæ¸gÝ‘³7d­ßé;þŠ;@»#]<î~z_~½¸öRM?à™Ú6WWùÕ­%çU(Ý®«\i¡X›€PŒPL‰îýzjz”ÿPLt…ªºÏF/Üù‘÷ Y*êÒˆÆó ›ï†Â¡`]'#ùŠíCÃívÅ’ªU®hÞ=-Né ÈL²`ݺÈHE¿ÅMê¸Wp¦¢;RUÉ„÷ëù¸ƒ\Æ^ués1y¬ `{Š5R.—Ÿê†nýòÿ­Ò!ŠV#¥¸7®r©B+eWç#Guª­¸Âš†É—jÝVœõÓ'„uCsîßÍŒV›)OÈØz¿ðÎ5Ó;ëô£,£ýio*¥í(Ö½YVeùØâfLÝ«&#9ËöõÂ9;<²`´×5_0¤ÚÖG]*k¢¸åŠù9E)Ó•tvíÒ‡b3ÞmMê åØù©õ¡Ø´êsã¼ìÒ‰õívn4jæ¢ßfœÁ§ãïïÒ†‚w܃_Šé‹Ó®ý)vH×Á¡WÖÜ9{Lßí¡ž«[­Ô¿˜)ÅtŠ¢P”™b7›ù§§Ç~qNB.ÝâD`?«F]ꆮÞY~~n¤ã̵#qcÕÍ û™T éžp 
Á±¤e+Ö@èiÓ¿>#±²½¡ú@¨+r_ø1è: ÏŒ÷†»…b¢ ÖÅ>]ÁÞ‘t݆L<ÚðRÝ¡dÎÞ 3W­¨*…‘žÉX0Ò—.Ôwï‹6LÆB±œåo‹>TŒto$Ø`+Ñ~µí çeÒ‰‘¾Þª¾‘tuˆL°Ñ §ºô!—®¸õŠ•½ô‘‡º«‘âÕþ÷t4HxüöS ª¾<²vW牋R½xãCWEÏB¹ÞõþõÓ¹výïè§'¨Õ2Ô¨T.\頻q(Ö©fЉ¹þu‡}vŽÊÝŽ&šq§âÇu6˜"Ö3ä~¦úâcž_\·ÿ§.õ6˜¥¶ë•¯yújÿÑW\]+z†R2šŸÊÏíó A(æ¡3ÅBWG÷œÿgƒãÚåfŠýb³Gíýá@òUÿ3Å´,#ŸÍ¤k²¹‚õŒc²™ê Âô5FŲÌ*˪TêG3ÑD&W0´ƒùÜw[Î:¾|V•µŽ]âÒ*|·2²K›YI©Ó”æ «âs‹[T1 ùœÚå[~¼‘Š`M¨·°á!±ê`L•[O_ÜzÅ4 Ãp~ õ«ÆÂtúz|th@ ů¦¦ÊMX˜J_¿'©ôT¾QMö_\X˜«ZX(}6¿zUšYðÕþ´óˆñññë)]ÿîb!Å Úþ榮_uН¦¦¦5-eØ”S{õúõÔôÜÂÖ+uŠB1_–kJFi)»¸ž_¸›_¸ÓxÉßž_š^Ê|–í8ã9.òå3wb©¥™%)Øè±ÕaÓùÅû ¥Ri¹ÆÞ  ’Úïˆ åky¢‘‡ö»„G*6€¡˜>`RÔJ}tÒÌc›§ïþ[Ú ¡ ³K5Õ\lÅ0V ÃÜp‘àlå[3;t_޲”,l-ëù'§3H¯Ø¦H™f‘MT7Tco `¸Ï8¦;iZû@(fx5)Í7þ5ß?ý·¬B1@(¦fŠ9¸Šº¥¸X4•œÍŒ¾ì)öOÏÜë½mXe)Ð ž}¦`e"º“öwõ¥ ö‹¡Xë×ç Ñ¿áOë:!„bvɷ岜?{nJB1÷L1 Å2§ÒåoV¤ ä›ý,+=Òö»t‚±¾‘œiÀ‹=S¬¥RÉù©nìÄ1é¿õT„b€PÌ6 Ã(fXåj(ÖqÆ9ã~uÙ[=§˜„bÆã²ÿPL6jM©K]óàÅ€P¬´*éP7¶Z¯YßĶšíŸþ[Ø ¡ ¦i–ü).e¦Øƒ3™á¿ûåèF÷Ô– ÿ/ï}|KfŠIAÉÙ¨ ô¡XA‰þ±m0>ã·´B1@(&?~¼…™b˳ñé‰î±ä«—’ÚòÚ¥‰ÃcÙÁ)ãÑfŠÉFm°A(¦®K³Ô©?ÕV?Vƒþ[Û ¡ s¬¬¬láëšQ’hÌX5Ô"¿–ʵ»ü‘ÍÙ@Š5â´¾F­÷R5ôßtM ;!„bjL?¹˜æ"•ê.?‰_˜Ð‡bšG³^C_ß,MŸMôOÿ-í„P ŠyŽ£4MSsŠ ý7-}(&ÃÊà5 €Þÿgç.tœ£8޾ÿâpñ‰u¿Å©…áŸ2¸Ž@ÏÁuÛîMzó¹zõjRÅÅÅÅåd –1˰‰bpÎD1`Eª*µ¢ëºËŸÖZ¾ÿð“ïýþÿÏϬŸÿ¸ÿ¤$–1«ª³ˆb€(¶~Àp Ïõë×ó“qΛ*M•<þß~üÓ=’ŒVl8!ñœ£ Šëz£¿}ûöÐź®Ûl6-~:…¬—Ç¿‚G’qÊP E,c–aÅΈbÀºäý}èb XƬQ쬀(¬ÿí>—¶UÕpß}ø+2NU•Ñê•(v^@Qì?¢ˆb€(¢ Š€(ˆb Š€(ö/X[@ë€yˆbÇã1[HUeÉ/{˜E–Ϭ U•u4KéQ Å=z”äÅ‹=Ì"ËgVÐ,¢ËD1@ËÉê­µl$<È/{˜XÖÎ,ŸYA[kYG—‰b€(¶Ûí>|8t±²s%Ó]5™…s(bYA³ˆÎÅQl|å«W¯†.3Èò™t|ídô€(6óÉbYG¶Ûmk-·u¨ª;0ªÊÂÙZËò™Ôib° Qì³.¶Ûí¶'¯`Û“¬Ÿ±èQlæ“ÅÆ],vïmà/Ù½·Q[æ41@w±8œì`‡“cŒŠXô€(¶D¥±‘ü%Ç‘q‹ÅV’Æ` rˆbkô¦'‡ˆb2 Š€(¢ˆb Š€(¢ˆb Š€(¢¢ˆb Š€(¢ˆb Š€(¢ˆb Š€(¢ˆb РЀ(¢ˆb Š€(¢ˆb Š€(¢ˆb Š€(¢¢ˆb Š€(¢ˆb Š€(¢ˆb Š€(ö–½óþŠ"ùÞÿ¿öMŸ¼fr0+² ƒ$sBQ76¹kDQ%3¤‰( EÅ "Š2K8ý­šk Õ5 Íâl༟׹çžÚšÛÕÏ­®þ¡ŸÃÛ7ÿº¦˜gúý7M7_ûï¨+ÿñ ‚½˜ìõd/©À ý¯º7æÿ'¬âŸì%e¯ªÀ ¿OOl»¹œrüÇn׺ÌîØ÷ã²úv°`Ê ZP]ß¼&ðë/•ÿÅõC?{%Ù‹É^Oö’²W•½°ÚÇÀ;Ýz‚±Ì®Xú§P?øû(³à“”òš¾ÿ të§É¥ß#ôÿ+êg¯'ùbì…Õ>¦Øöâ•ìK;Ã}¼wûGFVïʾÙ%;(K“uK³Gè‡þtG4{UÙ «}<L±µyÿ—}iíÜfðAÞ#²Ðü±žO)+A“j,òÚ>P5k3׿t{„~è?Úµ½ªì…Õ>¦ýÞü«»Û=”• I¥F5|ÿ9O˜×›Ü75K´Gè‡~z[5 PÀË쎥¯n6 ÌÝ| |ä+54Xè¼q(÷¢²Ž$À¸Æ\ÿRïú¡?ð¦SÌ 
º(K£²À_˜5Õú¥Þ#ôC€M1¦Ø‘ÎmGýâetQö‹NÊÒ`1óâÒM•% êÍô/õ¡úaŠ80Åb”¯}%Xe%hÒ ¾#†2NÊJ½²& T‚jæ½–æú—zÐý0Řb‡ù—¹‡)Ó “²_Ð䯥Á|54ó"‹ MjL4,å¡úaŠ80ŶŠð­”½C™”• båZÃy“k)+÷RjÚ·RæÑAYª1Ó¿Ô{„~è‡) ÀÀ;Ô¾õн-,˜ @Ù7 y*à™”•h§Ìƒ_Øn¸e5hÒ Æ8H‰¢ÇDÿRïú¡¦€@SL|´Y ŒLñŸÆ¡®C¡®Iar_e¾²¡sýK½Gè‡~˜b L±ƒm›Å7ùfʾÍSÏ Œ{”y°ïyÊ|ÐÆóÖKÿ)nj¢&Íô/õ¡úaŠ00ňﭛ(óàžéWÊ‹q¹˜Í/jMsýK½Gè‡~˜b L1þaOÑFY(ÿ4 l‹ºv1÷2׿Ô{„~è‡) ÀÀÛߺiËÆÇ&ʾÍóE(Ó`±kîãžù€²²¾ »×Rïú¡¦€SŒûw7ð bHA•”•®¥¯f£Èt¹XGd¥ž7ª¡¬¬CÅæú—zÐý0Řbôe>Oì¥, ”åÞ4¿Çµn·{Íês "î¬ßãþ{ïHúÿœÕEÜë2ìk)v»Ö¾Ç?Y¿‹Öß¼~·s]†÷‰ïi6Þœ¿G?Épp%,ïvoXBûS @€)Fæ‘4É2»”i°ž²¬˜²oÀç]QÛ ×|Z°še¤8×/ôZq#Õh mw6óÅü²](v×…ˆ{­Ùi]+éÿSzTæ3êB¨Y–ãk£?²GÊ4økô+ó‹Ö¿›ïǧ_»–DfУáÁÍŸ­{ˆùžÑÚ˜bL1æ+ñ ËJФQÍʾraF݆oš¾ýUÄ…¦ÏöÖDÍS¿ûRµÁãúêÖh_MzSÆ7ÖLŠ“M[•uޣǚ¹ÕžWÔšWÔ^xµ%õ#{¤ø+õ«õ‹ÕÀqèWLJ‡žíÜI“™î_nÝónÎ=Úœ?SóÚ„‚5üxð’îžó-¡ý‡) ÀÀÛã^Gó»ù€gï€2ûÕ¤†úùµ–†MO‡Ë’Ækä0]G &8½>öXY*òÔ½Ö(Ÿ¶‚&˜~uŧÿOëQ7G&¤»_û¸Evÿuú•šÅêWžÂ5ÒS?ùçêwEí-MÍ,óFilª“~2xFKhÿaŠ00Åf?¼EöÖRö‹u”¥ñ|†=üÊÀkMÏôH‰¥q­r-ûà§ì˜ÞëúËqMà¹"ô³ùÙ;¾¾¢—ø•ù‚éÙ£,û¯Ñ¯Æbõ«OOª›óçê¿{åæc¼¨uVá+YÆ•%´ÿ0Řb®µ»Ý”}u^„I RªŽ=žÑü>_áWÖ™Ò@•.ôpgÍÚåÈT›·Þf‹º6$Y//§Ú¢ÒìѪ3r…­¼«6,±"dgYHBE¨¥.2ݨGöHKeH|IPœˆÄšð4‡ÚcºÝ«Ä«–ëô­_‘á^[<*ý1ÔØu~!×Oõ”Õ`­±»È&HZc¸¥2” ¾’Pj±FŒ>£t[$—TÂTñ¨Ki¤-•.±Gùô¤±êÂ{IBeXJSTƼõN^ŸÀŠËC«Âw5FÍoŠÑj7•ÍQõG§Ô…'ÑšLveØ®úÈt§AÔ`«¡bjPÚþŒ—ÇdSÌÅÎL†ÛPÆ::i,¨Óä*~ vÅysBuxª]Ýÿ G”ï™zŸæUÎ"4™)7{G1S @À)ýá#œ²4 y^@Yµs\½Ûµ¯Ñ®ñ %=ÕA5bMGäŽk+¶][é Çì}Óö±Ÿè×¢Pæ}¤V±ÿ6%ý½ýtÝ×Ù –4gÔµ’)6ðCÌͽ[Ÿ ¼íoíÍ=Y°<&?$u¶ÇHË­å1W—mÈퟜðLŽ{¼ùÙ ý|åæ˜üåqU¾³Üg¸H¹_­ˆ¹ÎÖïb뿪=éŒ>ìü6¿åB~óÅüæ ]É|\³ú)KšÏ=íØÂ·…íÈ_“¿é\[ýƒÑ&xäýðÐXëýâ첦0¾&rÞg•XÈZ‹ÈvæÚŸv>ãñx°µ¦í·£¼ëåI¢‹;g„†Ü“×—ÇÜ:Êv‰.y6ÜQÓ|ŠÇÜIëg5Ÿ‚Ù®ˆ)=¢«wŸüP/äÑSð™bt$¼›Ãv†ïß¡?­>„uS’ßå~<682îU>ÚÛÜûué ¶r‚5Ò»ldâ öŸájƒÏ½ ².òWð훾¨;uºÙ>­ù˜r»Ùñ8•ÙÈ7˸ë}Fî ɳÇà6_A:o=Þüì¹ý;WéPƒ|‘-¹l¢–Üì’å17÷U<`ÇŒ$Ñ1[±-?8Õ1ß;BY ÌßA˜b L±4þÉ̓,óei ÎÓU~×ÒÀRùÍ6¯ó«CÓÄ%<îä{4žÒ;³ëœèíÔ|Ìt~鈼6"jfº¾tøL1šy;nP÷¶¢&<±!’ºH±Ç\«¡«Œxöð3D,º+ž½ûƒÞêg3>ã¥í+ý¯Ó¯ò…~úûö²JæÍÅW牆˜ŒüUɶ¹ŸQcXLÑ×èþF<îýa{E¸#/zýwiê}köõå;­t;ÓúiV_°<¾–ÖTM1>©nŽÐ‘ÀIgõøÜû¿ój71›Â¶š5¸£"<­ùò\%ûö¥ûÉ m©uÁ1ùfÇ ŸeC8“ÃcOmÂîŒK¬œç¡, ”wÍ ¦€ÀS,ÍÉ‚,ÓÀd^ eDr·Ê.XEOÑÙbhÏL¶Ñ‚¼>ÕuIþ n4óIŠÝmšßÛN8"o¾× 
™¾÷•lŠÆSÿ/õÔEDBaœû­6?ãϯo½JÝÑúLÞ;aÌ×›A¬þdß=ÍŒ¾û{ÓšBã«‹=ó–<ü!®*|®g‘RsàÚ¬J[»Å¸ à gz¿* JqDýÁúHSŒt*“¤œ™’Gä£bÄPÏɤúÈ”jó[ïYÒèŸØ7âþƒ½©ê3ºä=fììXà1 Ù¾^¦>ô>¥ð<§1Â衈¢Ì‚©¢ìмTÆ‚) ÐÀã_ãóEšƒ²o æEV"¥vmÍè”dÝÙgßÒ>©ÍòÞ¾§:\Ô™bbq)6ÅM±³Oû<Óª}ônÒónÜzDoyLÍa1þêwµ¾³ko¡!ôËö~u©‰ÁW“ªÇánÚÊ]<á†ÿšb¸¼¼ÄêbŠ=ìß»«*Æý^ñø^ûÉžªiXŸ2Ç3Ú×Þ¨/ö¼|3­ŸÉg’ä.ÌܨÏLºVê{?³ØSŒïesH¶¥â³§šÂ”gFu—Î6D,´AgöÃIgZÙÌqÏ̸»+.ÕHFÚ<Ê"†Ï‹èéØmüŽH¯óÔÃ``Š¥Ø"Ä7y„È4ó¼€g)”¥ÿ|bå¯dr/³Ù她‰’ªà»XG5ÅhM~Ó¬.)–%î’?ôzö‚—ç|ú·âÍè­#^å?=î“ç{{ö°â”ºƒú?>zÝе‹Öÿù‘ÞÌ«ÙݤޗxóæNí€Õ>pzŸŸáâÝØÍ‡›we¹âޏã²ìÛâòv”+VËë[w#v;kä©‘§y‰7V~zce¦«|LVñôlŠÑ3R·wª÷Çâ›s—±(y5%mÔ%u÷Hê›ÆŸïlNµÇ½x¢wúúœ§ÞfX™ª>n<±E”Í¡²»ûõÏ«è¿cÜ­‘ay¾¥;Nm°„7¸%oYÉ蔲,ßǹwò‰rò{y‡*ƒk³:Ï?ù¾ˆjŠyÞTñΟyÜ%Ï?îËšë!=Ês”³¾z˜b L±¦púðÞÅ<óe ›È4 LWQ–£1ô”îïn&ªlkâo¯‰·•O˾Oß׉ ürN½)æŽðÝW5Å„$ÅvúÅ<1óð§Yý–Îß%S¬{7+ÞÓÒ Iô=Ø#÷xm˜–"Æ ìáþöPߣ¢ÞHmmf}ȶ+ŸüÒ3 éðØ;v°_Ï?–d÷¾òÉfgJåÍ™s<£/º+úGz»Ÿ·¶?i­w%oÎýdóÕ°=e?t{4e£Ô.¦û¾“žiͺ€x[ìà“FõsÔ‡>ÿI¯ò°â¡·šß;¿˜ÝÿÏå}¾¿ç‹îJµÁüpƒI˜[w¢J\³gÕO›Á1 Ö(Ôc` £E”c&doo“Ž™GèáY >iòNÑ@ÌÃ``Š‘Ã¢Fce)|À3 (û ”zKåVÄ{[ÜgcË;i~²õXyȇk: £Ð%îÕ¤šbÇé¾F …:ïÓßdió3ÅŽwÓâÄà›¾Ç;¥Éî)pðyò®alNª¢mi‰½ò?‡ïthzºúöˆ Çåù)ÿ¿>äá[IxZ3Ÿ‘¥rõæËÿWt¢¤Çöhì¥grJS!Ij/ž~/›k{Úe‡èu¡]ì¶Zö¡¾£QW/vI½©ñæèœÊñ¡³òþïqX²\–,wr–k;Ûç$_ƒÝó6HÏήœ(.Ø+Ã@[Vz tgû®r Ô^àÃâ«®{4™‰ûO:îõv?éÕÛSõ5Ѻ֡Z> ™í­:Slnó‹ÌS(¦XW+>Þ!¹!wf½Ô#_-‡T]½õ¾Cg•ÍQïN]4„l¿ü?ÉuõŠ‹3ð8+Y‘MÌh ŠecôŒBã 6”jF(’”.:Ú“ä,gä®Û:-R_ToIö.Â2}ý=Voôtü'½Ê¥‡Bî’´ÿ‰AñeAñåkXNª_hƒ¶E1Åœâ|i;¦ƒ;œÜ¤vÉUãc&:RšÏDVÂð¤b˜b L1þÞʃó™b¼€²4Y”Ýý\[cNÇ[½ßÿ6Ùw/°‘¡{Ü:SìXCX2 ?WBhPç%¯ÁÒæQM±¬î6Ý_®•Ë=Æß>-©šjjûtžûÝ šÐ¸ÿ³©äú¸¦ãÕà9ê‘BüU¦xJ]FϨ!äBßsƒjÏà‹ M¬èl·Å'IÏ}g™Üõx‰3lþúxÿúQ¯ÜÔÔ¼hñíCpÌåÿÞ$bGUIƒŠégŠù“¿Œãúcy;˜ÎÏjƒSM÷¶K‹˜™b/rÔw„2 ÌÞ;øêaŠ00Å,õ!–†Põ”)B(ËA•†õÉÞAbYJ»G[(3=_•ñtŒ3šwmùΚ‹5xÇO¾èM±L!ãêàkÙzHªIâ„[!æ%å‰:S¬3ƒëm“ÔLÿWléêĺ¤ºà¸›ŸXlné×ñÞ£r_Úß¶øÿ_òÉÆëÙCšŽw#…qe«âËW³ˆ+[Íš½12>ÛåËkÛ #÷5Ä´Åî«Ýwé¿6^þïÞ¼¥t­ñ3ªKì˜Ôf™zRÞyü`ShBåª/ºŸë7$Tñt†ºOl¼¶bgupbMïºA×u‰3Ô¢Ögmº¾rgUPRmp\ñ2ƒz£}à“ê£a“I]²…ôäô–¢U Ö`¶ÿ;Š6ôµ´?q·~"•ýFõ–9ž‚2IÊKF¥Ý™øÎ·ÿ×®ÈÛÖ?OipKáò­s7˜¤³Yßæåóe·WÈ`ÅÇ@ì¿Á1à²nŠ©ïˆü™Í'ÕQæS @à)æýêV£>„²ˆ*ê”iT½êâ£Qiù)WÇÁŽŒS®ôS® Ÿ9b¿îrLKž¡+Û+ƒ“lgÇ4™ÑêúCËOw¿“62Å„ùÏŽ¦^Õ/ŒMº±2±.ØÈ´"Á‰­’[ÑÓ™îm*Öõ^“yvûóÂu¥Ù-¯'äùwרGÅ 
Qö$OýuuT5 Ÿ>¨³=°¹D´j;´!ç?7äŸéèÑt»›`UM.Îä@™ãûokO+]O¿-¥® êÙ†õ$,opTÞÃÍ!å§ú:5‰©·-gJcÙþw¿•ûÎwý¡¹Í*ÕOÔÕg$][ÏOZ½‘Œ:Ãc°>£ô{õ¼¼D ª‹ˆ!3úIyG(‹cúNIõ0Řb‰âƒ<ÑLÙ; ¬~´ÖÐ áöÞû3²ÃÑyŠÍÓ¯V–öË5Ú@ö­5Ì•¸7©™3Õš)V+×<ÍG­ÁyCƦUb]¢ü=õt¤“ò´¶ÍœñBLJ ,©Gåî즞´5œÇ.¯›2­tµ®Ÿcÿ»ôèy7ê™ÑT&[öÝZ#º0Çy/ÖûÈZïºK¢ìì7)ôÐLðŒ\]xƒ{K×ð“f¿ôNSéhOô×F{˜~oǀο¡³FïH3e}–}õÙ©ïei `Š40ŬÊG8 ‚(ûE°Èb æ÷·;4‰WÏRê)®¾Ô9,½=éìF™]Íš)Sm§Ä½²?Ñ&ÛNÕé7rI’Í]å¦Ôã¹Çç¿ëÝ®t±È\ëþJ75Wæðê?x`à»yžÑé'š9Å¦˜x|g}­Ñ_~-¼þêËqý>lŽÐ¿¿½az>K¬ù¿|á ®K´zmôÐ9âÙ-ðж«‹¨ÇL=!Á¦ïe5ä˜bL±µküb§ÈÞAe¿XC™gŸJkOÔ¹Öú äú}ú?Î:C‹ÿÒßæÑtL¿o;ÛœóBþ[0¾‰ÙVöRï’Ìtž¬]séù°²²Ðã|?¥ :»ä´—¿šÑü™öô¸ÖÊ=^Ò­ï›§¬»ûKökâfUZƒX°¦ŠŸö8DSƒÖ¶³g´¶ø¹jyÆg[ Çô»Ítæ>Õ=}…bg÷?“û2«÷t>øÜ«Ÿº`û0(wG"ÕÍ‘ô§¸r&Œ¶—%û„Œ…7HõûZ _è×ìïNg÷ÊQdXy1íÿQ“c vÞè˜ùŽ%3Ÿ/,m£ú.,|ž‡) ÐÀ«QÜ5Xe%hr1õJM-ek¿jþìlûOgÛ¾ýÊC5ó]kMøÊuàKד޴£¶˜yô/ ÇµÙí9Ö§ M÷óÊÕ8Jòî¦ý)=ÒOê:Òµu ¿´ç”õ—Tõ—[•äµHe¦Ï(¥éX^OaAǙ➜_\ sé×™\m[YÁ‰æo zòŠ{ò :¿=bUµ)õl’×÷æ÷æt|{´Î¤÷„…éÿª-Ç:ÐÐÔÏö¿¡éQá/öÿkSmÇ®ötŠÍöÿ¨ƒi' {­æûOÇ N ¨àoG`Š00Åâ«W‹oòÕ”YÄSæÁ')«!ÕÓ€æiA–i`¼~5eµÆw;Ÿ€E®¯è_ú=J¿jrÝK5ÕE®oO]¼~ìÿâõÃð§SLý7*V¯gžbÑ5«({ƒ&ë(±ô{ ˜~Õä2Ó¦˜h‹×ýÿý0ŘbqU«(âEöVRæÁ”i Î+_M™+)ÏSo¾¦y˜ê_ú=N®ôÿÀØÛ™jª_®ïéHY¼~ìÿGè‡)àϦ؎ʕ”½ÊÞ¨Ù[@ÙèãßdÞ`ýEÕ‡‰þ¥ßcàô_žþùùSmúúÔÅëÇþ„~˜b L1úW"޲40*[!²Ð|Šʾñš¦ë,>Ô{-ý©ÿøÝ‹ÖþâÊûÅ•ýÅ¿5F˜èWê›"¯ûÿúÿ;ö[×…xkÊ9ÖÊ9mÂnݪö”s^€z÷®¼çL¾_ ƒGp.ô}88]Ξ[ÔüB1ŠÝZ^Èk‘Þ©lè_ìS홵Hoö7{jÑ9§3Owþ¹¿£ùÍ/`<¡Øàzw,}Zl}ÏøòŽæŸóüB1ŠÝ|wìæÛ£UYTo*;{ÆWçûŽß3ƒw4¿ù…b &»±¼ÀgQ}}‘Þ^ò›=ÇÒ³H_nh÷wjù½¦E{~UÍž¾ÂüsGó›|(B±Ô›#é‹Ê¢z})}Z4Ï›ÏNÿl*›ÇžÙ™Þïh~óÅ@(výÍ‘Ôëôi‘çu?OÏ"½S7Ò§ÅøïµÒü3Gó›t(B±×[¸ü_{}8½ª¦7•‡Ù“Ímp0ÙT³çpú´è†ùgþŽæ7ÿèP „bW_ºöêpU-Ò›ª{zS_Ê7Ÿ“ÅŠç÷ÏLmãüy¿£ùÍ?:¡ØËº‡¯×ÕôMUÒ§Å´¹óÙÞ9uó¯Þížô,ÒÛšötçŸû;šßüãC1Š]yy°ªéYôŸ7u5½*;ûçL¦vö4g¦Vœ3‹¹¿£ùÍ?8¡ØúmüE¯®.64áB¯^¤·•‡½ýÍž,ú3L{zóÏüÍoþÑ¡Å.?ßùÅõzž>-6?¯ÍéY¤wêÊ‹ô,ÒW®Uf¨‡+Ì?÷w4¿ù‡b »ôìÃ¥=‹êëõ|ÙWúôEó¼S›÷oýy4Ô`éx•ùçþŽæ7ÿøP „bj.ÿYt÷w>»/}QY¬þ½²¹÷ÙÎ sGó›p(B±‹O?\¼³¨¾X¤/鋪 é!šýóóÙvOÕbOsN§ºóïŸõ;~Êó›?‹a¡Å.?Çõ™x =‘Ç*Á © ÜŒZ8,=æàåXû0]æØ‹®Uàa¬y’(z5;°@ö8‚®$Ð^öÙ'.%ð•Ü1äBëb›>q舵ýçÈúÁ ‰ç³s¾É¡Ýôÿ¨ö‰ ùªñ ;W {x(àì„Ø.XÑiÚ‰¬ê>SŠ xS$É# ‡víw&»H‘öÿ‰dëü gš?.€éE²kÕf“KwÍ„Àº€ê‡AÛ*åQ`]@< Ì x[<–¶žtF`à'õSŠnÂ.qœùyš­P<  ·H†|ÝøÜñ[K‘$ñD@ÀrÏX„€€ëñ¿Üò 
B@>ZaôåNDÝeŸÝxör÷`¹ì‘ÄßW;?‚uYúy¥k½<‚2ÇAнÝŽæHH%x;»á;"ž(›3u;~50]šuhŸpSôí8"7à ¼…÷ð îFˆÍQ€°4bÙÏ>IEND®B`‚cargo-0.8.0/src/doc/images/forkme.png000066400000000000000000000111651264656333200174220ustar00rootroot00000000000000‰PNG  IHDR••Q¢¸O·¸ECK¦Œ«læ +Z­B ¤JÓ¡¬€„@%hÐÄjjí§£S."¥º*Ô²ßæÎ ß®P¬þ%–ÉRËä"RªðnuF¨±µ6·OhxlÞü‰¥¤2.Žt¸ˆDg$UóŽÍÓªx†ÜþJó2–’ʸXè±âÕõ´¼ºÇE¤lg…ïœÿœÞrsÅRRŸX¾P -¯íñ‘Ž?T+XÚæôÜuïJ*ÓÅÂ!t\› û‡—|bÒ$q‚:BϨ¬iÓ R>F댅w%•ùËé­àðÞƒ"ÒæŠ¾¬H¶ •ê(:ÁXðÚ†n>Òéœ2>±”Tæ‹Uר­Õ Þ±_·P:™:ôr…h¥(ÿgÃèÄ"ùÃ5Æ&–’Ê|±bU-»û&^H(!‘UÃ/ÞþQ.Tbb5·ðÕê¦)¾nlb)©Ì+kÔú">ˆ6~V¨—ÉO%‚Ë›'úøÓ¯ø} _×O+}Ý€ÍÂ[aW…J*ÓÅÒÎ µ"rk÷„FÆ^—PB&`ñÑåõýôó/ëK!Öói¥G¬h^‡¸B4¶ •TæO,."6¸ˆÄûFVžÊ÷+{gÑdKnDá•™Ù~ÌL=<ó˜ù=33333{c¶wf{göÒ¸5îÌþ'×ú¢úôœ®hÔ¥…úFäB— ÓõEfêdêhP¯zí›—€úó_þæ`y´ÊþïÞ;l*¢xo€ªXD,ýn<õ9€5-JY„2 Òç7%mlïì7¿ýÃŒïïzχª¢‹´üÌç¼dvíÆ³š{…U°hã0šL¯pr”õ“ŸýröˆG¯g%°ö;óÝ’ΰ *õ Ÿû‚WÌ.\~JSÄ ¨:µ{ßm¦`îIÅ{e” òJ)ïK_ùÆ"Xfi˜ðªS ú­Nœ¾’"iCK' ê£¼#D>ý™/¤ÆZTìòr)ïʵ§–CUµ@¤Wˆò¾nÓ.‹á>ÄQv…¤%¨ŽŸºd@½™¨Dé»7ÍO½„÷Ù;ßýV6÷¢U5Xù"bõô ¨Ú§è¾è¥¯¡ÎJ-íÅšê5)RQ„ IµÔå¥þýŸÿ-AÇç£'/¬ V¦•ã áöù/|%«¹x¨:D,"Õ„ÎAå’‚À:zâüPD©ç¤¨Å÷o§‚}€j” }B)ï̼sBûÔÙk±Õ<µÿð]³¿ôµ‘ŒÍäÀò‚}&AÅîO5ÖýŽ5ûS*àKõËÔvVQug‘¢‰VgRŠn9LPu !’xT¦ •`¹¬Pì‡á7ù4è5–ઋˆ%Sꬔ¢Z:U/ë)‡tÈhr¬d¢iO@ñ}ã–=³{†ÔÈq1j±6¢c%5;~òRUÄZ·I)úBS: êÓ+ä”E2Bd•zìäE/Ò(=üs,ÿh†AÔ’ Aº *#Ól*Hѱo+ ê5èÇŽ:>Ñyö#3¤hõRR^ŠPç–ïÿäçÉu­êæ³/y ƒ`VÒ¡x¨ZÁ’‰òNÁ\Hƒ¶¼0¥âýQëX¤Ã{Jõ©pŠ@JêD¸edº%bTýæ±$DÖîD,j,Û ®_(¢Ù#w!7Ôö½W(SÜf:D¬€ª,޲Óìeºµ»5Ö«—)îK@©ˆ×;ÿÝòP_f)E“ [LAªN½ÂC GOÁ»¡F•†ÅÔ‚€Ò;¢hª³>H TRÜ'ÅßEЦÎÚ°eoXU/å:Üf"«Àb F)ïŸú<ŸÓ©š»$ŠšnU88Qðnà$4rCk* ¨:€åîÄOX·³4rLE¯ˆˆT¬‘Êž‹µ:°™ú½Â€ª,s'®>qðÈÝD)¢•€ƒEmä€Uƒ¥â]¾]í¦ U°Üx¬¼Á,šÃ¿ùÝ œå`I@E»RZœRÀË·ëÔ¹kIi7 ¨:€%wb°–Ãu(,ZŠÞ @i’ôïù`¬ºâŽ€LAZv…U'°ÜQ2,Þj%ƒê˜Ej¤æÒ`X *EwH…U³Ü`îÄÕGä½gèc3´t±Á³¢iS€ª9b™;1»ÂlÏR*t IÕ€f1¥è5µÆbl†91ì•:™‚T­`܉³u–Ôw¤Næ ¼ûô賟ûbÚ:|Ö _KñÎß‚!S=NBT­`™;qõÂ+ן~oÔz è'¥™÷§.ªï?h<¡£A¿®¦ U+XîNÌÐ_il†ˆåu•ÏañùÀá;†Ù÷ç¿T`Mí*E7MTýLAäN\3=JÑniPQë¬ñ )lÕÑJË}»Zü±ª~½Bw'.€µ1=ð»< R[-ÁôñO}Žqe>S‡MŽV¬]{o“o×<ï‚*ÀZîN¼»ä$“Rá0½@ÅPÑ ØH2ÿ˜ÒÊñµuçáÙ3Ÿ;_¦ ö%Àrwâ:‹¢ ¤BÉø2pÑGÔNpòd½BóíjŠX  °ÌØz…kŃª„ TšË2A”Xmc3òí¤ƒwC@ÕzÄÞ݉k\úä.Xô}dÆÁÊÍdU‚E¯ß.Æfº¹Í4@Ë݉é–FfHOãq_WSZ¥ÆâÝ¢×jMA>Mn¾@  
êÔ+4wb„Él+ǵ,-ÁÄÙAÆdˆdZ663ÉmF¾]ͦ USÜf h‡ puRãÊ|œf&ªïŽŽÅñ¯vS€ªë2)ŒÍ,R€¢ˆG…G×ÒÁUvŽ´~&«ïn ²iëþ†« T–»×\àmœû8Wèm‡îPÔR[§~lÆ"–|»:™‚T­`¹;1Ÿ mí ?;^ÿ~„(’QèOJ-EwmB7@½B¹טÙz‘®´' €‰ï‚íàÂÝy¡´àê'ß®N¦ Uû Ÿ»ç{…>ŠÌ¡Ô‡ït 8M›GS¤Ô`Ù#`5®~{HÑOm¿@ T–»—À’òÎA +ÜŠwy:¨­“QàGeR#]|»AZF“;A`™;qÍ™BÀB E»bççGíù}EõÝ!«põ“)¾]ÜüÚ±:A`™;±Šw+ç?ŠCß>÷¿Ê+ñ7%þŸþBu1ï¦ K{Âú »Â.P…ònîÄE©AQ Hî (ŽáËøÖ×w¾÷#†ÿªz‡JÑ­¦  Šˆ5r'®RÞ‰@®¾{³R#Z– H¥Â¢®µm×àÛÕn Puº@À݉wÔ]½ku“"”[Éy†%Ûîÿ÷«Ò´vìN¾]Ïy³)H'¨"b™;qæ|Jô¢æ³(èY A•Ui⦠LâÛÕ±:A`™;1õÏj¼Þ%„&0ŸŽä å`)Mâ˜L*¬ŽXòí¤¥ÆêU€åîÄ㇛›$e§§´ç@±Æ`]½þtÕ\ãˆÅÊ‚EЦ9Îi‹)H'¨,s'^1bz…üw”úäCÊQ0¾ó.ÈhP©™a4>£•mé°‘ E7úu‚*z…îN\Ñ+”5$¹>Û¨ÌzÔá?ã;Bêh.«Î—T¾]í½Â€ªKÄ2wb™‚d£•ÅrKnÞ•ò¸T€éR%ïcoÒbK‡M`Õ±FSNPXîNœl# ‡)–¤-t ˜ËRšTÔ]’T˜èWU6ß®¦ˆÕ ªË܉óÑ °ìˆ= ŒÚ¸ewî÷>Åm«À`ªËF—޳²½Bùv5E¬~PXr'¦WXº°,å÷)£ÁÃwn«à}tÛj½­ã<¤elæñOÚÞ°+ìUïîN\q³*b'‰Rÿ …•‘ß>Ù‘FI…U+èa*E·\„€tŠXæNì—4eÁ’?–v†D&–%~ç…Žá`U ¤òíj0é U€åîĤŸÂ<©PõZÕ8å±TwyšTÕÇÀ¸< {%àšÒ+ 0:ƒåîÄ9ïQ‹T¨K.U” $–ÀR!?®¯Ê§ud rë1 ᘺX}Ä (úƒeîÄŒÁRãÙ5+¥ÂKWŸÊï£ú*sh5£ºkÉ·ëäàÛU]ckdlÆÝ‰kï+$b©€§†K ?ùé/Hƒ´x\u‰¡—Rô§àÛUg 0¬¡ˆåîÄ£“Ð÷UÀ‰ÆÞXì@¦B[X'ß&ß®Ú „5x€›‚Ô€¥IÑŒ@ªÓ;­” ë§v¾]+] °}¬'¬€`Mö ÝxG ,ŽvŽj(ê+RŸô,R Z=#ca&ë5¯{kQnp|»˜ºà¼#+ V°6Árwbús5÷ŒN,éYƒØDé>¢ÖÒéin Û²í`Ü@ß’¿ ß.™‚œLóÞ´¶¥µŸ5üP,w'®›Ñ-ö:œJ„P¾S´! 
kÍAÜ„:+}ßJÄ,þnþþukü¡XîNL¯°,º!b±¼Ö2 ˜Ç*hXå °|<ýÍ»ÒÚ<'&LAäN\˜•ò.°r@¹Û¨d r·|»R´;ø¯û?àÁGÙÆŸ§ n–MAË#–‰£¡~ïn3úS Ò®S"ÿÛÜÿ¹3öåîÄ5`yñî@©Ör·™©P±äÛÅtkò—øg<è9Ë܉3Ê{¾¥ÃºL#õ}ëÞq¤ªØýå{…o×ùKO‰<`¹;1Õ‚¥›)<õùU'S¡òâ Åœ>”PÞݸ6byñ®9 -©ïÿí…j1±º»»ë{ÖÝ_¶°…M()—ÛúƒþšNÃ$G³ذ% ¢)4±ÒY¤Ú;Ù!¯Ðq¯‡ú¢ ®N¼þQÆH䥾r9©X@ ðÚÉeg( b%uâÕÂE™ ±|øg… ’ bù:±MMŽ©ˆ‚X© < buâM¤Š…, ‚T#–¯#„‰eub‰©ˆ‚X©â`åëÄaçXH…X¾N¬T$RÅ€X®NŒTq VR'^ÝGª˜6ãêÄH{¬¤N¼ºTqð¬ÐêÄH{¬´N¼ŽT1 VV'FªËÕ‰‘*Äòub=+Dª ˆ‚¸:1R…Á+«#U ˆ•Ö‰·*Ärub¤Š±¬Nì ˆ‚¸:qœTÀË׉‰‘ Øcù:±‚g­¥Ä:ÔëÅÕ‰M¬z©±¶Ä¾zíϾN¬µtª¥ÄZbGb=º:qµT€X³bQ¬Š Í ½óubÝn`0¡±XbZÌ›\Ëc“Ë7Ô‰¡XÃbLLš\³b^ÉÇKêÄP+V¿#&ׄ 6©Û gÔ‰¡V¬>'×°yCÑcšŠÕ)ºM®^Ño ý ÔˆÕaruyÁŒjÄÊä2Á ÷f€²X¹0¨Ëä^2Á`A€\ R[àÓ¥¨xRRR   À+¾ŠÜŠg‚IEND®B`‚cargo-0.8.0/src/doc/images/noise.png000066400000000000000000000061661264656333200172610ustar00rootroot00000000000000‰PNG  IHDRddÚë]ß =IDATxÝ\ƒ³É½±sbÛ^k+Ö*f§×¶mÛF´æ«µÙÿÒ§NšµÞ›÷¾­©Ì73}ûZÝ3/‘PÈ:æà<‚¢ûÁÉ«LȉºÞŽ„³ð*! >D!¼þ×>˜€GpŠàA2C¢`OÌAÁÃ-Ä7§§Ç£‰c CæqŠ,í°Å+¤þ ÂjÐ?áÒ|‰;˜8 ¯ù¯¤?&á4V  8ÀŽÒ“°÷Úcåhe–“>/ø.Ê9<ÄÄcì 2Š•Ñ,™³F­it˜­X/v "O'¹ÙE¼ÿÎue/$¥¤À8©g{-…]H¿¯WêsGIÿ,š œ„Äœ^$À‘Øîtx6¢"Ì$1ð1>ÄÇâ i<Ü»ÍAFw\DŒz†Ýó ݬù¿Þ‹„2.r6ê“qµ4tvYW{H‹ KË{œÅg[1z |R±W˜´Ã^xcµ` “àL'ð½æmÑm7gÚçA˜¶Ñ„Ï‘fY1 _¨±ŒÀ. a&8º.“ ãd’ï‚»$õ9öÇ‚^6´ ŸŠÁã‘°çb FÙÐaÈ[‘ñ&¶Ø%ÝR:Õ¯EØ¡û áQCHôä¬I¬&7Kä¾HèÌë[ñ1ŽD°ƒ£EºAO=AÌFxvÁL<‡à ÞôØixò_žïÇL(áÚȲ–YÝ grÖƒà…L†-v’îÆ”ÐYž —Wa<ªišû¾µ½Â‡,ÆÂcJ…AKa2Ô[TD™-B d ¿¾2'Ùo¸`†FgV–×Åö9¶­“…aBþSñŒUAÌ8œ7!ÛP*š0A®ØÖc/éçs%×liÃÁß˨½­¨O‘qîmÀ‰y23\e!,¬¶¦­ø¹,/aà^ëM:@ÕÍ: æ`“G2 Ÿ0p–ÝŒrPß}5’D|$† ¥Ó|ü•¢“ÌôËÈ.r}Qp%§SSY•ZN"-ÞMÜ‚‚#õô;¹Äp …œÑnSl ‘ÑÙOR%Àç1¦.ÅðW%üŽòuûJ³^¨®®šHd/d¯æ³èjÛBÆ$WE”àþÖ·v\¦oW³ë¼&ºŸ¿ËÀ3Çûq#â ÜaµRÌ¢jšÙÁl—ôN‚ÜPxëHh-5( Mø.ã\§Z1Æ^.p7Ÿ–ZžË- ±‚»‹ ué-5Ht§†º«–âýÓ.Z,Ä0N\Ž‚Ë°ÑÑÐÓA™}µÅ*ÑYÄCÄKeÜOÙÍ.èª ½c1P˜æº5óP°·bÖHâ·°½M0U=Ö ×‚9ÛÍÂMîVoÁ`4a7§à)ÜbWlÙ­@Ö‘«p¥5,ãä‘ÒdclsÇßÚ|Ññ‚-u -ËÎb@Ú—xÄÅï~tàÕ9(èŽudû,.­w:ù=Ľ_UêùQü-ö’×Pp´Úýø†Ó_k¡ß×(”Ì éPéàjt·fÙß:¬?6믻O«–²¡±èû˜Åçøä=âÙâD. 
éÝ.½{!Љô…²V³6Ó$fû!÷ÏÊý~<ä„ÞføIMç@Vn»Šs6Y5ÉEô!‚nÖ$(æªífmƒ«@£oÔÕ³lyŽ*@-/ƒ¢ -$èDt­ˆWð4ºcé/@0v .D¨íË{EÜý‘ œ>ÂzKlØfx»âB‰ù¡õȃ –LjÍnc÷ªV›Õ¢T»‰·%ä>ÙæÕO/db_–tÄÏ7l¥ÿ;Ô`[Y*G-Û 5ÄM¸Þ¢kj;O÷ö ÝîC1ð†ˆfy­Ñq#\ e§þ8fË#.¤¼&7†@“mõ¬gÏ3¤Ð»ˆámξäýÝÀ­Hè^Ë·K¨õ~òÚã2d Eb4) ø¤`BµÞóØ&7#{TÓ¦Šþ3N„U‰TQàSü½ÉBûEÁmHT®¿Ú  FÙàŸ×d=˜îÅí¥êj*]¿‘k:§&i¹0D¥3_Œ#«þÏ´Æ=î¶çÜí•hÙ…¤îÌŽˆ÷yý[ Áè)Ân&ò¶%Ög5ðZ¿;±/ÞãõÏ3Èt¼ŠVØo³ áBÕï3I{ã%Ž6V¿¢mÙW”Å6»ÌÖ<^îs¢Å;Ø›|o8[Pƒ…Í=:™çh˜-Q$ωåøùÊ6ã á#N6Ëm§ŸU?±€]”ý6‚‰yåóì¬åÀf®¹ÞVbR¶v«eìAÎ(—s:Á‰d¾¿-nowl5U7ô„m§«JìvEýE¬e>=ŒBo1ÍçfÑþ= Íþ¦¬Á6~$!¥P¹{ò7ßxÞß:X…‚çìߎ×ò—øä^©baêÿP'ì½hìgcîÃåÜ èÜh¤Eã}²ñoÏîýío|\µtW5$öPÉoøo4ÿ§ÄٮƄ¿Ú ó \C$î9@v=b]I;ïàQêàäÓ ümS/lþÁ"/´iøH‰Z>ʯáqÔð®¶–úðݤåý]ö®”ÚðGQu,CëxÁ ^½Â‰6õ²ÇCa»TDùNüþ‡½Áw´pîa»6YÛ c|ˆ™Ø*¡ŠÄºK3ˆ†Ëâ‰ø÷p°’˜ºh¶Ò»ËªˆMöÀgwVXÇ>80D8V£“ÜëI;xâh@ìZãlùĬ‡–IÔ¡É9Èyní¤ÿl‡¯íØ3ZïÃ$«=ª1FHßÖo‰¥¯…cZŸ¢€^õgª_‘­óÅÈ8Í~W´“x0©xjQ#Z¥;‚°Wbaî@± …0}Á¹ˆ¹™#c”¨ mÒÃe¸U1ù•©ˆôÂ9⨗ñ7blí ÛܸԷGµìTÕ  øç¯îÛÆ;ú¨a¶–]®¨çOïjø£åZ6ljx[¿û¡¶>;ï9’2\È\"oÃ]åÕ¼;—˜Ûë³µ\­ÊƱ©Úî`¬;„é8 O “ú;ÎbËà˜åôbkÕ“ˆá{Ö°„sÒÒ¹…ïˆÜ)>Î@`Õ´ËÖŽ¶ü*á*„4¸»ü£9cGÅÑîAÔò7Q5|YÍõ¹jkÓƒæÞSŽP\PHÄg qÃ3%ZëwlQËž{ )<2þÿ¡HÔðÿlÔ²9õü‰K +ÉZü¾†Ö%êÉõ5$zþÀ¥†÷%j¢v¾âgÍ—Ðq’P5FÉ ÉÚðÿ©È ±pqŽÅ°IEND®B`‚cargo-0.8.0/src/doc/images/org-level-acl.png000066400000000000000000002254341264656333200205760ustar00rootroot00000000000000‰PNG  IHDRêBÎjÒ„*ãIDATxìÝ9”Ùb€aç™óãS¾zÒLs§ºQM#=Óšï;5;÷27úûö­ÈΠ߀|È÷ ßò=È÷€|ò= ßï@¾ä{ïùä{@¾ùï?ùä“gÏž]^^^œ™0¤0°0¼ ¾:ùþÓO?M’äâì…A†¡fðUÈ÷gßŸÀ[Ÿïá4wŠStxûó}8VþâN ÎàíÎ÷ùgÕž¹0à Þî|qe ß¿uùä{ùù^¾ù^¾@¾—ï@¾ßívÏÿ\ÀËŸt¯|€|_Ћ®þѱ+îï'Ü+߀Ý÷O×Ož~xãµÛ¾|Ù)÷Ê÷… ß_^„Èþ¿ó·÷q<ý•¿úÂuÿ—ÇázôÞƒ‹g!ÄŸ|¯|Ÿ®ó—«½‰äùžù~»Ý†ómž|ð8ô÷{?úµoýÄ׿pÝûñ¯…ëá;ëË/·O·§Þ+ßïg•Òç*ÓÄD|ÏÀÍì¾›åCpÿ»ŸþÆ®oýÔ×ÃõèÝxý ÷Ê÷µÒ窳ÄD®å{äû %ø°ƒ>$øÓï•ïå{Ãä{ùþ¼È÷†È÷ò}š|fŸ¦Ù9HY¹6ÿ¾ *M÷és_¥‰¼Áá¥û«UµÏŠä{ù~·œvšµr)R®Ö;ýÉzŸå%Ëé`0|n0Z&i¤ëQ·U-¿xú,ÉĬßiÕª•ÃO¨Tk­NºXg9«éh0 {ÍÒV?ü«ð¡ÃÅæÚÀœ.§ÃV£FñB¹\©Õ›½ádøV³^»Q‰¾…rb£Ý_lŠwçóŸÈIÃ+n¿™‡a”K±r¥ÙéÏãaœ¾ºöëùàùêªVž«Vkõvo¸xùe¥›Õâ…åz—w.ß_Eöðçl·ÛÝåîÉïÿòøH‚øÎúâÙÅöéöô{s^ïõåûݰ]+SnçYlÖ¹ªðù>[ªÑ-Õir˜V§­Z¹t\¥>\ì²+Içèíé.p'ÍJéˆzgtc»M×ýfµtTk0Ï ;ó‰œ0¼‚ö“n£›Å «+HÆzéÕÖ(Ùϯn/wô{¸Ë»ï_´ñ®¹Bmýéƒç;èïýø×ò 
þÞO~-\Þp¹/Ý|oî:h÷Av¢ý²])QëEñzÞ­]ÅýA?ÊÿñáéézT.Õ_$ÙKI÷h…îÌ¢ »žtJETÚ«4Ÿ¼W­bC¬v¦Yawd"ŇWÜ:¬ŽBÊÍy’å_]aÊê+?åðÖn’Ýq`÷ýÓõ“§~ñ {çCß|ûáñôëü`»yúä;O½7wí¶¯q÷ý¦SÉeáZ³Óíu;ÍjîГá:k\H#•irÓGTín0è÷º­F¾ò6?«ÒûA=¼i9—£ËAøKož$ïv>o7ZA³–›F¸»,2iÇC,W[Þ0è÷šµÊÉ9ûœ'R|xÅíò?¨5ZÝ~?üoÖó­½µÊb_fuuóïW®Ôjõj¥|ýÍ•;ï@¾¿|~Bý?üÎßÞÿÅqØ)ÍõËõ¢¶ß”à§¿”¿ñÔ{Cô×£÷„-ü»íkÈ÷ËA=ÎÁíøDòý´ßŒ·Jò5Rm&³år1›-÷/ÞbÑ‹2ngœäÎG¶le²É"ɬš;5%’L«¥H{8?œF²ž·ãÁÖú‹ƒ„÷¿ÒèÏÒ,²[ŒêÑöí^ñþ{î)<¼âVÃF´fê½UüVi²ìÖË7ÿbGñÕ•_ÀÍÉrs5¡Õôó•U.ßý|òýv» gÔ„ò¡Ýßûѯ…mò×^¹þ]ùן~o8o'\ádüð`ÛíÓíÉù~wÖÖ*»Æ¢XH›ËôÆÀÚ¼îhøÝ´}õŠú Í®µn•oÞÞ¾ŸÕò§¦˜v¢ú?Xî_ý{åÖ:{)™w'±É®³—OOÛç8‘‚Ã+nÕ,¨t6…vè×çñ\‹­®e#÷»9É a÷=¼u»ïÆ÷øˆ›è:¾ƒþÅߟ~oþ¿>z÷õì¾ßÄç´tçI=ã•ñæúÀZ¿Ú™çÒ 7çDïv‹|Ÿ\ý× 1XÞ<áqåð  Uz]õnÝ”›;•øä–âÎ"§çûø§5ñØn?¡¯"«k3n~Öh]/]6Þž|òýGQ¾?ƒ+Ê÷a÷ýÉù~߯E»¤w…^yUùãÀÚ\Ýtóf>za8ZlÒ›O‡¿åîûÕ°yl‘¤W½¦'³N)>sæú¼¿’fEÝ¥‰œœï£ÿA­¿/¼+íiv¥ÈêŠ?«>L³Xünw%߀|/ßG ¸ÚeG¤û$IößûÓµI´Ú9rûñ÷ÝmÖ«q7„Ø[æûùÁ!,•Ö$;jÑ‹Ço¿RiôGÓÕæÕ™w·˜ôûƒô‡ÓÕ™O¤ØðŠÏ4þ¹Âp•µ4«ú. Н®äðøæxÝ úfäû³€|/߯šùãæ‹‹k­;/’‡W‹ÙhÐïvÚÍF½Z‰Rómó}:nE'Á7[­æMZÍjéJõjÌI¿^º^¹Ro¶û£Ér}}òu*¥#ÊݹO¤ÀðŠÏ4=\TåÑ:ÍŽÚÏ{‡ÓHН®Ü3ËýñOªÉ÷çù^¾Ïhz¾ïÆ­7'n¨¿§æûh va¹£]ÒUû•ïS©wG³]šÿòâ$}Ö)0¼â3UíÈ¢ˆëªzáÕ•L£ÏÚøÅùþ| ßË÷'t^üa³‘tÕyUP.—oŸï{·®ÞåÎ>f1îÕ*¥W©ô练=éÕâq–)0¼â3ßá„|_dumºÕè³äû·ò½|˜cûËý›É÷ûa½”S®Õ›íN·?Mf‹õ.‰žbzR¾¯´;v!­îh‘]g·šÝf½Z.ݤ:ýl€ëi¿Õºñ#:ƒyzî)0¼â3 ‹ê„|¿+¾º¢þòý[ ùÞîûPH“7‘ïÓe?îöõálµ¿á1¤§çûZo‘½Fé~³šÝ|¯TïÙO¤Àð"ÅŽ˜*“Ý«–PïàåõÁþöù¾2ÞdÇlÆùþü ßË÷ù^Ü®ŽÔßa£üy€ï/öküš 6ÝLñ|̹9J³£Òd—¼´¿ŠÛIôﮓ¬&õr)ßš‹;ÿ‰ä‡wâ¢jŽ×Å_\íÌŠÿp(ÿèÚöäX¿_ê%ùþ®@¾—¬Üæ|‹|_iOoŽÑ‹Fé–ù~5l–Š>Â4=<ɧõ2øî:å«7?r{2ëÄ'½wþyù>¨ ŽMb7©”®4G«¢ù>ÿS¥ qä›v¹$ߟ3ä{ù>²™´Kz‹ë[ëjpÖË­uvË|_íγ,úõÒmó}ºF»ÉûËì&«ÃW–«ôêÜžÏØÔ^íOÀç?‘"Ã+h=j–òcËÉ= ·¾H¿ôÑL«aãð³Fë¬ÀËäûs€|/ß«féP=¸Í~9ªDT¬ùDÛX¦YÞrÔ.ź³äÈCJ§IKQü/÷ç»,o3^Ué|þ¢I«rus½¿Ë®•ŽÛÕSϾ?ã‰^1é"žDs™^ûMôâIL‹?Y!p¼€7iîÛŠ>H¾?[È÷ò}d5j–"•þt¹ÿìq§“~«©L6_.°î¦ø ZÓÕg¯L“ålÔª•K9Íá2‹ìº•ÃÇßv¦‹`¹Û_í[/ÅšýÉ.Í^Jw³a'ú˜øW ’y/ds²Œ¦“¬ç½fŠ;ÓMVÜùO¤Øð Zôë¹ç/÷W³Ø/ÆÝxÕ鮸ƒ‘s ˆ”ÛƒÉj½^­V‹é¨]¿š|öïåûÈþêñ+år¹”W,¾|`]·¯yûJ¥R.¶³ï×Jyíƒè»4J9•jP¹îgYd?¨çÇXk6[ÍF½ô„çÖžÿD ¯˜MÕÕu «=á¶ðêŠ?«ùþü ßË÷‘dÔzu­w§é­ë>:¨ýzÕöx·~ñ_œë²·J±øCãCxŽªu&i–“Ìë¥bÊ­åmãýùO¤ÈðŠÙôêåRÍøgBÅWW´€Ûµãëk8×ã§4ÜY 
ß_Æùþ¶µý¦b¾¿xvqr¾,ÇÝÊM™·ÖzyKlÑ«ßxZ},]O›ÕÒõ*ál}Ï´[A™¾Ç¬ß|eVÞ¯¦Ç~Q®ÂgÝd¿ì6*¯hͽÉÉå÷Ì'RhxŤóaþW/âÿõóM–SxuEvËI»^½æ3Úƒeò|,ÕƒÝ÷» ¸›ù~»Ý†Ý÷O>x|ÿ—Ç÷~ük¡˜ßî:âoqÝûɯ…ëá;ë°û~ût{z¾%‹é¸ßítºýá ×éö£Ér“d¯Éz>éuÚÍçZ­v§?/V»,¶YÎF£Ñx<žÎ›$Íbi²žÏf‹`¹\ovñŽÞdÐm7/„ Ÿ5š/×i‘¯`³œŒv«Q¯×­f£V­ÖÍv4Ûì³ÓÿDНýf6„9ÔkÏÇ4Ã,†ãÅúÍ$ô}²^¯^Ø, ý¢wð[ÓìÿÈ÷ò=°5ãC™î.pxNøóv÷èý‡á¤š/u…¶þèýëüàþ/£ƒwâpÿñoþýáÃù:º½Øµ}¼}9Hùþ«ŠhO}Pë%Şܙî2àŽäûçvAØäþe¯p*ýåþr»yÎÍ;÷óùþÅúÐî?úîGÉm>â°ÝÈ÷¥òh]k=n—®TgIÜq€Ý÷ÁöK_Û§ÛÝÅîÉwÂÁ;Çv߇}÷¡ÝoŸloñÝ÷¤óZüDßÑb¿ ™ ZÑKZã ¸“ùþWéÿ£‹§> »ïäûpNØG¿ ©ÿ¶ì¾‡Y·VŠ•«õV'h7êÕÒÕçû ïåû7 v½Z©˜òp)Þ€|/ßÃ÷É~:h—KÇTêíézŸ§“ï‹Ë€4YLGÝv³^«¾T«5šíþp²Ü÷/€|Y(ß_<»ïø~ï·ÛmØ}ÿäƒÇ÷y|ïÇ¿ö<ÖÇ×½ŸüZ¸¾³»ï·O·òý+ ß_^^Þ­|œ½ÝïŸ={vúá9Ïÿ¼Ý=zÿa8!ç¦kûxûòŧ ÎÞnÈ÷Ÿ|òÉÅévAØ\ä:¡ÝG€3x»ó}$ÉkØ}l]Áé? 5€¯B¾ÿôÓOウ ÃP3xËó}|ŠN8V>~’íYC sfÎ ßò=È÷€|È÷ ßò=È÷€|ò= ߀|È÷€|ò= ߀|È÷ ßò= ߟþïwŸŒÿâ?þäGÞ©ÿпþÖÞâr¹Ââ K(,¤°œ2ä{àDéÅúÛøÃ¯«áº\a9…E• ß§xíÞ¥àg·€|<{öìMÔ[—+,­ìvï€Çøá‡¯·Ûº\aQ…¥•Ýò=ðàÁƒ7‘ï]ò}XZÙí ß~OÖ€|ȬXWÈ÷€ÌŠu ß2+EYWÈ÷€ÌŠu ß2+Öò= ³b]È÷€Ì Ö€|ȬXWò= ³‚u ß2+Öò= ³b]È÷€ÌŠu€|ȬXWò= ³‚u ß2+Ö€|Ȭ`]È÷€ÌŠu€|2+XWò= ³b] ß2+Ö€|_Ôn·»ÿþèáöð&œ½?þøwÞ¹wïÞÿ±w¿‰[iÜÀÿ(KÓ©?õV\½¿Êmr‡”Fy%.˜Eª"ænŠvQËD¢ÊEJZÕݬ·K´ ëeÄzkB¶Þ7ÖÂ3v·ç}áàc'GNBŸ¦Q°m~<õÅ7‡çüဟð;l!èfüòË/?ýô¼Ã†aÀ-âÌÂFØ`ù pGôèv»ÄÞÑ$hY‹cV„u…B!„BÁ0¾§Ùýœ >ñgö:z†é;̱ñë°o÷µÐ¿ÿýïo¿ýVdÁØN(.¶\Ù·±°•úùçŸ_¿~MÓÒ`0 /}ˆoÛ¶èñç?ÿù‡~ ¦ijšV©TD˲®÷Áö:×Ö3lrk`.k̪7òéÄjäÂêêÚFv»Öê’_›Õ’kcµæãû»!„B!„0¾]æ9ñc+ñ•+ESÙ’ÒsȤc+#Eżµ×B0ÍYyþü9¤¥íÁOø™ÍI…#X4^ª¶Ém¢NKÈ 7æ2d¦ù)H–ôî[åþþ¾è¾BFÆm‚B½ÎéK±Ñ¢In ,ÂÛ³þÏÿù?…‘h4JæféµDDðÙ¨éáDYƒþ…{’¡ÕŸ°†djP T¸6Àøž…B!„BãûäÊ5Å$Ý&wZŠ_ÎCm55Ú’,©7x-Ø@c<¯ù_ÿúaÁØ{a ÿÇq­ÒŽe+ö-*×$ q¼ÏЊälpø²NÇHqüm2½›D:%ŸŒH’ám¹‰øÞ ·á-ˆY5M&îݻǔ¿¡.†„+…ö4‹ð#–¨ˆ< T3?ÝÎÊîCV™C0¾G!„B!t—ã{ºå&âûì®(±Är)›tÅN±’Aî˜v9aü+·íK_8ˆ•½ ˆð —úôéÿøGY–[­”(<$³ÀvØ c`$Œ‡‡p,œ#¾e¥Š»¬ÅR!ëNS céã{xǃÁ™ŸÓ“ãã#Ç'§gà$K™àŸžžB™Á¤{2Òl6EûÛßÈäøðzÝ«9j¥\Ú*—w3´£™óÇS¥rEuÈ .Bp¢Mªðå G.{Ìúé§Ÿ .Ÿþ9añ¼}š¢_!”Ö ·€,¾™Ÿî‰Ðøgß#„B!„º6Œï“~‰‘&ei~_í‘»Ž¾äxI!èM€®8ã&$„~÷Ýw´mNÀ`#a<å…ÇK*ñrôí•,ÛËßûOyÖö‹ŸÄï»Å=ùN œþL–Î?þu"#ÿùÏDô“‘¿üå/ð®ÍÀ«½Kë³M–_P‚cù«Ì¶ n´^áRƬ’$}ôÑG_ýµã8t‘€H„IÜááp8$#–e …wß}$üSï#‰­¦Þžï°ô–´ÜòÍáe5WjU¶ÕÚrÅ÷ù&™È’8¶'ÖºCŒïgC!„B!„Ísâþ Û 
èÈíŸ*Úz[©VÎUëŠî]tÓ±M`;8†Ú¨J"ª¶Iü8ºR-¥ªÒ³ýíFU,—ጕjµ¡j¦ã½:¸8Zeüjb¹Ša†i³çÓªÒŕˢ¤h=âa=`Œ^‘ÝkW+’T©÷æZ¼ïetÚUiô‚$ÙÐ:&Y.ƒÁ>,X¥ð¿ÿýï8œ:<<¼2¾‡10ÂQpl`Ïq& øš…­–د•p}^µj›à¢0 ]…‚ç/IÕvÇ ˆïùK(¨Õ¸ßtço>¹?[|³å? ùúàkš6nÓD·ÀpÑãïÿ;é,Ë2l9>>&ü‚ë“ë§#+ÒøÎ*ÕjÏt ….Òx8ð: þ@Î S ݳ{M˜ZÛn«ÛÚɧëk#ëé-IÖ†„v5íHÞ Th[>ÒŽ´~ÿ|ÏÎzhºc}û|‡>Ÿø¨)O´`ÓÅF¹v¡©ÁãnKÊo$ÖGàÒµ£îì¨É;[ÙÄùÈÄFV”uxb­ÚÁ÷–.íä××VÇÖÖÙ­Yëß¹ºB!„B!„ñ½¹Ÿßëuºg*–*ª†÷ØdYòŽK0”eëÕLtÅ#ZTÂÒªlCizζéíG_RMb«ñ U"íz)3c²À XDt™Zèаþ¯ý+®J{öì4ÉùÓŸþÉ"DüL©*?ßs|âô6è•Ì•;ŽweŽd½ÝÈ^: šm›ŽZÎz®WÐþ_‚›îº{ð€©ÂO´à"\–˜^ L¢æÓöÉUͼ@…64r‰µ—ˆ„#çÂáð†H÷k[ëÂL¡u¹?>T ³ý¿øÿð96Û÷kžãÚV×"‚G8±3 yÖ“\M¬³W¼ äCÂl¡µ-ýöÕB!„B!lž‡ÚG'7É(w]Át[̬øJNÚ‰›£ÔŽ÷Ur%•¦ZÍH6Í|ë…€‘å¶9#µ•˜ï*‘F9µâ/VÑÀÅ~!¾g®Åù.^1æ?0%šËßC÷pø/] `ÌÂã{£1)¡XÑàû¼8jÕTKWüÜ¥g«„|¼~ýúÌÇañãûOž¾€-/•ý馸ãã3_pZ²DèW@¸Ò78äûï¿_X|Ïÿ‰÷Å•P„ìå¸e*× Š¿O[ßмþÑWÏÆ›ž<¤Û>~ú2°—(f…68 å_|ÂkÛ’¬÷-d(m„… ë- N-G„Ùâþ·0S8ß÷k—O7N¾^03j_Ûi‘‰£½uaöðH~|úAk[JˆÃ»QW!„B!„°÷½Ó“rthV§›uqe"ž-·{¦CœŽ"Ƨ[wM:ûžJzÏvlhÀ@g€NOëh™éáuÔ,Â6;ÕÝéæâEveÒ§9Q1œÑ¦ŽZLN¦Sçê3óP£ÓÑU16YÐTïèzÏ„íº+/ËIª ›FO2e—íßÇ‹»¢(Õ öZ<ï yGâ9Eî˜j¥8¹~¬n,góhâ ¨8ù) €¹ºð~§Ûa ŒämžÐ[Üé)ôÃf+ÇçÅS«ô“¥óņaÛ£‘Yº5[é0G LéõŸõüò ê?ùÆ“éÓätù'àCXS·‘=õí|ž ‰ïù>qöîË–uÓ÷Èiˆ9:´a²—£q»¢™¶Q§å:¹tt2zz¥˜bÎÀqƒâ+Bðò°HgÚžLŠðñŽ"\–˜Õ²,Ú ‡4Û¡íòƒX>!{(²¾‘Ý;µnÿò­-]fž°› Ò0eÿÒŽp$Šä?¤;¨H$IìYÁñ}ðuAâx:ÞWèòxk+¼öôÖöÎv6Â>ÁÚà6ÕB!„B!lžâ™\!ÇÈf’>Óäm1EÓëqëÕ㓤²Òq˜ø>šë°­ÇéÙsÕó=J–ŽÔ C¡ÙSFrÜO;V4‰[{2®`øä¡ôº®Mõ,}&¤º™Ó÷g·mÓwŒ¾øŽãÓù„ï]š-3O@+_lÏ5Œå[ºV–åýý}øý‡~èv»°p¨È‚-°öŽÇ×ëuî¥kA4U*ïN•ŠÙT|ÅEԮϋ«ViîIczª½›¢©~g¾âk8Nß?ú8þà¼YIfGŽï—»>t´5—‚P¯F£6›Í…Ä÷ÜŸ¸£MÆæz„Ñ(ÄhK4O|ŸQíéå¦÷çX‘ždò­)zŽ_‚SíÙá¡,ÊÏèj /¿ÌÐÙ÷åþ"¼³1+´³<~÷»ßÁÂݧ#ð << Q>¹9¿* ­æ÷袳Cq=äÚµ>Y[¶/¦ÝçYmZãá͈;‹·Õt]7’mêZñ}DÔ,†ZvÕý|Òú¸÷Ìèô^ׂç9¨m'fÄýÃ#÷ÖDmò,Ù/¤åþí©+„B!„Bß‹®€¬âéÿ{²:¡h(™¤a¥;¾ÏBFÏ¢h¬¨ÐÀê†ÿóL©6ó´³¢¨žÞ>§õŸøžKS3°Ò&*A)ÛôÒ±j/`ÝQ®wit,ÜÚ± e÷Ú#Ó!Ë&ÌBëhjé+Ž+·MrÇAsڢijeîcƒãÑ`œUÍU«ÓÓf*ä2§’GÖùJÈÇõfÝ·ž@«q·›Êɵ]ŽøÞ»åš¿S‹‹ï9>qúg$*‹%S™RY,ÄßÏqƒ:»¾ÓíøÅ³§ß¸ëqó»ÁÝõ˜>Ò{÷î Ðæž‚ÅæøÄ‡Õ=’e¹ d¹¥<»õíD˜iMsíø>œ®½¹ø~à^ŠvCÒ‰Û ñÄ÷ $%"!Á_{ßß!„B!„ñ=mÇL›°S|×&Áh”™Tíà‰í½bì"zo;ÄCÏ1]ÂYŽÝÓ!Égæ¡JºsÍøÞh']¡Û5èçïé»ÄÃ6zj£ZÊÑtn²fïÀøž¯ª¹j•ž6YRƒþE–GrzªØË§×Ã~¸=u…B!„B›ç(æ5ÇÐYó ã/õJ!‹Ã?Q³i@ 
vÛ6aÑiïÉ]F–b+~³àMœÌ/Í鄨šˆEãåË#í29OŸdèÆO¾R–{éZèu#Ps7Ï><Їø Ù7µ–Þ‘4M;’kR~cUp e»l2Ò{rßZÝV~IÌ¥®7vﵺ–eyãûPb§;XÃ…Ä÷>¯k#¿§i<ßä,Ûæžþ•a°½zgß#„B!„Âæ9Ä,Å/7®éÕ Ó>ó¥jÏ´ÇÔbòr÷yßGÏÿKZǶM­¾K³{š]Ngž‚xA혎m=MÌÁf6<2º)Y¬À@Øæ˜½F9ÇäVžpŠ=6)Ö•¶n¸¿ ’j:Žmðr¦.Ô áŒï9Þ%ƒTEí88=­‘‹Ó,X]âÞ÷|{ßsT5o­Òø>=ÿQpZ³§•ivO¿˜2O ùxõêUÀÔû/Þôài`89YŒïù‹P-Mï‚•vǶÏ{ˆ• Ó Ôйã{Žg‚SåKæ 6?ywáÆQ„w4f…õfs¹äøó,]‰DŠÅâUÝ„¬5áš²rŸ€®®²¶7$#W³v-Y9ëÙÙ,&¾ý|DBã{º˜-#‡VBÔnA]!„B!„ÂøÞdûÚÆ÷ µK3»’‹­øJ)¦»»wÔd¬ÚqØy£¾ UL¨»LJE¯@»ñø‡Sz–9°x¾ÃÑ ±€çXÐÏ;+ñ=Ç»l5àõÀHÕ&7ã{ŽÏ‹§Vi|ô?@®:g øé§ŸÎ|Üôqp|'' ƒñ=öªã¨XQ™3¾ç¸Aq!8Ù§Ë/xd¾”é¸à"\‚˜Õqœ÷ß_pùþûï‰Ø%¸¼÷Þ{CÚŒ>˜u”ŽWZßn’ MJ¯Z{d‘ }ÃçϺ˜ðÄ÷ù>Mü§ízñnôÆ÷Þ]Vk#|­øèA¯ˆ>7ß#„B!„Æ÷ó"~œvæÊü…við¬@«Šºƒ‚)ömsÆâœÅ]Ïàx®Ñ± ËPaž©WRR ÂpåìÊ Ñœ¨8ÓÕSã­»®ç¤UhðêžDßs3®œÌIÛì9u±ƒ¶ºÖ•ïåtÙø¬×“,(=‡Ü}ЩYœ ¡<_|OŸhÝ.§&sjŸ­ÒÙ÷±l±º|âl¹áÌzλ*G øå—_|;Ÿ*ŸÜöðð$¨ç8œœpÀøÞSŸôoüEH+0îýc£Ôvl5E*(®»S´Ü¶gÞµÊôÔôï£t;Ç Š«©—û3Þ/<ùN¡#‚‹pibÖ|>/¸¼ýöÛ3|Ø»—ÍÍMÂaØ’¶VC>÷êÆÁÑ€°,½–žuÀz^º4ÔÒÖ#‚[Zî÷ä—šóôG±ûš@­Nfß7]ך¡¬£maö.08Ørgø¡µô^³¶òÄ÷ Û×éPF(‘—à‰ÝŽºB!„B!„ñýüìžÚ¨VªF½Zm(zÏ$ß«Îx0ŒuUïœTSêÒ9ÔÍö¿ºR¯”wÅj£*–á§jСÁl£Óéôz=ÓvØóiõÊ䕺ֳoü]bõ4¥¯DªÖ+"üTuƒ, Úž?¾¿¿¿øÎgƒsþzUÍ_«4¾§ÝEzºzñ?@CíÙ‹J^¯_¿>»pZÂÂøþ ¡ÙV.jJiëÓA¶¡µ5M× Û!ü¸oPóá‰Öz&>=w(+/O¹‹p bÖ/¾øBð€&9Ðæþt~‡‚G±X$üúúQí@ÜÛkòÁÞΞt k}‹øt5ùà`|³“jM}0ôÙïŽÀ_X–{Ôp0Ý1°ÈâX`t¥áÈø¶òîÙ÷ýËï@ë@:ü¨5à5qÃø!„B!„0¾³gàæß+&AÈÄ3¼ñ=ôßVl|_TÈ›ðóÏ?ӹϜ–zê}@Ï ÞΜ½Ñ"\‚æ9~ø¡À‰ÎÓ·m›üVµ¶V…©ÈAwÚj'¦V·-rK`|B!„Bßc|0¾Çøž6_(Úp|Éâ{oÕÁÀíß/AÞž˜õ믿æ°µµE~«ºiZ[O$—»ã¬ïiã{„B!„ºiß‹w:¾Gßó7ÏÁøžÛ«W¯ÎNE–…8²øó ›/Âå‹Y?úè#a°z-ý:Âož¾zÝÚ´N<0¾G!„B!Œï1¾/FWFb “ ô›‹ï•âdÑÆ›]x`0œÍ N²!Æ÷X„·0f•$é­·ÞFB¡ÐgŸ}öèÑ£wÞyGðøàƒ`­Zh”O›íÀp8ù-³Žòë¾~h-¯Ydñ0¾G!„B!Œï«Õª878 yszíF4Ú&!Áê‹" ¶Ç÷GÝv§Q?§vì7žÎ9ý_‚ìã{,Â[³Bÿî»ï ˲ÈÈp8ŒD"‚ <„µh»|h¹ÓöÇÙ=²ºÚ¸“ϦDzù­=©¦u-rC0¾G!„B!Œïau¾9|8NBn „ùÝóçÏ÷÷÷¡8á'ü[‚ãû€£?lAλˆ(€C– ß=Æ÷X„·?fõÞÙ>ÿüsÁåÓO?%·Âø!„B!„0¾÷=È݇PP|¿P¦î¾~ýúšù) ƒÁwô%ßc.AÌ ïÀ½{÷„ Mûÿì݇©#1€a÷tŸ 7x¸—#™…y9ݰ"šì“Ѳû}¤—ã0†!ßä{€‰“ï/Fšò={òͳÌyÉÃÃÃNEÍWóù®ü€©‡{ùÞ. 
³n6›?£õz]ï˜x¾Ïp_Fµà˜ßø‡¿ã¶ÛíßFòKdV0W Ï÷™ìw_…9݉_þŽ{ÈóÝM ~~‘üR™ÌÀüò= ³b®ï™s ß2+˜+ùY1Wò= ³‚¹ï™s€|2+˜+ùY1WÈ÷€Ìйï™s€|Ȭ˜+ùYÁ\È÷€Ìйï™Ì€|Ȭ˜+ä{@fÅ\È÷€Ìй@¾dVÌ€|Ȭ`®ä{@fÅ\È÷€Ì æ @¾dVÌ«X@c=?hdqù(@#«²@àî{@¾ùï@¾ä{ïùï@¾ä{ïùä{@¾ä{ïùä{@¾ùï@¾ä{@¾ùï@¾ä{ïùï@¾ä{ïùä{@¾ùï#âúúúäääx/ä ÍE¹TK_v; ÉÌ ß~~^]]ÐH.Õ\­¥+»ÐdZu9›YûÎÅÅÅãããÏÏOÙ ¹Bs‘æ:­]Îàïìöóóóažžž^f ÓGÌ m›Lÿ|µÝþ[®ÓZð#¢tµÝ ÷Ë"~-ø1“|ŸWåï“ÇEK#äRÍÕš ¶tRwû0 /‹‘1¤6™™äûú|†îÌi p‹N}Ö”ÒIÝíK;z8€_›ÌLòýñ¨0£ízûÏrr½ÙŒÿú|Oòœ&Ax¿×éŸF— jíMóɳd¯ýVû,«ïŸ?†¾GÔ÷Õûî{v}O¦;}O†ý·uä ,ˆú¯ fþ…Š ¾Ç%„ ‚ ‚ ê{:§¯ŸƒýégŽßïuŸ>ƒ6©GÀJóJwüwÅVZW«â­›¿*_ß#]ëZºèº.óKDYÓ3M¬ðaÕ!¢À¼èoìÁC«¾ðFø÷vÞ̲¢_ûæ_¨ê{\B‚ ‚ òh ¾'9r øÉËí“» Ť׫‰©w{'Ýþyúã°mû“ÃKáî®±Ý1¶{ÿøê®áè"¹rʰôûÖöÞúÇËCP[{ÿdx3ÚþI2âÑYÚO|bÛþfko³å›îÃuGãøü(^ÿ˜ÜH¹ŽFã)ùfÓ[KâI&e‡Ý8ó >?ÜßãÒ„p+{‡ñéát¬ÁY2îù`4&öêMwe£\«=kÿdÌ8A:0ë³4ÔÑir{šOO·{G·Œu˜6ôzzËKÒÞÝ?à‹ÁèÜX™œ1ðOýáÙp0"àé{xî“iZpH2‚ ¾_ a¶œo†om\CKBzT#Ç åš¤y!aïmUQÔK’ÿ1üëžTEV.‘eÕIn ]¥Vø¤£rfèä÷õjIHþ%/‹uÃB»ÎªïÇeùj<ͺŒÂÑe±,ð)BYT ÷¶ré†*U+IS.…JåZC±\bTÚŒZ»®iYz£´4CM3-ÓòÂÈÒÒ'·’³‰pÊQ@y”… Xj£œ„S,$,- Å¢P%Í rÂIºRS’žœä7®¡$YšÇ—j’Æ$w}ÇêÕ²0é!ÍtMÑ­üµpý(¹”´}¥ÚÐm/^HûÐUÕRšëË}ú©Ô,7&€e–æ-IX Ÿd­&™·¬ãÀ5êb™+N›s|©,J*$yñsø Y ± 8‰€.Cuƒ{ê{r 1/é~ ‚ ‚ ê{Ô÷›oôk)O7§»Mk*ú[ÖÙl}ù–yþãùõá¥ÁDËîûÿëY¶zþ³­MÇÿ_7KÒÇ?ß¾1 ´o­:Ç9Åj”ŽùѺѾµy0¼Yûþ­{?Y´}Â÷O߸¼6ŠÛM^fð9Ñ~=š ò‰ØžQÕCCÙÊ9K úÅîÙ&Hã|Eœ´\m¶oDh¬»§ycíÖçi›¦ß7à#¨zÿ!Lî†aX1UߟõWÞ9‡$7Ã1A}Ï^ ŸæÄÁ[E®¸”¯¹Ñ5I}öNQ–Ky›ú}SÊ€/²ê{£ ·p¢Ô(çt\êià@`É¥œP ÚÓgô?"1 êmâa%kMÍj&„†Æ$óÌIJ=/ÄB]wçL#Kñ˜†xcÐV¶Ø§ ‘ç=?¡Òã݃ “]Ëu‘Ï MýN!+´ùÕ4ÒG[²xkîx‘“µ½«×á¹ðUbm†Ž*PÓ¨ÈA¶¹Z¥5‡t-ðcI¨ÐpM\<‡aIú*@AAÔ÷¨ïÏBázë½}:¦3úùtÚø­s…Sà}èRÎÂ%ôýÉëç´–kûÃÌ&nÚõ¹“)êYšôx¸4ì„®ý•Öò7H#áòôý!9{tíéê+ÊÐÍ̓ – R8OÏŒÕ)=¼uNóÇ}߀œ¯yOŸzt-èûô€Ûãä•`Œ ¿ Ô÷…Âä—r);tÕüÙ&´¥% ŒúÞ¬Á€4Š¢Oˆ\µxgû²ÅwÏh¼IÕðKÕ”d'üŒv.Vý8‡ÅL jîº60ø»CòX§lÖh /N˜€°t'¼îç~L–¨”¬(¦©•;çWèŨów5Á 3¶w”Ê]Ͻê±^_1È™"Èãƒú>5øRR’J”YßÃî{¯Jó–…Eë{@ìéN^޵X,äG{ÇŒ¤w`åjfûZf®|#Œ'ÿ$4¬8ELP²£‡§‘QßG2E¥e‡qʶD{”"?J©°D&’€¶ñŸB ic“Ù‹i‰HaööAäÞâe‚ÑãM,fZr‚X­–y¸ŠtÁ:‡æBY¬ŠåéÐÍ…Ì#WÎÑûE‡ÜíΠïÙ—ôC¾ 
AAõ=êû^gªÔ·¾Æw1\£ß¦ï_oç”Ùyþ¥;ž¡ëó7ôý÷¿ML1Qy¿?‰ÏÓ÷‡`<ƒþ®9•È?sõ=½èÿÿÚ¯ÿÂ`úâá3Džb¾o‚’Îþí¯„·ÓÚq­Â[ÇãYb˜ÎÊ!ëïÐ÷Ï`Ÿ>„:)RLê{ñë¤(SÀ)öF‹°í÷Ñ÷ƒýëŠ@F<‹ñ~bð—ßùcy|Pß…šæD©« R)²êû"/Šb©\Kz ÍúR–Šløaà˜`SçÔ÷冿úçèå"ù/©­2Ûµ‹kr¼jd4J˜Q‚Ÿ­­¡¹ ¥Œàã¯gä©•ÙlC1 ’EÌU4Ë Oo”ó.{™õ½«²¬ëv˜´v4¡@ºTæ)ÓA~œÄs½àí8ÅÑÈ22¡w3³ Lò ŒZö#`Mk ¥œN"%«»‹å†í‡qh5|ÓÀÞÞÈ>zNTü4Ù®Ñ(æ¼ìñgëÀ**lu—J9ïN²ÏEõà¬áLëb-XðÇœLBA¨ÙÁÕÂuàaAØô=û’fÿ*@AAÔ÷¨ï{×…Ú”v °\}Ï­tÈ.éïWSÉ V}C°ºá®lz»£llûÞò­ú~«=ÊuñИ®ï‡içéµüÆ›‰rh6ÝÕÍöº}œ÷ê”4ăæ¨j(ÿ<-­“— èe‚t}ÿ"qôY:-kò°È±šzš±LæùÞúŽ"xÑ:“ôò•‚<¨ï’dÿ…:7‡¾ç«ZxûFò²âÌ d 9ú>òÝ\¼¼Þ-U)" X]¿Î– Åäj3épÕ ëŒh©7GnÊ&Ý"Gq—ó\¶c€Ø«^µøidÒ÷ä¸eÅ…ÇnÕ3.[󘧜 ퟈ùVT/“qM¤¼²‚YÌf)48}_O§—ÂÏ>}·R¸ÑIdgrW¨¸·L-z¬í‰²ì\=œS¯f4´uãÍJ¯Ëšã]ÍÀ“jÕ+D±føÑÍŒñ•ºf:Á䥗­e3Š)°¯ÏÐäoV <±Hþ“¾g_Òù*@AAÔ÷¸ûÞŸî¾ÿ­}6¦^ô=¹óš´·Ð'Øá[¬úEg¿gnûë›í×ï??}5Ñ»¤¾'O……nôýYµ}žíŒsÆÇm'Ð[ÞŠòõï¶(>æïd`Ô°&H@fÞŒï?V'ÀðõýpmZ9gùklû›­½éåŸÚËdA'yb®’yÂ5#`Ñ÷ÌKz¾¯AAA}µïAµS8þÇ3Ð÷·*òÑÏiõëû(wÛ;yK»eñá¢èûçßæÐ÷'¯¡@üoV<&‰{o_QιeÓ÷íÍßÈ0(a³L®ïóæðP¹þ–±(/Xßë´zQßÿjÔ÷Ðz`Ö÷PÒš¢ï9Š3„fè“è”Èàõ@³° –Ô÷¶Zç ´Êû”1f/Rn˜âÈ–ˆÍÂæ ¸f)ið¤˜fO#«¾‡Š+Ðÿ§Ì¬ïƒO¯Ü³€ì]/C@ì 8¥¡ÁÖþh€ÄcK½_‘܈ø“ ¼d¸1öõ5òU_–Ð$øŒúž}I³ ‚ ‚ ê{Ô÷Pý¥}2¦2Øw¯Ï˜5ú¤º%ú„-ä¤ÿmÃ-³¾.[OW¾­m~þôýÅæ»æõ@kûCÚÆüôZ~Þ^m½ÝÜy)>Dß[ ÊA¬ƒßKßÿg³w«¾çÞ”±žC߯Ï<…ågùWR<Ç>#êû?µ¾‡Æ@¤‰©Âéa€^dÐ÷€«É[ H7ÊŒ³ûÙ $’9 <Ä’ {Àq©•"!¦ÙÓÈ ï)6|aSfÖ÷‘IH(¹ªï¡ò;Ð÷y¢™S{bµÐ)N&å+"ÍÉCú”ÀVø;[ËvL}}Id—ž%F}Ͼ¤¾ AAõ=ê{àb=)c¶†~­\Åo‡Eöö7û,ïÀX¸%óþà}ðvû糜_´¾oon=ÉœàJÒÞhM´ÖœŸƒéµYõý¡ýu2;êôzô Ñ÷-È<ÛN†€çÐ÷ço§ÅˆVá Êßõ=ê{Ø}ºŠ”î!©›ÝëåK:_+’e?„j½¡¶¥T¨ïÈ–€RC›UveÕéÌpÍ ¨áVý8fHãcè{ö)3ë{¿š-ø~çî{¡ú^Éù •jCV-Ç‹«ï#ûõP®ÕjÕ|ĆêÀ¢ö,©Z!«e>ÜVæ3 I²Ñ_°¢˜ëú„Ý÷ð)øµúžý«AAAPߣ¾ŸNm’Áw¦Ù u‡û)ÈYòèÚ‹Í7zæ(¾ÿ¹Kßè=·¾?´a¦‚’/Á÷ÍÉ·OÆç|}¯Ü~t­óõö·#i x8Áu»ïW óä¶za#¤ŒÅðõ=ó¿>þ“ô_ŠM>¹Þyƒñ_õ=êû:Gsvz•£‡J÷z°ç7ÿ¤Ü‚ìDDi‘ŠbϸµúBõ= Á^oþÎ9Î0œB z&#e%zPÙô=ô'…¾ÆeM+ó”Ù®ÕÅŒ¿×ü˜’¥¥’-PßuWŠ¢ÂføœN¨+PgnàëkûÈ.gœ»Ó‰|˼IJ,Ûv“À£À³t¥V™-ܧDø4wüËæžc)šâÎÅ]àÇœxTõ8‹ž½E²C}ϼ¤ïùU€ ‚ ‚ êû›œ¾~µb^êç(þý=þºžÌ³/ñ˜^_ÞœÊÙ'ÕoÝ1ÐÝNÕ0Ü¥ð“n ‡;ŠWÂùõý°»ÇM‡æVöÆù ×ßè¹ú¾óéË4ò–uFšúåw>4%‚Œ»Âõ”íLŸÃýè3/NßÃ_HéôÁìSÆb ø¡ú¾ûéó´«-âÌd{cZLé•7ÿ…@Pߣ¾ —‰†¯ê`.mÌ«¾‡µÛ;äÍ›7#ÒèÍ¡ïa”˜VÓŸ¬þOçÁƒ›ö ¹ )úEìidÖ÷õlÿ¢~Û¹£ ;dž2õäÆI 
Z(Ë3w„J6K‚dÇ‹Ó÷d˺Ej_Rß›äõɃˆÁ˜3·jÙ¹ÎNÊUÅËä¤\9nâÍJ5³Ö§"ûÕÂl–k!ý<äÅ}Ìã()P”mŸa·“ÙJ¿šëP©Y߃DHG 3gÀ & {Ùõ}©åÂÒ]ðu“}Ê´G@YÛ°EšŠ5Ü—Ý÷4¸údz®R¸G¤°ž­½[Yº›‚¨‘•¨TT—8¬•Þ»»Èy Ô¢C}ϼ¤ï÷U€ ‚ ‚ ê{ £“Í[·éÔÿõÞÝÇíÍɆú÷~ ~6xù*£æ…•kûÛ Û{º¾b~¼úÅŽ/&ýƒ¾'ï^í ÈÒüm(ìžÒÞ˜Æöãn¤\U÷(½ñÐÝáoXï—ÍÞp¦“å÷¹ü½5yK[ËÓfÐá„ØWžIˆw8˜&HÝ}o¼Þø²L '~6»Ã™±z9q2 kò:ßaš‹ô½=J¥=™…}p2¦óøÃ#õÖžé“lƲhP߇’P ·ÜÒ}"Ù&¨ó9›IÃÜ["«Dì\ÎÇk”A¬Aט¹gÞ}Ï×tµV Ý/›>QÁ§^*’­„ª¸õLP¼ÞwF®Þ j®ˆ†OLY,Ðü> †€I}_Sµš@¦„eoî4†¹n]-€¯T…|ßÛУO™òò*`ÉÕâmï7$#šïcBÁÕê7Æ-TUÛ3jäáS|K y¹ã*º ¡<°}ä)ÕRnªŠu³ë:|~ ¸ªlFÙ®-µÎni]ªš^´ð9à›U2½C4`hÊ’¦,!–%Mÿ*@AAõý£±Ûú¼ü¬¹üô×6QTçbm²/[_?¸# †,: êû?8kkªª†ªj–ãÇì^¯˜žù–¡ë¦ihša9á£ëª¦Û^/€È÷Ü/Á÷Ã(¦Í(ÙЀcÀ ïápQßµt͸J‰ã‡ó¥qþ49º"7ê I’eYÑM;ˆ˜§Ìþ(„žmjª’Ds’¢™6èíß‘(° -™š®©”$d‰S— iîTzZØÛ‡¾eh—‰HS¡´¨¢À5uUj4MW/›+†EÉ[äÚfÒ¬!)zr—$)šáúá?æÂÀ3ÓyIWX7aèÇXÒ þ*øÿìÝÉoÛf£èáíLë|û¬¯n½;ÂÜlœ3 wP|¾(´hu€ø Ñ)`ôúŒJàÜ@hUXA-´¢&jãD0WŽœ06^¯lÑtTÆm¢ Ï~ƒiš|)*~¢_¾@¾Ÿ¦‡;£ùdî¬o¿8n÷ë7ÖÂúËw&ù!߃|OÞ®×Ûy[õ×þöôƒ=_‡L¾¿ýà ½ŒàùùþRÈ÷oÔNÿ£ñ ÓËkWþêâØšOÖŸ& ߃|ÿþÐõ·ÿ¹ø·{zþŽý~ðþç{ïóÿ¥ïߘ'÷îg–ÿqëqÂk±ûéå“'ýò½®÷vèýŸìÌÛ—þ¹¿È÷ï?äûÉ)äûiØm7ïzuíãò7ûËß]ýöÚ­‡Ýí ¯Íæ½ÞíæÃÛëŸ$ðáïu½‹ã]ïÁÛ×ì£Vߨý¶½Û3ùþ/#øcþ†ÿRÈ÷È÷ï•õÿ˜ÿëßýîâß]œûþÛÝì~wiþOk¯¹=Öç/Ž^‘¿¾ø¯ëýó¿ŒàùüK ß ßO€|€|?}ò=òýôÈ÷È÷ï6ùù~úä{äûéïï§@¾@¾x·É÷È÷Ó ß ßO€|€|?}ò=ò=À»M¾@¾Ÿ>ùù~úä{äûéïïÞmò=òýôÈ÷È÷Ó ß ßO€|€|ð–‘ïð›’ïÞ8ùù~úLž€És¦@¾@¾Ÿ>ù€÷)ß ä{äûÉä{ù@¾÷×vùù~úä{äûéïï§@¾@¾x·É÷È÷Ó ß ßO€|€|?}ò=ò=À»M¾@¾Ÿ>ùù~úä{äûéïïÞmò=òýôÈ÷È÷Ó ß ßO€|€|ð“ïï§@¾Ï!ÞÛÜ>|ɲ¹½›dì=¿ûôY2’Y³õøèÇ÷’àÅÖæñù^¾ï'zò}ë/þsmÒrùö§·6Ç£ûÚ·Ž¾õé÷!¬‰kþæÅð³­'I’Y ßË÷òýDOî~—MöÙåâ¿?ØK‚õ/oŸÄú°¦~*ßtôƒÅïB¬ßÙ¼t´æÊw[ @Nò=€&#ß—Z7›o6„¥~ÿêkÇ þÕ{»É¡öõoþâ?ßÜ_®ÝÛû…|e”ïŸ?þøríÂþRþáY| ÉÈ÷—¾ì'YñÏÿãJ-lðÅf’‘+ßœƒ| ÉÈ÷ê|÷Ö7§7ˆ÷žìì>ÙÙ{¥|ÿìùþì>‹“ ÞÝ|ütôhÜ›?õoÖ¼^ÿñf³×}öœ±×½·y»ùðv³·þÓpïhUübIä{MæƒÊ÷ëwÒœÌ}ÿÙÝÝÜ“çŒæ¾/¶Ÿ¤vR¿y÷áß]9=Õþ¥ð,IyöÓƒÂåôf—¿¹þýƒÂñqä{M惙½fkìG&,W&#[wÛ¶|oó= ßh2òý…òÝöO×ïöÃò}ï×[—޾up«ûnüÊ|ÿäŒ|ûj£¿¹½»õ¨÷ñåÑÊbûYr(~\8Þ²xçæÝí­í§ëï.¾÷ùï4ù~òrs;I^W¾¯ß~œœØüñBXÿM'>š|-lyåÛÍdÌæƒKò= ßh2h¾Kýú£½×‘ï 
×NR¢ÏŠá–ü¯wö¿Ü}Y»vo/IÉŽä=È÷šŒÉs>ÿ¡»¹ÝùéçÃe»ûèçö÷¯}±î…·ÃÿÆù><ÿ6e÷ê(ß¯í¤¿|~öàå{@¾øpšŒG×{csÔ|²ý¶ù>üÈ„|ÿüwáèk83¶G÷/¼Ïùï4ùþ‹Í$+¤öÛÇÛ¼é|ÿ¼~°Ø~rÆàÛÙÈ÷šÌ’ïëSË÷'_~µž<ç{“çò=€&óåûLO¯nüüÆó}ôÙ•—=ºöÅõòMùï4™oîûçÃkå[£G×Þ¼¾ùâçûäë/n‡”¾ÛLNl6[aýéÈ÷ñ ß›`Åɯp´óþ :9\/½fúNFÖítG:ý(yÇuZÍ#f{˜¼uâac¤¿'¿9MF¾ÿ‹Ë·?þü›Ë£åóµ¿+× – W$ûÞx¾O¶^<Æ•µ›wîÞÛ¼öï_…5ÙÈ÷Q«0ó æªÁ¯Ûùìâ0³f'y»¬–NöÒ0™âh¼ÊçGÃæØ›¹Ð&ç Á0J~k­Å“ÎVZÉkð»Ÿï/ €&óáæû‰Ë…ò[I’7ßïl^ÅúðSÏÓk&æûÏFùþëQê6¾4¼ÌNäûù™< µèü;Ÿ«†”:lÍ…ýUÉÛ$jÏgN¹ÞO¦‚vuîä2,¶Îõfžk ÏóÇa.ÿÎs^'œ[l½ŽvÿŸþßßì/ï{Áï3žÝkOªö—o>o]ÿ~;Óþc¸óýê½Ý°æúhÍÝÃ5ÏÂt7wGwß÷ÓkŽwr33£ýîµÒϪ}ö¨÷Éïoí£«?¬5áѵòýÂÒJmÌÊRua~ünæÅÁ¹w^åû¨Sš™Ý_J+Qòé×Ë3Åån2d2÷»û>jÍ¿sù>Óîßó‚ ß¿öžïnmGû˳8Ù·Õ¼31ßÈ÷óí8ÉÚ¨WfF–:Ã_›ïßRÑJq&#|bñæÑYËÜÕv’×°U_=R[möã_ùKó­è]É÷¡Ý¿·ù@¾tëw.\¾uá?ßüt=JR^,ý>­×®WË¥ù X*/®Ô[goõê+ÕRq´iqaqi¥µ1ÈÄÛÍÆ‘f»·ÿu¿]_,/ì­Öævþ‘„‡»vWJ³'× ´ÜÝènô†9ÆuÛ­‘ÎðÕ®B|pàÖòìØÕ_ny§Þ¥+ÕÊB©x¨T®,Õ[/{‹DíúÁkrt°Ååz?ý±D6ßk÷òýûìaãÎè¹µwÖ·3ý‹õkaýå;“W ßÇ« ³™|5ª¥3j÷|¥=˜˜ïçŸ[ÚIŽ W+aÃq…ÒR/>(´å°" H«£c/š¥°f~y˜ä²±Î%k®ÚJÎ2h׉d,,·âsm÷š¡Zg”–ÓÈ7ªÅ°iÆlµÙK]£úÒM‹K½üÓÑÌ-®.—f2×"ϰ󤽘®ïäñd&ÏÉ}&>ùÎ4n.•^2¶RØäذ½pÆîææÃÊl¾×îäû÷ÞNÿ£ñ)ïËkWþêâØšOÖŸ&¯@¾,NÛÁrqæå «ñ„|?úѵƒ¥°ê,Å•(IZ‹!½Î.Ô“´F% n¾Úá4ýpˆÕ¹ñ^ßhVÇ4[îe U™¨TÛxÕ-ã^½03Iq¹“Ãåù™‰fW{qØ´³ü ›.¬ÆyÞ™î¾Øävþ‘´ÂÅ8-\͉ãÉÎ\Ÿÿ*„‡*g…3ëåÂä}t¢$ˆ»¥™É²ù^»ï?OîÝ¿ô’‡ëþ[“W`îû¸µ\JGáÔ½êåZ;:Ø*êuê¥Ù±Òš#ßï$ºcTj­AÇÑ ¹¼p¼r¹Å+'‘45ÈãN:[;֭űûµsè­Ž§àj'Ž;K3c–B—=¶±)Ï…Ó5¹Ô_iËèt‘/Ìç gFù(=¼¹Re¹¶²XšKýtHÃÑÒøêÙ¹ÊÒòÊòbXÌ5‡9Þ•æ ÿ°ódcµR(N—î¹ÂÜB-úÅñdó}þëõ¼[™Ë¹0W˜[éFc¯y0_,2?Œ&¸Ÿ›9¾×îäû÷Ün»yÿÓ«k—¿Ù_þîê·×n=솹t²äû X©.Y¬,ÌͦnQŽÓ1´¼Ú;õÜÐùtòΑïOö¶PÛ8ó!ŸóKí$éý@ØS˜''{£}Ôï´t6¹Ÿ’šŽéé»§ •Fªö×Ë©Œ»Ø8:Õfµ8¾~¹åß2îկחšqr`ÐY=\x)’ÚÂØàª‡[fªñìâðð6ðñr½ÐŒŽ¯Ô\vr˜œù~v~©¶ººRë âüÃ~•‘dñÚÎ9žL¾µë•ì‹Ûsã{ÃŒWKãSâ—šýp¢éIŸæÛQöÖûÂJ»p¢Í…Bþ|êüþ¢ÝÈ÷È÷“•ÚÃSSÓdîmÕ5¤Éù~âÞz«……Ûã7éÏ–ëÉH³Rƒ[é&çÔ+‡0›Úy}aöTÓ?Ö\¼ãÃî-¤g\É¿e8»#³•ÁË>]˜[†ùDa¾´Tkt{ÃhØk·;N»Õj¶7g\ÜÙB©²Ôhw‡QÔënÚn5[íAœûí1¿ØKN¼Â°_e$™|ßÊ9žL¾µë>‚Êþ^BÜ/ý寨>âöøxªíáè-TÆ7î× 9ò}ºÎ‡(¯ÝÈ÷YÈ÷³¥ÅÕ~æQ±Ùøâ~þ|?¶·Âb3™,¤ÏÓÓ“¢ƒóž}w9} vØu˜,e¼íÃôDùídL G¢8ÿ–™éVfçæ GæÂúã½ O^ä¬ÙùR¹Öì&ÁéѦ1_®ÖºƒW{{,‡—>x•açIÞ|Æ39ßç¿ /ÛCøô 
Á\a|}6ß/ldŸ=¯Éu>¤yí@¾àÃÍ÷…z/Š£qÃ}Q’ÒY.fŠö¹ó}ØÛä”™~^kˆ¤©Â>¿%çÔ¬¤§Œ_¨,Vö-VJ³§§5†a÷ Ÿ:äÜ2S«ƒÙ™¬ôÔÿ½Jaf’B¹=LŽô•™‰Ê+Üù~¾ØùG’Í÷9Ç“‰ïù®Âä=„•¹Wºƒæbús‹”ìyåŸG»ïø`ó}ˆ•“µçNzFôŠùþxo³åFŽGÌ.Œêtc|æœr½—œS·4“Ó|;:3Ê·rçûVÞ|?Iaô˜ÙA}iaR0Ÿ[$Á [O=Ä £ÚäÏÙçvþ‘d2wÎñäÈ÷­×šï •foµœžì(¥½”9¯W)øÚ=€|€|ÿRÇ7—;“ {¾|?hV±«™”ÚÍ~ö1§ ½¸WN‡õs¶ª3¹•ëý_š<'j®T«Oÿ=øïö`;÷–ñècŒðIFœÄÑHœŒK ºíVm¹Zš/dû}ý#ƒ^·Õ¨U+¥ÂYÑù|ù>ÿ°óä7Ï÷ù¯B¾És õþ؉ƩS‹ãð ÖH±“ú~´RœÍžWþ‚¯ÝÈ÷È÷’÷bêÎâ”x¥8ê뫽\®m½ôѵÇeö$.ǵÒlXW­Ì¦¦µ9‡0y^Å•(õ¼Üpø±<Þ£+­Aþ-7V23佯r¥²x R©Ö:£I„‚j; '3ì·VÊé;Ù‡©üçªÑèÄû­r!=7ý¹ò}þa¿ÂH²ù¾Ú>¾•ëuÆÂ@7Ò3ä$Çâpž‡'Ú$Q;œiö¡ÊQwe&óèÚó|í@¾@¾ÏžôÍÅf?ŸÔÉùý]åÉ÷IÔ.÷ÓÆøÞÕ¹Ñúf?sïpö>Q¿Ýj,“̵çSÏç]DÃÁ˜(ꯤfÀŸkÎÃl±Úé‡ýÎbq|ãb;z…-ãÞêѪ`¾ÒÚÄqÔ©WÇ×Ï–ë¯L!5Í}«7 W¦³”íðø³– ¼ÒŠ2ø5wßçö+Œ$“ïgVúáÙ²çÉ÷ù¯Bf…Z§EQö“žJ­5ˆâ¨ß©¦v2{ø½rX,,7ûÃA§±®[&ßç/øÚ=€|€|ŸcúûÐ1ÛÃ8ކƒnseî$Â6’$G¾Ï̾¸Ú ‡ýVe~öÔ=ÝA|j¶ú… ¿0»8¡ß÷å™1‹Í3¶¤Ÿ¶Zª-ÌÛ“•yDjþ-“zˆ¾“¬lÄéW,˜- §ºÔÃGÙmgÒV7Ηïó;ÿH2ï±Ô<òçÈ÷9¯Bj‚¦ìGDýú/Ÿg±'6j¥™B¾Ï_ðµ{ù€+߇^î×Î%Þ¨&=¾s#Îì¼P u<ʬIú€:vƒÐÇÎΘ ÛՙӇȊ–‹3™9ÊÓ²1w.ì0ê¬Lйóá±±ù· ¯êÜÌåZ7 zaæ Ï€íÕ+3YÙ“ÊJ_¯ì;$ÿ°Ï1’ð…ìw'g˜ýnþ«ðÒÖ¿xøbnL¡ÒNƱRšÍŸïó|íàƒÊ÷¸û¾]LÏ’O¥<F-שwŠáËÓÅ5;ɱáêb1ÓCKµvªÝg&Ÿ­õâ$#L>>~Ь¸SJÍ@SOR2ÏͼDq¿U>+[«õÑ&y·L?Lõ¬2_(­v‡§vººXš9ËìÜÂjgºTíÕRáìmªõAž·Gæôóû¼#‰Õ…ô&‹ƒIãI¿ß Nç¹ ÑF£”Þ²Ò„oõšá÷BÒJÕz8Tjü¥Ó½¾¼R_^ûÔ y ¾v ßðNåûé‰úÝÆjmßʾÕF·%¿B<赚GZÝÞज़õj³™gçNK¿ÛZ­­ÔêÍÆêJmµ±1ˆÎµeZ<Üè´õ}«««õv·'/ :­ƒ+°ºoÿ¿ëÍno˜¼Ä ×iÔGÛ®®6ÛÝaœœÃ9†}þ‘ÄÃþ¡Á`0Foòz 'GŽâô·ú­F#œg³Ý{ùë–fk£?L˜D¾@¾Ç5Ê…Ì£nMF¾@¾Ÿ†x8Dñ }<‰y¹—šŒ|€|?MÃjafܪzšŒ|€|?eÃÅÙ™óKƒÐdä{äû)‹»ÚÒÒRµZ­5:qò~ä{äûéïïßiò=òýôÈ÷È÷Ó ß ßO€|ÿø7€|ÿÆÈ÷È÷Ó`òLž3}ò=òýôÈ÷È÷ï6ùù~úä{äûéïï§@¾@¾·È÷È÷Ó ß ßO€|€|?}ò=òý» @¾@¾Ÿ>ùù~úä{äûéïïßmò=òýôÈ÷È÷Ó ß ßO€|€|ÿŽïï§@¾@¾xËÉ÷9K޼Ø:Æö^òfí¥F2u€| 
ÉÈ÷Û/üçÚ_-Åö›ì×ë_Ü::î§ßG‡5óâÑ0.·ž$oÔÚÉHv“©ä{MF¾ïÜø*´ûƒåÖÍÇo0ßYOåûÍKGøòÝVòÚ<ïòÑ—ý±‘ÜNdªù@“‘ïŸ~z%´û£¥pýñÔòýóÇ_®]Ø_Ê?<{ù>|Hpé‹Íd¤}ý›¿øÏ7÷—k÷ö’éä{MF¾ßûéîád5cÿJks*ùþy¾ùÑQ¾ÿr3ï§@¾Ÿäës¾üÇöÇ£ˆ¿ôÓ^rJ¼÷dg÷Éóɾ8Z_ð‡?,ݸsýñ“ä´g;»ûíbëQÿfýþÒþpëA{s÷óý³ç?û,N2öÚë?^ûcûêþrý‡ÔqSöº÷6¯ß:8âÒ­û·×ûáa¼ccÛúéÇ£ö/^ýqk'ÚÚÙ;9ÁýÿŸ±·ýóÍ[w¯úÚûk?=M2žl?Ý|üôÉóÃíwžî¿>×ë?^¯?øúîö³ä,qÔ^°týîþ8¯7ö7ûùI8e@¾Ðdäûøq!Üt{íù‹›Ÿßü‹S³Ê»W‹G›Õ¯Ýj‡ééO–Û¸ôñ»ß­ÿèßï^ýüvzËÚ¥«÷·&Mž³v^l?ã?ýøÑÑúÔrëêú0Ói|w1½MÉÕ»ã0ë}fðá1¹Çsßvww<Ýßþò›ì/”¾]ß>k6ž/~}c-³}ýú£½ã¼ý‡{» ßh2òýVóÛÐŽÿÃÞþ—ë­QJ^ë¦óýgù>Äý3—ϾBj?Ì÷^¾å_”Ú[/Ï÷e]»õ}{Âq¯~Q`3 þ%Kùþ³Ãý_<+ßoI]û}íåû¼uýÑ‹ôl<–;“`³ùí¤ÓQðA¾¿È÷9ì-òô'ͧ'7ㇿ›½û>,¥o¿~ôôY¼÷ðÞƒ¿»RKÿïGËÍOnô¶žï=ÛÞüãÚñú¿«1ß?Iý~@8îÚ£h/~±õ¨wrÜÒÏÒ#¼øùÝî΋£Il¾¾qçøÞö¯w’}[›ÃîÝ.†-ïw7·»w³#Ù×½qrßýÿ¸Õ?8Êá)‡)†Ž?cÈäûKŸß]´?‘NôõÃÆnêßýìÊhŸ76·âÃWl³ÿiéfüÕ‡ÉoN}e©R^(ZX(W—Vš(ùµýÞ¾þ J¦h°ÑéîëÇoý@{úQÄ™5oJ<轎7-D½n·Ûéö†q2}oôOY<ìuþJ´1Œó‰)üEé]¼Ã~ïHûÛVÂÛ÷Wõaãð¯ý8y—Á´þ7D™>äûóÛ~8ºý«vœ9ž?çÂç?žï/ßIæçýŽwcòýéL´ÿøÕh'ß>Ì—ï»·Ö²¿.æ_­?O’øçQ忳yöäþ·×vNùÒ—ý—Ï¿ýwÇç•>‹dç$Ö_½·w*ßôÅÿgïËzä6–5ÿ¡€ŸúÍ|ä¬5kݵf#`˜€ÁÙøàšÅœ…0xB-Pˆ+ˆn☀i›¶) ʦEIé# 5ÅnV}ÍŠ.*èTÜçä7š{¯Ê©ÌȈÌÈÌ‘‘JöúÜ=-ðéã¦}'»>û~#Eß‹26ÚÙQè¶Ÿv?¿öT¿ªF[·›·…t}e=ùe ä<[§ $~‘ý—3hÚD¿YvºyÛ˜e’ 5³ER8½SºÅæhbì`®Ø¨eë—åÕ[G?ÃNìOhvÝÛv–m3 ·³ï·g¯þ'„ôVôKAÑ÷|äwþŽF|_#ߣž'ôý_ƇkÔ£èáPÏÿ¾@ —ýùbð—ÿ±ð ú¾OÚ3Ü8?ðׯ¶Ì8èûÍË÷¾øøNöëóñ^þÚº;¢ïÑ RüSIЋ¿HÁ§ £Ð÷¿ÏÆ^þéç %M~zñb\<~’}õ$ûöùK ›*(((ú¾IÝ34Ãoî‘r9ªu¤|+Èœ+ ,ÒöVȉ­sæ,N¾™Ùê¬ÇÒÉðc› ëŽ|à Ô.Öéf>rw­oj.ËÏ2¾œt‚óER NéôþðÖ˜£ðWg€ÕjÙú¥AyõÖ]œÝ‚˜Ô:2@_§o×Ú¾¥}¿{uÕ/…·Eß3ðüý]˜vïÉ÷OÚGÛ?Ï/¾ýæQ4=è{ÐîÀøƒ¡&Æõ<äÑ÷h÷G^> gmöyu7úú£OÒû_ÿrûÌ£ï_Lô‚ôb¯²§%Ñ#$Ò­Ï>ŽeÅFAAAÑ÷ÒèªP;´•çe+D׉¶.#×áÇRô½>¾WPô}îÛQ¹ýãåâ„'=zð¹¡iÛ?†ª†Ñ#1—ßPü&–ÉËI&8_$ê”Nïo9êË I`ååjÙRPôý[Ø¿AoÝÐöí컢¹U¿}/ß~ùØã‹Gh÷žU¿B.øiú~óð“û`É_KßÿôD'äûQ<{ò_>¸w¤/³éûìÎßá¯úbO”ìñòI¥ß(á?¿ÿá§þ¹‚‚‚¢ïg£¹~è]G-ÑÕ©¡¡ Ž£§¤ï}ÿÞέ9t)(ú>moŸÏ9ÅWPþ°+¼^bí Ð×µZ¶~©ôýŸdòœÜ½Fa;Ù[5GQΖ©¸…}W4·ê—‚¢ï%qÈ?ýçÞý“çÜyr”¾÷V£´3Ö7¤è«;CÆ›û}ÍÓô=j†$GñS}À‰¿ó÷Þûà÷ÿåΗï¯~}ÿðßÇ“³/$èû+}žÇ_þå÷oÐùW/ñ*(((ú~6ÚÔ9ÛÁŽëékÚV~·{â§Þ¢i’¿È¢°G'e-&è{Ñ4u¦ÛPtÃce«ež†Aàoq’ÕmGêí¢»ª;K¢«ÒQ’ÓÓÓeÑ¡Š¦Üv'¸¬8ʫòms)äeµ¢Î·%ƒ0®»QÃeyî¼ JkAjx­Aû»Sr 
%ˆºˆ£(Þ"Šå3)?Ñ”i¼ímײ¢î$ÔÅ-I¶Î°`+hûyzƒ‚}”;H [S„ÃËVØ´È”*†±CÍSDÁ¾å -èÄìÛŠa¢dÉ`·$+Ç=A¥yùž·3Œ¶Å ˆÉ€¨óÐDò¡X†Yå%—™ž|S2‹¡›ƒØi±ù:gã£-Òa:ÂtÂf™Â\h"k­aÑ4u#æëœ‚Î2¦{Xrb‚;ÙöQdñ¥°A%e#¦D’7cò2Ü;ô÷ض_uW5_bþ,˜¥jâ”:QO­ÉùCbÆJÇï <{C0\wmàb#±lñ7¬eKÞWßþe‹;x0lz4C´©¯Ð÷‘þÞˆù}áo±X^Aß/³­ðmµêrø…ÛZšnœHôþGjej 4t-ÃX­Œ¦í…ÉÁˆk«,T¢Ÿ ‘·îÿ…´¨¦ŒCm™ÆLËñ¬l®Ûª, ÿZ¤fxEY”hlü¾c›Æê†e»QZÞØ‰*‹ËXØ]ûQ M¡ëEI=í ²xi/KÛq$!Uä;Æj'ÊÊ\»~Êì; Mâ;«åB×õEåÖL~D·êm–ìì”õìYE[‹¬–« GÙ¶L}wû³yi¥u˜nKvy@å6oÄé œh,OdqÇòÄl”}r”óNü- Kò÷}ÏG·j¿ûá½GÏ«øS=üü»¿ÜåœÑïüú ñYÉsÞû¤–NžóüÃA’ˆFß?}\÷þËþQÜhŸ²ßÝþ‚)5ñséûïÏÓAô{I÷Ï¦ï º—뻟þAG’Ÿà×_m}?]hh{R^L-sÉ‚TË(‹Mýì ;7Ó÷µµ+C£Dæìö¿ëf3…"rH›=––Wu7–¼À]ƒ¬¡M/mÏ5‡E­ ¡—Ö$÷½Á[d¢Œ°“4;ÈöípsqC á»Þë?¦òŸé«5z0  í%±…®Ùô}[8š´¥—VóÕÅ(ÉK"ìd½-BtÐݤâÀ£Ⱥp[T)bǸI76 1!qâ›´ý° 98•,N nÕñÍ:_rÉéÉ7%£ JŒ1@ÇÔãêœQŒ6§b/ý(@œ&ß@˜ f _çt–ñÝ;Å”œ˜à&hV`¹Ž»£"É›ƒ3yV’kF˜F+tsÞ,૚:%¨šY˜åý!5g¥ãwP]¾ÚU–Š.25šÂ‚¿ñ7ÌeëÍúêÛ¾lñOìd6£ PY{¼â¦ßWy7»/ü-Ç«3è{Ͳ͉嬎­ýnŽVrf'Í+œǼƒé¥y«s ;v³Ï'¡M*ÀìÍ2½ØÉw‰k)cŒŽ]é –Bs’Šûz36\I0*œ£¢¬ÜjÃì;ÝØP¬¢J·‘‡Azݤè°4 žHüe‹:½r׆Õ(SGæ¾/EGÇf4Ê?9Jy'þ…+ùIŽ{ оgà"ù ï²N¼j‹ˆ¾ÿè«—ã²Tï75käÞ!1òø Àxºö/‚¡ÀáÓµb—»ÿîƒg¸Lðo“ç(‚ŽÐ÷Ÿºöó¾G:{à•ûÁîùÙ¨Eß¿øöë_ýŽwÿùýƒú_~l@AAAÑ÷×KÉ1€ +Œ]a'm±Ë93¬nŒ¾O×» Neb[?£¯±ÔÉzªa#CÁöò­M”uÒf(š9¯)ºtt‡bàwÚÌ›¨CdÇ}ŒúýàR|îb%0]ë­Ž}ÏÏ ž‘¯.~É)¿p^Zö Á$8é‘C¬Œ7›IõêaÙü¬j¯ñØžêN>m°Æh ßä¸fåK.9=™¦dCÞ牡ï$¾ÎåM3í @ôð D¹Z¸n¦ÎùÆÙî`BNFµVTQ‘äÌ!?yáÞ›lb(BZ¾E~öJÚ€z›‚n'oÀspW:~%ÐÄöõ« zj–€¿ñ7üeëMúê[¿l±OWÚŽÒ¯á»#(­GjóúÂÝb1$çÑ÷»nÝH1ÇÕW»Ø)#¬‡û`ýÉײԙ–ÓÊ×LÏ…ÓRCS,½~WO}þhv‘¥O‹“‹Ý&ö5–Óªã¼ï}Ä9ó%Ù´¹÷Q̰›îûÌY@°ˆk¥Ý¼èÃ(­Á¦ò›7êâp¢± ñªÆ>­GdêðfìFù'GïÄߢ°%?ÁqOAÑ÷ €)Ö?ýáæ"Oð€­ûøÕèÁÕUŸà¸øæÝf„õ˜sGæ0ûú~“áCB ®½§×ÿpíÃË_[wo¤ïş턉þtHß¿ó_¿;Fßož]¼‡^Œê|ùí—¨óÙf}¯ï~ôh3Æ]KÑ÷ о—OÖÉLĉÄQ 7Êê¦mÊÄÐ𛸉¾ïJåÑ>¼ØGpUÇ‘D7½« ±]['¾u†î $°´“²¨òËÝ#Nø8Ø#ÌO!D[E®9fМ×\ßâføÈ¸¤¬îpY ~^' ÒÝÀ‡–æH P× ÓËëm‡»*õw=Cœ]飕Õ:(ë¶© ÏZðé{%è^RvC®ƒd¨±œ|uñK2x!ÐM˦뺦L`Ð¥/æ ’¦ªÊÌׇ’AY•eÝÒA{ ^+ÈúV.Ç’q­† «eÛJE“¸¨áãìÙ@é»:ý´é†Ëìëån»oÅSqR”ai#ºN4‰gŽŽLl³2$G<©äôd›’oñ.@¹…çí¶\[Ö‚HÎÔ¹ŒiÆ®@[iÙ§»H¼%zøýh †žë~ÖÖ¶ãëœOßóÝ;Á„œ¨fjZÑ”COQ-IÂR“îý`(.¬(+›¦Î|iù‘XI{ˆ*‹Âè:’$2®Ë“ÔiHÌÁ]éäÇÂ_!  
B þbÄÝ<0–­Søê·µlÍp¡àžÊÚÕÕ{öÞº4ÝÕ0ö!½UhîÌWÎì ‹Åðê úòëf”W¢ëê":Õ; ­pÉ`t)A3 ‡gW틆¦øJmÝ`r З†±Bù½ë(C[×õCîy¡/ÌžÁ¹{HC®V:¡Â7¤ä°½À_ r£hjtÙ "‹¾Ðµ…_®$=ÄÈh Ûõ|o½˜Ùw r_Ml•«‘¯#‚LOúiœüWBáÃ-ÈnÞ¨‹ÃÄÍýÊÒÐúÊ/Èà4K~£ü“£œwâoQ¸’Ÿà¸§ è{~üîÝ×'”ÿe€w}·g½?H|Û¾øéy’ U‘aýÎ?ïË¿¯ºxöòé“>¸{ý ú~ûKýšHÏ/ÄÓgÏÞûìˆ/—õÜßSêw¾ÚÊÖ'¥ùø•YìÏZ}vñ«]a7¹È " n\ýyÿ^ý´{õâ™ÈÏ¿ìÿíë à¹ôý¨éà½ÿýÍ£g¯úïÏž?¸ó÷ä3Ã((((ú¾ŽõÑ>~bwœ ôýr”6¿ 5TKI‡Q|Ù:mÑ@沂Vºb5”³ê›à.RJßkVEo ˜{ ˜»«ýy¿ºƒ³>4à Ù7WnFH÷ê2‚n´oCX N/Ø¥áø´X'æ[ŒÂ‘p0ƒ‚6Í¡ï …ð…£)_]ü’ IœHd%Œ²Êw`0ÇÈ …½,ôâíPwc~Yެ†°£ÿà=ne=¶L¾«ÁnGt3(o<,ÝŒoV¦äŽaÓ“kJn1QøpGÍAd‚¶¸:—0 qæÈ»6É’=<‘'aÅlóé{®{'8*'~„ÕdèÕEE’3{òNºw EÝ®GÆŽcúžo‰•ôõïÙ¬ÜÁ¡IúCjþJÇï tŽ kÊà¬40qÀ¬Åˆ»y`,[§ðÕoaÙ‚ó—÷¼Á“Ø |˜Çóbï„øVýü¾ð·X<¯Î ïqσš_ šÄFüõ–$…AY?® œ8«q²¸‘¾_ZnA”wø\q5ÚË]qâЕAÍÒÙC3’º»ªeœKj‰å~7†I.5䉚ž"À4S’MW˜g€™D÷Ó„<´ï¬;CÀ‡N‰Òé‰fÀ/1=GЖn†~7Ý 7ÔÔ¢'²ñCßÏ^¶0á‡5»ÆEzÁ·j4+šÑ(ÿä(çø[¶ä'8î)(úþµÈ~ów4¹ Eþ鮨?|˜wWôý݉Gnï^¼ºNX¿s¬dx~<æý§»ýˆñ¿8ÿñª’ÿdø&1ñ©ê7?þåõßÿyú=‘¤G÷ãûoú3,öDì‰Ü÷çHÓÕúð Ñ|((((ú^”á‚û†£è{Ð8 „#%"ц êçñƒ‘žç¹ARÑ Çè{3ªè±<8Øc÷9b4ôÝAP®Û_Á3ˆ„mßš\§ä("ý÷ÔcI—]w¹/Œ M–Úå+fô}™xîöÿe7þ½´±¨|u±Krè{h˜¤{ê"S‡zÙƒ¦AïÈ E©z!$j€­¼£ä-ÙÃôS±ê2ï ǘ© uðw|³ò%—ŸžLS2‹áxVÈ—` Ù:ŸoVzbx?h’g B‹·|ϧïyî}DNüˆÔ @³Ö'\ôLáO^†{÷r1­@¾E¤VR‚® 4pS"åiù+ÄXb³ 1•°8˜¸ø‹óÀX¶Në«oײ5gð ]ݾ2nekgBzKsÜM~_ø[,ŽWçÓ÷`ñÐÖ ƒ Û¶±#jX17Éú:w}Ýô•yä=üW  Û^¯A³{•9G6ü ú>%ûÏÖu£wÙõ¶¬ÝÕ€`7ˆ‹ªm•eyžgišdesõp«ëQDu7ú‚=S’Cº\Ó Û³¢¢Ê/EÉÒ$Íšn¢ïBŠqödz¢i:éðMvBÆ!ÔFnX8äÏVÔéÁ™ëqƒ‹2ô¶G˜˜DüFù'G)ïÄߢð%óǽ‚¢ï_Ÿ9g—·ýßÆÏ§ ‚¿ûë ±röÃß|vHͯþþÁÅKJX¿û_Ó÷?8düÿòN…¢È|t™ Éñ/žn€§ß~£47þèÿû›‹ ðý_þŠ|TxÿÞ“—H¸¼ó?‡‘GŸ¡X2¯}õÇ ðÜýl;Hö¾þ~@ì¾<ø"ò⫌ðò¯ÜyHêìõüo?…gBAAAEßG[“%q袛~’LG”EJI4-`»Í][åi…ë¬-ÓXêô8I’–r:8ÅÑ¿NôÝY8íD±£•à*( î+ŒñûÀïi^G!űgjûó^ÛÇåþ ‚1ÐUEžÄQà¹kÛ2V z,á«‹[’GßC‡òƒd}/¦zAúˆâ¶ÇKB€¶2l/Œóòu݃Hȧ)oV¶äòšgš’Y QKšéÅ%r5ãù:盆¢¶qÒ£!n>4É0ƒ¾gêœOßóÝ;Ÿ¾çWKE’Ÿ)üÉËpïâH%–m‰•”öä”r‡-Ÿ¾ç¯tRc‰zÏçAÞÔÕ%êº5šªxβÅß<0–­ÓúêÛµlÍ<¸è ¯ëkûFËL ™î»2ØMÙÙ ‹Å÷ê“}?J¾‹Ü;¨!¶ôñW PŸº0öêärÆ%ù¾ƒ_Ž> º2‹½µµZè8g.}O’Ñk }ÀbèHóu(´¥aÉ ‡ãÏ~èøÒI$œ% ÔH -––ìÇú>!dƒœkô»&n™ £šÒ÷Þhj[º9y•IßÏY¶¨Ós—¸+³½ºx>Y!+Ô2mg5Ê?9Êx'þjÅ—üÍ÷6 о? 
<çþy·e«ŸŸŸ?ºw7~tþøùæX¼ù'ý„¿x\ßïK~wÿ¼¾J óêÑWΫ퟇_üðýO7Ë™}>ö𫟂^ùW?äüþ§Wü(]´Ož?Å7ãÅ“îG_ÿúÞ×î§_ºÑ£ü‰<ÇÞëðaòÍÇ¿ùòîùwîíÿ¬Ñ#9¨Ü÷„ @RÐ÷ø+Ÿt@´6ʸEˆT“hsg¥O>ÐÊ8(¶É7._ÃN¦Î%Ý{‚ §O­ˆæÀ¾2 Y>ðË™÷ÁvRo2wÉdIòÐÑϲñÕÅ/ùzDkrƒ„σäÞ b°yzX¢Úèêdu䌸ö'bR!Ä–6+WryͳLÉ(ú^ãL”¤eé\Â4  À:šãa }ÏíþQž¾ŸS-))?Sä'/r) o {Gl‹H­¤@½^€‚Îåý!rÎJÇïà\€ï˜vsVyþæ1}Nê«oײ5oð໬žˆM[WmDM»ô÷›MµËïïæíÜ…Œ¿ÅšíÕ'z·€T¤h†äÖÀ¶™^¢­0¾ÞÙɈg‹ðŸÙ®Œ T†¯•­OÒ­ ^å¨0„¾Ÿ/ ãÙ^ËÏ'éû !ñ‚.˜bj|H†ð |.¤µÑq5MßK,[ðÆø¶ÔØÚÕY&U°Ój¹Ù”qˬFù'G ïÄ_­æKþ&{›SCq2оk%˜HsJ(((((ú»:p¾Œs2b¤I\?·bÜD¬ãø‡[­úre8žo£ÝÃcLXßxGvNFHŠv¡9¤2ŒÚÂÙ-ðûôæ¯ð Ä× â¦ß˜Òýœv ýÑ"ÛSIäÒ&x.¨v±4,Ç÷,BßsÕÅ.É “aJ>’®0 àA†Ÿ#ßDˆ%MJ‘4d5+WryÍ3Mɵ¸L1QŒLðt.g¸JQÑñÆ6™ËLß~ú^Ʋ“åHâ:ø‹óÀ™>'õÕ·kÙš9xðR«“Õ»Xf³ºö$oP5 ò»ÍÜ…Œ¿Åbxu9ú¾Ùª˜ÍèQÿE)ÖÉc ú>eÓ÷Dx‘gÒȘƛ§ïq¥ ‰\S›â/Ýf~ôý|Iz4Ed.¦dqÈ”çEßÃFÔ1¢6¤A£=:fqœ¡à¬˜ô½ì²U»Vê&Ñ‘é¥4v^·•_Ìo”rdx'yúž/ù›>îmþŒ è{Eß+((((úè‚÷9ûÈÔw%FŽt 7jͪ«,š© ‡Â~ZˆŽÐ4„¾GµŒSÜÒɸÉsÐ<¬„7ôQZH­K2™jë¦+ œÍÆB2Þ'(|c"yŽ·„üœÏ9«uP6(:ädDå|uñK2hDhxI;Ò–Y²EZö áó M²4ãå›H*d¼u™Eþ( YÇh‡tM–ôÈkÁ7+_rùéÉ6%³XëŒ#›¸àëœQ À‹ã‡˜Jh’o B‹³u~ûé{sÈO^Hˆ”î4¡6¤eyú>÷ ÊݲþÉ_éNJß#“µ¶â4!M±gYyÅÜe‹¿yàLŸ“úêÛµlÍ<´èG£ïÝ\ÐLœ…†ÍÞƒ ìiw ÀÑ˰]Ô‡%~ÆÝðµkA8ƒØ€2´u}±ýãò“Ð0ÐÌô[V=¦7G]ì’&— [ô›´ÌABè*çø€éúø‡®n0"^ âr¢úì«ÚÂËZúÅŸœ¼äYB°EšðÍÊ—\~z²MÉ,†éŒ¨. ´úbûÇ [ç¦Áqb“LVÐ$Û@„gëüöÓ÷æž¼$¿0É\Ñ…†fZDr%m,âKx°$ý!’¿Ò¾¯cpêU[à/FüÍgúœÔWß‚eKf›„ç -aÉ–Ýþ‹áÕùô=úNŸ]â[ ¾Rh+Û^i D'€dò ¦Hô3ÀN›ãô=¹ ² ðêP{ }ϰ] »Ò³íõ%lÛɚѬN&†Rmöt0àLû Jv‹¹’à{ÌUgÄ®\]¦HTƒ‹´ïÓ€¹@\øö6Mßwá躂æåí~¸Kƒ¾—\¶à“«Â› 8k~£ì“£œwâ¯VlÉOpÜÛ((úþ4øã‡Ã˱÷¼–¾ÿ<ݽ[mNEß“||=ÖQIK´Eˆm¦fWiÒE’¤äMºgª"{œ’àÒJ‡úç8Ôál6¾S3?í:Ë}sõÁQÙÐnŒ¿kâ^ìyu6 º?»šA1V3nnú¥Ø4¸!kG£*’õ‚‘é›·VnØO}|,á«k–b™I„IþYÂÌ$0¨fFGÏÿmºD/jÂ8à 6/ô/]ã"ð‘1yÒß„·ÂÀ3.‹oV¾äòÓ“mJ^±Qˆ™‘‹beµ' :¶Î%L3žtº]'übÌ•Ì3f„Õ³tþè{È)MßK˜C~ò›.G{Ù ß"Ò+iWa!Ó­c¿¥ý!’¿ÒÍK]S¦—(êÉ! 
.4@HF¿ìf-[üÍkúœÒWß²e‹?xè¼y Ê’¤Hš×þ‹áÕ¹ô=]Î0‘ÑlѱC_Lp|k­­œ¼nÛ:_ã…Ü ô=ý&1t¹fhæù8}¯™þ¶MÑQKÙAÚˆNÔ¹3’G‹j|"½^i5TÝæ.yÆOßëA^ !æHr5ÈËOÒ¯OGߣïdÂHaV Ñæ± `zNÒ÷ÕÀÊr<ô½Ü²€kªÏ¥Ím”}r”óNüÕŠ/ù Ž{ о?.¾ªî'îŸ?yúúGYÛÉ£máó‹—›“CAAAAÑ÷@W†úèiÃñÃ4Ë‹"OãÀ6g€Ž¨iú—‘³dœášè2Ì+!D]fž½mæ’²ƒ$»Òjýþ¸ÀþÄ1Þº,kIÝŠ¶.<eÝ\íݼ.ì¬j;!šºð­9dŽ2KDÉÑx–•m¥êº¶Hü%~õ ¹²ü¤?–´U0h†“<'…f×aÝ ÑÖYä-µëâ9eÛñÕ5«$“YB˜uV5m[§­AŬA2¾»ôã4/jeD\§¾¬í:Ñ6[CÀ´vAÆ7Ðx.l?ÍòF°uþÿ—¾§rÊÐ÷’朼¸ü›šQV”yêÙðß;iù‘[I»ÒÄú¶í‚ã:ë1lÇO;yH…ä¯tü–Ò>&¢t'rsmÆbÄß’Å#À%rò$Áþ„ÈrÀS}Ÿ ËWgÓÀw¬iú¾Ž‚OßË/[ášv©œ¨"¯8àŸÌn”qr”÷NüÕŠ+ù Ž{ †ô½‚‚‚‚‚¢ï®ŠÛH=(s—ã,#3õ#®Ò°tNŽ—éÃIºÛoi¥¢ªÃAeº¨qÞ¥DäظßÄ:„ý. B Xaß•QK$™‚ž´|uqKòiÄ:q&¬^Ï$ô¾¶î[ÄÊ]‰ÑJ¡Ûا“{”; çUhЕ嶯Ñ@¸å\³ò%—Ÿž\S²‹‰2œhzéBlžÎÅ&Q†“ 4É7H+¸”¤áë\’¾§î}TN~µ¤¤„9ø“—áÞËI÷iùáê„8ˆJ@¼™¼?¤BòW:vQ˜ñÆÆŠ¦Á¡(üÕ~i-º‹sóÀž>'ðÕ·xÙâÜ3£ñ¼ …i‹ü¾ÌÜbñ½:<ºG5á«!b±¹›ÐWÃyº¶+LÆp\¹iG}>>«0<§nb€Ìx-–ÿ“‘) c”¢wS}Ÿ„(¦§»¶ò2¤á0‰É-]о—[¶hÂ%~ã¥V’§žß(ãä(ïx«•„äÒÇ=…?_ú^AAAAAÑ÷@W‡kãè–ÁöÊöØùYC"`Ü-?Ï%òÕî¤}PÀÒá-#šÌ_ÐMww×yvz=\bí’CÁÂJ*Auº¹¶W‡eM/p´;D¼åÅ2Țɧ 1ŠÌ'ò_†æt•‹çµ{êŸBX‡ªÕ—vÖvØ”ƒ¾g©‹S’nýݬ= i\tµO脚ðÖ_XÿœAÒ£íÃN_ŒZ¹î£¹ –VÐPº‘2€È]r„èïÆ3²fù)cJ´ázE:jY=߬s%—šž|S2Š¡m´ú:ÌQ†§s ÓUâêDê0M šœc Ái®—‡”_çt–ñÝû¨œüj©H2æàO^†{eb¯ôq ~;èæÌYÀÕ uJ¨ PRÞRsðW:f±×e)ÄP ñLRØ`ð#þæ·lÀWßêe‹¿MB¶kšz¨Þ‘xòAÏï ‹5Ó«S¥]VëÆMÚrÊnâ_áy^&º:%Dõ :=©N†J“d`q“"é±c’χD•ô«3áD-ï0¢-Ì0o6<ˆ26Ð}äúgHÔYhèG¾´8D™êû4ø„‰y„!=½ø¶±ké×Ç”d‰«1é{ö2AœЕ¾¶ûwõMqå™’ÜF'GyïÄ[­$$—<î)(ú^AAAAAÑ÷€¨³8ô\×õ¶ðý Jóá¢ö‰ÐUFî™òÐæiõˆ{!Ñ…¦È‹¢,Ñ]?gÝeï’áŸdešÀ2#·l]fCÑ$«Åf>D‘ÆA‹8-ıN…QÒw"IËú¨Âš2ÊÅiÙÌn¹­ò¡ï}+Õ–ÅeÕt|u±KÎGWåir‰4+h/Ùƒ?VUU×5IYJíPÄáδa\Hu•¦ýÔó£$ò=?J²Yvëš*M®UósÍ* ÍóLÉ+†ñ&i2´ÞÍ×9£ß6Y/Èå¬ÍK!5º­ :Ç×ùé1-çégÊé'o'ڦǠÔ&¶@ßÿr-"ïåWº· þbÄÚ<oÏWßšeë ºÅ’ðê¼æÒ$ ôaR¤©á£.Ò0ðƒ(‰C?cÞ†¬¼[;%éVZø tm}‰¦iÚ–Ì«ºLã¾ÓaFIV[D“§ý€ ·‚mÑ¢BË|´ dKЦŸ;YÂ0É ”eô¡°jÛWß“4ö}2xÿ½{åU\Ý8üΗì¹WÖáp(…AÕ 0QÑsAT M=I“ŠŠ&ïÖÀê 3ìéÞ™ÙÏ3÷æÏu.m0/]¡{óö‹‹î˜Fï’´þ»‰'9ÿ×éY÷ü·ÿ[„|€|ÿtÙ<éßTõ³ _¦ùñÚ º†O/†r7ã$Žâë§žöºÛ-ñ¶ìÂ˳ۺ?xxè?æ!^T¾ã¸G½Q:Õ³$êõK9s| È÷È÷Ï×5uÝvuñ¾:çü‚|/ß|Nçýü}‰ô¢îÂUWlfÑWñOÙ‘!ßC[Wu×U›ÛÂA³Ý9@izñd:›Íîþ™nËÐsæørïI“¨×Ÿ–È÷ò=À§Ôã¨7žÎfÓIõyïùYüYíÍÊœIôÉâzÎ_ò=òýó5Ë8êWõï~CIþÃ|XFoâEè 
xºþO,@sÜŽ¢ï›o×Þm=>x€'üYí¥‡:À/hOé4‰ˆ'iÙ†ž3G^ƒ|€|ÿ|Ý)Û®V«4M·Ù± ¿£ê˜g_äÇ&üH{γ‹âÜ~âÓu? ðºšc¶]ÎgÓ7³ùb½Íʺ ¼þnkøÁü®ºú¸]¿&¯¶Çª 0@[•Ùn“._-ÓÕvŸ—Uî9säõÉ÷È÷Ï ß ß?€|€|ðÒä{äûçïïŸ@¾@¾>ùùàµÉ÷È÷Ï ß ß?€|€|ÿ|ò=ò=ÀK‘ïïŸ@¾@¾>ùþ?òýÏ ß ß?€|ÿ¯¿þéýkÀâ9Ÿ€|€|ßÖå!Ûm7Û]VœªîÑduÜmÖ«7›íþxn~ËX¯-‡ýîbŸʪ Ôçã~»Y¯/÷oŸå§s3| x5ò=ò}sJ§£è[ñx}8ßîãèÉtuî†õÊl5úvl²,ê»ÂJ'IôÑ|Û ^‡|€|_-“èlNm¸ªWãè¡É¦:vuÜL£‡Æy=ìN6Íð1à%È÷È÷m‘FWÉ:/»ðEWòùè¶uyoNëIt³Øê¶ëÚ:_Ï¢›õ©>Ö•›èf4[«¦ Ýù°õ[WM¸hÞïáh~(ëðE×»e|»ƒY=| xò=òýis­íó¬ ÕY]Œ®ù¾œEW³m>(Òqôf¼*µ·›FË<|Te·‚ïÎÝÇ Æësøà´¾nŸçõð1àÈ÷È÷m™¯WëÕz[vá£ö´û|ÿá½íÉò¾‚ŸwIt1YÇB{EoâYîo‹ïÌvç[—¿½­þ܆wmu|snº¡cÀ ïï{Ýùt̳ýv½Z.æÓÉ(ºèó}[¤·,Ÿ‡G†ÖÑU¼Þgû]oŸeëYüqÝžö¸Š>ˆG“EºÎǺ ½ÁcÀkïï»4‰¸åûãzÒÿòácM‘FÜ®¤ËÓÉ÷¦Ë¼lÂÕ1àÈ÷È÷·àÞ‹“Ñx:O7ëùdž~XÞV»™gáac÷ù>~$Šf›S¸iÎÅf9%Ñ·ÖÇfèðä{äû¶GW“嶬›ð®=Œ>äû:_DoFé!Üéê"¿8VíÀ±>ßVmøuÚº*ò}:ŸDï¦ÛnøðÙÉ÷È÷Õ>‰¾šWßYžÏ÷ÍáÑgÒöe?^äÇB“nMýØ…;ån‘$£$mNmív>NFÉhºnÂýb÷ým ^|À ßwå6¾5õ:|T.n]”_sþ$ºZdUèÕéè¶=¯†Ž…ó"޾šmOᣦ¿†MÙ^.˜Do’ìþý­Ë§Íб‹®.oNU€ÏG¾Àâ9‡Qt5^m›ªØ¯ÇqÔ§eÓ…ŠÕ8ºYiªò°èG§Ç. «²Et3I÷UÓv]sÊ7ã~ë¦ ‡´_àgWœ»ðEWòù(î_`:vqX&ýÿ>ùù>tÙ<‰~,É›KrOÇÑ#˼ WÇÚÝ/ÝôäÐÜ/ÐßMíð±‹"÷럎|€|Qoç£ûZ?^Mw\O>æû7Ín9‰î$ÓmQ…Þð±Plqt/ž,Mø¨;ç³Qô­x¼8Tݯ{\ã´À§#ß ßß4çc¶¿ÈòCYµá¦.O_”çº ½®>ò¯§sî ëµU‘ïwû<ÏöûË­7áêtØmÖ›í>Ûm6Û}QÖÃÇ€—#ß ß¼ùùþùä{ä{€ ß ß?€|€|ðäû{ÿgïN½eU¢;Žÿ/õ_à3á2âÑX2‡Ì8T ‡B‘ …Aa0e02`L D‰JºO÷zÅ¡ç’7Ÿ|?ö-vÁS¿®µQó4ŒßhçÕ½.óӪͧ¶AvoÚ®WæûÒj™ž¥ ß Ä÷6•:â']7é ñävÊ|BuçõWÉÔON"Í/–ÞÖ§Mâ{â{âûs*÷Ä^¦6é[—Ê|â¼þº>óÅ“—JóKÕgžxr} ¾'¾ ¾?¡2÷òéûcýôý—Žï¯÷ €øžø€øÞ¦û*ϲ,(’À¶ )ò‡[YÕë}|ÿ q¼’Mý¦ª»Eÿ‰Å‡Üê3ë ñ=ñ=ñý5zÈ…%¶Óa8~¿™¥o²8 nÂ8-ºa1;ÛØË§A™7ªïÚ7]?s[$£ÀªgÉÝÖ7eEáM”ÍrůÄ÷ªoºþÖØÜ7YòÝ‚“2GzÚºLã(|ÅYQ÷Ój,'[øWù_Ó4–¡#žœ°§qš•^ǶiŸäªmeÓ>nãµ&Õ²Îã0 ‚·â¤¨»Y™—ÖIæišÌkך4S$Þq\ç°w}/­ÂOKƒr¼u0äbÏ÷µ+½0)ª2 =aqqËí/É‹²H½ã¾â{2™3Ä÷þc~¼žRoÒüR|ïøyU×e5¬úpôÞ-ûåÖÛÔEî÷Œï½ò>œ}[däàï¯7“y³6ÎÅ-^±Ûp‡jÒæ®‰œã©ü£KMÎM,,~Ún÷Ö»,–bØŒYRWX‚öþXõÜíJýb3º­K9ás¨ÎZ'¾]Úofª"û mv_Ï8©ºïË*Q·™7JZ¥"‘«ˆïÈd>E|7‹yÚìëfNßûéüqIkõ°4î»ø^/Mq”×ýr¼PÒ.öRÎû3àk%Išf7Õjnô¦†:·pˆï¥¹;žˆŠÁúÐÎÓ_»Ôd—zöͲîã½{Êkë~ðXçzÁ+=v¥ýttïïçÿÜn‚ÅõüjÇYmjîûaz)»~Z_ÜOÇ “¼íGµmóp/íe'ûU€ø€Læ ñýq˜‰ú¿Æ÷Ũ?î!šŒMבcÇâö+dmNÒ.•µTÖ›=õm‘Æç:ް\ØÂi|odê½›2¤§ò0Qçµ+MªÌ» 
Zô¦ž6mŒ’éÇ3j´zÚ6}?os<÷Ás…-ë•ý(÷?Œ«n4V·ŽçÇY5® ¾ “ùVÄ÷òˆï}¹ÛÚ¥vê¬Ncq{)›—ÊÇ…N–Êí.Š{ŒÞøâÔùNú´è±Ø-¨ÍT»œýÄ…&•=MÞM;ó±m¬œã/¿Nà8(N\qÆ{õ­¯äËÁÄ÷? â{âûcX<×±}8ý$s¿vúÞË>YjBqà¸aŠK[8‰ï–ınr7–Á~þ‰ó&Ïâ{yß—?l|oMð_›°¾«_èÆõp¡h4¶­ð…½Ô»½GE;«í0˜þûÆ÷ÇoÏw­”}5g®4y2oõk£~ç¨_ëÿ“íG€ñ= Ä÷«%¿8@ÜOýÔ_ «% Ä÷â{ýÔ4~êÄC5õkÕ}ª5¯Üçæk ~êw߈ïÄ÷Š8R•S«=¯ó1©Ÿú©ˆïÄ÷âûJŠT³øþÝ»wÆOxxsª;/õS߀Ä÷â{u$zõvAýÚ*Ì[¹1©ŸúŽI|Ä÷7¿Àg|÷½ps±fµæn"2–Q?õ;¹û@| ¾¿VÜéœ4fæ­Ü˜ÔOýA|jÄ÷â{•·oß?uâaEU^ê§þ* ¾â{ñ}bSÑXy«éR?õ߀Ä÷âû·£ñú‘e6¶¶¶·ººgüÔ/ñи‘t² zWj^gýÕã\¡6êϾy>š<½V'_d+ZnwquÌòûCcc[¯Ÿëþ_߈ïÄ÷Î#Q¹=áïý⋮ˮîÚÛ ‹ÑJΫlWss^õçÑT´t%2W­39i:î/CIGõç3ú¤Æ•ËŸ¿ñ_žÿÂÜó®ûO|*Ä÷âûŠÉÌô[“zÅõt&úÕ&Îs~4µá„ƒãV½Ja«£VüJô¶Í˜Ú¿íÑþÙ¹0¾ßˆïONNÞJŒÆ“ñ°Bñ½¸êwòŽçuÀé\êþŸAýr¿Ú˜ ¿åîû„“ú7;Üecúp›åîûÏsÿñ=€ø@|_©ÈRŽï‡»ú^<|8Û×7÷ðáÜÃoÇšow[ü?=”f¯^tî|œÏ þ̼éà¾g®8f"üçG N¯üñœ“úw¾1Ýeß1/^˜ ÍÌ{OGz½«‹ÑÏuÿUñ=€øÀÝ÷j—ÇBÚß»7?å¹¢qõ¡Hð»­OLŽß,ÌvÜ{ÜPï­×¯†á{ß¼š ßZd·×ýþðŒÝ?ÍœÄCÁ®¶Ç ¾†Æá{]ë+‡'%¹ÝÅù®¶áÆFßíÆ‘ŽžðëDÙu%WüO›ë=_~Ù«Õö–~>n뉼I›ûDþÕ±1ýZ ÍoŸX†GGWÎ.4mYË\i-ª×¯ÛúZ–f^å}“d¶úh¥éüGzŒžß}ÕÓæ»]ïmöfLíilôêƒ]ï»Ý<Ù7|ÍŸXˆú‡Ÿ6ßîÿúë~·»_ïÜÜ6;<“̉.¹x(´;Óc:×ôÌÊþJ(›×‹ŒoùÇŒåëk×G‹Ïÿy¬¹QŸt~!®—±˜ Ï×BFúI½žyÖÖì­×+¼íkhÐb}aKô)&v÷C+s·]çwÙ7ôì†Vö·â¥ \ ûÏßÙNØöM?¸ ýà4íôà4M>8£³>HÈï×ÇÑ$N7séa‡þÛRª§yÒ{ú‘Cé-½›¯Þ­oš~yëǾy*Þâáuû¨â{ñ=€ø^ŽJKßÁbŸ¬µÄ÷ŸæÍ¯w5ˆvËåj›‚Óp›&žòÞ»ç‘ú?~´²¯ùr»wÁvçxðžÛÞMtî™ù´'ñ§_šŸjŽšbÙÜâ7æWM.¤@\½ëG¼ÞÇ®O¯Õ:’¥y [~Ÿh´^¾žËó¯'ê]¬Ñ5áß2þ`¢÷‚M™9<‰ûÏŸ­m öœ¯kÌöìÚºå¸ç›kËO­5†vóúÌ›b¥ö]jÓWjûòó¯úà¤ÍóÖ÷—Ù7m6`©{´£û‚Á»ë»Ž*Ù;úòñ=€»ï+ß‹E½¹¨;ûÇÁ˜fŠï;Œ°5÷ªÙ’äJWC$}bØì°F·R «¸fCiO×_ÒÙÓ6jt>j4§Þó[¹ót¸£ÖTɽø•×¢`_ ¸j;2''…­á^eÙÞž…ó5ºÕktõµÄg<òS" êR=+}Zs8yÉÔ»9±{â²|PaûN|gÞÌî³U¸Ô'’YèQÞÝø¨pRIR| ¾ßßx|_³vp–Ýýq|ËvßôØLékRºl!i}}¯›–O·E&+_šfv;g‰üp³ý©úo­}œiãnzkÀíë[99ófL3ÕܳpµHÄFmÞw_|OzÚžÈkn¯[“Cy¹OýioßXƒ%1ïÕcñÜÊH­Ö«Ù†­íuÕή¤ÅÊ׈ßß¿Ž÷Þ@ƒ=õÖì{ëí[Ùt¯W>£ZÍÓè=–â{''‘wLú(¢Tyj²Ö¼á5#]=³=mKçÚõø_$¾ßˆïÔXAÜAo&-}RÓÖ´Wy¹;“¥X?Øà2½Ìú›œqwð®9{uo¦Š:kºíšxº©wM/Ø"òÇ}§azakÔkÉ͇ó§u¾q™;»ƒ+q£þÃÑû– ¼¥þ¹ys…u}Yc±;CÝçµWoʯ¥xê@^‹moÅ?äĹ¿¹ëÕ°w~x!·åµ$à]þRy'É…Gýæþ]}œ}o£)³¾¯÷4lÞ¯1$9)É<æûèÓŸj“ã{—{Òû(Ø79ÌYŸ­1þØÂú©†~Õ¯„⥳ð{-§ßs,­×Ó( ·OOíÈwëvØ®Pn3{›~2Îno¨ÕÜ>4´y:onå±ùWÈ_Úé󛑙à ÿ•í‚hTÅ÷â{ñ½"jT/¤´Wq5ÅŒ×æ¼æ uô 
(Ì£Mb¶ø¾×ô•è›æob©éLÏlwš\­5Q,æC}–;¯ý‡b!ÒÇ5¯cE]âi«¾þtç¼±¶+}Zpðêk¹âÝ÷c£›bŒ7=u¦ýnÒ´ÿb™bù¶qzÝ “£o6cÙØæf Y¼ž~ºú´ö¸õÏ&bEƒýówë¡éÜËß}0ÚkIÆwDý–ÓîgÊÇôQ¿h—îé¼o¶ƒ³4ºšŒÜÝr£½ø+ùÃ'—æ»Û¹XN§R‡ÁÀv0¸˜^›$Ž‹ê˜^F|߈ïÕ®S^çîûšùåTQ›öXÚ]Më-]Íò’‰@ÜÈ*ÒÞûåÛ»jîë¹vzÚ|‹}ÃöqÑbg¨×–K¹üÄtLoš¯3Ý©ýÓæ5×+Êä‚ëúò–§j̃t×Ôƒ÷Ö˜ÚÅ2¦eZ«êohšºl?8iÕr¢=½||é«ÒÓ­]öO„ãt,f\ÙÔ±¼^u|̓³gú½m~SïØdÍÒ¼± S£u«ÝC÷Þ+îòø@| ¾/ ÆOx¨¦Žï½MwÇîÞ9½ZGšîˆtUܾNâïí[V§½Êv1£ÈµÅêÖµ'§úmI}Ér“&{OÇ_öZÃbñÚ«¯E&}¸0Ùw)ÆoØÊôî#š² ­i3v¶ö²A¼t îשâ¥ñ}vy [jT°Ü”8ù@Ëœ8»²'¢ˆïu±Ñ»êƒê eê÷ñ=8F| ¾wÙË‘¥-íÝÈÚÆÜ™t[nÀ]#òî½ûTN{Åøêv9¾-qÛÚSS=®ùÐYb vš_µ3÷àüaÃ@îìµÓW_‹mÏÅ?6ÚåÄYNºÕ—öéž÷ƒWwº=k:ã"ûVÄ÷Övù£qšz{fÉsÅø^½ÞKâ{­=aÿÝ“NDÌó[ã{QO|ièqK±½ÓŽ#{ù½sa| ¾ßÛâを覌ïŧØÇLMµZ¿ Üö…3Úb$[È&3ÉÒ•ÍšçÍe WŽï§Å¼F:,2ß´žHOµ›“îå}˺²Në ¢þ!—éûgÜšèödbÿ¬ÏÑÔEkÉ›÷-lÝ7åÝèB¨µF<ÕÝ4š+d³©”1~Þ2α%&NìÌM´ÿ(j–ÎHœXµNôêø^ýå9¡dAÈlLL´Þ÷wvNÜ¿?ùS §Œïå}Îò{˜™³œÎ:ø”2¾·ˆo,½o¹õ­ßÛ2qÅø^4*ÞGªø@| ¾WGóW'ߎm“šk—2ÖõÇæo•ñ,Ï›]ŸjimmÕ¯'-íûeY1²º]Ä÷)½iÿgs&Ûß>eY@9«­Û>>ß1”õëkn™ú„lk)òZ¬ ö|wLÜÚÊ„ƒŸÜÑúÔf¼Æ¨\íªï)u| ¾ßËE“NâûìË;–ï*y20Of3‘àx¹Ýµ)“êŠ2®ßë’Ïë¬ßDßê;8J¦"Aÿ-ÍÜî)ÄBnc [¾»éç‚pµH‘®"¾ø,¸[Bëû¹lrk¨ÝÜÞÝ4¤‡èãšåkîCg‡‘ ZîR_=’ƒx-Œd’É‹zQö÷ìïjæ-rSÉ£­¡Vs‘ý‚òz»ïxŽŽŽ2e“w''"ê?š´Æ÷âOCÄ5ä™2|ñYˆ¸û¾P9—Ç÷â{ñ}>Ÿ7~êÄC5ûkϾ—ó\SsB*:ä#ƒR,.]u¹r¬(5ÜþÕã{½snÉ£œN®ßL‹ËLZö-2tåµ”ßsÛm}›\—Vþ|={:N§ÛÖÞ­iߺloídK㋯ý1]ãsG—Þ}o;n£Ôƒ¡¦ËŠÔ#Ò7™¾‘?)ïCÉuîªwßÿû³ÓÊ“sßÛ‡r}«i¶ñùÖ ù‰ï€ø¾Ê‹É§ ÑÄé•(þöñýÝÏnïd£Ÿfùåý»Ò¼'o &¿ˆï•q¤py4™4§À5«Qké›U¼ŸnÁÞjQ²ZËNòlõ]ö"-Dÿ[|oØñ©#f×ÔܾËF¾»eéæj:öí*kQì¹ü9„EvýI*¼X[òÄuIÒ=>µ/:IÏŽ>ÛÓ?ñÈ·ê Ge[}~Kõ1ƒ*"Æ8ðÝ‘âûö¸t â ®rpùOÄfƒ˜K—î¾×û(ÏNÔ¦¦Žõñ=€ø¾rrÑ?üýÓ¿2®Öð ¦áÅØ_—fùƒ7a4,ú¦y¿yýáÚYÕùkß逸>¯¤ˆïåˆßz÷ýWëQ©6ø£%ËnåϤÃãO¾*“ùþØ>˜4Í»ÑY'žúþfÅÌ[ݦööYÑëA9{Õ‡’/}Þrw·»e=’)¿Æ½X‹$е ¥ä1­¶þç¼àþιrûœXý®¥[N–µ[/^E-g”¼ø÷[ecèîš?¾x¹g©ymäÇKŸ'§ñý¬÷¼¥n#iÝ“ÄK¯õ¸}îõî*3µ«na-‘·H,µÿÑÚ§5¦ïÃËæ¯rp_œe3»-¿-¦3M>7}‘·û¥xItÖ÷£VþƒGíƒ1çï‘òˆï@ ¾¯¤mÿ|)7®éÉôÇ›ò>q»4ËßøgyÓàs[~b3·X¼,«œ³¾ˆïÕ±£óX_ݧl{2²úùç…ÁÁ>ßÂøì~4SÙy¥>™ý—Ï^|÷]`d|É÷ÃóÃk{y‰³ôµ¬Œ¨×â¼þLt÷å³¥ÁÁ…Aß ß`øUä8ÄÞÖ³‘?ü —qúsp<Ž^¸ÿ{‘£H$Ù‹íEÓ™ÿûÇ9‘õßsou|ä¹g`yn=më#d¢ÑÓy#±½½d4‘WnÙ7 ÜÆú~ocá—Kƒ¾À>}Ó>ßêÿ²w';mäûÇ_-O`Í6Û¨WaÅ™§HÑ™çMZµ”l,u,Y´ˆ 
ŠQ°ÚL6‘Ûm&Ç6]·Ú¦L…T(AêúžÏG%„Ëåªß¼ùRù×W+­±cRH—ïïS8ùãì ÜGÛý\ã“åûJîÅÏòá6¿Õò‹Kò=p›Ü}È÷Ù’ïôv«?Dð™ v­þ©ò}Ü(ßßàÝ÷ò=ív{øsô‹ùo‚ùÓç{äûKÏWð´ò Šø »½XyïÖ'õF§\vÚî„o5ãït7ÖkùåÚâJ­´Õ> wô¦/åû~ïð¸{xÜ‹~ïäç†ór»f£sš*ß÷Ú­|¡:÷´nó϶K»'Á•z;[õpÂÅ•7åÝ£ÞpWÿ,Ü`É÷ñ¼þ’¸?.Ýy²¸n 7?¿ùÍå{äû›ÒoÜ?¿é~±ôö,ÿüØÝñ—Ÿ1[íŽgýó"gfí0xçpëõ½pçØV\XÞ¾jíû¿W»Ñïc[<Í'åûÞâ/âç¹ûðe¹\rº»wfüÈ™¹õ½û“yS? ß·ÜJ euÝHú¤{#¹ÙüæOÈ÷È÷)4W^žgëßnö—åµ(d—v’[ùØz8w~þêpÔî«Õè<ñ-Ê÷±Ó¾7ß?*_+ßwæûërûgA¤Y­\}ääç{@¾O/}~Íðº0¿ùò=ò}z½…(v?Z9¿ÿË¿¯w¯—ïg£|ßo=xw3ûó'ëíÃãÎF¹:”Z½ýäç‹ç,TÛµýöaÿ‹çì<{wßý/ ƒåzzµ­½Ñ¢@áͱv0Ø~¾š¯¶›í“òò«©ÿùï¿ûî»áÏPôòö%_7{æ7¿|=ù>Y»Õê畨’ÖϹû×éò}­XŠÚ}i£ñroî}8ß_ztíÛë>º¶ý£Q»/×G—žÛê…;v ѳ/ëÁõ½é‰Ï÷€|ŸœD“÷Iøl×½¹sš?ÝüæOÌ÷È÷émäž;õÔ\-ˆV_–ª/¿M‘ï»s?­{»âGåûãkäû±ÉgךW]ºû÷hÈù­Þ‡N;ñwßÇÝ|ÖÌ꺡ÛKÆqæ7Š»ïïS8y-)ó Ðh6Žjõp;©ï¾¾;º‡}ùèúùþäÑìðã‹K±þÞÛݼ½|Z}õîä1‡cïFcˆîñ9‰ùﯕ;Ó‹3ƒëÞÜ9ÍoþË÷È÷iîÆŸ.ÛVO¯›ïëÓÑ-ðõ æíÁ½[Ë÷•ÜóÑË„|ßoEËìŒ/ï3ÐÛß¾;©ùï´Z­áÏPôòFe]ó›?ñ|€|ŸÞâãBr¾ÿ¬°ØNï£‡ÄÆ¾|_ú|qøòïÕn3vo~ôW„;?¯¾'ôW&öî{@¾Ï ›†;3¼nöI×üæ—ïãäûôúûç>ÿÇB­T~³ta+­ïýhö¼àßϵ®—ïû­áïWô÷Ó­Êí-žÓ,¯E37‚˜Ñu§ÆÏy^Ž/ž³nñ˜`ò}+f¸3E²¼ÆñÙ]7ù<×™ÍüæO=¿| ß§R_y{Êkì³cD­ö±G×–Ÿoñѵëkw®¼§þlá·çSý¨x¿Ï^õèڳܟóò=L(ù>EM±?.ƒë&KŸ‰Íoþ³É÷1ò}½ù‡ÑÍõÏZÁ{5Þ=ÀvaÿìâÒ4ÆîmoÿræËñ|,}¾íyY .hצÛï ùÆõò}pý;€pOù$þÈÜÁV,ùðU}ìkáNù&—|dB¾ïShïMÅ–¶éüýçc©}tÃ~¸ý(WÛÙo•ËÛf×ãù>Ô~3õnçêÒ~§×ïÕªÛQ[OÈ÷£¿L?Þ,UÍ·A\âgÃíQáà°vzÜÙ(oNG;§×FK¸0d)_mílÕç?_Ê÷0¹äûf³ÙŠîlD/oX×Mq­„ãÍŸžùåûù>•ÊÓ(R?¬žWÚx6jÙ¥~×£åòc[ìYµ;ÅÕ+KÎ÷Ñ"õQ…/w>>ßýö£ÙLør§Œì,¿üðò=L>wßg‘P³¿®ùÍŸÁüò=@D¾O¯7­ÿËå“ࣛôóOêg?|²¾wæRã.>©¶žü£0øcÀØŠóµreêrž[¯ýhp†{O.ý-an« ½mýñáG5ôJ.úìØrü' sÅx‹¿7·Ý .;Ýóè·…ñÃ6K+] ËÝ÷ sgògŽÏèºñcÌoþO<¿|‘ï³Ó¬·ÊëõÒz½²{t|P¿S.ïåWÞ,.ï-mµO¯w•£Zý¤Þîö‚4N­Åâö“Âö³ͅbm£ñ¡ÓôÞv›íN¸ö—^Y•ïaBÉ÷©óhzé³lz™gå8ó›_¾ïSa§¸zw¦p÷³üË`ÌÙÂo£s°“E¾¿MFcø3½ŒdÝŒ™ßüò} €|Ÿ µåÕÑÃuËíQ¦?+?+ïŸY­ÀÄï×ÖÖÂtxpp Ùzæ7øÕ ¿€á×0 "ß§ÅñÁ½‹KÞÿ¹ô£??Ÿ{dîILù~cc#¬‡ß|óÍGe͘è­4é³1ÁuãÇ'ìOžÁüæO7øÕ ¿€á×0 "ß§ÇáÖöô ÔÇ·_0QäûáÍ¿áIÂ_b©1ItpB6M:C&×?OŸ5¿ù¿ýöÛð«7üç/A ò}:t++Ûœ+=øó‹pûÑÜËùBm§}L@¾ÿþûï¿þúëaÁo{â dÊT2º®ùÍŸð‹~݆í>ü†_à @¾@¾×ívÿÓí>üCÈ÷È÷ñ{ð666O²½]@øE ¿ná—.vß=ò=òýÄïï²'ß ßdO¾@¾Èœ|€|?áä{ä{€ìÉ÷È÷Ù“ïï2'ß ßO8ùù {ò=ò=@öä{ä{€ÌÉ÷È÷@¾@¾Èž|€|=ùù sò=òý„ïï²&ß 
ßd@¾op£äûO@¾@¾ÈžÅs°x@öä{ä{€ÌÉ÷È÷@¾@¾Èž|€|=ù€‰Ï÷U€ÿ2ò=ò=@ä{,ž3aä{ä{€ìÉ÷È÷Ù“ïï2'ß ßO8ùù {ò=ò=@öä{ä{€ÌÉ÷È÷@¾@¾Èž|€|=ùù sò=òý„ïï²$ß';=>©Õ­ÞîqýnøÖ`;‰¿:l ß=:ì×Ðïîì¶6¶Zõã³ ÁY-߈WøÆ½ów¿|Tîíp}mø©©ÏëÁ‡×§†—˜Y; ²Qú¼0˜¶°tL>@¾Èž|Ÿl§ðâ<ÐÏ®ÅRzoþaøÖùö£âÑå·÷7£w cq?ÉaõÕðƒÓ‰ùþmýü/?•U¾/QL»Xš€|È÷“ @¾%øí(Á/.µ¯HçÃíϯ{—Ó)Jÿ/ëÁ-çûYùï'€|Ÿ^ûGQ ÿãz'¾ÄÍ…íÅF?¸à,÷üð­©Ço‚ÛÏ÷Mùï'€|Ÿ,Vá§¿‹éK‡ûGÛün/xçè—3Q÷/w‚1½­z¾ø:W|_©ï4zWæû/—µ­7¹g› ϶s˵Ê~çŠÅs*§ApÚh-·žm>)¼^ª¶Oƒ÷ëµ[ùBuîi%ÜæŸm—vO‚+í¬ï͇Gþ°UŸ,×ëo?6ß÷Ž;õÆI¸5{ÁH¿S)ï-äªá¹å½¥j+þP_@¾—ïäûdµâê(‘#'Îëü‹\qmxÀ½§ÁHcoj´êÎq0²³R™wŽoS¿]+·ß“ïïþvõGcÿ¹RëÇóýêÜçÏÃ_Æ·âbý,Ó[üâÅØw¾ 0ætïÁÌ—±ƒó ‰ùþpkónlY¡åWS±K‡|²Õ þ¯äûÎi§øÕWÁÀÂÿ,Ìýç?ÁÀ‹åív;þëý/{wâÔÖ•¦ ¼ÿ‘^ÚnËK¥”våËR.2ªT–"ŽÒ»cÒ›Ûôê1½8Ã,¡éž,ÄYàkÛdAÙ>²aw¨xL;c†AÖ #L„±0FÆ’A`$F×pô™‰ûr´\„¢ ÛÍó«Sº÷ÜsnUeyîá=…‚ø>]dÄH¢»úføà¥…ƒ‡bfbWZ¾?~ÆKGR6•8ÉÑyzë<KŠïÆOй˜þ(Ç÷f}.§žìê‘Cé}¸¹ZÇæ”W~Óž4·ú€nßÓµ-öÒ”bÖŽÜ` > ¾w×;þñ«úÔÕž“½|ðcÎAååÁó® ÍŠÑ‹¼Ä^) ?;ùÄ⬼}ñò>úÔçG<Wâøž¾{jîÿЛM&\{ãIãÖ'Ç£³ñžã¯ÔÛ¼3*àF‚øþî;î¼ÏzoÇópÉö[¾ô•µkŠèýB펭·¯_»Ž~¹g[qŸ·:lÞ°qnnNæñ}Þ:Þq)¡9™;~(‘;;Ç«Ó |Ô™èpàÌ´œwõ#­~ÃMÎG!{Óe©ŠŒ ¾³•Fãø>qyrÝüžDVN­²7˜ßïkH.Í¿0ç'\“){ð>ßyEª¦h%ëç—|—Œ¨ˆäø¾o–f>°XäçÍKz†-vÏiR1níÊë ßÓ:úúW^¾råJÑ—×}ù+‹íKó?ïºýÊîù£z–® _Ûëí¨¨+ñ½*­Ρxf=Ùg¤Û£rž>ìOZE>5n$ò õvfŒ#•=A™ŠÓÿ§(XWãûCCB&›Ø§”ÔWâûÔa£}猷ã꘠G¥Š#xî|íýãmÄU™„ïÈ¥„øÚ®ã-þÀ%=óUާNŽ«§ÆÇ"#ÃÓB^g€øž"ø[¾ü•¯­-yàÁ[âý-ñF¿Ðǯßj™žž¦tžJè)g©ÑÇûï½–äÓïg{ÏÊ| ¾ÏAlt‡’’‹‹jXŸ\m¦—霕×úõ¤|ŸšóH÷¨³óR«ÑœgF¼iÔIŽÚŸêŒÉTâH­ƒkÍs,Îö ¢Æ÷ºú‘eî|íýÊ-R]S+©Ñ¿Ú:f2^Å͹¯Þ{¤st`쪼â{Z8/„˜˜˜ ˜>½QÙª“C©}( Ö­/¾{µŒ©h>Ò¡Bù2ˆï³áb8NgL^àR9‹•mæZ뉭y¡µÓ(hQóñ¬M])Ï…wÌVʿ⻪¥Éß´vñUYâû™‰}J)ü4j¸oß?ôæ%!“ˆÈå}™7×íxåäeM^7€øžÒöë7P:ÿà}÷s=ZGOGžú·'÷ŠÅbÛ,¡…ùTÿ›¶Gþgòè íaûüþç,[nU éPÅüø‘5ùWÑ@|Ÿ]OS‡QOfâeý;íî5–Ì]ˆòG†Ez|ÿP¥3s£kONªôM›ÏÄåŒÈåÆ÷|•É+]íL•xEŽñýŽ'_Üì=Áµ¾î¡§udñ éòº¬¾dûÃê ú¯~yMå¿>F¥‚>>úÝïýlwUÆ— úøìÓÏP²Ï—S-Z›/W7qˆïWüËh¾s um玅t{@“ŠØHâøCû;wU&Ré/Ì*aºq•.M˜$ï*.Lϵï—ßs)|þƒfìUËÕ{¦?5蘕ibÏWf.žóD|ï\§±Í/ý fÅøØ„óä'û7Š·ãcs ñ='ï®v×é®Ó;w<Ê»ÑÒÚy:(sÔãñЮ¶¼“--Õ§ai~°ªÀ™3g^£_߯ä—áÝhÉu¦ÜH½ KìyŸÛ„©Ë»ŒP;=vzw<ÙAíè°P“÷‡êyuÞ96‡øž_?(oØÜÑCeïÜko<é0[ª/†‡Œ¢7½Á¤b>¼IïïR»ãð¨ŒÓ‡»žtí¨ì8¢È¥üyµ>âû–'ÔÍiéç]·ß144$?ªŒo-¾‡FS7¶m|ï}¹Ê@WW×Ë úˆø~¿ 
áFQ{Ʋ6ÜžpÅÔ%ž*ˆÜ5)USûxÌ15¾O¯Ÿ3ÇkÛw¾,eñ½œ¢žFPž4&'òÔ:õpz¸ÎþOÆ¥jŽëí:6εøÕZ:êZ~jï\êËÞì—9ë¯[|ˆï…”׫es¶Zn£âõËl÷ß{ßø£wÞ~{zzZÆ ß~ÛVíÖM[¸ÒN®¦¦¦.^¼è¿NèÖ4 ù™ìéÈ¿3ÜìÙ}þ >âûìú>R÷hí š½1šóøøœTŒŸñòÙ}ÇFƧ„˜½v¡oèWÉõßynÏ·Ž#WµÈä‘7]Cëâø›“w“¦¬ßŸßeÄ÷Ѭôgcž4öIopV݉׻ÄtÇ$áøžKáÄx€Êñ:ýÔ—\†ˆ3ý&$âûOÇÇׯ]ÇËä3¶"óé§¶lÜdÜž_üR=»vMÑ¿_.®Ehº,¡G º…WÐg’OÍb°Ö¸eAøoòÈî©q(Ÿog¦u;©çmÛN_[¿, ݘ̶Úùy ,Žâ1íog>ÕÁÏk'[Ä÷létþÀ™i™8zÈiT‰‘™ôôr®Íí¡Cç¦Ô÷þÄû7ºG_©MîLÏÄÒ§Ä+÷Õ— ™’ñé£ï$Çè‰>ï¢2UÔÏ ¶Îãþ«É”ÐŵøWàQNÍnå¢:js>r\H(4¬¾ÿݯ£® §>Ïåÿ°7Q‡ã«g©ÀÎÜÜœ\ÝSja¥M®4¯}ñ†¶:,¼Â>“<TÙâ$–ñ ß f‡éÊgÍî¹ÑåK'æÜÙ¼µíy}Z®<­»=qÇ’—tùyú´~gü‹lk7SdOÔˆï ÿeò63Ý×7ÒÚ}ùô™gßx0rM.i|l¢ą̃³{¤£o":+ó§G&;:Ç]£'‡ŽvŽ^ˆiJ\ðwQç!êÜá›Ôóþî=g.ùhÈÙ7r´•~NDgäuˆï£Ñèº5E´:þOµÿ·áÿ5|­híc–>÷ì³Ö{îálo‰7Ë–[÷›ßPÜO?©ýf﯋ïÞÆgy¯Ú;¶ÞNÃþåX3 e¯·Wþë·ÄÇáUùÙéž²BÇ÷…O¥YÁŸI°ú X}Ï ~nݲÇ÷;}Ç>2ZàÄñóÿàTGÛQ»â ¾Þßµí mÔ?,Vw|Ïa½y‚â9Ù¾ ¾§ídŸyêiJÛ)[ß¼acbi<íCû½o‡~ùÅOFÇÕöpÉv™lffæ‘í§tÛv×Ýtê›¶G¨%–áSŽOÇ¿~«¥ªò÷ÑhTf¥{•¨ºÌ£Ë•ækPâ{»W^!ŸI´ §¹¹e^s³ËE|7$øÙ;dïw¼.döWó€oõÊÂ[}ñ=Çôæ >4‡ø߇B!ZPÏÅmh5ý_EûÍÒN³‰ø>e¯Z›ßó¶·ßÓ)µ=ûô3tŠæÓ¢þƒ:ðûçSô’`À7 —"ÂÁ ßÓhµ0k£Çô5‘´j“½¦ª¢<®¢º¦Áá ™‰r4Ù+ÊËÊË«êš<Á¨4mþ†M‹7´V4Îß/¤Él¢AOC]õü âjìn_H&ѼnW‚Û;*ìuÔUW•—•7ûx|ôº›ìuUU Õuö—7* ÿL²ÓÃ~Û“àK{DQ¿Û^]^j#ôƒ”VÕ5úÂ:â{sHðMOåß—¼4#3ÃOs‚¿wRd"öI÷ð‰ã´`¸½;ÖÓˆM‡ÃÓ‘Xâô¿×è|*½³Ðc3ZLˆÌÛφÛ[ó×¶†?¹(¤) µµ†Ú!÷Â|"7O|Ïý >‘ËøPÿ‚ÚáÖM[²ÿqœî±Y2«ñ$òeán¨°dd­ .ª Ënµ˜ô-oHíÞ:›Iº¨\B´¹ºÔ’‘­Ê2AsÛ”¢<-‹3¯KÌU÷פ ê›}…~&Ÿ½xŽÖRSf1QÕèYéø~` ÁÀ@>Çß Á7Ëîóï™vªÝ¹§mD*bí¯ó)nÎ=µŸFÔ•ïÛÅy†Ï:?.IíÜþB«P&Ónü)ÀLòΜ¾ÊÄ j+î=Ö+SÅ/ìL»ÅáÀAŽïo¾ìþ³/·G|ˆïûÏKÔ¯§FëèÕ­e)¾ÏuëÚŸí.‹Åb)Ëóýî÷gÿÏm[?îþX.Mã Z™šw GµÍ²” ŸnŒäk´,ÉZÕ"¤ôØÍRx{Tš‰r­U~‘\°Þšñë„kø¸‰¯Vøg’û&Zc¹Åwv­`|ßÿŸ{¿Åöþ‡7Çãýˆï¯@‚o’Ýçß31ôøæ…hþ`·4D>Øk>c‘äÂ5æíã#é“ÑÕ‚øæ×¶×ŸR³ûó{L{r|sd÷ù'øˆïñ}ã{ï¯]SÄ›Óîúá$ãâ9ÊÖµ‹ùHòGª½Ã/muË¿ú•5¯¼ô²\‚ð×Ì—]IIÒKmÖÒ&¿®û,ɨò‹--€–óô†RõhiMCcScrŒ”º5l©áû±ÒR*ûÒ¬Ë̂ͩ©·-5†/oòs|ŸQ;*üM)ß³¢ª¢Ì–|¨Æ]øg’k|rT§>ÀòŠòÒÔgÒèÓW,¾ÿŸ‹ý¼Ù›ûqÄ÷7@‚¯f÷ùÇ÷,òþ܈ïç‡3ôÊ·#áØœW<‡y1>¯ OŽï·ûÚz©Ž8Õµm±§0ŒÚ³xíùöþ!æ´‘¡§·ó˜ýgc2.¦¼Nè<øa,ÓÃç¹'Ç÷7[vÏNŸ>-—ñ= ¾?Û{ö>ë½\Üæ‡¥IÆ[׿Øê’†h4ÚåvS±µ8Oç©S2+áU2åRc›VÑRaUËÂ5j¢Ée[ʼÔ_ø«”CUn=}u?ŸIK¥½r)áär;å® FGEÈ]ž¢7êã{kYmýÚÔì›Oï+Ôœž Õ»ª¬jD^øg’S|Ÿö7e-~MÆ…\v‹ª¢Y¬P|ï5‰ãM#¾¿Á|ÎîW 
¾Ÿ9û’µ'ºé~#Uo?xJªÂN#Á߉äø~g(,…Ž·)%õ3NFø/^‘ªÐ ¹|[e«œ7ÒÃËù JE˜'Àñ=²{Ä÷°JŠçÌÌÌ|û‘o&ŠÛßæ>q<`´ávçùJµP>Ç÷\å†Å|;8UÏ8^ãO÷};ÔÞJã­5ÔözÛÆÅôº½Z-ÇÉ߇:ÿ\[WWŸ»ººÚ?·o ìñ= ¾¸0Ã^wX¦ùô­ËŒïϽ»o÷gµç­37Vvø°úž‚øúÇ JÕ“7§Íþ±þ•WérÚ¨–2úÔlM>îýÕžŽÿßA—¬`|ÏÛ½ÆEýŽ*Ú@ÕœÝÍ=¾oÊ9¾7éìSjß3[YEEYr|_øgò™ã{›=õb]­ácmêˆï²{:’ñàçßGúJŒ²òm#)ñ}Û¶Í™uþ`0Ïø~¨²˜—ù›ÝˆŠç|ª/–Ùésó]]n|lwmm]îjk÷¿Û¼Q²{Ä÷€øž ø¸~Îò["¾'=ZºÌKÔu÷yϱ9ÂB }R¡~"Ñßãj¶×TØÌBí<Šç¨ êõÛ^gohh Zö -^‘%¾×ì6u*U._H’„š+ÔÉþ™äQ<ÇæŠ¦¤÷ Ï“ì>ë©üãû¹ï[<NÙÉöu!³È#¾õ¯ Îûu¹¤Éö.Q<çü.¾Ñ*«wø°uíý÷ÞÇõmx¯Ú¬9¾§êù|vé¶í®»c±Xîñ=GäÁª”j0Liظš»7*u_ƒ2€]7ú…ƒžj›E=¥¥§Òv¯4èa¿Çà FÓ·®­jJƒ§Î–¾u­i|¯'í:ÛÒਲ©ùuz&šßÃÌ÷øM{ ev:ÈôÆr‹¢Ü'ß0d÷9tÈßë2£‹=%Ƙ»Þ2N;Õnl2EìDµ³d;5¿_Ï/¾_,‰ÓùÁ`úÄzw·S«üPð…Œ_Dï¥aùFÈîßÀ*Šï)OÿÉvQ°NEðÓ×ËñÏ´ß !hµ7þHƒß}Ç?úþÆÆÆrïm;°®ëRŠ–*«EQÓì‰êBûìåêq«#Ìk乤;¯’×˲¬¾·V5…5Mi‰¼µ.‘ß+LjÕîðiºæsÔ©“(kôÉ,ñ½·L¡#$ç _KM†µð…&šÛ¦»T•!½©Ü¢*¯k Ñ û’÷—r²øÙ}îÝLã{“uô"ÜÍÙ=µD7Ò»Í(§slPª´ÞNîÈ7¾ŸnûC+¿'ÐSÊÙï^8µçÃ9úá7 _è¥ ?Š1ãßÙ=â{X-ñ=ûùOÆÌÞ³­¸Ëí¦²øf·®%‰ ø}Þ¾”´ó-o{û½oGæB­ Ãì^M†VK6åÍ‚Ãñ$V›ÍfIVÕ4Îó “vaå´Z]ªrT[Lq)³Ï)M©z_f³¦d× þLxæ¬Î<¾'ÂßdÉŽ¿>â{€Ud²§#{(ožàÓåËŠï7nî||o×bÛÝ^’´mlG}·TL·Wó©ÎúÖiMŸ±I÷á¾dÏa‘oñæ÷­Ûöžb&2èz'ßýü'úBLÿB1ìùà”® ¡ ½À=ùFÈîßÀ*‹ï'&&6¬[¯nN[þ{ŸßÿÕµ¿÷ï­ÍËe8Û{öá’í¿üÙ/ìõöªÊß«[×Rmý@ sRr5A:j–Þ¡•—S„Ì{råûµTe<›ßÛ¸¿ÖT±TfÞà ó’ùReD·&UÁæ*Kvåž°(ü3Qgž5¾'þž]FÖF/€ø`µ¯¾§ßóï¬Æ÷íÔ3[ë¬wÎ¥]xºØü’Ýa-{íû~#¾Óx2©pDà¸Ó|bouK&¸ÎIã]'/ÇɸÂe÷ˆïñýµþׯõ,[nU÷°Ýj¹­l×OèÈ3O=-—"{ê¼sÇ£›7l¼E)›óõ[-›Öohúó2zÐUÁùqrý=ä®)ËžWØšLö¶$—oaÖ*»#*UºË^ŧ¹TŽ·¡\ ÒuÉ„§±:ÓÀ厠ÒK÷ª×{u™L¸S|[E£?è*SŽ”5x üLÒkû4ú†ñ)ÅìËì^©Ð|-åÖLã–Õo ߬êŸ~~ž¹.¼Ik+ÙÙsðp$¬KŸ~PÝ–~ÕžÚˆ&Yd¡ÊÍf^&oˆ;ÊîþT_˜L§QdF*"½½»6§M¯¸ïD¿L!F|§Fü‡ÞßË7ºAâûÂe÷ˆïñ=Ûó‹_òæ´®vù×þ—e–¾ydûÃü}ýò—cͼ]-¥ù2Z4F5]$Ÿ =.—Ãáhiiq¸½!MHÑÏåhinn!ôÃíõ›öÚâý4]f¥‡=®–¦¦·ÛÕÔÔìòrrŸ¡…¼n—Çëu»=Á°výŸI^DÈçi™nWKsS³#±ß/A|€*:9w.=ü©»5Ðvj¬½uØÝ ÇVê>ÞáÇÇ<§†ÛœáO.Έ¥{¶Ž¹Ãíݱˆ.oß³‚f÷ˆïñ}ñÝÛxáüþêgG†‡)”§…ù—‚—ä’hOÚõk×ý ô±X,FUwxëZZÑO•ñ%@.^¼è¿®h`uKï 
—Ý#¾Ä÷”³ç›ß¦ª÷ÜóòùDýzªŠóèw¿GEíOwŸ‰£Šù=Ï›¯¿ñ£ïÿàkEkÎkÒJüßýú7”þ_¹rEäajjê:&øtkš€XÝ®g½{Ä÷€øž…/‡©„Žº“mÊÇ¢¯¬¡6D9›òq``@þM€"»G|ˆïC¡Åô´pž–áÓ/95ZtOµòé—>oŸÈâ{@|ÏýBPE{ZP÷w®[STd¬ÁOi_ýò*šŸX›Fùò,ßû|>ú2Ø<àó­Oÿhõù|Û/¨äý¿ýáTéþ—?ûÕÆ¹cëíT?±Ð¾l×OîÙVL¿P!Dí‹5%<(ß/íºF_&‹IøœÄb1úG+ýv•¬ýoÿïÿÞj¹mðü Eù÷Yï½ëö;hOÚ‘áaZ˜ôãñýg‰DèËøýþÙÙY y£œúý~úGk$Y=¥{„2î‚ßßî\òÁÜ ¾ÿë_ÿ:44”Hðc±ªèäS3'‹%²{úG+ývÕÄ÷ˆïWÀµk× >ä/‘ÝÓ?Z%A|€ø>Ï5øTäÊ4'v²€ÏÀçóÑ?Hÿ—»FªŒâ8ºkÜéÞ|e:l Ù@6•¤LõtáN\/Î9M¼þÏüétšÿ½{ù@¾ÀPåÈ÷ ßÈ÷*ùä{ùC&߀| ß`¨ä{ïä{ @‚|ò=€|€¡ï@¾ï0Tiò=È÷ò=† @¾ù@¾ÀPåÈ÷ ßÈ÷*ùä{ùC• ߀| ß`¨òä{ ÕÕ? ß¿ù@¾ÿ<@¾@¾ï@¾ï0T?€|€¡ï@¾ï0Tyò=È÷ò=† @¾ù@¾ÀPåÈ÷ ßÈ÷*ùä{ùC• ߀| ß`¨ä{ïä{ U€|ò=€|€¡ï@¾ï0Tyò=È÷ò=ò=€|ò=€|€¡H“ï@¾ïï»®kšæðð°”²9ˆ“¸Œ›ñ¨ïà+‡êì쬔²ô¬x!^ëä{øì¡šL&UU-ݵ¿¿2ˆ“¥»âåø¤ïà“†ªmÛÝÝÝ¥Áòòr]×———‹Å¢¿%.ãf<Š–ñI|ØÈ÷ðp¨òí~uuui0]³w&®²*Yºÿ¯}£=ÙSÖä›ò I É`y¨› X ƒ€PByxr#…”pï­ |·¹0÷zi¯ÜÁöÜä~¿î®ê퉌Ȉ+bÅ—aÄʲVŒÉ¬‰ñƒ† ~ßµÍD«ú£»f¦ÕÃÀ¹~ºéôçÔM5RÖm7üð «Ê±êNoT âæÛ^Õe5BÝã©èçÆ£¶ÓìÉ6¬— @×NŽV·ú)Æ«;ý¹)Ò0 £(JËv0GžíóÃü'(®ú„hŠêÚnÝÉÛfþçG2¤O„¾®¬¨/bÀþå’y;ð"ä{˜¨ÌÎÌ¡}÷_}õÕwß}7“‰i¾É):ÑÞšñÒQ®5c'-ŒJúg?SC—;sÚS7ü@ÉOsœr5l@ÿeÛœêëdOhò‚ka¾ê#(€îìXähÛWÆž5ÒÛºbûÙ`†A¶Ï ðŸ \¸êóQœò§¸Ñêpo]¹”ýðüùþ½ºÌ™§€þó/—äí°a„ ßÀD5aOÚýpËïÿ{Ïóþîïþî¯þê¯þ×ÿú_¿ùÍo¾ýöÛa)øc&Ã[©£ƒõ‚sjYĸqŒ›á]‡ÖŒ“´cò|N¾;wÀ¾xY¿ºçb±ôu7 »|7[ଆí¡úîžQvæÆ*Î;z²M·”») ¼Ìöù>uéî²³ŒMàïšm_œ­Î) d»=Ï?à3— ˜«>%º:XWÜ ¿£–îHã?Æzx~€|Hÿþ ´úØróï9üçïTòvØ>Âï`¢úøñ#™³Üwÿ§?ýé׿þµÅø›¿ù›¢(–{ðé1«áMè:²fv™âñ‚C¬_KÿÇ«jâ·ËôÎD¾ÏwlÁ°u°Ø—Û²Çÿ=„ý` äû28X–=þoPöÛta¹`û.A¾'Œ³­‚Ytý¸Qª×ƒ9òlŸŸ­|îåB¾RTv²®øiËÃÆøh“ÀŸ÷øÁÉ÷|@xfùžxO8à?§’·Ãö€|ÕO~ò“—»j_wOÚ=ç/þâ/þð‡?,ÏÁ¹ÉvÌjx#õ¬Á³ø¦ËOÖ ‡J³%Ü„ãg@ÿ€6èÑ‚áœ?°ùþKv€.-Ô³ç”ïéɹè–-À¨þ®úèè@ý±Ò¯”´‹uÅKšá–Ìä{ŒxèTFEò=LTŸ>}²&>|øðêÌk•ý׌·&Æ 9\…gzbæï¬[‚j)‘4ž}Õý3u#ßï.cº¾­Ò8šnŒ³¢¦O2úºÌ“ø3Iš×m?0:ÕŽtý|¯Z‘ÍÉ)[ù=«ùôF¢8-ªV³Öx)Kõü_úù_^²ê:UÅó¹ï^¬:¥ºžÉI}U¤Q4•–dõüQŽ®ó$¸¼DIÎÚ`ªöˆêôTx™%tOcßw#½¦”}»Ê”o–¹Iór¥I'›&I:’Í–*Í–=Ôâh2ËX©¬h;Þø‹JjUdÉKê$+y±SÒ9 UÉÜ “²éV#{*h¤ç¹–Y<Û(Y¹‚º¬v^.¡U•DTr”W­‰ƒP¦e–„A0~Í8“Uô5%ôm‡óW ©aåî¶­kËLIÖW‰7Û'®”’]”fn#±&èzü«?ý2´¦y©XUÅ®-¯äœ{–îu—zÎUï«uß÷Ú´Uß#[b9 ôm™ÄŸ[ºÕËLêì,Žíy³,ËáçÿðÓÉû‚å5_©DÁâñr9Øì»žRýza‘P²¶¬ô7« Ï¡D5ÙnJÉ`ÇCSñ—#OïPé ºØŸ«Fìü0>É÷UrkÃq½ 
ÑD‚ˆw˜]T(~¶ë¼j;/R²³ïÏE·è;±kqœKÖ¤³HÚ\mêQªî~sËÏÔZ K^„&í¾—»üú¤oî›æogùX*©‚qdX®ÕMÚåê*»¯ôÀŸ»A%?X;ˆÃQYä{˜¨~þóŸ[ß|óͰ`¼«Öº2, ‡ÿ÷?,?nMŒo£Mìk¤Uôô0¶çg‘î 9µ©O’I·|}Õ¶îbûËO¬»¸ÀÏÙ®á5Ã:íɱV«žÊâçÿðÕ˼ e:¬Z$ãð—Í»"°îc¢ž•~ÛœÿÂùJUé¤#:MÒâUÇdDì­UÞ"ß·ÙÉZª?tçÝzDzιZŠzkIÝ‹Z‘ïÙ•_¬ûU/¯Ëz·ab±Zms'®õÃBn{‡ \·¢º¬”´{¹»mïÚS²•üš”ð6Z¡» ÜéÞ~JÈ][>^É–É÷ùÉbØ~nتâlÍEòýj&44×èÞx6Cu™c­³Ëº<å® ,Ë5›¿Ö+Èa‘cGµ–Çæemꪬ[v(øê¼»½Ã¶ËéÁ>(÷,Q|µ^ßmæy2¹…–23¨Üvó=;­u×DÞŽ·j½XíOÑXSÕV·3“ï» µÿ1¨;ýòZpzLxšR®ŸÕm¯û¦¤µÀȱ&ù~¹€ 3Õ÷S«ÙòF ßër¿(7¯•Öz´æÑ¡‡a/¯Ëz·¡rY›{Ñô³ÚTeÚ@D˘G¤£íU^˜+==jŠ“;gêxépŸj!ÒøQ®z­ÇR‚ãr&u·í][bÊþ³}¢ù‘£}&crÞÏFëÎbÛ/]')›^5Möün¹kËÇ+y'ç~4–ÝÔáµQa>þÙvÚ°UÙ‹¡KF©] ÜÓÓã%©[ÕÖÅù@}ˆÆá75µxR´ª­@߬Àü¾H“Ò, öËïÞ>ä)w\€•+Ÿ¿ä”Ê÷®Uíøñ”>Ni”ª‹xo/+%Ž7ŒËÚÀUy Бew^^¿L[]Ÿè{¥jx;MLõ>Mê;4µ¶žµ_­×w£¹@žLlD¡¥Ì * – \˜Ë÷¼Uw*ìƒjXPGs¡ö!8ü Ûû(¯»NUYp‰>êòw'}mî›æogùX*¯‚IdØ&K3¯(íYH¯ØEkÇZ~ðv‡£ò"ä{˜¨Ö±§Õݯ~õ+aqÖ•ßýîwÃ-c&ÓjÑÞÊõaŠ´t4’v4½…X^÷9øó ur˜ê´U÷Aq{ŒþœúU/IÃ=EÛÙ°¤Mi{NÜèWÙº·‰iEr ‘Cš maû}vÊ÷´^t—?`P2þ*m“¤‹àè’Û«‡rÊäiæžhïÞê&ºüLŸÝ_ŠùsUHúj5˜ú³ìc2Çâ´Ìˆ*þ=ß(ßëêÚ<² õ–9O.ßÛUžL@›h–K_¾£³¼ìIzhDòýòô€cÍå$²²¬.‚nCߤ¦‚½¸yeN÷Õar¡”ÎéÖ\å5_ î¸>F5w :\Kèn»¶Ô”Ë[ìúa•÷°‘@”¡×GXYǸ~̵åã•ÜaïyÖÅ]Îæ­*ÈÖÈX4?5šeBó kX‡n.2YËÄS|E¾:»6Ù´èöî¼\ƒùk½‚Z”L±%‹Pòóò0%y¼aXÖ®Êczn·‡¹ós/Sƒäblã¹gÉä{^ß çi2¹%–26¨¨Øé%d…{ñ‹èé†þqOá.ÀË}|þ’UÃC‹Š=g£=;(Yo˜—õ~®Êc2mÖnú›+'è2L÷3 ÇOM=K&ßóún8“É(²”±AåÁ’Ü…%ò½´¹(h¤Ñ•$`j(B~8~ìß¾Äà#ž‘oæƒ ogùX*¯‚Qd¸(×IíÇ'¨•è2Ÿ8ü`í G¥EÈ÷æÀá9KíþÇ?þñoûÛ?ÿùÏÃððŠžIy×å…‚QöÖª×,7¸AÏCÒݹcê ÅÖôcÀ„$iÇD’¦ÁѦ5Æm¶—µl ÝTe–&Qp9ùÞa?‡œï$ߟ‹Ž­=Héf,qÖñlÅò}_ìWÖR¿Üšdƒô•’‹mQéôÓ΢š,^ 9ºkÊyǃë°yXkÜëüÏDÆE¹Ê .Ë÷=«ÅJ•ÅB-@ØûƒÄiY³ê1è+9´—ÍÜݶsm¹)Eòý;ØH ßßQ7ú2Xä wmùx%íäæò½¼Uß)[6‘I2!õŠÆays‰­ ï‹`ÏX—{Š\¾7š¿Ö+Èá‘?wÎê‘xü¬Í]uþ}°w{ÿ¤y©úÁVÕKîY†ò=¯ï&s<™ÜˆRK™T,É]X$ßóæ’¾ì»vÕ» üûz«®Â·/1øˆg蛄é`ÂÛY>–Ê«`ŽË—³É¾Ž‡Å™tn’k>ñÉÃQqrä{¸ºÖëkÙN;QCEÑX¦Ø EvÜÒk¼ôB( °ë”Xží*e|v,Æ;É÷”l%X$aÂt¹Ð,•ïÛÓŽª¸Ï»WkNÛZ…"ûüš‹Ÿµw÷·FÀ]yÞ¯™%ïqv—í-¹Þ%òóŽšñß{önY]äݦ öô§‰&Â[C·Ù”5ÃÞÂl¥–ô•(+swÛ̵™)å{È÷lµO§" <âÚòñJÞÉåò½y«Ê³57iL|m{§˜5×ãò=»)×½ärOy\¾7š¿Œä{ªµü¹y¼!/ë˸ªÎÎ|PŸÏàÎên0ñå%í{ ƒòc‹¹@žì#Ê-e`Py°dàÂÌ”¬¹d¿Ñ +»¸|ëôʯsjï{ÖcKŒ5ùÞÜ7ÍþD>–Ê«`ÒËÙôë‹ò§bícÚ7ÑõwšzêÃrË—ÙÄ'G¥E¬ ßÀDsÄ“eÙ`ÀøqkbÌÐ ›…J›N’, 
kšÄ»¾‰™Ó…AÕÊ÷ö=Æ‚ÂJž­@»!lgç¼sx_J¾§¥[ä›Ë÷$'9Tßp6ÑGGG`ÃòÝðS¦Z§˜¾`VqÜýá„þîž|oÅíÝ©ãg7]kqk“]ª’Áœ3½lZa·!g¼·ñ°7•†»" ´-‘Rº>>ØôF޹»måÚÌ”¦ò½ÀFÊ÷Š\ }ȵå㕼“ËÅ8óV•gkn,vÎ>7}äáæ2—ïu/Ië ¼R ß³ùëiä{ƒxCú|SW%º¦OÇcq‚²Œ¡ÒÏygâYæòýs<Ù#F”[ÊÀ ò`ÉÀ…™)¥ÍÅŽcr’1q—:ì°þUùþ¬î o\bðÏÜ7¿¼|/¯‚Ad8S®_²U™C'ödˆ;’òç67˜øX8úîò=€|Õ§OŸ¬‰> w° w?nMŒtùiŽŽ\×Yn!TòòÜÞ¹;{ñµ|/¸ôß@ùbvíOQ­:~Ðäï‡ö4·ò¾Ô£öl£Ý=ep°×”qkŸv>ïªð°rxNàÊ#`vüèŒæU¯™"Àä{ºhAÒµÜsa|xÙÈåµëê"É«^\y·QÙ‰½Ýrï[Ñ}[Ix³E˜Nfº“O—ª±|T‘}¦l{¹»méÚrSŠåû lÄ».D¼rz¾Üµåã•ÔaÍå{y«òlßÛX=-—rýðœÇšË\¾ïËÃâ`òÆÈSdò½ùüµ½|ooHŸoꪜ^µE–œ=Ò²éöQ#Xéže.ßo0H“É(·”AåÁ’ 3SŠ[•™û7mr\Þ5Êaxîx}yyûƒxæ¾ùåå{q "C"9ÚóÖúУk:0Ç>†—ÃËó¨Ñ¦ò½<• ßÀD%<þþßý»W–åÛäûñƒãÇ ¾g{[l‹X®ÇhÙC)lºçö :{—íØEŽDû޳ÿwÞ3n¨|µ‰sÕ±[¾5Lr7ÕË÷,‚|ClÍ·­Ñ9ž–íÕ/Tv&#º,º% ‰í…!jçìÆÿ=F=52ÛôDkæ·EÀ©ïÌVI[öÓÂÝÝ÷—²çY®ýºå­ w÷ȻŷÉ€¾ ó@ùW?È:i]ä݆~K£~¸@‡{‹Ýµ(r¾ŠF¿rì]Ptüg˜u­¤ËÙ]aLÙ·ýLîn[º¶Ü”BùÞÀFòýá"ó¯.”¸¶|¼’:¬¹|/oUy¶¦Æb™ð±EסE‘oæò=Ý8ãfÊÐSdò½Áüµ½|ooÈŸoîª}ä¹Î˜ìÜÆîœ7€•nàY²øj¥Ä­æa2±¥–23¨õò=LT?~´&~ô£}÷ÝwÊ÷ãGÆZcVƒÍau7úgG?*Æ5×€Þ:FÕ«¤{úµ‘|OëûÍ©Wñ«s]xž ZŒRп\û4±Gµæò½}LÞæg—Šo‡%šÎ&"…W¾<ÐMìX3Žß_íR¨¦´š,@?ϨŒ¬îßnäÏN;~ŽsßVùDÕök¯œOuäòV“ø$Eå=ë™»óMý©²ób•ºÄrÇNrƒoY«Þ=ƒž·Ê´lq]äݦÓP-Z¾®^ns“;I-t%5‘X÷¾@OÕ¼Uý¬•»Û–®-6åÒNBé62‘ïù÷'§S ä®-¯äk*ßË[•gûîÆZŒ„çL±[Fè#òæ2—ïuâ948Ƶ¹§pàåšÍ_ÛË÷æñ†üùÖ®ªÎÎ2Ï*±^ŪfS³©|/÷,y|µ^âFs0™ÜˆBKI“q–Œ\˜¹ƒ¬¹õz3Ðî^¥XÀÉß1R)Uú-K >â™ûæ——ïåU0Š ÙaS·ÁmsuL:µÉ\¾‡£ò"DÈ÷0Qýÿñ?¬‰¯¾úêQù~üˆ51f2˜BXˆc}ÿ"2þ¤\¢â›5öç¤íz­»* ]vØ¢ñ 9…•î) ê»¶HwTºçº{©KëÓs瘖uUÑi`) dZ¿¦yY+q°ÈâÝ_4î{ÕV¡Giý&^èú8gzæŸ/çÓ-þ9Ìõ«šÚ‡¤lµÖKŸó[^#–ù=ôÂLõºïšÈŸS²]Km~Y•¨šn\6}ß·uLyç¬Öצ£ZÙûSVé»*½Ð×¢ý°Ô%ì)­em×wmv?­]%"#½^@½§hT×µyD-7ok’ÖEÖmøOe~TtZ÷„ž:~ú°ßu7¾ÐL}^wmx\R\Nqñ¹-êÜŸ‰¶¹ÉÝmK×–›rÙò;?Ì‹RõÇÀFò=±ó’¢V]WeCN|Œ‡¹kËÇ+y'—Ë÷Æ­*ÏÖØX|$<^’ª®ò$ØÓ3úˆ¼¹ÌTŒ:>.»Å%¸œØxŸ·Zî)÷\€·‰Áüµ½|ooÈŸoíªù™ïãâ%&Óm•y;R` >5›È÷+¯ÖJÜ`.&“Qn)¹A9ò`ÉÔ…™;ˆš‹ÊHùo•9ôÇËj¥uß•û¦%ÆÏÜ7¿´|/®‚AdÈJ·§IÃî9 ïl,ßËÃQq"ä{˜¨þøÇ?þõ_ÿ5)ø´1i÷ãÇÇLsèå\Ú=±~E'×÷{vµÔšDÕÇžcÝeŸwoË–£S*h 'ë–ÛR8<´rÂ>©Õ`ñ¼[œ%J;1ïã'õZ˜Ë‡¢pûž#}¯´‹{ÉB×kMH•¢¯ÊÂi‘UNù5ζWR%^t‰Õ¤^òÐy»mv^ùrí£u‘w]ûÎJ^~­uþݲ¥øá³œ–„m½”»Û–®-5%×Lø™ïg#‚kØÇ²¹kËÇ+y'¿ãY;^I«¾C¶‚;u}Z5‰ òæ[u$R¬?æ)Üx6Ÿ¿x9ëî#noˆŸoîª}±2¦Óülj6ï V_­•¸Å\ J&7¢ÜRrƒräÁ’© 
3w6¡RÇ"\¡„ZÇžµÊ£KŒ•IßÜ7Í.þD>–Ê«`²»£¸ˆd¶»Á`⇣Ò"¤È÷0Q}ýõ×ÿá?ü:E‡ŸƒÏÏ»§3sÆŽÌœAôáÞæ2èòã´«…ÿ0À£À"ôm¾¼ÝŸÊÎ$[ŽŠ¼u‹ãúE§—ç5†ëÓÓüØyQUF/Aèeq:dû¯„›r^Ú×9D¸|õ5¨"t-ŽŠEÆ‹Òyã¸AGÞ‡RRñ¾Ë-àœbÖ us98¯s:Åip¸©µÀªF@ußñµ~TêE-lŸäû)Ë‹o3ÛdMÏÅPçxò÷¯ÓƒL¯¶*}ùË¢ vUL9ûSÜ>^—ÕnCåírŸáz‘z»ßé,8ZËöæ×2ÖébòÂ9DEû¸»méÚS}ur™œÄx?­ ¼öþ’F>0Ï5³œÐµå㕼“sÏ¢'¬‡›µê;eÛ—‹A•ÓeÁ­\i»A–_ö6¹óCÍ%¶ðéƒk ñî·å><É+H¬º{¼zîÒsy¼a^Ö»º*ó)B7Ùqgql×Ï[ÍspÏÅ›_Í<ÃJã«õún2ˆ’É(·”<G,™† |@7[M؇XRšìâ°â<=Ú.1$“¾¡o\¼åc©¼ †‘!]c_g¹–‹?bß6‚ÁÄ' G/@¾€‰J®àÓüñ*Ú>dYöÍ7ß Æ?LJã? hß=i÷OLßY'Y–&I–×m7¼]S¦ÉgÒÏ¥ôô\ÕÕHÝ(=,èUUùHQ6ª_ýþªiš¶m»^¿¹ ª<>ޤyÕ›¢ê"‰ã,ÏÆ¶É˺Ók)gS¥y­úõ‹"e[üº21ËTôMûV ÑËi¡_:Ìü‘¢nùùNËÌÛº¸½hšU7ežMäE¥ú·ÖåñnÓ·U_;GœVfÕ Ló4.a’%a&YA5’ U“g/äU£LÝm×›’PÍdEÞ°‰äcfž¥³ (s׿˜9¬9†­ºA¶ºS]OÇÐùÎ4מÂ]@>!ÞØÞUÛ*à Œ’4Çÿ¤â¶BîY,¾ú^Ï<™%–20¨ XÚÀ…¥­mö› ­ŠÏm<9qY÷æ.ÏG<¹YŸ^³Èp{ ÂQ€|•ù):t>á8ί&DZn ÎÌÙºð`ÓæH©EèÊÛÙ¶cÛîüþ>¿¥Ó>DzØxÖ½UBW1´àÙ€|ÕÇò“ŸX«Œ ÆdßGè4êCÙ O/ß»…ïôT·ùñúÜ›¯ìãäû^µJë6<Ø×Kk›ð\@¾€‰ŠøôéS?ÿùÏG¥ÞžÿŸñÏñáøOßcêØw\ŸÎM}~ùØý~Ž{<÷;gåzF€|Ï"4âX€çò=LT@w²_ß;·~ìþˆíg èæë19ŽWö»ëwtÑ^žÇZkæÀßã5¾kqüPoæËÜûEß6MÓ0m/˜íH@¢èú˜?7έe?ûØîà ü;0°ÌëCãÚòC$>·yæünþ»2ðn£ÈšîͯwúÇ+’–BÓí»#§™¿Çi‚i6¸÷­áÚ¼ë‡ó…O@”ïAåûÅÓÝÌeFlên8B‡7^ø/d^ý"ˆ§=åV¶¢‡¿ ÌvÄ×I³›æÏ½ýò¿7ÿÃ/žVæóÏ=g¡º¹w±4Ì‹u;D€‡Ò›§I»ÿ¶{ÒÞé)ÓŠºÿËt]zÊÉå7Â=Š; A0ͨUüΞ=AP¾GA”ïO·Zùž¼ÉÛ9áwW3C ÕnH˜c¿ʬOYžßÔÅsUœH¶#žN¢˜«BÿØÝòÞ~ÅÖîá‘¶Ï çË/v koŸ¢ÓÞ[œDØ©}Aþõî/3£8¾úzŽÑ¾Ê¼ò½7þ¶éÍÏ«05TüN[¼3‘Å!žfÂdÛ°ŠàÜ=öAùAA*ߣ|oÔ‹™L6zê†??95OÉ©ÿŠ~”ïÅsUœF¶#¾QÌf²ÑSlBÿ¼Ý’–ïWjGÛÒñô³Õø¹ò^?AAùå{)w=Þ_{@SÉ÷÷Îëåæ‹èùtÔÿ‡êª´3”ïÿþtߣßiEφ1ÅáÞ=8÷Aù~ñ ‚ (ߣ|¿9µª‡‹AùswKïe-H69”ö&ð[Ê÷Sù~×ûË”t¿&Ë÷¨«ÒcCP¾GèÝAR€ò=‚ ‚ |ï»–®ÊRsˆ$«]Ó ¨Ë ½ô:p»š22–ÍðèÖ¼!O-¸VW5,IŠa{lù:ò|ºUC“ëµõ&Õ/˜Y†.KO£“T­ëxÁÌØ\SÎe†äʲ빮ç3û¦k*Ò¤gI7ÚÆsÒ’çDþ‘‡(Z׊·šÒ¬×£aÊJdfÂ09ðCn’!5Á±a¹h ñ‡’ß º&ÃÖ 6¿ÏùÍ8ðÌh4ŠAtáü²x®ò´À†Êvû÷ÉÇcv‰CUÍrü_¶æ:OŸÿÐb¯-]!Á­Õ%EOèÄ÷ž>$D|Øæ´7<úðC‰œ"«t ò/m¾–! 
ê(Yìà©åiw§´ðaW&¾v˜xÁ¼wKvXy'( ˆ°5&¿ø¦ù¼À:81ì–|ÚO[ª}â 9î©§Û;G[;GÛ²¥±nt|8Ô­–bµUkÏpûÃ_¹K&7îíÜ]Ý?G溻ÊiC:n(Ö!\løh™“_>wîÀ½nI½µZ'z6vŽ÷Ì;v_}ÇiÉÇѼì€íNàãÑø·"§IÇmí҆т|ß¾x·Ñ|‡î•NÛºsõìB~<ÑÏ6¢© ŸÞ¶bÛ÷tb<ØN4;G8ko0ÊÄÁç·ÖHï†é_9~ÿ¹ð®ÌÓ§BÕKk§Wž?jäûRoMù°gmKQ’DQ»8!Qú÷7ÞC?àBôòº­œE¹ÔÖH:œº*™àÃͨñ0ð5íl”ÀÇ-͹I5 ¹—Ü =èDÍF–½›gƒ+g4Ah$v_èS˜¦"ºÉNI†4ލɦ1ƒ®ÏÃ…pFVt"¹b«Ñ‹FØPÎv{×7Az3Q?@RÝÜ&éÝiün¼@šñ¯JFÚw3¹7ƒz¡™·ƒ§WÁcôÌÕ°R¼ÁäÏ背ç&à]5ì݃Þø÷pî äÞâAP¾GA”ï=³ZÌgh²…ºnOŒjĤP—j”u^êº t«Ä´R¯•ò³¦eÉå«}_ízSc”#;Š\M‹) ¦RÍe(”ëv@\üëðÕj1É7•®›T_¢ªjÍÝ¿lùãÌK†Ç0¹’àŒ\±füaM?rÑâ%Èc@®"uÁ†Ïç¡<ƒv¡©Hpú^µ'ð,Ëúí¬žbœRåøå-å˜çô=TWøjííìÍt÷zÇx—+of‡ñN¾ž‘}Ú_¿Ó£}ñþ@sûºèÈûc3i×ãu›CåçP«¢žWk=+ CÓz·¾GKkÚ¬{ûçg¯—éf[¤Ëä‹({3å§_ÂE‘Pðžx’Є‘Çå›:ôØ^}±ôMËo;7\Qxh¬Ë3 ¾\?j|áÓUÁ½ò†DG³½ÝóÿJÔÀoòîùå»qBþ§ÿóé¶Ça}óãêù…ùA›‰À4aÒ¹ÄiÜ»kŸÛ´äÛu‘“‡”f~ ¶‘ÝzYÉ ¸D(ÍøW%#m^¬_F̳ÌL#ËßúYVåüü[A©÷>ÿ,}±9W {÷ ÷þ=œs‚¹·x”ïAå{'’È4M?â$—,ò:ª¼nõ¦…šËï©7®^c4V'# m3à(ù#Á+—$H¹Ée|Üú*K “­ V_‚EÙ­³ÂšÎ/db·Æèiµéó‡5ýÈ…Rˆ?”fPû;Ù„è­!¿ÏCC2¿žaò½`®ò¶@Ã^wLJ²ýü̵üx|²3“°êxÝûµ—²0ò„ABÒŽcìÁ€-ß /mhÙí²FÎ/¾@ÀØàêZøÂƒA®¢Íi·ä+çä{Fõ•ÛwcUhmJÌíìPú<ò.dß]ñ KÇÙ2  öz K¶|ÓIý´¶Ï'ëdã#ÃRjKº/Ž9ÃÀÖ~°üé¸p[DœJ2þ Þrº÷4K´¡=üö'-ß³¤ä{{)I€»‚¾ž}Þ)·Ð{üf˜¡ä’ïKàÞ䧤û©£FM™!ßønŽ…ËVà c¸ÖØ¥2c"©à•ÞaXn˜N3¨KÎXË_/Àuê«AãßLÄÜÛȾÍ ¦YúU È÷W=VD`!ÏÃÔŽßÿ1äûõ ÞUÃÜ=è=Ap§'(ž{‚ò=‚ ‚òý€“ò„\]³RÍ@+çá´æä-P¨h–ã¾m(%xJÖD¾²•¦æú¾ïÙJ­D 4ò}`¬Nõ«[n®¥A¿…¦a®T·¼à©HƒÖ,O´S¢¸Ù¶Õm戥dÙ–åx‰#±šEа¥î°—á”É SPÍ(/_ªw£F}W«A ãcª^57n³©»Áè•ÝÝ,dI“e•/³ΨHºëAÔK½—¶ùÃÊ?rñâ%oÄ ìòuÕð";Ï–HÇé}.š00ÁeÙUI·<Ï5µzž’ïs•¿ù>_SºŽëEn/fáÿü䥱ÝjÝ §°¤âäHú¬—ò•®3lr´9ÀëMÍå•ïs³ò=xºÖlJ*-- .mhy&Qóe¥k¹®Ó…#í0òTñâíßî*²2¦)0Õ¡op»%WXÅ2M¼ö}p·½6‘h÷NBÂàüd¸Ï½Žó0-ýh,5ÂYÈ~ïçD7ÙÐžŠ¡<ÚæÅ»·Ô1ÌàľåoÛº{ãù‡Zïå_•ï_­:WçÖÌÉÖ•Ú™åú'½ÓÌòìN¦d÷wÒå¨nÏÀ2ÈÙX¦pÜ.ÖŽ¶$ëŠ×9 àTõÒçÞ‰÷øTØawgÒ×®·\nƶ×2nûAäÞ³ ìûa@¹÷íæ †Ññn[5˜rQó9ä{â‡ÑÑWß²ÝíϤÞËVϵÎÝçªI\Ù·'½£%2¯ãÛ=q(Q²½¡:WÞàê2$rWŸî…x(3_Ž£¾®œë HfÞÓ÷`ÿþ`÷ü®?Êø , Þ¨Ñ:ìËO?¢Zí#7ə۟ZdëpŠéûXá= I½ÌÔd÷ÎýAð&ûþ¨Ïo>nŒ…5šËÀ»…ý„œÔŽx(?ûnÇ~*útc_ß·È×,~3Q?@RÁÖΣú*þní;5rá4ã\•Œ´1ýXDÞî·zî•{§ÁQqXÈóòµcôíË–|ÖRàÙÕΆ3…IÝq¯ÖîAï ü{8çÅsAP¾GAP¾_f“PeÕ¡ÏÌÒ‚ãlÙŽË·…‰¦Ø ߃LµÕ‰ªoóÉ÷–Tšùz€ÒW‡ò[`Ž›.;a ­B¦¢{³cÎW»Ï šVi2/˜Å½šáÏh|…MmÚvòýF±iÆ·M/Ö¨1n¡â†Ïa/$+Q-Ôºüaå¹x 
q‡’×Ì7›“ÏÈ´je\[¥¤ðû\,4¡-—2ã^Í \­“ïEs•·…ôò=øÓ‰Ml,«a^®VÉÀ‰ø ^­BvÜKe+Œ¡OP”ù>¿É®Ä¿´Ù-C¢æ*±(ØJ>&ß§‹ h?0!ØWk$…~ûnÉVñLcéÝ íXÛ7õ¬|’Ÿ9h<Øú8ÑÓ/b :¶‡ZÌáÎ7¢z¨wIg‡A‘·ä½±v¿wÛ .^¥—ï3 '¦ÜÁiô;x¯ÞÃ]ˆî TԹ紼f f¥Ÿ·?¬ L놢7ãögf»». 8£/¿ï\=3àÙ»wÿ$Œ±7‘?øå{úŠÎû_æÛåøòÉË0áLñ7mºïb¬?’| zgDáöÝdúÒ5=Y~ù~â4‹žü8€7j´[T!Óy¥Œ'8œ5Œí=£ZÀ¿O¤=ú‹:âd¯Yß<›Ém7œ¦½NÔíŸÏâa•CœqƒWüf"~ ’êU| ·>KãJ\×âi–jU2Ò"òæ –{öÙKJ¾Ÿƒ¸. ¾ÑaÎÔ.ÓïuôîAí {8k‚¹‡ (ß#‚ ‹Oß[Z½ýK²‚ø{SÊ>#ß—;I¸„sÖ ßg+-œeát'‡|ïUg gA¤/ƒ|úšT¯×k’fÏLQ*f)AJ×"§û…YPå¼ÁZ¯h´|F[)5u?¦‡Cž»ñ•=$[ÎBÇVþ‘‹§g(9Í úvR© £: ·ÏBCWx§~3®ÏUv "ò=ŸÜjžãD?EÏ)]9' D<TðRòréå{è ÿÒf¶ s©>Û½üñâ|XRvª| dïoÞ-ùÃ*ši‚ÕfHñÏ0k(7¼<Ñï€NM«Æ·£²ÎµÞZãø$¾ôÍã1µˆUÈû°ñ-¥|¿w˜ø~ä-º–ú Qù¡Î8­«Â /©IžÆ9Ï2Ø•zζ6c3 ±“ä{¨X}}›:NûPš¸W÷& ¯ò½Ç—o±8B_¯%—º3NàÒ½3¢Ð7:t(áŒyzù~E¹¥…Wp/Ô`Êô¡]Ú™0`˜#ˆ­ðë ü °ôì|ª™$=8Í °RRª;¯`ð±]heÇ}ît §3¼U•ÛLÐ±Ž¾wZƒcá4ã_•Œ´F6 p­ªÏËòýàL-ßû¬YPs„bÜ5e €2+ÅJ]V Ëå¯t”ÛÔ~SXùG.Úw(9Íüæêä°{]UPT¥–… ü>g›±q*¹gH`6Á¹ÊÑ‚|_ízŒ5Ë@-“îW›&œO‡’ëì¦èM&¥|gÿY,mhyv.~r#0rþxñ‚ö5ÏÕ©+kí9î–üa埠¸|ÏU‚¼oö&Êþ†bµäÓÆøi©ÖÆçÈšÀ£e:»êÙv£S\ßý±MÕùûZ-¥þ˜Güò=œSæyåà~b/7ñJ·)àœdÞmG¿hÉÇk_¼ûüýñ-ß'k¸}£KJL„vÀù ïI €3Õ;ª–:Jøy4«ÕàP3ÄÎPnaägÔSf9³½&‘cË;×pàjˆ3à_€0YÖÝ fàdrľ×VϰÎÚJç”F™´ ÎÏ|:حιüfb~ˆ%Uç†ôTiF“rURiC7rŸ¥°çâùþþjû¼ùa¥ßëøå{þ=œ‚"¹‡ ‹—ïAå{C®æØ—m2„¸¸ F³e_½šÝç—ò½o¬f¸Ϩ®²¦’J¾7ê«ð'¿ÆGéh´7G6M“Ío6§NaH0lѰò\´/¾Pr˜|ŸÍ0ùžÛç¡ =}¼6$!òM8Wù[`À^wéDU]-û*È«à|:ÊPY ;J/ßÃ6¿gif‰Lj%QÇÞ¡ þxñ‚9kg3Ustož»¥XXç ß'ˆqƒ``õN‰h mp^3Û‘.ýÒ̳_Žõ›Qt…C¾y7åûNã(PÜÒ´™Ö9 <§ôQJü %߃xM; ¾œ Ô}fȈ‹’ï¡/ùžÙ½/íñ ñ»0èõ0ÙvòUŸluÕ»x .åcÊÉ΄SÞ°6ÞS®á_€÷<§Å9Ì@¼nq,"Lœ‹L¢Ár»¸sA"Ëk&à*©Øª.š%ó[Vep½5¦’޽ÃGæåfJßAøþF»‚ÕðÈ÷â{8m){²PùAAP¾% Èæò…b¹Ú¬—Ÿ;3.;‰ò=œ±…ÚE™2 ä©­¬º2"i9"Ù³ð»ÔLr…ÕbµÞ¬äÿŠ|¯oæ·úò=y­4K«ù ÍjÓ!e˪@Xä{¾8CÉq_‚«l³Ï¥`×çö¹@h@¾§L:ßDs•ÝÂ"ä{‚]Î/(QžšƒÛ2ÀKp’}Aò½ØÒ»„Ÿ­@Õ`äüñ—ïa›…ÕÑ4ý¹î–TX,ß34¦¾/GW¨_,·’Ÿa‘ëp¢§ÀÓZz+¿^û¹Õاå{ZÉ‚qÎ[¾1.v¸üÀ-ßó;‡-êA;o¤W¿—GÞ¦‘ï]¢//­[à^°ü§Ê÷ ÈõŸ ‰œ¤h,@ÛcÍÁô‘Û†“,ßÃùwþ¨¥‘ï¡”ÿò¸~Žum-AE~Ü ¦u¥êiØfÀ€ÜÁ;|¨ü?×î!Üšr´2u žGýßLÌüªnÊ4£[•°]$!‡wà#óò#¥Si­)½ŽÞ=Ä÷p¾ ä‚,H¾GA”ï©[gW7%Ëõ¦²|²|_èú¼Ås Õ®pñg3G£ÏêjúP2jpb3zÐj]:ùÞÕ6‰gêF$”ûÁß±"¹8vV²öí 
ù*ÕTàvµ!†ãó‡•äâ)ÄJN3orà¿ê$~ŸóšYL<öÇÒÁâ¹Ênaqò=4R”mG)M_§ÏU#Á‘V.Ƙ£|/¼´Á-PÒº0Fž2^âò½Q&#\'Nñ›wKþ°.V¾‡bîpTê·>ßU¥Ñ“ùr|â>ÐÍ}$¸~M„:}mí{Ü›$ðqDMP¾‡¼–om…¤â«É â 2D¦Ýrs~¹«]ìFóâ5{(½¡neà$Øç—­Ÿ™7 nŸ?ò›‰ù_ÕO3ÁUɵ ÀGæãFJwßéÚkü{¿|/¾‡³'({²@ùAAP¾w”ÜXäqGPã§ïk†Ÿtšt:8}_¨ûÏžú,t}TÚÍÙRèn…¨BYÍ Õ ±+«ñ©@ I‚Tõù«kõÍX%Á¤ðzI¶S‰à¾)ås¹\6_ïÎúuÒ¸‚Ò©+7)e?[ÑøÃÊ?rñâ%§d©´êäsùè)I>·ÏEB·w°n-vªx®²[X |X‰Ðj¥Bªee; †'ßé2Ù²õLxÈvùþ7-mw<úêÚ@.fÁ™iâ%.ß»Z53¦9ãwï–üa]°|OÛÀ \¸ñ8‘–Þ´£gxÖÒ9["–û6û $\ÀHe…êäs•ïoô±%1£JÖp'£M~ç27?$ŽíJƒß!½új ïuôî!¾‡óNP ÷d!ò=‚ ‚ |š‰èëΨŸ¹±ÀUí‚°2~3¶å‘-(û ߃” %@Ùç“¡6 T§µl_*f)[©L´,ÝŸ¤²%åYAÓÓ 0 ‡¤à„x ¤ÃUG)Æ5bf JlšîD;®hXùG.žBÜ¡ä5s”r†P4ü¹|ܪpû\$4Ú¤Öx®b‡S¸j>v €x®ò·0oùžÆÄ'UPE'½ ß;3º9™ÈÄúإź1sš[D¾]ÚtÄ ±¹8ê0‘À™Üñ—ïØ£4–ÃDæ°[ò‡5m¦®¥0.ò}x;Öéš+Òu|8P aë|@*/GÏòÁÕŒ~7–œ^~½$ró—öX ;°b¶Äç,߇\ä[Ôî() 3ÚäwγUG>µ%-K9€1ÜÇõeê΀}º/r²7Ô×H¥{FÕzêt1Ð^_7ú8ð3ò½ÔrxåûŸÏæ)ßCá èùÇ~!Íøåû‰Ób î*ûüQcN™íLtR›ôò*®°à_€PV…Ê¥›xù,N3[ÙOü{ç>>ÇÚ¸…ǃ)ÉuiÍ ã´>6Êi&î~UW<ÍøW%#m ¯÷±Fl5òxf^~ Æû‰×©ø^Gïâ{8ÿÓäÞà¤gï Ÿë~ˆ ‹’ïAå{(oB(lÊŽçûžÓUê…l(T-/ + ÈeW75ÓŽ¬Mµ–˺—}³#ÛŠ¤E-{ŽY'Ú=ý\2"”g޲k»žçèÒÓ9l8|Ý­ÂTd#Û°òI½}¨jV5¸‰qSÕ Ë¥ûõpDEêzAà{®©5'os5µîÅ|n{Þ£ÕË<Âk·3Ú”»C_XzV4‚Taå¹h ñ‡’×,tàU¶¨NQ|šeøtÝðù}.šÐÕ ¿seÍrƒÀ·» xR8Wù[X€|šÍbfŠ’lÑwò•näeßw(j°ªTw¢ƒW ºjYfW“ŠÄËò½ØÒ\u*â%¥kZ†^¯¬’7àLîx Ê÷U‚M:šBµVÝŒS©6õ`»%Xù'H"•ƒß Ê÷p.j8DØêD^if¾žÙÞ`<jG¯fjCáæ«/§‘Y߻ӔÞ+¢ÉéÄ{|ÒÂàBÅ7û»çþèîÜchs¾ò=ÈÖD“.o‚ǾçGóšÌbiÝb¶Éïœdâ7©Ê ã¶?,Ô°±.O—Z.iîôå‰Ä¼ßê¹W^ÔWoâÉŸOág`y ÙƒûÁ•s½µuB?=…•†ur~­iǯ‰À73kpÚËõ£½žsZ-…7ÉyKµ;çþä{Â;v.®î£PÞnƒSÉ÷­Ñ±÷»fއCµ³ÙÛë§Zò”Ù΄Š@Ù Ù¤Y€p`œ¼ü¡Ùþw·'D‚/œ£ç7 ï>LzYþÞ2îÁãå™;œ÷bƒ´Fƒxw»ý˜†Ëi&î~ÙZ<ÍøW%#m\kjçÜkõ®O {cýuóœü@-pWHÜŸöÌŸk_¿L?¥{rÕл‡øÎ;AþÜóì¥ÿ¿AA”ïA”ïK –s™_“Ó¼'a%˰Qìd_¶iYI%#:Pb8 í(\3!JôPç²›n¢¨X•£­ è£~ÒÍ¥”<ž‚D,2L8JÌVþ‘‹§w(¹Í|Kfô]¨Á°9}.šÐ’Ë €'…s5U âò}5µË™P¢6}ñN¦¢X`êé« SHZêê`üK›£e‹5p&¼¸Am;0T þüØ-ùÃÊ?A0†]ˆCNiž!ß73Pk{ÐXÝŠz¾«Ç6˜1é©÷yŸaÆ!ßS÷ߦ}À)Ô„çÍÁ $yB›ÜÎá(xÂx |3„&ùYÞëÜÓgZ“ŸŠ;F³0k¨( íh¬/lWbÃûqÅ%©ï¨x1£¸ï¡$ÍòÈ÷-FÞ¶ìÇÔQcL™åLPiá1›ô І‚'ðÐgí9Íúç§ Ÿ¼ªÙ³Õ± 1f,/ùÍÄý@§C¶O3þUÉH›F#ð‘ùø¬ÊäÖ>ÿªIÞ=÷pþ òçý©‚ò=‚ ‚òýBq%r@È*]/€& ßT”9ô äËšíÓ²o®´9® ”êZ“^j]¼©ß´iÊÐÜ•*;!àv›yZ‡“Œ`ªæL¶B4 
S®ÄÏðÆûœ©ÓÜ@¡,¹‰emV›>um)%þZ½”(­•‡gc‰'o®R-J]'}XùG.žBü¡ä1ƒ¨W ÙáQ6À†×ç¡- –£F-ëj) žÏÕ”-ÐPÙo²T%w¯^€;Z9ð›ãÈe‹2å42xXT@AêºaœÀV ÀÌ”J¹'‡zãd%ó)Ô½ ÿÒækÙ·´Êj.ÞBSWáêÚTO¿ cƒ?Ë9ì–|aMŸiÑ¡³ œ×ÿ®=d—8 ïðÔvàÈ-yF‡”;^,ÚÛkíY9æýæ=’* ±´Î¥w7tke™¡ÐA½é oO|ß›yÿ*áýžhÖŽãÒ¿“Ÿ¿¾ Nç°¹êQú—ôArd"£¦Öí(4[[£f;mé€êëçIÐ2™rì‘·{þlÔí³Ì2mv½ýYͺs®¸.¾§…Âd僙#üRJ¥EUî~ØxÉIâ>çˆBp»ö ÄÊÉáÙvã;;çéãÿÅÍ·û»öà/E êϬA)mgÃMÚùt:9I·oÌSˆ;\sz Z;‡à^~xߢÕá¢ìÌÈë»=0€§õn¼Ìe&î‡ûÿÛ}‡6ȵâi–nU&¥ Ð?¿øðQŠoYG{*\]›Þ—¼~€UÜ\3ÈY,XþU“¸{Ð{ÿÎ?Aþ܃6á 6AùAAù~Ñx¶¡*CTM·ꀔaÙn‚|ß–gqºùD×r !ï¡è¹cuÇw£¶lC×Fè]ÓMnÊ3tÒ›nX^8ÆwM#š‰åúØú®mÛŽãxð2ß1UYŠhFȪ)4 hTWåz­©hJ³ÞT´.̈ƒÀµu8ôݿÖa ñ‡’ß zé*²¬é‰{ àsn3FlºÃ WˆfXÔGEs5] KYê"SßÔUi¼ªTÝdøØµÍ®>Ä0mñµ7¥øž;„¸ÜUË ß/>^óË@¡°þí¸¿Ó´³†r±«žµ4ûÄyH´º±¶rÖRÎÚC›èVçׇæõ‰í♵ÙR/¢ìn?\}çº-oKÇ[;G[²ucw'Ýj fíî ´éׇçnT¦#¡;ýbägK;g$ÑàP·¶¥hvGÑìÚ:«Fó•}­éöžnwÌÛd3°¼µì;Û}`zŠLÁ²omçî¦0G®Î/[òÙ®fµTûÄMGïµà)šV‹¸÷N"] ``` This is called a **manifest**, and it contains all of the metadata that Cargo needs to compile your project. Here’s what’s in `src/main.rs`: ``` fn main() { println!("Hello, world!"); } ``` Cargo generated a “hello world†for us. Let’s compile it:

$ cargo build
   Compiling hello_world v0.1.0 (file:///path/to/project/hello_world)
And then run it: ```shell $ ./target/debug/hello_world Hello, world! ``` We can also use `cargo run` to compile and then run it, all in one step:
$ cargo run
     Fresh hello_world v0.1.0 (file:///path/to/project/hello_world)
   Running `target/hello_world`
Hello, world!
# Going Further For more details on using Cargo, check out the [Cargo Guide](guide.html) cargo-0.8.0/src/doc/javascripts/000077500000000000000000000000001264656333200165115ustar00rootroot00000000000000cargo-0.8.0/src/doc/javascripts/all.js000066400000000000000000000021371264656333200176220ustar00rootroot00000000000000//= require_tree . Prism.languages.toml = { // https://github.com/LeaVerou/prism/issues/307 'comment': [{ pattern: /(^[^"]*?("[^"]*?"[^"]*?)*?[^"\\]*?)(\/\*[\w\W]*?\*\/|(^|[^:])#.*?(\r?\n|$))/g, lookbehind: true }], 'string': /("|')(\\?.)*?\1/g, 'number': /\d+/, 'boolean': /true|false/, 'toml-section': /\[.*\]/, 'toml-key': /[\w-]+/ }; $(function() { var pres = document.querySelectorAll('pre.rust'); for (var i = 0; i < pres.length; i++) { pres[i].className += ' language-rust'; } $('button.dropdown, a.dropdown').click(function(el, e) { $(this).toggleClass('active'); $(this).siblings('ul').toggleClass('open'); if ($(this).hasClass('active')) { $(document).on('mousedown.useroptions', function() { setTimeout(function() { $('button.dropdown, a.dropdown').removeClass('active'); $('button.dropdown + ul').removeClass('open'); }, 150); $(document).off('mousedown.useroptions'); }); } }); }); cargo-0.8.0/src/doc/javascripts/prism.js000066400000000000000000000161201264656333200202010ustar00rootroot00000000000000/* http://prismjs.com/download.html?themes=prism&languages=markup+css+clike+javascript */ self="undefined"!=typeof window?window:"undefined"!=typeof WorkerGlobalScope&&self instanceof WorkerGlobalScope?self:{};var Prism=function(){var e=/\blang(?:uage)?-(?!\*)(\w+)\b/i,t=self.Prism={util:{encode:function(e){return e instanceof n?new n(e.type,t.util.encode(e.content),e.alias):"Array"===t.util.type(e)?e.map(t.util.encode):e.replace(/&/g,"&").replace(/e.length)break e;if(!(d instanceof a)){c.lastIndex=0;var m=c.exec(d);if(m){u&&(f=m[1].length);var y=m.index-1+f,m=m[0].slice(f),v=m.length,k=y+v,b=d.slice(0,y+1),w=d.slice(k+1),N=[p,1];b&&N.push(b);var O=new 
a(l,g?t.tokenize(m,g):m,h);N.push(O),w&&N.push(w),Array.prototype.splice.apply(r,N)}}}}}return r},hooks:{all:{},add:function(e,n){var a=t.hooks.all;a[e]=a[e]||[],a[e].push(n)},run:function(e,n){var a=t.hooks.all[e];if(a&&a.length)for(var r,i=0;r=a[i++];)r(n)}}},n=t.Token=function(e,t,n){this.type=e,this.content=t,this.alias=n};if(n.stringify=function(e,a,r){if("string"==typeof e)return e;if("[object Array]"==Object.prototype.toString.call(e))return e.map(function(t){return n.stringify(t,a,e)}).join("");var i={type:e.type,content:n.stringify(e.content,a,r),tag:"span",classes:["token",e.type],attributes:{},language:a,parent:r};if("comment"==i.type&&(i.attributes.spellcheck="true"),e.alias){var l="Array"===t.util.type(e.alias)?e.alias:[e.alias];Array.prototype.push.apply(i.classes,l)}t.hooks.run("wrap",i);var o="";for(var s in i.attributes)o+=s+'="'+(i.attributes[s]||"")+'"';return"<"+i.tag+' class="'+i.classes.join(" ")+'" '+o+">"+i.content+""},!self.document)return self.addEventListener?(self.addEventListener("message",function(e){var n=JSON.parse(e.data),a=n.language,r=n.code;self.postMessage(JSON.stringify(t.util.encode(t.tokenize(r,t.languages[a])))),self.close()},!1),self.Prism):self.Prism;var a=document.getElementsByTagName("script");return a=a[a.length-1],a&&(t.filename=a.src,document.addEventListener&&!a.hasAttribute("data-manual")&&document.addEventListener("DOMContentLoaded",t.highlightAll)),self.Prism}();"undefined"!=typeof module&&module.exports&&(module.exports=Prism);; 
Prism.languages.markup={comment://g,prolog:/<\?.+?\?>/,doctype://,cdata://i,tag:{pattern:/<\/?[\w:-]+\s*(?:\s+[\w:-]+(?:=(?:("|')(\\?[\w\W])*?\1|[^\s'">=]+))?\s*)*\/?>/gi,inside:{tag:{pattern:/^<\/?[\w:-]+/i,inside:{punctuation:/^<\/?/,namespace:/^[\w-]+?:/}},"attr-value":{pattern:/=(?:('|")[\w\W]*?(\1)|[^\s>]+)/gi,inside:{punctuation:/=|>|"/g}},punctuation:/\/?>/g,"attr-name":{pattern:/[\w:-]+/g,inside:{namespace:/^[\w-]+?:/}}}},entity:/\&#?[\da-z]{1,8};/gi},Prism.hooks.add("wrap",function(t){"entity"===t.type&&(t.attributes.title=t.content.replace(/&/,"&"))});; Prism.languages.css={comment:/\/\*[\w\W]*?\*\//g,atrule:{pattern:/@[\w-]+?.*?(;|(?=\s*{))/gi,inside:{punctuation:/[;:]/g}},url:/url\((["']?).*?\1\)/gi,selector:/[^\{\}\s][^\{\};]*(?=\s*\{)/g,property:/(\b|\B)[\w-]+(?=\s*:)/gi,string:/("|')(\\?.)*?\1/g,important:/\B!important\b/gi,punctuation:/[\{\};:]/g,"function":/[-a-z0-9]+(?=\()/gi},Prism.languages.markup&&Prism.languages.insertBefore("markup","tag",{style:{pattern:/[\w\W]*?<\/style>/gi,inside:{tag:{pattern:/|<\/style>/gi,inside:Prism.languages.markup.tag.inside},rest:Prism.languages.css}}});; Prism.languages.clike={comment:[{pattern:/(^|[^\\])\/\*[\w\W]*?\*\//g,lookbehind:!0},{pattern:/(^|[^\\:])\/\/.*?(\r?\n|$)/g,lookbehind:!0}],string:/("|')(\\?.)*?\1/g,"class-name":{pattern:/((?:(?:class|interface|extends|implements|trait|instanceof|new)\s+)|(?:catch\s+\())[a-z0-9_\.\\]+/gi,lookbehind:!0,inside:{punctuation:/(\.|\\)/}},keyword:/\b(if|else|while|do|for|return|in|instanceof|function|new|try|throw|catch|finally|null|break|continue)\b/g,"boolean":/\b(true|false)\b/g,"function":{pattern:/[a-z0-9_]+\(/gi,inside:{punctuation:/\(/}},number:/\b-?(0x[\dA-Fa-f]+|\d*\.?\d+([Ee]-?\d+)?)\b/g,operator:/[-+]{1,2}|!|<=?|>=?|={1,3}|&{1,2}|\|?\||\?|\*|\/|\~|\^|\%/g,ignore:/&(lt|gt|amp);/gi,punctuation:/[{}[\];(),.:]/g};; 
Prism.languages.javascript=Prism.languages.extend("clike",{keyword:/\b(break|case|catch|class|const|continue|debugger|default|delete|do|else|enum|export|extends|false|finally|for|function|get|if|implements|import|in|instanceof|interface|let|new|null|package|private|protected|public|return|set|static|super|switch|this|throw|true|try|typeof|var|void|while|with|yield)\b/g,number:/\b-?(0x[\dA-Fa-f]+|\d*\.?\d+([Ee]-?\d+)?|NaN|-?Infinity)\b/g}),Prism.languages.insertBefore("javascript","keyword",{regex:{pattern:/(^|[^/])\/(?!\/)(\[.+?]|\\.|[^/\r\n])+\/[gim]{0,3}(?=\s*($|[\r\n,.;})]))/g,lookbehind:!0}}),Prism.languages.markup&&Prism.languages.insertBefore("markup","tag",{script:{pattern:/[\w\W]*?<\/script>/gi,inside:{tag:{pattern:/|<\/script>/gi,inside:Prism.languages.markup.tag.inside},rest:Prism.languages.javascript}}});; cargo-0.8.0/src/doc/manifest.md000066400000000000000000000416431264656333200163200ustar00rootroot00000000000000% The Manifest Format - Cargo Documentation # The `[package]` Section The first section in a `Cargo.toml` is `[package]`. ```toml [package] name = "hello_world" # the name of the package version = "0.1.0" # the current version, obeying semver authors = ["you@example.com"] ``` All three of these fields are mandatory. Cargo bakes in the concept of [Semantic Versioning](http://semver.org/), so make sure you follow some basic rules: * Before you reach 1.0.0, anything goes. * After 1.0.0, only make breaking changes when you increment the major version. In Rust, breaking changes include adding fields to structs or variants to enums. Don’t break the build. * After 1.0.0, don’t add any new public API (no new `pub` anything) in tiny versions. Always increment the minor version if you add any new `pub` structs, traits, fields, types, functions, methods or anything else. * Use version numbers with three numeric parts such as 1.0.0 rather than 1.0. For more on versions, see [this documentation](crates-io.html#using-cratesio-based-crates). 
## The `build` Field (optional) This field specifies a file in the repository which is a [build script][1] for building native code. More information can be found in the build script [guide][1]. [1]: build-script.html ```toml [package] # ... build = "build.rs" ``` ## The `exclude` and `include` Fields (optional) You can explicitly specify to Cargo that a set of [globs][globs] should be ignored or included for the purposes of packaging and rebuilding a package. The globs specified in the `exclude` field identify a set of files that are not included when a package is published as well as ignored for the purposes of detecting when to rebuild a package, and the globs in `include` specify files that are explicitly included. If a VCS is being used for a package, the `exclude` field will be seeded with the VCS’ ignore settings (`.gitignore` for git for example). ```toml [package] # ... exclude = ["build/**/*.o", "doc/**/*.html"] ``` ```toml [package] # ... include = ["src/**/*", "Cargo.toml"] ``` The options are mutually exclusive: setting `include` will override an `exclude`. Note that `include` must be an exhaustive list of files as otherwise necessary source files may not be included. [globs]: http://doc.rust-lang.org/glob/glob/struct.Pattern.html ## Package metadata There are a number of optional metadata fields also accepted under the `[package]` section: ```toml [package] # ... # A short blurb about the package. This is not rendered in any format when # uploaded to crates.io (aka this is not markdown) description = "..." # These URLs point to more information about the repository documentation = "..." homepage = "..." repository = "..." # This points to a file in the repository (relative to this Cargo.toml). The # contents of this file are stored and indexed in the registry. readme = "..." # This is a small list of keywords used to categorize and search for this # package. keywords = ["...", "..."] # This is a string description of the license for this package. 
Currently # crates.io will validate the license provided against a whitelist of known # license identifiers from http://spdx.org/licenses/. Multiple licenses can # be separated with a `/` license = "..." # If a project is using a nonstandard license, then this key may be specified in # lieu of the above key and must point to a file relative to this manifest # (similar to the readme key) license-file = "..." ``` The [crates.io](https://crates.io) registry will render the description, display the license, link to the three URLs and categorize by the keywords. These keys provide useful information to users of the registry and also influence the search ranking of a crate. It is highly discouraged to omit everything in a published crate. # The `[dependencies]` Section You list dependencies using keys inside of the `[dependencies]` section. For example, if you wanted to depend on `hammer`, `color`, and `geometry`: ```toml [package] # ... [dependencies] hammer = { version = "0.5.0", git = "https://github.com/wycats/hammer.rs" } color = { git = "https://github.com/bjz/color-rs" } geometry = { path = "crates/geometry" } ``` You can specify the source of a dependency in a few ways: * `git = ""`: A git repository with a `Cargo.toml` inside it (not necessarily at the root). The `rev`, `tag`, and `branch` options are also recognized to use something other than the `master` branch. * `path = ""`: A path relative to the current `Cargo.toml` pointing to another directory with a `Cargo.toml` and an associated package. * If `path` and `git` are omitted, then a dependencies will come from crates.io and use the `version` key to indicate the version requirement. Dependencies from crates.io can also use a shorthand where just the version requirement is specified: ```toml [dependencies] hammer = "0.5.0" color = "> 0.6.0, < 0.8.0" ``` The syntax of the requirement strings is described in the [crates.io guide](crates-io.html#using-cratesio-based-crates). 
Platform-specific dependencies take the same format, but are listed under the `target.$triple` section: ```toml [target.x86_64-pc-windows-gnu.dependencies] winhttp = "0.4.0" [target.i686-unknown-linux-gnu.dependencies] openssl = "1.0.1" native = { path = "native/i686" } [target.x86_64-unknown-linux-gnu.dependencies] openssl = "1.0.1" native = { path = "native/x86_64" } ``` If you’re using a custom target specification, quote the full path and file name: ```toml [target."x86_64/windows.json".dependencies] winhttp = "0.4.0" [target."i686/linux.json".dependencies] openssl = "1.0.1" native = { path = "native/i686" } [target."x86_64/linux.json".dependencies] openssl = "1.0.1" native = { path = "native/x86_64" } ``` # The `[profile.*]` Sections Cargo supports custom configuration of how rustc is invoked through **profiles** at the top level. Any manifest may declare a profile, but only the **top level** project’s profiles are actually read. All dependencies’ profiles will be overridden. This is done so the top-level project has control over how its dependencies are compiled. There are five currently supported profile names, all of which have the same configuration available to them. Listed below is the configuration available, along with the defaults for each profile. 
```toml # The development profile, used for `cargo build` [profile.dev] opt-level = 0 # Controls the --opt-level the compiler builds with debug = true # Controls whether the compiler passes `-g` rpath = false # Controls whether the compiler passes `-C rpath` lto = false # Controls `-C lto` for binaries and staticlibs debug-assertions = true # Controls whether debug assertions are enabled codegen-units = 1 # Controls whether the compiler passes `-C codegen-units` # `codegen-units` is ignored when `lto = true` # The release profile, used for `cargo build --release` [profile.release] opt-level = 3 debug = false rpath = false lto = false debug-assertions = false codegen-units = 1 # The testing profile, used for `cargo test` [profile.test] opt-level = 0 debug = true rpath = false lto = false debug-assertions = true codegen-units = 1 # The benchmarking profile, used for `cargo bench` [profile.bench] opt-level = 3 debug = false rpath = false lto = false debug-assertions = false codegen-units = 1 # The documentation profile, used for `cargo doc` [profile.doc] opt-level = 0 debug = true rpath = false lto = false debug-assertions = true codegen-units = 1 ``` # The `[features]` Section Cargo supports **features** to allow expression of: * Conditional compilation options (usable through `cfg` attributes); * Optional dependencies, which enhance a package, but are not required; * Clusters of optional dependencies, such as “postgresâ€, that would include the `postgres` package, the `postgres-macros` package, and possibly other packages (such as development-time mocking libraries, debugging tools, etc.) A feature of a package is either an optional dependency, or a set of other features. The format for specifying features is: ```toml [package] name = "awesome" [features] # The “default†set of optional packages. Most people will # want to use these packages, but they are strictly optional. # Note that `session` is not a package but rather another # feature listed in this manifest. 
default = ["jquery", "uglifier", "session"] # A feature with no dependencies is used mainly for conditional # compilation, like `#[cfg(feature = "go-faster")]`. go-faster = [] # The “secure-password†feature depends on the bcrypt package. # This aliasing will allow people to talk about the feature in # a higher-level way and allow this package to add more # requirements to the feature in the future. secure-password = ["bcrypt"] # Features can be used to reexport features of other packages. # The `session` feature of package `awesome` will ensure that the # `session` feature of the package `cookie` is also enabled. session = ["cookie/session"] [dependencies] # These packages are mandatory and form the core of this # package’s distribution cookie = "1.2.0" oauth = "1.1.0" route-recognizer = "=2.1.0" # A list of all of the optional dependencies, some of which # are included in the above “featuresâ€. They can be opted # into by apps. jquery = { version = "1.0.2", optional = true } uglifier = { version = "1.5.3", optional = true } bcrypt = { version = "*", optional = true } civet = { version = "*", optional = true } ``` To use the package `awesome`: ```toml [dependencies.awesome] version = "1.3.5" default-features = false # do not include the default features, and optionally # cherry-pick individual features features = ["secure-password", "civet"] ``` ## Rules The usage of features is subject to a few rules: 1. Feature names must not conflict with other package names in the manifest. This is because they are opted into via `features = [...]`, which only has a single namespace 2. With the exception of the `default` feature, all features are opt-in. To opt out of the default feature, use `default-features = false` and cherry-pick individual features. 3. Feature groups are not allowed to cyclically depend on one another. 4. Dev-dependencies cannot be optional 5. Features groups can only reference optional dependencies 6. 
When a feature is selected, Cargo will call `rustc` with `--cfg feature="${feature_name}"`. If a feature group is included, it and all of its individual features will be included. This can be tested in code via `#[cfg(feature = "foo")]` Note that it is explicitly allowed for features to not actually activate any optional dependencies. This allows packages to internally enable/disable features without requiring a new dependency. ## Usage In End Products One major use-case for this feature is specifying optional features in end-products. For example, the Servo project may want to include optional features that people can enable or disable when they build it. In that case, Servo will describe features in its `Cargo.toml` and they can be enabled using command-line flags: ``` $ cargo build --release --features "shumway pdf" ``` Default features could be excluded using `--no-default-features`. ## Usage In Packages In most cases, the concept of “optional dependency†in a library is best expressed as a separate package that the top-level application depends on. However, high-level packages, like Iron or Piston, may want the ability to curate a number of packages for easy installation. The current Cargo system allows them to curate a number of mandatory dependencies into a single package for easy installation. In some cases, packages may want to provide additional curation for **optional** dependencies: * Grouping a number of low-level optional dependencies together into a single high-level “featureâ€. * Specifying packages that are recommended (or suggested) to be included by users of the package. * Including a feature (like `secure-password` in the motivating example) that will only work if an optional dependency is available, and would be difficult to implement as a separate package. For example, it may be overly difficult to design an IO package to be completely decoupled from OpenSSL, with opt-in via the inclusion of a separate package. 
In almost all cases, it is an antipattern to use these features outside of high-level packages that are designed for curation. If a feature is optional, it can almost certainly be expressed as a separate package. # The `[dev-dependencies]` Section The format of this section is equivalent to `[dependencies]`. Dev-dependencies are not used when compiling a package for building, but are used for compiling tests and benchmarks. These dependencies are *not* propagated to other packages which depend on this package. # The Project Layout If your project is an executable, name the main source file `src/main.rs`. If it is a library, name the main source file `src/lib.rs`. Cargo will also treat any files located in `src/bin/*.rs` as executables. Your project can optionally contain folders named `examples`, `tests`, and `benches`, which Cargo will treat as containing example executable files, integration tests, and benchmarks respectively. ```notrust â–¾ src/ # directory containing source files lib.rs # the main entry point for libraries and packages main.rs # the main entry point for projects producing executables â–¾ bin/ # (optional) directory containing additional executables *.rs â–¾ examples/ # (optional) examples *.rs â–¾ tests/ # (optional) integration tests *.rs â–¾ benches/ # (optional) benchmarks *.rs ``` # Examples Files located under `examples` are example uses of the functionality provided by the library. When compiled, they are placed in the `target/examples` directory. They must compile as executables (with a `main()` function) and load in the library by using `extern crate `. They are compiled when you run your tests to protect them from bitrotting. You can run individual examples with the command `cargo run --example `. # Tests When you run `cargo test`, Cargo will: * Compile and run your library’s unit tests, which are in files reachable from `lib.rs`. Any sections marked with `#[cfg(test)]` will be included. 
* Compile and run your library’s documentation tests, which are embedded inside of documentation blocks. * Compile and run your library’s [integration tests](#integration-tests). * Compile your library’s examples. ## Integration tests Each file in `tests/*.rs` is an integration test. When you run `cargo test`, Cargo will compile each of these files as a separate crate. The crate can link to your library by using `extern crate `, like any other code that depends on it. Cargo will not automatically compile files inside subdirectories of `tests`, but an integration test can import modules from these directories as usual. For example, if you want several integration tests to share some code, you can put the shared code in `tests/common/mod.rs` and then put `mod common;` in each of the test files. # Configuring a target All of the `[[bin]]`, `[lib]`, `[[bench]]`, `[[test]]`, and `[[example]]` sections support similar configuration for specifying how a target should be built. The example below uses `[lib]`, but it also applies to all other sections as well. All values listed are the defaults for that option unless otherwise specified. ```toml [package] # ... [lib] # The name of a target is the name of the library that will be generated. This # is defaulted to the name of the package or project. name = "foo" # This field points at where the crate is located, relative to the Cargo.toml. path = "src/lib.rs" # A flag for enabling unit tests for this target. This is used by `cargo test`. test = true # A flag for enabling documentation tests for this target. This is only # relevant for libraries, it has no effect on other sections. This is used by # `cargo test`. doctest = true # A flag for enabling benchmarks for this target. This is used by `cargo bench`. bench = true # A flag for enabling documentation of this target. This is used by `cargo doc`. 
doc = true # If the target is meant to be a compiler plugin, this field must be set to true # for Cargo to correctly compile it and make it available for all dependencies. plugin = false # If set to false, `cargo test` will omit the --test flag to rustc, which stops # it from generating a test harness. This is useful when the binary being built # manages the test runner itself. harness = true ``` # Building Dynamic or Static Libraries If your project produces a library, you can specify which kind of library to build by explicitly listing the library in your `Cargo.toml`: ```toml # ... [lib] name = "..." # this could be “staticlib†as well crate-type = ["dylib"] ``` The available options are `dylib`, `rlib`, and `staticlib`. You should only use this option in a project. Cargo will always compile **packages** (dependencies) based on the requirements of the project that includes them. cargo-0.8.0/src/doc/pkgid-spec.md000066400000000000000000000033661264656333200165400ustar00rootroot00000000000000% Package ID Specifications - Cargo Documentation # Package ID Specifications Subcommands of Cargo frequently need to refer to a particular package within a dependency graph for various operations like updating, cleaning, building, etc. To solve this problem, Cargo supports Package ID Specifications. A specification is a string which is used to uniquely refer to one package within a graph of packages. ## Specification grammar The formal grammar for a Package Id Specification is: ```notrust pkgid := pkgname | [ proto "://" ] hostname-and-path [ "#" ( pkgname | semver ) ] pkgname := name [ ":" semver ] proto := "http" | "git" | ... ``` Here, brackets indicate that the contents are optional. 
## Example Specifications These could all be references to a package `foo` version `1.2.3` from the registry at `crates.io` | pkgid | name | version | url | |-------------------------------:|:------:|:---------:|:--------------------:| | `foo` | foo | * | * | | `foo:1.2.3` | foo | 1.2.3 | * | | `crates.io/foo` | foo | * | *://crates.io/foo | | `crates.io/foo#1.2.3` | foo | 1.2.3 | *://crates.io/foo | | `crates.io/bar#foo:1.2.3` | foo | 1.2.3 | *://crates.io/bar | | `http://crates.io/foo#1.2.3` | foo | 1.2.3 | http://crates.io/foo | ## Brevity of Specifications The goal of this is to enable both succinct and exhaustive syntaxes for referring to packages in a dependency graph. Ambiguous references may refer to one or more packages. Most commands generate an error if more than one package could be referred to with the same specification. cargo-0.8.0/src/doc/stylesheets/000077500000000000000000000000001264656333200165345ustar00rootroot00000000000000cargo-0.8.0/src/doc/stylesheets/all.css000066400000000000000000000136051264656333200200230ustar00rootroot00000000000000html { background: url("../images/noise.png"); background-color: #3b6837; } main, #header { width: 900px; } * { box-sizing: border-box; } body { display: -webkit-flex; display: flex; -webkit-flex-direction: column; flex-direction: column; -webkit-align-items: center; align-items: center; font-family: sans-serif; } a { color: #00ac5b; text-decoration: none; } a:hover { color: #00793f; } h1 { font-size: 24px; margin: 20px 0 10px 0; font-weight: bold; color: #b64790; } h1 code:not(.highlight) { color: #d9a700; vertical-align: bottom; } h1 a, h2 a { color: #b64790; text-decoration: none; } h1:hover a, h2:hover a { color: #A03D7E; } h1:hover a:after, h2:hover a:after { content: '\2002\00a7\2002'; } :target { background: rgba(239, 242, 178, 1); padding: 5px; } h1.title { /* style rustdoc-generated title */ width: 100%; padding: 40px 20px 40px 60px; background-color: #edebdd; margin-bottom: 20px; 
-webkit-border-radius: 5px; -moz-border-radius: 5px; -ms-border-radius: 5px; border-radius: 5px; margin: 0; color: #383838; font-size: 2em; background-image: url(../images/circle-with-i.png); background-repeat: no-repeat; background-position: 20px center; } h2 { font-size: 18px; margin: 15px 0 5px 0; color: #b64790; font-weight: bold; } h2 code:not(.highlight) { color: #d9a700; } code:not(.highlight) { font-family: monospace; color: #b64790; } main { display: -webkit-flex; display: flex; -webkit-flex-direction: column; flex-direction: column; width: 100%; max-width: 900px; margin-bottom: 10px; background-color: #f9f7ec; padding: 15px; -webkit-border-radius: 10px; -moz-border-radius: 10px; -ms-border-radius: 10px; border-radius: 10px; box-shadow: 0px 0px 5px 2px #3b6837; border: 5px solid #62865f; color: #383838; } main > p:first-child { font-weight: 500; margin-top: 3px; padding-bottom: 15px; border-bottom: 1px solid #62865f; text-align: center; } main p:first-child a { color: #3b6837; } main p:first-child a:hover { color: #62865f; } main p, main ul { /* color: #3b6837; */ margin: 10px 0; line-height: 150%; } main ul { margin-left: 20px; } main li { list-style-type: disc; } main strong { font-weight: bold; } img.logo { align-self: center; margin-bottom: 10px; } pre { padding: 10px; margin: 10px 0; /* border: 1px solid #cad0d0; */ border-radius: 4px; max-width: calc(100vw - 45px); overflow-x: auto; background: #383838 !important; color: white; padding: 20px; /* override prism.js styles */ font-size: 1em !important; border: none !important; box-shadow: none !important; text-shadow: none !important; } pre code { text-shadow: none !important; } footer { padding: 40px; width: 900px; } footer a { color: white; } footer a:hover { color: #e6e6e6; } footer .sep, #header .sep { color: #284725; } footer .sep { margin: 0 10px; } #header .sep { margin-left: 10px; } .headerlink { display: none; text-decoration: none; } .fork-me { position:absolute; top:0; right:0; } 
.token.toml-section { color: #CB4B16; } .token.toml-key { color: #268BD2; } /* Rust code highlighting */ pre.rust .kw { color: #8959A8; } pre.rust .kw-2, pre.rust .prelude-ty { color: #4271AE; } pre.rust .number, pre.rust .string { color: #718C00; } pre.rust .self, pre.rust .boolval, pre.rust .prelude-val, pre.rust .attribute, pre.rust .attribute .ident { color: #C82829; } pre.rust .comment { color: #8E908C; } pre.rust .doccomment { color: #4D4D4C; } pre.rust .macro, pre.rust .macro-nonterminal { color: #3E999F; } pre.rust .lifetime { color: #B76514; } code span.s1 { color: #2AA198; } table th { border-bottom: 1px solid black; } table td, table th { padding: 5px 10px; } #header { color: white; position: relative; height: 100px; display: -webkit-flex; display: flex; -webkit-align-items: center; align-items: center; } #header h1 { font-size: 2em; } #header a, #header h1 { color: white; text-decoration: none; } #header a:hover { color: #d9d9d9; } #header input.search { border: none; color: black; outline: 0; margin-left: 30px; padding: 5px 5px 5px 25px; background-image: url(../images/search.png); background-repeat: no-repeat; background-position: 6px 6px; -webkit-border-radius: 15px; -moz-border-radius: 15px; -ms-border-radius: 15px; border-radius: 15px; } #header .nav { -webkit-flex-grow: 2; flex-grow: 2; text-align: right; } button.dropdown, a.dropdown { cursor: pointer; } button.dropdown .arrow, a.dropdown .arrow { font-size: 50%; display: inline-block; vertical-align: middle; } button.dropdown .arrow::after, a.dropdown .arrow::after { content: "â–¼"; } button.active.dropdown .arrow::after, a.active.dropdown .arrow::after { content: "â–²"; } button { background: none; outline: 0; border: 0; padding: 10px; color: white; } button.active { background:#2a4f27; box-shadow:inset -2px 2px 4px 0 #243d26 } ul.dropdown { display: none; visibility: none; position: absolute; top: 100%; left: 0; width: 100%; opacity: 0; margin: 0; text-align: left; padding: 0; background: 
white; border: 1px solid #d5d3cb; list-style: none; z-index: 10; -webkit-border-radius: 5px; -moz-border-radius: 5px; -ms-border-radius: 5px; border-radius: 5px; } ul.dropdown li a { font-size: 90%; width: 100%; display: inline-block; padding: 8px 10px; text-decoration: none; color: #383838 !important; } ul.dropdown li a:hover { background: #5e5e5e; color: white !important; } ul.dropdown li.last { border-top: 1px solid #d5d3cb; } ul.dropdown.open { display: block; visibility: visible; opacity: 1; } .dropdown-container { display: inline-block; position: relative; } p > img { max-width: 100%; } cargo-0.8.0/src/doc/stylesheets/normalize.css000066400000000000000000000153321264656333200212520ustar00rootroot00000000000000/*! normalize.css v2.0.1 | MIT License | git.io/normalize */ /* ========================================================================== HTML5 display definitions ========================================================================== */ /* * Corrects `block` display not defined in IE 8/9. */ article, aside, details, figcaption, figure, footer, header, hgroup, nav, section, summary { display: block; } /* * Corrects `inline-block` display not defined in IE 8/9. */ audio, canvas, video { display: inline-block; } /* * Prevents modern browsers from displaying `audio` without controls. * Remove excess height in iOS 5 devices. */ audio:not([controls]) { display: none; height: 0; } /* * Addresses styling for `hidden` attribute not present in IE 8/9. */ [hidden] { display: none; } /* ========================================================================== Base ========================================================================== */ /* * 1. Sets default font family to sans-serif. * 2. Prevents iOS text size adjust after orientation change, without disabling * user zoom. */ html { font-family: sans-serif; /* 1 */ -webkit-text-size-adjust: 100%; /* 2 */ -ms-text-size-adjust: 100%; /* 2 */ } /* * Removes default margin. 
*/ body { margin: 0; } /* ========================================================================== Links ========================================================================== */ /* * Addresses `outline` inconsistency between Chrome and other browsers. */ a:focus { outline: thin dotted; } /* * Improves readability when focused and also mouse hovered in all browsers. */ a:active, a:hover { outline: 0; } /* ========================================================================== Typography ========================================================================== */ /* * Addresses `h1` font sizes within `section` and `article` in Firefox 4+, * Safari 5, and Chrome. */ h1 { font-size: 2em; } /* * Addresses styling not present in IE 8/9, Safari 5, and Chrome. */ abbr[title] { border-bottom: 1px dotted; } /* * Addresses style set to `bolder` in Firefox 4+, Safari 5, and Chrome. */ b, strong { font-weight: bold; } /* * Addresses styling not present in Safari 5 and Chrome. */ dfn { font-style: italic; } /* * Addresses styling not present in IE 8/9. */ mark { background: #ff0; color: #000; } /* * Corrects font family set oddly in Safari 5 and Chrome. */ code, kbd, pre, samp { font-family: monospace, serif; font-size: 1em; } /* * Improves readability of pre-formatted text in all browsers. */ pre { white-space: pre; white-space: pre-wrap; word-wrap: break-word; } /* * Sets consistent quote types. */ q { quotes: "\201C" "\201D" "\2018" "\2019"; } /* * Addresses inconsistent and variable font size in all browsers. */ small { font-size: 80%; } /* * Prevents `sub` and `sup` affecting `line-height` in all browsers. 
*/ sub, sup { font-size: 75%; line-height: 0; position: relative; vertical-align: baseline; } sup { top: -0.5em; } sub { bottom: -0.25em; } /* ========================================================================== Embedded content ========================================================================== */ /* * Removes border when inside `a` element in IE 8/9. */ img { border: 0; } /* * Corrects overflow displayed oddly in IE 9. */ svg:not(:root) { overflow: hidden; } /* ========================================================================== Figures ========================================================================== */ /* * Addresses margin not present in IE 8/9 and Safari 5. */ figure { margin: 0; } /* ========================================================================== Forms ========================================================================== */ /* * Define consistent border, margin, and padding. */ fieldset { border: 1px solid #c0c0c0; margin: 0 2px; padding: 0.35em 0.625em 0.75em; } /* * 1. Corrects color not being inherited in IE 8/9. * 2. Remove padding so people aren't caught out if they zero out fieldsets. */ legend { border: 0; /* 1 */ padding: 0; /* 2 */ } /* * 1. Corrects font family not being inherited in all browsers. * 2. Corrects font size not being inherited in all browsers. * 3. Addresses margins set differently in Firefox 4+, Safari 5, and Chrome */ button, input, select, textarea { font-family: inherit; /* 1 */ font-size: 100%; /* 2 */ margin: 0; /* 3 */ } /* * Addresses Firefox 4+ setting `line-height` on `input` using `!important` in * the UA stylesheet. */ button, input { line-height: normal; } /* * 1. Avoid the WebKit bug in Android 4.0.* where (2) destroys native `audio` * and `video` controls. * 2. Corrects inability to style clickable `input` types in iOS. * 3. Improves usability and consistency of cursor style between image-type * `input` and others. 
*/ button, html input[type="button"], /* 1 */ input[type="reset"], input[type="submit"] { -webkit-appearance: button; /* 2 */ cursor: pointer; /* 3 */ } /* * Re-set default cursor for disabled elements. */ button[disabled], input[disabled] { cursor: default; } /* * 1. Addresses box sizing set to `content-box` in IE 8/9. * 2. Removes excess padding in IE 8/9. */ input[type="checkbox"], input[type="radio"] { box-sizing: border-box; /* 1 */ padding: 0; /* 2 */ } /* * 1. Addresses `appearance` set to `searchfield` in Safari 5 and Chrome. * 2. Addresses `box-sizing` set to `border-box` in Safari 5 and Chrome * (include `-moz` to future-proof). */ input[type="search"] { -webkit-appearance: textfield; /* 1 */ -moz-box-sizing: content-box; -webkit-box-sizing: content-box; /* 2 */ box-sizing: content-box; } /* * Removes inner padding and search cancel button in Safari 5 and Chrome * on OS X. */ input[type="search"]::-webkit-search-cancel-button, input[type="search"]::-webkit-search-decoration { -webkit-appearance: none; } /* * Removes inner padding and border in Firefox 4+. */ button::-moz-focus-inner, input::-moz-focus-inner { border: 0; padding: 0; } /* * 1. Removes default vertical scrollbar in IE 8/9. * 2. Improves readability and alignment in all browsers. */ textarea { overflow: auto; /* 1 */ vertical-align: top; /* 2 */ } /* ========================================================================== Tables ========================================================================== */ /* * Remove most spacing between table cells. */ table { border-collapse: collapse; border-spacing: 0; }cargo-0.8.0/src/doc/stylesheets/prism.css000066400000000000000000000104311264656333200203770ustar00rootroot00000000000000/* http://prismjs.com/download.html?themes=prism-twilight&languages=markup+css+clike+javascript */ /** * prism.js Twilight theme * Based (more or less) on the Twilight theme originally of Textmate fame. 
* @author Remy Bach */ code[class*="language-"], pre[class*="language-"] { color: white; direction: ltr; font-family: Consolas, Monaco, 'Andale Mono', monospace; text-align: left; text-shadow: 0 -.1em .2em black; white-space: pre; word-spacing: normal; word-break: normal; line-height: 1.5; -moz-tab-size: 4; -o-tab-size: 4; tab-size: 4; -webkit-hyphens: none; -moz-hyphens: none; -ms-hyphens: none; hyphens: none; } pre[class*="language-"], :not(pre) > code[class*="language-"] { background: hsl(0, 0%, 8%); /* #141414 */ } /* Code blocks */ pre[class*="language-"] { border-radius: .5em; border: .3em solid hsl(0, 0%, 33%); /* #282A2B */ box-shadow: 1px 1px .5em black inset; margin: .5em 0; overflow: auto; padding: 1em; } pre[class*="language-"]::selection { /* Safari */ background: hsl(200, 4%, 16%); /* #282A2B */ } pre[class*="language-"]::selection { /* Firefox */ background: hsl(200, 4%, 16%); /* #282A2B */ } /* Text Selection colour */ pre[class*="language-"]::-moz-selection, pre[class*="language-"] ::-moz-selection, code[class*="language-"]::-moz-selection, code[class*="language-"] ::-moz-selection { text-shadow: none; background: hsla(0, 0%, 93%, 0.15); /* #EDEDED */ } pre[class*="language-"]::selection, pre[class*="language-"] ::selection, code[class*="language-"]::selection, code[class*="language-"] ::selection { text-shadow: none; background: hsla(0, 0%, 93%, 0.15); /* #EDEDED */ } /* Inline code */ :not(pre) > code[class*="language-"] { border-radius: .3em; border: .13em solid hsl(0, 0%, 33%); /* #545454 */ box-shadow: 1px 1px .3em -.1em black inset; padding: .15em .2em .05em; } .token.comment, .token.prolog, .token.doctype, .token.cdata { color: hsl(0, 0%, 47%); /* #777777 */ } .token.punctuation { opacity: .7; } .namespace { opacity: .7; } .token.tag, .token.boolean, .token.number, .token.deleted { color: hsl(14, 58%, 55%); /* #CF6A4C */ } .token.keyword, .token.property, .token.selector, .token.constant, .token.symbol, .token.builtin { color: hsl(53, 89%, 
79%); /* #F9EE98 */ } .token.attr-name, .token.attr-value, .token.string, .token.char, .token.operator, .token.entity, .token.url, .language-css .token.string, .style .token.string, .token.variable, .token.inserted { color: hsl(76, 21%, 52%); /* #8F9D6A */ } .token.atrule { color: hsl(218, 22%, 55%); /* #7587A6 */ } .token.regex, .token.important { color: hsl(42, 75%, 65%); /* #E9C062 */ } .token.important { font-weight: bold; } .token.entity { cursor: help; } pre[data-line] { padding: 1em 0 1em 3em; position: relative; } /* Markup */ .language-markup .token.tag, .language-markup .token.attr-name, .language-markup .token.punctuation { color: hsl(33, 33%, 52%); /* #AC885B */ } /* Make the tokens sit above the line highlight so the colours don't look faded. */ .token { position: relative; z-index: 1; } .line-highlight { background: -moz-linear-gradient(left, hsla(0, 0%, 33%, .1) 70%, hsla(0, 0%, 33%, 0)); /* #545454 */ background: -o-linear-gradient(left, hsla(0, 0%, 33%, .1) 70%, hsla(0, 0%, 33%, 0)); /* #545454 */ background: -webkit-linear-gradient(left, hsla(0, 0%, 33%, .1) 70%, hsla(0, 0%, 33%, 0)); /* #545454 */ background: hsla(0, 0%, 33%, 0.25); /* #545454 */ background: linear-gradient(left, hsla(0, 0%, 33%, .1) 70%, hsla(0, 0%, 33%, 0)); /* #545454 */ border-bottom: 1px dashed hsl(0, 0%, 33%); /* #545454 */ border-top: 1px dashed hsl(0, 0%, 33%); /* #545454 */ left: 0; line-height: inherit; margin-top: 0.75em; /* Same as .prism’s padding-top */ padding: inherit 0; pointer-events: none; position: absolute; right: 0; white-space: pre; z-index: 0; } .line-highlight:before, .line-highlight[data-end]:after { background-color: hsl(215, 15%, 59%); /* #8794A6 */ border-radius: 999px; box-shadow: 0 1px white; color: hsl(24, 20%, 95%); /* #F5F2F0 */ content: attr(data-start); font: bold 65%/1.5 sans-serif; left: .6em; min-width: 1em; padding: 0 .5em; position: absolute; text-align: center; text-shadow: none; top: .4em; vertical-align: .3em; } 
.line-highlight[data-end]:after { bottom: .4em; content: attr(data-end); top: auto; } cargo-0.8.0/src/etc/000077500000000000000000000000001264656333200141665ustar00rootroot00000000000000cargo-0.8.0/src/etc/_cargo000066400000000000000000000514511264656333200153510ustar00rootroot00000000000000#compdef cargo typeset -A opt_args autoload -U regexp-replace _cargo() { _arguments \ '(- 1 *)'{-h,--help}'[show help message]' \ '(- 1 *)'--list'[list installed commands]' \ '(- 1 *)'{-v,--verbose}'[use verbose output]' \ '(- 1 *)'--color'[colorization option]' \ '(- 1 *)'{-V,--version}'[show version information]' \ '1: :_cargo_cmds' \ '*:: :->args' case $state in args) #TODO: add path completion to manifest-path options case $words[1] in bench) _arguments \ '--features=[space separated feature list]' \ '(-h, --help)'{-h,--help}'[show help message]' \ '(-j, --jobs)'{-j,--jobs}'[number of jobs to run in parallel]' \ "${command_scope_spec[@]}" \ '--manifest-path=[path to manifest]: :_files -/' \ '--no-default-features[do not build the default features]' \ '--no-run[compile but do not run]' \ '(-p,--package)'{-p=,--package=}'[package to run benchmarks for]:packages:_get_package_names' \ '--target=[target triple]' \ '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ '--color=:colorization option:(auto always never)' \ ;; build) _arguments \ '--features=[space separated feature list]' \ '(-h, --help)'{-h,--help}'[show help message]' \ '(-j, --jobs)'{-j,--jobs}'[number of jobs to run in parallel]' \ "${command_scope_spec[@]}" \ '--manifest-path=[path to manifest]: :files -/' \ '--no-default-features[do not build the default features]' \ '(-p,--package)'{-p=,--package=}'[package to build]:packages:_get_package_names' \ '--release=[build in release mode]' \ '--target=[target triple]' \ '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ 
'--color=:colorization option:(auto always never)' \ ;; clean) _arguments \ '(-h, --help)'{-h,--help}'[show help message]' \ '--manifest-path=[path to manifest]' \ '(-p,--package)'{-p=,--package=}'[package to clean]:packages:_get_package_names' \ '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ '--release[whether or not to clean release artifacts]' \ '--target=[target triple(default:all)]' \ '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ '--color=:colorization option:(auto always never)' \ ;; doc) _arguments \ '--features=[space separated feature list]' \ '(-h, --help)'{-h,--help}'[show help message]' \ '(-j, --jobs)'{-j,--jobs}'[number of jobs to run in parallel]' \ '--manifest-path=[path to manifest]' \ '--no-deps[do not build docs for dependencies]' \ '--no-default-features[do not build the default features]' \ '--open[open docs in browser after the build]' \ '(-p, --package)'{-p,--package}'=[package to document]' \ '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ '--release[build artifacts in release mode, with optimizations]' \ '--target=[build for the target triple]' \ '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ '--color=:colorization option:(auto always never)' \ ;; fetch) _arguments \ '(-h, --help)'{-h,--help}'[show help message]' \ '--manifest-path=[path to manifest]' \ '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ '--color=:colorization option:(auto always never)' \ ;; generate-lockfile) _arguments \ '(-h, --help)'{-h,--help}'[show help message]' \ '--manifest-path=[path to manifest]' \ '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ '--color=:colorization option:(auto always never)' \ ;; git-checkout) _arguments \ '(-h, --help)'{-h,--help}'[show help message]' \ 'q(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ '--reference=[REF]' \ '--url=[URL]' \ '(-v, 
--verbose)'{-v,--verbose}'[use verbose output]' \ '--color=:colorization option:(auto always never)' \ ;; help) _arguments \ '(-h, --help)'{-h,--help}'[show help message]' \ '*: :_cargo_cmds' \ ;; install) _arguments \ '--bin=[only install the specified binary]' \ '--branch=[branch to use when installing from git]' \ '--color=:colorization option:(auto always never)' \ '--debug[build in debug mode instead of release mode]' \ '--example[install the specified example instead of binaries]' \ '--features=[space separated feature list]' \ '--git=[URL from which to install the crate]' \ '(-h, --help)'{-h,--help}'[show help message]' \ '(-j, --jobs)'{-j,--jobs}'[number of jobs to run in parallel]' \ '--no-default-features[do not build the default features]' \ '--path=[local filesystem path to crate to install]' \ '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ '--rev=[specific commit to use when installing from git]' \ '--root=[directory to install packages into]' \ '--tag=[tag to use when installing from git]' \ '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ '--vers=[version to install from crates.io]' \ ;; locate-project) _arguments \ '(-h, --help)'{-h,--help}'[show help message]' \ '--manifest-path=[path to manifest]' \ ;; login) _arguments \ '(-h, --help)'{-h,--help}'[show help message]' \ '--host=[Host to set the token for]' \ '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ '--color=:colorization option:(auto always never)' \ ;; new) _arguments \ '--bin[use binary template]' \ '--vcs:initialize a new repo with a given VCS:(git hg none)' \ '(-h, --help)'{-h,--help}'[show help message]' \ '--name=[set the resulting package name]' \ '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ '--color=:colorization option:(auto always never)' \ ;; owner) _arguments \ '(-a, --add)'{-a,--add}'[add owner LOGIN]' \ '(-h, 
--help)'{-h,--help}'[show help message]' \ '--index[registry index]' \ '(-l, --list)'{-l,--list}'[list owners of a crate]' \ '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ '(-r, --remove)'{-r,--remove}'[remove owner LOGIN]' \ '--token[API token to use when authenticating]' \ '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ '--color=:colorization option:(auto always never)' \ ;; package) _arguments \ '(-h, --help)'{-h,--help}'[show help message]' \ '(-l, --list)'{-l,--list}'[print files included in a package without making one]' \ '--manifest-path=[path to manifest]' \ '--no-metadata[ignore warnings about a lack of human-usable metadata]' \ '--no-verify[do not build to verify contents]' \ '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ '--color=:colorization option:(auto always never)' \ ;; pkgid) _arguments \ '(-h, --help)'{-h,--help}'[show help message]' \ '--manifest-path=[path to manifest]' \ '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ '--color=:colorization option:(auto always never)' \ ;; publish) _arguments \ '(-h, --help)'{-h,--help}'[show help message]' \ '--host=[Host to set the token for]' \ '--manifest-path=[path to manifest]' \ '--no-verify[Do not verify tarball until before publish]' \ '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ '--token[token to use when uploading]' \ '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ '--color=:colorization option:(auto always never)' \ ;; read-manifest) _arguments \ '(-h, --help)'{-h,--help}'[show help message]' \ '--manifest-path=[path to manifest]' \ '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ '--color=:colorization option:(auto always never)' \ ;; run) _arguments \ '--example=[name of the bin target]' \ '--features=[space separated feature list]' \ '(-h, --help)'{-h,--help}'[show help message]' \ '(-j, 
--jobs)'{-j,--jobs}'[number of jobs to run in parallel]' \ '--manifest-path=[path to manifest]' \ '--bin=[name of the bin target]' \ '--no-default-features[do not build the default features]' \ '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ '--release=[build in release mode]' \ '--target=[target triple]' \ '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ '--color=:colorization option:(auto always never)' \ '*: :_normal' \ ;; rustc) _arguments \ '--color=:colorization option:(auto always never)' \ '--features=[features to compile for the package]' \ '(-h, --help)'{-h,--help}'[show help message]' \ '(-j, --jobs)'{-j,--jobs}'=[number of jobs to run in parallel]' \ '--manifest-path=[path to the manifest to fetch dependencies for]' \ '--no-default-features[do not compile default features for the package]' \ '(-p, --package)'{-p,--package}'=[profile to compile for]' \ '--profile=[profile to build the selected target for]' \ '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ '--release[build artifacts in release mode, with optimizations]' \ '--target=[target triple which compiles will be for]' \ '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ "${command_scope_spec[@]}" \ ;; rustdoc) _arguments \ '--color=:colorization option:(auto always never)' \ '--features=[space-separated list of features to also build]' \ '(-h, --help)'{-h,--help}'[show help message]' \ '(-j, --jobs)'{-j,--jobs}'=[number of jobs to run in parallel]' \ '--manifest-path=[path to the manifest to document]' \ '--no-default-features[do not build the `default` feature]' \ '--open[open the docs in a browser after the operation]' \ '(-p, --package)'{-p,--package}'=[package to document]' \ '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ '--release[build artifacts in release mode, with optimizations]' \ '--target=[build for the target triple]' \ '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ "${command_scope_spec[@]}" \ ;; search) _arguments \ 
'--color=:colorization option:(auto always never)' \ '(-h, --help)'{-h,--help}'[show help message]' \ '--host=[host of a registry to search in]' \ '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ ;; test) _arguments \ '--features=[space separated feature list]' \ '(-h, --help)'{-h,--help}'[show help message]' \ '(-j, --jobs)'{-j,--jobs}'[number of jobs to run in parallel]' \ '--manifest-path=[path to manifest]' \ '--test=[test name]: :_test_names' \ '--no-default-features[do not build the default features]' \ '--no-fail-fast[run all tests regardless of failure]' \ '--no-run[compile but do not run]' \ '(-p,--package)'{-p=,--package=}'[package to run tests for]:packages:_get_package_names' \ '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ '--release[build artifacts in release mode, with optimizations]' \ '--target=[target triple]' \ '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ '--color=:colorization option:(auto always never)' \ '1: :_test_names' \ ;; uninstall) _arguments \ '--bin=[only uninstall the binary NAME]' \ '--color=:colorization option:(auto always never)' \ '(-h, --help)'{-h,--help}'[show help message]' \ '(-q, --quiet)'{-q,--quiet}'[less output printed to stdout]' \ '--root=[directory to uninstall packages from]' \ '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ ;; update) _arguments \ '--aggressive=[force dependency update]' \ '(-h, --help)'{-h,--help}'[show help message]' \ '--manifest-path=[path to manifest]' \ '(-p,--package)'{-p=,--package=}'[package to update]:packages:__get_package_names' \ '--precise=[update single dependency to PRECISE]: :' \ '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ '--color=:colorization option:(auto always never)' \ ;; verify-project) _arguments \ '(-h, --help)'{-h,--help}'[show help message]' \ '--manifest-path=[path to manifest]' \ '(-q, 
--quiet)'{-q,--quiet}'[no output printed to stdout]' \ '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ '--color=:colorization option:(auto always never)' \ ;; version) _arguments \ '(-h, --help)'{-h,--help}'[show help message]' \ '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ '--color=:colorization option:(auto always never)' \ ;; yank) _arguments \ '(-h, --help)'{-h,--help}'[show help message]' \ '--index[registry index]' \ '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ '--token[API token to use when authenticating]' \ '--undo[undo a yank, putting a version back into the index]' \ '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ '--color=:colorization option:(auto always never)' \ '--vers[yank version]' \ ;; esac ;; esac } _cargo_cmds(){ local -a commands;commands=( 'bench:execute all benchmarks of a local package' 'build:compile the current project' 'clean:remove generated artifacts' 'doc:build package documentation' 'fetch:fetch package dependencies' 'generate-lockfile:create lockfile' 'git-checkout:git checkout' 'help:get help for commands' 'install:install a Rust binary' 'locate-project:print "Cargo.toml" location' 'login:login to remote server' 'new:create a new project' 'owner:manage the owners of a crate on the registry' 'package:assemble local package into a distributable tarball' 'pkgid:print a fully qualified package specification' 'publish:upload package to the registry' 'read-manifest:print manifest in JSON format' 'run:run the main binary of the local package' 'rustc:compile a package and all of its dependencies' 'rustdoc:build documentation for a package' 'search:search packages on crates.io' 'test:execute all unit and tests of a local package' 'uninstall:remove a Rust binary' 'update:update dependencies' 'verify-project:check Cargo.toml' 'version:show version information' 'yank:remove pushed file from index' ) _describe 'command' commands } #FIXME: Disabled until fixed #gets package names from the manifest file 
_get_package_names() { } #TODO:see if it makes sense to have 'locate-project' to have non-json output. #strips package name from json stuff _locate_manifest(){ local manifest=`cargo locate-project 2>/dev/null` regexp-replace manifest '\{"root":"|"\}' '' echo $manifest } # Extracts the values of "name" from the array given in $1 and shows them as # command line options for completion _get_names_from_array() { local -a filelist; local manifest=$(_locate_manifest) if [[ -z $manifest ]]; then return 0 fi local last_line local -a names; local in_block=false local block_name=$1 names=() while read line do if [[ $last_line == "[[$block_name]]" ]]; then in_block=true else if [[ $last_line =~ '.*\[\[.*' ]]; then in_block=false fi fi if [[ $in_block == true ]]; then if [[ $line =~ '.*name.*=' ]]; then regexp-replace line '^.*name *= *|"' "" names+=$line fi fi last_line=$line done < $manifest _describe $block_name names } #Gets the test names from the manifest file _test_names() { _get_names_from_array "test" } #Gets the bench names from the manifest file _benchmark_names() { _get_names_from_array "bench" } # These flags are mutally exclusive specifiers for the scope of a command; as # they are used in multiple places without change, they are expanded into the # appropriate command's `_arguments` where appropriate. set command_scope_spec command_scope_spec=( '(--bin --example --test --lib)--bench=[benchmark name]: :_benchmark_names' '(--bench --bin --test --lib)--example=[example name]' '(--bench --example --test --lib)--bin=[binary name]' '(--bench --bin --example --test)--lib=[library name]' '(--bench --bin --example --lib)--test=[test name]' ) _cargo cargo-0.8.0/src/etc/cargo.1000066400000000000000000000041501264656333200153430ustar00rootroot00000000000000.TH CARGO "1" "November 2015" "cargo 0.8.0" "User Commands" .SH NAME cargo \- The Rust package manager .SH SYNOPSIS .B cargo [...] 
.B cargo [\fIOPTIONS\fR] .SH DESCRIPTION This program is a package manager for the Rust language, available at <\fBhttp://rust-lang.org\fR>. .SH OPTIONS .TP \fB\-h, \-\-help\fR Display a help message .TP \fB\-V, \-\-version\fR Print version information and exit .TP \fB\-\-list\fR List all available cargo commands .TP \fB\-v, \-\-verbose\fR Use verbose output .TP \fB\-\-color\fR Configure coloring of output .SH COMMANDS To get extended information about commands, run 'cargo help ' .TP \fBcargo build\fR Compile the current project .TP \fBcargo clean\fR Remove the target directory with build output .TP \fBcargo doc\fR Build this project's and its dependencies' documentation .TP \fBcargo install\fR Install a Rust binary .TP \fBcargo new\fR Create a new cargo project .TP \fBcargo run\fR Build and execute src/main.rs .TP \fBcargo test\fR Run the tests for the package .TP \fBcargo bench\fR Run the benchmarks for the package .TP \fBcargo update\fR Update dependencies in Cargo.lock .TP \fBcargo package\fR Generate a source tarball for the current package .TP \fBcargo publish\fR Package and upload this project to the registry .TP \fBcargo uninstall\fR Remove a Rust binary .TP \fBcargo version\fR Print cargo's version and exit .SH FILES .TP ~/.cargo Directory in which Cargo stores repository data. Cargo can be instructed to use a .cargo subdirectory in a different location by setting the CARGO_HOME environment variable. .SH "EXAMPLES" Build a local package and all of its dependencies $ cargo build Build a package with optimizations $ cargo build --release Run tests for a cross-compiled target $ cargo test --target i686-unknown-linux-gnu Learn about a command's options and usage $ cargo help clean .SH "SEE ALSO" rustc(1), rustdoc(1) .SH "BUGS" See <\fBhttps://github.com/rust-lang/cargo/issues\fR> for issues. .SH "COPYRIGHT" This work is dual-licensed under Apache 2.0 and MIT terms. See \fBCOPYRIGHT\fR file in the cargo source distribution. 
cargo-0.8.0/src/etc/cargo.bashcomp.sh000066400000000000000000000136641264656333200174220ustar00rootroot00000000000000command -v cargo >/dev/null 2>&1 && _cargo() { local cur prev words cword cmd _get_comp_words_by_ref cur prev words cword COMPREPLY=() cmd=${words[1]} local vcs='git hg none' local color='auto always never' local opt_help='-h --help' local opt_verbose='-v --verbose' local opt_quiet='-q --quiet' local opt_color='--color' local opt_common="$opt_help $opt_verbose $opt_quiet $opt_color" local opt_pkg='-p --package' local opt_feat='--features --no-default-features' local opt_mani='--manifest-path' local opt_jobs='-j --jobs' local opt___nocmd="$opt_common -V --version --list" local opt__bench="$opt_common $opt_pkg $opt_feat $opt_mani $opt_jobs --target --lib --bin --test --bench --example --no-run" local opt__build="$opt_common $opt_pkg $opt_feat $opt_mani $opt_jobs --target --lib --bin --test --bench --example --release" local opt__clean="$opt_common $opt_pkg $opt_mani --target --release" local opt__doc="$opt_common $opt_pkg $opt_feat $opt_mani $opt_jobs --target --open --no-deps --release" local opt__fetch="$opt_common $opt_mani" local opt__generate_lockfile="${opt__fetch}" local opt__git_checkout="$opt_common --reference --url" local opt__help="$opt_help" local opt__install="$opt_common $opt_feat $opt_jobs --bin --branch --debug --example --git --list --path --rev --root --tag --vers" local opt__locate_project="$opt_mani -h --help" local opt__login="$opt_common --host" local opt__new="$opt_common --vcs --bin --name" local opt__owner="$opt_common -a --add -r --remove -l --list --index --token" local opt__package="$opt_common $opt_mani -l --list --no-verify --no-metadata" local opt__pkgid="${opt__fetch}" local opt__publish="$opt_common $opt_mani --host --token --no-verify" local opt__read_manifest="$opt_help $opt_verbose $opt_mani --color" local opt__run="$opt_common $opt_feat $opt_mani $opt_jobs --target --bin --example --release" local 
opt__rustc="$opt_common $opt_pkg $opt_feat $opt_mani $opt_jobs --target --lib --bin --test --bench --example --release" local opt__rustdoc="$opt_common $opt_pkg $opt_feat $opt_mani $opt_jobs --target --lib --bin --test --bench --example --release --open" local opt__search="$opt_common --host" local opt__test="$opt_common $opt_pkg $opt_feat $opt_mani $opt_jobs --target --lib --bin --test --bench --example --no-run --release --no-fail-fast" local opt__uninstall="$opt_common --bin --root" local opt__update="$opt_common $opt_pkg $opt_mani --aggressive --precise" local opt__verify_project="${opt__fetch}" local opt__version="$opt_help $opt_verbose --color" local opt__yank="$opt_common --vers --undo --index --token" if [[ $cword -eq 1 ]]; then if [[ "$cur" == -* ]]; then COMPREPLY=( $( compgen -W "${opt___nocmd}" -- "$cur" ) ) else COMPREPLY=( $( compgen -W "$__cargo_commands" -- "$cur" ) ) fi elif [[ $cword -ge 2 ]]; then case "${prev}" in --vcs) COMPREPLY=( $( compgen -W "$vcs" -- "$cur" ) ) ;; --color) COMPREPLY=( $( compgen -W "$color" -- "$cur" ) ) ;; --manifest-path) _filedir toml ;; --bin) COMPREPLY=( $( compgen -W "$(_bin_names)" -- "$cur" ) ) ;; --test) COMPREPLY=( $( compgen -W "$(_test_names)" -- "$cur" ) ) ;; --bench) COMPREPLY=( $( compgen -W "$(_benchmark_names)" -- "$cur" ) ) ;; --example) COMPREPLY=( $( compgen -W "$(_get_examples)" -- "$cur" ) ) ;; --target) COMPREPLY=( $( compgen -W "$(_get_targets)" -- "$cur" ) ) ;; help) COMPREPLY=( $( compgen -W "$__cargo_commands" -- "$cur" ) ) ;; *) local opt_var=opt__${cmd//-/_} COMPREPLY=( $( compgen -W "${!opt_var}" -- "$cur" ) ) ;; esac fi # compopt does not work in bash version 3 return 0 } && complete -F _cargo cargo __cargo_commands=$(cargo --list 2>/dev/null | tail -n +2) _locate_manifest(){ local manifest=`cargo locate-project 2>/dev/null` # regexp-replace manifest '\{"root":"|"\}' '' echo ${manifest:9:-2} } # Extracts the values of "name" from the array given in $1 and shows them as # command line options 
for completion _get_names_from_array() { local manifest=$(_locate_manifest) if [[ -z $manifest ]]; then return 0 fi local last_line local -a names local in_block=false local block_name=$1 while read line do if [[ $last_line == "[[$block_name]]" ]]; then in_block=true else if [[ $last_line =~ .*\[\[.* ]]; then in_block=false fi fi if [[ $in_block == true ]]; then if [[ $line =~ .*name.*\= ]]; then line=${line##*=} line=${line%%\"} line=${line##*\"} names+=($line) fi fi last_line=$line done < $manifest echo "${names[@]}" } #Gets the bin names from the manifest file _bin_names() { _get_names_from_array "bin" } #Gets the test names from the manifest file _test_names() { _get_names_from_array "test" } #Gets the bench names from the manifest file _benchmark_names() { _get_names_from_array "bench" } _get_examples(){ local files=($(dirname $(_locate_manifest))/examples/*.rs) local names=("${files[@]##*/}") local names=("${names[@]%.*}") # "*" means no examples found if [[ "${names[@]}" != "*" ]]; then echo "${names[@]}" fi } _get_targets(){ local CURRENT_PATH if [ `uname -o` == "Cygwin" -a -f "$PWD"/Cargo.toml ]; then CURRENT_PATH=$PWD else CURRENT_PATH=$(_locate_manifest) fi if [[ -z "$CURRENT_PATH" ]]; then return 1 fi local TARGETS=() local FIND_PATHS=( "/" ) local FIND_PATH LINES LINE while [[ "$CURRENT_PATH" != "/" ]]; do FIND_PATHS+=( "$CURRENT_PATH" ) CURRENT_PATH=$(dirname $CURRENT_PATH) done for FIND_PATH in ${FIND_PATHS[@]}; do if [[ -f "$FIND_PATH"/.cargo/config ]]; then LINES=( `grep "$FIND_PATH"/.cargo/config -e "^\[target\."` ) for LINE in ${LINES[@]}; do TARGETS+=(`sed 's/^\[target\.\(.*\)\]$/\1/' <<< $LINE`) done fi done echo "${TARGETS[@]}" } # vim:ft=sh cargo-0.8.0/src/etc/dl-snapshot.py000066400000000000000000000047001264656333200167750ustar00rootroot00000000000000import download import hashlib import os import re import shutil import sys datere = re.compile('^\d{4}-\d{2}-\d{2}') cksumre = re.compile('^ ([^ ]+) ([^$]+)$') current = None snaps = {} with 
open('src/snapshots.txt') as f: for line in iter(f): line = line.rstrip() m = datere.match(line) if m: current = m.group() snaps[current] = {} continue m = cksumre.match(line) if m: snaps[current][m.group(1)] = m.group(2) continue # This script currently doesn't look at older snapshots, so there is # no need to look past the first section. break date = current triple = sys.argv[1] ts = triple.split('-') arch = ts[0] if (arch == 'i586') or (arch == 'i386'): arch = 'i686' if len(ts) == 2: vendor = 'unknown' target_os = ts[1] else: vendor = ts[1] target_os = ts[2] # NB: The platform format differs from the triple format, to support # bootstrapping multiple triples from the same snapshot. plat_arch = arch if (arch != 'i686') else 'i386' plat_os = target_os if (target_os == 'windows'): plat_os = 'winnt' elif (target_os == 'darwin'): plat_os = 'macos' platform = "%s-%s" % (plat_os, plat_arch) if platform not in snaps[date]: raise Exception("no snapshot for the triple '%s'" % triple) # Reconstitute triple with any applicable changes. For historical reasons # this differs from the snapshots.txt platform name. 
if target_os == 'linux': target_os = 'linux-gnu' elif target_os == 'darwin': vendor = 'apple' elif target_os == 'windows': vendor = 'pc' target_os = 'windows-gnu' triple = "%s-%s-%s" % (arch, vendor, target_os) hash = snaps[date][platform] tarball = 'cargo-nightly-' + triple + '.tar.gz' url = 'https://static.rust-lang.org/cargo-dist/%s/%s' % \ (date.strip(), tarball) dl_path = "target/dl/" + tarball dst = "target/snapshot" if not os.path.isdir('target/dl'): os.makedirs('target/dl') if os.path.isdir(dst): shutil.rmtree(dst) exists = False if os.path.exists(dl_path): h = hashlib.sha1(open(dl_path, 'rb').read()).hexdigest() if h == hash: print("file already present %s (%s)" % (dl_path, hash,)) exists = True if not exists: download.get(url, dl_path) h = hashlib.sha1(open(dl_path, 'rb').read()).hexdigest() if h != hash: raise Exception("failed to verify the checksum of the snapshot") download.unpack(dl_path, dst, strip=2) cargo-0.8.0/src/etc/download.py000066400000000000000000000035741264656333200163600ustar00rootroot00000000000000import contextlib import os import shutil import subprocess import sys import tarfile def get(url, path, quiet=False): # see http://serverfault.com/questions/301128/how-to-download if sys.platform == 'win32': run(["PowerShell.exe", "/nologo", "-Command", "(New-Object System.Net.WebClient).DownloadFile('" + url + "', '" + path + "')"], quiet=quiet) else: run(["curl", "-o", path, url], quiet=quiet) def unpack(tarball, dst, quiet=False, strip=0): if quiet: print("extracting " + tarball) with contextlib.closing(tarfile.open(tarball)) as tar: for p in tar.getmembers(): if p.isdir(): continue path = [] p2 = p.name while p2 != "": a, b = os.path.split(p2) path.insert(0, b) p2 = a if len(path) <= strip: continue fp = os.path.join(dst, *path[strip:]) if not quiet: print("extracting " + p.name) contents = tar.extractfile(p) if not os.path.exists(os.path.dirname(fp)): os.makedirs(os.path.dirname(fp)) open(fp, 'wb').write(contents.read()) os.chmod(fp, 
p.mode) def run(args, quiet=False): if not quiet: print("running: " + ' '.join(args)) sys.stdout.flush() # Use Popen here instead of call() as it apparently allows powershell on # Windows to not lock up waiting for input presumably. ret = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = ret.communicate() code = ret.wait() if code != 0: print("stdout: \n\n" + out) print("stderr: \n\n" + err) raise Exception("failed to fetch url") cargo-0.8.0/src/etc/install-deps.py000066400000000000000000000034571264656333200171500ustar00rootroot00000000000000#!/usr/bin/env python import contextlib import download import os import shutil import sys import tarfile if os.environ.get('BITS') == '32': host_bits = 'i686' extra_bits = 'x86_64' else: host_bits = 'x86_64' extra_bits = 'i686' extra = None # Figure out our target triple if sys.platform == 'linux' or sys.platform == 'linux2': host = host_bits + '-unknown-linux-gnu' extra = extra_bits + '-unknown-linux-gnu' elif sys.platform == 'darwin': host = host_bits + '-apple-darwin' extra = extra_bits + '-apple-darwin' elif sys.platform == 'win32': if os.environ.get('MSVC') == '1': host = host_bits + '-pc-windows-msvc' extra = extra_bits + '-pc-windows-msvc' else: host = host_bits + '-pc-windows-gnu' else: exit_msg = "There is no official Cargo snapshot for {} platform, sorry." 
sys.exit(exit_msg.format(sys.platform)) rust_date = open('src/rustversion.txt').read().strip() url = 'https://static.rust-lang.org/dist/' + rust_date def install_via_tarballs(): if os.path.isdir("rustc-install"): shutil.rmtree("rustc-install") # Download the compiler host_fname = 'rustc-nightly-' + host + '.tar.gz' download.get(url + '/' + host_fname, host_fname) download.unpack(host_fname, "rustc-install", quiet=True, strip=2) os.remove(host_fname) # Download all target libraries needed fetch_std(host) if extra is not None: fetch_std(extra) if os.path.isdir("rustc"): shutil.rmtree("rustc") os.rename("rustc-install", "rustc") def fetch_std(target): fname = 'rust-std-nightly-' + target + '.tar.gz' print("adding target libs for " + target) download.get(url + '/' + fname, fname) download.unpack(fname, "rustc-install", quiet=True, strip=2) os.remove(fname) install_via_tarballs() cargo-0.8.0/src/etc/print-new-snapshot.py000066400000000000000000000016221264656333200203210ustar00rootroot00000000000000# When updating snapshots, run this file and pipe it into `src/snapshots.txt` import os import sys import hashlib import download date = sys.argv[1] print(date) if not os.path.isdir('target/dl'): os.makedirs('target/dl') snaps = { 'macos-i386': 'i686-apple-darwin', 'macos-x86_64': 'x86_64-apple-darwin', 'linux-i386': 'i686-unknown-linux-gnu', 'linux-x86_64': 'x86_64-unknown-linux-gnu', 'winnt-i386': 'i686-pc-windows-gnu', 'winnt-x86_64': 'x86_64-pc-windows-gnu', 'bitrig-x86_64': 'x86_64-unknown-bitrig', } for platform in sorted(snaps): triple = snaps[platform] tarball = 'cargo-nightly-' + triple + '.tar.gz' url = 'https://static.rust-lang.org/cargo-dist/' + date + '/' + tarball dl_path = "target/dl/" + tarball download.get(url, dl_path, quiet=True) h = hashlib.sha1(open(dl_path, 'rb').read()).hexdigest() print(' ' + platform + ' ' + h) 
cargo-0.8.0/src/rust-installer/000077500000000000000000000000001264656333200164035ustar00rootroot00000000000000cargo-0.8.0/src/rustversion.txt000066400000000000000000000000131264656333200165510ustar00rootroot000000000000002015-11-30 cargo-0.8.0/src/snapshots.txt000066400000000000000000000206141264656333200162010ustar00rootroot000000000000002015-04-02 dragonfly-x86_64 7d330a67ad82701ee65a08a47a51c3b0b26697bc freebsd-x86_64 2e0ade0901864ea67200f990cb289343b08959e7 bitrig-x86_64 1b39aba2b9e1a7c9b5ac890b864eb1cb8a18e4d0 openbsd-x86_64 1e2a02520f9e8e300a3d7ef91c1ab03e6baeebe6 linux-i386 ba6c162680d5509d89ba2363d7cae2047f40c034 linux-x86_64 94f715c9a52809a639f2ce6f8b1d5215a0c272b5 macos-i386 cf333f16f89bfd50e8ce461c6f81ca30d33f7f73 macos-x86_64 1f7008a6ec860e2bc7580e71bdf320ac518ddeb8 winnt-i386 8c0088ae9e47133b976f7ad155c50ca9abb2906c winnt-x86_64 01ae9ea568211a20f048e7b00d902d6fe72d1627 2015-03-26 linux-i386 d8b59fb0a0e8222b1753370f1d7c91dcb9697b37 linux-x86_64 e2f8388d6bccad3b3f09bbbe4ea1bc9671224f4c macos-i386 3baad9c920c4a68bfd8c10ba3afb80013559adf5 macos-x86_64 394afa61b945717bca18412c3c93a428db7d6d5d winnt-i386 4bc98dabc039c34c040942f0eadd99ddb37f06dc winnt-x86_64 54d948ed95b86b9c63861246cf7cfd7161a48a20 2015-03-17 linux-i386 96a64fa9b4b6cc0cddaa90ecde4e08254c9025d5 linux-x86_64 354bb5b11b1f19e270ebc0553db1ddc560999bdb macos-i386 d1b69ef765bc450a3758b8abdb0909df7893058b macos-x86_64 a2328a82e073c230cd88dcfac96bdc784a999200 winnt-i386 fb6e346d59bda47ed87e36800e8bfe210cf01297 winnt-x86_64 4ef3d1ce315df8b27bd842fb66b8e2b03ce99a08 2015-02-26 linux-i386 2a28b604d09b4a76a54a05d91f7f158692427b3a linux-x86_64 7367f4aca86d38e209ef7236b00175df036c03e2 macos-i386 e5cabb0a4a2b4e47f7b1ae9b802e2b5d0b14eac5 macos-x86_64 3026c60ddd46d2bcf1cb178fc801095dbfba5286 winnt-i386 2008eed3965ed9a989a38c22b9c55c02ae9db1f1 winnt-x86_64 98a48d7a6dbffcd099ea2574a68f04883624d9a1 2015-01-24 linux-i386 96213038f850569f1c4fa6a0d146c6155c0d566b linux-x86_64 4d87486493c2881edced7b1d2f8beaac32aaa5b5 
macos-i386 17b9fc782e86bffe170abb83a01e0cb7c90a0daa macos-x86_64 18887bdbd3e6d2a127aa34216fa06e9877b0fbc6 winnt-i386 10b9b5fa3e9241ef0b6c3b77b0c072a45b585905 winnt-x86_64 ba71627e46964535b64da56bd0679e5f86fae957 2014-12-30 linux-i386 ab8bba0918d3d2ddbd7fd21f147e223dbf04cece linux-x86_64 0efe0f7bcbcbeb5494affcc8a2207db448a08c45 macos-i386 e5097005b0a27c186b8edee24982fd4c3ebba81e macos-x86_64 6c0bb776e5645fb93b67341b111c715f39b25511 winnt-i386 2088c5256445b5bb2da57a71f6a9671e5a280477 winnt-x86_64 950e25bcedc5ba9d96891523c9967f81d5f6c74d 2014-12-21 linux-i386 4dea04e278192c5409f43794a98f20a8f59df2d9 linux-x86_64 3e48c573d3c4d26591feb7bfe988174720f08374 macos-i386 dc3d498c0567af4a0820e91756dcfff8fde0efac macos-x86_64 f301bd8c3c93a5c88698c69190e464af1525ac96 winnt-i386 5b6bc87e302d1ff6ac9b0576292eb7cbff2c3b83 winnt-x86_64 a8bb8d3a7ed3fc8caf4a33d6b9d2e43544877409 2014-12-20 linux-i386 1cccab5a6ac8e73472bf78cdce019cd1a60d4638 linux-x86_64 53c176fcda0a40fb77b901303c443de3dce3e58d macos-i386 bbc23c78ca4307efa6250552a097e6b2ccfe2cc3 macos-x86_64 4f97a30408c99858ad2b7a7f6edfe3d5b8f0ff3f winnt-i386 5d77cd604b011100398023e8dc3d98c173247874 winnt-x86_64 1290dcc2a51e99027803d641c08299abe1265158 2014-12-18 linux-i386 30eec547395093ab9c6a0587a3210666b9272815 linux-x86_64 20d13252996838680f4356a7addd75403bb11aec macos-i386 c179a345cb1fbb08f8173133701635ef3c0e03be macos-x86_64 4f2a877828a2d8ca7d66906529bde01b26d8cef7 winnt-i386 fa20b54e06badc5fb092981d442e4e831dd9c5f8 winnt-x86_64 196cae1120f5070e7dd4796d19ed45b9dd01aba2 2014-12-08 linux-i386 853d29bc167748f8a481d5d43fb20ab99e3e16ee linux-x86_64 57c79c64459145321baa8fc45d51c588d18125ad macos-i386 43b483c9a389243ce58ba5356c4f71a626dc5658 macos-x86_64 3777768da6a820f49d789c3477b493b24de59a61 winnt-i386 b831d1d673db189496f94d3596351d9545687947 winnt-x86_64 846b677e6fec99690b00595b934fdb30b834c815 2014-11-22 linux-i386 3204c8a38721199f69d2971db887d1dc71a63825 linux-x86_64 39ca0d02eac184bc764ff9c1f645ca361715c5c2 macos-i386 
ebc1836424c4b3ba49f9adef271c50d2a8e134c0 macos-x86_64 a2045e95984b65eab4a704152566f8ab9a3be518 winnt-i386 5e0831b14d2e6ee91ef195dfbc4d9699499d5e99 winnt-x86_64 d5fa1b58207346061898459955fa7f0b33d77474 2014-11-11 linux-i386 5cbf3346309d303cb954c363097fc4abedf50610 linux-x86_64 8c1594e227eca6f23ba02daa5f3cd6150ac88907 macos-i386 f338835a58cc5357ed092a23ba0ddbf2624dfacd macos-x86_64 b2d03a6a9422c42b7f5ba008c8851ddc89ae693c winnt-i386 50b851d94181375f0c7a00aacb7d8d63960eddc7 winnt-x86_64 aa12a1cb80a665f53066a15774360d686b3e5968 2014-11-07 linux-i386 f65ae2b9d94477fec79e444ea489ff98a456e033 linux-x86_64 1a7f663d8f4e2109240a20d8e63c958e0557d883 macos-i386 9d82a00bd396c99cc27693864da2364d0394e843 macos-x86_64 1dc297d8e149384a76dfb7efc7869b82fe663b92 winnt-i386 d9f87d83c6cbabd7a4794498e4c3a4e94df0740a winnt-x86_64 74284401082e1b5aff14a72e2152ed5cb55812cf 2014-10-28 linux-i386 15fb3dd24140911ba707d8b4b1dd6826732a3ad6 linux-x86_64 a924d82f5dc987efda6303d3e2c1aeb8ade34faa macos-i386 bfaddd7eacd1dec4749ab4918fad47f50fa64467 macos-x86_64 43a91c484f665be2ec0959e2e884ab93cce06a6b winnt-i386 299de1d99341fed17dc2726e5564dd0ab0ca1dfa winnt-x86_64 1948ae424458c498f904ea97efb00350a7d8598f 2014-10-16 linux-i386 61417861716cd41d8f372be36bb0572e4f29dec8 linux-x86_64 59be4ff9f547f1ba47ad133ab74151a48bc2659b macos-i386 cb5267d2e7df8406c26bb0337b1c2e80b125e2cb macos-x86_64 9283adb4dfd1b60c7bfe38ef755f9187fe7d5580 winnt-i386 88deb2950fa2b73358bc15763e6373ade6325f53 winnt-x86_64 0143d4b0e4b20e84dbb27a4440b4b55d369f4456 2014-09-19 linux-i386 c92895421e6fa170dbd713e74334b8c3cf22b817 linux-x86_64 66ee4126f9e4820cd82e78181931f8ea365904de macos-i386 e2364b1f1ece338b9fc4c308c472fc2413bff04e macos-x86_64 09f92f06ab4f048acf71d83dc0426ff1509779a9 winnt-i386 0c9b75d5b9ca58a7e39290fbe9c54d91db65c42c winnt-x86_64 180c547aa79ba3069852450a6e833b577c7d4c3d 2014-09-11 linux-i386 f18823de75413ab72df91deb9b3b341c02005b2e linux-x86_64 58d9789472dd955be94903cafd406ce394915297 macos-i386 
07da45add611e7ecea8f9115ee551df1ff354f51 macos-x86_64 0b82c9c58865fe8298273ee5fafc937db1b80528 winnt-i386 4782a7014dd53213535f19b1f2a09f640cf00490 2014-09-03 linux-i386 d357756680a60cd00464fa991b71170dcddb2b30 linux-x86_64 35fd121fda3509cc020d42223017be03a1c19b87 macos-i386 40aad83e9d97f5a344179f4573807f3ac04775f9 macos-x86_64 5e64f637019f499585ab100e5072b8eeeba191ed winnt-i386 fc25a2f6f9ce3a6f11348ffe17e1115ca81fc4db 2014-08-19 linux-i386 8d20fc36b8b7339fcd1ae6c118f1becd001c2b08 linux-x86_64 46e05521f0dceeb831462caa8a54ca1caf21c078 macos-i386 fd65cf0e9c6fa137db666da289aa4359dbc56ca1 macos-x86_64 59ba26a9c92af40c08eed443dcfca518718a2ba1 winnt-i386 cb0c4fa54abebb86d1a4bb28c2b1d084234c3b35 2014-08-16 linux-i386 30ea09ef95aa230ff415319be699c950603a8fb4 linux-x86_64 95badae811c711ae5d03f837a38f6ae12c8e473a macos-i386 5b7afe93a4a79416bab0778e7e03a786cf2e9252 macos-x86_64 e4141beae6e3dae44393d148492ec9ac1ac1ae5c winnt-i386 580cb0e92ddb1e2f935386183543c3d0152f13b9 2014-08-12 linux-i386 af5e80dba2d845e30039302e57bd516c96b347de linux-x86_64 42a7786073802d6b47dbb6d2bb071a322964b28e macos-i386 244595a91534ce3097877d96241ae21d150e670d macos-x86_64 8c56578bd4610adcc1b608aa841c13f6f9b60d45 winnt-i386 4708fba1f267c1c32460c7d8b4cd2ed8c32a1ecb 2014-08-08 linux-i386 44207002e96c4f1309af70673966ee1e67938f5e linux-x86_64 5dc5e5aa575814af2d4e40e9dcdca2c55b594bd1 macos-i386 5d1924057a0d56d033f32680f4b393cdd9c6805a macos-x86_64 65462ea1e48cb4b4c57ff7e947cd2cc26a8f2723 winnt-i386 a481b15d35ab2e1d1dcd2f181a2566e097604ffc 2014-08-06 linux-i386 eb7c2a87b30db077f6f1c4ea724ebd0e5cc07d1c linux-x86_64 1672657adb9012df2912bbb2f43466f1c6817e55 macos-i386 1224207bbfa9f46796940512ac8a7a9ab9f5665b macos-x86_64 da4afea32d7336a0a91b8fe160d38896385d4ae2 winnt-i386 2b6b2efe9ec77d3d456c943bb2e54f2281309ef1 2014-08-04 linux-i386 49032ce8c5c2b94d73e298dcbdb09e0b2fbe573c linux-x86_64 98c83ecc7cac3765d62f5e8b19bdc506e01f3cab macos-i386 c450260a2edace970089b35fed644eb607b509ba macos-x86_64 
04763ba59b70240d16bdb57845e3511b3b243522 winnt-i386 15a70b068beb3b85760279496cf62b7406e5e2b2 2014-07-30 linux-i386 4d4e78426060b891cf729d5e3cca86d5aebdd31d linux-x86_64 2a39bb838bc1c740d41a2ee8054a2c32f1efbec8 macos-i386 16d1581dad71b1cf551646bc2dfdc920f4dda16c macos-x86_64 05d836f2195e55f050e68e8bb209405a67fbefcb winnt-i386 ade95f921ba73848d2ae67d1b8cd7c364e881e86 2014-07-29 mac 53f8bc39132e987d25e022698c3234fee0916ecf linux b7dbdc89126577fda2eef7d63c5f7fc1d8d28f99 win 9551454e2ce649d146ad8d856cee3672ab0def02 2014-07-26 mac 9a78815c7fcdb1cdabc93eb120f80444f209d968 linux b38e7c45292d2cc6a1932fa9a1f349f9b92c0c1d win 4e955f8b80684ea6c9ca2dd6e2c235ce2d9cf21f cargo-0.8.0/tests/000077500000000000000000000000001264656333200137665ustar00rootroot00000000000000cargo-0.8.0/tests/check-style.sh000077500000000000000000000001721264656333200165400ustar00rootroot00000000000000echo "checking for lines over 100 characters..." find src tests -name '*.rs' | xargs grep '.\{101,\}' && exit 1 echo "ok" cargo-0.8.0/tests/resolve.rs000066400000000000000000000254061264656333200160220ustar00rootroot00000000000000extern crate hamcrest; extern crate cargo; use std::collections::HashMap; use hamcrest::{assert_that, equal_to, contains}; use cargo::core::source::{SourceId, GitReference}; use cargo::core::dependency::Kind::{self, Development}; use cargo::core::{Dependency, PackageId, Summary, Registry}; use cargo::util::{CargoResult, ToUrl}; use cargo::core::resolver::{self, Method}; fn resolve(pkg: PackageId, deps: Vec, registry: &mut R) -> CargoResult> { let summary = Summary::new(pkg, deps, HashMap::new()).unwrap(); let method = Method::Everything; Ok(try!(resolver::resolve(&summary, &method, registry)).iter().map(|p| { p.clone() }).collect()) } trait ToDep { fn to_dep(self) -> Dependency; } impl ToDep for &'static str { fn to_dep(self) -> Dependency { let url = "http://example.com".to_url().unwrap(); let source_id = SourceId::for_registry(&url); Dependency::parse(self, Some("1.0.0"), 
&source_id).unwrap() } } impl ToDep for Dependency { fn to_dep(self) -> Dependency { self } } trait ToPkgId { fn to_pkgid(&self) -> PackageId; } impl ToPkgId for &'static str { fn to_pkgid(&self) -> PackageId { PackageId::new(*self, "1.0.0", ®istry_loc()).unwrap() } } impl ToPkgId for (&'static str, &'static str) { fn to_pkgid(&self) -> PackageId { let (name, vers) = *self; PackageId::new(name, vers, ®istry_loc()).unwrap() } } macro_rules! pkg { ($pkgid:expr => [$($deps:expr),+]) => ({ let d: Vec = vec![$($deps.to_dep()),+]; Summary::new($pkgid.to_pkgid(), d, HashMap::new()).unwrap() }); ($pkgid:expr) => ( Summary::new($pkgid.to_pkgid(), Vec::new(), HashMap::new()).unwrap() ) } fn registry_loc() -> SourceId { let remote = "http://example.com".to_url().unwrap(); SourceId::for_registry(&remote) } fn pkg(name: &str) -> Summary { Summary::new(pkg_id(name), Vec::new(), HashMap::new()).unwrap() } fn pkg_id(name: &str) -> PackageId { PackageId::new(name, "1.0.0", ®istry_loc()).unwrap() } fn pkg_id_loc(name: &str, loc: &str) -> PackageId { let remote = loc.to_url(); let master = GitReference::Branch("master".to_string()); let source_id = SourceId::for_git(&remote.unwrap(), master); PackageId::new(name, "1.0.0", &source_id).unwrap() } fn pkg_loc(name: &str, loc: &str) -> Summary { Summary::new(pkg_id_loc(name, loc), Vec::new(), HashMap::new()).unwrap() } fn dep(name: &str) -> Dependency { dep_req(name, "1.0.0") } fn dep_req(name: &str, req: &str) -> Dependency { let url = "http://example.com".to_url().unwrap(); let source_id = SourceId::for_registry(&url); Dependency::parse(name, Some(req), &source_id).unwrap() } fn dep_loc(name: &str, location: &str) -> Dependency { let url = location.to_url().unwrap(); let master = GitReference::Branch("master".to_string()); let source_id = SourceId::for_git(&url, master); Dependency::parse(name, Some("1.0.0"), &source_id).unwrap() } fn dep_kind(name: &str, kind: Kind) -> Dependency { 
dep(name).clone_inner().set_kind(kind).into_dependency() } fn registry(pkgs: Vec) -> Vec { pkgs } fn names(names: &[P]) -> Vec { names.iter().map(|name| name.to_pkgid()).collect() } fn loc_names(names: &[(&'static str, &'static str)]) -> Vec { names.iter() .map(|&(name, loc)| pkg_id_loc(name, loc)).collect() } #[test] fn test_resolving_empty_dependency_list() { let res = resolve(pkg_id("root"), Vec::new(), &mut registry(vec![])).unwrap(); assert_that(&res, equal_to(&names(&["root"]))); } #[test] fn test_resolving_only_package() { let mut reg = registry(vec![pkg("foo")]); let res = resolve(pkg_id("root"), vec![dep("foo")], &mut reg); assert_that(&res.unwrap(), contains(names(&["root", "foo"])).exactly()); } #[test] fn test_resolving_one_dep() { let mut reg = registry(vec![pkg("foo"), pkg("bar")]); let res = resolve(pkg_id("root"), vec![dep("foo")], &mut reg); assert_that(&res.unwrap(), contains(names(&["root", "foo"])).exactly()); } #[test] fn test_resolving_multiple_deps() { let mut reg = registry(vec![pkg!("foo"), pkg!("bar"), pkg!("baz")]); let res = resolve(pkg_id("root"), vec![dep("foo"), dep("baz")], &mut reg).unwrap(); assert_that(&res, contains(names(&["root", "foo", "baz"])).exactly()); } #[test] fn test_resolving_transitive_deps() { let mut reg = registry(vec![pkg!("foo"), pkg!("bar" => ["foo"])]); let res = resolve(pkg_id("root"), vec![dep("bar")], &mut reg).unwrap(); assert_that(&res, contains(names(&["root", "foo", "bar"]))); } #[test] fn test_resolving_common_transitive_deps() { let mut reg = registry(vec![pkg!("foo" => ["bar"]), pkg!("bar")]); let res = resolve(pkg_id("root"), vec![dep("foo"), dep("bar")], &mut reg).unwrap(); assert_that(&res, contains(names(&["root", "foo", "bar"]))); } #[test] fn test_resolving_with_same_name() { let list = vec![pkg_loc("foo", "http://first.example.com"), pkg_loc("bar", "http://second.example.com")]; let mut reg = registry(list); let res = resolve(pkg_id("root"), vec![dep_loc("foo", "http://first.example.com"), 
dep_loc("bar", "http://second.example.com")], &mut reg); let mut names = loc_names(&[("foo", "http://first.example.com"), ("bar", "http://second.example.com")]); names.push(pkg_id("root")); assert_that(&res.unwrap(), contains(names).exactly()); } #[test] fn test_resolving_with_dev_deps() { let mut reg = registry(vec![ pkg!("foo" => ["bar", dep_kind("baz", Development)]), pkg!("baz" => ["bat", dep_kind("bam", Development)]), pkg!("bar"), pkg!("bat") ]); let res = resolve(pkg_id("root"), vec![dep("foo"), dep_kind("baz", Development)], &mut reg).unwrap(); assert_that(&res, contains(names(&["root", "foo", "bar", "baz"]))); } #[test] fn resolving_with_many_versions() { let mut reg = registry(vec![ pkg!(("foo", "1.0.1")), pkg!(("foo", "1.0.2")), ]); let res = resolve(pkg_id("root"), vec![dep("foo")], &mut reg).unwrap(); assert_that(&res, contains(names(&[("root", "1.0.0"), ("foo", "1.0.2")]))); } #[test] fn resolving_with_specific_version() { let mut reg = registry(vec![ pkg!(("foo", "1.0.1")), pkg!(("foo", "1.0.2")), ]); let res = resolve(pkg_id("root"), vec![dep_req("foo", "=1.0.1")], &mut reg).unwrap(); assert_that(&res, contains(names(&[("root", "1.0.0"), ("foo", "1.0.1")]))); } #[test] fn resolving_incompat_versions() { let mut reg = registry(vec![ pkg!(("foo", "1.0.1")), pkg!(("foo", "1.0.2")), pkg!("bar" => [dep_req("foo", "=1.0.2")]), ]); assert!(resolve(pkg_id("root"), vec![ dep_req("foo", "=1.0.1"), dep("bar"), ], &mut reg).is_err()); } #[test] fn resolving_backtrack() { let mut reg = registry(vec![ pkg!(("foo", "1.0.2") => [dep("bar")]), pkg!(("foo", "1.0.1") => [dep("baz")]), pkg!("bar" => [dep_req("foo", "=2.0.2")]), pkg!("baz"), ]); let res = resolve(pkg_id("root"), vec![ dep_req("foo", "^1"), ], &mut reg).unwrap(); assert_that(&res, contains(names(&[("root", "1.0.0"), ("foo", "1.0.1"), ("baz", "1.0.0")]))); } #[test] fn resolving_allows_multiple_compatible_versions() { let mut reg = registry(vec![ pkg!(("foo", "1.0.0")), pkg!(("foo", "2.0.0")), 
pkg!(("foo", "0.1.0")), pkg!(("foo", "0.2.0")), pkg!("bar" => ["d1", "d2", "d3", "d4"]), pkg!("d1" => [dep_req("foo", "1")]), pkg!("d2" => [dep_req("foo", "2")]), pkg!("d3" => [dep_req("foo", "0.1")]), pkg!("d4" => [dep_req("foo", "0.2")]), ]); let res = resolve(pkg_id("root"), vec![ dep("bar"), ], &mut reg).unwrap(); assert_that(&res, contains(names(&[("root", "1.0.0"), ("foo", "1.0.0"), ("foo", "2.0.0"), ("foo", "0.1.0"), ("foo", "0.2.0"), ("d1", "1.0.0"), ("d2", "1.0.0"), ("d3", "1.0.0"), ("d4", "1.0.0"), ("bar", "1.0.0")]))); } #[test] fn resolving_with_deep_backtracking() { let mut reg = registry(vec![ pkg!(("foo", "1.0.1") => [dep_req("bar", "1")]), pkg!(("foo", "1.0.0") => [dep_req("bar", "2")]), pkg!(("bar", "1.0.0") => [dep_req("baz", "=1.0.2"), dep_req("other", "1")]), pkg!(("bar", "2.0.0") => [dep_req("baz", "=1.0.1")]), pkg!(("baz", "1.0.2") => [dep_req("other", "2")]), pkg!(("baz", "1.0.1")), pkg!(("dep_req", "1.0.0")), pkg!(("dep_req", "2.0.0")), ]); let res = resolve(pkg_id("root"), vec![ dep_req("foo", "1"), ], &mut reg).unwrap(); assert_that(&res, contains(names(&[("root", "1.0.0"), ("foo", "1.0.0"), ("bar", "2.0.0"), ("baz", "1.0.1")]))); } #[test] fn resolving_but_no_exists() { let mut reg = registry(vec![ ]); let res = resolve(pkg_id("root"), vec![ dep_req("foo", "1"), ], &mut reg); assert!(res.is_err()); assert_eq!(res.err().unwrap().to_string(), "\ no matching package named `foo` found (required by `root`) location searched: registry http://example.com/ version required: ^1\ "); } #[test] fn resolving_cycle() { let mut reg = registry(vec![ pkg!("foo" => ["foo"]), ]); let _ = resolve(pkg_id("root"), vec![ dep_req("foo", "1"), ], &mut reg); } #[test] fn hard_equality() { extern crate env_logger; let mut reg = registry(vec![ pkg!(("foo", "1.0.1")), pkg!(("foo", "1.0.0")), pkg!(("bar", "1.0.0") => [dep_req("foo", "1.0.0")]), ]); let res = resolve(pkg_id("root"), vec![ dep_req("bar", "1"), dep_req("foo", "=1.0.0"), ], &mut reg).unwrap(); 
assert_that(&res, contains(names(&[("root", "1.0.0"), ("foo", "1.0.0"), ("bar", "1.0.0")]))); } cargo-0.8.0/tests/support/000077500000000000000000000000001264656333200155025ustar00rootroot00000000000000cargo-0.8.0/tests/support/git.rs000066400000000000000000000100541264656333200166330ustar00rootroot00000000000000use std::fs::{self, File}; use std::io::prelude::*; use std::path::{Path, PathBuf}; use url::Url; use git2; use cargo::util::ProcessError; use support::{ProjectBuilder, project, path2url}; pub struct RepoBuilder { repo: git2::Repository, files: Vec, } pub fn repo(p: &Path) -> RepoBuilder { RepoBuilder::init(p) } impl RepoBuilder { pub fn init(p: &Path) -> RepoBuilder { fs::create_dir_all(p.parent().unwrap()).unwrap(); let repo = git2::Repository::init(p).unwrap(); { let mut config = repo.config().unwrap(); config.set_str("user.name", "name").unwrap(); config.set_str("user.email", "email").unwrap(); } RepoBuilder { repo: repo, files: Vec::new() } } pub fn file(self, path: &str, contents: &str) -> RepoBuilder { let mut me = self.nocommit_file(path, contents); me.files.push(PathBuf::from(path)); me } pub fn nocommit_file(self, path: &str, contents: &str) -> RepoBuilder { let dst = self.repo.workdir().unwrap().join(path); fs::create_dir_all(dst.parent().unwrap()).unwrap(); File::create(&dst).unwrap().write_all(contents.as_bytes()).unwrap(); self } pub fn build(&self) { let mut index = self.repo.index().unwrap(); for file in self.files.iter() { index.add_path(file).unwrap(); } index.write().unwrap(); let id = index.write_tree().unwrap(); let tree = self.repo.find_tree(id).unwrap(); let sig = self.repo.signature().unwrap(); self.repo.commit(Some("HEAD"), &sig, &sig, "Initial commit", &tree, &[]).unwrap(); } pub fn root(&self) -> &Path { self.repo.workdir().unwrap() } pub fn url(&self) -> Url { path2url(self.repo.workdir().unwrap().to_path_buf()) } } pub fn new(name: &str, callback: F) -> Result where F: FnOnce(ProjectBuilder) -> ProjectBuilder { let mut 
git_project = project(name); git_project = callback(git_project); git_project.build(); let repo = git2::Repository::init(&git_project.root()).unwrap(); let mut cfg = repo.config().unwrap(); cfg.set_str("user.email", "foo@bar.com").unwrap(); cfg.set_str("user.name", "Foo Bar").unwrap(); drop(cfg); add(&repo); commit(&repo); Ok(git_project) } pub fn add(repo: &git2::Repository) { // FIXME(libgit2/libgit2#2514): apparently add_all will add all submodules // as well, and then fail b/c they're a directory. As a stopgap, we just // ignore all submodules. let mut s = repo.submodules().unwrap(); for submodule in s.iter_mut() { submodule.add_to_index(false).unwrap(); } let mut index = repo.index().unwrap(); index.add_all(["*"].iter(), git2::ADD_DEFAULT, Some(&mut (|a, _b| { if s.iter().any(|s| a.starts_with(s.path())) {1} else {0} }))).unwrap(); index.write().unwrap(); } pub fn add_submodule<'a>(repo: &'a git2::Repository, url: &str, path: &Path) -> git2::Submodule<'a> { let path = path.to_str().unwrap().replace(r"\", "/"); let mut s = repo.submodule(url, Path::new(&path), false).unwrap(); let subrepo = s.open().unwrap(); subrepo.remote_add_fetch("origin", "refs/heads/*:refs/heads/*").unwrap(); let mut origin = subrepo.find_remote("origin").unwrap(); origin.fetch(&[], None, None).unwrap(); subrepo.checkout_head(None).unwrap(); s.add_finalize().unwrap(); return s; } pub fn commit(repo: &git2::Repository) -> git2::Oid { let tree_id = repo.index().unwrap().write_tree().unwrap(); let sig = repo.signature().unwrap(); let mut parents = Vec::new(); match repo.head().ok().map(|h| h.target().unwrap()) { Some(parent) => parents.push(repo.find_commit(parent).unwrap()), None => {} } let parents = parents.iter().collect::>(); repo.commit(Some("HEAD"), &sig, &sig, "test", &repo.find_tree(tree_id).unwrap(), &parents).unwrap() } cargo-0.8.0/tests/support/mod.rs000066400000000000000000000376631264656333200166460ustar00rootroot00000000000000use std::env; use std::error::Error; use 
std::ffi::OsStr; use std::fmt; use std::fs; use std::io::prelude::*; use std::os; use std::path::{Path, PathBuf}; use std::process::Output; use std::str; use std::usize; use url::Url; use hamcrest as ham; use cargo::util::ProcessBuilder; use cargo::util::ProcessError; use cargo::util::process; use support::paths::CargoPathExt; pub mod paths; pub mod git; pub mod registry; /* * * ===== Builders ===== * */ #[derive(PartialEq,Clone)] struct FileBuilder { path: PathBuf, body: String } impl FileBuilder { pub fn new(path: PathBuf, body: &str) -> FileBuilder { FileBuilder { path: path, body: body.to_string() } } fn mk(&self) -> Result<(), String> { try!(mkdir_recursive(&self.dirname())); let mut file = try!( fs::File::create(&self.path) .with_err_msg(format!("Could not create file; path={}", self.path.display()))); file.write_all(self.body.as_bytes()) .with_err_msg(format!("Could not write to file; path={}", self.path.display())) } fn dirname(&self) -> &Path { self.path.parent().unwrap() } } #[derive(PartialEq,Clone)] struct SymlinkBuilder { dst: PathBuf, src: PathBuf, } impl SymlinkBuilder { pub fn new(dst: PathBuf, src: PathBuf) -> SymlinkBuilder { SymlinkBuilder { dst: dst, src: src } } #[cfg(unix)] fn mk(&self) -> Result<(), String> { try!(mkdir_recursive(&self.dirname())); os::unix::fs::symlink(&self.dst, &self.src) .with_err_msg(format!("Could not create symlink; dst={} src={}", self.dst.display(), self.src.display())) } #[cfg(windows)] fn mk(&self) -> Result<(), String> { try!(mkdir_recursive(&self.dirname())); os::windows::fs::symlink_file(&self.dst, &self.src) .with_err_msg(format!("Could not create symlink; dst={} src={}", self.dst.display(), self.src.display())) } fn dirname(&self) -> &Path { self.src.parent().unwrap() } } #[derive(PartialEq,Clone)] pub struct ProjectBuilder { name: String, root: PathBuf, files: Vec, symlinks: Vec } impl ProjectBuilder { pub fn new(name: &str, root: PathBuf) -> ProjectBuilder { ProjectBuilder { name: name.to_string(), root: 
root, files: vec![], symlinks: vec![] } } pub fn root(&self) -> PathBuf { self.root.clone() } pub fn url(&self) -> Url { path2url(self.root()) } pub fn bin(&self, b: &str) -> PathBuf { self.build_dir().join("debug").join(&format!("{}{}", b, env::consts::EXE_SUFFIX)) } pub fn release_bin(&self, b: &str) -> PathBuf { self.build_dir().join("release").join(&format!("{}{}", b, env::consts::EXE_SUFFIX)) } pub fn target_bin(&self, target: &str, b: &str) -> PathBuf { self.build_dir().join(target).join("debug") .join(&format!("{}{}", b, env::consts::EXE_SUFFIX)) } pub fn build_dir(&self) -> PathBuf { self.root.join("target") } pub fn process>(&self, program: T) -> ProcessBuilder { let mut p = ::process(program); p.cwd(self.root()); return p } pub fn cargo(&self, cmd: &str) -> ProcessBuilder { let mut p = self.process(&cargo_dir().join("cargo")); p.arg(cmd); return p; } pub fn cargo_process(&self, cmd: &str) -> ProcessBuilder { self.build(); self.cargo(cmd) } pub fn file>(mut self, path: B, body: &str) -> ProjectBuilder { self.files.push(FileBuilder::new(self.root.join(path), body)); self } pub fn symlink>(mut self, dst: T, src: T) -> ProjectBuilder { self.symlinks.push(SymlinkBuilder::new(self.root.join(dst), self.root.join(src))); self } // TODO: return something different than a ProjectBuilder pub fn build(&self) -> &ProjectBuilder { match self.build_with_result() { Err(e) => panic!(e), _ => return self } } pub fn build_with_result(&self) -> Result<(), String> { // First, clean the directory if it already exists try!(self.rm_root()); // Create the empty directory try!(mkdir_recursive(&self.root)); for file in self.files.iter() { try!(file.mk()); } for symlink in self.symlinks.iter() { try!(symlink.mk()); } Ok(()) } fn rm_root(&self) -> Result<(), String> { if self.root.c_exists() { rmdir_recursive(&self.root) } else { Ok(()) } } } // Generates a project layout pub fn project(name: &str) -> ProjectBuilder { ProjectBuilder::new(name, paths::root().join(name)) } // === 
Helpers === pub fn mkdir_recursive(path: &Path) -> Result<(), String> { fs::create_dir_all(path) .with_err_msg(format!("could not create directory; path={}", path.display())) } pub fn rmdir_recursive(path: &Path) -> Result<(), String> { path.rm_rf() .with_err_msg(format!("could not rm directory; path={}", path.display())) } pub fn main_file(println: &str, deps: &[&str]) -> String { let mut buf = String::new(); for dep in deps.iter() { buf.push_str(&format!("extern crate {};\n", dep)); } buf.push_str("fn main() { println!("); buf.push_str(&println); buf.push_str("); }\n"); buf.to_string() } trait ErrMsg { fn with_err_msg(self, val: String) -> Result; } impl ErrMsg for Result { fn with_err_msg(self, val: String) -> Result { match self { Ok(val) => Ok(val), Err(err) => Err(format!("{}; original={}", val, err)) } } } // Path to cargo executables pub fn cargo_dir() -> PathBuf { env::var_os("CARGO_BIN_PATH").map(PathBuf::from).or_else(|| { env::current_exe().ok().as_ref().and_then(|s| s.parent()) .map(|s| s.to_path_buf()) }).unwrap_or_else(|| { panic!("CARGO_BIN_PATH wasn't set. Cannot continue running test") }) } /// Returns an absolute path in the filesystem that `path` points to. The /// returned path does not contain any symlinks in its hierarchy. 
/* * * ===== Matchers ===== * */ #[derive(Clone)] pub struct Execs { expect_stdout: Option, expect_stdin: Option, expect_stderr: Option, expect_exit_code: Option, expect_stdout_contains: Vec, expect_stderr_contains: Vec, } impl Execs { pub fn with_stdout(mut self, expected: S) -> Execs { self.expect_stdout = Some(expected.to_string()); self } pub fn with_stderr(mut self, expected: S) -> Execs { self.expect_stderr = Some(expected.to_string()); self } pub fn with_status(mut self, expected: i32) -> Execs { self.expect_exit_code = Some(expected); self } pub fn with_stdout_contains(mut self, expected: S) -> Execs { self.expect_stdout_contains.push(expected.to_string()); self } pub fn with_stderr_contains(mut self, expected: S) -> Execs { self.expect_stderr_contains.push(expected.to_string()); self } fn match_output(&self, actual: &Output) -> ham::MatchResult { self.match_status(actual) .and(self.match_stdout(actual)) .and(self.match_stderr(actual)) } fn match_status(&self, actual: &Output) -> ham::MatchResult { match self.expect_exit_code { None => ham::success(), Some(code) => { ham::expect( actual.status.code() == Some(code), format!("exited with {}\n--- stdout\n{}\n--- stderr\n{}", actual.status, String::from_utf8_lossy(&actual.stdout), String::from_utf8_lossy(&actual.stderr))) } } } fn match_stdout(&self, actual: &Output) -> ham::MatchResult { try!(self.match_std(self.expect_stdout.as_ref(), &actual.stdout, "stdout", &actual.stderr, false)); for expect in self.expect_stdout_contains.iter() { try!(self.match_std(Some(expect), &actual.stdout, "stdout", &actual.stderr, true)); } for expect in self.expect_stderr_contains.iter() { try!(self.match_std(Some(expect), &actual.stderr, "stderr", &actual.stdout, true)); } Ok(()) } fn match_stderr(&self, actual: &Output) -> ham::MatchResult { self.match_std(self.expect_stderr.as_ref(), &actual.stderr, "stderr", &actual.stdout, false) } #[allow(deprecated)] // connect => join in 1.3 fn match_std(&self, expected: Option<&String>, 
actual: &[u8], description: &str, extra: &[u8], partial: bool) -> ham::MatchResult { let out = match expected { Some(out) => out, None => return ham::success(), }; let actual = match str::from_utf8(actual) { Err(..) => return Err(format!("{} was not utf8 encoded", description)), Ok(actual) => actual, }; // Let's not deal with \r\n vs \n on windows... let actual = actual.replace("\r", ""); let actual = actual.replace("\t", ""); let mut a = actual.lines(); let e = out.lines(); let diffs = if partial { let mut min = self.diff_lines(a.clone(), e.clone(), partial); while let Some(..) = a.next() { let a = self.diff_lines(a.clone(), e.clone(), partial); if a.len() < min.len() { min = a; } } min } else { self.diff_lines(a, e, partial) }; ham::expect(diffs.len() == 0, format!("differences:\n\ {}\n\n\ other output:\n\ `{}`", diffs.connect("\n"), String::from_utf8_lossy(extra))) } fn diff_lines<'a>(&self, actual: str::Lines<'a>, expected: str::Lines<'a>, partial: bool) -> Vec { let actual = actual.take(if partial { expected.clone().count() } else { usize::MAX }); zip_all(actual, expected).enumerate().filter_map(|(i, (a,e))| { match (a, e) { (Some(a), Some(e)) => { if lines_match(&e, &a) { None } else { Some(format!("{:3} - |{}|\n + |{}|\n", i, e, a)) } }, (Some(a), None) => { Some(format!("{:3} -\n + |{}|\n", i, a)) }, (None, Some(e)) => { Some(format!("{:3} - |{}|\n +\n", i, e)) }, (None, None) => panic!("Cannot get here") } }).collect() } } fn lines_match(expected: &str, mut actual: &str) -> bool { for part in expected.split("[..]") { match actual.find(part) { Some(i) => actual = &actual[i + part.len()..], None => { return false } } } actual.len() == 0 || expected.ends_with("[..]") } struct ZipAll { first: I1, second: I2, } impl, I2: Iterator> Iterator for ZipAll { type Item = (Option, Option); fn next(&mut self) -> Option<(Option, Option)> { let first = self.first.next(); let second = self.second.next(); match (first, second) { (None, None) => None, (a, b) => Some((a, b)) 
} } } fn zip_all, I2: Iterator>(a: I1, b: I2) -> ZipAll { ZipAll { first: a, second: b, } } impl fmt::Display for Execs { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "execs") } } impl ham::Matcher for Execs { fn matches(&self, mut process: ProcessBuilder) -> ham::MatchResult { self.matches(&mut process) } } impl<'a> ham::Matcher<&'a mut ProcessBuilder> for Execs { fn matches(&self, process: &'a mut ProcessBuilder) -> ham::MatchResult { let res = process.exec_with_output(); match res { Ok(out) => self.match_output(&out), Err(ProcessError { output: Some(ref out), .. }) => { self.match_output(out) } Err(e) => { let mut s = format!("could not exec process {}: {}", process, e); match e.cause() { Some(cause) => s.push_str(&format!("\ncaused by: {}", cause.description())), None => {} } Err(s) } } } } pub fn execs() -> Execs { Execs { expect_stdout: None, expect_stderr: None, expect_stdin: None, expect_exit_code: None, expect_stdout_contains: Vec::new(), expect_stderr_contains: Vec::new(), } } #[derive(Clone)] pub struct ShellWrites { expected: String } impl fmt::Display for ShellWrites { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "`{}` written to the shell", self.expected) } } impl<'a> ham::Matcher<&'a [u8]> for ShellWrites { fn matches(&self, actual: &[u8]) -> ham::MatchResult { let actual = String::from_utf8_lossy(actual); let actual = actual.to_string(); ham::expect(actual == self.expected, actual) } } pub fn shell_writes(string: T) -> ShellWrites { ShellWrites { expected: string.to_string() } } pub trait Tap { fn tap(mut self, callback: F) -> Self; } impl Tap for T { fn tap(mut self, callback: F) -> T { callback(&mut self); self } } pub fn basic_bin_manifest(name: &str) -> String { format!(r#" [package] name = "{}" version = "0.5.0" authors = ["wycats@example.com"] [[bin]] name = "{}" "#, name, name) } pub fn basic_lib_manifest(name: &str) -> String { format!(r#" [package] name = "{}" version = "0.5.0" authors = 
["wycats@example.com"] [lib] name = "{}" "#, name, name) } pub fn path2url(p: PathBuf) -> Url { Url::from_file_path(&*p).ok().unwrap() } pub static RUNNING: &'static str = " Running"; pub static COMPILING: &'static str = " Compiling"; pub static DOCUMENTING: &'static str = " Documenting"; pub static FRESH: &'static str = " Fresh"; pub static UPDATING: &'static str = " Updating"; pub static ADDING: &'static str = " Adding"; pub static REMOVING: &'static str = " Removing"; pub static DOCTEST: &'static str = " Doc-tests"; pub static PACKAGING: &'static str = " Packaging"; pub static DOWNLOADING: &'static str = " Downloading"; pub static UPLOADING: &'static str = " Uploading"; pub static VERIFYING: &'static str = " Verifying"; pub static ARCHIVING: &'static str = " Archiving"; pub static INSTALLING: &'static str = " Installing"; cargo-0.8.0/tests/support/paths.rs000066400000000000000000000121651264656333200171740ustar00rootroot00000000000000use std::env; use std::fs; use std::io::{self, ErrorKind}; use std::path::{Path, PathBuf}; use std::sync::{Once, ONCE_INIT}; use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering}; use filetime::{self, FileTime}; static CARGO_INTEGRATION_TEST_DIR : &'static str = "cit"; static NEXT_ID: AtomicUsize = ATOMIC_USIZE_INIT; thread_local!(static TASK_ID: usize = NEXT_ID.fetch_add(1, Ordering::SeqCst)); pub fn root() -> PathBuf { let mut path = env::current_exe().unwrap(); path.pop(); // chop off exe name path.pop(); // chop off 'debug' // If `cargo test` is run manually then our path looks like // `target/debug/foo`, in which case our `path` is already pointing at // `target`. If, however, `cargo test --target $target` is used then the // output is `target/$target/debug/foo`, so our path is pointing at // `target/$target`. Here we conditionally pop the `$target` name. 
if path.file_name().and_then(|s| s.to_str()) != Some("target") { path.pop(); } path.join(CARGO_INTEGRATION_TEST_DIR) .join(&TASK_ID.with(|my_id| format!("t{}", my_id))) } pub fn home() -> PathBuf { root().join("home") } pub trait CargoPathExt { fn rm_rf(&self) -> io::Result<()>; fn mkdir_p(&self) -> io::Result<()>; fn move_into_the_past(&self) -> io::Result<()>; // cargo versions of the standard PathExt trait fn c_exists(&self) -> bool; fn c_is_file(&self) -> bool; fn c_is_dir(&self) -> bool; fn c_metadata(&self) -> io::Result; } impl CargoPathExt for Path { /* Technically there is a potential race condition, but we don't * care all that much for our tests */ fn rm_rf(&self) -> io::Result<()> { if self.c_exists() { for file in fs::read_dir(self).unwrap() { let file = try!(file).path(); if file.c_is_dir() { try!(file.rm_rf()); } else { // On windows we can't remove a readonly file, and git will // often clone files as readonly. As a result, we have some // special logic to remove readonly files on windows. 
match fs::remove_file(&file) { Ok(()) => {} Err(ref e) if cfg!(windows) && e.kind() == ErrorKind::PermissionDenied => { let mut p = file.c_metadata().unwrap().permissions(); p.set_readonly(false); fs::set_permissions(&file, p).unwrap(); try!(fs::remove_file(&file)); } Err(e) => return Err(e) } } } fs::remove_dir(self) } else { Ok(()) } } fn mkdir_p(&self) -> io::Result<()> { fs::create_dir_all(self) } fn move_into_the_past(&self) -> io::Result<()> { if self.c_is_file() { try!(time_travel(self)); } else { try!(recurse(self, &self.join("target"))); } return Ok(()); fn recurse(p: &Path, bad: &Path) -> io::Result<()> { if p.c_is_file() { time_travel(p) } else if p.starts_with(bad) { Ok(()) } else { for f in try!(fs::read_dir(p)) { let f = try!(f).path(); try!(recurse(&f, bad)); } Ok(()) } } fn time_travel(path: &Path) -> io::Result<()> { let stat = try!(path.c_metadata()); let mtime = FileTime::from_last_modification_time(&stat); let newtime = mtime.seconds_relative_to_1970() - 3600; let nanos = mtime.nanoseconds(); let newtime = FileTime::from_seconds_since_1970(newtime, nanos); // Sadly change_file_times has a failure mode where a readonly file // cannot have its times changed on windows. 
match filetime::set_file_times(path, newtime, newtime) { Err(ref e) if e.kind() == io::ErrorKind::PermissionDenied => {} e => return e, } let mut perms = stat.permissions(); perms.set_readonly(false); try!(fs::set_permissions(path, perms)); filetime::set_file_times(path, newtime, newtime) } } fn c_exists(&self) -> bool { fs::metadata(self).is_ok() } fn c_is_file(&self) -> bool { fs::metadata(self).map(|m| m.is_file()).unwrap_or(false) } fn c_is_dir(&self) -> bool { fs::metadata(self).map(|m| m.is_dir()).unwrap_or(false) } fn c_metadata(&self) -> io::Result { fs::metadata(self) } } /// Ensure required test directories exist and are empty pub fn setup() { debug!("path setup; root={}; home={}", root().display(), home().display()); static INIT: Once = ONCE_INIT; INIT.call_once(|| { root().parent().unwrap().mkdir_p().unwrap(); }); root().rm_rf().unwrap(); home().mkdir_p().unwrap(); } cargo-0.8.0/tests/support/registry.rs000066400000000000000000000141071264656333200177230ustar00rootroot00000000000000use std::fs::{self, File}; use std::io::prelude::*; use std::path::{PathBuf, Path}; use flate2::Compression::Default; use flate2::write::GzEncoder; use git2; use rustc_serialize::hex::ToHex; use tar::{Archive, Header}; use url::Url; use support::paths; use support::git::repo; use cargo::util::Sha256; pub fn registry_path() -> PathBuf { paths::root().join("registry") } pub fn registry() -> Url { Url::from_file_path(&*registry_path()).ok().unwrap() } pub fn dl_path() -> PathBuf { paths::root().join("dl") } pub fn dl_url() -> Url { Url::from_file_path(&*dl_path()).ok().unwrap() } pub struct Package { name: String, vers: String, deps: Vec<(String, String, &'static str)>, files: Vec<(String, String)>, yanked: bool, } fn init() { let config = paths::home().join(".cargo/config"); fs::create_dir_all(config.parent().unwrap()).unwrap(); if fs::metadata(&config).is_ok() { return } File::create(&config).unwrap().write_all(format!(r#" [registry] index = "{reg}" token = "api-token" "#, reg 
= registry()).as_bytes()).unwrap(); // Init a new registry repo(®istry_path()) .file("config.json", &format!(r#" {{"dl":"{}","api":""}} "#, dl_url())) .build(); } impl Package { pub fn new(name: &str, vers: &str) -> Package { init(); Package { name: name.to_string(), vers: vers.to_string(), deps: Vec::new(), files: Vec::new(), yanked: false, } } pub fn file(&mut self, name: &str, contents: &str) -> &mut Package { self.files.push((name.to_string(), contents.to_string())); self } pub fn dep(&mut self, name: &str, vers: &str) -> &mut Package { self.deps.push((name.to_string(), vers.to_string(), "normal")); self } pub fn dev_dep(&mut self, name: &str, vers: &str) -> &mut Package { self.deps.push((name.to_string(), vers.to_string(), "dev")); self } pub fn yanked(&mut self, yanked: bool) -> &mut Package { self.yanked = yanked; self } #[allow(deprecated)] // connect => join in 1.3 pub fn publish(&self) { self.make_archive(); // Figure out what we're going to write into the index let deps = self.deps.iter().map(|&(ref name, ref req, ref kind)| { format!("{{\"name\":\"{}\",\ \"req\":\"{}\",\ \"features\":[],\ \"default_features\":false,\ \"target\":null,\ \"optional\":false,\ \"kind\":\"{}\"}}", name, req, kind) }).collect::>().connect(","); let cksum = { let mut c = Vec::new(); File::open(&self.archive_dst()).unwrap() .read_to_end(&mut c).unwrap(); cksum(&c) }; let line = format!("{{\"name\":\"{}\",\"vers\":\"{}\",\ \"deps\":[{}],\"cksum\":\"{}\",\"features\":{{}},\ \"yanked\":{}}}", self.name, self.vers, deps, cksum, self.yanked); let file = match self.name.len() { 1 => format!("1/{}", self.name), 2 => format!("2/{}", self.name), 3 => format!("3/{}/{}", &self.name[..1], self.name), _ => format!("{}/{}/{}", &self.name[0..2], &self.name[2..4], self.name), }; // Write file/line in the index let dst = registry_path().join(&file); let mut prev = String::new(); let _ = File::open(&dst).and_then(|mut f| f.read_to_string(&mut prev)); 
fs::create_dir_all(dst.parent().unwrap()).unwrap(); File::create(&dst).unwrap() .write_all((prev + &line[..] + "\n").as_bytes()).unwrap(); // Add the new file to the index let repo = git2::Repository::open(®istry_path()).unwrap(); let mut index = repo.index().unwrap(); index.add_path(Path::new(&file)).unwrap(); index.write().unwrap(); let id = index.write_tree().unwrap(); // Commit this change let tree = repo.find_tree(id).unwrap(); let sig = repo.signature().unwrap(); let parent = repo.refname_to_id("refs/heads/master").unwrap(); let parent = repo.find_commit(parent).unwrap(); repo.commit(Some("HEAD"), &sig, &sig, "Another commit", &tree, &[&parent]).unwrap(); } fn make_archive(&self) { let mut manifest = format!(r#" [package] name = "{}" version = "{}" authors = [] "#, self.name, self.vers); for &(ref dep, ref req, kind) in self.deps.iter() { manifest.push_str(&format!(r#" [{}dependencies.{}] version = "{}" "#, match kind { "build" => "build-", "dev" => "dev-", _ => "" }, dep, req)); } let dst = self.archive_dst(); fs::create_dir_all(dst.parent().unwrap()).unwrap(); let f = File::create(&dst).unwrap(); let a = Archive::new(GzEncoder::new(f, Default)); self.append(&a, "Cargo.toml", &manifest); if self.files.len() == 0 { self.append(&a, "src/lib.rs", ""); } else { for &(ref name, ref contents) in self.files.iter() { self.append(&a, name, contents); } } a.finish().unwrap(); } fn append(&self, ar: &Archive, file: &str, contents: &str) { let mut header = Header::new(); header.set_size(contents.len() as u64); header.set_path(format!("{}-{}/{}", self.name, self.vers, file)).unwrap(); header.set_cksum(); ar.append(&header, &mut contents.as_bytes()).unwrap(); } pub fn archive_dst(&self) -> PathBuf { dl_path().join(&self.name).join(&self.vers).join("download") } } fn cksum(s: &[u8]) -> String { let mut sha = Sha256::new(); sha.update(s); sha.finish().to_hex() } 
cargo-0.8.0/tests/test_bad_config.rs000066400000000000000000000215241264656333200174520ustar00rootroot00000000000000use support::{project, execs}; use hamcrest::assert_that; fn setup() {} test!(bad1 { let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] "#) .file("src/lib.rs", "") .file(".cargo/config", r#" [target] nonexistent-target = "foo" "#); assert_that(foo.cargo_process("build").arg("-v") .arg("--target=nonexistent-target"), execs().with_status(101).with_stderr("\ expected table for configuration key `target.nonexistent-target`, but found string in [..]config ")); }); test!(bad2 { let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] "#) .file("src/lib.rs", "") .file(".cargo/config", r#" [http] proxy = 3.0 "#); assert_that(foo.cargo_process("publish").arg("-v"), execs().with_status(101).with_stderr("\ Couldn't load Cargo configuration Caused by: failed to load TOML configuration from `[..]config` Caused by: failed to parse key `http` Caused by: failed to parse key `proxy` Caused by: found TOML configuration value of unknown type `float` ")); }); test!(bad3 { let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] "#) .file("src/lib.rs", "") .file(".cargo/config", r#" [http] proxy = true "#); assert_that(foo.cargo_process("publish").arg("-v"), execs().with_status(101).with_stderr("\ invalid configuration for key `http.proxy` expected a string, but found a boolean in [..]config ")); }); test!(bad4 { let foo = project("foo") .file(".cargo/config", r#" [cargo-new] name = false "#); assert_that(foo.cargo_process("new").arg("-v").arg("foo"), execs().with_status(101).with_stderr("\ Failed to create project `foo` at `[..]` Caused by: invalid configuration for key `cargo-new.name` expected a string, but found a boolean in [..]config ")); }); test!(bad5 { let foo = project("foo") .file(".cargo/config", r#" foo = "" "#) 
.file("foo/.cargo/config", r#" foo = 2 "#); foo.build(); assert_that(foo.cargo("new") .arg("-v").arg("foo").cwd(&foo.root().join("foo")), execs().with_status(101).with_stderr("\ Couldn't load Cargo configuration Caused by: failed to merge key `foo` between files: file 1: [..]foo[..]foo[..]config file 2: [..]foo[..]config Caused by: expected integer, but found string ")); }); test!(bad_cargo_config_jobs { let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] "#) .file("src/lib.rs", "") .file(".cargo/config", r#" [build] jobs = -1 "#); assert_that(foo.cargo_process("build").arg("-v"), execs().with_status(101).with_stderr("\ build.jobs must be positive, but found -1 in [..] ")); }); test!(default_cargo_config_jobs { let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] "#) .file("src/lib.rs", "") .file(".cargo/config", r#" [build] jobs = 1 "#); assert_that(foo.cargo_process("build").arg("-v"), execs().with_status(0)); }); test!(good_cargo_config_jobs { let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] "#) .file("src/lib.rs", "") .file(".cargo/config", r#" [build] jobs = 4 "#); assert_that(foo.cargo_process("build").arg("-v"), execs().with_status(0)); }); test!(invalid_global_config { let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [dependencies] foo = "0.1.0" "#) .file(".cargo/config", "4") .file("src/lib.rs", ""); assert_that(foo.cargo_process("build").arg("-v"), execs().with_status(101).with_stderr("\ Couldn't load Cargo configuration Caused by: could not parse TOML configuration in `[..]config` Caused by: could not parse input as TOML [..]config:1:2 expected `=`, but found eof ")); }); test!(bad_cargo_lock { let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] "#) .file("Cargo.lock", "") .file("src/lib.rs", ""); 
assert_that(foo.cargo_process("build").arg("-v"), execs().with_status(101).with_stderr("\ failed to parse lock file at: [..]Cargo.lock Caused by: expected a section for the key `root` ")); }); test!(bad_git_dependency { let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [dependencies] foo = { git = "file:.." } "#) .file("src/lib.rs", ""); assert_that(foo.cargo_process("build").arg("-v"), execs().with_status(101).with_stderr("\ Unable to update file:/// Caused by: failed to clone into: [..] Caused by: [[..]] 'file:///' is not a valid local file URI ")); }); test!(bad_crate_type { let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [lib] crate-type = ["bad_type", "rlib"] "#) .file("src/lib.rs", ""); assert_that(foo.cargo_process("build").arg("-v"), execs().with_status(0).with_stderr("\ warning: crate-type \"bad_type\" was not one of lib|rlib|dylib|staticlib ")); }); test!(malformed_override { let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [target.x86_64-apple-darwin.freetype] native = { foo: "bar" } "#) .file("src/lib.rs", ""); assert_that(foo.cargo_process("build"), execs().with_status(101).with_stderr("\ failed to parse manifest at `[..]` Caused by: could not parse input as TOML Cargo.toml:[..] 
")); }); test!(duplicate_binary_names { let foo = project("foo") .file("Cargo.toml", r#" [package] name = "qqq" version = "0.1.0" authors = ["A "] [[bin]] name = "e" path = "a.rs" [[bin]] name = "e" path = "b.rs" "#) .file("a.rs", r#"fn main() -> () {}"#) .file("b.rs", r#"fn main() -> () {}"#); assert_that(foo.cargo_process("build"), execs().with_status(101).with_stderr("\ failed to parse manifest at `[..]` Caused by: found duplicate binary name e, but all binary targets must have a unique name ")); }); test!(duplicate_example_names { let foo = project("foo") .file("Cargo.toml", r#" [package] name = "qqq" version = "0.1.0" authors = ["A "] [[example]] name = "ex" path = "examples/ex.rs" [[example]] name = "ex" path = "examples/ex2.rs" "#) .file("examples/ex.rs", r#"fn main () -> () {}"#) .file("examples/ex2.rs", r#"fn main () -> () {}"#); assert_that(foo.cargo_process("build").arg("--example").arg("ex"), execs().with_status(101).with_stderr("\ failed to parse manifest at `[..]` Caused by: found duplicate example name ex, but all binary targets must have a unique name ")); }); test!(duplicate_bench_names { let foo = project("foo") .file("Cargo.toml", r#" [package] name = "qqq" version = "0.1.0" authors = ["A "] [[bench]] name = "ex" path = "benches/ex.rs" [[bench]] name = "ex" path = "benches/ex2.rs" "#) .file("benches/ex.rs", r#"fn main () {}"#) .file("benches/ex2.rs", r#"fn main () {}"#); assert_that(foo.cargo_process("bench"), execs().with_status(101).with_stderr("\ failed to parse manifest at `[..]` Caused by: found duplicate bench name ex, but all binary targets must have a unique name ")); }); test!(unused_keys { let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [target.foo] bar = "3" "#) .file("src/lib.rs", ""); assert_that(foo.cargo_process("build"), execs().with_status(0).with_stderr("\ unused manifest key: target.foo.bar ")); }); 
cargo-0.8.0/tests/test_bad_manifest_path.rs000066400000000000000000000210351264656333200210240ustar00rootroot00000000000000use support::{project, execs, main_file, basic_bin_manifest}; use hamcrest::{assert_that}; fn setup() {} fn assert_not_a_cargo_toml(command: &str, manifest_path_argument: &str) { let p = project("foo") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])); assert_that(p.cargo_process(command) .arg("--manifest-path").arg(manifest_path_argument) .cwd(p.root().parent().unwrap()), execs().with_status(101) .with_stderr("the manifest-path must be a path to a Cargo.toml file")); } #[allow(deprecated)] // connect => join in 1.3 fn assert_cargo_toml_doesnt_exist(command: &str, manifest_path_argument: &str) { let p = project("foo"); let expected_path = manifest_path_argument .split("/").collect::>().connect("[..]"); assert_that(p.cargo_process(command) .arg("--manifest-path").arg(manifest_path_argument) .cwd(p.root().parent().unwrap()), execs().with_status(101) .with_stderr( format!("manifest path `{}` does not exist", expected_path) )); } test!(bench_dir_containing_cargo_toml { assert_not_a_cargo_toml("bench", "foo"); }); test!(bench_dir_plus_file { assert_not_a_cargo_toml("bench", "foo/bar"); }); test!(bench_dir_plus_path { assert_not_a_cargo_toml("bench", "foo/bar/baz"); }); test!(bench_dir_to_nonexistent_cargo_toml { assert_cargo_toml_doesnt_exist("bench", "foo/bar/baz/Cargo.toml"); }); test!(build_dir_containing_cargo_toml { assert_not_a_cargo_toml("build", "foo"); }); test!(build_dir_plus_file { assert_not_a_cargo_toml("bench", "foo/bar"); }); test!(build_dir_plus_path { assert_not_a_cargo_toml("bench", "foo/bar/baz"); }); test!(build_dir_to_nonexistent_cargo_toml { assert_cargo_toml_doesnt_exist("build", "foo/bar/baz/Cargo.toml"); }); test!(clean_dir_containing_cargo_toml { assert_not_a_cargo_toml("clean", "foo"); }); test!(clean_dir_plus_file { assert_not_a_cargo_toml("clean", "foo/bar"); }); 
test!(clean_dir_plus_path { assert_not_a_cargo_toml("clean", "foo/bar/baz"); }); test!(clean_dir_to_nonexistent_cargo_toml { assert_cargo_toml_doesnt_exist("clean", "foo/bar/baz/Cargo.toml"); }); test!(doc_dir_containing_cargo_toml { assert_not_a_cargo_toml("doc", "foo"); }); test!(doc_dir_plus_file { assert_not_a_cargo_toml("doc", "foo/bar"); }); test!(doc_dir_plus_path { assert_not_a_cargo_toml("doc", "foo/bar/baz"); }); test!(doc_dir_to_nonexistent_cargo_toml { assert_cargo_toml_doesnt_exist("doc", "foo/bar/baz/Cargo.toml"); }); test!(fetch_dir_containing_cargo_toml { assert_not_a_cargo_toml("fetch", "foo"); }); test!(fetch_dir_plus_file { assert_not_a_cargo_toml("fetch", "foo/bar"); }); test!(fetch_dir_plus_path { assert_not_a_cargo_toml("fetch", "foo/bar/baz"); }); test!(fetch_dir_to_nonexistent_cargo_toml { assert_cargo_toml_doesnt_exist("fetch", "foo/bar/baz/Cargo.toml"); }); test!(generate_lockfile_dir_containing_cargo_toml { assert_not_a_cargo_toml("generate-lockfile", "foo"); }); test!(generate_lockfile_dir_plus_file { assert_not_a_cargo_toml("generate-lockfile", "foo/bar"); }); test!(generate_lockfile_dir_plus_path { assert_not_a_cargo_toml("generate-lockfile", "foo/bar/baz"); }); test!(generate_lockfile_dir_to_nonexistent_cargo_toml { assert_cargo_toml_doesnt_exist("generate-lockfile", "foo/bar/baz/Cargo.toml"); }); test!(package_dir_containing_cargo_toml { assert_not_a_cargo_toml("package", "foo"); }); test!(package_dir_plus_file { assert_not_a_cargo_toml("package", "foo/bar"); }); test!(package_dir_plus_path { assert_not_a_cargo_toml("package", "foo/bar/baz"); }); test!(package_dir_to_nonexistent_cargo_toml { assert_cargo_toml_doesnt_exist("package", "foo/bar/baz/Cargo.toml"); }); test!(pkgid_dir_containing_cargo_toml { assert_not_a_cargo_toml("pkgid", "foo"); }); test!(pkgid_dir_plus_file { assert_not_a_cargo_toml("pkgid", "foo/bar"); }); test!(pkgid_dir_plus_path { assert_not_a_cargo_toml("pkgid", "foo/bar/baz"); }); 
test!(pkgid_dir_to_nonexistent_cargo_toml { assert_cargo_toml_doesnt_exist("pkgid", "foo/bar/baz/Cargo.toml"); }); test!(publish_dir_containing_cargo_toml { assert_not_a_cargo_toml("publish", "foo"); }); test!(publish_dir_plus_file { assert_not_a_cargo_toml("publish", "foo/bar"); }); test!(publish_dir_plus_path { assert_not_a_cargo_toml("publish", "foo/bar/baz"); }); test!(publish_dir_to_nonexistent_cargo_toml { assert_cargo_toml_doesnt_exist("publish", "foo/bar/baz/Cargo.toml"); }); test!(read_manifest_dir_containing_cargo_toml { assert_not_a_cargo_toml("read-manifest", "foo"); }); test!(read_manifest_dir_plus_file { assert_not_a_cargo_toml("read-manifest", "foo/bar"); }); test!(read_manifest_dir_plus_path { assert_not_a_cargo_toml("read-manifest", "foo/bar/baz"); }); test!(read_manifest_dir_to_nonexistent_cargo_toml { assert_cargo_toml_doesnt_exist("read-manifest", "foo/bar/baz/Cargo.toml"); }); test!(run_dir_containing_cargo_toml { assert_not_a_cargo_toml("run", "foo"); }); test!(run_dir_plus_file { assert_not_a_cargo_toml("run", "foo/bar"); }); test!(run_dir_plus_path { assert_not_a_cargo_toml("run", "foo/bar/baz"); }); test!(run_dir_to_nonexistent_cargo_toml { assert_cargo_toml_doesnt_exist("run", "foo/bar/baz/Cargo.toml"); }); test!(rustc_dir_containing_cargo_toml { assert_not_a_cargo_toml("rustc", "foo"); }); test!(rustc_dir_plus_file { assert_not_a_cargo_toml("rustc", "foo/bar"); }); test!(rustc_dir_plus_path { assert_not_a_cargo_toml("rustc", "foo/bar/baz"); }); test!(rustc_dir_to_nonexistent_cargo_toml { assert_cargo_toml_doesnt_exist("rustc", "foo/bar/baz/Cargo.toml"); }); test!(test_dir_containing_cargo_toml { assert_not_a_cargo_toml("test", "foo"); }); test!(test_dir_plus_file { assert_not_a_cargo_toml("test", "foo/bar"); }); test!(test_dir_plus_path { assert_not_a_cargo_toml("test", "foo/bar/baz"); }); test!(test_dir_to_nonexistent_cargo_toml { assert_cargo_toml_doesnt_exist("test", "foo/bar/baz/Cargo.toml"); }); test!(update_dir_containing_cargo_toml 
{ assert_not_a_cargo_toml("update", "foo"); }); test!(update_dir_plus_file { assert_not_a_cargo_toml("update", "foo/bar"); }); test!(update_dir_plus_path { assert_not_a_cargo_toml("update", "foo/bar/baz"); }); test!(update_dir_to_nonexistent_cargo_toml { assert_cargo_toml_doesnt_exist("update", "foo/bar/baz/Cargo.toml"); }); test!(verify_project_dir_containing_cargo_toml { let p = project("foo") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])); assert_that(p.cargo_process("verify-project") .arg("--manifest-path").arg("foo") .cwd(p.root().parent().unwrap()), execs().with_status(1) .with_stdout("\ {\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\ ")); }); test!(verify_project_dir_plus_file { let p = project("foo") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])); assert_that(p.cargo_process("verify-project") .arg("--manifest-path").arg("foo/bar") .cwd(p.root().parent().unwrap()), execs().with_status(1) .with_stdout("\ {\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\ ")); }); test!(verify_project_dir_plus_path { let p = project("foo") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])); assert_that(p.cargo_process("verify-project") .arg("--manifest-path").arg("foo/bar/baz") .cwd(p.root().parent().unwrap()), execs().with_status(1) .with_stdout("\ {\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\ ")); }); test!(verify_project_dir_to_nonexistent_cargo_toml { let p = project("foo"); assert_that(p.cargo_process("verify-project") .arg("--manifest-path").arg("foo/bar/baz/Cargo.toml") .cwd(p.root().parent().unwrap()), execs().with_status(1) .with_stdout("\ {\"invalid\":\"manifest path `foo[..]bar[..]baz[..]Cargo.toml` does not exist\"}\ ")); }); 
cargo-0.8.0/tests/test_cargo.rs000066400000000000000000000100051264656333200164620ustar00rootroot00000000000000use std::env; use std::ffi::OsString; use std::fs::{self, File}; use std::io::prelude::*; use std::path::{Path, PathBuf}; use std::str; use cargo_process; use support::paths; use support::{execs, project, mkdir_recursive, ProjectBuilder}; use hamcrest::{assert_that}; fn setup() { } /// Add an empty file with executable flags (and platform-dependent suffix). /// TODO: move this to `ProjectBuilder` if other cases using this emerge. fn fake_executable(proj: ProjectBuilder, dir: &Path, name: &str) -> ProjectBuilder { let path = proj.root().join(dir).join(&format!("{}{}", name, env::consts::EXE_SUFFIX)); mkdir_recursive(path.parent().unwrap()).unwrap(); File::create(&path).unwrap(); make_executable(&path); return proj; #[cfg(unix)] fn make_executable(p: &Path) { use std::os::unix::prelude::*; let mut perms = fs::metadata(p).unwrap().permissions();; let mode = perms.mode(); perms.set_mode(mode | 0o111); fs::set_permissions(p, perms).unwrap(); } #[cfg(windows)] fn make_executable(_: &Path) {} } fn path() -> Vec { env::split_paths(&env::var_os("PATH").unwrap_or(OsString::new())).collect() } test!(list_commands_looks_at_path { let proj = project("list-non-overlapping"); let proj = fake_executable(proj, &Path::new("path-test"), "cargo-1"); let mut pr = cargo_process(); let mut path = path(); path.push(proj.root().join("path-test")); let path = env::join_paths(path.iter()).unwrap(); let output = pr.arg("-v").arg("--list") .env("PATH", &path); let output = output.exec_with_output().unwrap(); let output = str::from_utf8(&output.stdout).unwrap(); assert!(output.contains("\n 1\n"), "missing 1: {}", output); }); test!(find_closest_biuld_to_build { let mut pr = cargo_process(); pr.arg("biuld"); assert_that(pr, execs().with_status(101) .with_stderr("no such subcommand Did you mean `build`? 
")); }); // if a subcommand is more than 3 edit distance away, we don't make a suggestion test!(find_closest_dont_correct_nonsense { let paths = path().into_iter().filter(|p| { fs::read_dir(p).into_iter() .flat_map(|i| i) .filter_map(|e| e.ok()) .all(|e| !e.file_name().to_str().unwrap_or("").starts_with("cargo-")) }); let mut pr = cargo_process(); pr.arg("asdf") .cwd(&paths::root()) .env("PATH", env::join_paths(paths).unwrap()); assert_that(pr, execs().with_status(101) .with_stderr("no such subcommand ")); }); test!(override_cargo_home { let root = paths::root(); let my_home = root.join("my_home"); fs::create_dir(&my_home).unwrap(); File::create(&my_home.join("config")).unwrap().write_all(br#" [cargo-new] name = "foo" email = "bar" git = false "#).unwrap(); assert_that(cargo_process() .arg("new").arg("foo") .env("USER", "foo") .env("CARGO_HOME", &my_home), execs().with_status(0)); let toml = paths::root().join("foo/Cargo.toml"); let mut contents = String::new(); File::open(&toml).unwrap().read_to_string(&mut contents).unwrap(); assert!(contents.contains(r#"authors = ["foo "]"#)); }); test!(cargo_help { assert_that(cargo_process(), execs().with_status(0)); assert_that(cargo_process().arg("help"), execs().with_status(0)); assert_that(cargo_process().arg("-h"), execs().with_status(0)); assert_that(cargo_process().arg("help").arg("build"), execs().with_status(0)); assert_that(cargo_process().arg("build").arg("-h"), execs().with_status(0)); assert_that(cargo_process().arg("help").arg("-h"), execs().with_status(0)); assert_that(cargo_process().arg("help").arg("help"), execs().with_status(0)); }); cargo-0.8.0/tests/test_cargo_bench.rs000066400000000000000000000620221264656333200176270ustar00rootroot00000000000000use std::str; use support::{project, execs, basic_bin_manifest, basic_lib_manifest}; use support::{COMPILING, FRESH, RUNNING}; use support::paths::CargoPathExt; use hamcrest::{assert_that, existing_file}; use cargo::util::process; fn setup() {} 
test!(cargo_bench_simple { if !::is_nightly() { return } let p = project("foo") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", r#" #![feature(test)] extern crate test; fn hello() -> &'static str { "hello" } pub fn main() { println!("{}", hello()) } #[bench] fn bench_hello(_b: &mut test::Bencher) { assert_eq!(hello(), "hello") }"#); assert_that(p.cargo_process("build"), execs()); assert_that(&p.bin("foo"), existing_file()); assert_that(process(&p.bin("foo")), execs().with_stdout("hello\n")); assert_that(p.cargo("bench"), execs().with_stdout(&format!("\ {} foo v0.5.0 ({}) {} target[..]release[..]foo-[..] running 1 test test bench_hello ... bench: [..] 0 ns/iter (+/- 0) test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured ", COMPILING, p.url(), RUNNING))); }); test!(bench_tarname { if !::is_nightly() { return } let prj = project("foo") .file("Cargo.toml" , r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("benches/bin1.rs", r#" #![feature(test)] extern crate test; #[bench] fn run1(_ben: &mut test::Bencher) { }"#) .file("benches/bin2.rs", r#" #![feature(test)] extern crate test; #[bench] fn run2(_ben: &mut test::Bencher) { }"#); let expected_stdout = format!("\ {compiling} foo v0.0.1 ({dir}) {running} target[..]release[..]bin2[..] running 1 test test run2 ... bench: [..] 0 ns/iter (+/- 0) test result: ok. 
0 passed; 0 failed; 0 ignored; 1 measured ", compiling = COMPILING, running = RUNNING, dir = prj.url()); assert_that(prj.cargo_process("bench").arg("--bench").arg("bin2"), execs().with_status(0).with_stdout(expected_stdout)); }); test!(cargo_bench_verbose { if !::is_nightly() { return } let p = project("foo") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", r#" #![feature(test)] extern crate test; fn main() {} #[bench] fn bench_hello(_b: &mut test::Bencher) {} "#); assert_that(p.cargo_process("bench").arg("-v").arg("hello"), execs().with_stdout(&format!("\ {compiling} foo v0.5.0 ({url}) {running} `rustc src[..]foo.rs [..]` {running} `[..]target[..]release[..]foo-[..] hello --bench` running 1 test test bench_hello ... bench: [..] 0 ns/iter (+/- 0) test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured ", compiling = COMPILING, url = p.url(), running = RUNNING))); }); test!(many_similar_names { if !::is_nightly() { return } let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", " #![feature(test)] extern crate test; pub fn foo() {} #[bench] fn lib_bench(_b: &mut test::Bencher) {} ") .file("src/main.rs", " #![feature(test)] extern crate foo; extern crate test; fn main() {} #[bench] fn bin_bench(_b: &mut test::Bencher) { foo::foo() } ") .file("benches/foo.rs", r#" #![feature(test)] extern crate foo; extern crate test; #[bench] fn bench_bench(_b: &mut test::Bencher) { foo::foo() } "#); let output = p.cargo_process("bench").exec_with_output().unwrap(); let output = str::from_utf8(&output.stdout).unwrap(); assert!(output.contains("test bin_bench"), "bin_bench missing\n{}", output); assert!(output.contains("test lib_bench"), "lib_bench missing\n{}", output); assert!(output.contains("test bench_bench"), "bench_bench missing\n{}", output); }); test!(cargo_bench_failing_test { if !::is_nightly() { return } if !::can_panic() { return } let p = project("foo") .file("Cargo.toml", 
&basic_bin_manifest("foo")) .file("src/foo.rs", r#" #![feature(test)] extern crate test; fn hello() -> &'static str { "hello" } pub fn main() { println!("{}", hello()) } #[bench] fn bench_hello(_b: &mut test::Bencher) { assert_eq!(hello(), "nope") }"#); assert_that(p.cargo_process("build"), execs()); assert_that(&p.bin("foo"), existing_file()); assert_that(process(&p.bin("foo")), execs().with_stdout("hello\n")); assert_that(p.cargo("bench"), execs().with_stdout(&format!("\ {} foo v0.5.0 ({}) {} target[..]release[..]foo-[..] running 1 test test bench_hello ... ", COMPILING, p.url(), RUNNING)) .with_stderr("\ thread '
' panicked at 'assertion failed: \ `(left == right)` (left: \ `\"hello\"`, right: `\"nope\"`)', src[..]foo.rs:14 [..] ") .with_status(101)); }); test!(bench_with_lib_dep { if !::is_nightly() { return } let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [[bin]] name = "baz" path = "src/main.rs" "#) .file("src/lib.rs", r#" #![feature(test)] extern crate test; /// /// ```rust /// extern crate foo; /// fn main() { /// println!("{}", foo::foo()); /// } /// ``` /// pub fn foo(){} #[bench] fn lib_bench(_b: &mut test::Bencher) {} "#) .file("src/main.rs", " #![feature(test)] extern crate foo; extern crate test; fn main() {} #[bench] fn bin_bench(_b: &mut test::Bencher) {} "); assert_that(p.cargo_process("bench"), execs().with_stdout(&format!("\ {} foo v0.0.1 ({}) {running} target[..]release[..]baz-[..] running 1 test test bin_bench ... bench: [..] 0 ns/iter (+/- 0) test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured {running} target[..]release[..]foo-[..] running 1 test test lib_bench ... bench: [..] 0 ns/iter (+/- 0) test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured ", COMPILING, p.url(), running = RUNNING))) }); test!(bench_with_deep_lib_dep { if !::is_nightly() { return } let p = project("bar") .file("Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [dependencies.foo] path = "../foo" "#) .file("src/lib.rs", " #![feature(test)] extern crate foo; extern crate test; #[bench] fn bar_bench(_b: &mut test::Bencher) { foo::foo(); } "); let p2 = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", " #![feature(test)] extern crate test; pub fn foo() {} #[bench] fn foo_bench(_b: &mut test::Bencher) {} "); p2.build(); assert_that(p.cargo_process("bench"), execs().with_status(0) .with_stdout(&format!("\ {compiling} foo v0.0.1 ({dir}) {compiling} bar v0.0.1 ({dir}) {running} target[..] running 1 test test bar_bench ... bench: [..] 
0 ns/iter (+/- 0) test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured ", compiling = COMPILING, running = RUNNING, dir = p.url()))); }); test!(external_bench_explicit { if !::is_nightly() { return } let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [[bench]] name = "bench" path = "src/bench.rs" "#) .file("src/lib.rs", r#" #![feature(test)] extern crate test; pub fn get_hello() -> &'static str { "Hello" } #[bench] fn internal_bench(_b: &mut test::Bencher) {} "#) .file("src/bench.rs", r#" #![feature(test)] extern crate foo; extern crate test; #[bench] fn external_bench(_b: &mut test::Bencher) {} "#); assert_that(p.cargo_process("bench"), execs().with_stdout(&format!("\ {} foo v0.0.1 ({}) {running} target[..]release[..]bench-[..] running 1 test test external_bench ... bench: [..] 0 ns/iter (+/- 0) test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured {running} target[..]release[..]foo-[..] running 1 test test internal_bench ... bench: [..] 0 ns/iter (+/- 0) test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured ", COMPILING, p.url(), running = RUNNING))) }); test!(external_bench_implicit { if !::is_nightly() { return } let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", r#" #![feature(test)] extern crate test; pub fn get_hello() -> &'static str { "Hello" } #[bench] fn internal_bench(_b: &mut test::Bencher) {} "#) .file("benches/external.rs", r#" #![feature(test)] extern crate foo; extern crate test; #[bench] fn external_bench(_b: &mut test::Bencher) {} "#); assert_that(p.cargo_process("bench"), execs().with_stdout(&format!("\ {} foo v0.0.1 ({}) {running} target[..]release[..]external-[..] running 1 test test external_bench ... bench: [..] 0 ns/iter (+/- 0) test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured {running} target[..]release[..]foo-[..] running 1 test test internal_bench ... bench: [..] 
0 ns/iter (+/- 0) test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured ", COMPILING, p.url(), running = RUNNING))) }); test!(dont_run_examples { if !::is_nightly() { return } let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", r#" "#) .file("examples/dont-run-me-i-will-fail.rs", r#" fn main() { panic!("Examples should not be run by 'cargo test'"); } "#); assert_that(p.cargo_process("bench"), execs().with_status(0)); }); test!(pass_through_command_line { if !::is_nightly() { return } let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", " #![feature(test)] extern crate test; #[bench] fn foo(_b: &mut test::Bencher) {} #[bench] fn bar(_b: &mut test::Bencher) {} "); assert_that(p.cargo_process("bench").arg("bar"), execs().with_status(0) .with_stdout(&format!("\ {compiling} foo v0.0.1 ({dir}) {running} target[..]release[..]foo-[..] running 1 test test bar ... bench: [..] 0 ns/iter (+/- 0) test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured ", compiling = COMPILING, running = RUNNING, dir = p.url()))); assert_that(p.cargo("bench").arg("foo"), execs().with_status(0) .with_stdout(&format!("\ {running} target[..]release[..]foo-[..] running 1 test test foo ... bench: [..] 0 ns/iter (+/- 0) test result: ok. 
0 passed; 0 failed; 0 ignored; 1 measured ", running = RUNNING))); }); // Regression test for running cargo-bench twice with // tests in an rlib test!(cargo_bench_twice { if !::is_nightly() { return } let p = project("test_twice") .file("Cargo.toml", &basic_lib_manifest("test_twice")) .file("src/test_twice.rs", r#" #![crate_type = "rlib"] #![feature(test)] extern crate test; #[bench] fn dummy_bench(b: &mut test::Bencher) { } "#); p.cargo_process("build"); for _ in 0..2 { assert_that(p.cargo("bench"), execs().with_status(0)); } }); test!(lib_bin_same_name { if !::is_nightly() { return } let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [lib] name = "foo" [[bin]] name = "foo" "#) .file("src/lib.rs", " #![feature(test)] extern crate test; #[bench] fn lib_bench(_b: &mut test::Bencher) {} ") .file("src/main.rs", " #![feature(test)] extern crate foo; extern crate test; #[bench] fn bin_bench(_b: &mut test::Bencher) {} "); assert_that(p.cargo_process("bench"), execs().with_stdout(&format!("\ {} foo v0.0.1 ({}) {running} target[..]release[..]foo-[..] running 1 test test [..] ... bench: [..] 0 ns/iter (+/- 0) test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured {running} target[..]release[..]foo-[..] running 1 test test [..] ... bench: [..] 0 ns/iter (+/- 0) test result: ok. 
0 passed; 0 failed; 0 ignored; 1 measured ", COMPILING, p.url(), running = RUNNING))) }); test!(lib_with_standard_name { if !::is_nightly() { return } let p = project("foo") .file("Cargo.toml", r#" [package] name = "syntax" version = "0.0.1" authors = [] "#) .file("src/lib.rs", " #![feature(test)] extern crate test; /// ``` /// syntax::foo(); /// ``` pub fn foo() {} #[bench] fn foo_bench(_b: &mut test::Bencher) {} ") .file("benches/bench.rs", " #![feature(test)] extern crate syntax; extern crate test; #[bench] fn bench(_b: &mut test::Bencher) { syntax::foo() } "); assert_that(p.cargo_process("bench"), execs().with_status(0) .with_stdout(&format!("\ {compiling} syntax v0.0.1 ({dir}) {running} target[..]release[..]bench-[..] running 1 test test bench ... bench: [..] 0 ns/iter (+/- 0) test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured {running} target[..]release[..]syntax-[..] running 1 test test foo_bench ... bench: [..] 0 ns/iter (+/- 0) test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured ", compiling = COMPILING, running = RUNNING, dir = p.url()))); }); test!(lib_with_standard_name2 { if !::is_nightly() { return } let p = project("foo") .file("Cargo.toml", r#" [package] name = "syntax" version = "0.0.1" authors = [] [lib] name = "syntax" bench = false doctest = false "#) .file("src/lib.rs", " pub fn foo() {} ") .file("src/main.rs", " #![feature(test)] extern crate syntax; extern crate test; fn main() {} #[bench] fn bench(_b: &mut test::Bencher) { syntax::foo() } "); assert_that(p.cargo_process("bench"), execs().with_status(0) .with_stdout(&format!("\ {compiling} syntax v0.0.1 ({dir}) {running} target[..]release[..]syntax-[..] running 1 test test bench ... bench: [..] 0 ns/iter (+/- 0) test result: ok. 
0 passed; 0 failed; 0 ignored; 1 measured ", compiling = COMPILING, running = RUNNING, dir = p.url()))); }); test!(bench_dylib { if !::is_nightly() { return } let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [lib] name = "foo" crate_type = ["dylib"] [dependencies.bar] path = "bar" "#) .file("src/lib.rs", r#" #![feature(test)] extern crate bar as the_bar; extern crate test; pub fn bar() { the_bar::baz(); } #[bench] fn foo(_b: &mut test::Bencher) {} "#) .file("benches/bench.rs", r#" #![feature(test)] extern crate foo as the_foo; extern crate test; #[bench] fn foo(_b: &mut test::Bencher) { the_foo::bar(); } "#) .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [lib] name = "bar" crate_type = ["dylib"] "#) .file("bar/src/lib.rs", " pub fn baz() {} "); assert_that(p.cargo_process("bench").arg("-v"), execs().with_status(0) .with_stdout(&format!("\ {compiling} bar v0.0.1 ({dir}) {running} [..] -C opt-level=3 [..] {compiling} foo v0.0.1 ({dir}) {running} [..] -C opt-level=3 [..] {running} [..] -C opt-level=3 [..] {running} [..] -C opt-level=3 [..] {running} [..]target[..]release[..]bench-[..] running 1 test test foo ... bench: [..] 0 ns/iter (+/- 0) test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured {running} [..]target[..]release[..]foo-[..] running 1 test test foo ... bench: [..] 0 ns/iter (+/- 0) test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured ", compiling = COMPILING, running = RUNNING, dir = p.url()))); p.root().move_into_the_past().unwrap(); assert_that(p.cargo("bench").arg("-v"), execs().with_status(0) .with_stdout(&format!("\ {fresh} bar v0.0.1 ({dir}) {fresh} foo v0.0.1 ({dir}) {running} [..]target[..]release[..]bench-[..] running 1 test test foo ... bench: [..] 0 ns/iter (+/- 0) test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured {running} [..]target[..]release[..]foo-[..] running 1 test test foo ... bench: [..] 0 ns/iter (+/- 0) test result: ok. 
0 passed; 0 failed; 0 ignored; 1 measured ", fresh = FRESH, running = RUNNING, dir = p.url()))); }); test!(bench_twice_with_build_cmd { if !::is_nightly() { return } let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" "#) .file("build.rs", "fn main() {}") .file("src/lib.rs", " #![feature(test)] extern crate test; #[bench] fn foo(_b: &mut test::Bencher) {} "); assert_that(p.cargo_process("bench"), execs().with_status(0) .with_stdout(&format!("\ {compiling} foo v0.0.1 ({dir}) {running} target[..]release[..]foo-[..] running 1 test test foo ... bench: [..] 0 ns/iter (+/- 0) test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured ", compiling = COMPILING, running = RUNNING, dir = p.url()))); assert_that(p.cargo("bench"), execs().with_status(0) .with_stdout(&format!("\ {running} target[..]release[..]foo-[..] running 1 test test foo ... bench: [..] 0 ns/iter (+/- 0) test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured ", running = RUNNING))); }); test!(bench_with_examples { if !::is_nightly() { return } let p = project("testbench") .file("Cargo.toml", r#" [package] name = "testbench" version = "6.6.6" authors = [] [[example]] name = "teste1" [[bench]] name = "testb1" "#) .file("src/lib.rs", r#" #![feature(test)] extern crate test; use test::Bencher; pub fn f1() { println!("f1"); } pub fn f2() {} #[bench] fn bench_bench1(_b: &mut Bencher) { f2(); } "#) .file("benches/testb1.rs", " #![feature(test)] extern crate testbench; extern crate test; use test::Bencher; #[bench] fn bench_bench2(_b: &mut Bencher) { testbench::f2(); } ") .file("examples/teste1.rs", r#" extern crate testbench; fn main() { println!("example1"); testbench::f1(); } "#); assert_that(p.cargo_process("bench").arg("-v"), execs().with_status(0) .with_stdout(&format!("\ {compiling} testbench v6.6.6 ({url}) {running} `rustc [..]` {running} `rustc [..]` {running} `rustc [..]` {running} `{dir}[..]target[..]release[..]testb1-[..] 
--bench` running 1 test test bench_bench2 ... bench: [..] 0 ns/iter (+/- 0) test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured {running} `{dir}[..]target[..]release[..]testbench-[..] --bench` running 1 test test bench_bench1 ... bench: [..] 0 ns/iter (+/- 0) test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured ", compiling = COMPILING, running = RUNNING, dir = p.root().display(), url = p.url()))); }); test!(test_a_bench { if !::is_nightly() { return } let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" authors = [] version = "0.1.0" [lib] name = "foo" test = false doctest = false [[bench]] name = "b" test = true "#) .file("src/lib.rs", "") .file("benches/b.rs", r#" #[test] fn foo() {} "#); assert_that(p.cargo_process("test"), execs().with_status(0) .with_stdout(&format!("\ {compiling} foo v0.1.0 ([..]) {running} target[..]debug[..]b-[..] running 1 test test foo ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, running = RUNNING))); }); test!(test_bench_multiple_packages { if !::is_nightly() { return } let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" authors = [] version = "0.1.0" [dependencies.bar] path = "../bar" [dependencies.baz] path = "../baz" "#) .file("src/lib.rs", ""); let bar = project("bar") .file("Cargo.toml", r#" [project] name = "bar" authors = [] version = "0.1.0" [[bench]] name = "bbar" test = true "#) .file("src/lib.rs", "") .file("benches/bbar.rs", r#" #![feature(test)] extern crate test; use test::Bencher; #[bench] fn bench_bar(_b: &mut Bencher) {} "#); bar.build(); let baz = project("baz") .file("Cargo.toml", r#" [project] name = "baz" authors = [] version = "0.1.0" [[bench]] name = "bbaz" test = true "#) .file("src/lib.rs", "") .file("benches/bbaz.rs", r#" #![feature(test)] extern crate test; use test::Bencher; #[bench] fn bench_baz(_b: &mut Bencher) {} "#); baz.build(); assert_that(p.cargo_process("bench").arg("-p").arg("bar").arg("-p").arg("baz"), 
execs().with_status(0) .with_stdout_contains(&format!("\ {running} target[..]release[..]bbaz-[..] running 1 test test bench_baz ... bench: 0 ns/iter (+/- 0) test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured ", running = RUNNING)) .with_stdout_contains(&format!("\ {running} target[..]release[..]bbar-[..] running 1 test test bench_bar ... bench: 0 ns/iter (+/- 0) test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured ", running = RUNNING))); }); cargo-0.8.0/tests/test_cargo_build_auth.rs000066400000000000000000000135021264656333200206670ustar00rootroot00000000000000use std::collections::HashSet; use std::io::prelude::*; use std::net::TcpListener; use std::thread; use bufstream::BufStream; use git2; use support::{project, execs, UPDATING}; use support::paths; use hamcrest::assert_that; fn setup() { } // Test that HTTP auth is offered from `credential.helper` test!(http_auth_offered { let a = TcpListener::bind("127.0.0.1:0").unwrap(); let addr = a.local_addr().unwrap(); fn headers(rdr: &mut BufRead) -> HashSet { let valid = ["GET", "Authorization", "Accept", "User-Agent"]; rdr.lines().map(|s| s.unwrap()) .take_while(|s| s.len() > 2) .map(|s| s.trim().to_string()) .filter(|s| { valid.iter().any(|prefix| s.starts_with(*prefix)) }) .collect() } let t = thread::spawn(move|| { let mut s = BufStream::new(a.accept().unwrap().0); let req = headers(&mut s); s.write_all(b"\ HTTP/1.1 401 Unauthorized\r\n\ WWW-Authenticate: Basic realm=\"wheee\"\r\n \r\n\ ").unwrap(); assert_eq!(req, vec![ "GET /foo/bar/info/refs?service=git-upload-pack HTTP/1.1", "Accept: */*", "User-Agent: git/1.0 (libgit2 0.23.0)", ].into_iter().map(|s| s.to_string()).collect()); drop(s); let mut s = BufStream::new(a.accept().unwrap().0); let req = headers(&mut s); s.write_all(b"\ HTTP/1.1 401 Unauthorized\r\n\ WWW-Authenticate: Basic realm=\"wheee\"\r\n \r\n\ ").unwrap(); assert_eq!(req, vec![ "GET /foo/bar/info/refs?service=git-upload-pack HTTP/1.1", "Authorization: Basic Zm9vOmJhcg==", "Accept: 
*/*", "User-Agent: git/1.0 (libgit2 0.23.0)", ].into_iter().map(|s| s.to_string()).collect()); }); let script = project("script") .file("Cargo.toml", r#" [project] name = "script" version = "0.0.1" authors = [] "#) .file("src/main.rs", r#" fn main() { println!("username=foo"); println!("password=bar"); } "#); assert_that(script.cargo_process("build").arg("-v"), execs().with_status(0)); let script = script.bin("script"); let config = paths::home().join(".gitconfig"); let mut config = git2::Config::open(&config).unwrap(); config.set_str("credential.helper", &script.display().to_string()).unwrap(); let p = project("foo") .file("Cargo.toml", &format!(r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] git = "http://127.0.0.1:{}/foo/bar" "#, addr.port())) .file("src/main.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(101).with_stdout(&format!("\ {updating} git repository `http://{addr}/foo/bar` ", updating = UPDATING, addr = addr, )) .with_stderr(&format!("\ Unable to update http://{addr}/foo/bar Caused by: failed to clone into: [..] Caused by: failed to authenticate when downloading repository To learn more, run the command again with --verbose. ", addr = addr))); t.join().ok().unwrap(); }); // Boy, sure would be nice to have a TLS implementation in rust! test!(https_something_happens { let a = TcpListener::bind("127.0.0.1:0").unwrap(); let addr = a.local_addr().unwrap(); let t = thread::spawn(move|| { drop(a.accept().unwrap()); }); let p = project("foo") .file("Cargo.toml", &format!(r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] git = "https://127.0.0.1:{}/foo/bar" "#, addr.port())) .file("src/main.rs", ""); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(101).with_stdout(&format!("\ {updating} git repository `https://{addr}/foo/bar` ", updating = UPDATING, addr = addr, )) .with_stderr(&format!("\ Unable to update https://{addr}/foo/bar Caused by: failed to clone into: [..] 
Caused by: {errmsg} ", addr = addr, errmsg = if cfg!(windows) { "[[..]] failed to send request: [..]\n" } else if cfg!(target_os = "macos") { // OSX is difficult to tests as some builds may use // Security.framework and others may use OpenSSL. In that case let's // just not verify the error message here. "[..]" } else { "[[..]] SSL error: [..]" }))); t.join().ok().unwrap(); }); // Boy, sure would be nice to have an SSH implementation in rust! test!(ssh_something_happens { let a = TcpListener::bind("127.0.0.1:0").unwrap(); let addr = a.local_addr().unwrap(); let t = thread::spawn(move|| { drop(a.accept().unwrap()); }); let p = project("foo") .file("Cargo.toml", &format!(r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] git = "ssh://127.0.0.1:{}/foo/bar" "#, addr.port())) .file("src/main.rs", ""); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(101).with_stdout(&format!("\ {updating} git repository `ssh://{addr}/foo/bar` ", updating = UPDATING, addr = addr, )) .with_stderr(&format!("\ Unable to update ssh://{addr}/foo/bar Caused by: failed to clone into: [..] 
Caused by: [[..]] Failed to start SSH session: Failed getting banner ", addr = addr))); t.join().ok().unwrap(); }); cargo-0.8.0/tests/test_cargo_build_lib.rs000066400000000000000000000045211264656333200204750ustar00rootroot00000000000000use std::path::MAIN_SEPARATOR as SEP; use support::{basic_bin_manifest, execs, project, ProjectBuilder}; use support::{COMPILING, RUNNING}; use hamcrest::{assert_that}; fn setup() { } fn verbose_output_for_lib(p: &ProjectBuilder) -> String { format!("\ {compiling} {name} v{version} ({url}) {running} `rustc src{sep}lib.rs --crate-name {name} --crate-type lib -g \ --out-dir {dir}{sep}target{sep}debug \ --emit=dep-info,link \ -L dependency={dir}{sep}target{sep}debug \ -L dependency={dir}{sep}target{sep}debug{sep}deps` ", running = RUNNING, compiling = COMPILING, sep = SEP, dir = p.root().display(), url = p.url(), name = "foo", version = "0.0.1") } test!(build_lib_only { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = ["wycats@example.com"] "#) .file("src/main.rs", r#" fn main() {} "#) .file("src/lib.rs", r#" "#); assert_that(p.cargo_process("build").arg("--lib").arg("-v"), execs() .with_status(0) .with_stdout(verbose_output_for_lib(&p))); }); test!(build_with_no_lib { let p = project("foo") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/main.rs", r#" fn main() {} "#); assert_that(p.cargo_process("build").arg("--lib"), execs().with_status(101) .with_stderr("no library targets found")); }); test!(build_with_relative_cargo_home_path { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = ["wycats@example.com"] [dependencies] "test-dependency" = { path = "src/test_dependency" } "#) .file("src/main.rs", r#" fn main() {} "#) .file("src/test_dependency/src/lib.rs", r#" "#) .file("src/test_dependency/Cargo.toml", r#" [package] name = "test-dependency" version = "0.0.1" authors = ["wycats@example.com"] "#); 
assert_that(p.cargo_process("build").env("CARGO_HOME", "./cargo_home/"), execs() .with_status(0)); }); cargo-0.8.0/tests/test_cargo_clean.rs000066400000000000000000000154621264656333200176400ustar00rootroot00000000000000use std::env; use support::{git, project, execs, main_file, basic_bin_manifest}; use support::{COMPILING, RUNNING}; use support::registry::Package; use hamcrest::{assert_that, existing_dir, existing_file, is_not}; fn setup() { } test!(cargo_clean_simple { let p = project("foo") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])); assert_that(p.cargo_process("build"), execs().with_status(0)); assert_that(&p.build_dir(), existing_dir()); assert_that(p.cargo("clean"), execs().with_status(0)); assert_that(&p.build_dir(), is_not(existing_dir())); }); test!(different_dir { let p = project("foo") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .file("src/bar/a.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(0)); assert_that(&p.build_dir(), existing_dir()); assert_that(p.cargo("clean").cwd(&p.root().join("src")), execs().with_status(0).with_stdout("")); assert_that(&p.build_dir(), is_not(existing_dir())); }); test!(clean_multiple_packages { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.d1] path = "d1" [dependencies.d2] path = "d2" [[bin]] name = "foo" "#) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .file("d1/Cargo.toml", r#" [package] name = "d1" version = "0.0.1" authors = [] [[bin]] name = "d1" "#) .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }") .file("d2/Cargo.toml", r#" [package] name = "d2" version = "0.0.1" authors = [] [[bin]] name = "d2" "#) .file("d2/src/main.rs", "fn main() { println!(\"d2\"); }"); p.build(); assert_that(p.cargo_process("build").arg("-p").arg("d1").arg("-p").arg("d2") .arg("-p").arg("foo"), execs().with_status(0)); let 
d1_path = &p.build_dir().join("debug").join("deps") .join(format!("d1{}", env::consts::EXE_SUFFIX)); let d2_path = &p.build_dir().join("debug").join("deps") .join(format!("d2{}", env::consts::EXE_SUFFIX)); assert_that(&p.bin("foo"), existing_file()); assert_that(d1_path, existing_file()); assert_that(d2_path, existing_file()); assert_that(p.cargo("clean").arg("-p").arg("d1").arg("-p").arg("d2") .cwd(&p.root().join("src")), execs().with_status(0).with_stdout("")); assert_that(&p.bin("foo"), existing_file()); assert_that(d1_path, is_not(existing_file())); assert_that(d2_path, is_not(existing_file())); }); test!(clean_release { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] a = { path = "a" } "#) .file("src/main.rs", "fn main() {}") .file("a/Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] "#) .file("a/src/lib.rs", ""); p.build(); assert_that(p.cargo_process("build").arg("--release"), execs().with_status(0)); assert_that(p.cargo("clean").arg("-p").arg("foo"), execs().with_status(0)); assert_that(p.cargo("build").arg("--release"), execs().with_status(0).with_stdout("")); assert_that(p.cargo("clean").arg("-p").arg("foo").arg("--release"), execs().with_status(0)); assert_that(p.cargo("build").arg("--release"), execs().with_status(0).with_stdout(&format!("\ {compiling} foo v0.0.1 ([..]) ", compiling = COMPILING))); }); test!(build_script { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" "#) .file("src/main.rs", "fn main() {}") .file("build.rs", r#" use std::path::PathBuf; use std::env; fn main() { let out = PathBuf::from(env::var_os("OUT_DIR").unwrap()); if env::var("FIRST").is_ok() { std::fs::File::create(out.join("out")).unwrap(); } else { assert!(!std::fs::metadata(out.join("out")).is_ok()); } } "#) .file("a/src/lib.rs", ""); p.build(); assert_that(p.cargo_process("build").env("FIRST", "1"), 
execs().with_status(0)); assert_that(p.cargo("clean").arg("-p").arg("foo"), execs().with_status(0)); assert_that(p.cargo("build").arg("-v"), execs().with_status(0).with_stdout(&format!("\ {compiling} foo v0.0.1 ([..]) {running} `rustc build.rs [..]` {running} `[..]build-script-build[..]` {running} `rustc src[..]main.rs [..]` ", compiling = COMPILING, running = RUNNING))); }); test!(clean_git { let git = git::new("dep", |project| { project.file("Cargo.toml", r#" [project] name = "dep" version = "0.5.0" authors = [] "#) .file("src/lib.rs", "") }).unwrap(); let p = project("foo") .file("Cargo.toml", &format!(r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] dep = {{ git = '{}' }} "#, git.url())) .file("src/main.rs", "fn main() {}"); p.build(); assert_that(p.cargo_process("build"), execs().with_status(0)); assert_that(p.cargo("clean").arg("-p").arg("dep"), execs().with_status(0).with_stdout("")); assert_that(p.cargo("build"), execs().with_status(0)); }); test!(registry { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "0.1" "#) .file("src/main.rs", "fn main() {}"); p.build(); Package::new("bar", "0.1.0").publish(); assert_that(p.cargo_process("build"), execs().with_status(0)); assert_that(p.cargo("clean").arg("-p").arg("bar"), execs().with_status(0).with_stdout("")); assert_that(p.cargo("build"), execs().with_status(0)); }); cargo-0.8.0/tests/test_cargo_compile.rs000066400000000000000000001566751264656333200202220ustar00rootroot00000000000000use std::env; use std::fs::{self, File}; use std::io::prelude::*; use std::thread; use tempdir::TempDir; use support::{project, execs, main_file, basic_bin_manifest}; use support::{COMPILING, RUNNING, ProjectBuilder}; use hamcrest::{assert_that, existing_file, is_not}; use support::paths::{CargoPathExt,root}; use cargo::util::process; fn setup() { } test!(cargo_compile_simple { let p = project("foo") .file("Cargo.toml", 
&basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])); assert_that(p.cargo_process("build"), execs()); assert_that(&p.bin("foo"), existing_file()); assert_that(process(&p.bin("foo")), execs().with_stdout("i am foo\n")); }); test!(cargo_compile_manifest_path { let p = project("foo") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])); assert_that(p.cargo_process("build") .arg("--manifest-path").arg("foo/Cargo.toml") .cwd(p.root().parent().unwrap()), execs().with_status(0)); assert_that(&p.bin("foo"), existing_file()); }); test!(cargo_compile_with_invalid_manifest { let p = project("foo") .file("Cargo.toml", ""); assert_that(p.cargo_process("build"), execs() .with_status(101) .with_stderr("\ failed to parse manifest at `[..]` Caused by: no `package` or `project` section found. ")) }); test!(cargo_compile_with_invalid_manifest2 { let p = project("foo") .file("Cargo.toml", r" [project] foo = bar "); assert_that(p.cargo_process("build"), execs() .with_status(101) .with_stderr("\ failed to parse manifest at `[..]` Caused by: could not parse input as TOML Cargo.toml:3:19-3:20 expected a value ")) }); test!(cargo_compile_with_invalid_manifest3 { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/Cargo.toml", "a = bar"); assert_that(p.cargo_process("build").arg("--manifest-path") .arg("src/Cargo.toml"), execs() .with_status(101) .with_stderr("\ failed to parse manifest at `[..]` Caused by: could not parse input as TOML\n\ src[..]Cargo.toml:1:5-1:6 expected a value\n\n")) }); test!(cargo_compile_with_invalid_version { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" authors = [] version = "1.0" "#); assert_that(p.cargo_process("build"), execs() .with_status(101) .with_stderr("\ failed to parse manifest at `[..]` Caused by: cannot parse '1.0' as a semver for the key `project.version` ")) }); 
test!(cargo_compile_with_invalid_package_name { let p = project("foo") .file("Cargo.toml", r#" [package] name = "" authors = [] version = "0.0.0" "#); assert_that(p.cargo_process("build"), execs() .with_status(101) .with_stderr("\ failed to parse manifest at `[..]` Caused by: package name cannot be an empty string. ")) }); test!(cargo_compile_with_invalid_bin_target_name { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.0" [[bin]] name = "" "#); assert_that(p.cargo_process("build"), execs() .with_status(101) .with_stderr("\ failed to parse manifest at `[..]` Caused by: binary target names cannot be empty. ")) }); test!(cargo_compile_with_forbidden_bin_target_name { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.0" [[bin]] name = "build" "#); assert_that(p.cargo_process("build"), execs() .with_status(101) .with_stderr("\ failed to parse manifest at `[..]` Caused by: the binary target name `build` is forbidden ")) }); test!(cargo_compile_with_invalid_lib_target_name { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.0" [lib] name = "" "#); assert_that(p.cargo_process("build"), execs() .with_status(101) .with_stderr("\ failed to parse manifest at `[..]` Caused by: library target names cannot be empty. 
")) }); test!(cargo_compile_without_manifest { let tmpdir = TempDir::new("cargo").unwrap(); let p = ProjectBuilder::new("foo", tmpdir.path().to_path_buf()); assert_that(p.cargo_process("build"), execs().with_status(101) .with_stderr("\ could not find `Cargo.toml` in `[..]` or any parent directory ")); }); test!(cargo_compile_with_invalid_code { let p = project("foo") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", "invalid rust code!"); assert_that(p.cargo_process("build"), execs() .with_status(101) .with_stderr_contains("\ src[..]foo.rs:1:1: 1:8 error: expected item[..]found `invalid` src[..]foo.rs:1 invalid rust code! ^~~~~~~ ") .with_stderr_contains("\ Could not compile `foo`. To learn more, run the command again with --verbose.\n")); assert_that(&p.root().join("Cargo.lock"), existing_file()); }); test!(cargo_compile_with_invalid_code_in_deps { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" [dependencies.baz] path = "../baz" "#) .file("src/main.rs", "invalid rust code!"); let bar = project("bar") .file("Cargo.toml", &basic_bin_manifest("bar")) .file("src/lib.rs", "invalid rust code!"); let baz = project("baz") .file("Cargo.toml", &basic_bin_manifest("baz")) .file("src/lib.rs", "invalid rust code!"); bar.build(); baz.build(); assert_that(p.cargo_process("build"), execs().with_status(101)); }); test!(cargo_compile_with_warnings_in_the_root_package { let p = project("foo") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", "fn main() {} fn dead() {}"); assert_that(p.cargo_process("build"), execs() .with_stderr("\ src[..]foo.rs:1:14: 1:26 warning: function is never used: `dead`, \ #[warn(dead_code)] on by default src[..]foo.rs:1 fn main() {} fn dead() {} [..] 
^~~~~~~~~~~~ ")); }); test!(cargo_compile_with_warnings_in_a_dep_package { let mut p = project("foo"); p = p .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] path = "bar" [[bin]] name = "foo" "#) .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) .file("bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] [lib] name = "bar" "#) .file("bar/src/bar.rs", r#" pub fn gimme() -> &'static str { "test passed" } fn dead() {} "#); assert_that(p.cargo_process("build"), execs() .with_stdout(&format!("{} bar v0.5.0 ({})\n\ {} foo v0.5.0 ({})\n", COMPILING, p.url(), COMPILING, p.url())) .with_stderr("\ [..]warning: function is never used: `dead`[..] [..]fn dead() {} [..]^~~~~~~~~~~~ ")); assert_that(&p.bin("foo"), existing_file()); assert_that( process(&p.bin("foo")), execs().with_stdout("test passed\n")); }); test!(cargo_compile_with_nested_deps_inferred { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] path = 'bar' [[bin]] name = "foo" "#) .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) .file("bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.baz] path = "../baz" "#) .file("bar/src/lib.rs", r#" extern crate baz; pub fn gimme() -> String { baz::gimme() } "#) .file("baz/Cargo.toml", r#" [project] name = "baz" version = "0.5.0" authors = ["wycats@example.com"] "#) .file("baz/src/lib.rs", r#" pub fn gimme() -> String { "test passed".to_string() } "#); p.cargo_process("build") .exec_with_output() .unwrap(); assert_that(&p.bin("foo"), existing_file()); assert_that( process(&p.bin("foo")), execs().with_stdout("test passed\n")); }); test!(cargo_compile_with_nested_deps_correct_bin { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = 
["wycats@example.com"] [dependencies.bar] path = "bar" [[bin]] name = "foo" "#) .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) .file("bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.baz] path = "../baz" "#) .file("bar/src/lib.rs", r#" extern crate baz; pub fn gimme() -> String { baz::gimme() } "#) .file("baz/Cargo.toml", r#" [project] name = "baz" version = "0.5.0" authors = ["wycats@example.com"] "#) .file("baz/src/lib.rs", r#" pub fn gimme() -> String { "test passed".to_string() } "#); p.cargo_process("build") .exec_with_output() .unwrap(); assert_that(&p.bin("foo"), existing_file()); assert_that( process(&p.bin("foo")), execs().with_stdout("test passed\n")); }); test!(cargo_compile_with_nested_deps_shorthand { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] path = "bar" [[bin]] name = "foo" "#) .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) .file("bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.baz] path = "../baz" [lib] name = "bar" "#) .file("bar/src/bar.rs", r#" extern crate baz; pub fn gimme() -> String { baz::gimme() } "#) .file("baz/Cargo.toml", r#" [project] name = "baz" version = "0.5.0" authors = ["wycats@example.com"] [lib] name = "baz" "#) .file("baz/src/baz.rs", r#" pub fn gimme() -> String { "test passed".to_string() } "#); p.cargo_process("build") .exec_with_output() .unwrap(); assert_that(&p.bin("foo"), existing_file()); assert_that( process(&p.bin("foo")), execs().with_stdout("test passed\n")); }); test!(cargo_compile_with_nested_deps_longhand { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] path = "bar" version = "0.5.0" [[bin]] name = "foo" "#) .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, 
&["bar"])) .file("bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.baz] path = "../baz" version = "0.5.0" [lib] name = "bar" "#) .file("bar/src/bar.rs", r#" extern crate baz; pub fn gimme() -> String { baz::gimme() } "#) .file("baz/Cargo.toml", r#" [project] name = "baz" version = "0.5.0" authors = ["wycats@example.com"] [lib] name = "baz" "#) .file("baz/src/baz.rs", r#" pub fn gimme() -> String { "test passed".to_string() } "#); assert_that(p.cargo_process("build"), execs()); assert_that(&p.bin("foo"), existing_file()); assert_that(process(&p.bin("foo")), execs().with_stdout("test passed\n")); }); // Check that Cargo gives a sensible error if a dependency can't be found // because of a name mismatch. test!(cargo_compile_with_dep_name_mismatch { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = ["wycats@example.com"] [[bin]] name = "foo" [dependencies.notquitebar] path = "bar" "#) .file("src/foo.rs", &main_file(r#""i am foo""#, &["bar"])) .file("bar/Cargo.toml", &basic_bin_manifest("bar")) .file("bar/src/bar.rs", &main_file(r#""i am bar""#, &[])); assert_that(p.cargo_process("build"), execs().with_status(101).with_stderr(&format!( r#"no matching package named `notquitebar` found (required by `foo`) location searched: {proj_dir} version required: * "#, proj_dir = p.url()))); }); test!(compile_path_dep_then_change_version { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" "#) .file("src/lib.rs", "") .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] "#) .file("bar/src/lib.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(0)); File::create(&p.root().join("bar/Cargo.toml")).unwrap().write_all(br#" [package] name = "bar" version = "0.0.2" authors = [] "#).unwrap(); assert_that(p.cargo("build"), execs().with_status(101).with_stderr("\ 
no matching package named `bar` found (required by `foo`) location searched: [..] version required: = 0.0.1 versions found: 0.0.2 consider running `cargo update` to update a path dependency's locked version ")); }); test!(ignores_carriage_return_in_lockfile { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.1" "#) .file("src/main.rs", r#" mod a; fn main() {} "#) .file("src/a.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(0)); let lockfile = p.root().join("Cargo.lock"); let mut lock = String::new(); File::open(&lockfile).unwrap().read_to_string(&mut lock).unwrap(); let lock = lock.replace("\n", "\r\n"); File::create(&lockfile).unwrap().write_all(lock.as_bytes()).unwrap(); assert_that(p.cargo("build"), execs().with_status(0)); }); test!(crate_version_env_vars { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.1-alpha.1" authors = ["wycats@example.com"] "#) .file("src/main.rs", r#" extern crate foo; static VERSION_MAJOR: &'static str = env!("CARGO_PKG_VERSION_MAJOR"); static VERSION_MINOR: &'static str = env!("CARGO_PKG_VERSION_MINOR"); static VERSION_PATCH: &'static str = env!("CARGO_PKG_VERSION_PATCH"); static VERSION_PRE: &'static str = env!("CARGO_PKG_VERSION_PRE"); static VERSION: &'static str = env!("CARGO_PKG_VERSION"); static CARGO_MANIFEST_DIR: &'static str = env!("CARGO_MANIFEST_DIR"); fn main() { let s = format!("{}-{}-{} @ {} in {}", VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH, VERSION_PRE, CARGO_MANIFEST_DIR); assert_eq!(s, foo::version()); println!("{}", s); assert_eq!(s, VERSION); } "#) .file("src/lib.rs", r#" pub fn version() -> String { format!("{}-{}-{} @ {} in {}", env!("CARGO_PKG_VERSION_MAJOR"), env!("CARGO_PKG_VERSION_MINOR"), env!("CARGO_PKG_VERSION_PATCH"), env!("CARGO_PKG_VERSION_PRE"), env!("CARGO_MANIFEST_DIR")) } "#); println!("build"); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0)); println!("bin"); 
assert_that(process(&p.bin("foo")), execs().with_stdout(&format!("0-5-1 @ alpha.1 in {}\n", p.root().display()))); println!("test"); assert_that(p.cargo("test").arg("-v"), execs().with_status(0)); }); // this is testing that src/.rs still works (for now) test!(many_crate_types_old_style_lib_location { let mut p = project("foo"); p = p .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [lib] name = "foo" crate_type = ["rlib", "dylib"] "#) .file("src/foo.rs", r#" pub fn foo() {} "#); assert_that(p.cargo_process("build"), execs().with_status(0)); assert_that(&p.root().join("target/debug/libfoo.rlib"), existing_file()); let fname = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX); assert_that(&p.root().join("target/debug").join(&fname), existing_file()); }); test!(many_crate_types_correct { let mut p = project("foo"); p = p .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [lib] name = "foo" crate_type = ["rlib", "dylib"] "#) .file("src/lib.rs", r#" pub fn foo() {} "#); assert_that(p.cargo_process("build"), execs().with_status(0)); assert_that(&p.root().join("target/debug/libfoo.rlib"), existing_file()); let fname = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX); assert_that(&p.root().join("target/debug").join(&fname), existing_file()); }); test!(unused_keys { let mut p = project("foo"); p = p .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] bulid = "foo" [lib] name = "foo" "#) .file("src/foo.rs", r#" pub fn foo() {} "#); assert_that(p.cargo_process("build"), execs().with_status(0) .with_stderr("unused manifest key: project.bulid\n")); let mut p = project("bar"); p = p .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [lib] name = "foo" build = "foo" "#) .file("src/foo.rs", r#" pub fn foo() {} "#); assert_that(p.cargo_process("build"), 
execs().with_status(0) .with_stderr("unused manifest key: lib.build\n")); }); test!(self_dependency { let mut p = project("foo"); p = p .file("Cargo.toml", r#" [package] name = "test" version = "0.0.0" authors = [] [dependencies.test] path = "." [lib] name = "test" "#) .file("src/test.rs", "fn main() {}"); assert_that(p.cargo_process("build"), execs().with_status(101) .with_stderr("\ cyclic package dependency: package `test v0.0.0 ([..])` depends on itself ")); }); test!(ignore_broken_symlinks { // windows and symlinks don't currently agree that well if cfg!(windows) { return } let p = project("foo") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .symlink("Notafile", "bar"); assert_that(p.cargo_process("build"), execs()); assert_that(&p.bin("foo"), existing_file()); assert_that(process(&p.bin("foo")), execs().with_stdout("i am foo\n")); }); test!(missing_lib_and_bin { let mut p = project("foo"); p = p .file("Cargo.toml", r#" [package] name = "test" version = "0.0.0" authors = [] "#); assert_that(p.cargo_process("build"), execs().with_status(101) .with_stderr("\ failed to parse manifest at `[..]Cargo.toml` Caused by: no targets specified in the manifest either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] section must be present\n")); }); test!(lto_build { // FIXME: currently this hits a linker bug on 32-bit MSVC if cfg!(all(target_env = "msvc", target_pointer_width = "32")) { return } let mut p = project("foo"); p = p .file("Cargo.toml", r#" [package] name = "test" version = "0.0.0" authors = [] [profile.release] lto = true "#) .file("src/main.rs", "fn main() {}"); assert_that(p.cargo_process("build").arg("-v").arg("--release"), execs().with_status(0).with_stdout(&format!("\ {compiling} test v0.0.0 ({url}) {running} `rustc src[..]main.rs --crate-name test --crate-type bin \ -C opt-level=3 \ -C lto \ --out-dir {dir}[..]target[..]release \ --emit=dep-info,link \ -L dependency={dir}[..]target[..]release \ 
-L dependency={dir}[..]target[..]release[..]deps` ", running = RUNNING, compiling = COMPILING, dir = p.root().display(), url = p.url(), ))); }); test!(verbose_build { let mut p = project("foo"); p = p .file("Cargo.toml", r#" [package] name = "test" version = "0.0.0" authors = [] "#) .file("src/lib.rs", ""); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0).with_stdout(&format!("\ {compiling} test v0.0.0 ({url}) {running} `rustc src[..]lib.rs --crate-name test --crate-type lib -g \ --out-dir {dir}[..]target[..]debug \ --emit=dep-info,link \ -L dependency={dir}[..]target[..]debug \ -L dependency={dir}[..]target[..]debug[..]deps` ", running = RUNNING, compiling = COMPILING, dir = p.root().display(), url = p.url(), ))); }); test!(verbose_release_build { let mut p = project("foo"); p = p .file("Cargo.toml", r#" [package] name = "test" version = "0.0.0" authors = [] "#) .file("src/lib.rs", ""); assert_that(p.cargo_process("build").arg("-v").arg("--release"), execs().with_status(0).with_stdout(&format!("\ {compiling} test v0.0.0 ({url}) {running} `rustc src[..]lib.rs --crate-name test --crate-type lib \ -C opt-level=3 \ --out-dir {dir}[..]target[..]release \ --emit=dep-info,link \ -L dependency={dir}[..]target[..]release \ -L dependency={dir}[..]target[..]release[..]deps` ", running = RUNNING, compiling = COMPILING, dir = p.root().display(), url = p.url(), ))); }); test!(verbose_release_build_deps { let mut p = project("foo"); p = p .file("Cargo.toml", r#" [package] name = "test" version = "0.0.0" authors = [] [dependencies.foo] path = "foo" "#) .file("src/lib.rs", "") .file("foo/Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [lib] name = "foo" crate_type = ["dylib", "rlib"] "#) .file("foo/src/lib.rs", ""); assert_that(p.cargo_process("build").arg("-v").arg("--release"), execs().with_status(0).with_stdout(&format!("\ {compiling} foo v0.0.0 ({url}) {running} `rustc foo[..]src[..]lib.rs --crate-name foo \ --crate-type dylib 
--crate-type rlib -C prefer-dynamic \ -C opt-level=3 \ -C metadata=[..] \ -C extra-filename=-[..] \ --out-dir {dir}[..]target[..]release[..]deps \ --emit=dep-info,link \ -L dependency={dir}[..]target[..]release[..]deps \ -L dependency={dir}[..]target[..]release[..]deps` {compiling} test v0.0.0 ({url}) {running} `rustc src[..]lib.rs --crate-name test --crate-type lib \ -C opt-level=3 \ --out-dir {dir}[..]target[..]release \ --emit=dep-info,link \ -L dependency={dir}[..]target[..]release \ -L dependency={dir}[..]target[..]release[..]deps \ --extern foo={dir}[..]target[..]release[..]deps[..]\ {prefix}foo-[..]{suffix} \ --extern foo={dir}[..]target[..]release[..]deps[..]libfoo-[..].rlib` ", running = RUNNING, compiling = COMPILING, dir = p.root().display(), url = p.url(), prefix = env::consts::DLL_PREFIX, suffix = env::consts::DLL_SUFFIX))); }); test!(explicit_examples { let mut p = project("world"); p = p.file("Cargo.toml", r#" [package] name = "world" version = "1.0.0" authors = [] [lib] name = "world" path = "src/lib.rs" [[example]] name = "hello" path = "examples/ex-hello.rs" [[example]] name = "goodbye" path = "examples/ex-goodbye.rs" "#) .file("src/lib.rs", r#" pub fn get_hello() -> &'static str { "Hello" } pub fn get_goodbye() -> &'static str { "Goodbye" } pub fn get_world() -> &'static str { "World" } "#) .file("examples/ex-hello.rs", r#" extern crate world; fn main() { println!("{}, {}!", world::get_hello(), world::get_world()); } "#) .file("examples/ex-goodbye.rs", r#" extern crate world; fn main() { println!("{}, {}!", world::get_goodbye(), world::get_world()); } "#); assert_that(p.cargo_process("test").arg("-v"), execs().with_status(0)); assert_that(process(&p.bin("examples/hello")), execs().with_stdout("Hello, World!\n")); assert_that(process(&p.bin("examples/goodbye")), execs().with_stdout("Goodbye, World!\n")); }); test!(implicit_examples { let mut p = project("world"); p = p.file("Cargo.toml", r#" [package] name = "world" version = "1.0.0" authors = [] 
"#) .file("src/lib.rs", r#" pub fn get_hello() -> &'static str { "Hello" } pub fn get_goodbye() -> &'static str { "Goodbye" } pub fn get_world() -> &'static str { "World" } "#) .file("examples/hello.rs", r#" extern crate world; fn main() { println!("{}, {}!", world::get_hello(), world::get_world()); } "#) .file("examples/goodbye.rs", r#" extern crate world; fn main() { println!("{}, {}!", world::get_goodbye(), world::get_world()); } "#); assert_that(p.cargo_process("test"), execs().with_status(0)); assert_that(process(&p.bin("examples/hello")), execs().with_stdout("Hello, World!\n")); assert_that(process(&p.bin("examples/goodbye")), execs().with_stdout("Goodbye, World!\n")); }); test!(standard_build_no_ndebug { let p = project("world") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", r#" fn main() { if cfg!(debug_assertions) { println!("slow") } else { println!("fast") } } "#); assert_that(p.cargo_process("build"), execs().with_status(0)); assert_that(process(&p.bin("foo")), execs().with_stdout("slow\n")); }); test!(release_build_ndebug { let p = project("world") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", r#" fn main() { if cfg!(debug_assertions) { println!("slow") } else { println!("fast") } } "#); assert_that(p.cargo_process("build").arg("--release"), execs().with_status(0)); assert_that(process(&p.release_bin("foo")), execs().with_stdout("fast\n")); }); test!(inferred_main_bin { let p = project("world") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#) .file("src/main.rs", r#" fn main() {} "#); assert_that(p.cargo_process("build"), execs().with_status(0)); assert_that(process(&p.bin("foo")), execs().with_status(0)); }); test!(deletion_causes_failure { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" "#) .file("src/main.rs", r#" extern crate bar; fn main() {} "#) .file("bar/Cargo.toml", r#" [package] name 
= "bar" version = "0.0.1" authors = [] "#) .file("bar/src/lib.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(0)); let p = p.file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#); assert_that(p.cargo_process("build"), execs().with_status(101)); }); test!(bad_cargo_toml_in_target_dir { let p = project("world") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#) .file("src/main.rs", r#" fn main() {} "#) .file("target/Cargo.toml", "bad-toml"); assert_that(p.cargo_process("build"), execs().with_status(0)); assert_that(process(&p.bin("foo")), execs().with_status(0)); }); test!(lib_with_standard_name { let p = project("foo") .file("Cargo.toml", r#" [package] name = "syntax" version = "0.0.1" authors = [] "#) .file("src/lib.rs", " pub fn foo() {} ") .file("src/main.rs", " extern crate syntax; fn main() { syntax::foo() } "); assert_that(p.cargo_process("build"), execs().with_status(0) .with_stdout(&format!("\ {compiling} syntax v0.0.1 ({dir}) ", compiling = COMPILING, dir = p.url()))); }); test!(simple_staticlib { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.1" [lib] name = "foo" crate-type = ["staticlib"] "#) .file("src/lib.rs", "pub fn foo() {}"); // env var is a test for #1381 assert_that(p.cargo_process("build").env("RUST_LOG", "nekoneko=trace"), execs().with_status(0)); }); test!(staticlib_rlib_and_bin { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.1" [lib] name = "foo" crate-type = ["staticlib", "rlib"] "#) .file("src/lib.rs", "pub fn foo() {}") .file("src/main.rs", r#" extern crate foo; fn main() { foo::foo(); }"#); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0)); }); test!(opt_out_of_bin { let p = project("foo") .file("Cargo.toml", r#" bin = [] [package] name = "foo" authors = [] version = "0.0.1" "#) .file("src/lib.rs", "") .file("src/main.rs", "bad syntax"); 
assert_that(p.cargo_process("build"), execs().with_status(0)); }); test!(single_lib { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.1" [lib] name = "foo" path = "src/bar.rs" "#) .file("src/bar.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(0)); }); test!(freshness_ignores_excluded { let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] build = "build.rs" exclude = ["src/b*.rs"] "#) .file("build.rs", "fn main() {}") .file("src/lib.rs", "pub fn bar() -> i32 { 1 }"); foo.build(); foo.root().move_into_the_past().unwrap(); assert_that(foo.cargo("build"), execs().with_status(0) .with_stdout(&format!("\ {compiling} foo v0.0.0 ({url}) ", compiling = COMPILING, url = foo.url()))); // Smoke test to make sure it doesn't compile again println!("first pass"); assert_that(foo.cargo("build"), execs().with_status(0) .with_stdout("")); // Modify an ignored file and make sure we don't rebuild println!("second pass"); File::create(&foo.root().join("src/bar.rs")).unwrap(); assert_that(foo.cargo("build"), execs().with_status(0) .with_stdout("")); }); test!(rebuild_preserves_out_dir { let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] build = 'build.rs' "#) .file("build.rs", r#" use std::env; use std::fs::File; use std::path::Path; fn main() { let path = Path::new(&env::var("OUT_DIR").unwrap()).join("foo"); if env::var_os("FIRST").is_some() { File::create(&path).unwrap(); } else { File::create(&path).unwrap(); } } "#) .file("src/lib.rs", "pub fn bar() -> i32 { 1 }"); foo.build(); foo.root().move_into_the_past().unwrap(); assert_that(foo.cargo("build").env("FIRST", "1"), execs().with_status(0) .with_stdout(&format!("\ {compiling} foo v0.0.0 ({url}) ", compiling = COMPILING, url = foo.url()))); File::create(&foo.root().join("src/bar.rs")).unwrap(); assert_that(foo.cargo("build"), execs().with_status(0) 
.with_stdout(&format!("\ {compiling} foo v0.0.0 ({url}) ", compiling = COMPILING, url = foo.url()))); }); test!(dep_no_libs { let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [dependencies.bar] path = "bar" "#) .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.0" authors = [] "#) .file("bar/src/main.rs", ""); assert_that(foo.cargo_process("build"), execs().with_status(0)); }); test!(recompile_space_in_name { let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [lib] name = "foo" path = "src/my lib.rs" "#) .file("src/my lib.rs", ""); assert_that(foo.cargo_process("build"), execs().with_status(0)); foo.root().move_into_the_past().unwrap(); assert_that(foo.cargo("build"), execs().with_status(0).with_stdout("")); }); #[cfg(unix)] test!(ignore_bad_directories { use std::os::unix::prelude::*; let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] "#) .file("src/lib.rs", ""); foo.build(); let dir = foo.root().join("tmp"); fs::create_dir(&dir).unwrap(); let stat = fs::metadata(&dir).unwrap(); let mut perms = stat.permissions(); perms.set_mode(0o644); fs::set_permissions(&dir, perms.clone()).unwrap(); assert_that(foo.cargo("build"), execs().with_status(0)); perms.set_mode(0o755); fs::set_permissions(&dir, perms).unwrap(); }); test!(bad_cargo_config { let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] "#) .file("src/lib.rs", "") .file(".cargo/config", r#" this is not valid toml "#); assert_that(foo.cargo_process("build").arg("-v"), execs().with_status(101).with_stderr("\ Couldn't load Cargo configuration Caused by: could not parse TOML configuration in `[..]` Caused by: could not parse input as TOML [..].cargo[..]config:2:20-2:21 expected `=`, but found `i` ")); }); test!(cargo_platform_specific_dependency { 
let host = ::rustc_host(); let p = project("foo") .file("Cargo.toml", &format!(r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] build = "build.rs" [target.{host}.dependencies] dep = {{ path = "dep" }} [target.{host}.build-dependencies] build = {{ path = "build" }} [target.{host}.dev-dependencies] dev = {{ path = "dev" }} "#, host = host)) .file("src/main.rs", r#" extern crate dep; fn main() { dep::dep() } "#) .file("tests/foo.rs", r#" extern crate dev; #[test] fn foo() { dev::dev() } "#) .file("build.rs", r#" extern crate build; fn main() { build::build(); } "#) .file("dep/Cargo.toml", r#" [project] name = "dep" version = "0.5.0" authors = ["wycats@example.com"] "#) .file("dep/src/lib.rs", "pub fn dep() {}") .file("build/Cargo.toml", r#" [project] name = "build" version = "0.5.0" authors = ["wycats@example.com"] "#) .file("build/src/lib.rs", "pub fn build() {}") .file("dev/Cargo.toml", r#" [project] name = "dev" version = "0.5.0" authors = ["wycats@example.com"] "#) .file("dev/src/lib.rs", "pub fn dev() {}"); assert_that(p.cargo_process("build"), execs().with_status(0)); assert_that(&p.bin("foo"), existing_file()); assert_that(p.cargo("test"), execs().with_status(0)); }); test!(bad_platform_specific_dependency { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [target.wrong-target.dependencies.bar] path = "bar" "#) .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) .file("bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] "#) .file("bar/src/lib.rs", r#" extern crate baz; pub fn gimme() -> String { format!("") } "#); assert_that(p.cargo_process("build"), execs().with_status(101)); }); test!(cargo_platform_specific_dependency_wrong_platform { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [target.non-existing-triplet.dependencies.bar] 
path = "bar" "#) .file("src/main.rs", r#" fn main() {} "#) .file("bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] "#) .file("bar/src/lib.rs", r#" invalid rust file, should not be compiled "#); p.cargo_process("build").exec_with_output().unwrap(); assert_that(&p.bin("foo"), existing_file()); assert_that(process(&p.bin("foo")), execs()); let loc = p.root().join("Cargo.lock"); let mut lockfile = String::new(); File::open(&loc).unwrap().read_to_string(&mut lockfile).unwrap(); assert!(lockfile.contains("bar")) }); test!(example_bin_same_name { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/main.rs", "fn main() {}") .file("examples/foo.rs", "fn main() {}"); p.cargo_process("test").arg("--no-run").arg("-v") .exec_with_output() .unwrap(); assert_that(&p.bin("foo"), is_not(existing_file())); assert_that(&p.bin("examples/foo"), existing_file()); p.cargo("test").arg("--no-run").arg("-v") .exec_with_output() .unwrap(); assert_that(&p.bin("foo"), is_not(existing_file())); assert_that(&p.bin("examples/foo"), existing_file()); }); test!(compile_then_delete { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/main.rs", "fn main() {}"); assert_that(p.cargo_process("run"), execs().with_status(0)); assert_that(&p.bin("foo"), existing_file()); if cfg!(windows) { // On windows unlinking immediately after running often fails, so sleep thread::sleep_ms(100); } fs::remove_file(&p.bin("foo")).unwrap(); assert_that(p.cargo("run"), execs().with_status(0)); }); test!(transitive_dependencies_not_available { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.aaaaa] path = "a" "#) .file("src/main.rs", "extern crate bbbbb; extern crate aaaaa; fn main() {}") .file("a/Cargo.toml", r#" [package] name = "aaaaa" version = "0.0.1" authors = [] 
[dependencies.bbbbb] path = "../b" "#) .file("a/src/lib.rs", "extern crate bbbbb;") .file("b/Cargo.toml", r#" [package] name = "bbbbb" version = "0.0.1" authors = [] "#) .file("b/src/lib.rs", ""); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(101) .with_stderr("\ [..] can't find crate for `bbbbb`[..] [..] extern crate bbbbb; [..] [..] error: aborting due to previous error Could not compile `foo`. Caused by: [..] ")); }); test!(cyclic_deps_rejected { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.a] path = "a" "#) .file("src/lib.rs", "") .file("a/Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] [dependencies.foo] path = ".." "#) .file("a/src/lib.rs", ""); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(101) .with_stderr("\ cyclic package dependency: package `foo v0.0.1 ([..])` depends on itself ")); }); test!(predictable_filenames { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [lib] name = "foo" crate-type = ["staticlib", "dylib", "rlib"] "#) .file("src/lib.rs", ""); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0)); assert_that(&p.root().join("target/debug/libfoo.a"), existing_file()); assert_that(&p.root().join("target/debug/libfoo.rlib"), existing_file()); let dylib_name = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX); assert_that(&p.root().join("target/debug").join(dylib_name), existing_file()); }); test!(dashes_to_underscores { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo-bar" version = "0.0.1" authors = [] "#) .file("src/lib.rs", "") .file("src/main.rs", "extern crate foo_bar; fn main() {}"); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0)); assert_that(&p.bin("foo-bar"), existing_file()); }); test!(dashes_in_crate_name_bad { let p = project("foo") .file("Cargo.toml", r#" [package] name 
= "foo" version = "0.0.1" authors = [] [lib] name = "foo-bar" "#) .file("src/lib.rs", "") .file("src/main.rs", "extern crate foo_bar; fn main() {}"); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(101)); }); test!(rustc_env_var { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", ""); p.build(); assert_that(p.cargo("build") .env("RUSTC", "rustc-that-does-not-exist").arg("-v"), execs().with_status(101) .with_stderr("\ Could not execute process `rustc-that-does-not-exist -vV` ([..]) Caused by: [..] ")); assert_that(&p.bin("a"), is_not(existing_file())); }); test!(filtering { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", "") .file("src/bin/a.rs", "fn main() {}") .file("src/bin/b.rs", "fn main() {}") .file("examples/a.rs", "fn main() {}") .file("examples/b.rs", "fn main() {}"); p.build(); assert_that(p.cargo("build").arg("--lib"), execs().with_status(0)); assert_that(&p.bin("a"), is_not(existing_file())); assert_that(p.cargo("build").arg("--bin=a").arg("--example=a"), execs().with_status(0)); assert_that(&p.bin("a"), existing_file()); assert_that(&p.bin("b"), is_not(existing_file())); assert_that(&p.bin("examples/a"), existing_file()); assert_that(&p.bin("examples/b"), is_not(existing_file())); }); test!(ignore_dotfile { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/bin/.a.rs", "") .file("src/bin/a.rs", "fn main() {}"); p.build(); assert_that(p.cargo("build"), execs().with_status(0)); }); test!(ignore_dotdirs { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/bin/a.rs", "fn main() {}") .file(".git/Cargo.toml", "") .file(".pc/dummy-fix.patch/Cargo.toml", ""); p.build(); assert_that(p.cargo("build"), execs().with_status(0)); }); test!(dotdir_root { let p = 
ProjectBuilder::new("foo", root().join(".foo")) .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/bin/a.rs", "fn main() {}"); p.build(); assert_that(p.cargo("build"), execs().with_status(0)); }); test!(custom_target_dir { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/main.rs", "fn main() {}"); p.build(); let exe_name = format!("foo{}", env::consts::EXE_SUFFIX); assert_that(p.cargo("build").env("CARGO_TARGET_DIR", "foo/target"), execs().with_status(0)); assert_that(&p.root().join("foo/target/debug").join(&exe_name), existing_file()); assert_that(&p.root().join("target/debug").join(&exe_name), is_not(existing_file())); assert_that(p.cargo("build"), execs().with_status(0)); assert_that(&p.root().join("foo/target/debug").join(&exe_name), existing_file()); assert_that(&p.root().join("target/debug").join(&exe_name), existing_file()); fs::create_dir(p.root().join(".cargo")).unwrap(); File::create(p.root().join(".cargo/config")).unwrap().write_all(br#" [build] target-dir = "bar/target" "#).unwrap(); assert_that(p.cargo("build").env("CARGO_TARGET_DIR", "foo/target"), execs().with_status(0)); assert_that(&p.root().join("bar/target/debug").join(&exe_name), existing_file()); assert_that(&p.root().join("foo/target/debug").join(&exe_name), existing_file()); assert_that(&p.root().join("target/debug").join(&exe_name), existing_file()); }); test!(rustc_no_trans { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/main.rs", "fn main() {}"); p.build(); assert_that(p.cargo("rustc").arg("-v").arg("--").arg("-Zno-trans"), execs().with_status(0)); }); test!(build_multiple_packages { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.d1] path = "d1" [dependencies.d2] path = "d2" [[bin]] name = "foo" "#) .file("src/foo.rs", &main_file(r#""i am foo""#, 
&[])) .file("d1/Cargo.toml", r#" [package] name = "d1" version = "0.0.1" authors = [] [[bin]] name = "d1" "#) .file("d1/src/lib.rs", "") .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }") .file("d2/Cargo.toml", r#" [package] name = "d2" version = "0.0.1" authors = [] [[bin]] name = "d2" doctest = false "#) .file("d2/src/main.rs", "fn main() { println!(\"d2\"); }"); p.build(); assert_that(p.cargo_process("build").arg("-p").arg("d1").arg("-p").arg("d2") .arg("-p").arg("foo"), execs()); assert_that(&p.bin("foo"), existing_file()); assert_that(process(&p.bin("foo")), execs().with_stdout("i am foo\n")); let d1_path = &p.build_dir().join("debug").join("deps") .join(format!("d1{}", env::consts::EXE_SUFFIX)); let d2_path = &p.build_dir().join("debug").join("deps") .join(format!("d2{}", env::consts::EXE_SUFFIX)); assert_that(d1_path, existing_file()); assert_that(process(d1_path), execs().with_stdout("d1")); assert_that(d2_path, existing_file()); assert_that(process(d2_path), execs().with_stdout("d2")); }); test!(invalid_spec { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.d1] path = "d1" [[bin]] name = "foo" "#) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) .file("d1/Cargo.toml", r#" [package] name = "d1" version = "0.0.1" authors = [] [[bin]] name = "d1" "#) .file("d1/src/lib.rs", "") .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }"); p.build(); assert_that(p.cargo_process("build").arg("-p").arg("notAValidDep"), execs().with_status(101).with_stderr( "could not find package matching spec `notAValidDep`".to_string())); assert_that(p.cargo_process("build").arg("-p").arg("d1").arg("-p").arg("notAValidDep"), execs().with_status(101).with_stderr( "could not find package matching spec `notAValidDep`".to_string())); }); test!(manifest_with_bom_is_ok { let p = project("foo") .file("Cargo.toml", "\u{FEFF} [package] name = \"foo\" version = \"0.0.1\" authors = [] ") .file("src/lib.rs", ""); 
assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0)); }); cargo-0.8.0/tests/test_cargo_compile_custom_build.rs000066400000000000000000001435401264656333200227560ustar00rootroot00000000000000use std::fs::{self, File}; use std::io::prelude::*; use std::thread; use support::{project, execs}; use support::{COMPILING, RUNNING, DOCTEST, FRESH, DOCUMENTING}; use support::paths::CargoPathExt; use hamcrest::{assert_that, existing_file, existing_dir}; fn setup() { } test!(custom_build_script_failed { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] build = "build.rs" "#) .file("src/main.rs", r#" fn main() {} "#) .file("build.rs", r#" fn main() { std::process::exit(101); } "#); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(101) .with_stdout(&format!("\ {compiling} foo v0.5.0 ({url}) {running} `rustc build.rs --crate-name build_script_build --crate-type bin [..]` {running} `[..]build-script-build[..]` ", url = p.url(), compiling = COMPILING, running = RUNNING)) .with_stderr(&format!("\ failed to run custom build command for `foo v0.5.0 ({})` Process didn't exit successfully: `[..]build[..]build-script-build[..]` \ (exit code: 101)", p.url()))); }); test!(custom_build_env_vars { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [features] bar_feat = ["bar/foo"] [dependencies.bar] path = "bar" "#) .file("src/main.rs", r#" fn main() {} "#) .file("bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] build = "build.rs" [features] foo = [] "#) .file("bar/src/lib.rs", r#" pub fn hello() {} "#); let file_content = format!(r#" use std::env; use std::io::prelude::*; use std::path::Path; use std::fs; fn main() {{ let _target = env::var("TARGET").unwrap(); let _ncpus = env::var("NUM_JOBS").unwrap(); let _dir = env::var("CARGO_MANIFEST_DIR").unwrap(); let opt = 
env::var("OPT_LEVEL").unwrap(); assert_eq!(opt, "0"); let opt = env::var("PROFILE").unwrap(); assert_eq!(opt, "debug"); let debug = env::var("DEBUG").unwrap(); assert_eq!(debug, "true"); let out = env::var("OUT_DIR").unwrap(); assert!(out.starts_with(r"{0}")); assert!(fs::metadata(&out).map(|m| m.is_dir()).unwrap_or(false)); let _host = env::var("HOST").unwrap(); let _feat = env::var("CARGO_FEATURE_FOO").unwrap(); }} "#, p.root().join("target").join("debug").join("build").display()); let p = p.file("bar/build.rs", &file_content); assert_that(p.cargo_process("build").arg("--features").arg("bar_feat"), execs().with_status(0)); }); test!(custom_build_script_wrong_rustc_flags { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] build = "build.rs" "#) .file("src/main.rs", r#" fn main() {} "#) .file("build.rs", r#" fn main() { println!("cargo:rustc-flags=-aaa -bbb"); } "#); assert_that(p.cargo_process("build"), execs().with_status(101) .with_stderr(&format!("\ Only `-l` and `-L` flags are allowed in build script of `foo v0.5.0 ({})`: \ `-aaa -bbb`", p.url()))); }); /* test!(custom_build_script_rustc_flags { let p = project("foo") .file("Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.foo] path = "foo" "#) .file("src/main.rs", r#" fn main() {} "#) .file("foo/Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] build = "build.rs" "#) .file("foo/src/lib.rs", r#" "#) .file("foo/build.rs", r#" fn main() { println!("cargo:rustc-flags=-l nonexistinglib -L /dummy/path1 -L /dummy/path2"); } "#); // TODO: TEST FAILS BECAUSE OF WRONG STDOUT (but otherwise, the build works) assert_that(p.cargo_process("build").arg("--verbose"), execs().with_status(101) .with_stdout(&format!("\ {compiling} bar v0.5.0 ({url}) {running} `rustc {dir}{sep}src{sep}lib.rs --crate-name test --crate-type lib -g \ -C metadata=[..] 
\ -C extra-filename=-[..] \ --out-dir {dir}{sep}target \ --emit=dep-info,link \ -L {dir}{sep}target \ -L {dir}{sep}target{sep}deps` ", running = RUNNING, compiling = COMPILING, sep = path::SEP, dir = p.root().display(), url = p.url(), ))); }); */ test!(links_no_build_cmd { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] links = "a" "#) .file("src/lib.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(101) .with_stderr("\ package `foo v0.5.0 (file://[..])` specifies that it links to `a` but does \ not have a custom build script ")); }); test!(links_duplicates { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] links = "a" build = "build.rs" [dependencies.a] path = "a" "#) .file("src/lib.rs", "") .file("build.rs", "") .file("a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] links = "a" build = "build.rs" "#) .file("a/src/lib.rs", "") .file("a/build.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(101) .with_stderr("\ native library `a` is being linked to by more than one package, and can only be \ linked to by one package [..] v0.5.0 (file://[..]) [..] 
v0.5.0 (file://[..]) ")); }); test!(overrides_and_links { let target = ::rustc_host(); let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" [dependencies.a] path = "a" "#) .file("src/lib.rs", "") .file("build.rs", r#" use std::env; fn main() { assert_eq!(env::var("DEP_FOO_FOO").ok().expect("FOO missing"), "bar"); assert_eq!(env::var("DEP_FOO_BAR").ok().expect("BAR missing"), "baz"); } "#) .file(".cargo/config", &format!(r#" [target.{}.foo] rustc-flags = "-L foo -L bar" foo = "bar" bar = "baz" "#, target)) .file("a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] links = "foo" build = "build.rs" "#) .file("a/src/lib.rs", "") .file("a/build.rs", "not valid rust code"); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0) .with_stdout(&format!("\ [..] [..] [..] [..] [..] {running} `rustc [..] --crate-name foo [..] -L foo -L bar[..]` ", running = RUNNING))); }); test!(unused_overrides { let target = ::rustc_host(); let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" "#) .file("src/lib.rs", "") .file("build.rs", "fn main() {}") .file(".cargo/config", &format!(r#" [target.{}.foo] rustc-flags = "-L foo -L bar" foo = "bar" bar = "baz" "#, target)); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0)); }); test!(links_passes_env_vars { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" [dependencies.a] path = "a" "#) .file("src/lib.rs", "") .file("build.rs", r#" use std::env; fn main() { assert_eq!(env::var("DEP_FOO_FOO").unwrap(), "bar"); assert_eq!(env::var("DEP_FOO_BAR").unwrap(), "baz"); } "#) .file("a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] links = "foo" build = "build.rs" "#) .file("a/src/lib.rs", "") .file("a/build.rs", r#" fn main() { println!("cargo:foo=bar"); 
println!("cargo:bar=baz"); } "#); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0)); }); test!(only_rerun_build_script { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" "#) .file("src/lib.rs", "") .file("build.rs", r#" fn main() {} "#); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0)); p.root().move_into_the_past().unwrap(); File::create(&p.root().join("some-new-file")).unwrap(); p.root().move_into_the_past().unwrap(); assert_that(p.cargo("build").arg("-v"), execs().with_status(0) .with_stdout(&format!("\ {compiling} foo v0.5.0 (file://[..]) {running} `[..]build-script-build[..]` {running} `rustc [..] --crate-name foo [..]` ", compiling = COMPILING, running = RUNNING))); }); test!(rebuild_continues_to_pass_env_vars { let a = project("a") .file("Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] links = "foo" build = "build.rs" "#) .file("src/lib.rs", "") .file("build.rs", r#" fn main() { println!("cargo:foo=bar"); println!("cargo:bar=baz"); std::thread::sleep_ms(500); } "#); a.build(); a.root().move_into_the_past().unwrap(); let p = project("foo") .file("Cargo.toml", &format!(r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" [dependencies.a] path = '{}' "#, a.root().display())) .file("src/lib.rs", "") .file("build.rs", r#" use std::env; fn main() { assert_eq!(env::var("DEP_FOO_FOO").unwrap(), "bar"); assert_eq!(env::var("DEP_FOO_BAR").unwrap(), "baz"); } "#); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0)); p.root().move_into_the_past().unwrap(); File::create(&p.root().join("some-new-file")).unwrap(); p.root().move_into_the_past().unwrap(); assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); }); test!(testing_and_such { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" "#) .file("src/lib.rs", "") 
.file("build.rs", r#" fn main() {} "#); println!("build"); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0)); p.root().move_into_the_past().unwrap(); File::create(&p.root().join("src/lib.rs")).unwrap(); p.root().move_into_the_past().unwrap(); println!("test"); assert_that(p.cargo("test").arg("-vj1"), execs().with_status(0) .with_stdout(&format!("\ {compiling} foo v0.5.0 (file://[..]) {running} `[..]build-script-build[..]` {running} `rustc [..] --crate-name foo [..]` {running} `rustc [..] --crate-name foo [..]` {running} `[..]foo-[..][..]` running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured {doctest} foo {running} `rustdoc --test [..]` running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, running = RUNNING, doctest = DOCTEST))); println!("doc"); assert_that(p.cargo("doc").arg("-v"), execs().with_status(0) .with_stdout(&format!("\ {documenting} foo v0.5.0 (file://[..]) {running} `rustdoc [..]` ", documenting = DOCUMENTING, running = RUNNING))); File::create(&p.root().join("src/main.rs")).unwrap() .write_all(b"fn main() {}").unwrap(); println!("run"); assert_that(p.cargo("run"), execs().with_status(0) .with_stdout(&format!("\ {compiling} foo v0.5.0 (file://[..]) {running} `target[..]foo[..]` ", compiling = COMPILING, running = RUNNING))); }); test!(propagation_of_l_flags { let target = ::rustc_host(); let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies.a] path = "a" "#) .file("src/lib.rs", "") .file("a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] links = "bar" build = "build.rs" [dependencies.b] path = "../b" "#) .file("a/src/lib.rs", "") .file("a/build.rs", r#" fn main() { println!("cargo:rustc-flags=-L bar"); } "#) .file("b/Cargo.toml", r#" [project] name = "b" version = "0.5.0" authors = [] links = "foo" build = "build.rs" "#) .file("b/src/lib.rs", "") .file("b/build.rs", "bad file") 
.file(".cargo/config", &format!(r#" [target.{}.foo] rustc-flags = "-L foo" "#, target)); assert_that(p.cargo_process("build").arg("-v").arg("-j1"), execs().with_status(0) .with_stdout_contains(&format!("\ {running} `rustc [..] --crate-name a [..]-L bar[..]-L foo[..]` {compiling} foo v0.5.0 (file://[..]) {running} `rustc [..] --crate-name foo [..] -L bar -L foo` ", compiling = COMPILING, running = RUNNING))); }); test!(propagation_of_l_flags_new { let target = ::rustc_host(); let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies.a] path = "a" "#) .file("src/lib.rs", "") .file("a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] links = "bar" build = "build.rs" [dependencies.b] path = "../b" "#) .file("a/src/lib.rs", "") .file("a/build.rs", r#" fn main() { println!("cargo:rustc-link-search=bar"); } "#) .file("b/Cargo.toml", r#" [project] name = "b" version = "0.5.0" authors = [] links = "foo" build = "build.rs" "#) .file("b/src/lib.rs", "") .file("b/build.rs", "bad file") .file(".cargo/config", &format!(r#" [target.{}.foo] rustc-link-search = ["foo"] "#, target)); assert_that(p.cargo_process("build").arg("-v").arg("-j1"), execs().with_status(0) .with_stdout_contains(&format!("\ {running} `rustc [..] --crate-name a [..]-L bar[..]-L foo[..]` {compiling} foo v0.5.0 (file://[..]) {running} `rustc [..] --crate-name foo [..] 
-L bar -L foo` ", compiling = COMPILING, running = RUNNING))); }); test!(build_deps_simple { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" [build-dependencies.a] path = "a" "#) .file("src/lib.rs", "") .file("build.rs", " extern crate a; fn main() {} ") .file("a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] "#) .file("a/src/lib.rs", ""); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0) .with_stdout(&format!("\ {compiling} a v0.5.0 (file://[..]) {running} `rustc [..] --crate-name a [..]` {compiling} foo v0.5.0 (file://[..]) {running} `rustc build.rs [..] --extern a=[..]` {running} `[..]foo-[..]build-script-build[..]` {running} `rustc [..] --crate-name foo [..]` ", compiling = COMPILING, running = RUNNING))); }); test!(build_deps_not_for_normal { let target = ::rustc_host(); let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" [build-dependencies.aaaaa] path = "a" "#) .file("src/lib.rs", "extern crate aaaaa;") .file("build.rs", " extern crate aaaaa; fn main() {} ") .file("a/Cargo.toml", r#" [project] name = "aaaaa" version = "0.5.0" authors = [] "#) .file("a/src/lib.rs", ""); assert_that(p.cargo_process("build").arg("-v").arg("--target").arg(&target), execs().with_status(101) .with_stderr("\ [..]lib.rs[..] error: can't find crate for `aaaaa`[..] [..]lib.rs[..] extern crate aaaaa; [..] ^~~~~~~~~~~~~~~~~~~ error: aborting due to previous error Could not compile `foo`. Caused by: Process didn't exit successfully: [..] 
")); }); test!(build_cmd_with_a_build_cmd { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" [build-dependencies.a] path = "a" "#) .file("src/lib.rs", "") .file("build.rs", " extern crate a; fn main() {} ") .file("a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] build = "build.rs" [build-dependencies.b] path = "../b" "#) .file("a/src/lib.rs", "") .file("a/build.rs", "extern crate b; fn main() {}") .file("b/Cargo.toml", r#" [project] name = "b" version = "0.5.0" authors = [] "#) .file("b/src/lib.rs", ""); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0) .with_stdout(&format!("\ {compiling} b v0.5.0 (file://[..]) {running} `rustc [..] --crate-name b [..]` {compiling} a v0.5.0 (file://[..]) {running} `rustc a[..]build.rs [..] --extern b=[..]` {running} `[..]a-[..]build-script-build[..]` {running} `rustc [..]lib.rs --crate-name a --crate-type lib -g \ -C metadata=[..] -C extra-filename=-[..] \ --out-dir [..]target[..]deps --emit=dep-info,link \ -L [..]target[..]deps -L [..]target[..]deps` {compiling} foo v0.5.0 (file://[..]) {running} `rustc build.rs --crate-name build_script_build --crate-type bin \ -g \ --out-dir [..]build[..]foo-[..] 
--emit=dep-info,link \ -L [..]target[..]debug -L [..]target[..]deps \ --extern a=[..]liba-[..].rlib` {running} `[..]foo-[..]build-script-build[..]` {running} `rustc [..]lib.rs --crate-name foo --crate-type lib -g \ --out-dir [..]target[..]debug --emit=dep-info,link \ -L [..]target[..]debug -L [..]target[..]deps` ", compiling = COMPILING, running = RUNNING))); }); test!(out_dir_is_preserved { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" "#) .file("src/lib.rs", "") .file("build.rs", r#" use std::env; use std::fs::File; use std::path::Path; fn main() { let out = env::var("OUT_DIR").unwrap(); File::create(Path::new(&out).join("foo")).unwrap(); } "#); // Make the file assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0)); p.root().move_into_the_past().unwrap(); // Change to asserting that it's there File::create(&p.root().join("build.rs")).unwrap().write_all(br#" use std::env; use std::old_io::File; fn main() { let out = env::var("OUT_DIR").unwrap(); File::open(&Path::new(&out).join("foo")).unwrap(); } "#).unwrap(); p.root().move_into_the_past().unwrap(); assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); // Run a fresh build where file should be preserved assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); // One last time to make sure it's still there. 
File::create(&p.root().join("foo")).unwrap(); assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); }); test!(output_separate_lines { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" "#) .file("src/lib.rs", "") .file("build.rs", r#" fn main() { println!("cargo:rustc-flags=-L foo"); println!("cargo:rustc-flags=-l static=foo"); } "#); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(101) .with_stdout(&format!("\ {compiling} foo v0.5.0 (file://[..]) {running} `rustc build.rs [..]` {running} `[..]foo-[..]build-script-build[..]` {running} `rustc [..] --crate-name foo [..] -L foo -l static=foo` ", compiling = COMPILING, running = RUNNING))); }); test!(output_separate_lines_new { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" "#) .file("src/lib.rs", "") .file("build.rs", r#" fn main() { println!("cargo:rustc-link-search=foo"); println!("cargo:rustc-link-lib=static=foo"); } "#); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(101) .with_stdout(&format!("\ {compiling} foo v0.5.0 (file://[..]) {running} `rustc build.rs [..]` {running} `[..]foo-[..]build-script-build[..]` {running} `rustc [..] --crate-name foo [..] 
-L foo -l static=foo` ", compiling = COMPILING, running = RUNNING))); }); #[cfg(not(windows))] // FIXME(#867) test!(code_generation { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" "#) .file("src/main.rs", r#" include!(concat!(env!("OUT_DIR"), "/hello.rs")); fn main() { println!("{}", message()); } "#) .file("build.rs", r#" use std::env; use std::fs::File; use std::io::prelude::*; use std::path::PathBuf; fn main() { let dst = PathBuf::from(env::var("OUT_DIR").unwrap()); let mut f = File::create(&dst.join("hello.rs")).unwrap(); f.write_all(b" pub fn message() -> &'static str { \"Hello, World!\" } ").unwrap(); } "#); assert_that(p.cargo_process("run"), execs().with_status(0) .with_stdout(&format!("\ {compiling} foo v0.5.0 (file://[..]) {running} `target[..]foo` Hello, World! ", compiling = COMPILING, running = RUNNING))); assert_that(p.cargo_process("test"), execs().with_status(0)); }); test!(release_with_build_script { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" "#) .file("src/lib.rs", "") .file("build.rs", r#" fn main() {} "#); assert_that(p.cargo_process("build").arg("-v").arg("--release"), execs().with_status(0)); }); test!(build_script_only { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.0" authors = [] build = "build.rs" "#) .file("build.rs", r#"fn main() {}"#); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(101) .with_stderr("\ failed to parse manifest at `[..]` Caused by: no targets specified in the manifest either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] section must be present")); }); test!(shared_dep_with_a_build_script { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" [dependencies.a] path = "a" [build-dependencies.b] path = "b" "#) .file("src/lib.rs", "") 
.file("build.rs", "fn main() {}") .file("a/Cargo.toml", r#" [package] name = "a" version = "0.5.0" authors = [] build = "build.rs" "#) .file("a/build.rs", "fn main() {}") .file("a/src/lib.rs", "") .file("b/Cargo.toml", r#" [package] name = "b" version = "0.5.0" authors = [] [dependencies.a] path = "../b" "#) .file("b/src/lib.rs", ""); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0)); }); test!(transitive_dep_host { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" [build-dependencies.b] path = "b" "#) .file("src/lib.rs", "") .file("build.rs", "fn main() {}") .file("a/Cargo.toml", r#" [package] name = "a" version = "0.5.0" authors = [] links = "foo" build = "build.rs" "#) .file("a/build.rs", "fn main() {}") .file("a/src/lib.rs", "") .file("b/Cargo.toml", r#" [package] name = "b" version = "0.5.0" authors = [] [lib] name = "b" plugin = true [dependencies.a] path = "../a" "#) .file("b/src/lib.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(0)); }); test!(test_a_lib_with_a_build_command { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" "#) .file("src/lib.rs", r#" include!(concat!(env!("OUT_DIR"), "/foo.rs")); /// ``` /// foo::bar(); /// ``` pub fn bar() { assert_eq!(foo(), 1); } "#) .file("build.rs", r#" use std::env; use std::io::prelude::*; use std::fs::File; use std::path::PathBuf; fn main() { let out = PathBuf::from(env::var("OUT_DIR").unwrap()); File::create(out.join("foo.rs")).unwrap().write_all(b" fn foo() -> i32 { 1 } ").unwrap(); } "#); assert_that(p.cargo_process("test"), execs().with_status(0)); }); test!(test_dev_dep_build_script { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dev-dependencies.a] path = "a" "#) .file("src/lib.rs", "") .file("a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] build = 
"build.rs" "#) .file("a/build.rs", "fn main() {}") .file("a/src/lib.rs", ""); assert_that(p.cargo_process("test"), execs().with_status(0)); }); test!(build_script_with_dynamic_native_dependency { let build = project("builder") .file("Cargo.toml", r#" [package] name = "builder" version = "0.0.1" authors = [] [lib] name = "builder" crate-type = ["dylib"] plugin = true "#) .file("src/lib.rs", r#" #[no_mangle] pub extern fn foo() {} "#); assert_that(build.cargo_process("build"), execs().with_status(0)); let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" [build-dependencies.bar] path = "bar" "#) .file("build.rs", r#" extern crate bar; fn main() { bar::bar() } "#) .file("src/lib.rs", "") .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] build = "build.rs" "#) .file("bar/build.rs", r#" use std::env; use std::path::PathBuf; fn main() { let src = PathBuf::from(env::var("SRC").unwrap()); println!("cargo:rustc-link-search={}/target/debug", src.display()); } "#) .file("bar/src/lib.rs", r#" pub fn bar() { #[link(name = "builder")] extern { fn foo(); } unsafe { foo() } } "#); assert_that(foo.cargo_process("build").env("SRC", build.root()), execs().with_status(0)); }); test!(profile_and_opt_level_set_correctly { let build = project("builder") .file("Cargo.toml", r#" [package] name = "builder" version = "0.0.1" authors = [] build = "build.rs" "#) .file("src/lib.rs", "") .file("build.rs", r#" use std::env; fn main() { assert_eq!(env::var("OPT_LEVEL").unwrap(), "3"); assert_eq!(env::var("PROFILE").unwrap(), "release"); assert_eq!(env::var("DEBUG").unwrap(), "false"); } "#); assert_that(build.cargo_process("bench"), execs().with_status(0)); }); test!(build_script_with_lto { let build = project("builder") .file("Cargo.toml", r#" [package] name = "builder" version = "0.0.1" authors = [] build = "build.rs" [profile.dev] lto = true "#) .file("src/lib.rs", "") .file("build.rs", r#" fn 
main() { } "#); assert_that(build.cargo_process("build"), execs().with_status(0)); }); test!(test_duplicate_deps { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] build = "build.rs" [dependencies.bar] path = "bar" [build-dependencies.bar] path = "bar" "#) .file("src/main.rs", r#" extern crate bar; fn main() { bar::do_nothing() } "#) .file("build.rs", r#" extern crate bar; fn main() { bar::do_nothing() } "#) .file("bar/Cargo.toml", r#" [project] name = "bar" version = "0.1.0" authors = [] "#) .file("bar/src/lib.rs", "pub fn do_nothing() {}"); assert_that(p.cargo_process("build"), execs().with_status(0)); }); test!(cfg_feedback { let build = project("builder") .file("Cargo.toml", r#" [package] name = "builder" version = "0.0.1" authors = [] build = "build.rs" "#) .file("src/main.rs", " #[cfg(foo)] fn main() {} ") .file("build.rs", r#" fn main() { println!("cargo:rustc-cfg=foo"); } "#); assert_that(build.cargo_process("build").arg("-v"), execs().with_status(0)); }); test!(cfg_override { let target = ::rustc_host(); let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] links = "a" build = "build.rs" "#) .file("src/main.rs", " #[cfg(foo)] fn main() {} ") .file("build.rs", "") .file(".cargo/config", &format!(r#" [target.{}.a] rustc-cfg = ["foo"] "#, target)); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0)); }); test!(cfg_test { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" "#) .file("build.rs", r#" fn main() { println!("cargo:rustc-cfg=foo"); } "#) .file("src/lib.rs", r#" /// /// ``` /// extern crate foo; /// /// fn main() { /// foo::foo() /// } /// ``` /// #[cfg(foo)] pub fn foo() {} #[cfg(foo)] #[test] fn test_foo() { foo() } "#) .file("tests/test.rs", r#" #[cfg(foo)] #[test] fn test_bar() {} "#); assert_that(p.cargo_process("test").arg("-v"), execs().with_stdout(format!("\ 
{compiling} foo v0.0.1 ({dir}) {running} [..] build.rs [..] {running} [..]build-script-build[..] {running} [..] --cfg foo[..] {running} [..] --cfg foo[..] {running} [..] --cfg foo[..] {running} [..]foo-[..] running 1 test test test_foo ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {running} [..]test-[..] running 1 test test test_bar ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {doctest} foo {running} [..] --cfg foo[..] running 1 test test foo_0 ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, dir = p.url(), running = RUNNING, doctest = DOCTEST))); }); test!(cfg_doc { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" [dependencies.bar] path = "bar" "#) .file("build.rs", r#" fn main() { println!("cargo:rustc-cfg=foo"); } "#) .file("src/lib.rs", r#" #[cfg(foo)] pub fn foo() {} "#) .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] build = "build.rs" "#) .file("bar/build.rs", r#" fn main() { println!("cargo:rustc-cfg=bar"); } "#) .file("bar/src/lib.rs", r#" #[cfg(bar)] pub fn bar() {} "#); assert_that(p.cargo_process("doc"), execs().with_status(0)); assert_that(&p.root().join("target/doc"), existing_dir()); assert_that(&p.root().join("target/doc/foo/fn.foo.html"), existing_file()); assert_that(&p.root().join("target/doc/bar/fn.bar.html"), existing_file()); }); test!(cfg_override_test { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" links = "a" "#) .file("build.rs", "") .file(".cargo/config", &format!(r#" [target.{}.a] rustc-cfg = ["foo"] "#, ::rustc_host())) .file("src/lib.rs", r#" /// /// ``` /// extern crate foo; /// /// fn main() { /// foo::foo() /// } /// ``` /// #[cfg(foo)] pub fn foo() {} #[cfg(foo)] #[test] fn test_foo() { foo() } "#) .file("tests/test.rs", r#" #[cfg(foo)] #[test] fn test_bar() {} "#); 
assert_that(p.cargo_process("test").arg("-v"), execs().with_stdout(format!("\ {compiling} foo v0.0.1 ({dir}) {running} `[..]` {running} `[..]` {running} `[..]` {running} [..]foo-[..] running 1 test test test_foo ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {running} [..]test-[..] running 1 test test test_bar ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {doctest} foo {running} [..] --cfg foo[..] running 1 test test foo_0 ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, dir = p.url(), running = RUNNING, doctest = DOCTEST))); }); test!(cfg_override_doc { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" links = "a" [dependencies.bar] path = "bar" "#) .file(".cargo/config", &format!(r#" [target.{target}.a] rustc-cfg = ["foo"] [target.{target}.b] rustc-cfg = ["bar"] "#, target = ::rustc_host())) .file("build.rs", "") .file("src/lib.rs", r#" #[cfg(foo)] pub fn foo() {} "#) .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] build = "build.rs" links = "b" "#) .file("bar/build.rs", "") .file("bar/src/lib.rs", r#" #[cfg(bar)] pub fn bar() {} "#) ; assert_that(p.cargo_process("doc"), execs().with_status(0)); assert_that(&p.root().join("target/doc"), existing_dir()); assert_that(&p.root().join("target/doc/foo/fn.foo.html"), existing_file()); assert_that(&p.root().join("target/doc/bar/fn.bar.html"), existing_file()); }); test!(flags_go_into_tests { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] b = { path = "b" } "#) .file("src/lib.rs", "") .file("tests/foo.rs", "") .file("b/Cargo.toml", r#" [project] name = "b" version = "0.5.0" authors = [] [dependencies] a = { path = "../a" } "#) .file("b/src/lib.rs", "") .file("a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] build = "build.rs" "#) 
.file("a/src/lib.rs", "") .file("a/build.rs", r#" fn main() { println!("cargo:rustc-link-search=test"); } "#); assert_that(p.cargo_process("test").arg("-v").arg("--test=foo"), execs().with_status(0).with_stdout(&format!("\ {compiling} a v0.5.0 ([..] {running} `rustc a[..]build.rs [..]` {running} `[..]build-script-build[..]` {running} `rustc a[..]src[..]lib.rs [..] -L test[..]` {compiling} b v0.5.0 ([..] {running} `rustc b[..]src[..]lib.rs [..] -L test[..]` {compiling} foo v0.5.0 ([..] {running} `rustc src[..]lib.rs [..] -L test[..]` {running} `rustc tests[..]foo.rs [..] -L test[..]` {running} `[..]foo-[..]` running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, running = RUNNING))); assert_that(p.cargo("test").arg("-v").arg("-pb").arg("--lib"), execs().with_status(0).with_stdout(&format!("\ {fresh} a v0.5.0 ([..] {compiling} b v0.5.0 ([..] {running} `rustc b[..]src[..]lib.rs [..] -L test[..]` {running} `[..]b-[..]` running 0 tests test result: ok. 
0 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, running = RUNNING, fresh = FRESH))); }); test!(diamond_passes_args_only_once { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] a = { path = "a" } b = { path = "b" } "#) .file("src/lib.rs", "") .file("tests/foo.rs", "") .file("a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] [dependencies] b = { path = "../b" } c = { path = "../c" } "#) .file("a/src/lib.rs", "") .file("b/Cargo.toml", r#" [project] name = "b" version = "0.5.0" authors = [] [dependencies] c = { path = "../c" } "#) .file("b/src/lib.rs", "") .file("c/Cargo.toml", r#" [project] name = "c" version = "0.5.0" authors = [] build = "build.rs" "#) .file("c/build.rs", r#" fn main() { println!("cargo:rustc-link-search=native=test"); } "#) .file("c/src/lib.rs", ""); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0).with_stdout(&format!("\ {compiling} c v0.5.0 ([..] {running} `rustc [..]` {running} `[..]` {running} `rustc [..]` {compiling} b v0.5.0 ([..] {running} `rustc [..]` {compiling} a v0.5.0 ([..] {running} `rustc [..]` {compiling} foo v0.5.0 ([..] {running} `[..]rlib -L native=test` ", compiling = COMPILING, running = RUNNING))); }); test!(adding_an_override_invalidates { let target = ::rustc_host(); let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] links = "foo" build = "build.rs" "#) .file("src/lib.rs", "") .file(".cargo/config", "") .file("build.rs", r#" fn main() { println!("cargo:rustc-link-search=native=foo"); } "#); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0).with_stdout(&format!("\ {compiling} foo v0.5.0 ([..] {running} `rustc [..]` {running} `[..]` {running} `rustc [..] 
-L native=foo` ", compiling = COMPILING, running = RUNNING))); File::create(p.root().join(".cargo/config")).unwrap().write_all(format!(" [target.{}.foo] rustc-link-search = [\"native=bar\"] ", target).as_bytes()).unwrap(); assert_that(p.cargo("build").arg("-v"), execs().with_status(0).with_stdout(&format!("\ {compiling} foo v0.5.0 ([..] {running} `rustc [..] -L native=bar` ", compiling = COMPILING, running = RUNNING))); }); test!(changing_an_override_invalidates { let target = ::rustc_host(); let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] links = "foo" build = "build.rs" "#) .file("src/lib.rs", "") .file(".cargo/config", &format!(" [target.{}.foo] rustc-link-search = [\"native=foo\"] ", target)) .file("build.rs", ""); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0).with_stdout(&format!("\ {compiling} foo v0.5.0 ([..] {running} `rustc [..] -L native=foo` ", compiling = COMPILING, running = RUNNING))); File::create(p.root().join(".cargo/config")).unwrap().write_all(format!(" [target.{}.foo] rustc-link-search = [\"native=bar\"] ", target).as_bytes()).unwrap(); assert_that(p.cargo("build").arg("-v"), execs().with_status(0).with_stdout(&format!("\ {compiling} foo v0.5.0 ([..] {running} `rustc [..] 
-L native=bar` ", compiling = COMPILING, running = RUNNING))); }); test!(rebuild_only_on_explicit_paths { let p = project("a") .file("Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] build = "build.rs" "#) .file("src/lib.rs", "") .file("build.rs", r#" fn main() { println!("cargo:rerun-if-changed=foo"); println!("cargo:rerun-if-changed=bar"); } "#); p.build(); assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); // files don't exist, so should always rerun if they don't exist println!("run without"); assert_that(p.cargo("build").arg("-v"), execs().with_status(0).with_stdout(&format!("\ {compiling} a v0.5.0 ([..]) {running} `[..]build-script-build[..]` {running} `rustc src[..]lib.rs [..]` ", running = RUNNING, compiling = COMPILING))); thread::sleep_ms(1000); File::create(p.root().join("foo")).unwrap(); File::create(p.root().join("bar")).unwrap(); // now the exist, so run once, catch the mtime, then shouldn't run again println!("run with"); assert_that(p.cargo("build").arg("-v"), execs().with_status(0).with_stdout(&format!("\ {compiling} a v0.5.0 ([..]) {running} `[..]build-script-build[..]` {running} `rustc src[..]lib.rs [..]` ", running = RUNNING, compiling = COMPILING))); println!("run with2"); assert_that(p.cargo("build").arg("-v"), execs().with_status(0).with_stdout(&format!("\ {fresh} a v0.5.0 ([..]) ", fresh = FRESH))); thread::sleep_ms(1000); // random other files do not affect freshness println!("run baz"); File::create(p.root().join("baz")).unwrap(); assert_that(p.cargo("build").arg("-v"), execs().with_status(0).with_stdout(&format!("\ {fresh} a v0.5.0 ([..]) ", fresh = FRESH))); // but changing dependent files does println!("run foo change"); File::create(p.root().join("foo")).unwrap(); assert_that(p.cargo("build").arg("-v"), execs().with_status(0).with_stdout(&format!("\ {compiling} a v0.5.0 ([..]) {running} `[..]build-script-build[..]` {running} `rustc src[..]lib.rs [..]` ", running = RUNNING, compiling = COMPILING))); // 
.. as does deleting a file println!("run foo delete"); fs::remove_file(p.root().join("bar")).unwrap(); assert_that(p.cargo("build").arg("-v"), execs().with_status(0).with_stdout(&format!("\ {compiling} a v0.5.0 ([..]) {running} `[..]build-script-build[..]` {running} `rustc src[..]lib.rs [..]` ", running = RUNNING, compiling = COMPILING))); }); test!(doctest_recieves_build_link_args { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies.a] path = "a" "#) .file("src/lib.rs", "") .file("a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] links = "bar" build = "build.rs" "#) .file("a/src/lib.rs", "") .file("a/build.rs", r#" fn main() { println!("cargo:rustc-link-search=native=bar"); } "#); assert_that(p.cargo_process("test").arg("-v"), execs().with_status(0) .with_stdout_contains(&format!("\ {running} `rustdoc --test [..] --crate-name foo [..]-L native=bar[..]` ", running = RUNNING))); }); cargo-0.8.0/tests/test_cargo_compile_git_deps.rs000066400000000000000000001457641264656333200220750ustar00rootroot00000000000000use std::fs::{self, File}; use std::io::prelude::*; use std::path::Path; use std::thread; use git2; use support::{git, project, execs, main_file, path2url}; use support::{COMPILING, UPDATING, RUNNING}; use support::paths::{self, CargoPathExt}; use hamcrest::{assert_that,existing_file}; use cargo::util::process; fn setup() { } test!(cargo_compile_simple_git_dep { let project = project("foo"); let git_project = git::new("dep1", |project| { project .file("Cargo.toml", r#" [project] name = "dep1" version = "0.5.0" authors = ["carlhuda@example.com"] [lib] name = "dep1" "#) .file("src/dep1.rs", r#" pub fn hello() -> &'static str { "hello world" } "#) }).unwrap(); let project = project .file("Cargo.toml", &format!(r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.dep1] git = '{}' [[bin]] name = "foo" "#, git_project.url())) .file("src/foo.rs", 
&main_file(r#""{}", dep1::hello()"#, &["dep1"])); let root = project.root(); let git_root = git_project.root(); assert_that(project.cargo_process("build"), execs() .with_stdout(&format!("{} git repository `{}`\n\ {} dep1 v0.5.0 ({}#[..])\n\ {} foo v0.5.0 ({})\n", UPDATING, path2url(git_root.clone()), COMPILING, path2url(git_root), COMPILING, path2url(root))) .with_stderr("")); assert_that(&project.bin("foo"), existing_file()); assert_that( process(&project.bin("foo")), execs().with_stdout("hello world\n")); }); test!(cargo_compile_git_dep_branch { let project = project("foo"); let git_project = git::new("dep1", |project| { project .file("Cargo.toml", r#" [project] name = "dep1" version = "0.5.0" authors = ["carlhuda@example.com"] [lib] name = "dep1" "#) .file("src/dep1.rs", r#" pub fn hello() -> &'static str { "hello world" } "#) }).unwrap(); // Make a new branch based on the current HEAD commit let repo = git2::Repository::open(&git_project.root()).unwrap(); let head = repo.head().unwrap().target().unwrap(); let head = repo.find_commit(head).unwrap(); repo.branch("branchy", &head, true).unwrap(); let project = project .file("Cargo.toml", &format!(r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.dep1] git = '{}' branch = "branchy" [[bin]] name = "foo" "#, git_project.url())) .file("src/foo.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"])); let root = project.root(); let git_root = git_project.root(); assert_that(project.cargo_process("build"), execs() .with_stdout(&format!("{} git repository `{}`\n\ {} dep1 v0.5.0 ({}?branch=branchy#[..])\n\ {} foo v0.5.0 ({})\n", UPDATING, path2url(git_root.clone()), COMPILING, path2url(git_root), COMPILING, path2url(root))) .with_stderr("")); assert_that(&project.bin("foo"), existing_file()); assert_that( process(&project.bin("foo")), execs().with_stdout("hello world\n")); }); test!(cargo_compile_git_dep_tag { let project = project("foo"); let git_project = git::new("dep1", |project| 
{ project .file("Cargo.toml", r#" [project] name = "dep1" version = "0.5.0" authors = ["carlhuda@example.com"] [lib] name = "dep1" "#) .file("src/dep1.rs", r#" pub fn hello() -> &'static str { "hello world" } "#) }).unwrap(); // Make a tag corresponding to the current HEAD let repo = git2::Repository::open(&git_project.root()).unwrap(); let head = repo.head().unwrap().target().unwrap(); repo.tag("v0.1.0", &repo.find_object(head, None).unwrap(), &repo.signature().unwrap(), "make a new tag", false).unwrap(); let project = project .file("Cargo.toml", &format!(r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.dep1] git = '{}' tag = "v0.1.0" [[bin]] name = "foo" "#, git_project.url())) .file("src/foo.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"])); let root = project.root(); let git_root = git_project.root(); assert_that(project.cargo_process("build"), execs() .with_stdout(&format!("{} git repository `{}`\n\ {} dep1 v0.5.0 ({}?tag=v0.1.0#[..])\n\ {} foo v0.5.0 ({})\n", UPDATING, path2url(git_root.clone()), COMPILING, path2url(git_root), COMPILING, path2url(root)))); assert_that(&project.bin("foo"), existing_file()); assert_that(process(&project.bin("foo")), execs().with_stdout("hello world\n")); assert_that(project.cargo("build"), execs().with_status(0)); }); test!(cargo_compile_with_nested_paths { let git_project = git::new("dep1", |project| { project .file("Cargo.toml", r#" [project] name = "dep1" version = "0.5.0" authors = ["carlhuda@example.com"] [dependencies.dep2] version = "0.5.0" path = "vendor/dep2" [lib] name = "dep1" "#) .file("src/dep1.rs", r#" extern crate dep2; pub fn hello() -> &'static str { dep2::hello() } "#) .file("vendor/dep2/Cargo.toml", r#" [project] name = "dep2" version = "0.5.0" authors = ["carlhuda@example.com"] [lib] name = "dep2" "#) .file("vendor/dep2/src/dep2.rs", r#" pub fn hello() -> &'static str { "hello world" } "#) }).unwrap(); let p = project("parent") .file("Cargo.toml", &format!(r#" 
[project] name = "parent" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.dep1] version = "0.5.0" git = '{}' [[bin]] name = "parent" "#, git_project.url())) .file("src/parent.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"])); p.cargo_process("build") .exec_with_output() .unwrap(); assert_that(&p.bin("parent"), existing_file()); assert_that(process(&p.bin("parent")), execs().with_stdout("hello world\n")); }); test!(cargo_compile_with_meta_package { let git_project = git::new("meta-dep", |project| { project .file("dep1/Cargo.toml", r#" [project] name = "dep1" version = "0.5.0" authors = ["carlhuda@example.com"] [lib] name = "dep1" "#) .file("dep1/src/dep1.rs", r#" pub fn hello() -> &'static str { "this is dep1" } "#) .file("dep2/Cargo.toml", r#" [project] name = "dep2" version = "0.5.0" authors = ["carlhuda@example.com"] [lib] name = "dep2" "#) .file("dep2/src/dep2.rs", r#" pub fn hello() -> &'static str { "this is dep2" } "#) }).unwrap(); let p = project("parent") .file("Cargo.toml", &format!(r#" [project] name = "parent" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.dep1] version = "0.5.0" git = '{}' [dependencies.dep2] version = "0.5.0" git = '{}' [[bin]] name = "parent" "#, git_project.url(), git_project.url())) .file("src/parent.rs", &main_file(r#""{} {}", dep1::hello(), dep2::hello()"#, &["dep1", "dep2"])); p.cargo_process("build") .exec_with_output() .unwrap(); assert_that(&p.bin("parent"), existing_file()); assert_that(process(&p.bin("parent")), execs().with_stdout("this is dep1 this is dep2\n")); }); test!(cargo_compile_with_short_ssh_git { let url = "git@github.com:a/dep"; let project = project("project") .file("Cargo.toml", &format!(r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.dep] git = "{}" [[bin]] name = "foo" "#, url)) .file("src/foo.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"])); assert_that(project.cargo_process("build"), execs() .with_stdout("") 
.with_stderr(&format!("\ failed to parse manifest at `[..]` Caused by: invalid url `{}`: relative URL without a base ", url))); }); test!(two_revs_same_deps { let bar = git::new("meta-dep", |project| { project.file("Cargo.toml", r#" [package] name = "bar" version = "0.0.0" authors = [] "#) .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") }).unwrap(); let repo = git2::Repository::open(&bar.root()).unwrap(); let rev1 = repo.revparse_single("HEAD").unwrap().id(); // Commit the changes and make sure we trigger a recompile File::create(&bar.root().join("src/lib.rs")).unwrap().write_all(br#" pub fn bar() -> i32 { 2 } "#).unwrap(); git::add(&repo); let rev2 = git::commit(&repo); let foo = project("foo") .file("Cargo.toml", &format!(r#" [project] name = "foo" version = "0.0.0" authors = [] [dependencies.bar] git = '{}' rev = "{}" [dependencies.baz] path = "../baz" "#, bar.url(), rev1)) .file("src/main.rs", r#" extern crate bar; extern crate baz; fn main() { assert_eq!(bar::bar(), 1); assert_eq!(baz::baz(), 2); } "#); let baz = project("baz") .file("Cargo.toml", &format!(r#" [package] name = "baz" version = "0.0.0" authors = [] [dependencies.bar] git = '{}' rev = "{}" "#, bar.url(), rev2)) .file("src/lib.rs", r#" extern crate bar; pub fn baz() -> i32 { bar::bar() } "#); baz.build(); assert_that(foo.cargo_process("build").arg("-v"), execs().with_status(0)); assert_that(&foo.bin("foo"), existing_file()); assert_that(foo.process(&foo.bin("foo")), execs().with_status(0)); }); test!(recompilation { let git_project = git::new("bar", |project| { project .file("Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["carlhuda@example.com"] [lib] name = "bar" "#) .file("src/bar.rs", r#" pub fn bar() {} "#) }).unwrap(); let p = project("foo") .file("Cargo.toml", &format!(r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] version = "0.5.0" git = '{}' [[bin]] name = "foo" "#, git_project.url())) .file("src/foo.rs", 
&main_file(r#""{:?}", bar::bar()"#, &["bar"])); // First time around we should compile both foo and bar assert_that(p.cargo_process("build"), execs().with_stdout(&format!("{} git repository `{}`\n\ {} bar v0.5.0 ({}#[..])\n\ {} foo v0.5.0 ({})\n", UPDATING, git_project.url(), COMPILING, git_project.url(), COMPILING, p.url()))); // Don't recompile the second time assert_that(p.cargo("build"), execs().with_stdout("")); // Modify a file manually, shouldn't trigger a recompile File::create(&git_project.root().join("src/bar.rs")).unwrap().write_all(br#" pub fn bar() { println!("hello!"); } "#).unwrap(); assert_that(p.cargo("build"), execs().with_stdout("")); assert_that(p.cargo("update"), execs().with_stdout(&format!("{} git repository `{}`", UPDATING, git_project.url()))); assert_that(p.cargo("build"), execs().with_stdout("")); // Commit the changes and make sure we don't trigger a recompile because the // lockfile says not to change let repo = git2::Repository::open(&git_project.root()).unwrap(); git::add(&repo); git::commit(&repo); println!("compile after commit"); assert_that(p.cargo("build"), execs().with_stdout("")); p.root().move_into_the_past().unwrap(); // Update the dependency and carry on! 
assert_that(p.cargo("update"), execs().with_stdout(&format!("{} git repository `{}`\n\ {} bar v0.5.0 ([..]) -> #[..]\n\ ", UPDATING, git_project.url(), UPDATING))); println!("going for the last compile"); assert_that(p.cargo("build"), execs().with_stdout(&format!("{} bar v0.5.0 ({}#[..])\n\ {} foo v0.5.0 ({})\n", COMPILING, git_project.url(), COMPILING, p.url()))); // Make sure clean only cleans one dep assert_that(p.cargo("clean") .arg("-p").arg("foo"), execs().with_stdout("")); assert_that(p.cargo("build"), execs().with_stdout(&format!("{} foo v0.5.0 ({})\n", COMPILING, p.url()))); }); test!(update_with_shared_deps { let git_project = git::new("bar", |project| { project .file("Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["carlhuda@example.com"] [lib] name = "bar" "#) .file("src/bar.rs", r#" pub fn bar() {} "#) }).unwrap(); let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.dep1] path = "dep1" [dependencies.dep2] path = "dep2" "#) .file("src/main.rs", r#" extern crate dep1; extern crate dep2; fn main() {} "#) .file("dep1/Cargo.toml", &format!(r#" [package] name = "dep1" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] version = "0.5.0" git = '{}' "#, git_project.url())) .file("dep1/src/lib.rs", "") .file("dep2/Cargo.toml", &format!(r#" [package] name = "dep2" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] version = "0.5.0" git = '{}' "#, git_project.url())) .file("dep2/src/lib.rs", ""); // First time around we should compile both foo and bar assert_that(p.cargo_process("build"), execs().with_stdout(&format!("\ {updating} git repository `{git}` {compiling} bar v0.5.0 ({git}#[..]) {compiling} [..] v0.5.0 ({dir}) {compiling} [..] 
v0.5.0 ({dir}) {compiling} foo v0.5.0 ({dir})\n", updating = UPDATING, git = git_project.url(), compiling = COMPILING, dir = p.url()))); // Modify a file manually, and commit it File::create(&git_project.root().join("src/bar.rs")).unwrap().write_all(br#" pub fn bar() { println!("hello!"); } "#).unwrap(); let repo = git2::Repository::open(&git_project.root()).unwrap(); let old_head = repo.head().unwrap().target().unwrap(); git::add(&repo); git::commit(&repo); thread::sleep_ms(1000); // By default, not transitive updates println!("dep1 update"); assert_that(p.cargo("update") .arg("-p").arg("dep1"), execs().with_stdout("")); // Don't do anything bad on a weird --precise argument println!("bar bad precise update"); assert_that(p.cargo("update") .arg("-p").arg("bar") .arg("--precise").arg("0.1.2"), execs().with_status(101).with_stderr("\ Unable to update [..] To learn more, run the command again with --verbose. ")); // Specifying a precise rev to the old rev shouldn't actually update // anything because we already have the rev in the db. println!("bar precise update"); assert_that(p.cargo("update") .arg("-p").arg("bar") .arg("--precise").arg(&old_head.to_string()), execs().with_stdout("")); // Updating aggressively should, however, update the repo. println!("dep1 aggressive update"); assert_that(p.cargo("update") .arg("-p").arg("dep1") .arg("--aggressive"), execs().with_stdout(&format!("{} git repository `{}`\n\ {} bar v0.5.0 ([..]) -> #[..]\n\ ", UPDATING, git_project.url(), UPDATING))); // Make sure we still only compile one version of the git repo println!("build"); assert_that(p.cargo("build"), execs().with_stdout(&format!("\ {compiling} bar v0.5.0 ({git}#[..]) {compiling} [..] v0.5.0 ({dir}) {compiling} [..] 
v0.5.0 ({dir}) {compiling} foo v0.5.0 ({dir})\n", git = git_project.url(), compiling = COMPILING, dir = p.url()))); // We should be able to update transitive deps assert_that(p.cargo("update").arg("-p").arg("bar"), execs().with_stdout(&format!("{} git repository `{}`", UPDATING, git_project.url()))); }); test!(dep_with_submodule { let project = project("foo"); let git_project = git::new("dep1", |project| { project .file("Cargo.toml", r#" [package] name = "dep1" version = "0.5.0" authors = ["carlhuda@example.com"] "#) }).unwrap(); let git_project2 = git::new("dep2", |project| { project.file("lib.rs", "pub fn dep() {}") }).unwrap(); let repo = git2::Repository::open(&git_project.root()).unwrap(); let url = path2url(git_project2.root()).to_string(); git::add_submodule(&repo, &url, Path::new("src")); git::commit(&repo); let project = project .file("Cargo.toml", &format!(r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.dep1] git = '{}' "#, git_project.url())) .file("src/lib.rs", " extern crate dep1; pub fn foo() { dep1::dep() } "); assert_that(project.cargo_process("build"), execs().with_stderr("").with_status(0)); }); test!(two_deps_only_update_one { let project = project("foo"); let git1 = git::new("dep1", |project| { project .file("Cargo.toml", r#" [package] name = "dep1" version = "0.5.0" authors = ["carlhuda@example.com"] "#) .file("src/lib.rs", "") }).unwrap(); let git2 = git::new("dep2", |project| { project .file("Cargo.toml", r#" [package] name = "dep2" version = "0.5.0" authors = ["carlhuda@example.com"] "#) .file("src/lib.rs", "") }).unwrap(); let project = project .file("Cargo.toml", &format!(r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.dep1] git = '{}' [dependencies.dep2] git = '{}' "#, git1.url(), git2.url())) .file("src/main.rs", "fn main() {}"); assert_that(project.cargo_process("build"), execs() .with_stdout(&format!("{} git repository `[..]`\n\ {} git repository 
`[..]`\n\ {} [..] v0.5.0 ([..])\n\ {} [..] v0.5.0 ([..])\n\ {} foo v0.5.0 ({})\n", UPDATING, UPDATING, COMPILING, COMPILING, COMPILING, project.url())) .with_stderr("")); File::create(&git1.root().join("src/lib.rs")).unwrap().write_all(br#" pub fn foo() {} "#).unwrap(); let repo = git2::Repository::open(&git1.root()).unwrap(); git::add(&repo); git::commit(&repo); assert_that(project.cargo("update") .arg("-p").arg("dep1"), execs() .with_stdout(&format!("{} git repository `{}`\n\ {} dep1 v0.5.0 ([..]) -> #[..]\n\ ", UPDATING, git1.url(), UPDATING)) .with_stderr("")); }); test!(stale_cached_version { let bar = git::new("meta-dep", |project| { project.file("Cargo.toml", r#" [package] name = "bar" version = "0.0.0" authors = [] "#) .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") }).unwrap(); // Update the git database in the cache with the current state of the git // repo let foo = project("foo") .file("Cargo.toml", &format!(r#" [project] name = "foo" version = "0.0.0" authors = [] [dependencies.bar] git = '{}' "#, bar.url())) .file("src/main.rs", r#" extern crate bar; fn main() { assert_eq!(bar::bar(), 1) } "#); assert_that(foo.cargo_process("build"), execs().with_status(0)); assert_that(foo.process(&foo.bin("foo")), execs().with_status(0)); // Update the repo, and simulate someone else updating the lockfile and then // us pulling it down. File::create(&bar.root().join("src/lib.rs")).unwrap().write_all(br#" pub fn bar() -> i32 { 1 + 0 } "#).unwrap(); let repo = git2::Repository::open(&bar.root()).unwrap(); git::add(&repo); git::commit(&repo); thread::sleep_ms(1000); let rev = repo.revparse_single("HEAD").unwrap().id(); File::create(&foo.root().join("Cargo.lock")).unwrap().write_all(format!(r#" [root] name = "foo" version = "0.0.0" dependencies = [ 'bar 0.0.0 (git+{url}#{hash})' ] [[package]] name = "bar" version = "0.0.0" source = 'git+{url}#{hash}' "#, url = bar.url(), hash = rev).as_bytes()).unwrap(); // Now build! 
assert_that(foo.cargo("build"), execs().with_status(0) .with_stdout(&format!("\ {updating} git repository `{bar}` {compiling} bar v0.0.0 ({bar}#[..]) {compiling} foo v0.0.0 ({foo}) ", updating = UPDATING, compiling = COMPILING, bar = bar.url(), foo = foo.url()))); assert_that(foo.process(&foo.bin("foo")), execs().with_status(0)); }); test!(dep_with_changed_submodule { let project = project("foo"); let git_project = git::new("dep1", |project| { project .file("Cargo.toml", r#" [package] name = "dep1" version = "0.5.0" authors = ["carlhuda@example.com"] "#) }).unwrap(); let git_project2 = git::new("dep2", |project| { project .file("lib.rs", "pub fn dep() -> &'static str { \"project2\" }") }).unwrap(); let git_project3 = git::new("dep3", |project| { project .file("lib.rs", "pub fn dep() -> &'static str { \"project3\" }") }).unwrap(); let repo = git2::Repository::open(&git_project.root()).unwrap(); let mut sub = git::add_submodule(&repo, &git_project2.url().to_string(), &Path::new("src")); git::commit(&repo); let project = project .file("Cargo.toml", &format!(r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.dep1] git = '{}' "#, git_project.url())) .file("src/main.rs", " extern crate dep1; pub fn main() { println!(\"{}\", dep1::dep()) } "); println!("first run"); assert_that(project.cargo_process("run"), execs() .with_stdout(&format!("{} git repository `[..]`\n\ {} dep1 v0.5.0 ([..])\n\ {} foo v0.5.0 ([..])\n\ {} `target[..]foo[..]`\n\ project2\ ", UPDATING, COMPILING, COMPILING, RUNNING)) .with_stderr("") .with_status(0)); File::create(&git_project.root().join(".gitmodules")).unwrap() .write_all(format!("[submodule \"src\"]\n\tpath = src\n\turl={}", git_project3.url()).as_bytes()).unwrap(); // Sync the submodule and reset it to the new remote. 
sub.sync().unwrap(); { let subrepo = sub.open().unwrap(); subrepo.remote_add_fetch("origin", "refs/heads/*:refs/heads/*").unwrap(); subrepo.remote_set_url("origin", &git_project3.url().to_string()).unwrap(); let mut origin = subrepo.find_remote("origin").unwrap(); origin.fetch(&[], None, None).unwrap(); let id = subrepo.refname_to_id("refs/remotes/origin/master").unwrap(); let obj = subrepo.find_object(id, None).unwrap(); subrepo.reset(&obj, git2::ResetType::Hard, None).unwrap(); } sub.add_to_index(true).unwrap(); git::add(&repo); git::commit(&repo); thread::sleep_ms(1000); // Update the dependency and carry on! println!("update"); assert_that(project.cargo("update").arg("-v"), execs() .with_stderr("") .with_stdout(&format!("{} git repository `{}`\n\ {} dep1 v0.5.0 ([..]) -> #[..]\n\ ", UPDATING, git_project.url(), UPDATING))); println!("last run"); assert_that(project.cargo("run"), execs() .with_stdout(&format!("{compiling} dep1 v0.5.0 ([..])\n\ {compiling} foo v0.5.0 ([..])\n\ {running} `target[..]foo[..]`\n\ project3\ ", compiling = COMPILING, running = RUNNING)) .with_stderr("") .with_status(0)); }); test!(dev_deps_with_testing { let p2 = git::new("bar", |project| { project.file("Cargo.toml", r#" [package] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] "#) .file("src/lib.rs", r#" pub fn gimme() -> &'static str { "zoidberg" } "#) }).unwrap(); let p = project("foo") .file("Cargo.toml", &format!(r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dev-dependencies.bar] version = "0.5.0" git = '{}' "#, p2.url())) .file("src/main.rs", r#" fn main() {} #[cfg(test)] mod tests { extern crate bar; #[test] fn foo() { bar::gimme(); } } "#); // Generate a lockfile which did not use `bar` to compile, but had to update // `bar` to generate the lockfile assert_that(p.cargo_process("build"), execs().with_stdout(&format!("\ {updating} git repository `{bar}` {compiling} foo v0.5.0 ({url}) ", updating = UPDATING, compiling = COMPILING, 
url = p.url(), bar = p2.url()))); // Make sure we use the previous resolution of `bar` instead of updating it // a second time. assert_that(p.cargo("test"), execs().with_stdout(&format!("\ {compiling} [..] v0.5.0 ([..]) {compiling} [..] v0.5.0 ([..] {running} target[..]foo-[..] running 1 test test tests::foo ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, running = RUNNING))); }); test!(git_build_cmd_freshness { let foo = git::new("foo", |project| { project.file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] build = "build.rs" "#) .file("build.rs", "fn main() {}") .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") .file(".gitignore", " src/bar.rs ") }).unwrap(); foo.root().move_into_the_past().unwrap(); thread::sleep_ms(1000); assert_that(foo.cargo("build"), execs().with_status(0) .with_stdout(&format!("\ {compiling} foo v0.0.0 ({url}) ", compiling = COMPILING, url = foo.url()))); // Smoke test to make sure it doesn't compile again println!("first pass"); assert_that(foo.cargo("build"), execs().with_status(0) .with_stdout("")); // Modify an ignored file and make sure we don't rebuild println!("second pass"); File::create(&foo.root().join("src/bar.rs")).unwrap(); assert_that(foo.cargo("build"), execs().with_status(0) .with_stdout("")); }); test!(git_name_not_always_needed { let p2 = git::new("bar", |project| { project.file("Cargo.toml", r#" [package] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] "#) .file("src/lib.rs", r#" pub fn gimme() -> &'static str { "zoidberg" } "#) }).unwrap(); let repo = git2::Repository::open(&p2.root()).unwrap(); let mut cfg = repo.config().unwrap(); let _ = cfg.remove("user.name"); let _ = cfg.remove("user.email"); let p = project("foo") .file("Cargo.toml", &format!(r#" [project] name = "foo" version = "0.5.0" authors = [] [dev-dependencies.bar] git = '{}' "#, p2.url())) .file("src/main.rs", "fn main() {}"); // Generate a lockfile which did not use `bar` 
to compile, but had to update // `bar` to generate the lockfile assert_that(p.cargo_process("build"), execs().with_stdout(&format!("\ {updating} git repository `{bar}` {compiling} foo v0.5.0 ({url}) ", updating = UPDATING, compiling = COMPILING, url = p.url(), bar = p2.url()))); }); test!(git_repo_changing_no_rebuild { let bar = git::new("bar", |project| { project.file("Cargo.toml", r#" [package] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] "#) .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") }).unwrap(); // Lock p1 to the first rev in the git repo let p1 = project("p1") .file("Cargo.toml", &format!(r#" [project] name = "p1" version = "0.5.0" authors = [] build = 'build.rs' [dependencies.bar] git = '{}' "#, bar.url())) .file("src/main.rs", "fn main() {}") .file("build.rs", "fn main() {}"); p1.build(); p1.root().move_into_the_past().unwrap(); assert_that(p1.cargo("build"), execs().with_stdout(&format!("\ {updating} git repository `{bar}` {compiling} [..] {compiling} [..] ", updating = UPDATING, compiling = COMPILING, bar = bar.url()))); // Make a commit to lock p2 to a different rev File::create(&bar.root().join("src/lib.rs")).unwrap().write_all(br#" pub fn bar() -> i32 { 2 } "#).unwrap(); let repo = git2::Repository::open(&bar.root()).unwrap(); git::add(&repo); git::commit(&repo); // Lock p2 to the second rev let p2 = project("p2") .file("Cargo.toml", &format!(r#" [project] name = "p2" version = "0.5.0" authors = [] [dependencies.bar] git = '{}' "#, bar.url())) .file("src/main.rs", "fn main() {}"); assert_that(p2.cargo_process("build"), execs().with_stdout(&format!("\ {updating} git repository `{bar}` {compiling} [..] {compiling} [..] ", updating = UPDATING, compiling = COMPILING, bar = bar.url()))); // And now for the real test! Make sure that p1 doesn't get rebuilt // even though the git repo has changed. 
assert_that(p1.cargo("build"), execs().with_stdout("")); }); test!(git_dep_build_cmd { let p = git::new("foo", |project| { project.file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] version = "0.5.0" path = "bar" [[bin]] name = "foo" "#) .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) .file("bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] build = "build.rs" [lib] name = "bar" "#) .file("bar/src/bar.rs.in", r#" pub fn gimme() -> i32 { 0 } "#) .file("bar/build.rs", r#" use std::fs; fn main() { fs::copy("src/bar.rs.in", "src/bar.rs").unwrap(); } "#) }).unwrap(); p.root().join("bar").move_into_the_past().unwrap(); assert_that(p.cargo("build"), execs().with_status(0)); assert_that(process(&p.bin("foo")), execs().with_stdout("0\n")); // Touching bar.rs.in should cause the `build` command to run again. fs::File::create(&p.root().join("bar/src/bar.rs.in")).unwrap() .write_all(b"pub fn gimme() -> i32 { 1 }").unwrap(); assert_that(p.cargo("build"), execs().with_status(0)); assert_that(process(&p.bin("foo")), execs().with_stdout("1\n")); }); test!(fetch_downloads { let bar = git::new("bar", |project| { project.file("Cargo.toml", r#" [package] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] "#) .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") }).unwrap(); let p = project("p1") .file("Cargo.toml", &format!(r#" [project] name = "p1" version = "0.5.0" authors = [] [dependencies.bar] git = '{}' "#, bar.url())) .file("src/main.rs", "fn main() {}"); assert_that(p.cargo_process("fetch"), execs().with_status(0).with_stdout(&format!("\ {updating} git repository `{url}` ", updating = UPDATING, url = bar.url()))); assert_that(p.cargo("fetch"), execs().with_status(0).with_stdout("")); }); test!(warnings_in_git_dep { let bar = git::new("bar", |project| { project.file("Cargo.toml", r#" [package] name = "bar" version = "0.5.0" authors = 
["wycats@example.com"] "#) .file("src/lib.rs", "fn unused() {}") }).unwrap(); let p = project("foo") .file("Cargo.toml", &format!(r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies.bar] git = '{}' "#, bar.url())) .file("src/main.rs", "fn main() {}"); assert_that(p.cargo_process("build"), execs() .with_stdout(&format!("{} git repository `{}`\n\ {} bar v0.5.0 ({}#[..])\n\ {} foo v0.5.0 ({})\n", UPDATING, bar.url(), COMPILING, bar.url(), COMPILING, p.url())) .with_stderr("")); }); test!(update_ambiguous { let foo1 = git::new("foo1", |project| { project.file("Cargo.toml", r#" [package] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] "#) .file("src/lib.rs", "") }).unwrap(); let foo2 = git::new("foo2", |project| { project.file("Cargo.toml", r#" [package] name = "foo" version = "0.6.0" authors = ["wycats@example.com"] "#) .file("src/lib.rs", "") }).unwrap(); let bar = git::new("bar", |project| { project.file("Cargo.toml", &format!(r#" [package] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.foo] git = '{}' "#, foo2.url())) .file("src/lib.rs", "") }).unwrap(); let p = project("project") .file("Cargo.toml", &format!(r#" [project] name = "project" version = "0.5.0" authors = [] [dependencies.foo] git = '{}' [dependencies.bar] git = '{}' "#, foo1.url(), bar.url())) .file("src/main.rs", "fn main() {}"); assert_that(p.cargo_process("generate-lockfile"), execs().with_status(0)); assert_that(p.cargo("update") .arg("-p").arg("foo"), execs().with_status(101) .with_stderr("\ There are multiple `foo` packages in your project, and the specification `foo` \ is ambiguous. 
Please re-run this command with `-p ` where `` is one of the \ following: foo:0.[..].0 foo:0.[..].0 ")); }); test!(update_one_dep_in_repo_with_many_deps { let foo = git::new("foo", |project| { project.file("Cargo.toml", r#" [package] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] "#) .file("src/lib.rs", "") .file("a/Cargo.toml", r#" [package] name = "a" version = "0.5.0" authors = ["wycats@example.com"] "#) .file("a/src/lib.rs", "") }).unwrap(); let p = project("project") .file("Cargo.toml", &format!(r#" [project] name = "project" version = "0.5.0" authors = [] [dependencies.foo] git = '{}' [dependencies.a] git = '{}' "#, foo.url(), foo.url())) .file("src/main.rs", "fn main() {}"); assert_that(p.cargo_process("generate-lockfile"), execs().with_status(0)); assert_that(p.cargo("update") .arg("-p").arg("foo"), execs().with_status(0) .with_stdout(&format!("\ Updating git repository `{}` ", foo.url()))); }); test!(switch_deps_does_not_update_transitive { let transitive = git::new("transitive", |project| { project.file("Cargo.toml", r#" [package] name = "transitive" version = "0.5.0" authors = ["wycats@example.com"] "#) .file("src/lib.rs", "") }).unwrap(); let dep1 = git::new("dep1", |project| { project.file("Cargo.toml", &format!(r#" [package] name = "dep" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.transitive] git = '{}' "#, transitive.url())) .file("src/lib.rs", "") }).unwrap(); let dep2 = git::new("dep2", |project| { project.file("Cargo.toml", &format!(r#" [package] name = "dep" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.transitive] git = '{}' "#, transitive.url())) .file("src/lib.rs", "") }).unwrap(); let p = project("project") .file("Cargo.toml", &format!(r#" [project] name = "project" version = "0.5.0" authors = [] [dependencies.dep] git = '{}' "#, dep1.url())) .file("src/main.rs", "fn main() {}"); p.build(); assert_that(p.cargo("build"), execs().with_status(0) .with_stdout(&format!("\ Updating git 
repository `{}` Updating git repository `{}` {compiling} transitive [..] {compiling} dep [..] {compiling} project [..] ", dep1.url(), transitive.url(), compiling = COMPILING))); // Update the dependency to point to the second repository, but this // shouldn't update the transitive dependency which is the same. File::create(&p.root().join("Cargo.toml")).unwrap().write_all(format!(r#" [project] name = "project" version = "0.5.0" authors = [] [dependencies.dep] git = '{}' "#, dep2.url()).as_bytes()).unwrap(); assert_that(p.cargo("build"), execs().with_status(0) .with_stdout(&format!("\ Updating git repository `{}` {compiling} dep [..] {compiling} project [..] ", dep2.url(), compiling = COMPILING))); }); test!(update_one_source_updates_all_packages_in_that_git_source { let dep = git::new("dep", |project| { project.file("Cargo.toml", r#" [package] name = "dep" version = "0.5.0" authors = [] [dependencies.a] path = "a" "#) .file("src/lib.rs", "") .file("a/Cargo.toml", r#" [package] name = "a" version = "0.5.0" authors = [] "#) .file("a/src/lib.rs", "") }).unwrap(); let p = project("project") .file("Cargo.toml", &format!(r#" [project] name = "project" version = "0.5.0" authors = [] [dependencies.dep] git = '{}' "#, dep.url())) .file("src/main.rs", "fn main() {}"); p.build(); assert_that(p.cargo("build"), execs().with_status(0)); let repo = git2::Repository::open(&dep.root()).unwrap(); let rev1 = repo.revparse_single("HEAD").unwrap().id(); // Just be sure to change a file File::create(&dep.root().join("src/lib.rs")).unwrap().write_all(br#" pub fn bar() -> i32 { 2 } "#).unwrap(); git::add(&repo); git::commit(&repo); assert_that(p.cargo("update").arg("-p").arg("dep"), execs().with_status(0)); let mut lockfile = String::new(); File::open(&p.root().join("Cargo.lock")).unwrap() .read_to_string(&mut lockfile).unwrap(); assert!(!lockfile.contains(&rev1.to_string()), "{} in {}", rev1, lockfile); }); test!(switch_sources { let a1 = git::new("a1", |project| { 
project.file("Cargo.toml", r#" [package] name = "a" version = "0.5.0" authors = [] "#) .file("src/lib.rs", "") }).unwrap(); let a2 = git::new("a2", |project| { project.file("Cargo.toml", r#" [package] name = "a" version = "0.5.1" authors = [] "#) .file("src/lib.rs", "") }).unwrap(); let p = project("project") .file("Cargo.toml", r#" [project] name = "project" version = "0.5.0" authors = [] [dependencies.b] path = "b" "#) .file("src/main.rs", "fn main() {}") .file("b/Cargo.toml", &format!(r#" [project] name = "b" version = "0.5.0" authors = [] [dependencies.a] git = '{}' "#, a1.url())) .file("b/src/lib.rs", "fn main() {}"); p.build(); assert_that(p.cargo("build"), execs().with_status(0) .with_stdout(&format!("\ {updating} git repository `file://[..]a1` {compiling} a v0.5.0 ([..]a1#[..] {compiling} b v0.5.0 ([..]) {compiling} project v0.5.0 ([..]) ", updating = UPDATING, compiling = COMPILING))); File::create(&p.root().join("b/Cargo.toml")).unwrap().write_all(format!(r#" [project] name = "b" version = "0.5.0" authors = [] [dependencies.a] git = '{}' "#, a2.url()).as_bytes()).unwrap(); assert_that(p.cargo("build"), execs().with_status(0) .with_stdout(&format!("\ {updating} git repository `file://[..]a2` {compiling} a v0.5.1 ([..]a2#[..] 
{compiling} b v0.5.0 ([..]) {compiling} project v0.5.0 ([..]) ", updating = UPDATING, compiling = COMPILING))); }); test!(dont_require_submodules_are_checked_out { let project = project("foo"); let git1 = git::new("dep1", |p| { p.file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] build = "build.rs" "#) .file("build.rs", "fn main() {}") .file("src/lib.rs", "") .file("a/foo", "") }).unwrap(); let git2 = git::new("dep2", |p| p).unwrap(); let repo = git2::Repository::open(&git1.root()).unwrap(); let url = path2url(git2.root()).to_string(); git::add_submodule(&repo, &url, &Path::new("a/submodule")); git::commit(&repo); git2::Repository::init(&project.root()).unwrap(); let url = path2url(git1.root()).to_string(); let dst = paths::home().join("foo"); git2::Repository::clone(&url, &dst).unwrap(); assert_that(git1.cargo("build").arg("-v").cwd(&dst), execs().with_status(0)); }); test!(doctest_same_name { let a2 = git::new("a2", |p| { p.file("Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] "#) .file("src/lib.rs", "pub fn a2() {}") }).unwrap(); let a1 = git::new("a1", |p| { p.file("Cargo.toml", &format!(r#" [project] name = "a" version = "0.5.0" authors = [] [dependencies] a = {{ git = '{}' }} "#, a2.url())) .file("src/lib.rs", "extern crate a; pub fn a1() {}") }).unwrap(); let p = project("foo") .file("Cargo.toml", &format!(r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] a = {{ git = '{}' }} "#, a1.url())) .file("src/lib.rs", r#" #[macro_use] extern crate a; "#); assert_that(p.cargo_process("test").arg("-v"), execs().with_status(0)); }); test!(lints_are_suppressed { let a = git::new("a", |p| { p.file("Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] "#) .file("src/lib.rs", " use std::option; ") }).unwrap(); let p = project("foo") .file("Cargo.toml", &format!(r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] a = {{ git = '{}' }} "#, a.url())) .file("src/lib.rs", 
""); assert_that(p.cargo_process("build"), execs().with_status(0).with_stdout(&format!("\ {updating} git repository `[..]` {compiling} a v0.5.0 ([..]) {compiling} foo v0.0.1 ([..]) ", compiling = COMPILING, updating = UPDATING))); }); test!(denied_lints_are_allowed { let enabled = super::RUSTC.with(|r| r.cap_lints); if !enabled { return } let a = git::new("a", |p| { p.file("Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] "#) .file("src/lib.rs", " #![deny(warnings)] use std::option; ") }).unwrap(); let p = project("foo") .file("Cargo.toml", &format!(r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] a = {{ git = '{}' }} "#, a.url())) .file("src/lib.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(0).with_stdout(&format!("\ {updating} git repository `[..]` {compiling} a v0.5.0 ([..]) {compiling} foo v0.0.1 ([..]) ", compiling = COMPILING, updating = UPDATING))); }); cargo-0.8.0/tests/test_cargo_compile_path_deps.rs000066400000000000000000000554161264656333200222400ustar00rootroot00000000000000use std::fs::{self, File}; use std::io::prelude::*; use std::thread; use support::{project, execs, main_file}; use support::{COMPILING, RUNNING}; use support::paths::{self, CargoPathExt}; use hamcrest::{assert_that, existing_file}; use cargo::util::process; fn setup() { } test!(cargo_compile_with_nested_deps_shorthand { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] version = "0.5.0" path = "bar" [[bin]] name = "foo" "#) .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) .file("bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.baz] version = "0.5.0" path = "baz" [lib] name = "bar" "#) .file("bar/src/bar.rs", r#" extern crate baz; pub fn gimme() -> String { baz::gimme() } "#) .file("bar/baz/Cargo.toml", r#" [project] name = "baz" version = "0.5.0" 
authors = ["wycats@example.com"] [lib] name = "baz" "#) .file("bar/baz/src/baz.rs", r#" pub fn gimme() -> String { "test passed".to_string() } "#); assert_that(p.cargo_process("build"), execs().with_status(0) .with_stdout(&format!("{} baz v0.5.0 ({})\n\ {} bar v0.5.0 ({})\n\ {} foo v0.5.0 ({})\n", COMPILING, p.url(), COMPILING, p.url(), COMPILING, p.url()))); assert_that(&p.bin("foo"), existing_file()); assert_that(process(&p.bin("foo")), execs().with_stdout("test passed\n").with_status(0)); println!("cleaning"); assert_that(p.cargo("clean"), execs().with_stdout("").with_status(0)); println!("building baz"); assert_that(p.cargo("build").arg("-p").arg("baz"), execs().with_status(0) .with_stdout(&format!("{} baz v0.5.0 ({})\n", COMPILING, p.url()))); println!("building foo"); assert_that(p.cargo("build") .arg("-p").arg("foo"), execs().with_status(0) .with_stdout(&format!("{} bar v0.5.0 ({})\n\ {} foo v0.5.0 ({})\n", COMPILING, p.url(), COMPILING, p.url()))); }); test!(cargo_compile_with_root_dev_deps { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dev-dependencies.bar] version = "0.5.0" path = "../bar" [[bin]] name = "foo" "#) .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])); let p2 = project("bar") .file("Cargo.toml", r#" [package] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] "#) .file("src/lib.rs", r#" pub fn gimme() -> &'static str { "zoidberg" } "#); p2.build(); assert_that(p.cargo_process("build"), execs().with_status(101)) }); test!(cargo_compile_with_root_dev_deps_with_testing { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dev-dependencies.bar] version = "0.5.0" path = "../bar" [[bin]] name = "foo" "#) .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])); let p2 = project("bar") .file("Cargo.toml", r#" [package] name = "bar" version = "0.5.0" authors = 
["wycats@example.com"] "#) .file("src/lib.rs", r#" pub fn gimme() -> &'static str { "zoidberg" } "#); p2.build(); assert_that(p.cargo_process("test"), execs().with_stdout(&format!("\ {compiling} [..] v0.5.0 ({url}) {compiling} [..] v0.5.0 ({url}) {running} target[..]foo-[..] running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, url = p.url(), running = RUNNING))); }); test!(cargo_compile_with_transitive_dev_deps { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] version = "0.5.0" path = "bar" [[bin]] name = "foo" "#) .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) .file("bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] [dev-dependencies.baz] git = "git://example.com/path/to/nowhere" [lib] name = "bar" "#) .file("bar/src/bar.rs", r#" pub fn gimme() -> &'static str { "zoidberg" } "#); assert_that(p.cargo_process("build"), execs().with_stdout(&format!("{} bar v0.5.0 ({})\n\ {} foo v0.5.0 ({})\n", COMPILING, p.url(), COMPILING, p.url()))); assert_that(&p.bin("foo"), existing_file()); assert_that(process(&p.bin("foo")), execs().with_stdout("zoidberg\n")); }); test!(no_rebuild_dependency { let mut p = project("foo"); p = p .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [[bin]] name = "foo" [dependencies.bar] path = "bar" "#) .file("src/foo.rs", r#" extern crate bar; fn main() { bar::bar() } "#) .file("bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] [lib] name = "bar" "#) .file("bar/src/bar.rs", r#" pub fn bar() {} "#); // First time around we should compile both foo and bar assert_that(p.cargo_process("build"), execs().with_stdout(&format!("{} bar v0.5.0 ({})\n\ {} foo v0.5.0 ({})\n", COMPILING, p.url(), COMPILING, p.url()))); // This time we shouldn't compile bar 
assert_that(p.cargo("build"), execs().with_stdout("")); p.root().move_into_the_past().unwrap(); p.build(); // rebuild the files (rewriting them in the process) assert_that(p.cargo("build"), execs().with_stdout(&format!("{} bar v0.5.0 ({})\n\ {} foo v0.5.0 ({})\n", COMPILING, p.url(), COMPILING, p.url()))); }); test!(deep_dependencies_trigger_rebuild { let mut p = project("foo"); p = p .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [[bin]] name = "foo" [dependencies.bar] path = "bar" "#) .file("src/foo.rs", r#" extern crate bar; fn main() { bar::bar() } "#) .file("bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] [lib] name = "bar" [dependencies.baz] path = "../baz" "#) .file("bar/src/bar.rs", r#" extern crate baz; pub fn bar() { baz::baz() } "#) .file("baz/Cargo.toml", r#" [project] name = "baz" version = "0.5.0" authors = ["wycats@example.com"] [lib] name = "baz" "#) .file("baz/src/baz.rs", r#" pub fn baz() {} "#); assert_that(p.cargo_process("build"), execs().with_stdout(&format!("{} baz v0.5.0 ({})\n\ {} bar v0.5.0 ({})\n\ {} foo v0.5.0 ({})\n", COMPILING, p.url(), COMPILING, p.url(), COMPILING, p.url()))); assert_that(p.cargo("build"), execs().with_stdout("")); // Make sure an update to baz triggers a rebuild of bar // // We base recompilation off mtime, so sleep for at least a second to ensure // that this write will change the mtime. 
thread::sleep_ms(1000); File::create(&p.root().join("baz/src/baz.rs")).unwrap().write_all(br#" pub fn baz() { println!("hello!"); } "#).unwrap(); assert_that(p.cargo("build"), execs().with_stdout(&format!("{} baz v0.5.0 ({})\n\ {} bar v0.5.0 ({})\n\ {} foo v0.5.0 ({})\n", COMPILING, p.url(), COMPILING, p.url(), COMPILING, p.url()))); // Make sure an update to bar doesn't trigger baz thread::sleep_ms(1000); File::create(&p.root().join("bar/src/bar.rs")).unwrap().write_all(br#" extern crate baz; pub fn bar() { println!("hello!"); baz::baz(); } "#).unwrap(); assert_that(p.cargo("build"), execs().with_stdout(&format!("{} bar v0.5.0 ({})\n\ {} foo v0.5.0 ({})\n", COMPILING, p.url(), COMPILING, p.url()))); }); test!(no_rebuild_two_deps { let mut p = project("foo"); p = p .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [[bin]] name = "foo" [dependencies.bar] path = "bar" [dependencies.baz] path = "baz" "#) .file("src/foo.rs", r#" extern crate bar; fn main() { bar::bar() } "#) .file("bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] [lib] name = "bar" [dependencies.baz] path = "../baz" "#) .file("bar/src/bar.rs", r#" pub fn bar() {} "#) .file("baz/Cargo.toml", r#" [project] name = "baz" version = "0.5.0" authors = ["wycats@example.com"] [lib] name = "baz" "#) .file("baz/src/baz.rs", r#" pub fn baz() {} "#); assert_that(p.cargo_process("build"), execs().with_stdout(&format!("{} baz v0.5.0 ({})\n\ {} bar v0.5.0 ({})\n\ {} foo v0.5.0 ({})\n", COMPILING, p.url(), COMPILING, p.url(), COMPILING, p.url()))); assert_that(&p.bin("foo"), existing_file()); assert_that(p.cargo("build"), execs().with_stdout("")); assert_that(&p.bin("foo"), existing_file()); }); test!(nested_deps_recompile { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] version = "0.5.0" path = "src/bar" [[bin]] name = "foo" "#) 
.file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) .file("src/bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] [lib] name = "bar" "#) .file("src/bar/src/bar.rs", "pub fn gimme() {}"); let bar = p.url(); assert_that(p.cargo_process("build"), execs().with_stdout(&format!("{} bar v0.5.0 ({})\n\ {} foo v0.5.0 ({})\n", COMPILING, bar, COMPILING, p.url()))); p.root().move_into_the_past().unwrap(); File::create(&p.root().join("src/foo.rs")).unwrap().write_all(br#" fn main() {} "#).unwrap(); // This shouldn't recompile `bar` assert_that(p.cargo("build"), execs().with_stdout(&format!("{} foo v0.5.0 ({})\n", COMPILING, p.url()))); }); test!(error_message_for_missing_manifest { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] path = "src/bar" [lib] name = "foo" "#) .file("src/bar/not-a-manifest", ""); assert_that(p.cargo_process("build"), execs() .with_status(101) .with_stderr(&format!("\ Unable to update file://[..] 
Caused by: Could not find `Cargo.toml` in `{}` ", p.root().join("src").join("bar").display()))); }); test!(override_relative { let bar = project("bar") .file("Cargo.toml", r#" [package] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] "#) .file("src/lib.rs", ""); fs::create_dir(&paths::root().join(".cargo")).unwrap(); File::create(&paths::root().join(".cargo/config")).unwrap() .write_all(br#"paths = ["bar"]"#).unwrap(); let p = project("foo") .file("Cargo.toml", &format!(r#" [package] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] path = '{}' "#, bar.root().display())) .file("src/lib.rs", ""); bar.build(); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0)); }); test!(override_self { let bar = project("bar") .file("Cargo.toml", r#" [package] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] "#) .file("src/lib.rs", ""); let p = project("foo"); let root = p.root().clone(); let p = p .file(".cargo/config", &format!(r#" paths = ['{}'] "#, root.display())) .file("Cargo.toml", &format!(r#" [package] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] path = '{}' "#, bar.root().display())) .file("src/lib.rs", "") .file("src/main.rs", "fn main() {}"); bar.build(); assert_that(p.cargo_process("build"), execs().with_status(0)); }); test!(override_path_dep { let bar = project("bar") .file("p1/Cargo.toml", r#" [package] name = "p1" version = "0.5.0" authors = [] [dependencies.p2] path = "../p2" "#) .file("p1/src/lib.rs", "") .file("p2/Cargo.toml", r#" [package] name = "p2" version = "0.5.0" authors = [] "#) .file("p2/src/lib.rs", ""); let p = project("foo") .file(".cargo/config", &format!(r#" paths = ['{}', '{}'] "#, bar.root().join("p1").display(), bar.root().join("p2").display())) .file("Cargo.toml", &format!(r#" [package] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.p2] path = '{}' "#, bar.root().join("p2").display())) 
.file("src/lib.rs", ""); bar.build(); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0)); }); test!(path_dep_build_cmd { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = ["wycats@example.com"] [dependencies.bar] version = "0.5.0" path = "bar" [[bin]] name = "foo" "#) .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) .file("bar/Cargo.toml", r#" [project] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] build = "build.rs" [lib] name = "bar" "#) .file("bar/build.rs", r#" use std::fs; fn main() { fs::copy("src/bar.rs.in", "src/bar.rs").unwrap(); } "#) .file("bar/src/bar.rs.in", r#" pub fn gimme() -> i32 { 0 } "#); p.build(); p.root().join("bar").move_into_the_past().unwrap(); assert_that(p.cargo("build"), execs().with_stdout(&format!("{} bar v0.5.0 ({})\n\ {} foo v0.5.0 ({})\n", COMPILING, p.url(), COMPILING, p.url()))); assert_that(&p.bin("foo"), existing_file()); assert_that(process(&p.bin("foo")), execs().with_stdout("0\n")); // Touching bar.rs.in should cause the `build` command to run again. 
{ let file = fs::File::create(&p.root().join("bar/src/bar.rs.in")); file.unwrap().write_all(br#"pub fn gimme() -> i32 { 1 }"#).unwrap(); } assert_that(p.cargo("build"), execs().with_stdout(&format!("{} bar v0.5.0 ({})\n\ {} foo v0.5.0 ({})\n", COMPILING, p.url(), COMPILING, p.url()))); assert_that(process(&p.bin("foo")), execs().with_stdout("1\n")); }); test!(dev_deps_no_rebuild_lib { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dev-dependencies.bar] path = "bar" [lib] name = "foo" doctest = false "#) .file("src/lib.rs", r#" #[cfg(test)] extern crate bar; #[cfg(not(test))] fn foo() { env!("FOO"); } "#) .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.5.0" authors = ["wycats@example.com"] "#) .file("bar/src/lib.rs", "pub fn bar() {}"); p.build(); assert_that(p.cargo("build") .env("FOO", "bar"), execs().with_status(0) .with_stdout(&format!("{} foo v0.5.0 ({})\n", COMPILING, p.url()))); assert_that(p.cargo("test"), execs().with_status(0) .with_stdout(&format!("\ {} [..] v0.5.0 ({}) {} [..] v0.5.0 ({}) Running target[..]foo-[..] running 0 tests test result: ok. 
0 passed; 0 failed; 0 ignored; 0 measured ", COMPILING, p.url(), COMPILING, p.url()))); }); test!(custom_target_no_rebuild { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] a = { path = "a" } "#) .file("src/lib.rs", "") .file("a/Cargo.toml", r#" [project] name = "a" version = "0.5.0" authors = [] "#) .file("a/src/lib.rs", "") .file("b/Cargo.toml", r#" [project] name = "b" version = "0.5.0" authors = [] [dependencies] a = { path = "../a" } "#) .file("b/src/lib.rs", ""); p.build(); assert_that(p.cargo("build"), execs().with_status(0) .with_stdout(&format!("\ {compiling} a v0.5.0 ([..]) {compiling} foo v0.5.0 ([..]) ", compiling = COMPILING))); assert_that(p.cargo("build") .arg("--manifest-path=b/Cargo.toml") .env("CARGO_TARGET_DIR", "target"), execs().with_status(0) .with_stdout(&format!("\ {compiling} b v0.5.0 ([..]) ", compiling = COMPILING))); }); test!(override_and_depend { let p = project("foo") .file("a/a1/Cargo.toml", r#" [project] name = "a1" version = "0.5.0" authors = [] [dependencies] a2 = { path = "../a2" } "#) .file("a/a1/src/lib.rs", "") .file("a/a2/Cargo.toml", r#" [project] name = "a2" version = "0.5.0" authors = [] "#) .file("a/a2/src/lib.rs", "") .file("b/Cargo.toml", r#" [project] name = "b" version = "0.5.0" authors = [] [dependencies] a1 = { path = "../a/a1" } a2 = { path = "../a/a2" } "#) .file("b/src/lib.rs", "") .file("b/.cargo/config", r#" paths = ["../a"] "#); p.build(); assert_that(p.cargo("build").cwd(p.root().join("b")), execs().with_status(0) .with_stdout(&format!("\ {compiling} a2 v0.5.0 ([..]) {compiling} a1 v0.5.0 ([..]) {compiling} b v0.5.0 ([..]) ", compiling = COMPILING))); }); cargo-0.8.0/tests/test_cargo_compile_plugins.rs000066400000000000000000000154671264656333200217540ustar00rootroot00000000000000use std::fs; use std::env; use support::{project, execs}; use support::{COMPILING, RUNNING}; use hamcrest::assert_that; fn setup() { } test!(plugin_to_the_max { if 
!::is_nightly() { return } let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [lib] name = "foo_lib" [dependencies.bar] path = "../bar" "#) .file("src/main.rs", r#" #![feature(plugin)] #![plugin(bar)] extern crate foo_lib; fn main() { foo_lib::foo(); } "#) .file("src/foo_lib.rs", r#" #![feature(plugin)] #![plugin(bar)] pub fn foo() {} "#); let bar = project("bar") .file("Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [lib] name = "bar" plugin = true [dependencies.baz] path = "../baz" "#) .file("src/lib.rs", r#" #![feature(plugin_registrar, rustc_private)] extern crate rustc_plugin; extern crate baz; use rustc_plugin::Registry; #[plugin_registrar] pub fn foo(_reg: &mut Registry) { println!("{}", baz::baz()); } "#); let baz = project("baz") .file("Cargo.toml", r#" [package] name = "baz" version = "0.0.1" authors = [] [lib] name = "baz" crate_type = ["dylib"] "#) .file("src/lib.rs", "pub fn baz() -> i32 { 1 }"); bar.build(); baz.build(); assert_that(foo.cargo_process("build"), execs().with_status(0)); assert_that(foo.cargo("doc"), execs().with_status(0)); }); test!(plugin_with_dynamic_native_dependency { if !::is_nightly() { return } let build = project("builder") .file("Cargo.toml", r#" [package] name = "builder" version = "0.0.1" authors = [] [lib] name = "builder" crate-type = ["dylib"] "#) .file("src/lib.rs", r#" #[no_mangle] pub extern fn foo() {} "#); assert_that(build.cargo_process("build"), execs().with_status(0)); let src = build.root().join("target/debug"); let lib = fs::read_dir(&src).unwrap().map(|s| s.unwrap().path()).find(|lib| { let lib = lib.file_name().unwrap().to_str().unwrap(); lib.starts_with(env::consts::DLL_PREFIX) && lib.ends_with(env::consts::DLL_SUFFIX) }).unwrap(); let libname = lib.file_name().unwrap().to_str().unwrap(); let libname = &libname[env::consts::DLL_PREFIX.len().. 
libname.len() - env::consts::DLL_SUFFIX.len()]; let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" "#) .file("src/main.rs", r#" #![feature(plugin)] #![plugin(bar)] fn main() {} "#) .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] build = 'build.rs' [lib] name = "bar" plugin = true "#) .file("bar/build.rs", r#" use std::path::PathBuf; use std::env; fn main() { let src = PathBuf::from(env::var("SRC").unwrap()); println!("cargo:rustc-flags=-L {}", src.parent().unwrap() .display()); } "#) .file("bar/src/lib.rs", &format!(r#" #![feature(plugin_registrar, rustc_private)] extern crate rustc_plugin; use rustc_plugin::Registry; #[link(name = "{}")] extern {{ fn foo(); }} #[plugin_registrar] pub fn bar(_reg: &mut Registry) {{ unsafe {{ foo() }} }} "#, libname)); assert_that(foo.cargo_process("build").env("SRC", &lib).arg("-v"), execs().with_status(0)); }); test!(plugin_integration { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" [lib] name = "foo" plugin = true doctest = false "#) .file("build.rs", "fn main() {}") .file("src/lib.rs", "") .file("tests/it_works.rs", ""); assert_that(p.cargo_process("test").arg("-v"), execs().with_status(0)); }); test!(doctest_a_plugin { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = { path = "bar" } "#) .file("src/lib.rs", r#" #[macro_use] extern crate bar; "#) .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [lib] name = "bar" plugin = true "#) .file("bar/src/lib.rs", r#" pub fn bar() {} "#); assert_that(p.cargo_process("test").arg("-v"), execs().with_status(0)); }); // See #1515 test!(native_plugin_dependency_with_custom_ar_linker { let target = ::rustc_host(); let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" 
authors = [] [lib] name = "foo" plugin = true "#) .file("src/lib.rs", ""); let bar = project("bar") .file("Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [lib] name = "bar" [dependencies.foo] path = "../foo" "#) .file("src/lib", "") .file(".cargo/config", &format!(r#" [target.{}] ar = "nonexistent-ar" linker = "nonexistent-linker" "#, target)); foo.build(); assert_that(bar.cargo_process("build").arg("--verbose"), execs().with_stdout(&format!("\ {compiling} foo v0.0.1 ({url}) {running} `rustc [..] -C ar=nonexistent-ar -C linker=nonexistent-linker [..]` ", compiling = COMPILING, running = RUNNING, url = bar.url()))) }); cargo-0.8.0/tests/test_cargo_cross_compile.rs000066400000000000000000000621551264656333200214200ustar00rootroot00000000000000use std::env; use support::{project, execs, basic_bin_manifest}; use support::{RUNNING, COMPILING, DOCTEST}; use hamcrest::{assert_that, existing_file}; use cargo::util::process; fn setup() { } fn disabled() -> bool { // First, disable if ./configure requested so match env::var("CFG_DISABLE_CROSS_TESTS") { Ok(ref s) if *s == "1" => return true, _ => {} } // Right now the windows bots cannot cross compile due to the mingw setup, // so we disable ourselves on all but macos/linux setups where the rustc // install script ensures we have both architectures !(cfg!(target_os = "macos") || cfg!(target_os = "linux") || cfg!(target_env = "msvc")) } fn alternate() -> String { let platform = match env::consts::OS { "linux" => "unknown-linux-gnu", "macos" => "apple-darwin", "windows" => "pc-windows-msvc", _ => unreachable!(), }; let arch = match env::consts::ARCH { "x86" => "x86_64", "x86_64" => "i686", _ => unreachable!(), }; format!("{}-{}", arch, platform) } fn alternate_arch() -> &'static str { match env::consts::ARCH { "x86" => "x86_64", "x86_64" => "x86", _ => unreachable!(), } } test!(simple_cross { if disabled() { return } let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" 
authors = [] build = "build.rs" "#) .file("build.rs", &format!(r#" fn main() {{ assert_eq!(std::env::var("TARGET").unwrap(), "{}"); }} "#, alternate())) .file("src/main.rs", &format!(r#" use std::env; fn main() {{ assert_eq!(env::consts::ARCH, "{}"); }} "#, alternate_arch())); let target = alternate(); assert_that(p.cargo_process("build").arg("--target").arg(&target).arg("-v"), execs().with_status(0)); assert_that(&p.target_bin(&target, "foo"), existing_file()); assert_that(process(&p.target_bin(&target, "foo")), execs().with_status(0)); }); test!(simple_deps { if disabled() { return } let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" "#) .file("src/main.rs", r#" extern crate bar; fn main() { bar::bar(); } "#); let p2 = project("bar") .file("Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] "#) .file("src/lib.rs", "pub fn bar() {}"); p2.build(); let target = alternate(); assert_that(p.cargo_process("build").arg("--target").arg(&target), execs().with_status(0)); assert_that(&p.target_bin(&target, "foo"), existing_file()); assert_that(process(&p.target_bin(&target, "foo")), execs().with_status(0)); }); test!(plugin_deps { if disabled() { return } if !::is_nightly() { return } let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" [dependencies.baz] path = "../baz" "#) .file("src/main.rs", r#" #![feature(plugin)] #![plugin(bar)] extern crate baz; fn main() { assert_eq!(bar!(), baz::baz()); } "#); let bar = project("bar") .file("Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [lib] name = "bar" plugin = true "#) .file("src/lib.rs", r#" #![feature(plugin_registrar, quote, rustc_private)] extern crate rustc_plugin; extern crate syntax; use rustc_plugin::Registry; use syntax::ast::TokenTree; use syntax::codemap::Span; use syntax::ext::base::{ExtCtxt, MacEager, 
MacResult}; #[plugin_registrar] pub fn foo(reg: &mut Registry) { reg.register_macro("bar", expand_bar); } fn expand_bar(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box { MacEager::expr(quote_expr!(cx, 1)) } "#); let baz = project("baz") .file("Cargo.toml", r#" [package] name = "baz" version = "0.0.1" authors = [] "#) .file("src/lib.rs", "pub fn baz() -> i32 { 1 }"); bar.build(); baz.build(); let target = alternate(); assert_that(foo.cargo_process("build").arg("--target").arg(&target), execs().with_status(0)); assert_that(&foo.target_bin(&target, "foo"), existing_file()); assert_that(process(&foo.target_bin(&target, "foo")), execs().with_status(0)); }); test!(plugin_to_the_max { if disabled() { return } if !::is_nightly() { return } let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" [dependencies.baz] path = "../baz" "#) .file("src/main.rs", r#" #![feature(plugin)] #![plugin(bar)] extern crate baz; fn main() { assert_eq!(bar!(), baz::baz()); } "#); let bar = project("bar") .file("Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [lib] name = "bar" plugin = true [dependencies.baz] path = "../baz" "#) .file("src/lib.rs", r#" #![feature(plugin_registrar, quote, rustc_private)] extern crate rustc_plugin; extern crate syntax; extern crate baz; use rustc_plugin::Registry; use syntax::ast::TokenTree; use syntax::codemap::Span; use syntax::ext::base::{ExtCtxt, MacEager, MacResult}; #[plugin_registrar] pub fn foo(reg: &mut Registry) { reg.register_macro("bar", expand_bar); } fn expand_bar(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box { MacEager::expr(quote_expr!(cx, baz::baz())) } "#); let baz = project("baz") .file("Cargo.toml", r#" [package] name = "baz" version = "0.0.1" authors = [] "#) .file("src/lib.rs", "pub fn baz() -> i32 { 1 }"); bar.build(); baz.build(); let target = alternate(); 
assert_that(foo.cargo_process("build").arg("--target").arg(&target).arg("-v"), execs().with_status(0)); println!("second"); assert_that(foo.cargo("build").arg("-v") .arg("--target").arg(&target), execs().with_status(0)); assert_that(&foo.target_bin(&target, "foo"), existing_file()); assert_that(process(&foo.target_bin(&target, "foo")), execs().with_status(0)); }); test!(linker_and_ar { if disabled() { return } let target = alternate(); let p = project("foo") .file(".cargo/config", &format!(r#" [target.{}] ar = "my-ar-tool" linker = "my-linker-tool" "#, target)) .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &format!(r#" use std::env; fn main() {{ assert_eq!(env::consts::ARCH, "{}"); }} "#, alternate_arch())); assert_that(p.cargo_process("build").arg("--target").arg(&target) .arg("-v"), execs().with_status(101) .with_stdout(&format!("\ {compiling} foo v0.5.0 ({url}) {running} `rustc src[..]foo.rs --crate-name foo --crate-type bin -g \ --out-dir {dir}[..]target[..]{target}[..]debug \ --emit=dep-info,link \ --target {target} \ -C ar=my-ar-tool -C linker=my-linker-tool \ -L dependency={dir}[..]target[..]{target}[..]debug \ -L dependency={dir}[..]target[..]{target}[..]debug[..]deps` ", running = RUNNING, compiling = COMPILING, dir = p.root().display(), url = p.url(), target = target, ))); }); test!(plugin_with_extra_dylib_dep { if disabled() { return } if !::is_nightly() { return } let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" "#) .file("src/main.rs", r#" #![feature(plugin)] #![plugin(bar)] fn main() {} "#); let bar = project("bar") .file("Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [lib] name = "bar" plugin = true [dependencies.baz] path = "../baz" "#) .file("src/lib.rs", r#" #![feature(plugin_registrar, rustc_private)] extern crate rustc_plugin; extern crate baz; use rustc_plugin::Registry; #[plugin_registrar] pub fn foo(reg: &mut 
Registry) { println!("{}", baz::baz()); } "#); let baz = project("baz") .file("Cargo.toml", r#" [package] name = "baz" version = "0.0.1" authors = [] [lib] name = "baz" crate_type = ["dylib"] "#) .file("src/lib.rs", "pub fn baz() -> i32 { 1 }"); bar.build(); baz.build(); let target = alternate(); assert_that(foo.cargo_process("build").arg("--target").arg(&target), execs().with_status(0)); }); test!(cross_tests { if disabled() { return } let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" authors = [] version = "0.0.0" [[bin]] name = "bar" "#) .file("src/main.rs", &format!(r#" extern crate foo; use std::env; fn main() {{ assert_eq!(env::consts::ARCH, "{}"); }} #[test] fn test() {{ main() }} "#, alternate_arch())) .file("src/lib.rs", &format!(r#" use std::env; pub fn foo() {{ assert_eq!(env::consts::ARCH, "{}"); }} #[test] fn test_foo() {{ foo() }} "#, alternate_arch())); let target = alternate(); assert_that(p.cargo_process("test").arg("--target").arg(&target), execs().with_status(0) .with_stdout(&format!("\ {compiling} foo v0.0.0 ({foo}) {running} target[..]{triple}[..]bar-[..] running 1 test test test ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {running} target[..]{triple}[..]foo-[..] running 1 test test test_foo ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {doctest} foo running 0 tests test result: ok. 
0 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, running = RUNNING, foo = p.url(), triple = target, doctest = DOCTEST))); }); test!(simple_cargo_run { if disabled() { return } let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] "#) .file("src/main.rs", &format!(r#" use std::env; fn main() {{ assert_eq!(env::consts::ARCH, "{}"); }} "#, alternate_arch())); let target = alternate(); assert_that(p.cargo_process("run").arg("--target").arg(&target), execs().with_status(0)); }); test!(cross_with_a_build_script { if disabled() { return } let target = alternate(); let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] build = 'build.rs' "#) .file("build.rs", &format!(r#" use std::env; use std::path::PathBuf; fn main() {{ assert_eq!(env::var("TARGET").unwrap(), "{0}"); let mut path = PathBuf::from(env::var_os("OUT_DIR").unwrap()); assert_eq!(path.file_name().unwrap().to_str().unwrap(), "out"); path.pop(); assert!(path.file_name().unwrap().to_str().unwrap() .starts_with("foo-")); path.pop(); assert_eq!(path.file_name().unwrap().to_str().unwrap(), "build"); path.pop(); assert_eq!(path.file_name().unwrap().to_str().unwrap(), "debug"); path.pop(); assert_eq!(path.file_name().unwrap().to_str().unwrap(), "{0}"); path.pop(); assert_eq!(path.file_name().unwrap().to_str().unwrap(), "target"); }} "#, target)) .file("src/main.rs", "fn main() {}"); assert_that(p.cargo_process("build").arg("--target").arg(&target).arg("-v"), execs().with_status(0) .with_stdout(&format!("\ {compiling} foo v0.0.0 (file://[..]) {running} `rustc build.rs [..] --out-dir {dir}[..]target[..]build[..]foo-[..]` {running} `{dir}[..]target[..]build[..]foo-[..]build-script-build` {running} `rustc src[..]main.rs [..] 
--target {target} [..]` ", compiling = COMPILING, running = RUNNING, target = target, dir = p.root().display()))); }); test!(build_script_needed_for_host_and_target { if disabled() { return } let target = alternate(); let host = ::rustc_host(); let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] build = 'build.rs' [dependencies.d1] path = "d1" [build-dependencies.d2] path = "d2" "#) .file("build.rs", r#" extern crate d2; fn main() { d2::d2(); } "#) .file("src/main.rs", " extern crate d1; fn main() { d1::d1(); } ") .file("d1/Cargo.toml", r#" [package] name = "d1" version = "0.0.0" authors = [] build = 'build.rs' "#) .file("d1/src/lib.rs", " pub fn d1() {} ") .file("d1/build.rs", r#" use std::env; fn main() { let target = env::var("TARGET").unwrap(); println!("cargo:rustc-flags=-L /path/to/{}", target); } "#) .file("d2/Cargo.toml", r#" [package] name = "d2" version = "0.0.0" authors = [] [dependencies.d1] path = "../d1" "#) .file("d2/src/lib.rs", " extern crate d1; pub fn d2() { d1::d1(); } "); assert_that(p.cargo_process("build").arg("--target").arg(&target).arg("-v"), execs().with_status(0) .with_stdout_contains(&format!("\ {compiling} d1 v0.0.0 ({url})", compiling = COMPILING, url = p.url())) .with_stdout_contains(&format!("\ {running} `rustc d1[..]build.rs [..] --out-dir {dir}[..]target[..]build[..]d1-[..]`", running = RUNNING, dir = p.root().display())) .with_stdout_contains(&format!("\ {running} `{dir}[..]target[..]build[..]d1-[..]build-script-build`", running = RUNNING, dir = p.root().display())) .with_stdout_contains(&format!("\ {running} `rustc d1[..]src[..]lib.rs [..]`", running = RUNNING)) .with_stdout_contains(&format!("\ {compiling} d2 v0.0.0 ({url})", compiling = COMPILING, url = p.url())) .with_stdout_contains(&format!("\ {running} `rustc d2[..]src[..]lib.rs [..] 
\ -L /path/to/{host}`", running = RUNNING, host = host)) .with_stdout_contains(&format!("\ {compiling} foo v0.0.0 ({url})", compiling = COMPILING, url = p.url())) .with_stdout_contains(&format!("\ {running} `rustc build.rs [..] --out-dir {dir}[..]target[..]build[..]foo-[..] \ -L /path/to/{host}`", running = RUNNING, dir = p.root().display(), host = host)) .with_stdout_contains(&format!("\ {running} `rustc src[..]main.rs [..] --target {target} [..] \ -L /path/to/{target}`", running = RUNNING, target = target))); }); test!(build_deps_for_the_right_arch { if disabled() { return } let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [dependencies.d2] path = "d2" "#) .file("src/main.rs", "extern crate d2; fn main() {}") .file("d1/Cargo.toml", r#" [package] name = "d1" version = "0.0.0" authors = [] "#) .file("d1/src/lib.rs", " pub fn d1() {} ") .file("d2/Cargo.toml", r#" [package] name = "d2" version = "0.0.0" authors = [] build = "build.rs" [build-dependencies.d1] path = "../d1" "#) .file("d2/build.rs", "extern crate d1; fn main() {}") .file("d2/src/lib.rs", ""); let target = alternate(); assert_that(p.cargo_process("build").arg("--target").arg(&target).arg("-v"), execs().with_status(0)); }); test!(build_script_only_host { if disabled() { return } let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] build = "build.rs" [build-dependencies.d1] path = "d1" "#) .file("src/main.rs", "fn main() {}") .file("build.rs", "extern crate d1; fn main() {}") .file("d1/Cargo.toml", r#" [package] name = "d1" version = "0.0.0" authors = [] build = "build.rs" "#) .file("d1/src/lib.rs", " pub fn d1() {} ") .file("d1/build.rs", r#" use std::env; fn main() { assert!(env::var("OUT_DIR").unwrap().replace("\\", "/") .contains("target/debug/build/d1-"), "bad: {:?}", env::var("OUT_DIR")); } "#); let target = alternate(); assert_that(p.cargo_process("build").arg("--target").arg(&target).arg("-v"), 
execs().with_status(0)); }); test!(plugin_build_script_right_arch { if disabled() { return } let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" [lib] name = "foo" plugin = true "#) .file("build.rs", "fn main() {}") .file("src/lib.rs", ""); assert_that(p.cargo_process("build").arg("-v").arg("--target").arg(alternate()), execs().with_status(0) .with_stdout(&format!("\ {compiling} foo v0.0.1 ([..]) {running} `rustc build.rs [..]` {running} `[..]build-script-build[..]` {running} `rustc src[..]lib.rs [..]` ", compiling = COMPILING, running = RUNNING))); }); test!(build_script_with_platform_specific_dependencies { if disabled() { return } let target = alternate(); let host = ::rustc_host(); let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" [build-dependencies.d1] path = "d1" "#) .file("build.rs", "extern crate d1; fn main() {}") .file("src/lib.rs", "") .file("d1/Cargo.toml", &format!(r#" [package] name = "d1" version = "0.0.0" authors = [] [target.{}.dependencies] d2 = {{ path = "../d2" }} "#, host)) .file("d1/src/lib.rs", "extern crate d2;") .file("d2/Cargo.toml", r#" [package] name = "d2" version = "0.0.0" authors = [] "#) .file("d2/src/lib.rs", ""); assert_that(p.cargo_process("build").arg("-v").arg("--target").arg(&target), execs().with_status(0) .with_stdout(&format!("\ {compiling} d2 v0.0.0 ([..]) {running} `rustc d2[..]src[..]lib.rs [..]` {compiling} d1 v0.0.0 ([..]) {running} `rustc d1[..]src[..]lib.rs [..]` {compiling} foo v0.0.1 ([..]) {running} `rustc build.rs [..]` {running} `{dir}[..]target[..]build[..]foo-[..]build-script-build` {running} `rustc src[..]lib.rs [..] 
--target {target} [..]` ", compiling = COMPILING, running = RUNNING, dir = p.root().display(), target = target))); }); test!(platform_specific_dependencies_do_not_leak { if disabled() { return } let target = alternate(); let host = ::rustc_host(); let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" [dependencies.d1] path = "d1" [build-dependencies.d1] path = "d1" "#) .file("build.rs", "extern crate d1; fn main() {}") .file("src/lib.rs", "") .file("d1/Cargo.toml", &format!(r#" [package] name = "d1" version = "0.0.0" authors = [] [target.{}.dependencies] d2 = {{ path = "../d2" }} "#, host)) .file("d1/src/lib.rs", "extern crate d2;") .file("d2/Cargo.toml", r#" [package] name = "d2" version = "0.0.0" authors = [] "#) .file("d2/src/lib.rs", ""); assert_that(p.cargo_process("build").arg("-v").arg("--target").arg(&target), execs().with_status(101) .with_stderr("\ [..] error: can't find crate for `d2`[..] [..] extern crate d2; [..] error: aborting due to previous error Could not compile `d1`. Caused by: [..] ")); }); test!(platform_specific_variables_reflected_in_build_scripts { if disabled() { return } let target = alternate(); let host = ::rustc_host(); let p = project("foo") .file("Cargo.toml", &format!(r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" [target.{host}.dependencies] d1 = {{ path = "d1" }} [target.{target}.dependencies] d2 = {{ path = "d2" }} "#, host = host, target = target)) .file("build.rs", &format!(r#" use std::env; fn main() {{ let platform = env::var("TARGET").unwrap(); let (expected, not_expected) = match &platform[..] 
{{ "{host}" => ("DEP_D1_VAL", "DEP_D2_VAL"), "{target}" => ("DEP_D2_VAL", "DEP_D1_VAL"), _ => panic!("unknown platform") }}; env::var(expected).ok() .expect(&format!("missing {{}}", expected)); env::var(not_expected).err() .expect(&format!("found {{}}", not_expected)); }} "#, host = host, target = target)) .file("src/lib.rs", "") .file("d1/Cargo.toml", r#" [package] name = "d1" version = "0.0.0" authors = [] links = "d1" build = "build.rs" "#) .file("d1/build.rs", r#" fn main() { println!("cargo:val=1") } "#) .file("d1/src/lib.rs", "") .file("d2/Cargo.toml", r#" [package] name = "d2" version = "0.0.0" authors = [] links = "d2" build = "build.rs" "#) .file("d2/build.rs", r#" fn main() { println!("cargo:val=1") } "#) .file("d2/src/lib.rs", ""); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0)); assert_that(p.cargo_process("build").arg("-v").arg("--target").arg(&target), execs().with_status(0)); }); cargo-0.8.0/tests/test_cargo_doc.rs000066400000000000000000000336071264656333200173240ustar00rootroot00000000000000use std::str; use support::{project, execs, path2url}; use support::{COMPILING, DOCUMENTING, RUNNING}; use hamcrest::{assert_that, existing_file, existing_dir, is_not}; fn setup() { } test!(simple { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" "#) .file("build.rs", "fn main() {}") .file("src/lib.rs", r#" pub fn foo() {} "#); assert_that(p.cargo_process("doc"), execs().with_status(0).with_stdout(&format!("\ [..] foo v0.0.1 ({dir}) [..] 
foo v0.0.1 ({dir}) ", dir = path2url(p.root())))); assert_that(&p.root().join("target/doc"), existing_dir()); assert_that(&p.root().join("target/doc/foo/index.html"), existing_file()); }); test!(doc_no_libs { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [[bin]] name = "foo" doc = false "#) .file("src/main.rs", r#" bad code "#); assert_that(p.cargo_process("doc"), execs().with_status(0)); }); test!(doc_twice { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", r#" pub fn foo() {} "#); assert_that(p.cargo_process("doc"), execs().with_status(0).with_stdout(&format!("\ {documenting} foo v0.0.1 ({dir}) ", documenting = DOCUMENTING, dir = path2url(p.root())))); assert_that(p.cargo("doc"), execs().with_status(0).with_stdout("")) }); test!(doc_deps { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" "#) .file("src/lib.rs", r#" extern crate bar; pub fn foo() {} "#) .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] "#) .file("bar/src/lib.rs", r#" pub fn bar() {} "#); assert_that(p.cargo_process("doc"), execs().with_status(0).with_stdout(&format!("\ [..] bar v0.0.1 ({dir}) [..] 
bar v0.0.1 ({dir}) {documenting} foo v0.0.1 ({dir}) ", documenting = DOCUMENTING, dir = path2url(p.root())))); assert_that(&p.root().join("target/doc"), existing_dir()); assert_that(&p.root().join("target/doc/foo/index.html"), existing_file()); assert_that(&p.root().join("target/doc/bar/index.html"), existing_file()); assert_that(p.cargo("doc") .env("RUST_LOG", "cargo::ops::cargo_rustc::fingerprint"), execs().with_status(0).with_stdout("")); assert_that(&p.root().join("target/doc"), existing_dir()); assert_that(&p.root().join("target/doc/foo/index.html"), existing_file()); assert_that(&p.root().join("target/doc/bar/index.html"), existing_file()); }); test!(doc_no_deps { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" "#) .file("src/lib.rs", r#" extern crate bar; pub fn foo() {} "#) .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] "#) .file("bar/src/lib.rs", r#" pub fn bar() {} "#); assert_that(p.cargo_process("doc").arg("--no-deps"), execs().with_status(0).with_stdout(&format!("\ {compiling} bar v0.0.1 ({dir}) {documenting} foo v0.0.1 ({dir}) ", documenting = DOCUMENTING, compiling = COMPILING, dir = path2url(p.root())))); assert_that(&p.root().join("target/doc"), existing_dir()); assert_that(&p.root().join("target/doc/foo/index.html"), existing_file()); assert_that(&p.root().join("target/doc/bar/index.html"), is_not(existing_file())); }); test!(doc_only_bin { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" "#) .file("src/main.rs", r#" extern crate bar; pub fn foo() {} "#) .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] "#) .file("bar/src/lib.rs", r#" pub fn bar() {} "#); assert_that(p.cargo_process("doc").arg("-v"), execs().with_status(0)); assert_that(&p.root().join("target/doc"), existing_dir()); 
assert_that(&p.root().join("target/doc/bar/index.html"), existing_file()); assert_that(&p.root().join("target/doc/foo/index.html"), existing_file()); }); test!(doc_lib_bin_same_name { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/main.rs", "fn main() {}") .file("src/lib.rs", "fn foo() {}"); assert_that(p.cargo_process("doc"), execs().with_status(101) .with_stderr("\ cannot document a package where a library and a binary have the same name. \ Consider renaming one or marking the target as `doc = false` ")); }); test!(doc_dash_p { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.a] path = "a" "#) .file("src/lib.rs", "extern crate a;") .file("a/Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] [dependencies.b] path = "../b" "#) .file("a/src/lib.rs", "extern crate b;") .file("b/Cargo.toml", r#" [package] name = "b" version = "0.0.1" authors = [] "#) .file("b/src/lib.rs", ""); assert_that(p.cargo_process("doc").arg("-p").arg("a"), execs().with_status(0) .with_stdout(&format!("\ [..] b v0.0.1 (file://[..]) [..] 
b v0.0.1 (file://[..]) {documenting} a v0.0.1 (file://[..]) ", documenting = DOCUMENTING))); }); test!(doc_same_name { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", "") .file("src/bin/main.rs", "fn main() {}") .file("examples/main.rs", "fn main() {}") .file("tests/main.rs", "fn main() {}"); assert_that(p.cargo_process("doc"), execs().with_status(0)); }); test!(doc_target { const TARGET: &'static str = "arm-unknown-linux-gnueabihf"; if !::is_nightly() { return } let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", r#" #![feature(no_core)] #![no_core] extern { pub static A: u32; } "#); assert_that(p.cargo_process("doc").arg("--target").arg(TARGET).arg("--verbose"), execs().with_status(0)); assert_that(&p.root().join(&format!("target/{}/doc", TARGET)), existing_dir()); assert_that(&p.root().join(&format!("target/{}/doc/foo/index.html", TARGET)), existing_file()); }); test!(target_specific_not_documented { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [target.foo.dependencies] a = { path = "a" } "#) .file("src/lib.rs", "") .file("a/Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] "#) .file("a/src/lib.rs", "not rust"); assert_that(p.cargo_process("doc"), execs().with_status(0)); }); test!(output_not_captured { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] a = { path = "a" } "#) .file("src/lib.rs", "") .file("a/Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] "#) .file("a/src/lib.rs", " /// ``` /// ☃ /// ``` pub fn foo() {} "); let output = p.cargo_process("doc").exec_with_output().err().unwrap() .output.unwrap(); let stderr = str::from_utf8(&output.stderr).unwrap(); assert!(stderr.contains("☃"), "no snowman\n{}", stderr); assert!(stderr.contains("unknown start 
of token"), "no message\n{}", stderr); }); test!(target_specific_documented { let p = project("foo") .file("Cargo.toml", &format!(r#" [package] name = "foo" version = "0.0.1" authors = [] [target.foo.dependencies] a = {{ path = "a" }} [target.{}.dependencies] a = {{ path = "a" }} "#, ::rustc_host())) .file("src/lib.rs", " extern crate a; /// test pub fn foo() {} ") .file("a/Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] "#) .file("a/src/lib.rs", " /// test pub fn foo() {} "); assert_that(p.cargo_process("doc"), execs().with_status(0)); }); test!(no_document_build_deps { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [build-dependencies] a = { path = "a" } "#) .file("src/lib.rs", " pub fn foo() {} ") .file("a/Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] "#) .file("a/src/lib.rs", " /// ``` /// ☃ /// ``` pub fn foo() {} "); assert_that(p.cargo_process("doc"), execs().with_status(0)); }); test!(doc_release { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", ""); assert_that(p.cargo_process("build").arg("--release"), execs().with_status(0)); assert_that(p.cargo("doc").arg("--release").arg("-v"), execs().with_status(0) .with_stdout(&format!("\ {documenting} foo v0.0.1 ([..]) {running} `rustdoc src[..]lib.rs [..]` ", documenting = DOCUMENTING, running = RUNNING))); }); test!(doc_multiple_deps { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" [dependencies.baz] path = "baz" "#) .file("src/lib.rs", r#" extern crate bar; pub fn foo() {} "#) .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] "#) .file("bar/src/lib.rs", r#" pub fn bar() {} "#) .file("baz/Cargo.toml", r#" [package] name = "baz" version = "0.0.1" authors = [] "#) .file("baz/src/lib.rs", r#" pub fn baz() {} "#); 
assert_that(p.cargo_process("doc") .arg("-p").arg("bar") .arg("-p").arg("baz") .arg("-v"), execs().with_status(0)); assert_that(&p.root().join("target/doc"), existing_dir()); assert_that(&p.root().join("target/doc/bar/index.html"), existing_file()); assert_that(&p.root().join("target/doc/baz/index.html"), existing_file()); }); test!(features { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" [features] foo = ["bar/bar"] "#) .file("src/lib.rs", r#" #[cfg(feature = "foo")] pub fn foo() {} "#) .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [features] bar = [] "#) .file("bar/build.rs", r#" fn main() { println!("cargo:rustc-cfg=bar"); } "#) .file("bar/src/lib.rs", r#" #[cfg(feature = "bar")] pub fn bar() {} "#); assert_that(p.cargo_process("doc").arg("--features").arg("foo"), execs().with_status(0)); assert_that(&p.root().join("target/doc"), existing_dir()); assert_that(&p.root().join("target/doc/foo/fn.foo.html"), existing_file()); assert_that(&p.root().join("target/doc/bar/fn.bar.html"), existing_file()); }); cargo-0.8.0/tests/test_cargo_features.rs000066400000000000000000000513271264656333200203740ustar00rootroot00000000000000use std::fs::File; use std::io::prelude::*; use support::{project, execs}; use support::{COMPILING, FRESH}; use support::paths::CargoPathExt; use hamcrest::assert_that; fn setup() { } test!(invalid1 { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] bar = ["baz"] "#) .file("src/main.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(101).with_stderr(&format!("\ failed to parse manifest at `[..]` Caused by: Feature `bar` includes `baz` which is neither a dependency nor another feature "))); }); test!(invalid2 { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] bar = ["baz"] 
[dependencies.bar] path = "foo" "#) .file("src/main.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(101).with_stderr(&format!("\ failed to parse manifest at `[..]` Caused by: Features and dependencies cannot have the same name: `bar` "))); }); test!(invalid3 { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] bar = ["baz"] [dependencies.baz] path = "foo" "#) .file("src/main.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(101).with_stderr(&format!("\ failed to parse manifest at `[..]` Caused by: Feature `bar` depends on `baz` which is not an optional dependency. Consider adding `optional = true` to the dependency "))); }); test!(invalid4 { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" features = ["bar"] "#) .file("src/main.rs", "") .file("bar/Cargo.toml", r#" [project] name = "bar" version = "0.0.1" authors = [] "#) .file("bar/src/lib.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(101).with_stderr(&format!("\ Package `bar v0.0.1 ([..])` does not have these features: `bar` "))); let p = p.file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#); assert_that(p.cargo_process("build").arg("--features").arg("test"), execs().with_status(101).with_stderr(&format!("\ Package `foo v0.0.1 ([..])` does not have these features: `test` "))); }); test!(invalid5 { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dev-dependencies.bar] path = "bar" optional = true "#) .file("src/main.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(101).with_stderr(&format!("\ failed to parse manifest at `[..]` Caused by: Dev-dependencies are not allowed to be optional: `bar` "))); }); test!(invalid6 { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = 
[] [features] foo = ["bar/baz"] "#) .file("src/main.rs", ""); assert_that(p.cargo_process("build").arg("--features").arg("foo"), execs().with_status(101).with_stderr(&format!("\ failed to parse manifest at `[..]` Caused by: Feature `foo` requires `bar` which is not an optional dependency "))); }); test!(invalid7 { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] foo = ["bar/baz"] bar = [] "#) .file("src/main.rs", ""); assert_that(p.cargo_process("build").arg("--features").arg("foo"), execs().with_status(101).with_stderr(&format!("\ failed to parse manifest at `[..]` Caused by: Feature `foo` requires `bar` which is not an optional dependency "))); }); test!(invalid8 { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" features = ["foo/bar"] "#) .file("src/main.rs", "") .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] "#) .file("bar/src/lib.rs", ""); assert_that(p.cargo_process("build").arg("--features").arg("foo"), execs().with_status(101).with_stderr(&format!("\ features in dependencies cannot enable features in other dependencies: `foo/bar` "))); }); test!(no_feature_doesnt_build { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" optional = true "#) .file("src/main.rs", r#" #[cfg(feature = "bar")] extern crate bar; #[cfg(feature = "bar")] fn main() { bar::bar(); println!("bar") } #[cfg(not(feature = "bar"))] fn main() {} "#) .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] "#) .file("bar/src/lib.rs", "pub fn bar() {}"); assert_that(p.cargo_process("build"), execs().with_status(0).with_stdout(format!("\ {compiling} foo v0.0.1 ({dir}) ", compiling = COMPILING, dir = p.url()))); assert_that(p.process(&p.bin("foo")), execs().with_status(0).with_stdout("")); 
assert_that(p.cargo("build").arg("--features").arg("bar"), execs().with_status(0).with_stdout(format!("\ {compiling} bar v0.0.1 ({dir}) {compiling} foo v0.0.1 ({dir}) ", compiling = COMPILING, dir = p.url()))); assert_that(p.process(&p.bin("foo")), execs().with_status(0).with_stdout("bar\n")); }); test!(default_feature_pulled_in { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] default = ["bar"] [dependencies.bar] path = "bar" optional = true "#) .file("src/main.rs", r#" #[cfg(feature = "bar")] extern crate bar; #[cfg(feature = "bar")] fn main() { bar::bar(); println!("bar") } #[cfg(not(feature = "bar"))] fn main() {} "#) .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] "#) .file("bar/src/lib.rs", "pub fn bar() {}"); assert_that(p.cargo_process("build"), execs().with_status(0).with_stdout(format!("\ {compiling} bar v0.0.1 ({dir}) {compiling} foo v0.0.1 ({dir}) ", compiling = COMPILING, dir = p.url()))); assert_that(p.process(&p.bin("foo")), execs().with_status(0).with_stdout("bar\n")); assert_that(p.cargo("build").arg("--no-default-features"), execs().with_status(0).with_stdout(format!("\ {compiling} foo v0.0.1 ({dir}) ", compiling = COMPILING, dir = p.url()))); assert_that(p.process(&p.bin("foo")), execs().with_status(0).with_stdout("")); }); test!(cyclic_feature { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] default = ["default"] "#) .file("src/main.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(101).with_stderr("\ Cyclic feature dependency: feature `default` depends on itself ")); }); test!(cyclic_feature2 { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] foo = ["bar"] bar = ["foo"] "#) .file("src/main.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(101).with_stderr("\ Cyclic feature dependency: 
feature `[..]` depends on itself ")); }); test!(groups_on_groups_on_groups { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] default = ["f1"] f1 = ["f2", "bar"] f2 = ["f3", "f4"] f3 = ["f5", "f6", "baz"] f4 = ["f5", "f7"] f5 = ["f6"] f6 = ["f7"] f7 = ["bar"] [dependencies.bar] path = "bar" optional = true [dependencies.baz] path = "baz" optional = true "#) .file("src/main.rs", r#" extern crate bar; extern crate baz; fn main() {} "#) .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] "#) .file("bar/src/lib.rs", "pub fn bar() {}") .file("baz/Cargo.toml", r#" [package] name = "baz" version = "0.0.1" authors = [] "#) .file("baz/src/lib.rs", "pub fn baz() {}"); assert_that(p.cargo_process("build"), execs().with_status(0).with_stdout(format!("\ {compiling} ba[..] v0.0.1 ({dir}) {compiling} ba[..] v0.0.1 ({dir}) {compiling} foo v0.0.1 ({dir}) ", compiling = COMPILING, dir = p.url()))); }); test!(many_cli_features { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" optional = true [dependencies.baz] path = "baz" optional = true "#) .file("src/main.rs", r#" extern crate bar; extern crate baz; fn main() {} "#) .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] "#) .file("bar/src/lib.rs", "pub fn bar() {}") .file("baz/Cargo.toml", r#" [package] name = "baz" version = "0.0.1" authors = [] "#) .file("baz/src/lib.rs", "pub fn baz() {}"); assert_that(p.cargo_process("build").arg("--features").arg("bar baz"), execs().with_status(0).with_stdout(format!("\ {compiling} ba[..] v0.0.1 ({dir}) {compiling} ba[..] 
v0.0.1 ({dir}) {compiling} foo v0.0.1 ({dir}) ", compiling = COMPILING, dir = p.url()))); }); test!(union_features { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.d1] path = "d1" features = ["f1"] [dependencies.d2] path = "d2" features = ["f2"] "#) .file("src/main.rs", r#" extern crate d1; extern crate d2; fn main() { d2::f1(); d2::f2(); } "#) .file("d1/Cargo.toml", r#" [package] name = "d1" version = "0.0.1" authors = [] [features] f1 = ["d2"] [dependencies.d2] path = "../d2" features = ["f1"] optional = true "#) .file("d1/src/lib.rs", "") .file("d2/Cargo.toml", r#" [package] name = "d2" version = "0.0.1" authors = [] [features] f1 = [] f2 = [] "#) .file("d2/src/lib.rs", r#" #[cfg(feature = "f1")] pub fn f1() {} #[cfg(feature = "f2")] pub fn f2() {} "#); assert_that(p.cargo_process("build"), execs().with_status(0).with_stdout(format!("\ {compiling} d2 v0.0.1 ({dir}) {compiling} d1 v0.0.1 ({dir}) {compiling} foo v0.0.1 ({dir}) ", compiling = COMPILING, dir = p.url()))); }); test!(many_features_no_rebuilds { let p = project("foo") .file("Cargo.toml", r#" [package] name = "b" version = "0.1.0" authors = [] [dependencies.a] path = "a" features = ["fall"] "#) .file("src/main.rs", "fn main() {}") .file("a/Cargo.toml", r#" [package] name = "a" version = "0.1.0" authors = [] [features] ftest = [] ftest2 = [] fall = ["ftest", "ftest2"] "#) .file("a/src/lib.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(0).with_stdout(format!("\ {compiling} a v0.1.0 ({dir}) {compiling} b v0.1.0 ({dir}) ", compiling = COMPILING, dir = p.url()))); p.root().move_into_the_past().unwrap(); assert_that(p.cargo("build").arg("-v"), execs().with_status(0).with_stdout(format!("\ {fresh} a v0.1.0 ([..]) {fresh} b v0.1.0 ([..]) ", fresh = FRESH))); }); // Tests that all cmd lines work with `--features ""` test!(empty_features { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" 
authors = [] "#) .file("src/main.rs", "fn main() {}"); assert_that(p.cargo_process("build").arg("--features").arg(""), execs().with_status(0)); }); // Tests that all cmd lines work with `--features ""` test!(transitive_features { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] foo = ["bar/baz"] [dependencies.bar] path = "bar" "#) .file("src/main.rs", " extern crate bar; fn main() { bar::baz(); } ") .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [features] baz = [] "#) .file("bar/src/lib.rs", r#" #[cfg(feature = "baz")] pub fn baz() {} "#); assert_that(p.cargo_process("build").arg("--features").arg("foo"), execs().with_status(0)); }); test!(everything_in_the_lockfile { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [features] f1 = ["d1/f1"] f2 = ["d2"] [dependencies.d1] path = "d1" [dependencies.d2] path = "d2" optional = true [dependencies.d3] path = "d3" optional = true "#) .file("src/main.rs", "fn main() {}") .file("d1/Cargo.toml", r#" [package] name = "d1" version = "0.0.1" authors = [] [features] f1 = [] "#) .file("d1/src/lib.rs", "") .file("d2/Cargo.toml", r#" [package] name = "d2" version = "0.0.2" authors = [] "#) .file("d2/src/lib.rs", "") .file("d3/Cargo.toml", r#" [package] name = "d3" version = "0.0.3" authors = [] [features] f3 = [] "#) .file("d3/src/lib.rs", ""); assert_that(p.cargo_process("fetch"), execs().with_status(0)); let loc = p.root().join("Cargo.lock"); let mut lockfile = String::new(); File::open(&loc).unwrap().read_to_string(&mut lockfile).unwrap(); assert!(lockfile.contains(r#"name = "d1""#), "d1 not found\n{}", lockfile); assert!(lockfile.contains(r#"name = "d2""#), "d2 not found\n{}", lockfile); assert!(lockfile.contains(r#"name = "d3""#), "d3 not found\n{}", lockfile); }); test!(no_rebuild_when_frobbing_default_feature { let p = project("foo") .file("Cargo.toml", r#" [package] name = 
"foo" version = "0.1.0" authors = [] [dependencies] a = { path = "a" } b = { path = "b" } "#) .file("src/lib.rs", "") .file("b/Cargo.toml", r#" [package] name = "b" version = "0.1.0" authors = [] [dependencies] a = { path = "../a", features = ["f1"], default-features = false } "#) .file("b/src/lib.rs", "") .file("a/Cargo.toml", r#" [package] name = "a" version = "0.1.0" authors = [] [features] default = ["f1"] f1 = [] "#) .file("a/src/lib.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(0)); assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); }); test!(unions_work_with_no_default_features { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [dependencies] a = { path = "a" } b = { path = "b" } "#) .file("src/lib.rs", r#" extern crate a; pub fn foo() { a::a(); } "#) .file("b/Cargo.toml", r#" [package] name = "b" version = "0.1.0" authors = [] [dependencies] a = { path = "../a", features = [], default-features = false } "#) .file("b/src/lib.rs", "") .file("a/Cargo.toml", r#" [package] name = "a" version = "0.1.0" authors = [] [features] default = ["f1"] f1 = [] "#) .file("a/src/lib.rs", r#" #[cfg(feature = "f1")] pub fn a() {} "#); assert_that(p.cargo_process("build"), execs().with_status(0)); assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); }); test!(optional_and_dev_dep { let p = project("foo") .file("Cargo.toml", r#" [package] name = "test" version = "0.1.0" authors = [] [dependencies] foo = { path = "foo", optional = true } [dev-dependencies] foo = { path = "foo" } "#) .file("src/lib.rs", "") .file("foo/Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] "#) .file("foo/src/lib.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(0).with_stdout(format!("\ {compiling} test v0.1.0 
([..]) ", compiling = COMPILING))); }); test!(activating_feature_activates_dep { let p = project("foo") .file("Cargo.toml", r#" [package] name = "test" version = "0.1.0" authors = [] [dependencies] foo = { path = "foo", optional = true } [features] a = ["foo/a"] "#) .file("src/lib.rs", " extern crate foo; pub fn bar() { foo::bar(); } ") .file("foo/Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [features] a = [] "#) .file("foo/src/lib.rs", r#" #[cfg(feature = "a")] pub fn bar() {} "#); assert_that(p.cargo_process("build").arg("--features").arg("a").arg("-v"), execs().with_status(0)); }); cargo-0.8.0/tests/test_cargo_fetch.rs000066400000000000000000000007331264656333200176420ustar00rootroot00000000000000use support::{project, execs}; use hamcrest::assert_that; fn setup() {} test!(no_deps { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.1" "#) .file("src/main.rs", r#" mod a; fn main() {} "#) .file("src/a.rs", ""); assert_that(p.cargo_process("fetch"), execs().with_status(0).with_stdout("")); }); cargo-0.8.0/tests/test_cargo_freshness.rs000066400000000000000000000177161264656333200205620ustar00rootroot00000000000000use std::fs::{self, File}; use std::io::prelude::*; use std::thread; use support::{project, execs, path2url}; use support::COMPILING; use support::paths::CargoPathExt; use hamcrest::{assert_that, existing_file}; fn setup() {} test!(modifying_and_moving { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.1" "#) .file("src/main.rs", r#" mod a; fn main() {} "#) .file("src/a.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(0).with_stdout(format!("\ {compiling} foo v0.0.1 ({dir}) ", compiling = COMPILING, dir = path2url(p.root())))); assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); p.root().move_into_the_past().unwrap(); p.root().join("target").move_into_the_past().unwrap(); 
File::create(&p.root().join("src/a.rs")).unwrap() .write_all(b"fn main() {}").unwrap(); assert_that(p.cargo("build"), execs().with_status(0).with_stdout(format!("\ {compiling} foo v0.0.1 ({dir}) ", compiling = COMPILING, dir = path2url(p.root())))); fs::rename(&p.root().join("src/a.rs"), &p.root().join("src/b.rs")).unwrap(); assert_that(p.cargo("build"), execs().with_status(101)); }); test!(modify_only_some_files { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.1" "#) .file("src/lib.rs", "mod a;") .file("src/a.rs", "") .file("src/main.rs", r#" mod b; fn main() {} "#) .file("src/b.rs", "") .file("tests/test.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(0).with_stdout(format!("\ {compiling} foo v0.0.1 ({dir}) ", compiling = COMPILING, dir = path2url(p.root())))); assert_that(p.cargo("test"), execs().with_status(0)); thread::sleep_ms(1000); assert_that(&p.bin("foo"), existing_file()); let lib = p.root().join("src/lib.rs"); let bin = p.root().join("src/b.rs"); File::create(&lib).unwrap().write_all(b"invalid rust code").unwrap(); File::create(&bin).unwrap().write_all(b"fn foo() {}").unwrap(); lib.move_into_the_past().unwrap(); // Make sure the binary is rebuilt, not the lib assert_that(p.cargo("build") .env("RUST_LOG", "cargo::ops::cargo_rustc::fingerprint"), execs().with_status(0).with_stdout(format!("\ {compiling} foo v0.0.1 ({dir}) ", compiling = COMPILING, dir = path2url(p.root())))); assert_that(&p.bin("foo"), existing_file()); }); test!(rebuild_sub_package_then_while_package { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.1" [dependencies.a] path = "a" [dependencies.b] path = "b" "#) .file("src/lib.rs", "extern crate a; extern crate b;") .file("a/Cargo.toml", r#" [package] name = "a" authors = [] version = "0.0.1" [dependencies.b] path = "../b" "#) .file("a/src/lib.rs", "extern crate b;") .file("b/Cargo.toml", r#" [package] name = "b" 
authors = [] version = "0.0.1" "#) .file("b/src/lib.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(0)); File::create(&p.root().join("b/src/lib.rs")).unwrap().write_all(br#" pub fn b() {} "#).unwrap(); assert_that(p.cargo("build").arg("-pb"), execs().with_status(0)); File::create(&p.root().join("src/lib.rs")).unwrap().write_all(br#" extern crate a; extern crate b; pub fn toplevel() {} "#).unwrap(); assert_that(p.cargo("build"), execs().with_status(0)); }); test!(changing_features_is_ok { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.1" [features] foo = [] "#) .file("src/lib.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(0) .with_stdout("\ [..]Compiling foo v0.0.1 ([..]) ")); assert_that(p.cargo("build").arg("--features").arg("foo"), execs().with_status(0) .with_stdout("\ [..]Compiling foo v0.0.1 ([..]) ")); assert_that(p.cargo("build"), execs().with_status(0) .with_stdout("\ [..]Compiling foo v0.0.1 ([..]) ")); assert_that(p.cargo("build"), execs().with_status(0) .with_stdout("")); }); test!(rebuild_tests_if_lib_changes { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", "pub fn foo() {}") .file("tests/foo.rs", r#" extern crate foo; #[test] fn test() { foo::foo(); } "#); assert_that(p.cargo_process("build"), execs().with_status(0)); assert_that(p.cargo("test"), execs().with_status(0)); File::create(&p.root().join("src/lib.rs")).unwrap(); p.root().move_into_the_past().unwrap(); p.root().join("target").move_into_the_past().unwrap(); assert_that(p.cargo("build"), execs().with_status(0)); assert_that(p.cargo("test").arg("-v"), execs().with_status(101)); }); test!(no_rebuild_transitive_target_deps { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] a = { path = "a" } [dev-dependencies] b = { path = "b" } "#) .file("src/lib.rs", "") 
.file("tests/foo.rs", "") .file("a/Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] [target.foo.dependencies] c = { path = "../c" } "#) .file("a/src/lib.rs", "") .file("b/Cargo.toml", r#" [package] name = "b" version = "0.0.1" authors = [] [dependencies] c = { path = "../c" } "#) .file("b/src/lib.rs", "") .file("c/Cargo.toml", r#" [package] name = "c" version = "0.0.1" authors = [] "#) .file("c/src/lib.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(0)); assert_that(p.cargo("test").arg("--no-run"), execs().with_status(0) .with_stdout(&format!("\ {compiling} c v0.0.1 ([..]) {compiling} b v0.0.1 ([..]) {compiling} foo v0.0.1 ([..]) ", compiling = COMPILING))); }); test!(rerun_if_changed_in_dep { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] a = { path = "a" } "#) .file("src/lib.rs", "") .file("a/Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] build = "build.rs" "#) .file("a/build.rs", r#" fn main() { println!("cargo:rerun-if-changed=build.rs"); } "#) .file("a/src/lib.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(0)); assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); }); cargo-0.8.0/tests/test_cargo_generate_lockfile.rs000066400000000000000000000115651264656333200222200ustar00rootroot00000000000000use std::fs::File; use std::io::prelude::*; use support::{project, execs}; use hamcrest::{assert_that, existing_file}; fn setup() {} test!(adding_and_removing_packages { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.1" "#) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", r#" [package] name = "bar" authors = [] version = "0.0.1" "#) .file("bar/src/lib.rs", ""); assert_that(p.cargo_process("generate-lockfile"), execs().with_status(0)); let lockfile = p.root().join("Cargo.lock"); let toml = p.root().join("Cargo.toml"); let mut lock1 = 
String::new(); File::open(&lockfile).unwrap().read_to_string(&mut lock1).unwrap(); // add a dep File::create(&toml).unwrap().write_all(br#" [package] name = "foo" authors = [] version = "0.0.1" [dependencies.bar] path = "bar" "#).unwrap(); assert_that(p.cargo("generate-lockfile"), execs().with_status(0)); let mut lock2 = String::new(); File::open(&lockfile).unwrap().read_to_string(&mut lock2).unwrap(); assert!(lock1 != lock2); // change the dep File::create(&p.root().join("bar/Cargo.toml")).unwrap().write_all(br#" [package] name = "bar" authors = [] version = "0.0.2" "#).unwrap(); assert_that(p.cargo("generate-lockfile"), execs().with_status(0)); let mut lock3 = String::new(); File::open(&lockfile).unwrap().read_to_string(&mut lock3).unwrap(); assert!(lock1 != lock3); assert!(lock2 != lock3); // remove the dep File::create(&toml).unwrap().write_all(br#" [package] name = "foo" authors = [] version = "0.0.1" "#).unwrap(); assert_that(p.cargo("generate-lockfile"), execs().with_status(0)); let mut lock4 = String::new(); File::open(&lockfile).unwrap().read_to_string(&mut lock4).unwrap(); assert_eq!(lock1, lock4); }); test!(preserve_metadata { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.1" "#) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", r#" [package] name = "bar" authors = [] version = "0.0.1" "#) .file("bar/src/lib.rs", ""); assert_that(p.cargo_process("generate-lockfile"), execs().with_status(0)); let metadata = r#" [metadata] bar = "baz" foo = "bar" "#; let lockfile = p.root().join("Cargo.lock"); { let mut lock = String::new(); File::open(&lockfile).unwrap().read_to_string(&mut lock).unwrap(); let data = lock + metadata; File::create(&lockfile).unwrap().write_all(data.as_bytes()).unwrap(); } // Build and make sure the metadata is still there assert_that(p.cargo("build"), execs().with_status(0)); let mut lock = String::new(); File::open(&lockfile).unwrap().read_to_string(&mut lock).unwrap(); 
assert!(lock.contains(metadata.trim()), "{}", lock); // Update and make sure the metadata is still there assert_that(p.cargo("update"), execs().with_status(0)); let mut lock = String::new(); File::open(&lockfile).unwrap().read_to_string(&mut lock).unwrap(); assert!(lock.contains(metadata.trim()), "{}", lock); }); test!(preserve_line_endings_issue_2076 { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.1" "#) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", r#" [package] name = "bar" authors = [] version = "0.0.1" "#) .file("bar/src/lib.rs", ""); let lockfile = p.root().join("Cargo.lock"); assert_that(p.cargo_process("generate-lockfile"), execs().with_status(0)); assert_that(&lockfile, existing_file()); assert_that(p.cargo("generate-lockfile"), execs().with_status(0)); let mut lock0 = String::new(); { File::open(&lockfile).unwrap().read_to_string(&mut lock0).unwrap(); } assert!(lock0.starts_with("[root]\n")); let lock1 = lock0.replace("\n", "\r\n"); { File::create(&lockfile).unwrap().write_all(lock1.as_bytes()).unwrap(); } assert_that(p.cargo("generate-lockfile"), execs().with_status(0)); let mut lock2 = String::new(); { File::open(&lockfile).unwrap().read_to_string(&mut lock2).unwrap(); } assert!(lock2.starts_with("[root]\r\n")); assert_eq!(lock1, lock2); }); cargo-0.8.0/tests/test_cargo_install.rs000066400000000000000000000361721264656333200202250ustar00rootroot00000000000000use std::fmt; use std::fs::{self, File}; use std::io::prelude::*; use std::path::{Path, PathBuf}; use cargo::util::ProcessBuilder; use hamcrest::{assert_that, existing_file, is_not, Matcher, MatchResult}; use support::{project, execs}; use support::{UPDATING, DOWNLOADING, COMPILING, INSTALLING, REMOVING}; use support::paths; use support::registry::Package; use support::git; use self::InstalledExe as has_installed_exe; fn setup() { } fn cargo_process(s: &str) -> ProcessBuilder { let mut p = ::cargo_process(); p.arg(s); return p } fn 
pkg(name: &str, vers: &str) { Package::new(name, vers) .file("src/lib.rs", "") .file("src/main.rs", &format!(" extern crate {}; fn main() {{}} ", name)) .publish() } fn exe(name: &str) -> String { if cfg!(windows) {format!("{}.exe", name)} else {name.to_string()} } fn cargo_home() -> PathBuf { paths::home().join(".cargo") } struct InstalledExe(&'static str); impl> Matcher

for InstalledExe { fn matches(&self, path: P) -> MatchResult { let path = path.as_ref().join("bin").join(exe(self.0)); existing_file().matches(&path) } } impl fmt::Display for InstalledExe { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "installed exe `{}`", self.0) } } test!(simple { pkg("foo", "0.0.1"); assert_that(cargo_process("install").arg("foo"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `[..]` {downloading} foo v0.0.1 (registry file://[..]) {compiling} foo v0.0.1 (registry file://[..]) {installing} {home}[..]bin[..]foo[..] ", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, installing = INSTALLING, home = cargo_home().display()))); assert_that(cargo_home(), has_installed_exe("foo")); assert_that(cargo_process("uninstall").arg("foo"), execs().with_status(0).with_stdout(&format!("\ {removing} {home}[..]bin[..]foo[..] ", removing = REMOVING, home = cargo_home().display()))); assert_that(cargo_home(), is_not(has_installed_exe("foo"))); }); test!(pick_max_version { pkg("foo", "0.0.1"); pkg("foo", "0.0.2"); assert_that(cargo_process("install").arg("foo"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `[..]` {downloading} foo v0.0.2 (registry file://[..]) {compiling} foo v0.0.2 (registry file://[..]) {installing} {home}[..]bin[..]foo[..] 
", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, installing = INSTALLING, home = cargo_home().display()))); assert_that(cargo_home(), has_installed_exe("foo")); }); test!(missing { pkg("foo", "0.0.1"); assert_that(cargo_process("install").arg("bar"), execs().with_status(101).with_stderr("\ could not find `bar` in `registry file://[..]` ")); }); test!(bad_version { pkg("foo", "0.0.1"); assert_that(cargo_process("install").arg("foo").arg("--vers=0.2.0"), execs().with_status(101).with_stderr("\ could not find `foo` in `registry file://[..]` with version `0.2.0` ")); }); test!(no_crate { assert_that(cargo_process("install"), execs().with_status(101).with_stderr("\ must specify a crate to install from crates.io, or use --path or --git \ to specify alternate source ")); }); test!(install_location_precedence { pkg("foo", "0.0.1"); let root = paths::root(); let t1 = root.join("t1"); let t2 = root.join("t2"); let t3 = root.join("t3"); let t4 = cargo_home(); fs::create_dir(root.join(".cargo")).unwrap(); File::create(root.join(".cargo/config")).unwrap().write_all(format!("\ [install] root = '{}' ", t3.display()).as_bytes()).unwrap(); println!("install --root"); assert_that(cargo_process("install").arg("foo") .arg("--root").arg(&t1) .env("CARGO_INSTALL_ROOT", &t2), execs().with_status(0)); assert_that(&t1, has_installed_exe("foo")); assert_that(&t2, is_not(has_installed_exe("foo"))); println!("install CARGO_INSTALL_ROOT"); assert_that(cargo_process("install").arg("foo") .env("CARGO_INSTALL_ROOT", &t2), execs().with_status(0)); assert_that(&t2, has_installed_exe("foo")); assert_that(&t3, is_not(has_installed_exe("foo"))); println!("install install.root"); assert_that(cargo_process("install").arg("foo"), execs().with_status(0)); assert_that(&t3, has_installed_exe("foo")); assert_that(&t4, is_not(has_installed_exe("foo"))); fs::remove_file(root.join(".cargo/config")).unwrap(); println!("install cargo home"); 
assert_that(cargo_process("install").arg("foo"), execs().with_status(0)); assert_that(&t4, has_installed_exe("foo")); }); test!(install_path { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] "#) .file("src/main.rs", "fn main() {}"); p.build(); assert_that(cargo_process("install").arg("--path").arg(p.root()), execs().with_status(0)); assert_that(cargo_home(), has_installed_exe("foo")); assert_that(cargo_process("install").arg("--path").arg(".").cwd(p.root()), execs().with_status(101).with_stderr("\ binary `foo[..]` already exists in destination as part of `foo v0.1.0 [..]` ")); }); test!(multiple_crates_error { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] "#) .file("src/main.rs", "fn main() {}") .file("a/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] "#) .file("a/src/main.rs", "fn main() {}"); p.build(); assert_that(cargo_process("install").arg("--path").arg(p.root()), execs().with_status(101).with_stderr("\ multiple packages with binaries found: bar, foo ")); }); test!(multiple_crates_select { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] "#) .file("src/main.rs", "fn main() {}") .file("a/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] "#) .file("a/src/main.rs", "fn main() {}"); p.build(); assert_that(cargo_process("install").arg("--path").arg(p.root()).arg("foo"), execs().with_status(0)); assert_that(cargo_home(), has_installed_exe("foo")); assert_that(cargo_home(), is_not(has_installed_exe("bar"))); assert_that(cargo_process("install").arg("--path").arg(p.root()).arg("bar"), execs().with_status(0)); assert_that(cargo_home(), has_installed_exe("bar")); }); test!(multiple_crates_auto_binaries { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [dependencies] bar = { path = "a" } "#) .file("src/main.rs", 
"extern crate bar; fn main() {}") .file("a/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] "#) .file("a/src/lib.rs", ""); p.build(); assert_that(cargo_process("install").arg("--path").arg(p.root()), execs().with_status(0)); assert_that(cargo_home(), has_installed_exe("foo")); }); test!(multiple_crates_auto_examples { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [dependencies] bar = { path = "a" } "#) .file("src/lib.rs", "extern crate bar;") .file("examples/foo.rs", " extern crate bar; extern crate foo; fn main() {} ") .file("a/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] "#) .file("a/src/lib.rs", ""); p.build(); assert_that(cargo_process("install").arg("--path").arg(p.root()) .arg("--example=foo"), execs().with_status(0)); assert_that(cargo_home(), has_installed_exe("foo")); }); test!(no_binaries_or_examples { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] [dependencies] bar = { path = "a" } "#) .file("src/lib.rs", "") .file("a/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] "#) .file("a/src/lib.rs", ""); p.build(); assert_that(cargo_process("install").arg("--path").arg(p.root()), execs().with_status(101).with_stderr("\ no packages found with binaries or examples ")); }); test!(no_binaries { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] "#) .file("src/lib.rs", "") .file("examples/foo.rs", "fn main() {}"); p.build(); assert_that(cargo_process("install").arg("--path").arg(p.root()).arg("foo"), execs().with_status(101).with_stderr("\ specified package has no binaries ")); }); test!(examples { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] "#) .file("src/lib.rs", "") .file("examples/foo.rs", "extern crate foo; fn main() {}"); p.build(); 
assert_that(cargo_process("install").arg("--path").arg(p.root()) .arg("--example=foo"), execs().with_status(0)); assert_that(cargo_home(), has_installed_exe("foo")); }); test!(install_twice { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] "#) .file("src/main.rs", "fn main() {}"); p.build(); assert_that(cargo_process("install").arg("--path").arg(p.root()), execs().with_status(0)); assert_that(cargo_process("install").arg("--path").arg(p.root()), execs().with_status(101).with_stderr("\ binary `foo[..]` already exists in destination as part of `foo v0.1.0 ([..])` ")); }); test!(compile_failure { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] "#) .file("src/main.rs", ""); p.build(); assert_that(cargo_process("install").arg("--path").arg(p.root()), execs().with_status(101).with_stderr("\ error: main function not found error: aborting due to previous error failed to compile `foo v0.1.0 (file://[..])`, intermediate artifacts can be \ found at `[..]target-install` Caused by: Could not compile `foo`. To learn more, run the command again with --verbose. ")); }); test!(git_repo { let p = git::repo(&paths::root().join("foo")) .file("Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] "#) .file("src/main.rs", "fn main() {}"); p.build(); assert_that(cargo_process("install").arg("--git").arg(p.url().to_string()), execs().with_status(0).with_stdout(&format!("\ {updating} git repository `[..]` {compiling} foo v0.1.0 ([..]) {installing} {home}[..]bin[..]foo[..] 
", updating = UPDATING, compiling = COMPILING, installing = INSTALLING, home = cargo_home().display()))); assert_that(cargo_home(), has_installed_exe("foo")); assert_that(cargo_home(), has_installed_exe("foo")); }); test!(list { pkg("foo", "0.0.1"); pkg("bar", "0.2.1"); pkg("bar", "0.2.2"); assert_that(cargo_process("install").arg("--list"), execs().with_status(0).with_stdout("")); assert_that(cargo_process("install").arg("bar").arg("--vers").arg("=0.2.1"), execs().with_status(0)); assert_that(cargo_process("install").arg("foo"), execs().with_status(0)); assert_that(cargo_process("install").arg("--list"), execs().with_status(0).with_stdout("\ bar v0.2.1 (registry [..]): bar[..] foo v0.0.1 (registry [..]): foo[..] ")); }); test!(uninstall_pkg_does_not_exist { assert_that(cargo_process("uninstall").arg("foo"), execs().with_status(101).with_stderr("\ package id specification `foo` matched no packages ")); }); test!(uninstall_bin_does_not_exist { pkg("foo", "0.0.1"); assert_that(cargo_process("install").arg("foo"), execs().with_status(0)); assert_that(cargo_process("uninstall").arg("foo").arg("--bin=bar"), execs().with_status(101).with_stderr("\ binary `bar[..]` not installed as part of `foo v0.0.1 ([..])` ")); }); test!(uninstall_piecemeal { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.1.0" authors = [] "#) .file("src/bin/foo.rs", "fn main() {}") .file("src/bin/bar.rs", "fn main() {}"); p.build(); assert_that(cargo_process("install").arg("--path").arg(p.root()), execs().with_status(0)); assert_that(cargo_home(), has_installed_exe("foo")); assert_that(cargo_home(), has_installed_exe("bar")); assert_that(cargo_process("uninstall").arg("foo").arg("--bin=bar"), execs().with_status(0).with_stdout(&format!("\ {removing} [..]bar[..] 
", removing = REMOVING))); assert_that(cargo_home(), has_installed_exe("foo")); assert_that(cargo_home(), is_not(has_installed_exe("bar"))); assert_that(cargo_process("uninstall").arg("foo").arg("--bin=foo"), execs().with_status(0).with_stdout(&format!("\ {removing} [..]foo[..] ", removing = REMOVING))); assert_that(cargo_home(), is_not(has_installed_exe("foo"))); assert_that(cargo_process("uninstall").arg("foo"), execs().with_status(101).with_stderr("\ package id specification `foo` matched no packages ")); }); test!(subcommand_works_out_of_the_box { Package::new("cargo-foo", "1.0.0") .file("src/main.rs", r#" fn main() { println!("bar"); } "#) .publish(); assert_that(cargo_process("install").arg("cargo-foo"), execs().with_status(0)); assert_that(cargo_process("foo"), execs().with_status(0).with_stdout("bar\n")); assert_that(cargo_process("--list"), execs().with_status(0).with_stdout_contains(" foo\n")); }); cargo-0.8.0/tests/test_cargo_new.rs000066400000000000000000000247161264656333200173510ustar00rootroot00000000000000use std::fs::{self, File}; use std::io::prelude::*; use std::env; use tempdir::TempDir; use support::{execs, paths}; use support::paths::CargoPathExt; use hamcrest::{assert_that, existing_file, existing_dir, is_not}; use cargo::util::{process, ProcessBuilder}; fn setup() { } fn my_process(s: &str) -> ProcessBuilder { let mut p = process(s); p.cwd(&paths::root()).env("HOME", &paths::home()); return p; } fn cargo_process(s: &str) -> ProcessBuilder { let mut p = ::cargo_process(); p.arg(s); return p; } test!(simple_lib { assert_that(cargo_process("new").arg("foo").arg("--vcs").arg("none") .env("USER", "foo"), execs().with_status(0)); assert_that(&paths::root().join("foo"), existing_dir()); assert_that(&paths::root().join("foo/Cargo.toml"), existing_file()); assert_that(&paths::root().join("foo/src/lib.rs"), existing_file()); assert_that(&paths::root().join("foo/.gitignore"), is_not(existing_file())); 
assert_that(cargo_process("build").cwd(&paths::root().join("foo")), execs().with_status(0)); }); test!(simple_bin { assert_that(cargo_process("new").arg("foo").arg("--bin") .env("USER", "foo"), execs().with_status(0)); assert_that(&paths::root().join("foo"), existing_dir()); assert_that(&paths::root().join("foo/Cargo.toml"), existing_file()); assert_that(&paths::root().join("foo/src/main.rs"), existing_file()); assert_that(cargo_process("build").cwd(&paths::root().join("foo")), execs().with_status(0)); assert_that(&paths::root().join(&format!("foo/target/debug/foo{}", env::consts::EXE_SUFFIX)), existing_file()); }); test!(simple_git { let td = TempDir::new("cargo").unwrap(); assert_that(cargo_process("new").arg("foo").cwd(td.path().clone()) .env("USER", "foo"), execs().with_status(0)); assert_that(td.path(), existing_dir()); assert_that(&td.path().join("foo/Cargo.toml"), existing_file()); assert_that(&td.path().join("foo/src/lib.rs"), existing_file()); assert_that(&td.path().join("foo/.git"), existing_dir()); assert_that(&td.path().join("foo/.gitignore"), existing_file()); assert_that(cargo_process("build").cwd(&td.path().clone().join("foo")), execs().with_status(0)); }); test!(no_argument { assert_that(cargo_process("new"), execs().with_status(1) .with_stderr("\ Invalid arguments. 
Usage: cargo new [options] cargo new -h | --help ")); }); test!(existing { let dst = paths::root().join("foo"); fs::create_dir(&dst).unwrap(); assert_that(cargo_process("new").arg("foo"), execs().with_status(101) .with_stderr(format!("destination `{}` already exists\n", dst.display()))); }); test!(invalid_characters { assert_that(cargo_process("new").arg("foo.rs"), execs().with_status(101) .with_stderr("Invalid character `.` in crate name: `foo.rs`")); }); test!(rust_prefix_stripped { assert_that(cargo_process("new").arg("rust-foo").env("USER", "foo"), execs().with_status(0) .with_stdout("note: package will be named `foo`; use --name to override")); let toml = paths::root().join("rust-foo/Cargo.toml"); let mut contents = String::new(); File::open(&toml).unwrap().read_to_string(&mut contents).unwrap(); assert!(contents.contains(r#"name = "foo""#)); }); test!(bin_disables_stripping { assert_that(cargo_process("new").arg("rust-foo").arg("--bin").env("USER", "foo"), execs().with_status(0)); let toml = paths::root().join("rust-foo/Cargo.toml"); let mut contents = String::new(); File::open(&toml).unwrap().read_to_string(&mut contents).unwrap(); assert!(contents.contains(r#"name = "rust-foo""#)); }); test!(explicit_name_not_stripped { assert_that(cargo_process("new").arg("foo").arg("--name").arg("rust-bar").env("USER", "foo"), execs().with_status(0)); let toml = paths::root().join("foo/Cargo.toml"); let mut contents = String::new(); File::open(&toml).unwrap().read_to_string(&mut contents).unwrap(); assert!(contents.contains(r#"name = "rust-bar""#)); }); test!(finds_author_user { // Use a temp dir to make sure we don't pick up .cargo/config somewhere in // the hierarchy let td = TempDir::new("cargo").unwrap(); assert_that(cargo_process("new").arg("foo").env("USER", "foo") .cwd(td.path().clone()), execs().with_status(0)); let toml = td.path().join("foo/Cargo.toml"); let mut contents = String::new(); File::open(&toml).unwrap().read_to_string(&mut contents).unwrap(); 
assert!(contents.contains(r#"authors = ["foo"]"#)); }); test!(finds_author_user_escaped { // Use a temp dir to make sure we don't pick up .cargo/config somewhere in // the hierarchy let td = TempDir::new("cargo").unwrap(); assert_that(cargo_process("new").arg("foo").env("USER", "foo \"bar\"") .cwd(td.path().clone()), execs().with_status(0)); let toml = td.path().join("foo/Cargo.toml"); let mut contents = String::new(); File::open(&toml).unwrap().read_to_string(&mut contents).unwrap(); assert!(contents.contains(r#"authors = ["foo \"bar\""]"#)); }); test!(finds_author_username { // Use a temp dir to make sure we don't pick up .cargo/config somewhere in // the hierarchy let td = TempDir::new("cargo").unwrap(); assert_that(cargo_process("new").arg("foo") .env_remove("USER") .env("USERNAME", "foo") .cwd(td.path().clone()), execs().with_status(0)); let toml = td.path().join("foo/Cargo.toml"); let mut contents = String::new(); File::open(&toml).unwrap().read_to_string(&mut contents).unwrap(); assert!(contents.contains(r#"authors = ["foo"]"#)); }); test!(finds_author_email { // Use a temp dir to make sure we don't pick up .cargo/config somewhere in // the hierarchy let td = TempDir::new("cargo").unwrap(); assert_that(cargo_process("new").arg("foo") .env("USER", "bar") .env("EMAIL", "baz") .cwd(td.path().clone()), execs().with_status(0)); let toml = td.path().join("foo/Cargo.toml"); let mut contents = String::new(); File::open(&toml).unwrap().read_to_string(&mut contents).unwrap(); assert!(contents.contains(r#"authors = ["bar "]"#)); }); test!(finds_author_git { my_process("git").args(&["config", "--global", "user.name", "bar"]) .exec().unwrap(); my_process("git").args(&["config", "--global", "user.email", "baz"]) .exec().unwrap(); assert_that(cargo_process("new").arg("foo").env("USER", "foo"), execs().with_status(0)); let toml = paths::root().join("foo/Cargo.toml"); let mut contents = String::new(); File::open(&toml).unwrap().read_to_string(&mut contents).unwrap(); 
assert!(contents.contains(r#"authors = ["bar "]"#)); }); test!(author_prefers_cargo { my_process("git").args(&["config", "--global", "user.name", "foo"]) .exec().unwrap(); my_process("git").args(&["config", "--global", "user.email", "bar"]) .exec().unwrap(); let root = paths::root(); fs::create_dir(&root.join(".cargo")).unwrap(); File::create(&root.join(".cargo/config")).unwrap().write_all(br#" [cargo-new] name = "new-foo" email = "new-bar" git = false "#).unwrap(); assert_that(cargo_process("new").arg("foo").env("USER", "foo"), execs().with_status(0)); let toml = paths::root().join("foo/Cargo.toml"); let mut contents = String::new(); File::open(&toml).unwrap().read_to_string(&mut contents).unwrap(); assert!(contents.contains(r#"authors = ["new-foo "]"#)); assert!(!root.join("foo/.gitignore").c_exists()); }); test!(git_prefers_command_line { let root = paths::root(); let td = TempDir::new("cargo").unwrap(); fs::create_dir(&root.join(".cargo")).unwrap(); File::create(&root.join(".cargo/config")).unwrap().write_all(br#" [cargo-new] vcs = "none" name = "foo" email = "bar" "#).unwrap(); assert_that(cargo_process("new").arg("foo").arg("--vcs").arg("git") .cwd(td.path()) .env("USER", "foo"), execs().with_status(0)); assert!(td.path().join("foo/.gitignore").c_exists()); }); test!(subpackage_no_git { assert_that(cargo_process("new").arg("foo").env("USER", "foo"), execs().with_status(0)); let subpackage = paths::root().join("foo").join("components"); fs::create_dir(&subpackage).unwrap(); assert_that(cargo_process("new").arg("foo/components/subcomponent") .env("USER", "foo"), execs().with_status(0)); assert_that(&paths::root().join("foo/components/subcomponent/.git"), is_not(existing_file())); assert_that(&paths::root().join("foo/components/subcomponent/.gitignore"), is_not(existing_file())); }); test!(subpackage_git_with_vcs_arg { assert_that(cargo_process("new").arg("foo").env("USER", "foo"), execs().with_status(0)); let subpackage = 
paths::root().join("foo").join("components"); fs::create_dir(&subpackage).unwrap(); assert_that(cargo_process("new").arg("foo/components/subcomponent") .arg("--vcs").arg("git") .env("USER", "foo"), execs().with_status(0)); assert_that(&paths::root().join("foo/components/subcomponent/.git"), existing_dir()); assert_that(&paths::root().join("foo/components/subcomponent/.gitignore"), existing_file()); }); test!(unknown_flags { assert_that(cargo_process("new").arg("foo").arg("--flag"), execs().with_status(1) .with_stderr("\ Unknown flag: '--flag' Usage: cargo new [..] cargo new [..] ")); }); cargo-0.8.0/tests/test_cargo_package.rs000066400000000000000000000343561264656333200201540ustar00rootroot00000000000000use std::fs::File; use std::io::Cursor; use std::io::prelude::*; use std::path::Path; use flate2::read::GzDecoder; use git2; use tar::Archive; use support::{project, execs, paths, git, path2url}; use support::{PACKAGING, VERIFYING, COMPILING, ARCHIVING, UPDATING, DOWNLOADING}; use support::registry::{self, Package}; use hamcrest::{assert_that, existing_file}; fn setup() { } test!(simple { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] exclude = ["*.txt"] license = "MIT" description = "foo" "#) .file("src/main.rs", r#" fn main() { println!("hello"); } "#) .file("src/bar.txt", ""); // should be ignored when packaging assert_that(p.cargo_process("package"), execs().with_status(0).with_stdout(&format!("\ {packaging} foo v0.0.1 ({dir}) {verifying} foo v0.0.1 ({dir}) {compiling} foo v0.0.1 ({dir}[..]) ", packaging = PACKAGING, verifying = VERIFYING, compiling = COMPILING, dir = p.url()))); assert_that(&p.root().join("target/package/foo-0.0.1.crate"), existing_file()); assert_that(p.cargo("package").arg("-l"), execs().with_status(0).with_stdout("\ Cargo.toml src[..]main.rs ")); assert_that(p.cargo("package"), execs().with_status(0).with_stdout("")); let f = 
File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); let mut rdr = GzDecoder::new(f).unwrap(); let mut contents = Vec::new(); rdr.read_to_end(&mut contents).unwrap(); let ar = Archive::new(Cursor::new(contents)); for f in ar.files().unwrap() { let f = f.unwrap(); let fname = f.header().path_bytes(); let fname = &*fname; assert!(fname == b"foo-0.0.1/Cargo.toml" || fname == b"foo-0.0.1/src/main.rs", "unexpected filename: {:?}", f.header().path()) } }); test!(metadata_warning { let p = project("all") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#) .file("src/main.rs", r#" fn main() {} "#); assert_that(p.cargo_process("package"), execs().with_status(0).with_stdout(&format!("\ {packaging} foo v0.0.1 ({dir}) {verifying} foo v0.0.1 ({dir}) {compiling} foo v0.0.1 ({dir}[..]) ", packaging = PACKAGING, verifying = VERIFYING, compiling = COMPILING, dir = p.url())) .with_stderr("\ warning: manifest has no description, license, license-file, documentation, \ homepage or repository. See \ http://doc.crates.io/manifest.html#package-metadata for more info.")); let p = project("one") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" "#) .file("src/main.rs", r#" fn main() {} "#); assert_that(p.cargo_process("package"), execs().with_status(0).with_stdout(&format!("\ {packaging} foo v0.0.1 ({dir}) {verifying} foo v0.0.1 ({dir}) {compiling} foo v0.0.1 ({dir}[..]) ", packaging = PACKAGING, verifying = VERIFYING, compiling = COMPILING, dir = p.url())) .with_stderr("\ warning: manifest has no description, documentation, homepage or repository. 
See \ http://doc.crates.io/manifest.html#package-metadata for more info.")); let p = project("all") .file("Cargo.toml", &format!(r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" repository = "bar" "#)) .file("src/main.rs", r#" fn main() {} "#); assert_that(p.cargo_process("package"), execs().with_status(0).with_stdout(&format!("\ {packaging} foo v0.0.1 ({dir}) {verifying} foo v0.0.1 ({dir}) {compiling} foo v0.0.1 ({dir}[..]) ", packaging = PACKAGING, verifying = VERIFYING, compiling = COMPILING, dir = p.url()))); }); test!(wildcard_deps { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" repository = "bar" [dependencies] bar = "*" [build-dependencies] baz = "*" [dev-dependencies] buz = "*" "#) .file("src/main.rs", "fn main() {}"); Package::new("baz", "0.0.1").publish(); Package::new("bar", "0.0.1").dep("baz", "0.0.1").publish(); Package::new("buz", "0.0.1").dep("bar", "0.0.1").publish(); assert_that(p.cargo_process("package"), execs().with_status(0).with_stdout(&format!("\ {packaging} foo v0.0.1 ({dir}) {verifying} foo v0.0.1 ({dir}) {updating} registry `{reg}` {downloading} [..] v0.0.1 (registry file://[..]) {downloading} [..] v0.0.1 (registry file://[..]) {downloading} [..] v0.0.1 (registry file://[..]) {compiling} baz v0.0.1 (registry file://[..]) {compiling} bar v0.0.1 (registry file://[..]) {compiling} foo v0.0.1 ({dir}[..]) ", packaging = PACKAGING, verifying = VERIFYING, updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, dir = p.url(), reg = registry::registry())) .with_stderr("\ warning: some dependencies have wildcard (\"*\") version constraints. On January 22nd, 2016, \ crates.io will begin rejecting packages with wildcard dependency constraints. See \ http://doc.crates.io/crates-io.html#using-crates.io-based-crates for information on version \ constraints. 
dependencies for these crates have wildcard constraints: bar, baz")); }); test!(package_verbose { let root = paths::root().join("all"); let p = git::repo(&root) .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#) .file("src/main.rs", r#" fn main() {} "#) .file("a/Cargo.toml", r#" [project] name = "a" version = "0.0.1" authors = [] "#) .file("a/src/lib.rs", ""); p.build(); let mut cargo = ::cargo_process(); cargo.cwd(p.root()); assert_that(cargo.clone().arg("build"), execs().with_status(0)); assert_that(cargo.arg("package").arg("-v").arg("--no-verify"), execs().with_status(0).with_stdout(&format!("\ {packaging} foo v0.0.1 ([..]) {archiving} [..] {archiving} [..] ", packaging = PACKAGING, archiving = ARCHIVING))); }); test!(package_verification { let p = project("all") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#) .file("src/main.rs", r#" fn main() {} "#); assert_that(p.cargo_process("build"), execs().with_status(0)); assert_that(p.cargo("package"), execs().with_status(0).with_stdout(&format!("\ {packaging} foo v0.0.1 ({dir}) {verifying} foo v0.0.1 ({dir}) {compiling} foo v0.0.1 ({dir}[..]) ", packaging = PACKAGING, verifying = VERIFYING, compiling = COMPILING, dir = p.url()))); }); test!(exclude { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] exclude = ["*.txt"] "#) .file("src/main.rs", r#" fn main() { println!("hello"); } "#) .file("bar.txt", "") .file("src/bar.txt", ""); assert_that(p.cargo_process("package").arg("--no-verify").arg("-v"), execs().with_status(0).with_stdout(&format!("\ {packaging} foo v0.0.1 ([..]) {archiving} [..] {archiving} [..] 
", packaging = PACKAGING, archiving = ARCHIVING))); }); test!(include { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] exclude = ["*.txt"] include = ["foo.txt", "**/*.rs", "Cargo.toml"] "#) .file("foo.txt", "") .file("src/main.rs", r#" fn main() { println!("hello"); } "#) .file("src/bar.txt", ""); // should be ignored when packaging assert_that(p.cargo_process("package").arg("--no-verify").arg("-v"), execs().with_status(0).with_stdout(&format!("\ {packaging} foo v0.0.1 ([..]) {archiving} [..] {archiving} [..] {archiving} [..] ", packaging = PACKAGING, archiving = ARCHIVING))); }); test!(package_lib_with_bin { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#) .file("src/main.rs", r#" extern crate foo; fn main() {} "#) .file("src/lib.rs", ""); assert_that(p.cargo_process("package").arg("-v"), execs().with_status(0)); }); test!(package_new_git_repo { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" "#) .file("src/main.rs", "fn main() {}"); p.build(); git2::Repository::init(&p.root()).unwrap(); assert_that(::cargo_process().arg("package").cwd(p.root()) .arg("--no-verify").arg("-v"), execs().with_status(0).with_stdout(&format!("\ {packaging} foo v0.0.1 ([..]) {archiving} [..] {archiving} [..] 
", packaging = PACKAGING, archiving = ARCHIVING))); }); test!(package_git_submodule { use std::str::from_utf8; let project = git::new("foo", |project| { project.file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = ["foo@example.com"] license = "MIT" description = "foo" repository = "foo" "#) .file("src/lib.rs", "pub fn foo() {}") }).unwrap(); let library = git::new("bar", |library| { library.file("Makefile", "all:") }).unwrap(); let repository = git2::Repository::open(&project.root()).unwrap(); let url = path2url(library.root()).to_string(); git::add_submodule(&repository, &url, Path::new("bar")); git::commit(&repository); let repository = git2::Repository::open(&project.root().join("bar")).unwrap(); repository.reset(&repository.revparse_single("HEAD").unwrap(), git2::ResetType::Hard, None).unwrap(); let result = project.cargo("package").arg("--no-verify").arg("-v").exec_with_output().unwrap(); assert!(result.status.success()); assert!(from_utf8(&result.stdout).unwrap().contains(&format!("{} bar/Makefile", ARCHIVING))); }); test!(no_duplicates_from_modified_tracked_files { let root = paths::root().join("all"); let p = git::repo(&root) .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#) .file("src/main.rs", r#" fn main() {} "#); p.build(); File::create(p.root().join("src/main.rs")).unwrap().write_all(r#" fn main() { println!("A change!"); } "#.as_bytes()).unwrap(); let mut cargo = ::cargo_process(); cargo.cwd(p.root()); assert_that(cargo.clone().arg("build"), execs().with_status(0)); assert_that(cargo.arg("package").arg("--list"), execs().with_status(0).with_stdout(&format!("\ Cargo.toml src/main.rs "))); }); test!(ignore_nested { let cargo_toml = r#" [project] name = "nested" version = "0.0.1" authors = [] license = "MIT" description = "nested" "#; let main_rs = r#" fn main() { println!("hello"); } "#; let p = project("nested") .file("Cargo.toml", cargo_toml) .file("src/main.rs", main_rs) // If a project happens to 
contain a copy of itself, we should // ignore it. .file("a_dir/nested/Cargo.toml", cargo_toml) .file("a_dir/nested/src/main.rs", main_rs); assert_that(p.cargo_process("package"), execs().with_status(0).with_stdout(&format!("\ {packaging} nested v0.0.1 ({dir}) {verifying} nested v0.0.1 ({dir}) {compiling} nested v0.0.1 ({dir}[..]) ", packaging = PACKAGING, verifying = VERIFYING, compiling = COMPILING, dir = p.url()))); assert_that(&p.root().join("target/package/nested-0.0.1.crate"), existing_file()); assert_that(p.cargo("package").arg("-l"), execs().with_status(0).with_stdout("\ Cargo.toml src[..]main.rs ")); assert_that(p.cargo("package"), execs().with_status(0).with_stdout("")); let f = File::open(&p.root().join("target/package/nested-0.0.1.crate")).unwrap(); let mut rdr = GzDecoder::new(f).unwrap(); let mut contents = Vec::new(); rdr.read_to_end(&mut contents).unwrap(); let ar = Archive::new(Cursor::new(contents)); for f in ar.files().unwrap() { let f = f.unwrap(); let fname = f.header().path_bytes(); let fname = &*fname; assert!(fname == b"nested-0.0.1/Cargo.toml" || fname == b"nested-0.0.1/src/main.rs", "unexpected filename: {:?}", f.header().path()) } }); #[cfg(unix)] // windows doesn't allow these characters in filenames test!(package_weird_characters { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#) .file("src/main.rs", r#" fn main() { println!("hello"); } "#) .file("src/:foo", ""); assert_that(p.cargo_process("package"), execs().with_status(101).with_stderr("\ warning: [..] 
failed to prepare local package for uploading Caused by: cannot package a filename with a special character `:`: src/:foo ")); }); cargo-0.8.0/tests/test_cargo_profiles.rs000066400000000000000000000063361264656333200204010ustar00rootroot00000000000000use std::env; use std::path::MAIN_SEPARATOR as SEP; use support::{project, execs}; use support::{COMPILING, RUNNING}; use hamcrest::assert_that; fn setup() { } test!(profile_overrides { let mut p = project("foo"); p = p .file("Cargo.toml", r#" [package] name = "test" version = "0.0.0" authors = [] [profile.dev] opt-level = 1 debug = false rpath = true "#) .file("src/lib.rs", ""); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0).with_stdout(&format!("\ {compiling} test v0.0.0 ({url}) {running} `rustc src{sep}lib.rs --crate-name test --crate-type lib \ -C opt-level=1 \ -C debug-assertions=on \ -C rpath \ --out-dir {dir}{sep}target{sep}debug \ --emit=dep-info,link \ -L dependency={dir}{sep}target{sep}debug \ -L dependency={dir}{sep}target{sep}debug{sep}deps` ", running = RUNNING, compiling = COMPILING, sep = SEP, dir = p.root().display(), url = p.url(), ))); }); test!(top_level_overrides_deps { let mut p = project("foo"); p = p .file("Cargo.toml", r#" [package] name = "test" version = "0.0.0" authors = [] [profile.release] opt-level = 1 debug = true [dependencies.foo] path = "foo" "#) .file("src/lib.rs", "") .file("foo/Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [profile.release] opt-level = 0 debug = false [lib] name = "foo" crate_type = ["dylib", "rlib"] "#) .file("foo/src/lib.rs", ""); assert_that(p.cargo_process("build").arg("-v").arg("--release"), execs().with_status(0).with_stdout(&format!("\ {compiling} foo v0.0.0 ({url}) {running} `rustc foo{sep}src{sep}lib.rs --crate-name foo \ --crate-type dylib --crate-type rlib -C prefer-dynamic \ -C opt-level=1 \ -g \ -C metadata=[..] \ -C extra-filename=-[..] 
\ --out-dir {dir}{sep}target{sep}release{sep}deps \ --emit=dep-info,link \ -L dependency={dir}{sep}target{sep}release{sep}deps \ -L dependency={dir}{sep}target{sep}release{sep}deps` {compiling} test v0.0.0 ({url}) {running} `rustc src{sep}lib.rs --crate-name test --crate-type lib \ -C opt-level=1 \ -g \ --out-dir {dir}{sep}target{sep}release \ --emit=dep-info,link \ -L dependency={dir}{sep}target{sep}release \ -L dependency={dir}{sep}target{sep}release{sep}deps \ --extern foo={dir}{sep}target{sep}release{sep}deps{sep}\ {prefix}foo-[..]{suffix} \ --extern foo={dir}{sep}target{sep}release{sep}deps{sep}libfoo-[..].rlib` ", running = RUNNING, compiling = COMPILING, dir = p.root().display(), url = p.url(), sep = SEP, prefix = env::consts::DLL_PREFIX, suffix = env::consts::DLL_SUFFIX))); }); cargo-0.8.0/tests/test_cargo_publish.rs000066400000000000000000000102441264656333200202150ustar00rootroot00000000000000use std::io::prelude::*; use std::fs::{self, File}; use std::io::{Cursor, SeekFrom}; use std::path::PathBuf; use flate2::read::GzDecoder; use tar::Archive; use url::Url; use support::{project, execs}; use support::{UPDATING, PACKAGING, UPLOADING}; use support::paths; use support::git::repo; use hamcrest::assert_that; fn registry_path() -> PathBuf { paths::root().join("registry") } fn registry() -> Url { Url::from_file_path(&*registry_path()).ok().unwrap() } fn upload_path() -> PathBuf { paths::root().join("upload") } fn upload() -> Url { Url::from_file_path(&*upload_path()).ok().unwrap() } fn setup() { let config = paths::root().join(".cargo/config"); fs::create_dir_all(config.parent().unwrap()).unwrap(); File::create(&config).unwrap().write_all(&format!(r#" [registry] index = "{reg}" token = "api-token" "#, reg = registry()).as_bytes()).unwrap(); fs::create_dir_all(&upload_path().join("api/v1/crates")).unwrap(); repo(®istry_path()) .file("config.json", &format!(r#"{{ "dl": "{0}", "api": "{0}" }}"#, upload())) .build(); } test!(simple { let p = project("foo") 
.file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" "#) .file("src/main.rs", "fn main() {}"); assert_that(p.cargo_process("publish").arg("--no-verify"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `{reg}` {packaging} foo v0.0.1 ({dir}) {uploading} foo v0.0.1 ({dir}) ", updating = UPDATING, uploading = UPLOADING, packaging = PACKAGING, dir = p.url(), reg = registry()))); let mut f = File::open(&upload_path().join("api/v1/crates/new")).unwrap(); // Skip the metadata payload and the size of the tarball let mut sz = [0; 4]; assert_eq!(f.read(&mut sz).unwrap(), 4); let sz = ((sz[0] as u32) << 0) | ((sz[1] as u32) << 8) | ((sz[2] as u32) << 16) | ((sz[3] as u32) << 24); f.seek(SeekFrom::Current(sz as i64 + 4)).unwrap(); // Verify the tarball let mut rdr = GzDecoder::new(f).unwrap(); assert_eq!(rdr.header().filename().unwrap(), "foo-0.0.1.crate".as_bytes()); let mut contents = Vec::new(); rdr.read_to_end(&mut contents).unwrap(); let inner = Cursor::new(contents); let ar = Archive::new(inner); for file in ar.files().unwrap() { let file = file.unwrap(); let fname = file.header().path_bytes(); let fname = &*fname; assert!(fname == b"foo-0.0.1/Cargo.toml" || fname == b"foo-0.0.1/src/main.rs", "unexpected filename: {:?}", file.header().path()); } }); test!(git_deps { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" [dependencies.foo] git = "git://path/to/nowhere" "#) .file("src/main.rs", "fn main() {}"); assert_that(p.cargo_process("publish").arg("-v").arg("--no-verify"), execs().with_status(101).with_stderr("\ all dependencies must come from the same source. 
dependency `foo` comes from git://path/to/nowhere instead ")); }); test!(path_dependency_no_version { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" [dependencies.bar] path = "bar" "#) .file("src/main.rs", "fn main() {}") .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] "#) .file("bar/src/lib.rs", ""); assert_that(p.cargo_process("publish"), execs().with_status(101).with_stderr("\ all path dependencies must have a version specified when publishing. dependency `bar` does not specify a version ")); }); cargo-0.8.0/tests/test_cargo_read_manifest.rs000066400000000000000000000055171264656333200213570ustar00rootroot00000000000000use support::{project, execs, main_file, basic_bin_manifest}; use hamcrest::{assert_that}; fn setup() {} fn remove_all_whitespace(s: &str) -> String { s.split_whitespace().collect() } fn read_manifest_output() -> String { remove_all_whitespace(r#" { "name":"foo", "version":"0.5.0", "id":"foo[..]0.5.0[..](path+file://[..]/foo)", "source":null, "dependencies":[], "targets":[{ "kind":["bin"], "name":"foo", "src_path":"src[..]foo.rs" }], "features":{}, "manifest_path":"[..]Cargo.toml" }"#) } test!(cargo_read_manifest_path_to_cargo_toml_relative { let p = project("foo") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])); assert_that(p.cargo_process("read-manifest") .arg("--manifest-path").arg("foo/Cargo.toml") .cwd(p.root().parent().unwrap()), execs().with_status(0) .with_stdout(read_manifest_output())); }); test!(cargo_read_manifest_path_to_cargo_toml_absolute { let p = project("foo") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])); assert_that(p.cargo_process("read-manifest") .arg("--manifest-path").arg(p.root().join("Cargo.toml")) .cwd(p.root().parent().unwrap()), execs().with_status(0) .with_stdout(read_manifest_output())); 
}); test!(cargo_read_manifest_path_to_cargo_toml_parent_relative { let p = project("foo") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])); assert_that(p.cargo_process("read-manifest") .arg("--manifest-path").arg("foo") .cwd(p.root().parent().unwrap()), execs().with_status(101) .with_stderr("the manifest-path must be a path to a Cargo.toml file")); }); test!(cargo_read_manifest_path_to_cargo_toml_parent_absolute { let p = project("foo") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])); assert_that(p.cargo_process("read-manifest") .arg("--manifest-path").arg(p.root()) .cwd(p.root().parent().unwrap()), execs().with_status(101) .with_stderr("the manifest-path must be a path to a Cargo.toml file")); }); test!(cargo_read_manifest_cwd { let p = project("foo") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])); assert_that(p.cargo_process("read-manifest") .cwd(p.root()), execs().with_status(0) .with_stdout(read_manifest_output())); }); cargo-0.8.0/tests/test_cargo_registry.rs000066400000000000000000000645101264656333200204240ustar00rootroot00000000000000use std::fs::{self, File}; use std::io::prelude::*; use support::{project, execs}; use support::{UPDATING, DOWNLOADING, COMPILING, PACKAGING, VERIFYING, ADDING, REMOVING}; use support::paths::{self, CargoPathExt}; use support::registry::{self, Package}; use support::git; use hamcrest::assert_that; fn setup() { } test!(simple { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = ">= 0.0.0" "#) .file("src/main.rs", "fn main() {}"); Package::new("bar", "0.0.1").publish(); assert_that(p.cargo_process("build"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `{reg}` {downloading} bar v0.0.1 (registry file://[..]) {compiling} bar v0.0.1 (registry file://[..]) {compiling} foo v0.0.1 
({dir}) ", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, dir = p.url(), reg = registry::registry()))); // Don't download a second time assert_that(p.cargo_process("build"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `{reg}` [..] bar v0.0.1 (registry file://[..]) [..] foo v0.0.1 ({dir}) ", updating = UPDATING, dir = p.url(), reg = registry::registry()))); }); test!(deps { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = ">= 0.0.0" "#) .file("src/main.rs", "fn main() {}"); Package::new("baz", "0.0.1").publish(); Package::new("bar", "0.0.1").dep("baz", "*").publish(); assert_that(p.cargo_process("build"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `{reg}` {downloading} [..] v0.0.1 (registry file://[..]) {downloading} [..] v0.0.1 (registry file://[..]) {compiling} baz v0.0.1 (registry file://[..]) {compiling} bar v0.0.1 (registry file://[..]) {compiling} foo v0.0.1 ({dir}) ", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, dir = p.url(), reg = registry::registry()))); }); test!(nonexistent { Package::new("init", "0.0.1").publish(); let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] nonexistent = ">= 0.0.0" "#) .file("src/main.rs", "fn main() {}"); assert_that(p.cargo_process("build"), execs().with_status(101).with_stderr("\ no matching package named `nonexistent` found (required by `foo`) location searched: registry file://[..] 
version required: >= 0.0.0 ")); }); test!(wrong_version { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] foo = ">= 1.0.0" "#) .file("src/main.rs", "fn main() {}"); Package::new("foo", "0.0.1").publish(); Package::new("foo", "0.0.2").publish(); assert_that(p.cargo_process("build"), execs().with_status(101).with_stderr("\ no matching package named `foo` found (required by `foo`) location searched: registry file://[..] version required: >= 1.0.0 versions found: 0.0.2, 0.0.1 ")); Package::new("foo", "0.0.3").publish(); Package::new("foo", "0.0.4").publish(); assert_that(p.cargo_process("build"), execs().with_status(101).with_stderr("\ no matching package named `foo` found (required by `foo`) location searched: registry file://[..] version required: >= 1.0.0 versions found: 0.0.4, 0.0.3, 0.0.2, ... ")); }); test!(bad_cksum { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bad-cksum = ">= 0.0.0" "#) .file("src/main.rs", "fn main() {}"); let pkg = Package::new("bad-cksum", "0.0.1"); pkg.publish(); File::create(&pkg.archive_dst()).unwrap(); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(101).with_stderr("\ unable to get packages from source Caused by: failed to download package `bad-cksum v0.0.1 (registry file://[..])` from [..] Caused by: failed to verify the checksum of `bad-cksum v0.0.1 (registry file://[..])` ")); }); test!(update_registry { Package::new("init", "0.0.1").publish(); let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] notyet = ">= 0.0.0" "#) .file("src/main.rs", "fn main() {}"); assert_that(p.cargo_process("build"), execs().with_status(101).with_stderr("\ no matching package named `notyet` found (required by `foo`) location searched: registry file://[..] 
version required: >= 0.0.0 ")); Package::new("notyet", "0.0.1").publish(); assert_that(p.cargo("build"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `{reg}` {downloading} notyet v0.0.1 (registry file://[..]) {compiling} notyet v0.0.1 (registry file://[..]) {compiling} foo v0.0.1 ({dir}) ", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, dir = p.url(), reg = registry::registry()))); }); test!(package_with_path_deps { Package::new("init", "0.0.1").publish(); let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license = "MIT" description = "foo" repository = "bar" [dependencies.notyet] version = "0.0.1" path = "notyet" "#) .file("src/main.rs", "fn main() {}") .file("notyet/Cargo.toml", r#" [package] name = "notyet" version = "0.0.1" authors = [] "#) .file("notyet/src/lib.rs", ""); p.build(); assert_that(p.cargo("package").arg("-v"), execs().with_status(101).with_stderr("\ failed to verify package tarball Caused by: no matching package named `notyet` found (required by `foo`) location searched: registry file://[..] 
version required: ^0.0.1 ")); Package::new("notyet", "0.0.1").publish(); assert_that(p.cargo("package"), execs().with_status(0).with_stdout(format!("\ {packaging} foo v0.0.1 ({dir}) {verifying} foo v0.0.1 ({dir}) {updating} registry `[..]` {downloading} notyet v0.0.1 (registry file://[..]) {compiling} notyet v0.0.1 (registry file://[..]) {compiling} foo v0.0.1 ({dir}[..]) ", packaging = PACKAGING, verifying = VERIFYING, updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, dir = p.url(), ))); }); test!(lockfile_locks { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" "#) .file("src/main.rs", "fn main() {}"); p.build(); Package::new("bar", "0.0.1").publish(); assert_that(p.cargo("build"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `[..]` {downloading} bar v0.0.1 (registry file://[..]) {compiling} bar v0.0.1 (registry file://[..]) {compiling} foo v0.0.1 ({dir}) ", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, dir = p.url()))); p.root().move_into_the_past().unwrap(); Package::new("bar", "0.0.2").publish(); assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); }); test!(lockfile_locks_transitively { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" "#) .file("src/main.rs", "fn main() {}"); p.build(); Package::new("baz", "0.0.1").publish(); Package::new("bar", "0.0.1").dep("baz", "*").publish(); assert_that(p.cargo("build"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `[..]` {downloading} [..] v0.0.1 (registry file://[..]) {downloading} [..] 
v0.0.1 (registry file://[..]) {compiling} baz v0.0.1 (registry file://[..]) {compiling} bar v0.0.1 (registry file://[..]) {compiling} foo v0.0.1 ({dir}) ", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, dir = p.url()))); p.root().move_into_the_past().unwrap(); Package::new("baz", "0.0.2").publish(); Package::new("bar", "0.0.2").dep("baz", "*").publish(); assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); }); test!(yanks_are_not_used { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" "#) .file("src/main.rs", "fn main() {}"); p.build(); Package::new("baz", "0.0.1").publish(); Package::new("baz", "0.0.2").yanked(true).publish(); Package::new("bar", "0.0.1").dep("baz", "*").publish(); Package::new("bar", "0.0.2").dep("baz", "*").yanked(true).publish(); assert_that(p.cargo("build"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `[..]` {downloading} [..] v0.0.1 (registry file://[..]) {downloading} [..] v0.0.1 (registry file://[..]) {compiling} baz v0.0.1 (registry file://[..]) {compiling} bar v0.0.1 (registry file://[..]) {compiling} foo v0.0.1 ({dir}) ", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, dir = p.url()))); }); test!(relying_on_a_yank_is_bad { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" "#) .file("src/main.rs", "fn main() {}"); p.build(); Package::new("baz", "0.0.1").publish(); Package::new("baz", "0.0.2").yanked(true).publish(); Package::new("bar", "0.0.1").dep("baz", "=0.0.2").publish(); assert_that(p.cargo("build"), execs().with_status(101).with_stderr("\ no matching package named `baz` found (required by `bar`) location searched: registry file://[..] 
version required: = 0.0.2 versions found: 0.0.1 ")); }); test!(yanks_in_lockfiles_are_ok { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" "#) .file("src/main.rs", "fn main() {}"); p.build(); Package::new("bar", "0.0.1").publish(); assert_that(p.cargo("build"), execs().with_status(0)); fs::remove_dir_all(®istry::registry_path().join("3")).unwrap(); Package::new("bar", "0.0.1").yanked(true).publish(); assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); assert_that(p.cargo("update"), execs().with_status(101).with_stderr("\ no matching package named `bar` found (required by `foo`) location searched: registry file://[..] version required: * ")); }); test!(update_with_lockfile_if_packages_missing { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" "#) .file("src/main.rs", "fn main() {}"); p.build(); Package::new("bar", "0.0.1").publish(); assert_that(p.cargo("build"), execs().with_status(0)); p.root().move_into_the_past().unwrap(); paths::home().join(".cargo/registry").rm_rf().unwrap(); assert_that(p.cargo("build"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `[..]` {downloading} bar v0.0.1 (registry file://[..]) ", updating = UPDATING, downloading = DOWNLOADING))); }); test!(update_lockfile { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" "#) .file("src/main.rs", "fn main() {}"); p.build(); println!("0.0.1"); Package::new("bar", "0.0.1").publish(); assert_that(p.cargo("build"), execs().with_status(0)); Package::new("bar", "0.0.2").publish(); Package::new("bar", "0.0.3").publish(); paths::home().join(".cargo/registry").rm_rf().unwrap(); println!("0.0.2 update"); assert_that(p.cargo("update") .arg("-p").arg("bar").arg("--precise").arg("0.0.2"), execs().with_status(0).with_stdout(&format!("\ {updating} 
registry `[..]` {updating} bar v0.0.1 (registry file://[..]) -> v0.0.2 ", updating = UPDATING))); println!("0.0.2 build"); assert_that(p.cargo("build"), execs().with_status(0).with_stdout(&format!("\ {downloading} [..] v0.0.2 (registry file://[..]) {compiling} bar v0.0.2 (registry file://[..]) {compiling} foo v0.0.1 ({dir}) ", downloading = DOWNLOADING, compiling = COMPILING, dir = p.url()))); println!("0.0.3 update"); assert_that(p.cargo("update") .arg("-p").arg("bar"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `[..]` {updating} bar v0.0.2 (registry file://[..]) -> v0.0.3 ", updating = UPDATING))); println!("0.0.3 build"); assert_that(p.cargo("build"), execs().with_status(0).with_stdout(&format!("\ {downloading} [..] v0.0.3 (registry file://[..]) {compiling} bar v0.0.3 (registry file://[..]) {compiling} foo v0.0.1 ({dir}) ", downloading = DOWNLOADING, compiling = COMPILING, dir = p.url()))); println!("new dependencies update"); Package::new("bar", "0.0.4").dep("spam", "0.2.5").publish(); Package::new("spam", "0.2.5").publish(); assert_that(p.cargo("update") .arg("-p").arg("bar"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `[..]` {updating} bar v0.0.3 (registry file://[..]) -> v0.0.4 {adding} spam v0.2.5 (registry file://[..]) ", updating = UPDATING, adding = ADDING))); println!("new dependencies update"); Package::new("bar", "0.0.5").publish(); assert_that(p.cargo("update") .arg("-p").arg("bar"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `[..]` {updating} bar v0.0.4 (registry file://[..]) -> v0.0.5 {removing} spam v0.2.5 (registry file://[..]) ", updating = UPDATING, removing = REMOVING))); }); test!(dev_dependency_not_used { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] bar = "*" "#) .file("src/main.rs", "fn main() {}"); p.build(); Package::new("baz", "0.0.1").publish(); Package::new("bar", "0.0.1").dev_dep("baz", 
"*").publish(); assert_that(p.cargo("build"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `[..]` {downloading} [..] v0.0.1 (registry file://[..]) {compiling} bar v0.0.1 (registry file://[..]) {compiling} foo v0.0.1 ({dir}) ", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, dir = p.url()))); }); test!(login_with_no_cargo_dir { let home = paths::home().join("new-home"); fs::create_dir(&home).unwrap(); assert_that(::cargo_process().arg("login").arg("foo").arg("-v"), execs().with_status(0)); }); test!(bad_license_file { let p = project("all") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] license-file = "foo" description = "bar" repository = "baz" "#) .file("src/main.rs", r#" fn main() {} "#); assert_that(p.cargo_process("publish").arg("-v"), execs().with_status(101) .with_stderr("\ the license file `foo` does not exist")); }); test!(updating_a_dep { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.a] path = "a" "#) .file("src/main.rs", "fn main() {}") .file("a/Cargo.toml", r#" [project] name = "a" version = "0.0.1" authors = [] [dependencies] bar = "*" "#) .file("a/src/lib.rs", ""); p.build(); Package::new("bar", "0.0.1").publish(); assert_that(p.cargo("build"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `[..]` {downloading} bar v0.0.1 (registry file://[..]) {compiling} bar v0.0.1 (registry file://[..]) {compiling} a v0.0.1 ({dir}) {compiling} foo v0.0.1 ({dir}) ", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, dir = p.url()))); File::create(&p.root().join("a/Cargo.toml")).unwrap().write_all(br#" [project] name = "a" version = "0.0.1" authors = [] [dependencies] bar = "0.1.0" "#).unwrap(); Package::new("bar", "0.1.0").publish(); println!("second"); assert_that(p.cargo("build"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `[..]` {downloading} bar v0.1.0 
(registry file://[..]) {compiling} bar v0.1.0 (registry file://[..]) {compiling} a v0.0.1 ({dir}) {compiling} foo v0.0.1 ({dir}) ", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, dir = p.url()))); }); test!(git_and_registry_dep { let b = git::repo(&paths::root().join("b")) .file("Cargo.toml", r#" [project] name = "b" version = "0.0.1" authors = [] [dependencies] a = "0.0.1" "#) .file("src/lib.rs", ""); b.build(); let p = project("foo") .file("Cargo.toml", &format!(r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies] a = "0.0.1" [dependencies.b] git = '{}' "#, b.url())) .file("src/main.rs", "fn main() {}"); p.build(); Package::new("a", "0.0.1").publish(); p.root().move_into_the_past().unwrap(); assert_that(p.cargo("build"), execs().with_status(0).with_stdout(&format!("\ {updating} [..] {updating} [..] {downloading} a v0.0.1 (registry file://[..]) {compiling} a v0.0.1 (registry [..]) {compiling} b v0.0.1 ([..]) {compiling} foo v0.0.1 ({dir}) ", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, dir = p.url()))); p.root().move_into_the_past().unwrap(); println!("second"); assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); }); test!(update_publish_then_update { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] a = "0.1.0" "#) .file("src/main.rs", "fn main() {}"); p.build(); Package::new("a", "0.1.0").publish(); assert_that(p.cargo("build"), execs().with_status(0)); Package::new("a", "0.1.1").publish(); let lock = p.root().join("Cargo.lock"); let mut s = String::new(); File::open(&lock).unwrap().read_to_string(&mut s).unwrap(); File::create(&lock).unwrap() .write_all(s.replace("0.1.0", "0.1.1").as_bytes()).unwrap(); println!("second"); fs::remove_dir_all(&p.root().join("target")).unwrap(); assert_that(p.cargo("build"), execs().with_status(0).with_stdout(&format!("\ {updating} [..] 
{downloading} a v0.1.1 (registry file://[..]) {compiling} a v0.1.1 (registry [..]) {compiling} foo v0.5.0 ({dir}) ", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, dir = p.url()))); }); test!(fetch_downloads { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] a = "0.1.0" "#) .file("src/main.rs", "fn main() {}"); p.build(); Package::new("a", "0.1.0").publish(); assert_that(p.cargo("fetch"), execs().with_status(0) .with_stdout(format!("\ {updating} registry `[..]` {downloading} a v0.1.0 (registry [..]) ", updating = UPDATING, downloading = DOWNLOADING))); }); test!(update_transitive_dependency { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] a = "0.1.0" "#) .file("src/main.rs", "fn main() {}"); p.build(); Package::new("a", "0.1.0").dep("b", "*").publish(); Package::new("b", "0.1.0").publish(); assert_that(p.cargo("fetch"), execs().with_status(0)); Package::new("b", "0.1.1").publish(); assert_that(p.cargo("update").arg("-pb"), execs().with_status(0) .with_stdout(format!("\ {updating} registry `[..]` {updating} b v0.1.0 (registry [..]) -> v0.1.1 ", updating = UPDATING))); assert_that(p.cargo("build"), execs().with_status(0) .with_stdout(format!("\ {downloading} b v0.1.1 (registry file://[..]) {compiling} b v0.1.1 (registry [..]) {compiling} a v0.1.0 (registry [..]) {compiling} foo v0.5.0 ([..]) ", downloading = DOWNLOADING, compiling = COMPILING))); }); test!(update_backtracking_ok { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] webdriver = "0.1" "#) .file("src/main.rs", "fn main() {}"); p.build(); Package::new("webdriver", "0.1.0").dep("hyper", "0.6").publish(); Package::new("hyper", "0.6.5").dep("openssl", "0.1") .dep("cookie", "0.1") .publish(); Package::new("cookie", "0.1.0").dep("openssl", "0.1").publish(); Package::new("openssl", 
"0.1.0").publish(); assert_that(p.cargo("generate-lockfile"), execs().with_status(0)); Package::new("openssl", "0.1.1").publish(); Package::new("hyper", "0.6.6").dep("openssl", "0.1.1") .dep("cookie", "0.1.0") .publish(); assert_that(p.cargo("update").arg("-p").arg("hyper"), execs().with_status(0) .with_stdout(&format!("\ {updating} registry `[..]` ", updating = UPDATING))); }); test!(update_multiple_packages { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] a = "*" b = "*" c = "*" "#) .file("src/main.rs", "fn main() {}"); p.build(); Package::new("a", "0.1.0").publish(); Package::new("b", "0.1.0").publish(); Package::new("c", "0.1.0").publish(); assert_that(p.cargo("fetch"), execs().with_status(0)); Package::new("a", "0.1.1").publish(); Package::new("b", "0.1.1").publish(); Package::new("c", "0.1.1").publish(); assert_that(p.cargo("update").arg("-pa").arg("-pb"), execs().with_status(0) .with_stdout(format!("\ {updating} registry `[..]` {updating} a v0.1.0 (registry [..]) -> v0.1.1 {updating} b v0.1.0 (registry [..]) -> v0.1.1 ", updating = UPDATING))); assert_that(p.cargo("update").arg("-pb").arg("-pc"), execs().with_status(0) .with_stdout(format!("\ {updating} registry `[..]` {updating} c v0.1.0 (registry [..]) -> v0.1.1 ", updating = UPDATING))); assert_that(p.cargo("build"), execs().with_status(0) .with_stdout_contains(format!("\ {downloading} a v0.1.1 (registry file://[..])", downloading = DOWNLOADING)) .with_stdout_contains(format!("\ {downloading} b v0.1.1 (registry file://[..])", downloading = DOWNLOADING)) .with_stdout_contains(format!("\ {downloading} c v0.1.1 (registry file://[..])", downloading = DOWNLOADING)) .with_stdout_contains(format!("\ {compiling} a v0.1.1 (registry [..])", compiling = COMPILING)) .with_stdout_contains(format!("\ {compiling} b v0.1.1 (registry [..])", compiling = COMPILING)) .with_stdout_contains(format!("\ {compiling} c v0.1.1 (registry [..])", compiling = 
COMPILING)) .with_stdout_contains(format!("\ {compiling} foo v0.5.0 ([..])", compiling = COMPILING))); }); test!(bundled_crate_in_registry { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.5.0" authors = [] [dependencies] bar = "0.1" baz = "0.1" "#) .file("src/main.rs", "fn main() {}"); p.build(); Package::new("bar", "0.1.0").publish(); Package::new("baz", "0.1.0") .dep("bar", "0.1.0") .file("Cargo.toml", r#" [package] name = "baz" version = "0.1.0" authors = [] [dependencies] bar = { path = "bar", version = "0.1.0" } "#) .file("src/lib.rs", "") .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] "#) .file("bar/src/lib.rs", "") .publish(); assert_that(p.cargo("run"), execs().with_status(0)); }); cargo-0.8.0/tests/test_cargo_run.rs000066400000000000000000000337301264656333200173600ustar00rootroot00000000000000use std::path::MAIN_SEPARATOR as SEP; use support::{project, execs, path2url}; use support::{COMPILING, RUNNING}; use hamcrest::{assert_that, existing_file}; fn setup() { } test!(simple { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#) .file("src/main.rs", r#" fn main() { println!("hello"); } "#); assert_that(p.cargo_process("run"), execs().with_status(0).with_stdout(&format!("\ {compiling} foo v0.0.1 ({dir}) {running} `target{sep}debug{sep}foo[..]` hello ", compiling = COMPILING, running = RUNNING, dir = path2url(p.root()), sep = SEP))); assert_that(&p.bin("foo"), existing_file()); }); test!(simple_quiet { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#) .file("src/main.rs", r#" fn main() { println!("hello"); } "#); assert_that(p.cargo_process("run").arg("-q"), execs().with_status(0).with_stdout("\ hello ") ); }); test!(simple_quiet_and_verbose { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#) .file("src/main.rs", r#" fn main() { 
println!("hello"); } "#); assert_that(p.cargo_process("run").arg("-q").arg("-v"), execs().with_status(101).with_stderr("\ cannot set both --verbose and --quiet ") ); }); test!(simple_with_args { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#) .file("src/main.rs", r#" fn main() { assert_eq!(std::env::args().nth(1).unwrap(), "hello"); assert_eq!(std::env::args().nth(2).unwrap(), "world"); } "#); assert_that(p.cargo_process("run").arg("hello").arg("world"), execs().with_status(0)); }); test!(exit_code { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#) .file("src/main.rs", r#" fn main() { std::process::exit(2); } "#); assert_that(p.cargo_process("run"), execs().with_status(2) .with_stderr(&format!("\ Process didn't exit successfully: `target[..]foo[..]` (exit code: 2) ", ))); }); test!(exit_code_verbose { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#) .file("src/main.rs", r#" fn main() { std::process::exit(2); } "#); assert_that(p.cargo_process("run").arg("-v"), execs().with_status(2) .with_stderr(&format!("\ Process didn't exit successfully: `target[..]foo[..]` (exit code: 2) ", ))); }); test!(no_main_file { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", ""); assert_that(p.cargo_process("run"), execs().with_status(101) .with_stderr("a bin target must be available \ for `cargo run`\n")); }); test!(too_many_bins { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", "") .file("src/bin/a.rs", "") .file("src/bin/b.rs", ""); assert_that(p.cargo_process("run"), execs().with_status(101) .with_stderr("`cargo run` requires that a project only \ have one executable; use the `--bin` option \ to specify which one to run\n")); }); test!(specify_name { let p = 
project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", "") .file("src/bin/a.rs", r#" extern crate foo; fn main() { println!("hello a.rs"); } "#) .file("src/bin/b.rs", r#" extern crate foo; fn main() { println!("hello b.rs"); } "#); assert_that(p.cargo_process("run").arg("--bin").arg("a").arg("-v"), execs().with_status(0).with_stdout(&format!("\ {compiling} foo v0.0.1 ({dir}) {running} `rustc src[..]lib.rs [..]` {running} `rustc src[..]a.rs [..]` {running} `target{sep}debug{sep}a[..]` hello a.rs ", compiling = COMPILING, running = RUNNING, dir = path2url(p.root()), sep = SEP))); assert_that(p.cargo("run").arg("--bin").arg("b").arg("-v"), execs().with_status(0).with_stdout(&format!("\ {compiling} foo v0.0.1 ([..]) {running} `rustc src[..]b.rs [..]` {running} `target{sep}debug{sep}b[..]` hello b.rs ", running = RUNNING, compiling = COMPILING, sep = SEP))); }); test!(run_example { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", "") .file("examples/a.rs", r#" fn main() { println!("example"); } "#) .file("src/bin/a.rs", r#" fn main() { println!("bin"); } "#); assert_that(p.cargo_process("run").arg("--example").arg("a"), execs().with_status(0).with_stdout(&format!("\ {compiling} foo v0.0.1 ({dir}) {running} `target{sep}debug{sep}examples{sep}a[..]` example ", compiling = COMPILING, running = RUNNING, dir = path2url(p.root()), sep = SEP))); }); test!(either_name_or_example { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#) .file("src/bin/a.rs", r#" fn main() { println!("hello a.rs"); } "#) .file("examples/b.rs", r#" fn main() { println!("hello b.rs"); } "#); assert_that(p.cargo_process("run").arg("--bin").arg("a").arg("--example").arg("b"), execs().with_status(101) .with_stderr("`cargo run` can run at most one \ executable, but multiple were \ specified")); }); 
test!(one_bin_multiple_examples { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", "") .file("src/bin/main.rs", r#" fn main() { println!("hello main.rs"); } "#) .file("examples/a.rs", r#" fn main() { println!("hello a.rs"); } "#) .file("examples/b.rs", r#" fn main() { println!("hello b.rs"); } "#); assert_that(p.cargo_process("run"), execs().with_status(0).with_stdout(&format!("\ {compiling} foo v0.0.1 ({dir}) {running} `target{sep}debug{sep}main[..]` hello main.rs ", compiling = COMPILING, running = RUNNING, dir = path2url(p.root()), sep = SEP))); }); test!(example_with_release_flag { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] version = "*" path = "bar" "#) .file("examples/a.rs", r#" extern crate bar; fn main() { if cfg!(debug_assertions) { println!("slow1") } else { println!("fast1") } bar::baz(); } "#) .file("bar/Cargo.toml", r#" [project] name = "bar" version = "0.0.1" authors = [] [lib] name = "bar" "#) .file("bar/src/bar.rs", r#" pub fn baz() { if cfg!(debug_assertions) { println!("slow2") } else { println!("fast2") } } "#); assert_that(p.cargo_process("run").arg("-v").arg("--release").arg("--example").arg("a"), execs().with_status(0).with_stdout(&format!("\ {compiling} bar v0.0.1 ({url}) {running} `rustc bar{sep}src{sep}bar.rs --crate-name bar --crate-type lib \ -C opt-level=3 \ -C metadata=[..] \ -C extra-filename=[..] 
\ --out-dir {dir}{sep}target{sep}release{sep}deps \ --emit=dep-info,link \ -L dependency={dir}{sep}target{sep}release{sep}deps \ -L dependency={dir}{sep}target{sep}release{sep}deps` {compiling} foo v0.0.1 ({url}) {running} `rustc examples{sep}a.rs --crate-name a --crate-type bin \ -C opt-level=3 \ --out-dir {dir}{sep}target{sep}release{sep}examples \ --emit=dep-info,link \ -L dependency={dir}{sep}target{sep}release \ -L dependency={dir}{sep}target{sep}release{sep}deps \ --extern bar={dir}{sep}target{sep}release{sep}deps{sep}libbar-[..].rlib` {running} `target{sep}release{sep}examples{sep}a[..]` fast1 fast2 ", compiling = COMPILING, running = RUNNING, dir = p.root().display(), url = path2url(p.root()), sep = SEP))); assert_that(p.cargo("run").arg("-v").arg("--example").arg("a"), execs().with_status(0).with_stdout(&format!("\ {compiling} bar v0.0.1 ({url}) {running} `rustc bar{sep}src{sep}bar.rs --crate-name bar --crate-type lib \ -g \ -C metadata=[..] \ -C extra-filename=[..] \ --out-dir {dir}{sep}target{sep}debug{sep}deps \ --emit=dep-info,link \ -L dependency={dir}{sep}target{sep}debug{sep}deps \ -L dependency={dir}{sep}target{sep}debug{sep}deps` {compiling} foo v0.0.1 ({url}) {running} `rustc examples{sep}a.rs --crate-name a --crate-type bin \ -g \ --out-dir {dir}{sep}target{sep}debug{sep}examples \ --emit=dep-info,link \ -L dependency={dir}{sep}target{sep}debug \ -L dependency={dir}{sep}target{sep}debug{sep}deps \ --extern bar={dir}{sep}target{sep}debug{sep}deps{sep}libbar-[..].rlib` {running} `target{sep}debug{sep}examples{sep}a[..]` slow1 slow2 ", compiling = COMPILING, running = RUNNING, dir = p.root().display(), url = path2url(p.root()), sep = SEP))); }); test!(run_dylib_dep { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "bar" "#) .file("src/main.rs", r#" extern crate bar; fn main() { bar::bar(); } "#) .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors 
= [] [lib] name = "bar" crate-type = ["dylib"] "#) .file("bar/src/lib.rs", "pub fn bar() {}"); assert_that(p.cargo_process("run").arg("hello").arg("world"), execs().with_status(0)); }); test!(release_works { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#) .file("src/main.rs", r#" fn main() { if cfg!(debug_assertions) { panic!() } } "#); assert_that(p.cargo_process("run").arg("--release"), execs().with_status(0).with_stdout(&format!("\ {compiling} foo v0.0.1 ({dir}) {running} `target{sep}release{sep}foo[..]` ", compiling = COMPILING, running = RUNNING, dir = path2url(p.root()), sep = SEP))); assert_that(&p.release_bin("foo"), existing_file()); }); test!(run_bin_different_name { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [[bin]] name = "bar" "#) .file("src/bar.rs", r#" fn main() { } "#); assert_that(p.cargo_process("run"), execs().with_status(0)); }); test!(dashes_are_forwarded { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [[bin]] name = "bar" "#) .file("src/main.rs", r#" fn main() { let s: Vec = std::env::args().collect(); assert_eq!(s[1], "a"); assert_eq!(s[2], "--"); assert_eq!(s[3], "b"); } "#); assert_that(p.cargo_process("run").arg("--").arg("a").arg("--").arg("b"), execs().with_status(0)); }); test!(run_from_executable_folder { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#) .file("src/main.rs", r#" fn main() { println!("hello"); } "#); let cwd = p.root().join("target").join("debug"); p.cargo_process("build").exec_with_output().unwrap(); assert_that(p.cargo("run").cwd(cwd), execs().with_status(0).with_stdout(&format!("\ {running} `.{sep}foo[..]` hello ", running = RUNNING, sep = SEP ))); }); cargo-0.8.0/tests/test_cargo_rustc.rs000066400000000000000000000261701264656333200177140ustar00rootroot00000000000000use std::path::MAIN_SEPARATOR 
as SEP; use support::{execs, project}; use support::{COMPILING, RUNNING}; use hamcrest::assert_that; fn setup() { } fn cargo_rustc_error() -> &'static str { "extra arguments to `rustc` can only be passed to one target, consider filtering\n\ the package by passing e.g. `--lib` or `--bin NAME` to specify a single target" } test!(build_lib_for_foo { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/main.rs", r#" fn main() {} "#) .file("src/lib.rs", r#" "#); assert_that(p.cargo_process("rustc").arg("--lib").arg("-v"), execs() .with_status(0) .with_stdout(format!("\ {compiling} foo v0.0.1 ({url}) {running} `rustc src{sep}lib.rs --crate-name foo --crate-type lib -g \ --out-dir {dir}{sep}target{sep}debug \ --emit=dep-info,link \ -L dependency={dir}{sep}target{sep}debug \ -L dependency={dir}{sep}target{sep}debug{sep}deps` ", running = RUNNING, compiling = COMPILING, sep = SEP, dir = p.root().display(), url = p.url()))); }); test!(build_lib_and_allow_unstable_options { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/main.rs", r#" fn main() {} "#) .file("src/lib.rs", r#" "#); assert_that(p.cargo_process("rustc").arg("--lib").arg("-v") .arg("--").arg("-Z").arg("unstable-options"), execs() .with_status(0) .with_stdout(format!("\ {compiling} foo v0.0.1 ({url}) {running} `rustc src{sep}lib.rs --crate-name foo --crate-type lib -g \ -Z unstable-options \ --out-dir {dir}{sep}target{sep}debug \ --emit=dep-info,link \ -L dependency={dir}{sep}target{sep}debug \ -L dependency={dir}{sep}target{sep}debug{sep}deps` ", running = RUNNING, compiling = COMPILING, sep = SEP, dir = p.root().display(), url = p.url()))) }); test!(build_main_and_allow_unstable_options { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/main.rs", r#" fn main() {} "#) .file("src/lib.rs", r#" "#); 
assert_that(p.cargo_process("rustc").arg("-v").arg("--bin").arg("foo") .arg("--").arg("-Z").arg("unstable-options"), execs() .with_status(0) .with_stdout(&format!("\ {compiling} {name} v{version} ({url}) {running} `rustc src{sep}lib.rs --crate-name {name} --crate-type lib -g \ --out-dir {dir}{sep}target{sep}debug \ --emit=dep-info,link \ -L dependency={dir}{sep}target{sep}debug \ -L dependency={dir}{sep}target{sep}debug{sep}deps` {running} `rustc src{sep}main.rs --crate-name {name} --crate-type bin -g \ -Z unstable-options \ --out-dir {dir}{sep}target{sep}debug \ --emit=dep-info,link \ -L dependency={dir}{sep}target{sep}debug \ -L dependency={dir}{sep}target{sep}debug{sep}deps \ --extern {name}={dir}{sep}target{sep}debug{sep}lib{name}.rlib` ", running = RUNNING, compiling = COMPILING, sep = SEP, dir = p.root().display(), url = p.url(), name = "foo", version = "0.0.1"))); }); test!(fails_when_trying_to_build_main_and_lib_with_args { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/main.rs", r#" fn main() {} "#) .file("src/lib.rs", r#" "#); assert_that(p.cargo_process("rustc").arg("-v") .arg("--").arg("-Z").arg("unstable-options"), execs() .with_status(101) .with_stderr(cargo_rustc_error())); }); test!(build_with_args_to_one_of_multiple_binaries { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/bin/foo.rs", r#" fn main() {} "#) .file("src/bin/bar.rs", r#" fn main() {} "#) .file("src/bin/baz.rs", r#" fn main() {} "#) .file("src/lib.rs", r#" "#); assert_that(p.cargo_process("rustc").arg("-v").arg("--bin").arg("bar") .arg("--").arg("-Z").arg("unstable-options"), execs() .with_status(0) .with_stdout(format!("\ {compiling} foo v0.0.1 ({url}) {running} `rustc src{sep}lib.rs --crate-name foo --crate-type lib -g \ --out-dir {dir}{sep}target{sep}debug [..]` {running} `rustc src{sep}bin{sep}bar.rs --crate-name bar --crate-type bin -g \ -Z 
unstable-options [..]` ", compiling = COMPILING, running = RUNNING, sep = SEP, dir = p.root().display(), url = p.url()))); }); test!(fails_with_args_to_all_binaries { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/bin/foo.rs", r#" fn main() {} "#) .file("src/bin/bar.rs", r#" fn main() {} "#) .file("src/bin/baz.rs", r#" fn main() {} "#) .file("src/lib.rs", r#" "#); assert_that(p.cargo_process("rustc").arg("-v") .arg("--").arg("-Z").arg("unstable-options"), execs() .with_status(101) .with_stderr(cargo_rustc_error())); }); test!(build_with_args_to_one_of_multiple_tests { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("tests/foo.rs", r#" "#) .file("tests/bar.rs", r#" "#) .file("tests/baz.rs", r#" "#) .file("src/lib.rs", r#" "#); assert_that(p.cargo_process("rustc").arg("-v").arg("--test").arg("bar") .arg("--").arg("-Z").arg("unstable-options"), execs() .with_status(0) .with_stdout(format!("\ {compiling} foo v0.0.1 ({url}) {running} `rustc src{sep}lib.rs --crate-name foo --crate-type lib -g \ --out-dir {dir}{sep}target{sep}debug [..]` {running} `rustc tests{sep}bar.rs --crate-name bar --crate-type bin -g \ -Z unstable-options [..]--test[..]` ", compiling = COMPILING, running = RUNNING, sep = SEP, dir = p.root().display(), url = p.url()))); }); test!(build_foo_with_bar_dependency { let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" "#) .file("src/main.rs", r#" extern crate bar; fn main() { bar::baz() } "#); let bar = project("bar") .file("Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] "#) .file("src/lib.rs", r#" pub fn baz() {} "#); bar.build(); assert_that(foo.cargo_process("rustc").arg("-v").arg("--").arg("-Z").arg("unstable-options"), execs() .with_status(0) .with_stdout(format!("\ {compiling} bar v0.1.0 ({url}) {running} `[..] 
-g -C [..]` {compiling} foo v0.0.1 ({url}) {running} `[..] -g -Z unstable-options [..]` ", compiling = COMPILING, running = RUNNING, url = foo.url()))); }); test!(build_only_bar_dependency { let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" "#) .file("src/main.rs", r#" extern crate bar; fn main() { bar::baz() } "#); let bar = project("bar") .file("Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] "#) .file("src/lib.rs", r#" pub fn baz() {} "#); bar.build(); assert_that(foo.cargo_process("rustc").arg("-v").arg("-p").arg("bar") .arg("--").arg("-Z").arg("unstable-options"), execs() .with_status(0) .with_stdout(format!("\ {compiling} bar v0.1.0 ({url}) {running} `[..]--crate-name bar --crate-type lib [..] -Z unstable-options [..]` ", compiling = COMPILING, running = RUNNING, url = foo.url()))); }); test!(fail_with_multiple_packages { let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" [dependencies.baz] path = "../baz" "#) .file("src/main.rs", r#" fn main() {} "#); foo.build(); let bar = project("bar") .file("Cargo.toml", r#" [package] name = "bar" version = "0.1.0" authors = [] "#) .file("src/main.rs", r#" fn main() { if cfg!(flag = "1") { println!("Yeah from bar!"); } } "#); bar.build(); let baz = project("baz") .file("Cargo.toml", r#" [package] name = "baz" version = "0.1.0" authors = [] "#) .file("src/main.rs", r#" fn main() { if cfg!(flag = "1") { println!("Yeah from baz!"); } } "#); baz.build(); assert_that(foo.cargo("rustc").arg("-v").arg("-p").arg("bar") .arg("-p").arg("baz"), execs().with_status(1).with_stderr("\ Invalid arguments. 
Usage: cargo rustc [options] [--] [...]".to_string())); }); test!(rustc_with_other_profile { let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dev-dependencies] a = { path = "a" } "#) .file("src/main.rs", r#" #[cfg(test)] extern crate a; #[test] fn foo() {} "#) .file("a/Cargo.toml", r#" [package] name = "a" version = "0.1.0" authors = [] "#) .file("a/src/lib.rs", ""); foo.build(); assert_that(foo.cargo("rustc").arg("--profile").arg("test"), execs().with_status(0)); }); cargo-0.8.0/tests/test_cargo_rustdoc.rs000066400000000000000000000121551264656333200202350ustar00rootroot00000000000000use std::path::MAIN_SEPARATOR as SEP; use support::{execs, project}; use support::{COMPILING, RUNNING, DOCUMENTING}; use hamcrest::{assert_that}; fn setup() { } test!(rustdoc_simple { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", r#" "#); assert_that(p.cargo_process("rustdoc").arg("-v"), execs() .with_status(0) .with_stdout(format!("\ {documenting} foo v0.0.1 ({url}) {running} `rustdoc src{sep}lib.rs --crate-name foo \ -o {dir}{sep}target{sep}doc \ -L dependency={dir}{sep}target{sep}debug \ -L dependency={dir}{sep}target{sep}debug{sep}deps` ", running = RUNNING, documenting = DOCUMENTING, sep = SEP, dir = p.root().display(), url = p.url()))); }); test!(rustdoc_args { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", r#" "#); assert_that(p.cargo_process("rustdoc").arg("-v").arg("--").arg("--no-defaults"), execs() .with_status(0) .with_stdout(format!("\ {documenting} foo v0.0.1 ({url}) {running} `rustdoc src{sep}lib.rs --crate-name foo \ -o {dir}{sep}target{sep}doc \ --no-defaults \ -L dependency={dir}{sep}target{sep}debug \ -L dependency={dir}{sep}target{sep}debug{sep}deps` ", running = RUNNING, documenting = DOCUMENTING, sep = SEP, dir = p.root().display(), url = p.url()))); }); 
test!(rustdoc_foo_with_bar_dependency { let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" "#) .file("src/lib.rs", r#" extern crate bar; pub fn foo() {} "#); let bar = project("bar") .file("Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] "#) .file("src/lib.rs", r#" pub fn baz() {} "#); bar.build(); assert_that(foo.cargo_process("rustdoc").arg("-v").arg("--").arg("--no-defaults"), execs() .with_status(0) .with_stdout(format!("\ {compiling} bar v0.0.1 ({url}) {running} `rustc {bar_dir}{sep}src{sep}lib.rs [..]` {documenting} foo v0.0.1 ({url}) {running} `rustdoc src{sep}lib.rs --crate-name foo \ -o {dir}{sep}target{sep}doc \ --no-defaults \ -L dependency={dir}{sep}target{sep}debug \ -L dependency={dir}{sep}target{sep}debug{sep}deps \ --extern [..]` ", running = RUNNING, compiling = COMPILING, sep = SEP, documenting = DOCUMENTING, dir = foo.root().display(), url = foo.url(), bar_dir = bar.root().display()))); }); test!(rustdoc_only_bar_dependency { let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" "#) .file("src/main.rs", r#" extern crate bar; fn main() { bar::baz() } "#); let bar = project("bar") .file("Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] "#) .file("src/lib.rs", r#" pub fn baz() {} "#); bar.build(); assert_that(foo.cargo_process("rustdoc").arg("-v").arg("-p").arg("bar") .arg("--").arg("--no-defaults"), execs() .with_status(0) .with_stdout(format!("\ {documenting} bar v0.0.1 ({url}) {running} `rustdoc {bar_dir}{sep}src{sep}lib.rs --crate-name bar \ -o {dir}{sep}target{sep}doc \ --no-defaults \ -L dependency={dir}{sep}target{sep}debug{sep}deps \ -L dependency={dir}{sep}target{sep}debug{sep}deps` ", running = RUNNING, documenting = DOCUMENTING, sep = SEP, dir = foo.root().display(), url = foo.url(), bar_dir = bar.root().display()))); }); 
test!(rustdoc_same_name_err { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/main.rs", r#" fn main() {} "#) .file("src/lib.rs", r#" "#); assert_that(p.cargo_process("rustdoc").arg("-v") .arg("--").arg("--no-defaults"), execs() .with_status(101) .with_stderr("cannot document a package where a library and a \ binary have the same name. Consider renaming one \ or marking the target as `doc = false`")); }); cargo-0.8.0/tests/test_cargo_search.rs000066400000000000000000000057471264656333200200300ustar00rootroot00000000000000use std::fs::{self, File}; use std::io::prelude::*; use std::path::PathBuf; use url::Url; use cargo::util::ProcessBuilder; use support::UPDATING; use support::execs; use support::paths; use support::git::repo; use hamcrest::assert_that; fn registry_path() -> PathBuf { paths::root().join("registry") } fn registry() -> Url { Url::from_file_path(&*registry_path()).ok().unwrap() } fn api_path() -> PathBuf { paths::root().join("api") } fn api() -> Url { Url::from_file_path(&*api_path()).ok().unwrap() } fn setup() { let config = paths::root().join(".cargo/config"); fs::create_dir_all(config.parent().unwrap()).unwrap(); File::create(&config).unwrap().write_all(format!(r#" [registry] index = "{reg}" "#, reg = registry()).as_bytes()).unwrap(); fs::create_dir_all(&api_path().join("api/v1")).unwrap(); repo(®istry_path()) .file("config.json", &format!(r#"{{ "dl": "{0}", "api": "{0}" }}"#, api())) .build(); } fn cargo_process(s: &str) -> ProcessBuilder { let mut b = ::cargo_process(); b.arg(s); return b } test!(simple { let contents = r#"{ "crates": [{ "created_at": "2014-11-16T20:17:35Z", "description": "Design by contract style assertions for Rust", "documentation": null, "downloads": 2, "homepage": null, "id": "hoare", "keywords": [], "license": null, "links": { "owners": "/api/v1/crates/hoare/owners", "reverse_dependencies": "/api/v1/crates/hoare/reverse_dependencies", "version_downloads": 
"/api/v1/crates/hoare/downloads", "versions": "/api/v1/crates/hoare/versions" }, "max_version": "0.1.1", "name": "hoare", "repository": "https://github.com/nick29581/libhoare", "updated_at": "2014-11-20T21:49:21Z", "versions": null }], "meta": { "total": 1 } }"#; let base = api_path().join("api/v1/crates"); // Older versions of curl don't peel off query parameters when looking for // filenames, so just make both files. // // On windows, though, `?` is an invalid character, but we always build curl // from source there anyway! File::create(&base).unwrap().write_all(contents.as_bytes()).unwrap(); if !cfg!(windows) { File::create(&base.with_file_name("crates?q=postgres")).unwrap() .write_all(contents.as_bytes()).unwrap(); } assert_that(cargo_process("search").arg("postgres"), execs().with_status(0).with_stdout(format!("\ {updating} registry `[..]` hoare (0.1.1) Design by contract style assertions for Rust", updating = UPDATING))); }); test!(help { assert_that(cargo_process("search").arg("-h"), execs().with_status(0)); assert_that(cargo_process("help").arg("search"), execs().with_status(0)); }); cargo-0.8.0/tests/test_cargo_test.rs000066400000000000000000001436271264656333200175420ustar00rootroot00000000000000use std::fs::File; use std::io::prelude::*; use std::str; use std::thread; use support::{project, execs, basic_bin_manifest, basic_lib_manifest}; use support::{COMPILING, RUNNING, DOCTEST}; use support::paths::CargoPathExt; use hamcrest::{assert_that, existing_file, is_not}; use cargo::util::process; fn setup() {} test!(cargo_test_simple { let p = project("foo") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", r#" fn hello() -> &'static str { "hello" } pub fn main() { println!("{}", hello()) } #[test] fn test_hello() { assert_eq!(hello(), "hello") }"#); assert_that(p.cargo_process("build"), execs()); assert_that(&p.bin("foo"), existing_file()); assert_that(process(&p.bin("foo")), execs().with_stdout("hello\n")); assert_that(p.cargo("test"), 
execs().with_stdout(format!("\ {} foo v0.5.0 ({}) {} target[..]foo-[..] running 1 test test test_hello ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured ", COMPILING, p.url(), RUNNING))); }); test!(cargo_test_release { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.1.0" [dependencies] bar = { path = "bar" } "#) .file("src/lib.rs", r#" extern crate bar; pub fn foo() { bar::bar(); } #[test] fn test() { foo(); } "#) .file("tests/test.rs", r#" extern crate foo; #[test] fn test() { foo::foo(); } "#) .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] "#) .file("bar/src/lib.rs", "pub fn bar() {}"); assert_that(p.cargo_process("test").arg("-v").arg("--release"), execs().with_stdout(format!("\ {compiling} bar v0.0.1 ({dir}) {running} [..] -C opt-level=3 [..] {compiling} foo v0.1.0 ({dir}) {running} [..] -C opt-level=3 [..] {running} [..] -C opt-level=3 [..] {running} [..] -C opt-level=3 [..] {running} `[..]target[..]foo-[..]` running 1 test test test ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {running} `[..]target[..]test-[..]` running 1 test test test ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {doctest} foo {running} `rustdoc --test [..]lib.rs[..]` running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, dir = p.url(), running = RUNNING, doctest = DOCTEST))); }); test!(cargo_test_verbose { let p = project("foo") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", r#" fn main() {} #[test] fn test_hello() {} "#); assert_that(p.cargo_process("test").arg("-v").arg("hello"), execs().with_stdout(format!("\ {compiling} foo v0.5.0 ({url}) {running} `rustc src[..]foo.rs [..]` {running} `[..]target[..]foo-[..] hello` running 1 test test test_hello ... ok test result: ok. 
1 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, url = p.url(), running = RUNNING))); }); test!(many_similar_names { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", " pub fn foo() {} #[test] fn lib_test() {} ") .file("src/main.rs", " extern crate foo; fn main() {} #[test] fn bin_test() { foo::foo() } ") .file("tests/foo.rs", r#" extern crate foo; #[test] fn test_test() { foo::foo() } "#); let output = p.cargo_process("test").arg("-v").exec_with_output().unwrap(); let output = str::from_utf8(&output.stdout).unwrap(); assert!(output.contains("test bin_test"), "bin_test missing\n{}", output); assert!(output.contains("test lib_test"), "lib_test missing\n{}", output); assert!(output.contains("test test_test"), "test_test missing\n{}", output); }); test!(cargo_test_failing_test { if !::can_panic() { return } let p = project("foo") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", r#" fn hello() -> &'static str { "hello" } pub fn main() { println!("{}", hello()) } #[test] fn test_hello() { assert_eq!(hello(), "nope") }"#); assert_that(p.cargo_process("build"), execs()); assert_that(&p.bin("foo"), existing_file()); assert_that(process(&p.bin("foo")), execs().with_stdout("hello\n")); assert_that(p.cargo("test"), execs().with_stdout_contains(format!("\ {compiling} foo v0.5.0 ({url}) {running} target[..]foo-[..] running 1 test test test_hello ... FAILED failures: ---- test_hello stdout ---- thread 'test_hello' panicked at 'assertion failed: \ `(left == right)` (left: \ `\"hello\"`, right: `\"nope\"`)', src[..]foo.rs:12 ", compiling = COMPILING, url = p.url(), running = RUNNING)) .with_stdout_contains("\ failures: test_hello test result: FAILED. 
0 passed; 1 failed; 0 ignored; 0 measured ") .with_status(101)); }); test!(test_with_lib_dep { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [[bin]] name = "baz" path = "src/main.rs" "#) .file("src/lib.rs", r#" /// /// ```rust /// extern crate foo; /// fn main() { /// println!("{:?}", foo::foo()); /// } /// ``` /// pub fn foo(){} #[test] fn lib_test() {} "#) .file("src/main.rs", " extern crate foo; fn main() {} #[test] fn bin_test() {} "); assert_that(p.cargo_process("test"), execs().with_stdout(format!("\ {} foo v0.0.1 ({}) {running} target[..]baz-[..] running 1 test test bin_test ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {running} target[..]foo[..] running 1 test test lib_test ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {doctest} foo running 1 test test foo_0 ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured ", COMPILING, p.url(), running = RUNNING, doctest = DOCTEST))) }); test!(test_with_deep_lib_dep { let p = project("bar") .file("Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [dependencies.foo] path = "../foo" "#) .file("src/lib.rs", " extern crate foo; /// ``` /// bar::bar(); /// ``` pub fn bar() {} #[test] fn bar_test() { foo::foo(); } "); let p2 = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", " pub fn foo() {} #[test] fn foo_test() {} "); p2.build(); assert_that(p.cargo_process("test"), execs().with_status(0) .with_stdout(&format!("\ {compiling} foo v0.0.1 ({dir}) {compiling} bar v0.0.1 ({dir}) {running} target[..] running 1 test test bar_test ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {doctest} bar running 1 test test bar_0 ... ok test result: ok. 
1 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, running = RUNNING, doctest = DOCTEST, dir = p.url()))); }); test!(external_test_explicit { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [[test]] name = "test" path = "src/test.rs" "#) .file("src/lib.rs", r#" pub fn get_hello() -> &'static str { "Hello" } #[test] fn internal_test() {} "#) .file("src/test.rs", r#" extern crate foo; #[test] fn external_test() { assert_eq!(foo::get_hello(), "Hello") } "#); assert_that(p.cargo_process("test"), execs().with_stdout(format!("\ {} foo v0.0.1 ({}) {running} target[..]foo-[..] running 1 test test internal_test ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {running} target[..]test-[..] running 1 test test external_test ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {doctest} foo running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured ", COMPILING, p.url(), running = RUNNING, doctest = DOCTEST))) }); test!(external_test_implicit { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", r#" pub fn get_hello() -> &'static str { "Hello" } #[test] fn internal_test() {} "#) .file("tests/external.rs", r#" extern crate foo; #[test] fn external_test() { assert_eq!(foo::get_hello(), "Hello") } "#); assert_that(p.cargo_process("test"), execs().with_stdout(format!("\ {} foo v0.0.1 ({}) {running} target[..]external-[..] running 1 test test external_test ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {running} target[..]foo-[..] running 1 test test internal_test ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {doctest} foo running 0 tests test result: ok. 
0 passed; 0 failed; 0 ignored; 0 measured ", COMPILING, p.url(), running = RUNNING, doctest = DOCTEST))) }); test!(dont_run_examples { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", r#" "#) .file("examples/dont-run-me-i-will-fail.rs", r#" fn main() { panic!("Examples should not be run by 'cargo test'"); } "#); assert_that(p.cargo_process("test"), execs().with_status(0)); }); test!(pass_through_command_line { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", " #[test] fn foo() {} #[test] fn bar() {} "); assert_that(p.cargo_process("test").arg("bar"), execs().with_status(0) .with_stdout(&format!("\ {compiling} foo v0.0.1 ({dir}) {running} target[..]foo-[..] running 1 test test bar ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {doctest} foo running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, running = RUNNING, doctest = DOCTEST, dir = p.url()))); assert_that(p.cargo("test").arg("foo"), execs().with_status(0) .with_stdout(&format!("\ {running} target[..]foo-[..] running 1 test test foo ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {doctest} foo running 0 tests test result: ok. 
0 passed; 0 failed; 0 ignored; 0 measured ", running = RUNNING, doctest = DOCTEST))); }); // Regression test for running cargo-test twice with // tests in an rlib test!(cargo_test_twice { let p = project("test_twice") .file("Cargo.toml", &basic_lib_manifest("test_twice")) .file("src/test_twice.rs", r#" #![crate_type = "rlib"] #[test] fn dummy_test() { } "#); p.cargo_process("build"); for _ in 0..2 { assert_that(p.cargo("test"), execs().with_status(0)); } }); test!(lib_bin_same_name { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [lib] name = "foo" [[bin]] name = "foo" "#) .file("src/lib.rs", " #[test] fn lib_test() {} ") .file("src/main.rs", " extern crate foo; #[test] fn bin_test() {} "); assert_that(p.cargo_process("test"), execs().with_stdout(format!("\ {} foo v0.0.1 ({}) {running} target[..]foo-[..] running 1 test test [..] ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {running} target[..]foo-[..] running 1 test test [..] ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {doctest} foo running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured ", COMPILING, p.url(), running = RUNNING, doctest = DOCTEST))) }); test!(lib_with_standard_name { let p = project("foo") .file("Cargo.toml", r#" [package] name = "syntax" version = "0.0.1" authors = [] "#) .file("src/lib.rs", " /// ``` /// syntax::foo(); /// ``` pub fn foo() {} #[test] fn foo_test() {} ") .file("tests/test.rs", " extern crate syntax; #[test] fn test() { syntax::foo() } "); assert_that(p.cargo_process("test"), execs().with_status(0) .with_stdout(&format!("\ {compiling} syntax v0.0.1 ({dir}) {running} target[..]syntax-[..] running 1 test test foo_test ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {running} target[..]test-[..] running 1 test test test ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {doctest} syntax running 1 test test foo_0 ... ok test result: ok. 
1 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, running = RUNNING, doctest = DOCTEST, dir = p.url()))); }); test!(lib_with_standard_name2 { let p = project("foo") .file("Cargo.toml", r#" [package] name = "syntax" version = "0.0.1" authors = [] [lib] name = "syntax" test = false doctest = false "#) .file("src/lib.rs", " pub fn foo() {} ") .file("src/main.rs", " extern crate syntax; fn main() {} #[test] fn test() { syntax::foo() } "); assert_that(p.cargo_process("test"), execs().with_status(0) .with_stdout(&format!("\ {compiling} syntax v0.0.1 ({dir}) {running} target[..]syntax-[..] running 1 test test test ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, running = RUNNING, dir = p.url()))); }); test!(lib_without_name { let p = project("foo") .file("Cargo.toml", r#" [package] name = "syntax" version = "0.0.1" authors = [] [lib] test = false doctest = false "#) .file("src/lib.rs", " pub fn foo() {} ") .file("src/main.rs", " extern crate syntax; fn main() {} #[test] fn test() { syntax::foo() } "); assert_that(p.cargo_process("test"), execs().with_status(0) .with_stdout(&format!("\ {compiling} syntax v0.0.1 ({dir}) {running} target[..]syntax-[..] running 1 test test test ... ok test result: ok. 
1 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, running = RUNNING, dir = p.url()))); }); test!(bin_without_name { let p = project("foo") .file("Cargo.toml", r#" [package] name = "syntax" version = "0.0.1" authors = [] [lib] test = false doctest = false [[bin]] path = "src/main.rs" "#) .file("src/lib.rs", " pub fn foo() {} ") .file("src/main.rs", " extern crate syntax; fn main() {} #[test] fn test() { syntax::foo() } "); assert_that(p.cargo_process("test"), execs().with_status(101) .with_stderr(&format!("\ failed to parse manifest at `[..]` Caused by: binary target bin.name is required"))); }); test!(bench_without_name { let p = project("foo") .file("Cargo.toml", r#" [package] name = "syntax" version = "0.0.1" authors = [] [lib] test = false doctest = false [[bench]] path = "src/bench.rs" "#) .file("src/lib.rs", " pub fn foo() {} ") .file("src/main.rs", " extern crate syntax; fn main() {} #[test] fn test() { syntax::foo() } ") .file("src/bench.rs", " #![feature(test)] extern crate syntax; extern crate test; #[bench] fn external_bench(_b: &mut test::Bencher) {} "); assert_that(p.cargo_process("test"), execs().with_status(101) .with_stderr(&format!("\ failed to parse manifest at `[..]` Caused by: bench target bench.name is required"))); }); test!(test_without_name { let p = project("foo") .file("Cargo.toml", r#" [package] name = "syntax" version = "0.0.1" authors = [] [lib] test = false doctest = false [[test]] path = "src/test.rs" "#) .file("src/lib.rs", r#" pub fn foo() {} pub fn get_hello() -> &'static str { "Hello" } "#) .file("src/main.rs", " extern crate syntax; fn main() {} #[test] fn test() { syntax::foo() } ") .file("src/test.rs", r#" extern crate syntax; #[test] fn external_test() { assert_eq!(syntax::get_hello(), "Hello") } "#); assert_that(p.cargo_process("test"), execs().with_status(101) .with_stderr(&format!("\ failed to parse manifest at `[..]` Caused by: test target test.name is required"))); }); test!(example_without_name { let p = 
project("foo") .file("Cargo.toml", r#" [package] name = "syntax" version = "0.0.1" authors = [] [lib] test = false doctest = false [[example]] path = "examples/example.rs" "#) .file("src/lib.rs", " pub fn foo() {} ") .file("src/main.rs", " extern crate syntax; fn main() {} #[test] fn test() { syntax::foo() } ") .file("examples/example.rs", r#" extern crate syntax; fn main() { println!("example1"); } "#); assert_that(p.cargo_process("test"), execs().with_status(101) .with_stderr(&format!("\ failed to parse manifest at `[..]` Caused by: example target example.name is required"))); }); test!(bin_there_for_integration { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/main.rs", " fn main() { std::process::exit(101); } #[test] fn main_test() {} ") .file("tests/foo.rs", r#" use std::process::Command; #[test] fn test_test() { let status = Command::new("target/debug/foo").status().unwrap(); assert_eq!(status.code(), Some(101)); } "#); let output = p.cargo_process("test").arg("-v").exec_with_output().unwrap(); let output = str::from_utf8(&output.stdout).unwrap(); assert!(output.contains("main_test ... ok"), "no main_test\n{}", output); assert!(output.contains("test_test ... 
ok"), "no test_test\n{}", output); }); test!(test_dylib { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [lib] name = "foo" crate_type = ["dylib"] [dependencies.bar] path = "bar" "#) .file("src/lib.rs", r#" extern crate bar as the_bar; pub fn bar() { the_bar::baz(); } #[test] fn foo() { bar(); } "#) .file("tests/test.rs", r#" extern crate foo as the_foo; #[test] fn foo() { the_foo::bar(); } "#) .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [lib] name = "bar" crate_type = ["dylib"] "#) .file("bar/src/lib.rs", " pub fn baz() {} "); assert_that(p.cargo_process("test"), execs().with_status(0) .with_stdout(&format!("\ {compiling} bar v0.0.1 ({dir}) {compiling} foo v0.0.1 ({dir}) {running} target[..]foo-[..] running 1 test test foo ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {running} target[..]test-[..] running 1 test test foo ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, running = RUNNING, dir = p.url()))); p.root().move_into_the_past().unwrap(); assert_that(p.cargo("test"), execs().with_status(0) .with_stdout(format!("\ {running} target[..]foo-[..] running 1 test test foo ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {running} target[..]test-[..] running 1 test test foo ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured ", running = RUNNING))); }); test!(test_twice_with_build_cmd { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" "#) .file("build.rs", "fn main() {}") .file("src/lib.rs", " #[test] fn foo() {} "); assert_that(p.cargo_process("test"), execs().with_status(0) .with_stdout(&format!("\ {compiling} foo v0.0.1 ({dir}) {running} target[..]foo-[..] running 1 test test foo ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {doctest} foo running 0 tests test result: ok. 
0 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, running = RUNNING, doctest = DOCTEST, dir = p.url()))); assert_that(p.cargo("test"), execs().with_status(0) .with_stdout(format!("\ {running} target[..]foo-[..] running 1 test test foo ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {doctest} foo running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured ", running = RUNNING, doctest = DOCTEST))); }); test!(test_then_build { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", " #[test] fn foo() {} "); assert_that(p.cargo_process("test"), execs().with_status(0) .with_stdout(&format!("\ {compiling} foo v0.0.1 ({dir}) {running} target[..]foo-[..] running 1 test test foo ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {doctest} foo running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, running = RUNNING, doctest = DOCTEST, dir = p.url()))); assert_that(p.cargo("build"), execs().with_status(0) .with_stdout("")); }); test!(test_no_run { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", " #[test] fn foo() { panic!() } "); assert_that(p.cargo_process("test").arg("--no-run"), execs().with_status(0) .with_stdout(&format!("\ {compiling} foo v0.0.1 ({dir}) ", compiling = COMPILING, dir = p.url()))); }); test!(test_run_specific_bin_target { let prj = project("foo") .file("Cargo.toml" , r#" [package] name = "foo" version = "0.0.1" authors = [] [[bin]] name="bin1" path="src/bin1.rs" [[bin]] name="bin2" path="src/bin2.rs" "#) .file("src/bin1.rs", "#[test] fn test1() { }") .file("src/bin2.rs", "#[test] fn test2() { }"); let expected_stdout = format!("\ {compiling} foo v0.0.1 ({dir}) {running} target[..]bin2-[..] running 1 test test test2 ... ok test result: ok. 
1 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, running = RUNNING, dir = prj.url()); assert_that(prj.cargo_process("test").arg("--bin").arg("bin2"), execs().with_status(0).with_stdout(&expected_stdout)); }); test!(test_run_specific_test_target { let prj = project("foo") .file("Cargo.toml" , r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/bin/a.rs", "fn main() { }") .file("src/bin/b.rs", "#[test] fn test_b() { } fn main() { }") .file("tests/a.rs", "#[test] fn test_a() { }") .file("tests/b.rs", "#[test] fn test_b() { }"); let expected_stdout = format!("\ {compiling} foo v0.0.1 ({dir}) {running} target[..]b-[..] running 1 test test test_b ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, running = RUNNING, dir = prj.url()); assert_that(prj.cargo_process("test").arg("--test").arg("b"), execs().with_status(0).with_stdout(&expected_stdout)); }); test!(test_no_harness { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [[bin]] name = "foo" test = false [[test]] name = "bar" path = "foo.rs" harness = false "#) .file("src/main.rs", "fn main() {}") .file("foo.rs", "fn main() {}"); assert_that(p.cargo_process("test").arg("--").arg("--nocapture"), execs().with_status(0) .with_stdout(&format!("\ {compiling} foo v0.0.1 ({dir}) {running} target[..]bar-[..] 
", compiling = COMPILING, running = RUNNING, dir = p.url()))); }); test!(selective_testing { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.d1] path = "d1" [dependencies.d2] path = "d2" [lib] name = "foo" doctest = false "#) .file("src/lib.rs", "") .file("d1/Cargo.toml", r#" [package] name = "d1" version = "0.0.1" authors = [] [lib] name = "d1" doctest = false "#) .file("d1/src/lib.rs", "") .file("d1/src/main.rs", "extern crate d1; fn main() {}") .file("d2/Cargo.toml", r#" [package] name = "d2" version = "0.0.1" authors = [] [lib] name = "d2" doctest = false "#) .file("d2/src/lib.rs", "") .file("d2/src/main.rs", "extern crate d2; fn main() {}"); p.build(); println!("d1"); assert_that(p.cargo("test").arg("-p").arg("d1"), execs().with_status(0) .with_stdout(&format!("\ {compiling} d1 v0.0.1 ({dir}) {running} target[..]d1-[..] running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured {running} target[..]d1-[..] running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, running = RUNNING, dir = p.url()))); println!("d2"); assert_that(p.cargo("test").arg("-p").arg("d2"), execs().with_status(0) .with_stdout(&format!("\ {compiling} d2 v0.0.1 ({dir}) {running} target[..]d2-[..] running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured {running} target[..]d2-[..] running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, running = RUNNING, dir = p.url()))); println!("whole"); assert_that(p.cargo("test"), execs().with_status(0) .with_stdout(&format!("\ {compiling} foo v0.0.1 ({dir}) {running} target[..]foo-[..] running 0 tests test result: ok. 
0 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, running = RUNNING, dir = p.url()))); }); test!(almost_cyclic_but_not_quite { let p = project("a") .file("Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] [dev-dependencies.b] path = "b" [dev-dependencies.c] path = "c" "#) .file("src/lib.rs", r#" #[cfg(test)] extern crate b; #[cfg(test)] extern crate c; "#) .file("b/Cargo.toml", r#" [package] name = "b" version = "0.0.1" authors = [] [dependencies.a] path = ".." "#) .file("b/src/lib.rs", r#" extern crate a; "#) .file("c/Cargo.toml", r#" [package] name = "c" version = "0.0.1" authors = [] "#) .file("c/src/lib.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(0)); assert_that(p.cargo("test"), execs().with_status(0)); }); test!(build_then_selective_test { let p = project("a") .file("Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] [dependencies.b] path = "b" "#) .file("src/lib.rs", "extern crate b;") .file("src/main.rs", "extern crate b; extern crate a; fn main() {}") .file("b/Cargo.toml", r#" [package] name = "b" version = "0.0.1" authors = [] "#) .file("b/src/lib.rs", ""); assert_that(p.cargo_process("build"), execs().with_status(0)); p.root().move_into_the_past().unwrap(); assert_that(p.cargo("test").arg("-p").arg("b"), execs().with_status(0)); }); test!(example_dev_dep { let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" version = "0.0.1" authors = [] [dev-dependencies.bar] path = "bar" "#) .file("src/lib.rs", r#" "#) .file("examples/e1.rs", r#" extern crate bar; fn main() { } "#) .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] "#) .file("bar/src/lib.rs", r#" // make sure this file takes awhile to compile macro_rules! f0( () => (1) ); macro_rules! f1( () => ({(f0!()) + (f0!())}) ); macro_rules! f2( () => ({(f1!()) + (f1!())}) ); macro_rules! f3( () => ({(f2!()) + (f2!())}) ); macro_rules! f4( () => ({(f3!()) + (f3!())}) ); macro_rules! 
f5( () => ({(f4!()) + (f4!())}) ); macro_rules! f6( () => ({(f5!()) + (f5!())}) ); macro_rules! f7( () => ({(f6!()) + (f6!())}) ); macro_rules! f8( () => ({(f7!()) + (f7!())}) ); pub fn bar() { f8!(); } "#); assert_that(p.cargo_process("test"), execs().with_status(0)); assert_that(p.cargo("run") .arg("--example").arg("e1").arg("--release").arg("-v"), execs().with_status(0)); }); test!(selective_testing_with_docs { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.d1] path = "d1" "#) .file("src/lib.rs", r#" /// ``` /// not valid rust /// ``` pub fn foo() {} "#) .file("d1/Cargo.toml", r#" [package] name = "d1" version = "0.0.1" authors = [] [lib] name = "d1" path = "d1.rs" "#) .file("d1/d1.rs", ""); p.build(); assert_that(p.cargo("test").arg("-p").arg("d1"), execs().with_status(0) .with_stdout(&format!("\ {compiling} d1 v0.0.1 ({dir}) {running} target[..]deps[..]d1[..] running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured {doctest} d1 running 0 tests test result: ok. 
0 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, running = RUNNING, dir = p.url(), doctest = DOCTEST))); }); test!(example_bin_same_name { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/bin/foo.rs", r#"fn main() { println!("bin"); }"#) .file("examples/foo.rs", r#"fn main() { println!("example"); }"#); assert_that(p.cargo_process("test").arg("--no-run").arg("-v"), execs().with_status(0) .with_stdout(&format!("\ {compiling} foo v0.0.1 ({dir}) {running} `rustc [..]` {running} `rustc [..]` ", compiling = COMPILING, running = RUNNING, dir = p.url()))); assert_that(&p.bin("foo"), is_not(existing_file())); assert_that(&p.bin("examples/foo"), existing_file()); assert_that(p.process(&p.bin("examples/foo")), execs().with_status(0).with_stdout("example\n")); assert_that(p.cargo("run"), execs().with_status(0) .with_stdout(&format!("\ {compiling} foo v0.0.1 ([..]) {running} [..] bin ", compiling = COMPILING, running = RUNNING))); assert_that(&p.bin("foo"), existing_file()); }); test!(test_with_example_twice { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/bin/foo.rs", r#"fn main() { println!("bin"); }"#) .file("examples/foo.rs", r#"fn main() { println!("example"); }"#); println!("first"); assert_that(p.cargo_process("test").arg("-v"), execs().with_status(0)); assert_that(&p.bin("examples/foo"), existing_file()); println!("second"); assert_that(p.cargo("test").arg("-v"), execs().with_status(0)); assert_that(&p.bin("examples/foo"), existing_file()); }); test!(example_with_dev_dep { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [lib] name = "foo" test = false doctest = false [dev-dependencies.a] path = "a" "#) .file("src/lib.rs", "") .file("examples/ex.rs", "extern crate a; fn main() {}") .file("a/Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] "#) 
.file("a/src/lib.rs", ""); assert_that(p.cargo_process("test").arg("-v"), execs().with_status(0) .with_stdout(&format!("\ [..] [..] [..] [..] {running} `rustc [..] --crate-name ex [..] --extern a=[..]` ", running = RUNNING))); }); test!(bin_is_preserved { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", "") .file("src/main.rs", "fn main() {}"); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0)); assert_that(&p.bin("foo"), existing_file()); println!("testing"); assert_that(p.cargo("test").arg("-v"), execs().with_status(0)); assert_that(&p.bin("foo"), existing_file()); }); test!(bad_example { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", ""); assert_that(p.cargo_process("run").arg("--example").arg("foo"), execs().with_status(101).with_stderr("\ no example target named `foo` ")); assert_that(p.cargo_process("run").arg("--bin").arg("foo"), execs().with_status(101).with_stderr("\ no bin target named `foo` ")); }); test!(doctest_feature { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [features] bar = [] "#) .file("src/lib.rs", r#" /// ```rust /// assert_eq!(foo::foo(), 1); /// ``` #[cfg(feature = "bar")] pub fn foo() -> i32 { 1 } "#); assert_that(p.cargo_process("test").arg("--features").arg("bar"), execs().with_status(0).with_stdout(format!("\ {compiling} foo [..] {running} target[..]foo[..] running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured {doctest} foo running 1 test test foo_0 ... ok test result: ok. 
1 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, running = RUNNING, doctest = DOCTEST))) }); test!(dashes_to_underscores { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo-bar" version = "0.0.1" authors = [] "#) .file("src/lib.rs", r#" /// ``` /// assert_eq!(foo_bar::foo(), 1); /// ``` pub fn foo() -> i32 { 1 } "#); assert_that(p.cargo_process("test").arg("-v"), execs().with_status(0)); }); test!(doctest_dev_dep { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dev-dependencies] b = { path = "b" } "#) .file("src/lib.rs", r#" /// ``` /// extern crate b; /// ``` pub fn foo() {} "#) .file("b/Cargo.toml", r#" [package] name = "b" version = "0.0.1" authors = [] "#) .file("b/src/lib.rs", ""); assert_that(p.cargo_process("test").arg("-v"), execs().with_status(0)); }); test!(filter_no_doc_tests { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", r#" /// ``` /// extern crate b; /// ``` pub fn foo() {} "#) .file("tests/foo.rs", ""); assert_that(p.cargo_process("test").arg("--test=foo"), execs().with_stdout(format!("\ {compiling} foo v0.0.1 ([..]) {running} target[..]debug[..]foo[..] running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, running = RUNNING))); }); test!(dylib_doctest { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [lib] name = "foo" crate-type = ["rlib", "dylib"] test = false "#) .file("src/lib.rs", r#" /// ``` /// foo::foo(); /// ``` pub fn foo() {} "#); assert_that(p.cargo_process("test"), execs().with_stdout(format!("\ {compiling} foo v0.0.1 ([..]) {doctest} foo running 1 test test foo_0 ... ok test result: ok. 
1 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, doctest = DOCTEST))); }); test!(dylib_doctest2 { // can't doctest dylibs as they're statically linked together let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [lib] name = "foo" crate-type = ["dylib"] test = false "#) .file("src/lib.rs", r#" /// ``` /// foo::foo(); /// ``` pub fn foo() {} "#); assert_that(p.cargo_process("test"), execs().with_stdout("")); }); test!(cyclic_dev_dep_doc_test { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dev-dependencies] bar = { path = "bar" } "#) .file("src/lib.rs", r#" //! ``` //! extern crate bar; //! ``` "#) .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [dependencies] foo = { path = ".." } "#) .file("bar/src/lib.rs", r#" extern crate foo; "#); assert_that(p.cargo_process("test"), execs().with_stdout(format!("\ {compiling} foo v0.0.1 ([..]) {compiling} bar v0.0.1 ([..]) {running} target[..]foo[..] running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured {doctest} foo running 1 test test _0 ... ok test result: ok. 
1 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, running = RUNNING, doctest = DOCTEST))) }); test!(dev_dep_with_build_script { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dev-dependencies] bar = { path = "bar" } "#) .file("src/lib.rs", "") .file("examples/foo.rs", "fn main() {}") .file("bar/Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] build = "build.rs" "#) .file("bar/src/lib.rs", "") .file("bar/build.rs", "fn main() {}"); assert_that(p.cargo_process("test"), execs().with_status(0)); }); test!(no_fail_fast { if !::can_panic() { return } let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", r#" pub fn add_one(x: i32) -> i32{ x + 1 } /// ```rust /// use foo::sub_one; /// assert_eq!(sub_one(101), 100); /// ``` pub fn sub_one(x: i32) -> i32{ x - 1 } "#) .file("tests/test_add_one.rs", r#" extern crate foo; use foo::*; #[test] fn add_one_test() { assert_eq!(add_one(1), 2); } #[test] fn fail_add_one_test() { assert_eq!(add_one(1), 1); } "#) .file("tests/test_sub_one.rs", r#" extern crate foo; use foo::*; #[test] fn sub_one_test() { assert_eq!(sub_one(1), 0); } "#); assert_that(p.cargo_process("test").arg("--no-fail-fast"), execs().with_status(101) .with_stdout_contains(format!("\ {compiling} foo v0.0.1 ([..]) {running} target[..]foo[..] running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured {running} target[..]test_add_one[..] ", compiling = COMPILING, running = RUNNING)) .with_stdout_contains(format!("\ test result: FAILED. 1 passed; 1 failed; 0 ignored; 0 measured {running} target[..]test_sub_one[..] running 1 test test sub_one_test ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {doctest} foo running 1 test test sub_one_0 ... ok test result: ok. 
1 passed; 0 failed; 0 ignored; 0 measured ", running = RUNNING, doctest = DOCTEST))) }); test!(test_multiple_packages { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.d1] path = "d1" [dependencies.d2] path = "d2" [lib] name = "foo" doctest = false "#) .file("src/lib.rs", "") .file("d1/Cargo.toml", r#" [package] name = "d1" version = "0.0.1" authors = [] [lib] name = "d1" doctest = false "#) .file("d1/src/lib.rs", "") .file("d2/Cargo.toml", r#" [package] name = "d2" version = "0.0.1" authors = [] [lib] name = "d2" doctest = false "#) .file("d2/src/lib.rs", ""); p.build(); assert_that(p.cargo("test").arg("-p").arg("d1").arg("-p").arg("d2"), execs().with_status(0) .with_stdout_contains(&format!("\ {running} target[..]debug[..]d1-[..] running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured ", running = RUNNING)) .with_stdout_contains(&format!("\ {running} target[..]debug[..]d2-[..] running 0 tests test result: ok. 
0 passed; 0 failed; 0 ignored; 0 measured ", running = RUNNING))); }); test!(bin_does_not_rebuild_tests { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] "#) .file("src/lib.rs", "") .file("src/main.rs", "fn main() {}") .file("tests/foo.rs", ""); p.build(); assert_that(p.cargo("test").arg("-v"), execs().with_status(0)); thread::sleep_ms(1000); File::create(&p.root().join("src/main.rs")).unwrap() .write_all(b"fn main() { 3; }").unwrap(); assert_that(p.cargo("test").arg("-v").arg("--no-run"), execs().with_status(0) .with_stdout(&format!("\ {compiling} foo v0.0.1 ([..]) {running} `rustc src[..]main.rs [..]` {running} `rustc src[..]main.rs [..]` ", compiling = COMPILING, running = RUNNING))); }); test!(selective_test_wonky_profile { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [profile.release] opt-level = 2 [dependencies] a = { path = "a" } "#) .file("src/lib.rs", "") .file("a/Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] "#) .file("a/src/lib.rs", ""); p.build(); assert_that(p.cargo("test").arg("-v").arg("--no-run").arg("--release") .arg("-p").arg("foo").arg("-p").arg("a"), execs().with_status(0)); }); test!(selective_test_optional_dep { let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies] a = { path = "a", optional = true } "#) .file("src/lib.rs", "") .file("a/Cargo.toml", r#" [package] name = "a" version = "0.0.1" authors = [] "#) .file("a/src/lib.rs", ""); p.build(); assert_that(p.cargo("test").arg("-v").arg("--no-run") .arg("--features").arg("a").arg("-p").arg("a"), execs().with_status(0).with_stdout(&format!("\ {compiling} a v0.0.1 ([..]) {running} `rustc a[..]src[..]lib.rs [..]` {running} `rustc a[..]src[..]lib.rs [..]` ", compiling = COMPILING, running = RUNNING))); }); 
cargo-0.8.0/tests/test_cargo_tool_paths.rs000066400000000000000000000074001264656333200207230ustar00rootroot00000000000000use support::{path2url, project, execs}; use support::{COMPILING, RUNNING}; use hamcrest::assert_that; fn setup() { } test!(pathless_tools { let target = ::rustc_host(); let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [lib] name = "foo" "#) .file("src/lib.rs", "") .file(".cargo/config", &format!(r#" [target.{}] ar = "nonexistent-ar" linker = "nonexistent-linker" "#, target)); assert_that(foo.cargo_process("build").arg("--verbose"), execs().with_stdout(&format!("\ {compiling} foo v0.0.1 ({url}) {running} `rustc [..] -C ar=nonexistent-ar -C linker=nonexistent-linker [..]` ", compiling = COMPILING, running = RUNNING, url = foo.url()))) }); test!(absolute_tools { let target = ::rustc_host(); // Escaped as they appear within a TOML config file let config = if cfg!(windows) { (r#"C:\\bogus\\nonexistent-ar"#, r#"C:\\bogus\\nonexistent-linker"#) } else { (r#"/bogus/nonexistent-ar"#, r#"/bogus/nonexistent-linker"#) }; let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [lib] name = "foo" "#) .file("src/lib.rs", "") .file(".cargo/config", &format!(r#" [target.{target}] ar = "{ar}" linker = "{linker}" "#, target = target, ar = config.0, linker = config.1)); let output = if cfg!(windows) { (r#"C:\bogus\nonexistent-ar"#, r#"C:\bogus\nonexistent-linker"#) } else { (r#"/bogus/nonexistent-ar"#, r#"/bogus/nonexistent-linker"#) }; assert_that(foo.cargo_process("build").arg("--verbose"), execs().with_stdout(&format!("\ {compiling} foo v0.0.1 ({url}) {running} `rustc [..] 
-C ar={ar} -C linker={linker} [..]` ", compiling = COMPILING, running = RUNNING, url = foo.url(), ar = output.0, linker = output.1))) }); test!(relative_tools { let target = ::rustc_host(); // Escaped as they appear within a TOML config file let config = if cfg!(windows) { (r#".\\nonexistent-ar"#, r#".\\tools\\nonexistent-linker"#) } else { (r#"./nonexistent-ar"#, r#"./tools/nonexistent-linker"#) }; // Funky directory structure to test that relative tool paths are made absolute // by reference to the `.cargo/..` directory and not to (for example) the CWD. let origin = project("origin") .file("foo/Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [lib] name = "foo" "#) .file("foo/src/lib.rs", "") .file(".cargo/config", &format!(r#" [target.{target}] ar = "{ar}" linker = "{linker}" "#, target = target, ar = config.0, linker = config.1)); let foo_path = origin.root().join("foo"); let foo_url = path2url(foo_path.clone()); let prefix = origin.root().into_os_string().into_string().unwrap(); let output = if cfg!(windows) { (format!(r#"{}\.\nonexistent-ar"#, prefix), format!(r#"{}\.\tools\nonexistent-linker"#, prefix)) } else { (format!(r#"{}/./nonexistent-ar"#, prefix), format!(r#"{}/./tools/nonexistent-linker"#, prefix)) }; assert_that(origin.cargo_process("build").cwd(foo_path).arg("--verbose"), execs().with_stdout(&format!("\ {compiling} foo v0.0.1 ({url}) {running} `rustc [..] 
-C ar={ar} -C linker={linker} [..]` ", compiling = COMPILING, running = RUNNING, url = foo_url, ar = output.0, linker = output.1))) }); cargo-0.8.0/tests/test_cargo_verify_project.rs000066400000000000000000000030121264656333200215740ustar00rootroot00000000000000use support::{project, execs, main_file, basic_bin_manifest}; use hamcrest::{assert_that}; fn setup() {} fn verify_project_success_output() -> String { r#"{"success":"true"}"#.into() } test!(cargo_verify_project_path_to_cargo_toml_relative { let p = project("foo") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])); assert_that(p.cargo_process("verify-project") .arg("--manifest-path").arg("foo/Cargo.toml") .cwd(p.root().parent().unwrap()), execs().with_status(0) .with_stdout(verify_project_success_output())); }); test!(cargo_verify_project_path_to_cargo_toml_absolute { let p = project("foo") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])); assert_that(p.cargo_process("verify-project") .arg("--manifest-path").arg(p.root().join("Cargo.toml")) .cwd(p.root().parent().unwrap()), execs().with_status(0) .with_stdout(verify_project_success_output())); }); test!(cargo_verify_project_cwd { let p = project("foo") .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])); assert_that(p.cargo_process("verify-project") .cwd(p.root()), execs().with_status(0) .with_stdout(verify_project_success_output())); }); cargo-0.8.0/tests/test_cargo_version.rs000066400000000000000000000010131264656333200202260ustar00rootroot00000000000000use support::{project, execs}; use hamcrest::assert_that; use cargo; fn setup() {} test!(simple { let p = project("foo"); assert_that(p.cargo_process("version"), execs().with_status(0).with_stdout(&format!("{}\n", cargo::version()))); assert_that(p.cargo_process("--version"), execs().with_status(0).with_stdout(&format!("{}\n", cargo::version()))); }); 
cargo-0.8.0/tests/test_shell.rs000066400000000000000000000056571264656333200165170ustar00rootroot00000000000000use std::io::prelude::*; use std::io; use std::sync::{Arc, Mutex}; use term::{Terminal, TerminfoTerminal, color}; use hamcrest::{assert_that}; use cargo::core::shell::{Shell, ShellConfig}; use cargo::core::shell::ColorConfig::{Auto,Always, Never}; use support::{Tap, execs, shell_writes}; fn setup() { } struct Sink(Arc>>); impl Write for Sink { fn write(&mut self, data: &[u8]) -> io::Result { Write::write(&mut *self.0.lock().unwrap(), data) } fn flush(&mut self) -> io::Result<()> { Ok(()) } } test!(non_tty { let config = ShellConfig { color_config: Auto, tty: false }; let a = Arc::new(Mutex::new(Vec::new())); Shell::create(Box::new(Sink(a.clone())), config).tap(|shell| { shell.say("Hey Alex", color::RED).unwrap(); }); let buf = a.lock().unwrap().clone(); assert_that(&buf[..], shell_writes("Hey Alex\n")); }); test!(color_explicitly_disabled { let term = TerminfoTerminal::new(Vec::new()); if term.is_none() { return } let config = ShellConfig { color_config: Never, tty: true }; let a = Arc::new(Mutex::new(Vec::new())); Shell::create(Box::new(Sink(a.clone())), config).tap(|shell| { shell.say("Hey Alex", color::RED).unwrap(); }); let buf = a.lock().unwrap().clone(); assert_that(&buf[..], shell_writes("Hey Alex\n")); }); test!(colored_shell { let term = TerminfoTerminal::new(Vec::new()); if term.is_none() { return } let config = ShellConfig { color_config: Auto, tty: true }; let a = Arc::new(Mutex::new(Vec::new())); Shell::create(Box::new(Sink(a.clone())), config).tap(|shell| { shell.say("Hey Alex", color::RED).unwrap(); }); let buf = a.lock().unwrap().clone(); assert_that(&buf[..], shell_writes(colored_output("Hey Alex\n", color::RED).unwrap())); }); test!(color_explicitly_enabled { let term = TerminfoTerminal::new(Vec::new()); if term.is_none() { return } let config = ShellConfig { color_config: Always, tty: false }; let a = Arc::new(Mutex::new(Vec::new())); 
Shell::create(Box::new(Sink(a.clone())), config).tap(|shell| { shell.say("Hey Alex", color::RED).unwrap(); }); let buf = a.lock().unwrap().clone(); assert_that(&buf[..], shell_writes(colored_output("Hey Alex\n", color::RED).unwrap())); }); test!(no_term { // Verify that shell creation is successful when $TERM does not exist. assert_that(::cargo_process().env_remove("TERM"), execs().with_stderr("")); }); fn colored_output(string: &str, color: color::Color) -> io::Result { let mut term = TerminfoTerminal::new(Vec::new()).unwrap(); try!(term.reset()); try!(term.fg(color)); try!(write!(&mut term, "{}", string)); try!(term.reset()); try!(term.flush()); Ok(String::from_utf8_lossy(term.get_ref()).to_string()) } cargo-0.8.0/tests/tests.rs000066400000000000000000000044461264656333200155060ustar00rootroot00000000000000extern crate bufstream; extern crate cargo; extern crate filetime; extern crate flate2; extern crate git2; extern crate hamcrest; extern crate libc; extern crate rustc_serialize; extern crate tar; extern crate tempdir; extern crate term; extern crate url; #[cfg(windows)] extern crate kernel32; #[cfg(windows)] extern crate winapi; #[macro_use] extern crate log; use cargo::util::Rustc; use std::ffi::OsStr; mod support; macro_rules! 
test { ($name:ident $expr:expr) => ( #[test] fn $name() { ::support::paths::setup(); setup(); $expr; } ) } mod test_bad_config; mod test_bad_manifest_path; mod test_cargo; mod test_cargo_bench; mod test_cargo_build_auth; mod test_cargo_build_lib; mod test_cargo_clean; mod test_cargo_compile; mod test_cargo_compile_custom_build; mod test_cargo_compile_git_deps; mod test_cargo_compile_path_deps; mod test_cargo_compile_plugins; mod test_cargo_cross_compile; mod test_cargo_doc; mod test_cargo_features; mod test_cargo_fetch; mod test_cargo_freshness; mod test_cargo_generate_lockfile; mod test_cargo_install; mod test_cargo_new; mod test_cargo_package; mod test_cargo_profiles; mod test_cargo_publish; mod test_cargo_read_manifest; mod test_cargo_registry; mod test_cargo_run; mod test_cargo_rustc; mod test_cargo_rustdoc; mod test_cargo_search; mod test_cargo_test; mod test_cargo_tool_paths; mod test_cargo_verify_project; mod test_cargo_version; mod test_shell; thread_local!(static RUSTC: Rustc = Rustc::new("rustc").unwrap()); fn rustc_host() -> String { RUSTC.with(|r| r.host.clone()) } fn is_nightly() -> bool { RUSTC.with(|r| { r.verbose_version.contains("-nightly") || r.verbose_version.contains("-dev") }) } fn can_panic() -> bool { RUSTC.with(|r| !(r.host.contains("msvc") && !r.host.contains("x86_64"))) } fn process>(t: T) -> cargo::util::ProcessBuilder { let mut p = cargo::util::process(t.as_ref()); p.cwd(&support::paths::root()) .env("HOME", &support::paths::home()) .env_remove("CARGO_HOME") .env_remove("CARGO_TARGET_DIR") // we assume 'target' .env_remove("MSYSTEM"); // assume cmd.exe everywhere on windows return p } fn cargo_process() -> cargo::util::ProcessBuilder { process(&support::cargo_dir().join("cargo")) }